VirtualBox

Changeset 101682 in vbox for trunk/src/VBox/VMM/include


Timestamp:
Oct 31, 2023 12:18:44 PM
Author:
vboxsync
svn:sync-xref-src-repo-rev:
159774
Message:

VMM/IEM,VBox/err.h: Refactored the native recompiler code to throw/longjmp on errors rather than returning UINT32_MAX/UINT8_MAX. This should make it easier to pinpoint why recompilation fails (we've got an RC) and get rid of hundreds of AssertReturn statements that clutter up the code and introduce lots of unnecessary branches. bugref:10371
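
In effect, emit helpers that used to hand back UINT32_MAX (or UINT8_MAX) on failure now longjmp/throw a VBox status code, so callers no longer need an AssertReturn after every call. A minimal before/after sketch of the pattern (the emitted byte is purely illustrative, not code from this changeset):

    /* Before (r101640): each helper call required an explicit failure check. */
    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    AssertReturn(pbCodeBuf, UINT32_MAX);
    pbCodeBuf[off++] = 0x90;

    /* After (r101682): the helper throws/longjmps a VBox status code on
       failure, so its result can be used directly. */
    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    pbCodeBuf[off++] = 0x90;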

Location:
trunk/src/VBox/VMM/include
Files:
2 edited

  • trunk/src/VBox/VMM/include/IEMInternal.h

    r101640 r101682  
    18451845#  define IEM_TRY_SETJMP(a_pVCpu, a_rcTarget) \
    18461846        jmp_buf  JmpBuf; \
    1847         jmp_buf * volatile pSavedJmpBuf = pVCpu->iem.s.CTX_SUFF(pJmpBuf); \
    1848         pVCpu->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
     1847        jmp_buf * volatile pSavedJmpBuf = (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf); \
     1848        (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
    18491849        if ((rcStrict = setjmp(JmpBuf)) == 0)
    18501850#  define IEM_TRY_SETJMP_AGAIN(a_pVCpu, a_rcTarget) \
    1851         pSavedJmpBuf = pVCpu->iem.s.CTX_SUFF(pJmpBuf); \
    1852         pVCpu->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
     1851        pSavedJmpBuf = (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf); \
     1852        (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
    18531853        if ((rcStrict = setjmp(JmpBuf)) == 0)
    18541854#  define IEM_CATCH_LONGJMP_BEGIN(a_pVCpu, a_rcTarget) \
     
    55905590
    55915591/* Native recompiler public bits: */
    5592 PIEMTB iemNativeRecompile(PVMCPUCC pVCpu, PIEMTB pTb);
    5593 int    iemExecMemAllocatorInit(PVMCPU pVCpu, uint64_t cbMax, uint64_t cbInitial, uint32_t cbChunk);
    5594 void   iemExecMemAllocatorFree(PVMCPU pVCpu, void *pv, size_t cb);
     5592DECLHIDDEN(PIEMTB)  iemNativeRecompile(PVMCPUCC pVCpu, PIEMTB pTb) RT_NOEXCEPT;
     5593int                 iemExecMemAllocatorInit(PVMCPU pVCpu, uint64_t cbMax, uint64_t cbInitial, uint32_t cbChunk);
     5594void                iemExecMemAllocatorFree(PVMCPU pVCpu, void *pv, size_t cb);
    55955595
    55965596
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r101661 r101682  
    621621    /** The condition nesting stack. */
    622622    IEMNATIVECOND               aCondStack[2];
     623
     624#ifndef IEM_WITH_THROW_CATCH
     625    /** Pointer to the setjmp/longjmp buffer if we're not using C++ exceptions
     626     *  for recompilation error handling. */
     627    jmp_buf                     JmpBuf;
     628#endif
    623629} IEMRECOMPILERSTATE;
    624630/** Pointer to a native recompiler state. */
     
    626632
    627633
     634/** @def IEMNATIVE_TRY_SETJMP
     635 * Wrapper around setjmp / try, hiding all the ugly differences.
     636 *
     637 * @note Use with extreme care as this is a fragile macro.
     638 * @param   a_pReNative The native recompile state.
     639 * @param   a_rcTarget  The variable that should receive the status code in case
     640 *                      of a longjmp/throw.
     641 */
     642/** @def IEMNATIVE_CATCH_LONGJMP_BEGIN
     643 * Start wrapper for catch / setjmp-else.
     644 *
     645 * This will set up a scope.
     646 *
     647 * @note Use with extreme care as this is a fragile macro.
     648 * @param   a_pReNative The native recompile state.
     649 * @param   a_rcTarget  The variable that should receive the status code in case
     650 *                      of a longjmp/throw.
     651 */
     652/** @def IEMNATIVE_CATCH_LONGJMP_END
     653 * End wrapper for catch / setjmp-else.
     654 *
     655 * This will close the scope set up by IEMNATIVE_CATCH_LONGJMP_BEGIN and clean
     656 * up the state.
     657 *
     658 * @note Use with extreme care as this is a fragile macro.
     659 * @param   a_pReNative The native recompile state.
     660 */
     661/** @def IEMNATIVE_DO_LONGJMP
     662 *
     663 * Wrapper around longjmp / throw.
     664 *
     665 * @param   a_pReNative The native recompile state.
     666 * @param   a_rc        The status code jump back with / throw.
     667 */
     668#ifdef IEM_WITH_THROW_CATCH
     669# define IEMNATIVE_TRY_SETJMP(a_pReNative, a_rcTarget) \
     670       a_rcTarget = VINF_SUCCESS; \
     671       try
     672# define IEMNATIVE_CATCH_LONGJMP_BEGIN(a_pReNative, a_rcTarget) \
     673       catch (int rcThrown) \
     674       { \
     675           a_rcTarget = rcThrown
     676# define IEMNATIVE_CATCH_LONGJMP_END(a_pReNative) \
     677       } \
     678       ((void)0)
     679# define IEMNATIVE_DO_LONGJMP(a_pReNative, a_rc)  throw int(a_rc)
     680#else  /* !IEM_WITH_THROW_CATCH */
     681# define IEMNATIVE_TRY_SETJMP(a_pReNative, a_rcTarget) \
     682       if ((a_rcTarget = setjmp((a_pReNative)->JmpBuf)) == 0)
     683# define IEMNATIVE_CATCH_LONGJMP_BEGIN(a_pReNative, a_rcTarget) \
     684       else \
     685       { \
     686           ((void)0)
     687# define IEMNATIVE_CATCH_LONGJMP_END(a_pReNative) \
     688       }
     689# define IEMNATIVE_DO_LONGJMP(a_pReNative, a_rc)  longjmp((a_pReNative)->JmpBuf, (a_rc))
     690#endif /* !IEM_WITH_THROW_CATCH */
     691
     692
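Taken together, the new IEMNATIVE_TRY_SETJMP / IEMNATIVE_CATCH_LONGJMP_* macros above are meant to be used roughly like this at the top of the recompiler (a sketch only; the variable and worker names are illustrative, not taken from this changeset):

    uint32_t off       = 0;
    int      rcFailure = VINF_SUCCESS;
    IEMNATIVE_TRY_SETJMP(pReNative, rcFailure)
    {
        /* Emit helpers and workers called in here may IEMNATIVE_DO_LONGJMP()
           a VBox status code instead of returning UINT32_MAX. */
        off = pfnRecompWorker(pReNative, off, pCallEntry);
    }
    IEMNATIVE_CATCH_LONGJMP_BEGIN(pReNative, rcFailure);
        /* rcFailure now identifies why recompilation failed. */
    IEMNATIVE_CATCH_LONGJMP_END(pReNative);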
    628693/**
    629694 * Native recompiler worker for a threaded function.
    630695 *
    631  * @returns New code buffer offset, UINT32_MAX in case of failure.
     696 * @returns New code buffer offset; throws VBox status code in case of a failure.
    632697 * @param   pReNative   The native recompiler state.
    633698 * @param   off         The current code buffer offset.
    634699 * @param   pCallEntry  The threaded call entry.
    635700 *
    636  * @note    This is not allowed to throw anything atm.
    637  */
    638 typedef DECLCALLBACKTYPE(uint32_t, FNIEMNATIVERECOMPFUNC,(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    639                                                           PCIEMTHRDEDCALLENTRY pCallEntry));
     701 * @note    This may throw/longjmp VBox status codes (int) to abort compilation, so no RT_NOEXCEPT!
     702 */
     703typedef uint32_t (VBOXCALL FNIEMNATIVERECOMPFUNC)(PIEMRECOMPILERSTATE pReNative, uint32_t off, PCIEMTHRDEDCALLENTRY pCallEntry);
    640704/** Pointer to a native recompiler worker for a threaded function. */
    641705typedef FNIEMNATIVERECOMPFUNC *PFNIEMNATIVERECOMPFUNC;
    642706
    643 /** Defines a native recompiler worker for a threaded function. */
     707/** Defines a native recompiler worker for a threaded function.
     708 * @see FNIEMNATIVERECOMPFUNC  */
    644709#define IEM_DECL_IEMNATIVERECOMPFUNC_DEF(a_Name) \
    645     DECLCALLBACK(uint32_t) a_Name(PIEMRECOMPILERSTATE pReNative, uint32_t off, PCIEMTHRDEDCALLENTRY pCallEntry)
    646 /** Prototypes a native recompiler function for a threaded function. */
     710    uint32_t VBOXCALL a_Name(PIEMRECOMPILERSTATE pReNative, uint32_t off, PCIEMTHRDEDCALLENTRY pCallEntry)
     711
     712/** Prototypes a native recompiler function for a threaded function.
     713 * @see FNIEMNATIVERECOMPFUNC  */
    647714#define IEM_DECL_IEMNATIVERECOMPFUNC_PROTO(a_Name) FNIEMNATIVERECOMPFUNC a_Name
    648715
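A worker defined via IEM_DECL_IEMNATIVERECOMPFUNC_DEF can then rely on the throwing emit helpers without checking for UINT32_MAX; a hypothetical example (the function name and the emitted instruction are illustrative only):

    IEM_DECL_IEMNATIVERECOMPFUNC_DEF(iemNativeRecompFn_Example)
    {
        RT_NOREF(pCallEntry);
        /* iemNativeEmitGprZero may longjmp/throw a VBox status code, so the
           returned offset can be used without an AssertReturn check. */
        off = iemNativeEmitGprZero(pReNative, off, IEMNATIVE_REG_FIXED_TMP0);
        return off;
    }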
    649 DECLHIDDEN(uint32_t)        iemNativeLabelCreate(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType,
    650                                                  uint32_t offWhere = UINT32_MAX, uint16_t uData = 0) RT_NOEXCEPT;
    651 DECLHIDDEN(void)            iemNativeLabelDefine(PIEMRECOMPILERSTATE pReNative, uint32_t idxLabel, uint32_t offWhere) RT_NOEXCEPT;
    652 DECLHIDDEN(bool)            iemNativeAddFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, uint32_t idxLabel,
    653                                               IEMNATIVEFIXUPTYPE enmType, int8_t offAddend = 0) RT_NOEXCEPT;
    654 DECLHIDDEN(PIEMNATIVEINSTR) iemNativeInstrBufEnsureSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    655                                                         uint32_t cInstrReq) RT_NOEXCEPT;
    656 
    657 DECLHIDDEN(uint8_t)         iemNativeRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    658                                                  bool fPreferVolatile = true) RT_NOEXCEPT;
    659 DECLHIDDEN(uint8_t)         iemNativeRegAllocTmpImm(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t uImm,
    660                                                     bool fPreferVolatile = true) RT_NOEXCEPT;
    661 DECLHIDDEN(uint8_t)         iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    662                                                             IEMNATIVEGSTREG enmGstReg,
    663                                                             IEMNATIVEGSTREGUSE enmIntendedUse) RT_NOEXCEPT;
    664 DECLHIDDEN(uint8_t)         iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    665                                                                             IEMNATIVEGSTREG enmGstReg) RT_NOEXCEPT;
    666 
    667 DECLHIDDEN(uint8_t)         iemNativeRegAllocVar(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint8_t idxVar) RT_NOEXCEPT;
    668 DECLHIDDEN(uint32_t)        iemNativeRegAllocArgs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs) RT_NOEXCEPT;
    669 DECLHIDDEN(uint8_t)         iemNativeRegAssignRc(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
     716DECL_HIDDEN_THROW(uint32_t) iemNativeLabelCreate(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType,
     717                                                 uint32_t offWhere = UINT32_MAX, uint16_t uData = 0);
     718DECL_HIDDEN_THROW(void)     iemNativeLabelDefine(PIEMRECOMPILERSTATE pReNative, uint32_t idxLabel, uint32_t offWhere);
     719DECL_HIDDEN_THROW(void)     iemNativeAddFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, uint32_t idxLabel,
     720                                              IEMNATIVEFIXUPTYPE enmType, int8_t offAddend = 0);
     721DECL_HIDDEN_THROW(PIEMNATIVEINSTR) iemNativeInstrBufEnsureSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t cInstrReq);
     722
     723DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, bool fPreferVolatile = true);
     724DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpImm(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t uImm,
     725                                                    bool fPreferVolatile = true);
     726DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     727                                                            IEMNATIVEGSTREG enmGstReg, IEMNATIVEGSTREGUSE enmIntendedUse);
     728DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     729                                                                            IEMNATIVEGSTREG enmGstReg);
     730
     731DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocVar(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint8_t idxVar);
     732DECL_HIDDEN_THROW(uint32_t) iemNativeRegAllocArgs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs);
     733DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAssignRc(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg);
    670734DECLHIDDEN(void)            iemNativeRegFree(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
    671735DECLHIDDEN(void)            iemNativeRegFreeTmp(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
    672736DECLHIDDEN(void)            iemNativeRegFreeTmpImm(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
    673737DECLHIDDEN(void)            iemNativeRegFreeAndFlushMask(PIEMRECOMPILERSTATE pReNative, uint32_t fHstRegMask) RT_NOEXCEPT;
    674 DECLHIDDEN(uint32_t)        iemNativeRegFlushPendingWrites(PIEMRECOMPILERSTATE pReNative, uint32_t off) RT_NOEXCEPT;
    675 
    676 DECLHIDDEN(uint32_t)        iemNativeEmitLoadGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    677                                                                  uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg) RT_NOEXCEPT;
    678 DECLHIDDEN(uint32_t)        iemNativeEmitCheckCallRetAndPassUp(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    679                                                                uint8_t idxInstr) RT_NOEXCEPT;
     738DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushPendingWrites(PIEMRECOMPILERSTATE pReNative, uint32_t off);
     739
     740DECL_HIDDEN_THROW(uint32_t) iemNativeEmitLoadGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     741                                                                 uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg);
     742DECL_HIDDEN_THROW(uint32_t) iemNativeEmitCheckCallRetAndPassUp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr);
    680743
    681744
     
    688751 *          allocation size.
    689752 *
    690  * @returns Pointer to the instruction output buffer on success, NULL on
    691  *          failure.
     753 * @returns Pointer to the instruction output buffer on success; throws VBox
     754 *          status code on failure, so no need to check it.
    692755 * @param   pReNative   The native recompile state.
    693756 * @param   off         Current instruction offset.  Works safely for UINT32_MAX
     
    696759 *                      overestimate this a bit.
    697760 */
    698 DECL_FORCE_INLINE(PIEMNATIVEINSTR) iemNativeInstrBufEnsure(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t cInstrReq)
    699 {
    700     uint64_t const offChecked = off + (uint64_t)cInstrReq;
     761DECL_FORCE_INLINE_THROW(PIEMNATIVEINSTR)
     762iemNativeInstrBufEnsure(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t cInstrReq)
     763{
     764    uint64_t const offChecked = off + (uint64_t)cInstrReq; /** @todo may reconsider the need for UINT32_MAX safety... */
    701765    if (RT_LIKELY(offChecked <= pReNative->cInstrBufAlloc))
    702766    {
     
    721785 * in the disassembly.
    722786 */
    723 DECLINLINE(uint32_t) iemNativeEmitMarker(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t uInfo)
     787DECL_INLINE_THROW(uint32_t)
     788iemNativeEmitMarker(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t uInfo)
    724789{
    725790#ifdef RT_ARCH_AMD64
    726791    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    727     AssertReturn(pbCodeBuf, UINT32_MAX);
    728792    if (uInfo == 0)
    729793    {
     
    743807    }
    744808#elif RT_ARCH_ARM64
     809    /* nop */
    745810    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    746     AssertReturn(pu32CodeBuf, UINT32_MAX);
    747     /* nop */
    748811    pu32CodeBuf[off++] = 0xd503201f;
    749812
     
    764827 * Emits setting a GPR to zero.
    765828 */
    766 DECLINLINE(uint32_t) iemNativeEmitGprZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr)
    767 {
    768 #ifdef RT_ARCH_AMD64
     829DECL_INLINE_THROW(uint32_t)
     830iemNativeEmitGprZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr)
     831{
     832#ifdef RT_ARCH_AMD64
     833    /* xor gpr32, gpr32 */
    769834    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    770     AssertReturn(pbCodeBuf, UINT32_MAX);
    771     /* xor gpr32, gpr32 */
    772835    if (iGpr >= 8)
    773836        pbCodeBuf[off++] = X86_OP_REX_R | X86_OP_REX_B;
     
    776839
    777840#elif RT_ARCH_ARM64
     841    /* mov gpr, #0x0 */
    778842    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    779     AssertReturn(pu32CodeBuf, UINT32_MAX);
    780     /* mov gpr, #0x0 */
    781843    pu32CodeBuf[off++] = UINT32_C(0xd2800000) | iGpr;
    782844
     
    792854 * Emits loading a constant into a 64-bit GPR
    793855 */
    794 DECLINLINE(uint32_t) iemNativeEmitLoadGprImm64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint64_t uImm64)
     856DECL_INLINE_THROW(uint32_t)
     857iemNativeEmitLoadGprImm64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint64_t uImm64)
    795858{
    796859    if (!uImm64)
     
    802865        /* mov gpr, imm32 */
    803866        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    804         AssertReturn(pbCodeBuf, UINT32_MAX);
    805867        if (iGpr >= 8)
    806868            pbCodeBuf[off++] = X86_OP_REX_B;
     
    815877        /* mov gpr, imm64 */
    816878        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10);
    817         AssertReturn(pbCodeBuf, UINT32_MAX);
    818879        if (iGpr < 8)
    819880            pbCodeBuf[off++] = X86_OP_REX_W;
     
    833894#elif RT_ARCH_ARM64
    834895    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    835     AssertReturn(pu32CodeBuf, UINT32_MAX);
    836896
    837897    /*
     
    891951 *       only the ARM64 version does that.
    892952 */
    893 DECLINLINE(uint32_t) iemNativeEmitLoadGpr8Imm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint8_t uImm8)
     953DECL_INLINE_THROW(uint32_t)
     954iemNativeEmitLoadGpr8Imm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint8_t uImm8)
    894955{
    895956#ifdef RT_ARCH_AMD64
    896957    /* mov gpr, imm8 */
    897958    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    898     AssertReturn(pbCodeBuf, UINT32_MAX);
    899959    if (iGpr >= 8)
    900960        pbCodeBuf[off++] = X86_OP_REX_B;
     
    907967    /* movz gpr, imm16, lsl #0 */
    908968    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    909     AssertReturn(pu32CodeBuf, UINT32_MAX);
    910969    pu32CodeBuf[off++] = UINT32_C(0xd2800000) | (UINT32_C(0) << 21) | ((uint32_t)uImm8 << 5) | iGpr;
    911970
     
    922981 * Common bit of iemNativeEmitLoadGprFromVCpuU64 and friends.
    923982 */
    924 DECL_FORCE_INLINE(uint32_t) iemNativeEmitGprByVCpuDisp(uint8_t *pbCodeBuf, uint32_t off, uint8_t iGprReg, uint32_t offVCpu)
     983DECL_FORCE_INLINE(uint32_t)
     984iemNativeEmitGprByVCpuDisp(uint8_t *pbCodeBuf, uint32_t off, uint8_t iGprReg, uint32_t offVCpu)
    925985{
    926986    if (offVCpu < 128)
     
    9431003 * Common bit of iemNativeEmitLoadGprFromVCpuU64 and friends.
    9441004 */
    945 DECL_FORCE_INLINE(uint32_t) iemNativeEmitGprByVCpuLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
    946                                                        uint32_t offVCpu, ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
     1005DECL_FORCE_INLINE_THROW(uint32_t)
     1006iemNativeEmitGprByVCpuLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
     1007                           uint32_t offVCpu, ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
    9471008{
    9481009    /*
     
    9551016        /* Use the unsigned variant of ldr Wt, [<Xn|SP>, #off]. */
    9561017        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    957         AssertReturn(pu32CodeBuf, UINT32_MAX);
    9581018        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRUOff(enmOperation, iGpr, IEMNATIVE_REG_FIXED_PVMCPU, offVCpu / cbData);
    9591019    }
     
    9611021    {
    9621022        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    963         AssertReturn(pu32CodeBuf, UINT32_MAX);
    9641023        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRUOff(enmOperation, iGpr, IEMNATIVE_REG_FIXED_PCPUMCTX,
    9651024                                                      (offVCpu - RT_UOFFSETOF(VMCPU, cpum.GstCtx)) / cbData);
     
    9731032
    9741033        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    975         AssertReturn(pu32CodeBuf, UINT32_MAX);
    9761034        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(enmOperation, iGpr, IEMNATIVE_REG_FIXED_PVMCPU, IEMNATIVE_REG_FIXED_TMP);
    9771035    }
     
    9851043 * Emits a 64-bit GPR load of a VCpu value.
    9861044 */
    987 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1045DECL_INLINE_THROW(uint32_t)
     1046iemNativeEmitLoadGprFromVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    9881047{
    9891048#ifdef RT_ARCH_AMD64
    9901049    /* mov reg64, mem64 */
    9911050    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    992     AssertReturn(pbCodeBuf, UINT32_MAX);
    9931051    if (iGpr < 8)
    9941052        pbCodeBuf[off++] = X86_OP_REX_W;
     
    10131071 * @note Bits 32 thru 63 in the GPR will be zero after the operation.
    10141072 */
    1015 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromVCpuU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1073DECL_INLINE_THROW(uint32_t)
     1074iemNativeEmitLoadGprFromVCpuU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    10161075{
    10171076#ifdef RT_ARCH_AMD64
    10181077    /* mov reg32, mem32 */
    10191078    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1020     AssertReturn(pbCodeBuf, UINT32_MAX);
    10211079    if (iGpr >= 8)
    10221080        pbCodeBuf[off++] = X86_OP_REX_R;
     
    10391097 * @note Bits 16 thru 63 in the GPR will be zero after the operation.
    10401098 */
    1041 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1099DECL_INLINE_THROW(uint32_t)
     1100iemNativeEmitLoadGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    10421101{
    10431102#ifdef RT_ARCH_AMD64
    10441103    /* movzx reg32, mem16 */
    10451104    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1046     AssertReturn(pbCodeBuf, UINT32_MAX);
    10471105    if (iGpr >= 8)
    10481106        pbCodeBuf[off++] = X86_OP_REX_R;
     
    10661124 * @note Bits 8 thru 63 in the GPR will be zero after the operation.
    10671125 */
    1068 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromVCpuU8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1126DECL_INLINE_THROW(uint32_t)
     1127iemNativeEmitLoadGprFromVCpuU8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    10691128{
    10701129#ifdef RT_ARCH_AMD64
    10711130    /* movzx reg32, mem8 */
    10721131    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1073     AssertReturn(pbCodeBuf, UINT32_MAX);
    10741132    if (iGpr >= 8)
    10751133        pbCodeBuf[off++] = X86_OP_REX_R;
     
    10921150 * Emits a store of a GPR value to a 64-bit VCpu field.
    10931151 */
    1094 DECLINLINE(uint32_t) iemNativeEmitStoreGprToVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1152DECL_INLINE_THROW(uint32_t)
     1153iemNativeEmitStoreGprToVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    10951154{
    10961155#ifdef RT_ARCH_AMD64
    10971156    /* mov mem64, reg64 */
    10981157    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1099     AssertReturn(pbCodeBuf, UINT32_MAX);
    11001158    if (iGpr < 8)
    11011159        pbCodeBuf[off++] = X86_OP_REX_W;
     
    11191177 * Emits a store of a GPR value to a 32-bit VCpu field.
    11201178 */
    1121 DECLINLINE(uint32_t) iemNativeEmitStoreGprFromVCpuU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1179DECL_INLINE_THROW(uint32_t)
     1180iemNativeEmitStoreGprFromVCpuU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    11221181{
    11231182#ifdef RT_ARCH_AMD64
    11241183    /* mov mem32, reg32 */
    11251184    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1126     AssertReturn(pbCodeBuf, UINT32_MAX);
    11271185    if (iGpr >= 8)
    11281186        pbCodeBuf[off++] = X86_OP_REX_R;
     
    11441202 * Emits a store of a GPR value to a 16-bit VCpu field.
    11451203 */
    1146 DECLINLINE(uint32_t) iemNativeEmitStoreGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1204DECL_INLINE_THROW(uint32_t)
     1205iemNativeEmitStoreGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    11471206{
    11481207#ifdef RT_ARCH_AMD64
    11491208    /* mov mem16, reg16 */
    11501209    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1151     AssertReturn(pbCodeBuf, UINT32_MAX);
    11521210    pbCodeBuf[off++] = X86_OP_PRF_SIZE_OP;
    11531211    if (iGpr >= 8)
     
    11701228 * Emits a store of a GPR value to a 8-bit VCpu field.
    11711229 */
    1172 DECLINLINE(uint32_t) iemNativeEmitStoreGprFromVCpuU8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1230DECL_INLINE_THROW(uint32_t)
     1231iemNativeEmitStoreGprFromVCpuU8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    11731232{
    11741233#ifdef RT_ARCH_AMD64
    11751234    /* mov mem8, reg8 */
    11761235    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1177     AssertReturn(pbCodeBuf, UINT32_MAX);
    11781236    if (iGpr >= 8)
    11791237        pbCodeBuf[off++] = X86_OP_REX_R;
     
    11951253 * Emits a gprdst = gprsrc load.
    11961254 */
    1197 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
     1255DECL_INLINE_THROW(uint32_t)
     1256iemNativeEmitLoadGprFromGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
    11981257{
    11991258#ifdef RT_ARCH_AMD64
    12001259    /* mov gprdst, gprsrc */
    1201     uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1202     AssertReturn(pbCodeBuf, UINT32_MAX);
     1260    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    12031261    if ((iGprDst | iGprSrc) >= 8)
    12041262        pbCodeBuf[off++] = iGprDst < 8  ? X86_OP_REX_W | X86_OP_REX_B
     
    12111269
    12121270#elif RT_ARCH_ARM64
     1271    /* mov dst, src;   alias for: orr dst, xzr, src */
    12131272    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1214     AssertReturn(pu32CodeBuf, UINT32_MAX);
    1215     /* mov dst, src;   alias for: orr dst, xzr, src */
    12161273    pu32CodeBuf[off++] = UINT32_C(0xaa000000) | ((uint32_t)iGprSrc << 16) | ((uint32_t)ARMV8_A64_REG_XZR << 5) | iGprDst;
    12171274
     
    12531310 * Emits a 64-bit GRP load instruction with an BP relative source address.
    12541311 */
    1255 DECLINLINE(uint32_t) iemNativeEmitLoadGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
     1312DECL_INLINE_THROW(uint32_t)
     1313iemNativeEmitLoadGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
    12561314{
    12571315    /* mov gprdst, qword [rbp + offDisp]  */
    12581316    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1259     AssertReturn(pbCodeBuf, UINT32_MAX);
    12601317    if (iGprDst < 8)
    12611318        pbCodeBuf[off++] = X86_OP_REX_W;
     
    12721329 * Emits a 32-bit GRP load instruction with an BP relative source address.
    12731330 */
    1274 DECLINLINE(uint32_t) iemNativeEmitLoadGprByBpU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
     1331DECL_INLINE_THROW(uint32_t)
     1332iemNativeEmitLoadGprByBpU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
    12751333{
    12761334    /* mov gprdst, dword [rbp + offDisp]  */
    12771335    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1278     AssertReturn(pbCodeBuf, UINT32_MAX);
    12791336    if (iGprDst >= 8)
    12801337        pbCodeBuf[off++] = X86_OP_REX_R;
     
    12891346 * Emits a load effective address to a GRP with an BP relative source address.
    12901347 */
    1291 DECLINLINE(uint32_t) iemNativeEmitLeaGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
     1348DECL_INLINE_THROW(uint32_t)
     1349iemNativeEmitLeaGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
    12921350{
    12931351    /* lea gprdst, [rbp + offDisp] */
    12941352    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1295     AssertReturn(pbCodeBuf, UINT32_MAX);
    12961353    if (iGprDst < 8)
    12971354        pbCodeBuf[off++] = X86_OP_REX_W;
     
    13091366 * @note May trash IEMNATIVE_REG_FIXED_TMP0.
    13101367 */
    1311 DECLINLINE(uint32_t) iemNativeEmitStoreGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offDisp, uint8_t iGprSrc)
     1368DECL_INLINE_THROW(uint32_t)
     1369iemNativeEmitStoreGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offDisp, uint8_t iGprSrc)
    13121370{
    13131371#ifdef RT_ARCH_AMD64
    13141372    /* mov qword [rbp + offDisp], gprdst */
    13151373    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1316     AssertReturn(pbCodeBuf, UINT32_MAX);
    13171374    if (iGprSrc < 8)
    13181375        pbCodeBuf[off++] = X86_OP_REX_W;
     
    13271384        /* str w/ unsigned imm12 (scaled) */
    13281385        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1329         AssertReturn(pu32CodeBuf, UINT32_MAX);
    13301386        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_St_Dword, iGprSrc,
    13311387                                                      ARMV8_A64_REG_BP, (uint32_t)offDisp / 8);
     
    13351391        /* stur w/ signed imm9 (unscaled) */
    13361392        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1337         AssertReturn(pu32CodeBuf, UINT32_MAX);
    13381393        pu32CodeBuf[off++] = Armv8A64MkInstrSturLdur(kArmv8A64InstrLdStType_St_Dword, iGprSrc, ARMV8_A64_REG_BP, offDisp);
    13391394    }
     
    13431398        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_REG_FIXED_TMP0, (uint32_t)offDisp);
    13441399        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1345         AssertReturn(pu32CodeBuf, UINT32_MAX);
    13461400        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(kArmv8A64InstrLdStType_St_Dword, iGprSrc, ARMV8_A64_REG_BP,
    13471401                                                       IEMNATIVE_REG_FIXED_TMP0, kArmv8A64InstrLdStExtend_Sxtw);
     
    13611415 * @note May trash IEMNATIVE_REG_FIXED_TMP0.
    13621416 */
    1363 DECLINLINE(uint32_t) iemNativeEmitStoreImm64ByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offDisp, uint64_t uImm64)
     1417DECL_INLINE_THROW(uint32_t)
     1418iemNativeEmitStoreImm64ByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offDisp, uint64_t uImm64)
    13641419{
    13651420#ifdef RT_ARCH_AMD64
     
    13671422    {
    13681423        /* mov qword [rbp + offDisp], imm32 - sign extended */
    1369         uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 11);
    1370         AssertReturn(pbCodeBuf, UINT32_MAX);
    1371 
     1424        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 11);
    13721425        pbCodeBuf[off++] = X86_OP_REX_W;
    13731426        pbCodeBuf[off++] = 0xc7;
     
    14041457 * Common bit of iemNativeEmitLoadGprByGpr and friends.
    14051458 */
    1406 DECL_FORCE_INLINE(uint32_t) iemNativeEmitGprByGprDisp(uint8_t *pbCodeBuf, uint32_t off,
    1407                                                       uint8_t iGprReg, uint8_t iGprBase, int32_t offDisp)
     1459DECL_FORCE_INLINE(uint32_t)
     1460iemNativeEmitGprByGprDisp(uint8_t *pbCodeBuf, uint32_t off, uint8_t iGprReg, uint8_t iGprBase, int32_t offDisp)
    14081461{
    14091462    if (offDisp == 0 && (iGprBase & 7) != X86_GREG_xBP) /* Can use encoding w/o displacement field. */
     
    14361489 * Common bit of iemNativeEmitLoadGprFromVCpuU64 and friends.
    14371490 */
    1438 DECL_FORCE_INLINE(uint32_t) iemNativeEmitGprByGprLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
    1439                                                       uint8_t iGprBase, int32_t offDisp,
    1440                                                       ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
     1491DECL_FORCE_INLINE_THROW(uint32_t)
     1492iemNativeEmitGprByGprLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
     1493                          uint8_t iGprBase, int32_t offDisp, ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
    14411494{
    14421495    /*
     
    14491502        /* Use the unsigned variant of ldr Wt, [<Xn|SP>, #off]. */
    14501503        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1451         AssertReturn(pu32CodeBuf, UINT32_MAX);
    14521504        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRUOff(enmOperation, iGprReg, iGprBase, (uint32_t)offDisp / cbData);
    14531505    }
     
    14581510        /** @todo reduce by offVCpu by >> 3 or >> 2? if it saves instructions? */
    14591511        uint8_t const idxTmpReg = iemNativeRegAllocTmpImm(pReNative, off, (uint64)offDisp);
    1460         AssertReturn(idxTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    14611512
    14621513        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1463         AssertReturn(pu32CodeBuf, UINT32_MAX);
    14641514        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(enmOperation, iGprReg, iGprBase, idxTmpReg);
    14651515
     
    14751525 * Emits a 64-bit GPR load via a GPR base address with a displacement.
    14761526 */
    1477 DECLINLINE(uint32_t) iemNativeEmitLoadGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    1478                                               uint8_t iGprDst, uint8_t iGprBase, int32_t offDisp)
     1527DECL_INLINE_THROW(uint32_t)
     1528iemNativeEmitLoadGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprBase, int32_t offDisp)
    14791529{
    14801530#ifdef RT_ARCH_AMD64
    14811531    /* mov reg64, mem64 */
    14821532    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1483     AssertReturn(pbCodeBuf, UINT32_MAX);
    14841533    pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprBase < 8 ? 0 : X86_OP_REX_B);
    14851534    pbCodeBuf[off++] = 0x8b;
     
    15011550 * @note Bits 63 thru 32 in @a iGprDst will be cleared.
    15021551 */
    1503 DECLINLINE(uint32_t) iemNativeEmitLoadGpr32ByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    1504                                                 uint8_t iGprDst, uint8_t iGprBase, int32_t offDisp)
     1552DECL_INLINE_THROW(uint32_t)
     1553iemNativeEmitLoadGpr32ByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprBase, int32_t offDisp)
    15051554{
    15061555#ifdef RT_ARCH_AMD64
    15071556    /* mov reg32, mem32 */
    15081557    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1509     AssertReturn(pbCodeBuf, UINT32_MAX);
    15101558    if (iGprDst >= 8 || iGprBase >= 8)
    15111559        pbCodeBuf[off++] = (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprBase < 8 ? 0 : X86_OP_REX_B);
     
    15331581 * Emits a 64-bit GPR subtract with a signed immediate subtrahend.
    15341582 */
    1535 DECLINLINE(uint32_t) iemNativeEmitSubGprImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t iSubtrahend)
     1583DECL_INLINE_THROW(uint32_t)
     1584iemNativeEmitSubGprImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t iSubtrahend)
    15361585{
    15371586    /* sub gprdst, imm8/imm32 */
    15381587    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1539     AssertReturn(pbCodeBuf, UINT32_MAX);
    15401588    if (iGprDst < 8)
    15411589        pbCodeBuf[off++] = X86_OP_REX_W;
     
    15671615 * @note The AMD64 version sets flags.
    15681616 */
    1569 DECLINLINE(uint32_t ) iemNativeEmitAddTwoGprs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprAddend)
     1617DECL_INLINE_THROW(uint32_t)
     1618iemNativeEmitAddTwoGprs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprAddend)
    15701619{
    15711620#if defined(RT_ARCH_AMD64)
    15721621    /* add Gv,Ev */
    15731622    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1574     AssertReturn(pbCodeBuf, UINT32_MAX);
    15751623    pbCodeBuf[off++] = (iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_R)
    15761624                     | (iGprAddend < 8 ? 0 : X86_OP_REX_B);
     
    15801628#elif defined(RT_ARCH_ARM64)
    15811629    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1582     AssertReturn(pu32CodeBuf, UINT32_MAX);
    15831630    pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(false /*fSub*/, iGprDst, iGprDst, iGprAddend);
    15841631
     
    15941641 * Emits a 64-bit GPR additions with a 8-bit signed immediate.
    15951642 */
    1596 DECLINLINE(uint32_t ) iemNativeEmitAddGprImm8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int8_t iImm8)
     1643DECL_INLINE_THROW(uint32_t)
     1644iemNativeEmitAddGprImm8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int8_t iImm8)
    15971645{
    15981646#if defined(RT_ARCH_AMD64)
     1647    /* add or inc */
    15991648    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    1600     AssertReturn(pbCodeBuf, UINT32_MAX);
    1601     /* add or inc */
    16021649    pbCodeBuf[off++] = iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_B;
    16031650    if (iImm8 != 1)
     
    16151662#elif defined(RT_ARCH_ARM64)
    16161663    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1617     AssertReturn(pu32CodeBuf, UINT32_MAX);
    16181664    if (iImm8 >= 0)
    16191665        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iGprDst, iGprDst, (uint8_t)iImm8);
     
    16331679 * @note Bits 32 thru 63 in the GPR will be zero after the operation.
    16341680 */
    1635 DECLINLINE(uint32_t ) iemNativeEmitAddGpr32Imm8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int8_t iImm8)
     1681DECL_INLINE_THROW(uint32_t)
     1682iemNativeEmitAddGpr32Imm8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int8_t iImm8)
    16361683{
    16371684#if defined(RT_ARCH_AMD64)
     1685    /* add or inc */
    16381686    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    1639     AssertReturn(pbCodeBuf, UINT32_MAX);
    1640     /* add or inc */
    16411687    if (iGprDst >= 8)
    16421688        pbCodeBuf[off++] = X86_OP_REX_B;
     
    16551701#elif defined(RT_ARCH_ARM64)
    16561702    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1657     AssertReturn(pu32CodeBuf, UINT32_MAX);
    16581703    if (iImm8 >= 0)
    16591704        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iGprDst, iGprDst, (uint8_t)iImm8, false /*f64Bit*/);
     
    16721717 * Emits a 64-bit GPR additions with a 64-bit signed addend.
    16731718 */
    1674 DECLINLINE(uint32_t ) iemNativeEmitAddGprImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int64_t iAddend)
     1719DECL_INLINE_THROW(uint32_t)
     1720iemNativeEmitAddGprImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int64_t iAddend)
    16751721{
    16761722#if defined(RT_ARCH_AMD64)
     
    16821728        /* add grp, imm32 */
    16831729        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1684         AssertReturn(pbCodeBuf, UINT32_MAX);
    16851730        pbCodeBuf[off++] = iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_B;
    16861731        pbCodeBuf[off++] = 0x81;
     
    16951740        /* Best to use a temporary register to deal with this in the simplest way: */
    16961741        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, (uint64_t)iAddend);
    1697         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    16981742
    16991743        /* add dst, tmpreg  */
    17001744        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1701         AssertReturn(pbCodeBuf, UINT32_MAX);
    17021745        pbCodeBuf[off++] = (iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_R)
    17031746                         | (iTmpReg < 8 ? 0 : X86_OP_REX_B);
     
    17121755    {
    17131756        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1714         AssertReturn(pu32CodeBuf, UINT32_MAX);
    17151757        if (iAddend >= 0)
    17161758            pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iGprDst, iGprDst, (uint32_t)iAddend);
     
    17221764        /* Use temporary register for the immediate. */
    17231765        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, (uint64_t)iAddend);
    1724         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    17251766
    17261767        /* add gprdst, gprdst, tmpreg */
    17271768        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1728         AssertReturn(pu32CodeBuf, UINT32_MAX);
    17291769        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(false /*fSub*/, iGprDst, iGprDst, iTmpReg);
    17301770
     
    17441784 * @note Bits 32 thru 63 in the GPR will be zero after the operation.
    17451785 */
    1746 DECLINLINE(uint32_t ) iemNativeEmitAddGpr32Imm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t iAddend)
     1786DECL_INLINE_THROW(uint32_t)
     1787iemNativeEmitAddGpr32Imm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t iAddend)
    17471788{
    17481789#if defined(RT_ARCH_AMD64)
     
    17521793    /* add grp, imm32 */
    17531794    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1754     AssertReturn(pbCodeBuf, UINT32_MAX);
    17551795    if (iGprDst >= 8)
    17561796        pbCodeBuf[off++] = X86_OP_REX_B;
     
    17661806    {
    17671807        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1768         AssertReturn(pu32CodeBuf, UINT32_MAX);
    17691808        if (iAddend >= 0)
    17701809            pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iGprDst, iGprDst, (uint32_t)iAddend, false /*f64Bit*/);
     
    17761815        /* Use temporary register for the immediate. */
    17771816        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, (uint32_t)iAddend);
    1778         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    17791817
    17801818        /* add gprdst, gprdst, tmpreg */
    17811819        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1782         AssertReturn(pu32CodeBuf, UINT32_MAX);
    17831820        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(false /*fSub*/, iGprDst, iGprDst, iTmpReg, false /*f64Bit*/);
    17841821
     
    18021839 * Emits code for clearing bits 16 thru 63 in the GPR.
    18031840 */
    1804 DECLINLINE(uint32_t ) iemNativeEmitClear16UpGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst)
     1841DECL_INLINE_THROW(uint32_t)
     1842iemNativeEmitClear16UpGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst)
    18051843{
    18061844#if defined(RT_ARCH_AMD64)
    18071845    /* movzx reg32, reg16 */
    18081846    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    1809     AssertReturn(pbCodeBuf, UINT32_MAX);
    18101847    if (iGprDst >= 8)
    18111848        pbCodeBuf[off++] = X86_OP_REX_B | X86_OP_REX_R;
     
    18161853#elif defined(RT_ARCH_ARM64)
    18171854    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1818     AssertReturn(pu32CodeBuf, UINT32_MAX);
    18191855# if 1
    18201856    pu32CodeBuf[off++] = Armv8A64MkInstrUxth(iGprDst, iGprDst);
     
    18371873 *       and ARM64 hosts.
    18381874 */
    1839 DECLINLINE(uint32_t ) iemNativeEmitAndGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc,
    1840                                               bool fSetFlags = false)
     1875DECL_INLINE_THROW(uint32_t)
     1876iemNativeEmitAndGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc, bool fSetFlags = false)
    18411877{
    18421878#if defined(RT_ARCH_AMD64)
    18431879    /* and Gv, Ev */
    18441880    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1845     AssertReturn(pbCodeBuf, UINT32_MAX);
    18461881    pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B);
    18471882    pbCodeBuf[off++] = 0x23;
     
    18511886#elif defined(RT_ARCH_ARM64)
    18521887    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1853     AssertReturn(pu32CodeBuf, UINT32_MAX);
    18541888    if (!fSetFlags)
    18551889        pu32CodeBuf[off++] = Armv8A64MkInstrAnd(iGprDst, iGprDst, iGprSrc);
     
    18681902 * Emits code for AND'ing two 32-bit GPRs.
    18691903 */
    1870 DECLINLINE(uint32_t ) iemNativeEmitAndGpr32ByGpr32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
     1904DECL_INLINE_THROW(uint32_t)
     1905iemNativeEmitAndGpr32ByGpr32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
    18711906{
    18721907#if defined(RT_ARCH_AMD64)
    18731908    /* and Gv, Ev */
    18741909    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1875     AssertReturn(pbCodeBuf, UINT32_MAX);
    18761910    if (iGprDst >= 8 || iGprSrc >= 8)
    18771911        pbCodeBuf[off++] = (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B);
     
    18811915#elif defined(RT_ARCH_ARM64)
    18821916    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1883     AssertReturn(pu32CodeBuf, UINT32_MAX);
    18841917    pu32CodeBuf[off++] = Armv8A64MkInstrAnd(iGprDst, iGprDst, iGprSrc, false /*f64Bit*/);
    18851918
     
    18981931 *       and ARM64 hosts.
    18991932 */
    1900 DECLINLINE(uint32_t ) iemNativeEmitAndGprByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint64_t uImm,
    1901                                               bool fSetFlags = false)
     1933DECL_INLINE_THROW(uint32_t)
     1934iemNativeEmitAndGprByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint64_t uImm, bool fSetFlags = false)
    19021935{
    19031936#if defined(RT_ARCH_AMD64)
     
    19061939        /* and Ev, imm8 */
    19071940        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    1908         AssertReturn(pbCodeBuf, UINT32_MAX);
    19091941        pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R);
    19101942        pbCodeBuf[off++] = 0x83;
     
    19161948        /* and Ev, imm32 */
    19171949        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1918         AssertReturn(pbCodeBuf, UINT32_MAX);
    19191950        pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R);
    19201951        pbCodeBuf[off++] = 0x81;
     
    19291960        /* Use temporary register for the 64-bit immediate. */
    19301961        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    1931         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    19321962        off = iemNativeEmitAndGprByGpr(pReNative, off, iGprDst, iTmpReg);
    19331963        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
     
    19411971    {
    19421972        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1943         AssertReturn(pu32CodeBuf, UINT32_MAX);
    19441973        if (!fSetFlags)
    19451974            pu32CodeBuf[off++] = Armv8A64MkInstrAndImm(iGprDst, iGprDst, uImmNandS, uImmR);
     
    19511980        /* Use temporary register for the 64-bit immediate. */
    19521981        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    1953         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    19541982        off = iemNativeEmitAndGprByGpr(pReNative, off, iGprDst, iTmpReg, fSetFlags);
    19551983        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
     
    19671995 * Emits code for AND'ing an 32-bit GPRs with a constant.
    19681996 */
    1969 DECLINLINE(uint32_t ) iemNativeEmitAndGpr32ByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint32_t uImm,
    1970                                                 bool fSetFlags = false)
     1997DECL_INLINE_THROW(uint32_t)
     1998iemNativeEmitAndGpr32ByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint32_t uImm, bool fSetFlags = false)
    19711999{
    19722000#if defined(RT_ARCH_AMD64)
    19732001    /* and Ev, imm */
    19742002    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1975     AssertReturn(pbCodeBuf, UINT32_MAX);
    19762003    if (iGprDst >= 8)
    19772004        pbCodeBuf[off++] = X86_OP_REX_R;
     
    19992026    {
    20002027        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2001         AssertReturn(pu32CodeBuf, UINT32_MAX);
    20022028        if (!fSetFlags)
    20032029            pu32CodeBuf[off++] = Armv8A64MkInstrAndImm(iGprDst, iGprDst, uImmNandS, uImmR, false /*f64Bit*/);
     
    20092035        /* Use temporary register for the 64-bit immediate. */
    20102036        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    2011         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    20122037        if (!fSetFlags)
    20132038            off = iemNativeEmitAndGpr32ByGpr32(pReNative, off, iGprDst, iTmpReg);
     
    20282053 * Emits code for XOR'ing two 64-bit GPRs.
    20292054 */
    2030 DECLINLINE(uint32_t ) iemNativeEmitXorGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
     2055DECL_INLINE_THROW(uint32_t)
     2056iemNativeEmitXorGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
    20312057{
    20322058#if defined(RT_ARCH_AMD64)
    20332059    /* and Gv, Ev */
    20342060    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    2035     AssertReturn(pbCodeBuf, UINT32_MAX);
    20362061    pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B);
    20372062    pbCodeBuf[off++] = 0x33;
     
    20402065#elif defined(RT_ARCH_ARM64)
    20412066    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2042     AssertReturn(pu32CodeBuf, UINT32_MAX);
    20432067    pu32CodeBuf[off++] = Armv8A64MkInstrEor(iGprDst, iGprDst, iGprSrc);
    20442068
     
    20542078 * Emits code for XOR'ing two 32-bit GPRs.
    20552079 */
    2056 DECLINLINE(uint32_t ) iemNativeEmitXorGpr32ByGpr32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
     2080DECL_INLINE_THROW(uint32_t)
     2081iemNativeEmitXorGpr32ByGpr32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
    20572082{
    20582083#if defined(RT_ARCH_AMD64)
    20592084    /* and Gv, Ev */
    20602085    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    2061     AssertReturn(pbCodeBuf, UINT32_MAX);
    20622086    if (iGprDst >= 8 || iGprSrc >= 8)
    20632087        pbCodeBuf[off++] = (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B);
     
    20672091#elif defined(RT_ARCH_ARM64)
    20682092    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2069     AssertReturn(pu32CodeBuf, UINT32_MAX);
    20702093    pu32CodeBuf[off++] = Armv8A64MkInstrEor(iGprDst, iGprDst, iGprSrc, false /*f64Bit*/);
    20712094
     
    20852108 * Emits code for shifting a GPR a fixed number of bits to the left.
    20862109 */
    2087 DECLINLINE(uint32_t ) iemNativeEmitShiftGprLeft(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
     2110DECL_INLINE_THROW(uint32_t)
     2111iemNativeEmitShiftGprLeft(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
    20882112{
    20892113    Assert(cShift > 0 && cShift < 64);
     
    20922116    /* shl dst, cShift */
    20932117    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2094     AssertReturn(pbCodeBuf, UINT32_MAX);
    20952118    pbCodeBuf[off++] = iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_B;
    20962119    if (cShift != 1)
     
    21082131#elif defined(RT_ARCH_ARM64)
    21092132    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2110     AssertReturn(pu32CodeBuf, UINT32_MAX);
    21112133    pu32CodeBuf[off++] = Armv8A64MkInstrLslImm(iGprDst, iGprDst, cShift);
    21122134
     
    21222144 * Emits code for shifting a 32-bit GPR a fixed number of bits to the left.
    21232145 */
    2124 DECLINLINE(uint32_t ) iemNativeEmitShiftGpr32Left(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
     2146DECL_INLINE_THROW(uint32_t)
     2147iemNativeEmitShiftGpr32Left(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
    21252148{
    21262149    Assert(cShift > 0 && cShift < 32);
     
    21292152    /* shl dst, cShift */
    21302153    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2131     AssertReturn(pbCodeBuf, UINT32_MAX);
    21322154    if (iGprDst >= 8)
    21332155        pbCodeBuf[off++] = X86_OP_REX_B;
     
    21462168#elif defined(RT_ARCH_ARM64)
    21472169    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2148     AssertReturn(pu32CodeBuf, UINT32_MAX);
    21492170    pu32CodeBuf[off++] = Armv8A64MkInstrLslImm(iGprDst, iGprDst, cShift, false /*64Bit*/);
    21502171
     
    21602181 * Emits code for (unsigned) shifting a GPR a fixed number of bits to the right.
    21612182 */
    2162 DECLINLINE(uint32_t ) iemNativeEmitShiftGprRight(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
     2183DECL_INLINE_THROW(uint32_t)
     2184iemNativeEmitShiftGprRight(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
    21632185{
    21642186    Assert(cShift > 0 && cShift < 64);
     
    21672189    /* shr dst, cShift */
    21682190    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2169     AssertReturn(pbCodeBuf, UINT32_MAX);
    21702191    pbCodeBuf[off++] = iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_B;
    21712192    if (cShift != 1)
     
    21832204#elif defined(RT_ARCH_ARM64)
    21842205    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2185     AssertReturn(pu32CodeBuf, UINT32_MAX);
    21862206    pu32CodeBuf[off++] = Armv8A64MkInstrLsrImm(iGprDst, iGprDst, cShift);
    21872207
     
    21982218 * right.
    21992219 */
    2200 DECLINLINE(uint32_t ) iemNativeEmitShiftGpr32Right(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
     2220DECL_INLINE_THROW(uint32_t)
     2221iemNativeEmitShiftGpr32Right(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
    22012222{
    22022223    Assert(cShift > 0 && cShift < 32);
     
    22052226    /* shr dst, cShift */
    22062227    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2207     AssertReturn(pbCodeBuf, UINT32_MAX);
    22082228    if (iGprDst >= 8)
    22092229        pbCodeBuf[off++] = X86_OP_REX_B;
     
    22222242#elif defined(RT_ARCH_ARM64)
    22232243    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2224     AssertReturn(pu32CodeBuf, UINT32_MAX);
    22252244    pu32CodeBuf[off++] = Armv8A64MkInstrLsrImm(iGprDst, iGprDst, cShift, false /*64Bit*/);
    22262245
     
    22432262 * Emits an ARM64 compare instruction.
    22442263 */
    2245 DECLINLINE(uint32_t) iemNativeEmitCmpArm64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight,
    2246                                            bool f64Bit = true, uint32_t cShift = 0,
    2247                                           ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsr)
     2264DECL_INLINE_THROW(uint32_t)
     2265iemNativeEmitCmpArm64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight,
     2266                      bool f64Bit = true, uint32_t cShift = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsr)
    22482267{
    22492268    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2250     AssertReturn(pu32CodeBuf, UINT32_MAX);
    22512269    pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(true /*fSub*/, ARMV8_A64_REG_XZR /*iRegResult*/, iGprLeft, iGprRight,
    22522270                                                  f64Bit, true /*fSetFlags*/, cShift, enmShift);
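
The compare above is not a dedicated instruction: it is a flag-setting subtract that discards its result into XZR. A self-contained sketch of the resulting 64-bit register-register encoding (illustrative helper, not a VirtualBox one):

#include <cstdint>

/* cmp Xn, Xm  ==  subs xzr, Xn, Xm: sf=1, op=1 (sub), S=1 (set flags), Rd=31 (XZR). */
static inline uint32_t a64MkCmpReg_Sketch(uint8_t iRegN, uint8_t iRegM)
{
    return UINT32_C(0xeb00001f)
         | ((uint32_t)(iRegM & 31) << 16)    /* Rm */
         | ((uint32_t)(iRegN & 31) << 5);    /* Rn */
}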
     
    22612279 * with conditional instruction.
    22622280 */
    2263 DECLINLINE(uint32_t) iemNativeEmitCmpGprWithGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight)
     2281DECL_INLINE_THROW(uint32_t)
     2282iemNativeEmitCmpGprWithGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight)
    22642283{
    22652284#ifdef RT_ARCH_AMD64
    22662285    /* cmp Gv, Ev */
    22672286    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    2268     AssertReturn(pbCodeBuf, UINT32_MAX);
    22692287    pbCodeBuf[off++] = X86_OP_REX_W | (iGprLeft >= 8 ? X86_OP_REX_R : 0) | (iGprRight >= 8 ? X86_OP_REX_B : 0);
    22702288    pbCodeBuf[off++] = 0x3b;
     
    22862304 * with conditional instruction.
    22872305 */
    2288 DECLINLINE(uint32_t) iemNativeEmitCmpGpr32WithGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2289                                                   uint8_t iGprLeft, uint8_t iGprRight)
     2306DECL_INLINE_THROW(uint32_t)
     2307iemNativeEmitCmpGpr32WithGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight)
    22902308{
    22912309#ifdef RT_ARCH_AMD64
    22922310    /* cmp Gv, Ev */
    22932311    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    2294     AssertReturn(pbCodeBuf, UINT32_MAX);
    22952312    if (iGprLeft >= 8 || iGprRight >= 8)
    22962313        pbCodeBuf[off++] = (iGprLeft >= 8 ? X86_OP_REX_R : 0) | (iGprRight >= 8 ? X86_OP_REX_B : 0);
     
    23132330 * flags/whatever for use with conditional instruction.
    23142331 */
    2315 DECLINLINE(uint32_t) iemNativeEmitCmpGprWithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint64_t uImm)
     2332DECL_INLINE_THROW(uint32_t)
     2333iemNativeEmitCmpGprWithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint64_t uImm)
    23162334{
    23172335#ifdef RT_ARCH_AMD64
     
    23202338        /* cmp Ev, Ib */
    23212339        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2322         AssertReturn(pbCodeBuf, UINT32_MAX);
    23232340        pbCodeBuf[off++] = X86_OP_REX_W | (iGprLeft >= 8 ? X86_OP_REX_B : 0);
    23242341        pbCodeBuf[off++] = 0x83;
     
    23302347        /* cmp Ev, imm */
    23312348        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    2332         AssertReturn(pbCodeBuf, UINT32_MAX);
    23332349        pbCodeBuf[off++] = X86_OP_REX_W | (iGprLeft >= 8 ? X86_OP_REX_B : 0);
    23342350        pbCodeBuf[off++] = 0x81;
     
    23432359    {
    23442360        /* Use temporary register for the immediate. */
    2345         uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    2346         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    2347 
     2361        uint8_t const iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    23482362        off = iemNativeEmitCmpGprWithGpr(pReNative, off, iGprLeft, iTmpReg);
    2349 
    23502363        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
    23512364    }
     
    23562369    {
    23572370        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2358         AssertReturn(pu32CodeBuf, UINT32_MAX);
    23592371        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
    23602372                                                         true /*64Bit*/, true /*fSetFlags*/);
     
    23632375    {
    23642376        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2365         AssertReturn(pu32CodeBuf, UINT32_MAX);
    23662377        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
    23672378                                                         true /*64Bit*/, true /*fSetFlags*/, true /*fShift12*/);
     
    23712382        /* Use temporary register for the immediate. */
    23722383        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    2373         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    2374 
    23752384        off = iemNativeEmitCmpGprWithGpr(pReNative, off, iGprLeft, iTmpReg);
    2376 
    23772385        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
    23782386    }
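
The compare-with-immediate emitter above has to distinguish three cases because the A64 SUBS immediate form only takes a 12-bit unsigned value, optionally shifted left by 12 bits; everything else is materialized in a temporary register and compared register to register. A hedged sketch of that classification (illustrative names; the exact conditions are elided in the hunk above):

#include <cstdint>

enum CmpImmForm_Sketch { kCmpImm12_Sketch, kCmpImm12Shifted_Sketch, kCmpNeedTmpReg_Sketch };

static inline CmpImmForm_Sketch a64PickCmpImmForm_Sketch(uint64_t uImm)
{
    if (uImm < UINT64_C(0x1000))               /* fits the plain 12-bit immediate */
        return kCmpImm12_Sketch;
    if (!(uImm & ~UINT64_C(0xfff000)))         /* only bits 23:12 set, fits when shifted */
        return kCmpImm12Shifted_Sketch;
    return kCmpNeedTmpReg_Sketch;              /* load into a temp register, then cmp reg,reg */
}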
     
    23912399 * flags/whatever for use with conditional instruction.
    23922400 */
    2393 DECLINLINE(uint32_t) iemNativeEmitCmpGpr32WithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint32_t uImm)
     2401DECL_INLINE_THROW(uint32_t)
     2402iemNativeEmitCmpGpr32WithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint32_t uImm)
    23942403{
    23952404#ifdef RT_ARCH_AMD64
    23962405    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    2397     AssertReturn(pbCodeBuf, UINT32_MAX);
    23982406    if (iGprLeft >= 8)
    23992407        pbCodeBuf[off++] = X86_OP_REX_B;
     
    24222430    {
    24232431        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2424         AssertReturn(pu32CodeBuf, UINT32_MAX);
    24252432        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
    24262433                                                         false /*64Bit*/, true /*fSetFlags*/);
     
    24292436    {
    24302437        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2431         AssertReturn(pu32CodeBuf, UINT32_MAX);
    24322438        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
    24332439                                                         false /*64Bit*/, true /*fSetFlags*/, true /*fShift12*/);
     
    24372443        /* Use temporary register for the immediate. */
    24382444        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    2439         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    2440 
    24412445        off = iemNativeEmitCmpGpr32WithGpr(pReNative, off, iGprLeft, iTmpReg);
    2442 
    24432446        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
    24442447    }
     
    24612464 * Emits a JMP rel32 / B imm19 to the given label.
    24622465 */
    2463 DECLINLINE(uint32_t) iemNativeEmitJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2466DECL_INLINE_THROW(uint32_t)
     2467iemNativeEmitJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    24642468{
    24652469    Assert(idxLabel < pReNative->cLabels);
     
    24672471#ifdef RT_ARCH_AMD64
    24682472    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    2469     AssertReturn(pbCodeBuf, UINT32_MAX);
    24702473    if (pReNative->paLabels[idxLabel].off != UINT32_MAX)
    24712474    {
     
    24892492    {
    24902493        pbCodeBuf[off++] = 0xe9;                    /* jmp rel32 */
    2491         AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_Rel32, -4), UINT32_MAX);
     2494        iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_Rel32, -4);
    24922495        pbCodeBuf[off++] = 0xfe;
    24932496        pbCodeBuf[off++] = 0xff;
     
    24992502#elif defined(RT_ARCH_ARM64)
    25002503    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2501     AssertReturn(pu32CodeBuf, UINT32_MAX);
    25022504    if (pReNative->paLabels[idxLabel].off != UINT32_MAX)
    25032505        pu32CodeBuf[off++] = Armv8A64MkInstrB(pReNative->paLabels[idxLabel].off - off);
    25042506    else
    25052507    {
    2506         AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5), UINT32_MAX);
     2508        iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5);
    25072509        pu32CodeBuf[off++] = Armv8A64MkInstrB(-1);
    25082510    }
     
    25192521 * Emits a JMP rel32 / B imm19 to a new undefined label.
    25202522 */
    2521 DECLINLINE(uint32_t) iemNativeEmitJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2522                                                 IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2523DECL_INLINE_THROW(uint32_t)
     2524iemNativeEmitJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    25232525{
    25242526    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    2525     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    25262527    return iemNativeEmitJmpToLabel(pReNative, off, idxLabel);
    25272528}
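
The jump emitters above lean on the label/fixup machinery: a branch to a label whose offset is still unknown emits a placeholder displacement and records a fixup, which gets patched once the label is defined. Below is a minimal self-contained sketch of that scheme for the AMD64 jmp rel32 case; all *_Sketch names are illustrative and do not match the VirtualBox data structures.

#include <cstdint>
#include <cstring>
#include <vector>

struct Fixup_Sketch { uint32_t offPatch; uint32_t idxLabel; };

struct Labels_Sketch
{
    std::vector<uint32_t>     aoffLabels;   /* UINT32_MAX = label not yet defined */
    std::vector<Fixup_Sketch> aFixups;      /* unresolved branch sites */
};

/* Emit "jmp rel32" to a possibly still undefined label. */
static uint32_t emitJmpToLabel_Sketch(uint8_t *pbBuf, uint32_t off, Labels_Sketch &rLabels, uint32_t idxLabel)
{
    pbBuf[off++] = 0xe9;                                     /* jmp rel32 */
    if (rLabels.aoffLabels[idxLabel] != UINT32_MAX)
    {
        int32_t const offRel = (int32_t)rLabels.aoffLabels[idxLabel] - (int32_t)(off + 4);
        memcpy(&pbBuf[off], &offRel, sizeof(offRel));
    }
    else
    {
        rLabels.aFixups.push_back({ off, idxLabel });        /* patch later */
        memset(&pbBuf[off], 0, 4);
    }
    return off + 4;
}

/* Define the label at the given offset and resolve the pending fixups for it. */
static void defineLabel_Sketch(uint8_t *pbBuf, Labels_Sketch &rLabels, uint32_t idxLabel, uint32_t offLabel)
{
    rLabels.aoffLabels[idxLabel] = offLabel;
    for (Fixup_Sketch const &rFixup : rLabels.aFixups)
        if (rFixup.idxLabel == idxLabel)
        {
            int32_t const offRel = (int32_t)offLabel - (int32_t)(rFixup.offPatch + 4);
            memcpy(&pbBuf[rFixup.offPatch], &offRel, sizeof(offRel));
        }
}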
     
    25582559 * Emits a Jcc rel32 / B.cc imm19 to the given label (ASSUMED requiring fixup).
    25592560 */
    2560 DECLINLINE(uint32_t) iemNativeEmitJccToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2561                                             uint32_t idxLabel, IEMNATIVEINSTRCOND enmCond)
     2561DECL_INLINE_THROW(uint32_t)
     2562iemNativeEmitJccToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel, IEMNATIVEINSTRCOND enmCond)
    25622563{
    25632564    Assert(idxLabel < pReNative->cLabels);
     
    25662567    /* jcc rel32 */
    25672568    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    2568     AssertReturn(pbCodeBuf, UINT32_MAX);
    25692569    pbCodeBuf[off++] = 0x0f;
    25702570    pbCodeBuf[off++] = (uint8_t)enmCond | 0x80;
    2571     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_Rel32, -4), UINT32_MAX);
     2571    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_Rel32, -4);
    25722572    pbCodeBuf[off++] = 0x00;
    25732573    pbCodeBuf[off++] = 0x00;
     
    25772577#elif defined(RT_ARCH_ARM64)
    25782578    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2579     AssertReturn(pu32CodeBuf, UINT32_MAX);
    2580     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5), UINT32_MAX);
     2579    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5);
    25812580    pu32CodeBuf[off++] = Armv8A64MkInstrBCond(enmCond, -1);
    25822581
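
The conditional variant uses the long Jcc form: 0x0F followed by 0x80 plus the condition code, then a 32-bit displacement that is either known immediately or left for a fixup; the ARM64 side packs a 19-bit immediate into B.cond and goes through the same fixup type. A short illustrative sketch of the AMD64 bytes (not a VirtualBox helper):

#include <cstdint>
#include <cstring>

static uint32_t emitJccRel32_Sketch(uint8_t *pbBuf, uint32_t off, uint8_t bCond, int32_t offRel)
{
    pbBuf[off++] = 0x0f;
    pbBuf[off++] = 0x80 | (bCond & 0x0f);    /* e.g. 0x84 = JE/JZ, 0x85 = JNE/JNZ */
    memcpy(&pbBuf[off], &offRel, sizeof(offRel));
    return off + sizeof(offRel);
}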
     
    25922591 * Emits a Jcc rel32 / B.cc imm19 to a new label.
    25932592 */
    2594 DECLINLINE(uint32_t) iemNativeEmitJccToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2595                                                 IEMNATIVELABELTYPE enmLabelType, uint16_t uData, IEMNATIVEINSTRCOND enmCond)
     2593DECL_INLINE_THROW(uint32_t)
     2594iemNativeEmitJccToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2595                           IEMNATIVELABELTYPE enmLabelType, uint16_t uData, IEMNATIVEINSTRCOND enmCond)
    25962596{
    25972597    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    2598     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    25992598    return iemNativeEmitJccToLabel(pReNative, off, idxLabel, enmCond);
    26002599}
     
    26042603 * Emits a JZ/JE rel32 / B.EQ imm19 to the given label.
    26052604 */
    2606 DECLINLINE(uint32_t) iemNativeEmitJzToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2605DECL_INLINE_THROW(uint32_t) iemNativeEmitJzToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    26072606{
    26082607#ifdef RT_ARCH_AMD64
     
    26182617 * Emits a JZ/JE rel32 / B.EQ imm19 to a new label.
    26192618 */
    2620 DECLINLINE(uint32_t) iemNativeEmitJzToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2621                                                IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2619DECL_INLINE_THROW(uint32_t) iemNativeEmitJzToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2620                                                      IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    26222621{
    26232622#ifdef RT_ARCH_AMD64
     
    26342633 * Emits a JNZ/JNE rel32 / B.NE imm19 to the given label.
    26352634 */
    2636 DECLINLINE(uint32_t) iemNativeEmitJnzToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2635DECL_INLINE_THROW(uint32_t) iemNativeEmitJnzToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    26372636{
    26382637#ifdef RT_ARCH_AMD64
     
    26482647 * Emits a JNZ/JNE rel32 / B.NE imm19 to a new label.
    26492648 */
    2650 DECLINLINE(uint32_t) iemNativeEmitJnzToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2651                                                 IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2649DECL_INLINE_THROW(uint32_t) iemNativeEmitJnzToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2650                                                       IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    26522651{
    26532652#ifdef RT_ARCH_AMD64
     
    26642663 * Emits a JBE/JNA rel32 / B.LS imm19 to the given label.
    26652664 */
    2666 DECLINLINE(uint32_t) iemNativeEmitJbeToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2665DECL_INLINE_THROW(uint32_t) iemNativeEmitJbeToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    26672666{
    26682667#ifdef RT_ARCH_AMD64
     
    26782677 * Emits a JBE/JNA rel32 / B.LS imm19 to a new label.
    26792678 */
    2680 DECLINLINE(uint32_t) iemNativeEmitJbeToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2681                                                 IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2679DECL_INLINE_THROW(uint32_t) iemNativeEmitJbeToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2680                                                       IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    26822681{
    26832682#ifdef RT_ARCH_AMD64
     
    26942693 * Emits a JA/JNBE rel32 / B.HI imm19 to the given label.
    26952694 */
    2696 DECLINLINE(uint32_t) iemNativeEmitJaToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2695DECL_INLINE_THROW(uint32_t) iemNativeEmitJaToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    26972696{
    26982697#ifdef RT_ARCH_AMD64
     
    27082707 * Emits a JA/JNBE rel32 / B.HI imm19 to a new label.
    27092708 */
    2710 DECLINLINE(uint32_t) iemNativeEmitJaToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2711                                                IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2709DECL_INLINE_THROW(uint32_t) iemNativeEmitJaToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2710                                                      IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    27122711{
    27132712#ifdef RT_ARCH_AMD64
     
    27252724 * How @a offJmp is applied is target specific.
    27262725 */
    2727 DECLINLINE(uint32_t) iemNativeEmitJccToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2728                                             int32_t offTarget, IEMNATIVEINSTRCOND enmCond)
     2726DECL_INLINE_THROW(uint32_t)
     2727iemNativeEmitJccToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget, IEMNATIVEINSTRCOND enmCond)
    27292728{
    27302729#ifdef RT_ARCH_AMD64
    27312730    /* jcc rel32 */
    27322731    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    2733     AssertReturn(pbCodeBuf, UINT32_MAX);
    27342732    if (offTarget < 128 && offTarget >= -128)
    27352733    {
     
    27492747#elif defined(RT_ARCH_ARM64)
    27502748    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2751     AssertReturn(pu32CodeBuf, UINT32_MAX);
    27522749    pu32CodeBuf[off++] = Armv8A64MkInstrBCond(enmCond, offTarget);
    27532750
     
    27642761 * How @a offJmp is applied is target specific.
    27652762 */
    2766 DECLINLINE(uint32_t) iemNativeEmitJzToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
     2763DECL_INLINE_THROW(uint32_t) iemNativeEmitJzToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
    27672764{
    27682765#ifdef RT_ARCH_AMD64
     
    27802777 * How @a offJmp is applied is target specific.
    27812778 */
    2782 DECLINLINE(uint32_t) iemNativeEmitJnzToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
     2779DECL_INLINE_THROW(uint32_t) iemNativeEmitJnzToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
    27832780{
    27842781#ifdef RT_ARCH_AMD64
     
    27962793 * How @a offJmp is applied is target specific.
    27972794 */
    2798 DECLINLINE(uint32_t) iemNativeEmitJbeToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
     2795DECL_INLINE_THROW(uint32_t) iemNativeEmitJbeToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
    27992796{
    28002797#ifdef RT_ARCH_AMD64
     
    28122809 * How @a offJmp is applied is target specific.
    28132810 */
    2814 DECLINLINE(uint32_t) iemNativeEmitJaToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
     2811DECL_INLINE_THROW(uint32_t) iemNativeEmitJaToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
    28152812{
    28162813#ifdef RT_ARCH_AMD64
     
    28612858 * Internal helper, don't call directly.
    28622859 */
    2863 DECLINLINE(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfCc(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2864                                                                 uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel,
    2865                                                                bool fJmpIfSet)
     2860DECL_INLINE_THROW(uint32_t)
     2861iemNativeEmitTestBitInGprAndJmpToLabelIfCc(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc,
     2862                                           uint8_t iBitNo, uint32_t idxLabel, bool fJmpIfSet)
    28662863{
    28672864    Assert(iBitNo < 64);
    28682865#ifdef RT_ARCH_AMD64
    28692866    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5);
    2870     AssertReturn(pbCodeBuf, UINT32_MAX);
    28712867    if (iBitNo < 8)
    28722868    {
     
    28962892    /* Use the TBNZ instruction here. */
    28972893    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2898     AssertReturn(pu32CodeBuf, UINT32_MAX);
    2899     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm14At5), UINT32_MAX);
     2894    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm14At5);
    29002895    pu32CodeBuf[off++] = Armv8A64MkInstrTbzTbnz(fJmpIfSet, 0, iGprSrc, iBitNo);
    29012896
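
On ARM64 the bit test and branch is a single TBZ/TBNZ instruction; the tested bit number is split across two fields of the encoding and the target is a signed 14-bit word offset, which is why the reachable range is limited to +/-8191 instructions as noted in the following hunks. A sketch of the field packing (illustrative helper, not the VirtualBox one):

#include <cstdint>

static inline uint32_t a64MkTbzTbnz_Sketch(bool fJmpIfSet, int32_t offWords, uint8_t iReg, uint8_t iBitNo)
{
    return (fJmpIfSet ? UINT32_C(0x37000000) : UINT32_C(0x36000000))
         | ((uint32_t)(iBitNo & 0x20) << 26)     /* b5  -> bit 31 */
         | ((uint32_t)(iBitNo & 0x1f) << 19)     /* b40 -> bits 23:19 */
         | (((uint32_t)offWords & 0x3fff) << 5)  /* imm14 -> bits 18:5 */
         | (iReg & 31);                          /* Rt */
}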
     
    29142909 * @note On ARM64 the range is only +/-8191 instructions.
    29152910 */
    2916 DECLINLINE(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2917                                                                  uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel)
     2911DECL_INLINE_THROW(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2912                                                                        uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel)
    29182913{
    29192914    return iemNativeEmitTestBitInGprAndJmpToLabelIfCc(pReNative, off, iGprSrc, iBitNo, idxLabel, true /*fJmpIfSet*/);
     
    29272922 * @note On ARM64 the range is only +/-8191 instructions.
    29282923 */
    2929 DECLINLINE(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2930                                                                     uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel)
     2924DECL_INLINE_THROW(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2925                                                                           uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel)
    29312926{
    29322927    return iemNativeEmitTestBitInGprAndJmpToLabelIfCc(pReNative, off, iGprSrc, iBitNo, idxLabel, false /*fJmpIfSet*/);
     
    29382933 * flags accordingly.
    29392934 */
    2940 DECLINLINE(uint32_t) iemNativeEmitTestAnyBitsInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t fBits)
     2935DECL_INLINE_THROW(uint32_t)
     2936iemNativeEmitTestAnyBitsInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t fBits)
    29412937{
    29422938    Assert(fBits != 0);
     
    29462942    {
    29472943        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBits);
    2948         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    29492944
    29502945        /* test Ev,Gv */
    29512946        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5);
    2952         AssertReturn(pbCodeBuf, UINT32_MAX);
    29532947        pbCodeBuf[off++] = X86_OP_REX_W | (iGprSrc < 8 ? 0 : X86_OP_REX_R) | (iTmpReg < 8 ? 0 : X86_OP_REX_B);
    29542948        pbCodeBuf[off++] = 0x85;
     
    29612955        /* test Eb, imm8 or test Ev, imm32 */
    29622956        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    2963         AssertReturn(pbCodeBuf, UINT32_MAX);
    29642957        if (fBits <= UINT8_MAX)
    29652958        {
     
    29842977    /** @todo implement me. */
    29852978    else
    2986         AssertFailedReturn(UINT32_MAX);
     2979        AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_CASE_NOT_IMPLEMENTED_1));
    29872980
    29882981#elif defined(RT_ARCH_ARM64)
     
    29942987    else
    29952988    {
    2996         uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBits);
    2997         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    2998 
    29992989        /* ands Zr, iGprSrc, iTmpReg */
     2990        uint8_t const iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBits);
    30002991        uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    3001         AssertReturn(pu32CodeBuf, UINT32_MAX);
    30022992        pu32CodeBuf[off++] = Armv8A64MkInstrAnds(ARMV8_A64_REG_XZR, iGprSrc, iTmpReg);
    3003 
    30042993        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
    30052994    }
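
The test emitter above picks its operand form by the width of the mask; the exact conditions are elided in the hunk, but the idea is that a small mask can be tested with an immediate form while a wide 64-bit mask is first loaded into a scratch register and tested register against register. A hedged sketch of such a classification (illustrative only, not the actual decision code):

#include <cstdint>

enum TestForm_Sketch { kTestImm8_Sketch, kTestImm32_Sketch, kTestTmpReg_Sketch };

static inline TestForm_Sketch x86PickTestForm_Sketch(uint64_t fBits)
{
    if (fBits <= UINT8_MAX)    return kTestImm8_Sketch;    /* test r/m8, imm8 on the low byte */
    if (fBits <= UINT32_MAX)   return kTestImm32_Sketch;   /* 32-bit operand size test with imm32 */
    return kTestTmpReg_Sketch;                             /* load mask into a temp, test r/m64, reg */
}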
     
    30173006 * are set in @a iGprSrc.
    30183007 */
    3019 DECLINLINE(uint32_t) iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3020                                                                         uint8_t iGprSrc, uint64_t fBits, uint32_t idxLabel)
     3008DECL_INLINE_THROW(uint32_t)
     3009iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3010                                                   uint8_t iGprSrc, uint64_t fBits, uint32_t idxLabel)
    30213011{
    30223012    Assert(fBits); Assert(!RT_IS_POWER_OF_TWO(fBits));
     
    30333023 * are set in @a iGprSrc.
    30343024 */
    3035 DECLINLINE(uint32_t) iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfNoneSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3036                                                                          uint8_t iGprSrc, uint64_t fBits, uint32_t idxLabel)
     3025DECL_INLINE_THROW(uint32_t)
     3026iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfNoneSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3027                                                    uint8_t iGprSrc, uint64_t fBits, uint32_t idxLabel)
    30373028{
    30383029    Assert(fBits); Assert(!RT_IS_POWER_OF_TWO(fBits));
     
    30503041 * The operand size is given by @a f64Bit.
    30513042 */
    3052 DECLINLINE(uint32_t) iemNativeEmitTestIfGprIsZeroAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3053                                                                uint8_t iGprSrc, bool f64Bit, uint32_t idxLabel)
     3043DECL_INLINE_THROW(uint32_t) iemNativeEmitTestIfGprIsZeroAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3044                                                                      uint8_t iGprSrc, bool f64Bit, uint32_t idxLabel)
    30543045{
    30553046    Assert(idxLabel < pReNative->cLabels);
     
    30583049    /* test reg32,reg32  / test reg64,reg64 */
    30593050    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    3060     AssertReturn(pbCodeBuf, UINT32_MAX);
    30613051    if (f64Bit)
    30623052        pbCodeBuf[off++] = X86_OP_REX_W | (iGprSrc < 8 ? 0 : X86_OP_REX_R | X86_OP_REX_B);
     
    30723062#elif defined(RT_ARCH_ARM64)
    30733063    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    3074     AssertReturn(pu32CodeBuf, UINT32_MAX);
    3075     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5), UINT32_MAX);
     3064    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5);
    30763065    pu32CodeBuf[off++] = Armv8A64MkInstrCbzCbnz(false /*fJmpIfNotZero*/, 0, f64Bit);
    30773066    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
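
The zero test above maps to a single compare-and-branch instruction on ARM64 (CBZ/CBNZ), again with a 19-bit word offset that is filled in through the fixup mechanism. A sketch of the encoding fields (illustrative helper):

#include <cstdint>

static inline uint32_t a64MkCbzCbnz_Sketch(bool fJmpIfNotZero, int32_t offWords, uint8_t iReg, bool f64Bit)
{
    return (f64Bit ? UINT32_C(0x80000000) : 0)                            /* sf */
         | (fJmpIfNotZero ? UINT32_C(0x35000000) : UINT32_C(0x34000000))  /* CBNZ / CBZ */
         | (((uint32_t)offWords & 0x7ffff) << 5)                          /* imm19 -> bits 23:5 */
         | (iReg & 31);                                                   /* Rt */
}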
     
    30893078 * The operand size is given by @a f64Bit.
    30903079 */
    3091 DECLINLINE(uint32_t) iemNativeEmitTestIfGprIsZeroAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc,
    3092                                                                   bool f64Bit, IEMNATIVELABELTYPE enmLabelType,
    3093                                                                  uint16_t uData = 0)
     3080DECL_INLINE_THROW(uint32_t)
     3081iemNativeEmitTestIfGprIsZeroAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, bool f64Bit,
     3082                                             IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    30943083{
    30953084    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    3096     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    30973085    return iemNativeEmitTestIfGprIsZeroAndJmpToLabel(pReNative, off, iGprSrc, f64Bit, idxLabel);
    30983086}
     
    31033091 * differs.
    31043092 */
    3105 DECLINLINE(uint32_t) iemNativeEmitTestIfGprNotEqualGprAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3106                                                                      uint8_t iGprLeft, uint8_t iGprRight, uint32_t idxLabel)
     3093DECL_INLINE_THROW(uint32_t)
     3094iemNativeEmitTestIfGprNotEqualGprAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3095                                               uint8_t iGprLeft, uint8_t iGprRight, uint32_t idxLabel)
    31073096{
    31083097    off = iemNativeEmitCmpGprWithGpr(pReNative, off, iGprLeft, iGprRight);
     
    31153104 * Emits code that jumps to a new label if @a iGprLeft and @a iGprRight differs.
    31163105 */
    3117 DECLINLINE(uint32_t) iemNativeEmitTestIfGprNotEqualGprAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3118                                                                        uint8_t iGprLeft, uint8_t iGprRight,
    3119                                                                        IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     3106DECL_INLINE_THROW(uint32_t)
     3107iemNativeEmitTestIfGprNotEqualGprAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3108                                                  uint8_t iGprLeft, uint8_t iGprRight,
     3109                                                  IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    31203110{
    31213111    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    3122     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    31233112    return iemNativeEmitTestIfGprNotEqualGprAndJmpToLabel(pReNative, off, iGprLeft, iGprRight, idxLabel);
    31243113}
     
    31283117 * Emits code that jumps to the given label if @a iGprSrc differs from @a uImm.
    31293118 */
    3130 DECLINLINE(uint32_t) iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3131                                                                     uint8_t iGprSrc, uint64_t uImm, uint32_t idxLabel)
     3119DECL_INLINE_THROW(uint32_t)
     3120iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3121                                               uint8_t iGprSrc, uint64_t uImm, uint32_t idxLabel)
    31323122{
    31333123    off = iemNativeEmitCmpGprWithImm(pReNative, off, iGprSrc, uImm);
     
    31403130 * Emits code that jumps to a new label if @a iGprSrc differs from @a uImm.
    31413131 */
    3142 DECLINLINE(uint32_t) iemNativeEmitTestIfGprNotEqualImmAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3143                                                                        uint8_t iGprSrc, uint64_t uImm,
    3144                                                                        IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     3132DECL_INLINE_THROW(uint32_t)
     3133iemNativeEmitTestIfGprNotEqualImmAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3134                                                  uint8_t iGprSrc, uint64_t uImm,
     3135                                                  IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    31453136{
    31463137    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    3147     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    31483138    return iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(pReNative, off, iGprSrc, uImm, idxLabel);
    31493139}
     
    31543144 * @a uImm.
    31553145 */
    3156 DECLINLINE(uint32_t) iemNativeEmitTestIfGpr32NotEqualImmAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3157                                                                       uint8_t iGprSrc, uint32_t uImm, uint32_t idxLabel)
     3146DECL_INLINE_THROW(uint32_t) iemNativeEmitTestIfGpr32NotEqualImmAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3147                                                                             uint8_t iGprSrc, uint32_t uImm, uint32_t idxLabel)
    31583148{
    31593149    off = iemNativeEmitCmpGpr32WithImm(pReNative, off, iGprSrc, uImm);
     
    31673157 * @a uImm.
    31683158 */
    3169 DECLINLINE(uint32_t) iemNativeEmitTestIfGpr32NotEqualImmAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3170                                                                          uint8_t iGprSrc, uint32_t uImm,
    3171                                                                          IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     3159DECL_INLINE_THROW(uint32_t)
     3160iemNativeEmitTestIfGpr32NotEqualImmAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3161                                                    uint8_t iGprSrc, uint32_t uImm,
     3162                                                    IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    31723163{
    31733164    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    3174     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    31753165    return iemNativeEmitTestIfGpr32NotEqualImmAndJmpToLabel(pReNative, off, iGprSrc, uImm, idxLabel);
    31763166}
     
    31813171 * Emits a call to a 64-bit address.
    31823172 */
    3183 DECLINLINE(uint32_t) iemNativeEmitCallImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uintptr_t uPfn)
     3173DECL_INLINE_THROW(uint32_t) iemNativeEmitCallImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uintptr_t uPfn)
    31843174{
    31853175#ifdef RT_ARCH_AMD64
     
    31883178    /* call rax */
    31893179    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    3190     AssertReturn(pbCodeBuf, UINT32_MAX);
    31913180    pbCodeBuf[off++] = 0xff;
    31923181    pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 2, X86_GREG_xAX);
     
    31963185
    31973186    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    3198     AssertReturn(pu32CodeBuf, UINT32_MAX);
    31993187    pu32CodeBuf[off++] = Armv8A64MkInstrBlr(IEMNATIVE_REG_FIXED_TMP0);
     3188
    32003189#else
    32013190# error "port me"
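
A call to an arbitrary 64-bit address cannot generally be encoded as a direct rel32 call on AMD64, so the emitter above goes through a register: the target is first placed in RAX (that load is not visible in the hunk) and an indirect call rax follows; ARM64 similarly loads the address into a scratch register and uses BLR. A self-contained sketch of the AMD64 byte sequence (illustrative only):

#include <cstdint>
#include <cstring>

static uint32_t emitCallImm64_Sketch(uint8_t *pbBuf, uint32_t off, uint64_t uPfn)
{
    pbBuf[off++] = 0x48;                        /* REX.W */
    pbBuf[off++] = 0xb8;                        /* mov rax, imm64 */
    memcpy(&pbBuf[off], &uPfn, sizeof(uPfn));
    off += sizeof(uPfn);
    pbBuf[off++] = 0xff;                        /* call rax (opcode FF /2, register direct) */
    pbBuf[off++] = 0xd0;
    return off;
}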