VirtualBox

Changeset 101682 in vbox


Timestamp: Oct 31, 2023 12:18:44 PM (15 months ago)
Author:    vboxsync
Message:   VMM/IEM,VBox/err.h: Refactored the native recompiler code to throw/longjmp on errors rather than returning UINT32_MAX/UINT8_MAX. This should make it easier to pinpoint why recompilation fails (we've got an RC) and get rid of hundreds of AssertReturn statements that clutter up the code and introduce lots of unnecessary branches. bugref:10371

Location:  trunk
Files:     4 edited

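To make the shape of the change concrete before the hunks, here is a minimal, standalone C sketch of the two error-handling styles the commit message contrasts. It is not VBox code: the real recompiler wraps its checks in AssertStmt(..., IEMNATIVE_DO_LONGJMP(pReNative, rc)) on the PIEMRECOMPILERSTATE, as the diffs below show, while this toy uses plain setjmp/longjmp and invented names (RECOMPSTATE, labelCreateOld, labelCreateNew, recompile); only the -5323 value is borrowed from the err.h hunk.

/*
 * Standalone sketch only, NOT VBox code.  Illustrates replacing
 * "return a magic sentinel" with "longjmp out carrying a status code".
 */
#include <setjmp.h>
#include <stdint.h>
#include <stdio.h>

#define MY_VERR_LABEL_TOO_MANY  (-5323)         /* value taken from the err.h hunk */

typedef struct RECOMPSTATE
{
    jmp_buf  JmpBuf;                            /* armed once per recompilation attempt */
    uint32_t cLabels;
} RECOMPSTATE;

/* Old style: failure is a sentinel that every caller must remember to check. */
static uint32_t labelCreateOld(RECOMPSTATE *pState)
{
    if (pState->cLabels >= UINT16_MAX)
        return UINT32_MAX;                      /* and the reason for the failure is lost */
    return pState->cLabels++;
}

/* New style: failure longjmps out with a specific status code, so the
   return value never needs checking at the call site. */
static uint32_t labelCreateNew(RECOMPSTATE *pState)
{
    if (pState->cLabels >= UINT16_MAX)
        longjmp(pState->JmpBuf, MY_VERR_LABEL_TOO_MANY);
    return pState->cLabels++;
}

static int recompile(RECOMPSTATE *pState)
{
    int rc = setjmp(pState->JmpBuf);            /* catches every longjmp issued below */
    if (rc != 0)
        return rc;                              /* the RC pinpoints why we gave up */
    for (;;)
        labelCreateNew(pState);                 /* no per-call error branches needed */
}

int main(void)
{
    RECOMPSTATE State;
    State.cLabels = UINT16_MAX;                 /* force the failure path */

    /* Old pattern: sentinel check at every call site, reason unknown. */
    if (labelCreateOld(&State) == UINT32_MAX)
        printf("old style: got UINT32_MAX back, no idea why\n");

    /* New pattern: one setjmp per attempt; the RC says what went wrong. */
    printf("new style: recompile() failed with rc=%d\n", recompile(&State));
    return 0;
}

The payoff is the one the message describes: the sentinel-returning variant needs an error branch at every call site and discards the reason for the failure, while the longjmp variant surfaces a single, specific status code at the top of the recompilation attempt.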
  • trunk/include/VBox/err.h

    r101035 → r101682
    25082508/** Recompiler: Translation block allocation failed. */
    25092509#define VERR_IEM_TB_ALLOC_FAILED                    (-5312)
     2510/** Recompiler: Too deeply nested conditionals. */
     2511#define VERR_IEM_COND_TOO_DEEPLY_NESTED             (-5313)
     2512/** Recompiler: Failed to reconcile the register/variable state on endif. */
     2513#define VERR_IEM_COND_ENDIF_RECONCILIATION_FAILED   (-5314)
     2514/** Recompiler: Failed to allocate more memory for debug info. */
     2515#define VERR_IEM_DBGINFO_OUT_OF_MEMORY              (-5315)
     2516/** Recompiler: Debug info internal processing error \#1. */
     2517#define VERR_IEM_DBGINFO_IPE_1                      (-5316)
     2518/** Recompiler: Debug info internal processing error \#2. */
     2519#define VERR_IEM_DBGINFO_IPE_2                      (-5317)
     2520/** Recompiler: Fixup internal processing error \#1. */
     2521#define VERR_IEM_FIXUP_IPE_1                        (-5318)
     2522/** Recompiler: Too many fixups. */
     2523#define VERR_IEM_FIXUP_TOO_MANY                     (-5319)
     2524/** Recompiler: Out of memory. */
     2525#define VERR_IEM_FIXUP_OUT_OF_MEMORY                (-5320)
     2526/** Recompiler: Hit instruction buffer size limit. */
     2527#define VERR_IEM_INSTR_BUF_TOO_LARGE                (-5321)
     2528/** Recompiler: Out of memory for the instruction buffer (regular heap). */
     2529#define VERR_IEM_INSTR_BUF_OUT_OF_MEMORY            (-5322)
     2530/** Recompiler: Too many labels. */
     2531#define VERR_IEM_LABEL_TOO_MANY                     (-5323)
     2532/** Recompiler: Out of memory for labels.   */
     2533#define VERR_IEM_LABEL_OUT_OF_MEMORY                (-5324)
     2534/** Recompiler: Label internal processing error \#1. */
     2535#define VERR_IEM_LABEL_IPE_1                        (-5325)
     2536/** Recompiler: Label internal processing error \#2. */
     2537#define VERR_IEM_LABEL_IPE_2                        (-5326)
     2538/** Recompiler: Label internal processing error \#3. */
     2539#define VERR_IEM_LABEL_IPE_3                        (-5327)
     2540/** Recompiler: Label internal processing error \#4. */
     2541#define VERR_IEM_LABEL_IPE_4                        (-5328)
     2542/** Recompiler: Label internal processing error \#5. */
     2543#define VERR_IEM_LABEL_IPE_5                        (-5329)
     2544/** Recompiler: Label internal processing error \#6. */
     2545#define VERR_IEM_LABEL_IPE_6                        (-5330)
     2546/** Recompiler: Label internal processing error \#7. */
     2547#define VERR_IEM_LABEL_IPE_7                        (-5331)
     2548
     2549/** Recompiler: Out of host register. */
     2550#define VERR_IEM_REG_OUT_OF_REGISTERS               (-5340)
     2551/** Recompiler: No temporary host register available. */
     2552#define VERR_IEM_REG_ALLOCATOR_NO_FREE_TMP          (-5341)
     2553/** Recompiler: Register allocator internal processing error \#1. */
     2554#define VERR_IEM_REG_IPE_1                          (-5342)
     2555/** Recompiler: Register allocator internal processing error \#2. */
     2556#define VERR_IEM_REG_IPE_2                          (-5343)
     2557/** Recompiler: Register allocator internal processing error \#3. */
     2558#define VERR_IEM_REG_IPE_3                          (-5344)
     2559/** Recompiler: Register allocator internal processing error \#4. */
     2560#define VERR_IEM_REG_IPE_4                          (-5345)
     2561/** Recompiler: Register allocator internal processing error \#5. */
     2562#define VERR_IEM_REG_IPE_5                          (-5346)
     2563/** Recompiler: Register allocator internal processing error \#6. */
     2564#define VERR_IEM_REG_IPE_6                          (-5347)
     2565/** Recompiler: Register allocator internal processing error \#7. */
     2566#define VERR_IEM_REG_IPE_7                          (-5348)
     2567/** Recompiler: Register allocator internal processing error \#8. */
     2568#define VERR_IEM_REG_IPE_8                          (-5349)
     2569
     2570/** Recompiler: Unimplemented case. */
     2571#define VERR_IEM_EMIT_CASE_NOT_IMPLEMENTED_1        (-5360)
    25102572
    25112573/** Restart the current instruction. For testing only. */
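The block of new VERR_IEM_* codes above is what turns "recompilation failed" into something that can be diagnosed. As a purely illustrative sketch (the numeric values are copied from the hunk, but whyRecompilationFailed() is an invented helper, not part of IPRT or the VMM), a caller or log routine could map a few of them back to the descriptions above:

#include <stdio.h>

/* Values copied from the err.h hunk above; the lookup itself is made up. */
#define VERR_IEM_COND_TOO_DEEPLY_NESTED  (-5313)
#define VERR_IEM_INSTR_BUF_TOO_LARGE     (-5321)
#define VERR_IEM_LABEL_TOO_MANY          (-5323)
#define VERR_IEM_REG_OUT_OF_REGISTERS    (-5340)

static const char *whyRecompilationFailed(int rc)
{
    switch (rc)
    {
        case VERR_IEM_COND_TOO_DEEPLY_NESTED: return "too deeply nested conditionals";
        case VERR_IEM_INSTR_BUF_TOO_LARGE:    return "hit the instruction buffer size limit";
        case VERR_IEM_LABEL_TOO_MANY:         return "too many labels";
        case VERR_IEM_REG_OUT_OF_REGISTERS:   return "out of host registers";
        default:                              return "unknown recompiler failure";
    }
}

int main(void)
{
    printf("native recompilation failed: %s (rc=%d)\n",
           whyRecompilationFailed(VERR_IEM_LABEL_TOO_MANY), VERR_IEM_LABEL_TOO_MANY);
    return 0;
}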
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r101661 → r101682
    129129#ifdef VBOX_STRICT
    130130static uint32_t iemNativeEmitGuestRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    131                                                 uint8_t idxReg, IEMNATIVEGSTREG enmGstReg) RT_NOEXCEPT;
     131                                                uint8_t idxReg, IEMNATIVEGSTREG enmGstReg);
    132132#endif
    133133#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
    134 static bool iemNativeDbgInfoAddNativeOffset(PIEMRECOMPILERSTATE pReNative, uint32_t off) RT_NOEXCEPT;
    135 static bool iemNativeDbgInfoAddLabel(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType, uint16_t uData) RT_NOEXCEPT;
     134static void iemNativeDbgInfoAddNativeOffset(PIEMRECOMPILERSTATE pReNative, uint32_t off);
     135static void iemNativeDbgInfoAddLabel(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType, uint16_t uData);
    136136#endif
    137137
     
    17131713 * call iemNativeLabelDefine() later to set it.
    17141714 *
    1715  * @returns Label ID.
     1715 * @returns Label ID. Throws VBox status code on failure, so no need to check
     1716 *          the return value.
    17161717 * @param   pReNative   The native recompile state.
    17171718 * @param   enmType     The label type.
     
    17211722 *                      certain type of labels. Default is zero.
    17221723 */
    1723 DECLHIDDEN(uint32_t) iemNativeLabelCreate(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType,
    1724                                           uint32_t offWhere /*= UINT32_MAX*/, uint16_t uData /*= 0*/) RT_NOEXCEPT
     1724DECL_HIDDEN_THROW(uint32_t)
     1725iemNativeLabelCreate(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType,
     1726                     uint32_t offWhere /*= UINT32_MAX*/, uint16_t uData /*= 0*/)
    17251727{
    17261728    /*
     
    17461748            {
    17471749#ifdef VBOX_STRICT
    1748                 AssertReturn(uData == 0, UINT32_MAX);
    1749                 AssertReturn(offWhere == UINT32_MAX, UINT32_MAX);
    1750 #endif
    1751                 AssertReturn(paLabels[i].off == UINT32_MAX, UINT32_MAX);
     1750                AssertStmt(uData == 0, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_1));
     1751                AssertStmt(offWhere == UINT32_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_1));
     1752#endif
     1753                AssertStmt(paLabels[i].off == UINT32_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_2));
    17521754                return i;
    17531755            }
     
    17621764    {
    17631765        uint32_t cNew = pReNative->cLabelsAlloc;
    1764         AssertReturn(cNew, UINT32_MAX);
    1765         AssertReturn(cLabels == cNew, UINT32_MAX);
     1766        AssertStmt(cNew, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_3));
     1767        AssertStmt(cLabels == cNew, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_3));
    17661768        cNew *= 2;
    1767         AssertReturn(cNew <= _64K, UINT32_MAX); /* IEMNATIVEFIXUP::idxLabel type restrict this */
     1769        AssertStmt(cNew <= _64K, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_TOO_MANY)); /* IEMNATIVEFIXUP::idxLabel type restrict this */
    17681770        paLabels = (PIEMNATIVELABEL)RTMemRealloc(paLabels, cNew * sizeof(paLabels[0]));
    1769         AssertReturn(paLabels, UINT32_MAX);
     1771        AssertStmt(paLabels, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_OUT_OF_MEMORY));
    17701772        pReNative->paLabels     = paLabels;
    17711773        pReNative->cLabelsAlloc = cNew;
     
    18011803 * @param   offWhere    The position.
    18021804 */
    1803 DECLHIDDEN(void) iemNativeLabelDefine(PIEMRECOMPILERSTATE pReNative, uint32_t idxLabel, uint32_t offWhere) RT_NOEXCEPT
    1804 {
    1805     AssertReturnVoid(idxLabel < pReNative->cLabels);
     1805DECL_HIDDEN_THROW(void) iemNativeLabelDefine(PIEMRECOMPILERSTATE pReNative, uint32_t idxLabel, uint32_t offWhere)
     1806{
     1807    AssertStmt(idxLabel < pReNative->cLabels, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_4));
    18061808    PIEMNATIVELABEL const pLabel = &pReNative->paLabels[idxLabel];
    1807     AssertReturnVoid(pLabel->off == UINT32_MAX);
     1809    AssertStmt(pLabel->off == UINT32_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_5));
    18081810    pLabel->off = offWhere;
    18091811#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
     
    18391841
    18401842
    1841 
    18421843/**
    18431844 * Adds a fixup.
    18441845 *
    1845  * @returns Success indicator.
     1846 * @throws  VBox status code (int) on failure.
    18461847 * @param   pReNative   The native recompile state.
    18471848 * @param   offWhere    The instruction offset of the fixup location.
     
    18501851 * @param   offAddend   Fixup addend if applicable to the type. Default is 0.
    18511852 */
    1852 DECLHIDDEN(bool) iemNativeAddFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, uint32_t idxLabel,
    1853                                    IEMNATIVEFIXUPTYPE enmType, int8_t offAddend /*= 0*/) RT_NOEXCEPT
     1853DECL_HIDDEN_THROW(void)
     1854iemNativeAddFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, uint32_t idxLabel,
     1855                  IEMNATIVEFIXUPTYPE enmType, int8_t offAddend /*= 0*/)
    18541856{
    18551857    Assert(idxLabel <= UINT16_MAX);
     
    18661868    {
    18671869        uint32_t cNew = pReNative->cFixupsAlloc;
    1868         AssertReturn(cNew, false);
    1869         AssertReturn(cFixups == cNew, false);
     1870        AssertStmt(cNew, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_FIXUP_IPE_1));
     1871        AssertStmt(cFixups == cNew, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_FIXUP_IPE_1));
    18701872        cNew *= 2;
    1871         AssertReturn(cNew <= _128K, false);
     1873        AssertStmt(cNew <= _128K, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_FIXUP_TOO_MANY));
    18721874        paFixups = (PIEMNATIVEFIXUP)RTMemRealloc(paFixups, cNew * sizeof(paFixups[0]));
    1873         AssertReturn(paFixups, false);
     1875        AssertStmt(paFixups, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_FIXUP_OUT_OF_MEMORY));
    18741876        pReNative->paFixups     = paFixups;
    18751877        pReNative->cFixupsAlloc = cNew;
     
    18841886    paFixups[cFixups].offAddend = offAddend;
    18851887    pReNative->cFixups = cFixups + 1;
    1886     return true;
    1887 }
     1888}
     1889
    18881890
    18891891/**
    18901892 * Slow code path for iemNativeInstrBufEnsure.
    18911893 */
    1892 DECLHIDDEN(PIEMNATIVEINSTR) iemNativeInstrBufEnsureSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    1893                                                         uint32_t cInstrReq) RT_NOEXCEPT
     1894DECL_HIDDEN_THROW(PIEMNATIVEINSTR) iemNativeInstrBufEnsureSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t cInstrReq)
    18941895{
    18951896    /* Double the buffer size till we meet the request. */
     
    19021903    uint32_t const cbNew = cNew * sizeof(IEMNATIVEINSTR);
    19031904#ifdef RT_ARCH_ARM64
    1904     AssertReturn(cbNew <= _1M, NULL); /* Limited by the branch instruction range (18+2 bits). */
     1905    uint32_t const cbMaxInstrBuf = _1M; /* Limited by the branch instruction range (18+2 bits). */
    19051906#else
    1906     AssertReturn(cbNew <= _2M, NULL);
    1907 #endif
     1907    uint32_t const cbMaxInstrBuf = _2M;
     1908#endif
     1909    AssertStmt(cbNew <= cbMaxInstrBuf, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_INSTR_BUF_TOO_LARGE));
    19081910
    19091911    void *pvNew = RTMemRealloc(pReNative->pInstrBuf, cbNew);
    1910     AssertReturn(pvNew, NULL);
     1912    AssertStmt(pvNew, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_INSTR_BUF_OUT_OF_MEMORY));
    19111913
    19121914    pReNative->cInstrBufAlloc   = cNew;
     
    19181920/**
    19191921 * Grows the static debug info array used during recompilation.
    1920  * @returns Pointer to the new debug info block, NULL on failure.
    1921  */
    1922 DECL_NO_INLINE(static, PIEMTBDBG) iemNativeDbgInfoGrow(PIEMRECOMPILERSTATE pReNative, PIEMTBDBG pDbgInfo) RT_NOEXCEPT
     1922 *
     1923 * @returns Pointer to the new debug info block; throws VBox status code on
     1924 *          failure, so no need to check the return value.
     1925 */
     1926DECL_NO_INLINE(static, PIEMTBDBG) iemNativeDbgInfoGrow(PIEMRECOMPILERSTATE pReNative, PIEMTBDBG pDbgInfo)
    19231927{
    19241928    uint32_t cNew = pReNative->cDbgInfoAlloc * 2;
    1925     AssertReturn(cNew < _1M && cNew != 0, NULL);
     1929    AssertStmt(cNew < _1M && cNew != 0, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_DBGINFO_IPE_1));
    19261930    pDbgInfo = (PIEMTBDBG)RTMemRealloc(pDbgInfo, RT_UOFFSETOF_DYN(IEMTBDBG, aEntries[cNew]));
    1927     AssertReturn(pDbgInfo, NULL);
     1931    AssertStmt(pDbgInfo, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_DBGINFO_OUT_OF_MEMORY));
    19281932    pReNative->pDbgInfo      = pDbgInfo;
    19291933    pReNative->cDbgInfoAlloc = cNew;
     
    19351939 * Adds a new debug info uninitialized entry, returning the pointer to it.
    19361940 */
    1937 DECLINLINE(PIEMTBDBGENTRY) iemNativeDbgInfoAddNewEntry(PIEMRECOMPILERSTATE pReNative, PIEMTBDBG pDbgInfo)
     1941DECL_INLINE_THROW(PIEMTBDBGENTRY) iemNativeDbgInfoAddNewEntry(PIEMRECOMPILERSTATE pReNative, PIEMTBDBG pDbgInfo)
    19381942{
    19391943    if (RT_LIKELY(pDbgInfo->cEntries < pReNative->cDbgInfoAlloc))
    19401944    { /* likely */ }
    19411945    else
    1942     {
    19431946        pDbgInfo = iemNativeDbgInfoGrow(pReNative, pDbgInfo);
    1944         AssertReturn(pDbgInfo, NULL);
    1945     }
    19461947    return &pDbgInfo->aEntries[pDbgInfo->cEntries++];
    19471948}
     
    19511952 * Debug Info: Adds a native offset record, if necessary.
    19521953 */
    1953 static bool iemNativeDbgInfoAddNativeOffset(PIEMRECOMPILERSTATE pReNative, uint32_t off) RT_NOEXCEPT
     1954static void iemNativeDbgInfoAddNativeOffset(PIEMRECOMPILERSTATE pReNative, uint32_t off)
    19541955{
    19551956    PIEMTBDBG pDbgInfo = pReNative->pDbgInfo;
     
    19641965        {
    19651966            if (pDbgInfo->aEntries[idx].NativeOffset.offNative == off)
    1966                 return true;
    1967             AssertReturn(pDbgInfo->aEntries[idx].NativeOffset.offNative < off, false);
     1967                return;
     1968            AssertStmt(pDbgInfo->aEntries[idx].NativeOffset.offNative < off,
     1969                       IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_DBGINFO_IPE_2));
    19681970            break;
    19691971        }
     
    19731975     */
    19741976    PIEMTBDBGENTRY const pEntry = iemNativeDbgInfoAddNewEntry(pReNative, pDbgInfo);
    1975     AssertReturn(pEntry, false);
    19761977    pEntry->NativeOffset.uType     = kIemTbDbgEntryType_NativeOffset;
    19771978    pEntry->NativeOffset.offNative = off;
    1978 
    1979     return true;
    19801979}
    19811980
     
    19841983 * Debug Info: Record info about a label.
    19851984 */
    1986 static bool iemNativeDbgInfoAddLabel(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType, uint16_t uData) RT_NOEXCEPT
     1985static void iemNativeDbgInfoAddLabel(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType, uint16_t uData)
    19871986{
    19881987    PIEMTBDBGENTRY const pEntry = iemNativeDbgInfoAddNewEntry(pReNative, pReNative->pDbgInfo);
    1989     AssertReturn(pEntry, false);
    1990 
    19911988    pEntry->Label.uType    = kIemTbDbgEntryType_Label;
    19921989    pEntry->Label.uUnused  = 0;
    19931990    pEntry->Label.enmLabel = (uint8_t)enmType;
    19941991    pEntry->Label.uData    = uData;
    1995 
    1996     return true;
    19971992}
    19981993
     
    20011996 * Debug Info: Record info about a threaded call.
    20021997 */
    2003 static bool iemNativeDbgInfoAddThreadedCall(PIEMRECOMPILERSTATE pReNative, IEMTHREADEDFUNCS enmCall, bool fRecompiled) RT_NOEXCEPT
     1998static void iemNativeDbgInfoAddThreadedCall(PIEMRECOMPILERSTATE pReNative, IEMTHREADEDFUNCS enmCall, bool fRecompiled)
    20041999{
    20052000    PIEMTBDBGENTRY const pEntry = iemNativeDbgInfoAddNewEntry(pReNative, pReNative->pDbgInfo);
    2006     AssertReturn(pEntry, false);
    2007 
    20082001    pEntry->ThreadedCall.uType       = kIemTbDbgEntryType_ThreadedCall;
    20092002    pEntry->ThreadedCall.fRecompiled = fRecompiled;
    20102003    pEntry->ThreadedCall.uUnused     = 0;
    20112004    pEntry->ThreadedCall.enmCall     = (uint16_t)enmCall;
    2012 
    2013     return true;
    20142005}
    20152006
     
    20182009 * Debug Info: Record info about a new guest instruction.
    20192010 */
    2020 static bool iemNativeDbgInfoAddGuestInstruction(PIEMRECOMPILERSTATE pReNative, uint32_t fExec) RT_NOEXCEPT
     2011static void iemNativeDbgInfoAddGuestInstruction(PIEMRECOMPILERSTATE pReNative, uint32_t fExec)
    20212012{
    20222013    PIEMTBDBGENTRY const pEntry = iemNativeDbgInfoAddNewEntry(pReNative, pReNative->pDbgInfo);
    2023     AssertReturn(pEntry, false);
    2024 
    20252014    pEntry->GuestInstruction.uType   = kIemTbDbgEntryType_GuestInstruction;
    20262015    pEntry->GuestInstruction.uUnused = 0;
    20272016    pEntry->GuestInstruction.fExec   = fExec;
    2028 
    2029     return true;
    20302017}
    20312018
     
    20342021 * Debug Info: Record info about guest register shadowing.
    20352022 */
    2036 static bool iemNativeDbgInfoAddGuestRegShadowing(PIEMRECOMPILERSTATE pReNative, IEMNATIVEGSTREG enmGstReg,
    2037                                                  uint8_t idxHstReg = UINT8_MAX, uint8_t idxHstRegPrev = UINT8_MAX) RT_NOEXCEPT
     2023static void iemNativeDbgInfoAddGuestRegShadowing(PIEMRECOMPILERSTATE pReNative, IEMNATIVEGSTREG enmGstReg,
     2024                                                 uint8_t idxHstReg = UINT8_MAX, uint8_t idxHstRegPrev = UINT8_MAX)
    20382025{
    20392026    PIEMTBDBGENTRY const pEntry = iemNativeDbgInfoAddNewEntry(pReNative, pReNative->pDbgInfo);
    2040     AssertReturn(pEntry, false);
    2041 
    20422027    pEntry->GuestRegShadowing.uType         = kIemTbDbgEntryType_GuestRegShadowing;
    20432028    pEntry->GuestRegShadowing.uUnused       = 0;
     
    20452030    pEntry->GuestRegShadowing.idxHstReg     = idxHstReg;
    20462031    pEntry->GuestRegShadowing.idxHstRegPrev = idxHstRegPrev;
    2047 
    2048     return true;
    20492032}
    20502033
     
    22032186 * This ASSUMES the caller has done the minimal/optimal allocation checks and
    22042187 * failed.
    2205  */
    2206 static uint8_t iemNativeRegAllocFindFree(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, bool fAllowVolatile) RT_NOEXCEPT
     2188 *
     2189 * @returns Host register number on success; throws VBox status code on failure, so no
     2190 *          need to check the return value.
     2191 */
     2192static uint8_t iemNativeRegAllocFindFree(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, bool fAllowVolatile)
    22072193{
    22082194    uint32_t fRegMask = fAllowVolatile
     
    22572243                if (pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Stack)
    22582244                {
    2259                     AssertReturn(pReNative->Core.aVars[idxVar].idxStackSlot != UINT8_MAX, UINT8_MAX);
    2260                     uint32_t off = *poff;
    2261                     *poff = off = iemNativeEmitStoreGprByBp(pReNative, off,
    2262                                                               pReNative->Core.aVars[idxVar].idxStackSlot * sizeof(uint64_t)
    2263                                                             - IEMNATIVE_FP_OFF_STACK_VARS,
    2264                                                             idxReg);
    2265                     AssertReturn(off != UINT32_MAX, UINT8_MAX);
     2245                    AssertStmt(pReNative->Core.aVars[idxVar].idxStackSlot != UINT8_MAX,
     2246                               IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_8));
     2247                    *poff = iemNativeEmitStoreGprByBp(pReNative, *poff,
     2248                                                        pReNative->Core.aVars[idxVar].idxStackSlot * sizeof(uint64_t)
     2249                                                      - IEMNATIVE_FP_OFF_STACK_VARS,
     2250                                                      idxReg);
    22662251                }
    22672252
     
    22762261    }
    22772262
    2278     AssertFailedReturn(UINT8_MAX);
     2263    AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_OUT_OF_REGISTERS));
    22792264}
    22802265
     
    23522337    else
    23532338    {
    2354         AssertReturn(pReNative->Core.aVars[idxVar].idxStackSlot != UINT8_MAX, UINT32_MAX);
     2339        AssertStmt(pReNative->Core.aVars[idxVar].idxStackSlot != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_7));
    23552340        off = iemNativeEmitStoreGprByBp(pReNative, off,
    23562341                                          pReNative->Core.aVars[idxVar].idxStackSlot * sizeof(uint64_t)
    23572342                                        - IEMNATIVE_FP_OFF_STACK_VARS,
    23582343                                        idxRegOld);
    2359         AssertReturn(off != UINT32_MAX, UINT32_MAX);
    23602344
    23612345        pReNative->Core.bmHstRegsWithGstShadow &= ~RT_BIT_32(idxRegOld);
     
    23752359 * up a register.
    23762360 *
    2377  * @returns The host register number, UINT8_MAX on failure.
     2361 * @returns The host register number; throws VBox status code on failure,
     2362 *          so no need to check the return value.
    23782363 * @param   pReNative       The native recompile state.
    23792364 * @param   poff            Pointer to the variable with the code buffer position.
     
    23842369 *                          (@c false, for iemNativeRegAllocTmpForGuestReg()).
    23852370 */
    2386 DECLHIDDEN(uint8_t) iemNativeRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    2387                                          bool fPreferVolatile /*= true*/) RT_NOEXCEPT
     2371DECL_HIDDEN_THROW(uint8_t) iemNativeRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, bool fPreferVolatile /*= true*/)
    23882372{
    23892373    /*
     
    24082392    {
    24092393        idxReg = iemNativeRegAllocFindFree(pReNative, poff, true /*fAllowVolatile*/);
    2410         AssertReturn(idxReg != UINT8_MAX, UINT8_MAX);
     2394        AssertStmt(idxReg != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_ALLOCATOR_NO_FREE_TMP));
    24112395    }
    24122396    return iemNativeRegMarkAllocated(pReNative, idxReg, kIemNativeWhat_Tmp);
     
    24232407 * read-only.  Free using iemNativeRegFreeTmpImm.
    24242408 *
    2425  * @returns The host register number, UINT8_MAX on failure.
     2409 * @returns The host register number; throws VBox status code on failure, so no
     2410 *          need to check the return value.
    24262411 * @param   pReNative       The native recompile state.
    24272412 * @param   poff            Pointer to the variable with the code buffer position.
     
    24342419 * @note    Reusing immediate values has not been implemented yet.
    24352420 */
    2436 DECLHIDDEN(uint8_t) iemNativeRegAllocTmpImm(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t uImm,
    2437                                             bool fPreferVolatile /*= true*/) RT_NOEXCEPT
    2438 {
    2439     uint8_t idxReg = iemNativeRegAllocTmp(pReNative, poff, fPreferVolatile);
    2440     if (idxReg < RT_ELEMENTS(pReNative->Core.aHstRegs))
    2441     {
    2442         uint32_t off = *poff;
    2443         *poff = off = iemNativeEmitLoadGprImm64(pReNative, off, idxReg, uImm);
    2444         AssertReturnStmt(off != UINT32_MAX, iemNativeRegFreeTmp(pReNative, idxReg), UINT8_MAX);
    2445     }
     2421DECL_HIDDEN_THROW(uint8_t)
     2422iemNativeRegAllocTmpImm(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t uImm, bool fPreferVolatile /*= true*/)
     2423{
     2424    uint8_t const idxReg = iemNativeRegAllocTmp(pReNative, poff, fPreferVolatile);
     2425    *poff = iemNativeEmitLoadGprImm64(pReNative, *poff, idxReg, uImm);
    24462426    return idxReg;
    24472427}
     
    25422522 * be emitted if we have to free up a register to satify the request.
    25432523 *
    2544  * @returns The host register number, UINT8_MAX on failure.
     2524 * @returns The host register number; throws VBox status code on failure, so no
     2525 *          need to check the return value.
    25452526 * @param   pReNative       The native recompile state.
    25462527 * @param   poff            Pointer to the variable with the code buffer
     
    25522533 * @sa      iemNativeRegAllocTmpForGuestRegIfAlreadyPresent
    25532534 */
    2554 DECLHIDDEN(uint8_t) iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    2555                                                     IEMNATIVEGSTREG enmGstReg, IEMNATIVEGSTREGUSE enmIntendedUse) RT_NOEXCEPT
     2535DECL_HIDDEN_THROW(uint8_t)
     2536iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     2537                                IEMNATIVEGSTREG enmGstReg, IEMNATIVEGSTREGUSE enmIntendedUse)
    25562538{
    25572539    Assert(enmGstReg < kIemNativeGstReg_End && g_aGstShadowInfo[enmGstReg].cb != 0);
     
    25842566            {
    25852567                uint8_t const idxRegNew = iemNativeRegAllocTmp(pReNative, poff);
    2586                 Assert(idxRegNew < RT_ELEMENTS(pReNative->Core.aHstRegs));
    2587 
    2588                 uint32_t off = *poff;
    2589                 *poff = off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegNew, idxReg);
    2590                 AssertReturn(off != UINT32_MAX, UINT8_MAX);
     2568
     2569                *poff = iemNativeEmitLoadGprFromGpr(pReNative, *poff, idxRegNew, idxReg);
    25912570
    25922571                Log12(("iemNativeRegAllocTmpForGuestReg: Duplicated %s for guest %s into %s for destructive calc\n",
     
    26222601            /** @todo share register for readonly access. */
    26232602            uint8_t const idxRegNew = iemNativeRegAllocTmp(pReNative, poff, enmIntendedUse == kIemNativeGstRegUse_Calculation);
    2624             AssertReturn(idxRegNew < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT8_MAX);
    2625 
    2626             uint32_t off = *poff;
    2627             *poff = off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegNew, idxReg);
    2628             AssertReturn(off != UINT32_MAX, UINT8_MAX);
     2603
     2604            *poff = iemNativeEmitLoadGprFromGpr(pReNative, *poff, idxRegNew, idxReg);
    26292605
    26302606            if (enmIntendedUse != kIemNativeGstRegUse_ForUpdate)
     
    26442620#ifdef VBOX_STRICT
    26452621        /* Strict builds: Check that the value is correct. */
    2646         uint32_t off = *poff;
    2647         *poff = off = iemNativeEmitGuestRegValueCheck(pReNative, off, idxReg, enmGstReg);
    2648         AssertReturn(off != UINT32_MAX, UINT8_MAX);
     2622        *poff = iemNativeEmitGuestRegValueCheck(pReNative, *poff, idxReg, enmGstReg);
    26492623#endif
    26502624
     
    26562630     */
    26572631    uint8_t const idxRegNew = iemNativeRegAllocTmp(pReNative, poff, enmIntendedUse == kIemNativeGstRegUse_Calculation);
    2658     AssertReturn(idxRegNew < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT8_MAX);
    2659 
    2660     uint32_t off = *poff;
    2661     *poff = off = iemNativeEmitLoadGprWithGstShadowReg(pReNative, off, idxRegNew, enmGstReg);
    2662     AssertReturn(off != UINT32_MAX, UINT8_MAX);
     2632
     2633    *poff = iemNativeEmitLoadGprWithGstShadowReg(pReNative, *poff, idxRegNew, enmGstReg);
    26632634
    26642635    if (enmIntendedUse != kIemNativeGstRegUse_Calculation)
    2665         iemNativeRegMarkAsGstRegShadow(pReNative, idxRegNew, enmGstReg, off);
     2636        iemNativeRegMarkAsGstRegShadow(pReNative, idxRegNew, enmGstReg, *poff);
    26662637    Log12(("iemNativeRegAllocTmpForGuestReg: Allocated %s for guest %s %s\n",
    26672638           g_apszIemNativeHstRegNames[idxRegNew], g_aGstShadowInfo[enmGstReg].pszName, s_pszIntendedUse[enmIntendedUse]));
     
    26822653 * The intended use can only be readonly!
    26832654 *
    2684  * @returns The host register number, UINT8_MAX on failure.
     2655 * @returns The host register number, UINT8_MAX if not present.
    26852656 * @param   pReNative       The native recompile state.
    26862657 * @param   poff            Pointer to the instruction buffer offset.
     
    26882659 *                          found.
    26892660 * @param   enmGstReg       The guest register that will is to be updated.
     2661 * @note    In strict builds, this may throw instruction buffer growth failures.
     2662 *          Non-strict builds will not throw anything.
    26902663 * @sa iemNativeRegAllocTmpForGuestReg
    26912664 */
    2692 DECLHIDDEN(uint8_t) iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    2693                                                                     IEMNATIVEGSTREG enmGstReg) RT_NOEXCEPT
     2665DECL_HIDDEN_THROW(uint8_t)
     2666iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
    26942667{
    26952668    Assert(enmGstReg < kIemNativeGstReg_End && g_aGstShadowInfo[enmGstReg].cb != 0);
     
    27192692#ifdef VBOX_STRICT
    27202693            /* Strict builds: Check that the value is correct. */
    2721             uint32_t off = *poff;
    2722             *poff = off = iemNativeEmitGuestRegValueCheck(pReNative, off, idxReg, enmGstReg);
    2723             AssertReturn(off != UINT32_MAX, UINT8_MAX);
     2694            *poff = iemNativeEmitGuestRegValueCheck(pReNative, *poff, idxReg, enmGstReg);
    27242695#else
    27252696            RT_NOREF(poff);
     
    27332704
    27342705
    2735 DECLHIDDEN(uint8_t)         iemNativeRegAllocVar(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint8_t idxVar) RT_NOEXCEPT;
     2706DECL_HIDDEN_THROW(uint8_t) iemNativeRegAllocVar(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint8_t idxVar);
    27362707
    27372708
     
    27392710 * Allocates argument registers for a function call.
    27402711 *
    2741  * @returns New code buffer offset on success, UINT32_MAX on failure.
     2712 * @returns New code buffer offset on success; throws VBox status code on failure, so no
     2713 *          need to check the return value.
    27422714 * @param   pReNative   The native recompile state.
    27432715 * @param   off         The current code buffer offset.
    27442716 * @param   cArgs       The number of arguments the function call takes.
    27452717 */
    2746 DECLHIDDEN(uint32_t) iemNativeRegAllocArgs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs) RT_NOEXCEPT
    2747 {
    2748     AssertReturn(cArgs <= IEMNATIVE_CALL_ARG_GREG_COUNT + IEMNATIVE_FRAME_STACK_ARG_COUNT, false);
     2718DECL_HIDDEN_THROW(uint32_t) iemNativeRegAllocArgs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs)
     2719{
     2720    AssertStmt(cArgs <= IEMNATIVE_CALL_ARG_GREG_COUNT + IEMNATIVE_FRAME_STACK_ARG_COUNT,
     2721               IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_4));
    27492722    Assert(RT_ELEMENTS(g_aidxIemNativeCallRegs) == IEMNATIVE_CALL_ARG_GREG_COUNT);
    27502723    Assert(RT_ELEMENTS(g_afIemNativeCallRegs) == IEMNATIVE_CALL_ARG_GREG_COUNT);
     
    27802753                    {
    27812754                        uint8_t const idxVar = pReNative->Core.aHstRegs[idxReg].idxVar;
    2782                         AssertReturn(idxVar < RT_ELEMENTS(pReNative->Core.aVars), false);
     2755                        AssertStmt(idxVar < RT_ELEMENTS(pReNative->Core.aVars),
     2756                                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_5));
    27832757                        Assert(pReNative->Core.aVars[idxVar].idxReg == idxReg);
    27842758                        Assert(pReNative->Core.bmVars & RT_BIT_32(idxVar));
     
    27892763                        {
    27902764                            off = iemNativeRegMoveOrSpillStackVar(pReNative, off, idxVar);
    2791                             AssertReturn(off != UINT32_MAX, false);
    27922765                            Assert(!(pReNative->Core.bmHstRegsWithGstShadow & RT_BIT_32(idxReg)));
    27932766                        }
     
    27982771                    case kIemNativeWhat_Arg:
    27992772                    case kIemNativeWhat_rc:
    2800                         AssertFailedReturn(false);
     2773                        AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_5));
    28012774                    default:
    2802                         AssertFailedReturn(false);
     2775                        AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_6));
    28032776                }
    28042777
     
    28222795
    28232796
    2824 DECLHIDDEN(uint8_t)         iemNativeRegAssignRc(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
     2797DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAssignRc(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg);
    28252798
    28262799
     
    29032876 * @param   fFreeArgVars    Whether to free argument variables for the call.
    29042877 */
    2905 DECLHIDDEN(uint32_t) iemNativeRegMoveAndFreeAndFlushAtCall(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2906                                                            uint8_t cArgs, bool fFreeArgVars) RT_NOEXCEPT
     2878DECL_HIDDEN_THROW(uint32_t)
     2879iemNativeRegMoveAndFreeAndFlushAtCall(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs, bool fFreeArgVars)
    29072880{
    29082881    /*
    29092882     * Free argument variables first (simplified).
    29102883     */
    2911     AssertReturn(cArgs <= RT_ELEMENTS(pReNative->Core.aidxArgVars), UINT32_MAX);
     2884    AssertStmt(cArgs <= RT_ELEMENTS(pReNative->Core.aidxArgVars), IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_6));
    29122885    if (fFreeArgVars && cArgs > 0)
    29132886    {
     
    29572930                        pReNative->Core.aVars[idxVar].idxReg = UINT8_MAX;
    29582931                    else
    2959                     {
    29602932                        off = iemNativeRegMoveOrSpillStackVar(pReNative, off, idxVar);
    2961                         AssertReturn(off != UINT32_MAX, UINT32_MAX);
    2962                     }
    29632933                    continue;
    29642934                }
     
    29792949                case kIemNativeWhat_Invalid:
    29802950                case kIemNativeWhat_End:
    2981                     AssertFailedReturn(UINT32_MAX);
     2951                    AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_1));
    29822952            }
    2983             AssertFailedReturn(UINT32_MAX);
     2953            AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_2));
    29842954        }
    29852955
     
    30793049 * RIP updates, since these are the most common ones.
    30803050 */
    3081 DECLHIDDEN(uint32_t) iemNativeRegFlushPendingWrites(PIEMRECOMPILERSTATE pReNative, uint32_t off) RT_NOEXCEPT
     3051DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushPendingWrites(PIEMRECOMPILERSTATE pReNative, uint32_t off)
    30823052{
    30833053    RT_NOREF(pReNative, off);
     
    31033073 *       that is something the caller needs to do if applicable.
    31043074 */
    3105 DECLHIDDEN(uint32_t) iemNativeEmitLoadGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3106                                                           uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg) RT_NOEXCEPT
     3075DECL_HIDDEN_THROW(uint32_t)
     3076iemNativeEmitLoadGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg)
    31073077{
    31083078    Assert((unsigned)enmGstReg < RT_ELEMENTS(g_aGstShadowInfo));
     
    31223092#endif
    31233093        default:
    3124             AssertFailedReturn(UINT32_MAX);
     3094            AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IPE_NOT_REACHED_DEFAULT_CASE));
    31253095    }
    31263096}
     
    31363106 *       Trashes EFLAGS on AMD64.
    31373107 */
    3138 static uint32_t iemNativeEmitGuestRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3139                                                 uint8_t idxReg, IEMNATIVEGSTREG enmGstReg) RT_NOEXCEPT
     3108static uint32_t
     3109iemNativeEmitGuestRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg, IEMNATIVEGSTREG enmGstReg)
    31403110{
    31413111# ifdef RT_ARCH_AMD64
    3142     uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
    3143     AssertReturn(pbCodeBuf, UINT32_MAX);
     3112    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
    31443113
    31453114    /* cmp reg, [mem] */
     
    31593128                pbCodeBuf[off++] = X86_OP_PRF_SIZE_OP;
    31603129            else
    3161                 AssertReturn(g_aGstShadowInfo[enmGstReg].cb == sizeof(uint32_t), UINT32_MAX);
     3130                AssertStmt(g_aGstShadowInfo[enmGstReg].cb == sizeof(uint32_t),
     3131                           IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_7));
    31623132            if (idxReg >= 8)
    31633133                pbCodeBuf[off++] = X86_OP_REX_R;
     
    32303200    off = iemNativeEmitLoadGprWithGstShadowReg(pReNative, off, IEMNATIVE_REG_FIXED_TMP0, enmGstReg);
    32313201
    3232     uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    3233     AssertReturn(pu32CodeBuf, UINT32_MAX);
     3202    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    32343203    /* sub tmp0, tmp0, idxReg */
    32353204    pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(true /*fSub*/, IEMNATIVE_REG_FIXED_TMP0, IEMNATIVE_REG_FIXED_TMP0, idxReg);
     
    32533222 * from the code if either are non-zero.
    32543223 */
    3255 DECLHIDDEN(uint32_t) iemNativeEmitCheckCallRetAndPassUp(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3256                                                         uint8_t idxInstr) RT_NOEXCEPT
     3224DECL_HIDDEN_THROW(uint32_t)
     3225iemNativeEmitCheckCallRetAndPassUp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr)
    32573226{
    32583227#ifdef RT_ARCH_AMD64
     
    32693238    /* edx = eax | rcPassUp */
    32703239    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    3271     AssertReturn(pbCodeBuf, UINT32_MAX);
    32723240    pbCodeBuf[off++] = 0x0b;                    /* or edx, eax */
    32733241    pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, X86_GREG_xDX, X86_GREG_xAX);
     
    32873255
    32883256    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    3289     AssertReturn(pu32CodeBuf, UINT32_MAX);
    32903257
    32913258    pu32CodeBuf[off++] = Armv8A64MkInstrOrr(ARMV8_A64_REG_X4, ARMV8_A64_REG_X3, ARMV8_A64_REG_X0, false /*f64Bit*/);
    32923259
    32933260    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, kIemNativeLabelType_NonZeroRetOrPassUp);
    3294     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    3295     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5), UINT32_MAX);
     3261    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5);
    32963262    pu32CodeBuf[off++] = Armv8A64MkInstrCbzCbnz(true /*fJmpIfNotZero*/, ARMV8_A64_REG_X4, false /*f64Bit*/);
    32973263
     
    33143280 * @param   idxInstr        The current instruction.
    33153281 */
    3316 DECLHIDDEN(uint32_t) iemNativeEmitCheckGprCanonicalMaybeRaiseGp0(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3317                                                                 uint8_t idxAddrReg, uint8_t idxInstr)
     3282DECL_HIDDEN_THROW(uint32_t)
     3283iemNativeEmitCheckGprCanonicalMaybeRaiseGp0(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxAddrReg, uint8_t idxInstr)
    33183284{
    33193285    RT_NOREF(idxInstr);
     
    33323298     */
    33333299    uint8_t const iTmpReg = iemNativeRegAllocTmp(pReNative, &off);
    3334     AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    33353300
    33363301    off = iemNativeEmitLoadGprFromGpr(pReNative, off, iTmpReg, idxAddrReg);
     
    33683333     */
    33693334    uint8_t const iTmpReg = iemNativeRegAllocTmp(pReNative, &off);
    3370     AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    33713335
    33723336    off = iemNativeEmitLoadGprImm64(pReNative, off, iTmpReg, UINT64_C(0x800000000000));
     
    34063370 * @param   idxInstr        The current instruction.
    34073371 */
    3408 DECLHIDDEN(uint32_t) iemNativeEmitCheckGpr32AgainstSegLimitMaybeRaiseGp0(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3409                                                                          uint8_t idxAddrReg, uint8_t idxSegReg, uint8_t idxInstr)
     3372DECL_HIDDEN_THROW(uint32_t)
     3373iemNativeEmitCheckGpr32AgainstSegLimitMaybeRaiseGp0(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3374                                                    uint8_t idxAddrReg, uint8_t idxSegReg, uint8_t idxInstr)
    34103375{
    34113376    /*
     
    34163381
    34173382    /** @todo implement expand down/whatnot checking */
    3418     AssertReturn(idxSegReg == X86_SREG_CS, UINT32_MAX);
     3383    AssertStmt(idxSegReg == X86_SREG_CS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_CASE_NOT_IMPLEMENTED_1));
    34193384
    34203385    uint8_t const iTmpLimReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off,
    34213386                                                               (IEMNATIVEGSTREG)(kIemNativeGstReg_SegLimitFirst + idxSegReg),
    34223387                                                               kIemNativeGstRegUse_ForUpdate);
    3423     AssertReturn(iTmpLimReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    34243388
    34253389    off = iemNativeEmitCmpGpr32WithGpr(pReNative, off, idxAddrReg, iTmpLimReg);
     
    34813445# endif
    34823446#endif
    3483     AssertReturn(off != UINT32_MAX, off);
    34843447
    34853448    /*
     
    35683531     */
    35693532    off = iemNativeEmitCheckCallRetAndPassUp(pReNative, off, pCallEntry->idxInstr);
    3570     AssertReturn(off != UINT32_MAX, off);
    35713533
    35723534    return off;
     
    36733635     */
    36743636    off = iemNativeEmitGprZero(pReNative,off, IEMNATIVE_CALL_RET_GREG);
    3675     AssertReturn(off != UINT32_MAX, UINT32_MAX);
    36763637
    36773638    /*
     
    36793640     */
    36803641    uint32_t const idxReturn = iemNativeLabelCreate(pReNative, kIemNativeLabelType_Return, off);
    3681     AssertReturn(idxReturn != UINT32_MAX, UINT32_MAX);
    36823642    *pidxReturnLabel = idxReturn;
    36833643
     
    36863646     */
    36873647#ifdef RT_ARCH_AMD64
    3688     uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 20);
    3689     AssertReturn(pbCodeBuf, UINT32_MAX);
     3648    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 20);
    36903649
    36913650    /* Reposition esp at the r15 restore point. */
     
    37143673
    37153674#elif RT_ARCH_ARM64
    3716     uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10);
    3717     AssertReturn(pu32CodeBuf, UINT32_MAX);
     3675    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10);
    37183676
    37193677    /* ldp x19, x20, [sp #IEMNATIVE_FRAME_VAR_SIZE]! ; Unallocate the variable space and restore x19+x20. */
     
    37713729     * unwind description for all the code.
    37723730     */
    3773     uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
    3774     AssertReturn(pbCodeBuf, UINT32_MAX);
     3731    uint8_t *const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
    37753732    pbCodeBuf[off++] = 0x50 + X86_GREG_xBP;     /* push rbp */
    37763733    pbCodeBuf[off++] = X86_OP_REX_W;            /* mov rbp, rsp */
     
    38143771     * return address our selves here.  We save all non-volatile registers.
    38153772     */
    3816     uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10);
    3817     AssertReturn(pu32CodeBuf, UINT32_MAX);
     3773    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10);
    38183774
    38193775# ifdef RT_OS_DARWIN /** @todo This seems to be requirement by libunwind for JIT FDEs. Investigate further as been unable
     
    38883844    return iemNativeEmitCImplCall1(pReNative, off, pCallEntry->idxInstr, (uintptr_t)a_pfnCImpl, a_cbInstr, a0)
    38893845
    3890 DECLINLINE(uint32_t) iemNativeEmitCImplCall1(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
    3891                                              uintptr_t pfnCImpl, uint8_t cbInstr, uint64_t uArg0)
     3846DECL_INLINE_THROW(uint32_t) iemNativeEmitCImplCall1(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
     3847                                                    uintptr_t pfnCImpl, uint8_t cbInstr, uint64_t uArg0)
    38923848{
    38933849    return iemNativeEmitCImplCall(pReNative, off, idxInstr, pfnCImpl, cbInstr, 1, uArg0, 0, 0);
     
    38983854    return iemNativeEmitCImplCall2(pReNative, off, pCallEntry->idxInstr, (uintptr_t)a_pfnCImpl, a_cbInstr, a0, a1)
    38993855
    3900 DECLINLINE(uint32_t) iemNativeEmitCImplCall2(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
    3901                                              uintptr_t pfnCImpl, uint8_t cbInstr, uint64_t uArg0, uint64_t uArg1)
     3856DECL_INLINE_THROW(uint32_t) iemNativeEmitCImplCall2(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
     3857                                                    uintptr_t pfnCImpl, uint8_t cbInstr, uint64_t uArg0, uint64_t uArg1)
    39023858{
    39033859    return iemNativeEmitCImplCall(pReNative, off, idxInstr, pfnCImpl, cbInstr, 2, uArg0, uArg1, 0);
     
    39083864    return iemNativeEmitCImplCall3(pReNative, off, pCallEntry->idxInstr, (uintptr_t)a_pfnCImpl, a_cbInstr, a0, a1, a2)
    39093865
    3910 DECLINLINE(uint32_t) iemNativeEmitCImplCall3(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
    3911                                              uintptr_t pfnCImpl, uint8_t cbInstr, uint64_t uArg0, uint64_t uArg1, uint64_t uArg2)
     3866DECL_INLINE_THROW(uint32_t) iemNativeEmitCImplCall3(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
     3867                                                    uintptr_t pfnCImpl, uint8_t cbInstr, uint64_t uArg0, uint64_t uArg1,
     3868                                                    uint64_t uArg2)
    39123869{
    39133870    return iemNativeEmitCImplCall(pReNative, off, idxInstr, pfnCImpl, cbInstr, 3, uArg0, uArg1, uArg2);
     
    39203877
    39213878#define IEM_MC_ADVANCE_RIP_AND_FINISH_THREADED_PC64(a_cbInstr) \
    3922     off = iemNativeEmitAddToRip64AndFinishingNoFlags(pReNative, off, (a_cbInstr)); \
    3923     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     3879    off = iemNativeEmitAddToRip64AndFinishingNoFlags(pReNative, off, (a_cbInstr))
    39243880
    39253881/** Same as iemRegAddToRip64AndFinishingNoFlags. */
    3926 DECLINLINE(uint32_t) iemNativeEmitAddToRip64AndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr)
     3882DECL_INLINE_THROW(uint32_t)
     3883iemNativeEmitAddToRip64AndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr)
    39273884{
    39283885    /* Allocate a temporary PC register. */
    39293886    uint8_t const idxPcReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Pc, kIemNativeGstRegUse_ForUpdate);
    3930     AssertReturn(idxPcReg != UINT8_MAX, UINT32_MAX);
    39313887
    39323888    /* Perform the addition and store the result. */
     
    39423898
    39433899#define IEM_MC_ADVANCE_RIP_AND_FINISH_THREADED_PC32(a_cbInstr) \
    3944     off = iemNativeEmitAddToEip32AndFinishingNoFlags(pReNative, off, (a_cbInstr)); \
    3945     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     3900    off = iemNativeEmitAddToEip32AndFinishingNoFlags(pReNative, off, (a_cbInstr))
    39463901
    39473902/** Same as iemRegAddToEip32AndFinishingNoFlags. */
    3948 DECLINLINE(uint32_t) iemNativeEmitAddToEip32AndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr)
     3903DECL_INLINE_THROW(uint32_t)
     3904iemNativeEmitAddToEip32AndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr)
    39493905{
    39503906    /* Allocate a temporary PC register. */
    39513907    uint8_t const idxPcReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Pc, kIemNativeGstRegUse_ForUpdate);
    3952     AssertReturn(idxPcReg != UINT8_MAX, UINT32_MAX);
    39533908
    39543909    /* Perform the addition and store the result. */
     
    39643919
    39653920#define IEM_MC_ADVANCE_RIP_AND_FINISH_THREADED_PC16(a_cbInstr) \
    3966     off = iemNativeEmitAddToIp16AndFinishingNoFlags(pReNative, off, (a_cbInstr)); \
    3967     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     3921    off = iemNativeEmitAddToIp16AndFinishingNoFlags(pReNative, off, (a_cbInstr))
    39683922
    39693923/** Same as iemRegAddToIp16AndFinishingNoFlags. */
    3970 DECLINLINE(uint32_t) iemNativeEmitAddToIp16AndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr)
     3924DECL_INLINE_THROW(uint32_t)
     3925iemNativeEmitAddToIp16AndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr)
    39713926{
    39723927    /* Allocate a temporary PC register. */
    39733928    uint8_t const idxPcReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Pc, kIemNativeGstRegUse_ForUpdate);
    3974     AssertReturn(idxPcReg != UINT8_MAX, UINT32_MAX);
    39753929
    39763930    /* Perform the addition and store the result. */
     
    39923946#define IEM_MC_REL_JMP_S8_AND_FINISH_THREADED_PC64(a_i8, a_cbInstr, a_enmEffOpSize) \
    39933947    off = iemNativeEmitRip64RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (int8_t)(a_i8), \
    3994                                                             (a_enmEffOpSize), pCallEntry->idxInstr); \
    3995     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     3948                                                            (a_enmEffOpSize), pCallEntry->idxInstr)
    39963949
    39973950
    39983951#define IEM_MC_REL_JMP_S16_AND_FINISH_THREADED_PC64(a_i16, a_cbInstr) \
    39993952    off =  iemNativeEmitRip64RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (int16_t)(a_i16), \
    4000                                                              IEMMODE_16BIT, pCallEntry->idxInstr); \
    4001     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     3953                                                             IEMMODE_16BIT, pCallEntry->idxInstr)
    40023954
    40033955#define IEM_MC_REL_JMP_S32_AND_FINISH_THREADED_PC64(a_i32, a_cbInstr) \
    40043956    off =  iemNativeEmitRip64RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (a_i32), \
    4005                                                              IEMMODE_64BIT, pCallEntry->idxInstr); \
    4006     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     3957                                                             IEMMODE_64BIT, pCallEntry->idxInstr)
    40073958
    40083959/** Same as iemRegRip64RelativeJumpS8AndFinishNoFlags,
    40093960 *  iemRegRip64RelativeJumpS16AndFinishNoFlags and
    40103961 *  iemRegRip64RelativeJumpS32AndFinishNoFlags. */
    4011 DECLINLINE(uint32_t) iemNativeEmitRip64RelativeJumpAndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    4012                                                                        uint8_t cbInstr, int32_t offDisp, IEMMODE enmEffOpSize,
    4013                                                                       uint8_t idxInstr)
     3962DECL_INLINE_THROW(uint32_t)
     3963iemNativeEmitRip64RelativeJumpAndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr,
     3964                                                  int32_t offDisp, IEMMODE enmEffOpSize, uint8_t idxInstr)
    40143965{
    40153966    Assert(enmEffOpSize == IEMMODE_64BIT || enmEffOpSize == IEMMODE_16BIT);
     
    40203971    /* Allocate a temporary PC register. */
    40213972    uint8_t const idxPcReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Pc, kIemNativeGstRegUse_ForUpdate);
    4022     AssertReturn(idxPcReg != UINT8_MAX, UINT32_MAX);
    40233973
    40243974    /* Perform the addition. */
     
    40473997#define IEM_MC_REL_JMP_S8_AND_FINISH_THREADED_PC32(a_i8, a_cbInstr, a_enmEffOpSize) \
    40483998    off = iemNativeEmitEip32RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (int8_t)(a_i8), \
    4049                                                             (a_enmEffOpSize), pCallEntry->idxInstr); \
    4050     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     3999                                                            (a_enmEffOpSize), pCallEntry->idxInstr)
    40514000
    40524001#define IEM_MC_REL_JMP_S16_AND_FINISH_THREADED_PC32(a_i16, a_cbInstr) \
    40534002    off = iemNativeEmitEip32RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (int16_t)(a_i16), \
    4054                                                             IEMMODE_16BIT, pCallEntry->idxInstr); \
    4055     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4003                                                            IEMMODE_16BIT, pCallEntry->idxInstr)
    40564004
    40574005#define IEM_MC_REL_JMP_S32_AND_FINISH_THREADED_PC32(a_i32, a_cbInstr) \
    40584006    off = iemNativeEmitEip32RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (a_i32), \
    4059                                                             IEMMODE_32BIT, pCallEntry->idxInstr); \
    4060     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4007                                                            IEMMODE_32BIT, pCallEntry->idxInstr)
    40614008
    40624009/** Same as iemRegEip32RelativeJumpS8AndFinishNoFlags,
    40634010 *  iemRegEip32RelativeJumpS16AndFinishNoFlags and
    40644011 *  iemRegEip32RelativeJumpS32AndFinishNoFlags. */
    4065 DECLINLINE(uint32_t) iemNativeEmitEip32RelativeJumpAndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    4066                                                                        uint8_t cbInstr, int32_t offDisp, IEMMODE enmEffOpSize,
    4067                                                                       uint8_t idxInstr)
     4012DECL_INLINE_THROW(uint32_t)
     4013iemNativeEmitEip32RelativeJumpAndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr,
     4014                                                  int32_t offDisp, IEMMODE enmEffOpSize, uint8_t idxInstr)
    40684015{
    40694016    Assert(enmEffOpSize == IEMMODE_32BIT || enmEffOpSize == IEMMODE_16BIT);
     
    40744021    /* Allocate a temporary PC register. */
    40754022    uint8_t const idxPcReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Pc, kIemNativeGstRegUse_ForUpdate);
    4076     AssertReturn(idxPcReg != UINT8_MAX, UINT32_MAX);
    40774023
    40784024    /* Perform the addition. */
     
    40964042
    40974043#define IEM_MC_REL_JMP_S8_AND_FINISH_THREADED_PC16(a_i8, a_cbInstr) \
    4098     off = iemNativeEmitIp16RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (int8_t)(a_i8), pCallEntry->idxInstr); \
    4099     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4044    off = iemNativeEmitIp16RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (int8_t)(a_i8), pCallEntry->idxInstr)
    41004045
    41014046#define IEM_MC_REL_JMP_S16_AND_FINISH_THREADED_PC16(a_i16, a_cbInstr) \
    4102     off = iemNativeEmitIp16RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (int16_t)(a_i16), pCallEntry->idxInstr); \
    4103     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4047    off = iemNativeEmitIp16RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (int16_t)(a_i16), pCallEntry->idxInstr)
    41044048
    41054049#define IEM_MC_REL_JMP_S32_AND_FINISH_THREADED_PC16(a_i32, a_cbInstr) \
    4106     off = iemNativeEmitIp16RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (a_i32), pCallEntry->idxInstr); \
    4107     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4050    off = iemNativeEmitIp16RelativeJumpAndFinishingNoFlags(pReNative, off, (a_cbInstr), (a_i32), pCallEntry->idxInstr)
    41084051
    41094052/** Same as iemRegIp16RelativeJumpS8AndFinishNoFlags. */
    4110 DECLINLINE(uint32_t) iemNativeEmitIp16RelativeJumpAndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    4111                                                                       uint8_t cbInstr, int32_t offDisp, uint8_t idxInstr)
     4053DECL_INLINE_THROW(uint32_t)
     4054iemNativeEmitIp16RelativeJumpAndFinishingNoFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     4055                                                 uint8_t cbInstr, int32_t offDisp, uint8_t idxInstr)
    41124056{
    41134057    /* We speculatively modify PC and may raise #GP(0), so make sure the right value is in CPUMCTX. */
     
    41164060    /* Allocate a temporary PC register. */
    41174061    uint8_t const idxPcReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Pc, kIemNativeGstRegUse_ForUpdate);
    4118     AssertReturn(idxPcReg != UINT8_MAX, UINT32_MAX);
    41194062
    41204063    /* Perform the addition, clamp the result, check limit (may #GP(0) + exit TB) and store the result. */
     
    41414084 *          (too many nestings)
    41424085 */
    4143 DECLINLINE(PIEMNATIVECOND) iemNativeCondPushIf(PIEMRECOMPILERSTATE pReNative)
     4086DECL_INLINE_THROW(PIEMNATIVECOND) iemNativeCondPushIf(PIEMRECOMPILERSTATE pReNative)
    41444087{
    41454088    uint32_t const idxStack = pReNative->cCondDepth;
    4146     AssertReturn(idxStack < RT_ELEMENTS(pReNative->aCondStack), NULL);
     4089    AssertStmt(idxStack < RT_ELEMENTS(pReNative->aCondStack), IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_COND_TOO_DEEPLY_NESTED));
    41474090
    41484091    PIEMNATIVECOND const pEntry = &pReNative->aCondStack[idxStack];
     
    41524095    pEntry->fInElse       = false;
    41534096    pEntry->idxLabelElse  = iemNativeLabelCreate(pReNative, kIemNativeLabelType_Else, UINT32_MAX /*offWhere*/, uCondSeqNo);
    4154     AssertReturn(pEntry->idxLabelElse != UINT32_MAX, NULL);
    41554097    pEntry->idxLabelEndIf = iemNativeLabelCreate(pReNative, kIemNativeLabelType_Endif, UINT32_MAX /*offWhere*/, uCondSeqNo);
    4156     AssertReturn(pEntry->idxLabelEndIf != UINT32_MAX, NULL);
    41574098
    41584099    return pEntry;
     
    41634104 * Start of the if-block, snapshotting the register and variable state.
    41644105 */
    4165 DECLINLINE(void) iemNativeCondStartIfBlock(PIEMRECOMPILERSTATE pReNative, uint32_t offIfBlock, uint32_t idxLabelIf = UINT32_MAX)
     4106DECL_INLINE_THROW(void)
     4107iemNativeCondStartIfBlock(PIEMRECOMPILERSTATE pReNative, uint32_t offIfBlock, uint32_t idxLabelIf = UINT32_MAX)
    41664108{
    41674109    Assert(offIfBlock != UINT32_MAX);
     
    41874129#define IEM_MC_ELSE() } while (0); \
    41884130        off = iemNativeEmitElse(pReNative, off); \
    4189         AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    41904131        do {
    41914132
    41924133/** Emits code related to IEM_MC_ELSE. */
    4193 DECLINLINE(uint32_t) iemNativeEmitElse(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     4134DECL_INLINE_THROW(uint32_t) iemNativeEmitElse(PIEMRECOMPILERSTATE pReNative, uint32_t off)
    41944135{
    41954136    /* Check sanity and get the conditional stack entry. */
     
    42164157
    42174158#define IEM_MC_ENDIF() } while (0); \
    4218         off = iemNativeEmitEndIf(pReNative, off); \
    4219         AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4159        off = iemNativeEmitEndIf(pReNative, off)
    42204160
    42214161/** Emits code related to IEM_MC_ENDIF. */
    4222 DECLINLINE(uint32_t) iemNativeEmitEndIf(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     4162DECL_INLINE_THROW(uint32_t) iemNativeEmitEndIf(PIEMRECOMPILERSTATE pReNative, uint32_t off)
    42234163{
    42244164    /* Check sanity and get the conditional stack entry. */
     
    43084248
    43094249        /* Finally, check that the host register allocations matches. */
    4310         AssertMsgReturn(pReNative->Core.bmHstRegs == pOther->bmHstRegs,
    4311                         ("Core.bmHstRegs=%#x pOther->bmHstRegs=%#x - %#x\n",
    4312                          pReNative->Core.bmHstRegs, pOther->bmHstRegs, pReNative->Core.bmHstRegs ^ pOther->bmHstRegs),
    4313                         UINT32_MAX);
     4250        AssertMsgStmt(pReNative->Core.bmHstRegs == pOther->bmHstRegs,
     4251                      ("Core.bmHstRegs=%#x pOther->bmHstRegs=%#x - %#x\n",
     4252                       pReNative->Core.bmHstRegs, pOther->bmHstRegs, pReNative->Core.bmHstRegs ^ pOther->bmHstRegs),
     4253                      IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_COND_ENDIF_RECONCILIATION_FAILED));
    43144254    }
    43154255
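For reference, this is the conversion pattern applied throughout these hunks: sentinel-value returns (UINT32_MAX/UINT8_MAX/NULL) are replaced by a longjmp/throw back to the recompiler entry point, so call sites no longer need to test the result. A minimal before/after sketch, taken from the iemNativeCondPushIf hunk above:

    /* Before: failure returned a sentinel the caller had to check. */
    AssertReturn(idxStack < RT_ELEMENTS(pReNative->aCondStack), NULL);

    /* After: failure jumps straight out with a specific VERR_IEM_* status. */
    AssertStmt(idxStack < RT_ELEMENTS(pReNative->aCondStack),
               IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_COND_TOO_DEEPLY_NESTED));

AssertMsgStmt (as in the register-state reconciliation check just above) works the same way when a formatted message is wanted on failure.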
     
    43324272#define IEM_MC_IF_EFL_ANY_BITS_SET(a_fBits) \
    43334273        off = iemNativeEmitIfEflagAnysBitsSet(pReNative, off, (a_fBits)); \
    4334         AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    43354274        do {
    43364275
    43374276/** Emits code for IEM_MC_IF_EFL_ANY_BITS_SET. */
    4338 DECLINLINE(uint32_t) iemNativeEmitIfEflagAnysBitsSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitsInEfl)
    4339 {
    4340     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4341     AssertReturn(pEntry, UINT32_MAX);
     4277DECL_INLINE_THROW(uint32_t) iemNativeEmitIfEflagAnysBitsSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitsInEfl)
     4278{
     4279    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    43424280
    43434281    /* Get the eflags. */
    43444282    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    43454283                                                              kIemNativeGstRegUse_ReadOnly);
    4346     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    43474284
    43484285    /* Test and jump. */
     
    43614298#define IEM_MC_IF_EFL_NO_BITS_SET(a_fBits) \
    43624299        off = iemNativeEmitIfEflagNoBitsSet(pReNative, off, (a_fBits)); \
    4363         AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    43644300        do {
    43654301
    43664302/** Emits code for IEM_MC_IF_EFL_NO_BITS_SET. */
    4367 DECLINLINE(uint32_t) iemNativeEmitIfEflagNoBitsSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitsInEfl)
    4368 {
    4369     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4370     AssertReturn(pEntry, UINT32_MAX);
     4303DECL_INLINE_THROW(uint32_t) iemNativeEmitIfEflagNoBitsSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitsInEfl)
     4304{
     4305    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    43714306
    43724307    /* Get the eflags. */
    43734308    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    43744309                                                              kIemNativeGstRegUse_ReadOnly);
    4375     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    43764310
    43774311    /* Test and jump. */
     
    43904324#define IEM_MC_IF_EFL_BIT_SET(a_fBit) \
    43914325        off = iemNativeEmitIfEflagsBitSet(pReNative, off, (a_fBit)); \
    4392         AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    43934326        do {
    43944327
    43954328/** Emits code for IEM_MC_IF_EFL_BIT_SET. */
    4396 DECLINLINE(uint32_t) iemNativeEmitIfEflagsBitSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl)
    4397 {
    4398     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4399     AssertReturn(pEntry, UINT32_MAX);
     4329DECL_INLINE_THROW(uint32_t) iemNativeEmitIfEflagsBitSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl)
     4330{
     4331    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    44004332
    44014333    /* Get the eflags. */
    44024334    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    44034335                                                              kIemNativeGstRegUse_ReadOnly);
    4404     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    44054336
    44064337    unsigned const iBitNo = ASMBitFirstSetU32(fBitInEfl) - 1;
     
    44224353#define IEM_MC_IF_EFL_BIT_NOT_SET(a_fBit) \
    44234354        off = iemNativeEmitIfEflagsBitNotSet(pReNative, off, (a_fBit)); \
    4424         AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    44254355        do {
    44264356
    44274357/** Emits code for IEM_MC_IF_EFL_BIT_NOT_SET. */
    4428 DECLINLINE(uint32_t) iemNativeEmitIfEflagsBitNotSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl)
    4429 {
    4430     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4431     AssertReturn(pEntry, UINT32_MAX);
     4358DECL_INLINE_THROW(uint32_t) iemNativeEmitIfEflagsBitNotSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl)
     4359{
     4360    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    44324361
    44334362    /* Get the eflags. */
    44344363    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    44354364                                                              kIemNativeGstRegUse_ReadOnly);
    4436     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    44374365
    44384366    unsigned const iBitNo = ASMBitFirstSetU32(fBitInEfl) - 1;
     
    44544382#define IEM_MC_IF_EFL_BITS_EQ(a_fBit1, a_fBit2)         \
    44554383    off = iemNativeEmitIfEflagsTwoBitsEqual(pReNative, off, a_fBit1, a_fBit2, false /*fInverted*/); \
    4456     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    44574384    do {
    44584385
    44594386#define IEM_MC_IF_EFL_BITS_NE(a_fBit1, a_fBit2)         \
    44604387    off = iemNativeEmitIfEflagsTwoBitsEqual(pReNative, off, a_fBit1, a_fBit2, true /*fInverted*/); \
    4461     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    44624388    do {
    44634389
    44644390/** Emits code for IEM_MC_IF_EFL_BITS_EQ and IEM_MC_IF_EFL_BITS_NE. */
    4465 DECLINLINE(uint32_t) iemNativeEmitIfEflagsTwoBitsEqual(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    4466                                                        uint32_t fBit1InEfl, uint32_t fBit2InEfl, bool fInverted)
    4467 {
    4468     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4469     AssertReturn(pEntry, UINT32_MAX);
     4391DECL_INLINE_THROW(uint32_t)
     4392iemNativeEmitIfEflagsTwoBitsEqual(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     4393                                  uint32_t fBit1InEfl, uint32_t fBit2InEfl, bool fInverted)
     4394{
     4395    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    44704396
    44714397    /* Get the eflags. */
    44724398    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    44734399                                                              kIemNativeGstRegUse_ReadOnly);
    4474     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    44754400
    44764401    unsigned const iBitNo1 = ASMBitFirstSetU32(fBit1InEfl) - 1;
     
    44834408#ifdef RT_ARCH_AMD64
    44844409    uint8_t const idxTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBit1InEfl);
    4485     AssertReturn(idxTmpReg != UINT8_MAX, UINT32_MAX);
    44864410
    44874411    off = iemNativeEmitAndGpr32ByGpr32(pReNative, off, idxTmpReg, idxEflReg);
     
    44934417
    44944418#elif defined(RT_ARCH_ARM64)
    4495     uint8_t const idxTmpReg = iemNativeRegAllocTmp(pReNative, &off);
    4496     AssertReturn(idxTmpReg != UINT8_MAX, UINT32_MAX);
    4497 
    4498     uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    4499     AssertReturn(pu32CodeBuf, UINT32_MAX);
     4419    uint8_t const    idxTmpReg   = iemNativeRegAllocTmp(pReNative, &off);
     4420    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    45004421
    45014422    /* and tmpreg, eflreg, #1<<iBitNo1 */
     
    45334454#define IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(a_fBit, a_fBit1, a_fBit2) \
    45344455    off = iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(pReNative, off, a_fBit, a_fBit1, a_fBit2, false /*fInverted*/); \
    4535     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    45364456    do {
    45374457
    45384458#define IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(a_fBit, a_fBit1, a_fBit2) \
    45394459    off = iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(pReNative, off, a_fBit, a_fBit1, a_fBit2, true /*fInverted*/); \
    4540     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    45414460    do {
    45424461
    45434462/** Emits code for IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ and
    45444463 *  IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE. */
    4545 DECLINLINE(uint32_t) iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl,
    4546                                                                   uint32_t fBit1InEfl, uint32_t fBit2InEfl, bool fInverted)
    4547 {
    4548     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4549     AssertReturn(pEntry, UINT32_MAX);
     4464DECL_INLINE_THROW(uint32_t)
     4465iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl,
     4466                                              uint32_t fBit1InEfl, uint32_t fBit2InEfl, bool fInverted)
     4467{
     4468    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    45504469
    45514470    /* We need an if-block label for the non-inverted variant. */
    45524471    uint32_t const idxLabelIf = fInverted ? iemNativeLabelCreate(pReNative, kIemNativeLabelType_If, UINT32_MAX,
    45534472                                                                 pReNative->paLabels[pEntry->idxLabelElse].uData) : UINT32_MAX;
    4554     AssertReturn(idxLabelIf != UINT32_MAX || !fInverted, UINT32_MAX);
    45554473
    45564474    /* Get the eflags. */
    45574475    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    45584476                                                              kIemNativeGstRegUse_ReadOnly);
    4559     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    45604477
    45614478    /* Translate the flag masks to bit numbers. */
     
    45774494    uint8_t const idxTmpReg = iemNativeRegAllocTmp(pReNative, &off);
    45784495#endif
    4579     AssertReturn(idxTmpReg != UINT8_MAX, UINT32_MAX);
    45804496
    45814497    /* Check for the lone bit first. */
     
    45954511
    45964512#elif defined(RT_ARCH_ARM64)
    4597     uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    4598     AssertReturn(pu32CodeBuf, UINT32_MAX);
     4513    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    45994514
    46004515    /* and tmpreg, eflreg, #1<<iBitNo1 */
     
    46324547#define IEM_MC_IF_CX_IS_NZ() \
    46334548    off = iemNativeEmitIfCxIsNotZero(pReNative, off); \
    4634     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    46354549    do {
    46364550
    46374551/** Emits code for IEM_MC_IF_CX_IS_NZ. */
    4638 DECLINLINE(uint32_t) iemNativeEmitIfCxIsNotZero(PIEMRECOMPILERSTATE pReNative, uint32_t off)
    4639 {
    4640     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4641     AssertReturn(pEntry, UINT32_MAX);
     4552DECL_INLINE_THROW(uint32_t) iemNativeEmitIfCxIsNotZero(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     4553{
     4554    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    46424555
    46434556    uint8_t const idxGstRcxReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off,
    46444557                                                                 (IEMNATIVEGSTREG)(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
    46454558                                                                 kIemNativeGstRegUse_ReadOnly);
    4646     AssertReturn(idxGstRcxReg != UINT8_MAX, UINT32_MAX);
    46474559    off = iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfNoneSet(pReNative, off, idxGstRcxReg, UINT16_MAX, pEntry->idxLabelElse);
    46484560    iemNativeRegFreeTmp(pReNative, idxGstRcxReg);
     
    46554567#define IEM_MC_IF_ECX_IS_NZ() \
    46564568    off = iemNativeEmitIfRcxEcxIsNotZero(pReNative, off, false /*f64Bit*/); \
    4657     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    46584569    do {
    46594570
    46604571#define IEM_MC_IF_RCX_IS_NZ() \
    46614572    off = iemNativeEmitIfRcxEcxIsNotZero(pReNative, off, true /*f64Bit*/); \
    4662     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    46634573    do {
    46644574
    46654575/** Emits code for IEM_MC_IF_ECX_IS_NZ and IEM_MC_IF_RCX_IS_NZ. */
    4666 DECLINLINE(uint32_t) iemNativeEmitIfRcxEcxIsNotZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, bool f64Bit)
    4667 {
    4668     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4669     AssertReturn(pEntry, UINT32_MAX);
     4576DECL_INLINE_THROW(uint32_t) iemNativeEmitIfRcxEcxIsNotZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, bool f64Bit)
     4577{
     4578    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    46704579
    46714580    uint8_t const idxGstRcxReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off,
    46724581                                                                 (IEMNATIVEGSTREG)(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
    46734582                                                                 kIemNativeGstRegUse_ReadOnly);
    4674     AssertReturn(idxGstRcxReg != UINT8_MAX, UINT32_MAX);
    46754583    off = iemNativeEmitTestIfGprIsZeroAndJmpToLabel(pReNative, off, idxGstRcxReg, f64Bit, pEntry->idxLabelElse);
    46764584    iemNativeRegFreeTmp(pReNative, idxGstRcxReg);
     
    46834591#define IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(a_fBit) \
    46844592    off = iemNativeEmitIfCxIsNotZeroAndTestEflagsBit(pReNative, off, a_fBit, true /*fCheckIfSet*/); \
    4685     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    46864593    do {
    46874594
    46884595#define IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(a_fBit) \
    46894596    off = iemNativeEmitIfCxIsNotZeroAndTestEflagsBit(pReNative, off, a_fBit, false /*fCheckIfSet*/); \
    4690     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    46914597    do {
    46924598
    46934599/** Emits code for IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET and IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET. */
    4694 DECLINLINE(uint32_t) iemNativeEmitIfCxIsNotZeroAndTestEflagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    4695                                                                  uint32_t fBitInEfl, bool fCheckIfSet)
    4696 {
    4697     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4698     AssertReturn(pEntry, UINT32_MAX);
     4600DECL_INLINE_THROW(uint32_t)
     4601iemNativeEmitIfCxIsNotZeroAndTestEflagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl, bool fCheckIfSet)
     4602{
     4603    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    46994604
    47004605    /* We have to load both RCX and EFLAGS before we can start branching,
     
    47024607       register allocator state.
    47034608       Doing EFLAGS first as it's more likely to be loaded, right? */
    4704     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    4705                                                               kIemNativeGstRegUse_ReadOnly);
    4706     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    4707 
     4609    uint8_t const idxEflReg    = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
     4610                                                                 kIemNativeGstRegUse_ReadOnly);
    47084611    uint8_t const idxGstRcxReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off,
    47094612                                                                 (IEMNATIVEGSTREG)(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
    47104613                                                                 kIemNativeGstRegUse_ReadOnly);
    4711     AssertReturn(idxGstRcxReg != UINT8_MAX, UINT32_MAX);
    47124614
    47134615    /** @todo we could reduce this to a single branch instruction by spending a
     
    47334635#define IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(a_fBit) \
    47344636    off = iemNativeEmitIfRcxEcxIsNotZeroAndTestEflagsBit(pReNative, off, a_fBit, true /*fCheckIfSet*/, false /*f64Bit*/); \
    4735     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    47364637    do {
    47374638
    47384639#define IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(a_fBit) \
    47394640    off = iemNativeEmitIfRcxEcxIsNotZeroAndTestEflagsBit(pReNative, off, a_fBit, false /*fCheckIfSet*/, false /*f64Bit*/); \
    4740     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    47414641    do {
    47424642
    47434643#define IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(a_fBit) \
    47444644    off = iemNativeEmitIfRcxEcxIsNotZeroAndTestEflagsBit(pReNative, off, a_fBit, true /*fCheckIfSet*/, true /*f64Bit*/); \
    4745     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    47464645    do {
    47474646
    47484647#define IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(a_fBit) \
    47494648    off = iemNativeEmitIfRcxEcxIsNotZeroAndTestEflagsBit(pReNative, off, a_fBit, false /*fCheckIfSet*/, true /*f64Bit*/); \
    4750     AssertReturn(off != UINT32_MAX, UINT32_MAX); \
    47514649    do {
    47524650
     
    47554653 *  IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET and
    47564654 *  IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET. */
    4757 DECLINLINE(uint32_t) iemNativeEmitIfRcxEcxIsNotZeroAndTestEflagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    4758                                                                     uint32_t fBitInEfl, bool fCheckIfSet, bool f64Bit)
    4759 {
    4760     PIEMNATIVECOND pEntry = iemNativeCondPushIf(pReNative);
    4761     AssertReturn(pEntry, UINT32_MAX);
     4655DECL_INLINE_THROW(uint32_t)
     4656iemNativeEmitIfRcxEcxIsNotZeroAndTestEflagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     4657                                               uint32_t fBitInEfl, bool fCheckIfSet, bool f64Bit)
     4658{
     4659    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    47624660
    47634661    /* We have to load both RCX and EFLAGS before we can start branching,
     
    47654663       register allocator state.
    47664664       Doing EFLAGS first as it's more likely to be loaded, right? */
    4767     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    4768                                                               kIemNativeGstRegUse_ReadOnly);
    4769     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    4770 
     4665    uint8_t const idxEflReg    = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
     4666                                                                 kIemNativeGstRegUse_ReadOnly);
    47714667    uint8_t const idxGstRcxReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off,
    47724668                                                                 (IEMNATIVEGSTREG)(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
    47734669                                                                 kIemNativeGstRegUse_ReadOnly);
    4774     AssertReturn(idxGstRcxReg != UINT8_MAX, UINT32_MAX);
    47754670
    47764671    /** @todo we could reduce this to a single branch instruction by spending a
     
    48004695
    48014696#define IEM_MC_SUB_GREG_U16(a_iGReg, a_u8SubtrahendConst) \
    4802     off = iemNativeEmitSubGregU16(pReNative, off, a_iGReg, a_u8SubtrahendConst); \
    4803     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4697    off = iemNativeEmitSubGregU16(pReNative, off, a_iGReg, a_u8SubtrahendConst)
    48044698
    48054699/** Emits code for IEM_MC_SUB_GREG_U16. */
     
    48094703                                                                 (IEMNATIVEGSTREG)(kIemNativeGstReg_GprFirst + iGReg),
    48104704                                                                  kIemNativeGstRegUse_ForUpdate);
    4811     AssertReturn(idxGstTmpReg != UINT8_MAX, UINT32_MAX);
    48124705
    48134706#ifdef RT_ARCH_AMD64
    4814     uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    4815     AssertReturn(pbCodeBuf, UINT32_MAX);
     4707    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    48164708    pbCodeBuf[off++] = X86_OP_PRF_SIZE_OP;
    48174709    if (idxGstTmpReg >= 8)
     
    48314723
    48324724#else
    4833     uint8_t const idxTmpReg = iemNativeRegAllocTmp(pReNative, &off);
    4834     AssertReturn(idxTmpReg != UINT8_MAX, UINT32_MAX);
    4835 
    4836     uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    4837     AssertReturn(pu32CodeBuf, UINT32_MAX);
     4725    uint8_t const    idxTmpReg   = iemNativeRegAllocTmp(pReNative, &off);
     4726    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    48384727
    48394728    /* sub tmp, gstgrp, uSubtrahend */
     
    48564745
    48574746#define IEM_MC_SUB_GREG_U32(a_iGReg, a_u8Const) \
    4858     off = iemNativeEmitSubGregU32U64(pReNative, off, a_iGReg, a_u8Const, false /*f64Bit*/); \
    4859     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4747    off = iemNativeEmitSubGregU32U64(pReNative, off, a_iGReg, a_u8Const, false /*f64Bit*/)
    48604748
    48614749#define IEM_MC_SUB_GREG_U64(a_iGReg, a_u8Const) \
    4862     off = iemNativeEmitSubGregU32U64(pReNative, off, a_iGReg, a_u8Const, true /*f64Bit*/); \
    4863     AssertReturn(off != UINT32_MAX, UINT32_MAX)
     4750    off = iemNativeEmitSubGregU32U64(pReNative, off, a_iGReg, a_u8Const, true /*f64Bit*/)
    48644751
    48654752/** Emits code for IEM_MC_SUB_GREG_U32 and IEM_MC_SUB_GREG_U64. */
    4866 DECLINLINE(uint32_t) iemNativeEmitSubGregU32U64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGReg,
    4867                                                 uint8_t uSubtrahend, bool f64Bit)
     4753DECL_INLINE_THROW(uint32_t)
     4754iemNativeEmitSubGregU32U64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGReg, uint8_t uSubtrahend, bool f64Bit)
    48684755{
    48694756    uint8_t const idxGstTmpReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off,
    48704757                                                                 (IEMNATIVEGSTREG)(kIemNativeGstReg_GprFirst + iGReg),
    48714758                                                                  kIemNativeGstRegUse_ForUpdate);
    4872     AssertReturn(idxGstTmpReg != UINT8_MAX, UINT32_MAX);
    48734759
    48744760#ifdef RT_ARCH_AMD64
    48754761    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    4876     AssertReturn(pbCodeBuf, UINT32_MAX);
    48774762    if (f64Bit)
    48784763        pbCodeBuf[off++] = X86_OP_REX_W | (idxGstTmpReg >= 8 ? X86_OP_REX_B : 0);
     
    49044789    /* sub tmp, gstgrp, uSubtrahend */
    49054790    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    4906     AssertReturn(pu32CodeBuf, UINT32_MAX);
    49074791    pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, idxTmpReg, idxGstTmpReg, uSubtrahend, f64Bit);
    49084792
     
    49454829{
    49464830    RT_NOREF(pCallEntry);
    4947 //pReNative->pInstrBuf[off++] = 0xcc;
    49484831
    49494832    /* It's too convenient to use iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet below
     
    49514834    uint32_t const idxLabelVmCheck = iemNativeLabelCreate(pReNative, kIemNativeLabelType_CheckIrq,
    49524835                                                          UINT32_MAX, pReNative->uCheckIrqSeqNo++);
    4953     AssertReturn(idxLabelVmCheck != UINT32_MAX, UINT32_MAX);
    49544836
    49554837    uint32_t const idxLabelReturnBreak = iemNativeLabelCreate(pReNative, kIemNativeLabelType_ReturnBreak);
    4956     AssertReturn(idxLabelReturnBreak != UINT32_MAX, UINT32_MAX);
    49574838
    49584839    /* Again, we need to load the extended EFLAGS before we actually need them
     
    49624843    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    49634844                                                              kIemNativeGstRegUse_ReadOnly);
    4964     AssertReturn(idxEflReg != UINT8_MAX, UINT32_MAX);
    4965 
    4966     uint8_t const idxPcReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Pc,
    4967                                                              kIemNativeGstRegUse_ReadOnly);
    4968     AssertReturn(idxPcReg != UINT8_MAX, UINT32_MAX);
     4845
     4846    uint8_t const idxPcReg  = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Pc, kIemNativeGstRegUse_ReadOnly);
    49694847
    49704848    uint8_t idxTmpReg = iemNativeRegAllocTmp(pReNative, &off);
    4971     AssertReturn(idxTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    49724849
    49734850    /*
     
    50374914{
    50384915    uint32_t const fExpectedExec = (uint32_t)pCallEntry->auParams[0];
    5039     uint8_t idxTmpReg = iemNativeRegAllocTmp(pReNative, &off);
    5040 
    5041     AssertReturn(idxTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
     4916    uint8_t const  idxTmpReg     = iemNativeRegAllocTmp(pReNative, &off);
     4917
    50424918    off = iemNativeEmitLoadGprFromVCpuU32(pReNative, off, idxTmpReg, RT_UOFFSETOF(VMCPUCC, iem.s.fExec));
    50434919    off = iemNativeEmitAndGpr32ByImm(pReNative, off, idxTmpReg, IEMTB_F_KEY_MASK);
     
    50874963 * @param   cbBuf   The output buffer size.  At least 32 bytes.
    50884964 */
    5089 const char *iemTbFlagsToString(uint32_t fFlags, char *pszBuf, size_t cbBuf)
     4965DECLHIDDEN(const char *) iemTbFlagsToString(uint32_t fFlags, char *pszBuf, size_t cbBuf) RT_NOEXCEPT
    50904966{
    50914967    Assert(cbBuf >= 32);
     
    51735049
    51745050
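A typical iemTbFlagsToString call looks roughly like the following sketch (the buffer size and the Log statement are illustrative and assume a pTb in scope, not lifted from the tree):

    char szFlags[64]; /* contract above: at least 32 bytes */
    Log(("TB fFlags=%#x (%s)\n", pTb->fFlags,
         iemTbFlagsToString(pTb->fFlags, szFlags, sizeof(szFlags))));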
    5175 void iemNativeDisassembleTb(PCIEMTB pTb, PCDBGFINFOHLP pHlp)
     5051DECLHIDDEN(void) iemNativeDisassembleTb(PCIEMTB pTb, PCDBGFINFOHLP pHlp) RT_NOEXCEPT
    51765052{
    51775053    AssertReturnVoid((pTb->fFlags & IEMTB_F_TYPE_MASK) == IEMTB_F_TYPE_NATIVE);
     
    55475423 * @param   pTb     The threaded translation to recompile to native.
    55485424 */
    5549 PIEMTB iemNativeRecompile(PVMCPUCC pVCpu, PIEMTB pTb)
     5425DECLHIDDEN(PIEMTB) iemNativeRecompile(PVMCPUCC pVCpu, PIEMTB pTb) RT_NOEXCEPT
    55505426{
    55515427    /*
     
    55635439
    55645440    /*
    5565      * Emit prolog code (fixed).
    5566      */
    5567     uint32_t off = iemNativeEmitProlog(pReNative, 0);
    5568     AssertReturn(off != UINT32_MAX, pTb);
    5569 
    5570     /*
    5571      * Convert the calls to native code.
    5572      */
    5573 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
    5574     int32_t              iGstInstr  = -1;
    5575     uint32_t             fExec      = pTb->fFlags;
    5576 #endif
    5577     PCIEMTHRDEDCALLENTRY pCallEntry = pTb->Thrd.paCalls;
    5578     uint32_t             cCallsLeft = pTb->Thrd.cCalls;
     5441     * Recompiling and emitting code is done using try/throw/catch or setjmp/longjmp
     5442     * for aborting if an error happens.
     5443     */
     5444    uint32_t        cCallsLeft = pTb->Thrd.cCalls;
    55795445#ifdef LOG_ENABLED
    5580     uint32_t const       cCallsOrg  = cCallsLeft;
    5581 #endif
    5582     while (cCallsLeft-- > 0)
    5583     {
    5584         PFNIEMNATIVERECOMPFUNC const pfnRecom = g_apfnIemNativeRecompileFunctions[pCallEntry->enmFunction];
    5585 
     5446    uint32_t const  cCallsOrg  = cCallsLeft;
     5447#endif
     5448    uint32_t        off        = 0;
     5449    int             rc         = VINF_SUCCESS;
     5450    IEMNATIVE_TRY_SETJMP(pReNative, rc)
     5451    {
    55865452        /*
    5587          * Debug info and assembly markup.
     5453         * Emit prolog code (fixed).
     5454         */
     5455        off = iemNativeEmitProlog(pReNative, off);
     5456
     5457        /*
     5458         * Convert the calls to native code.
    55885459         */
    55895460#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
    5590         if (pCallEntry->enmFunction == kIemThreadedFunc_BltIn_CheckMode)
    5591             fExec = pCallEntry->auParams[0];
    5592         iemNativeDbgInfoAddNativeOffset(pReNative, off);
    5593         if (iGstInstr < (int32_t)pCallEntry->idxInstr)
     5461        int32_t  iGstInstr  = -1;
     5462        uint32_t fExec      = pTb->fFlags;
     5463#endif
     5464        PCIEMTHRDEDCALLENTRY pCallEntry = pTb->Thrd.paCalls;
     5465        while (cCallsLeft-- > 0)
    55945466        {
    5595             if (iGstInstr < (int32_t)pTb->cInstructions)
    5596                 iemNativeDbgInfoAddGuestInstruction(pReNative, fExec);
     5467            PFNIEMNATIVERECOMPFUNC const pfnRecom = g_apfnIemNativeRecompileFunctions[pCallEntry->enmFunction];
     5468
     5469            /*
     5470             * Debug info and assembly markup.
     5471             */
     5472#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
     5473            if (pCallEntry->enmFunction == kIemThreadedFunc_BltIn_CheckMode)
     5474                fExec = pCallEntry->auParams[0];
     5475            iemNativeDbgInfoAddNativeOffset(pReNative, off);
     5476            if (iGstInstr < (int32_t)pCallEntry->idxInstr)
     5477            {
     5478                if (iGstInstr < (int32_t)pTb->cInstructions)
     5479                    iemNativeDbgInfoAddGuestInstruction(pReNative, fExec);
     5480                else
     5481                    Assert(iGstInstr == pTb->cInstructions);
     5482                iGstInstr = pCallEntry->idxInstr;
     5483            }
     5484            iemNativeDbgInfoAddThreadedCall(pReNative, (IEMTHREADEDFUNCS)pCallEntry->enmFunction, pfnRecom != NULL);
     5485#elif defined(VBOX_STRICT)
     5486            off = iemNativeEmitMarker(pReNative, off,
     5487                                      RT_MAKE_U32((pTb->Thrd.cCalls - cCallsLeft - 1) | (pfnRecom ? 0x8000 : 0),
     5488                                                  pCallEntry->enmFunction));
     5489#endif
     5490
     5491            /*
     5492             * Actual work.
     5493             */
     5494            if (pfnRecom) /** @todo stats on this.   */
     5495            {
     5496                //STAM_COUNTER_INC()
     5497                off = pfnRecom(pReNative, off, pCallEntry);
     5498            }
    55975499            else
    5598                 Assert(iGstInstr == pTb->cInstructions);
    5599             iGstInstr = pCallEntry->idxInstr;
     5500                off = iemNativeEmitThreadedCall(pReNative, off, pCallEntry);
     5501            Assert(off <= pReNative->cInstrBufAlloc);
     5502            Assert(pReNative->cCondDepth == 0);
     5503
     5504            /*
     5505             * Advance.
     5506             */
     5507            pCallEntry++;
    56005508        }
    5601         iemNativeDbgInfoAddThreadedCall(pReNative, (IEMTHREADEDFUNCS)pCallEntry->enmFunction, pfnRecom != NULL);
    5602 #elif defined(VBOX_STRICT)
    5603         off = iemNativeEmitMarker(pReNative, off,
    5604                                   RT_MAKE_U32((pTb->Thrd.cCalls - cCallsLeft - 1) | (pfnRecom ? 0x8000 : 0),
    5605                                               pCallEntry->enmFunction));
    5606         AssertReturn(off != UINT32_MAX, pTb);
    5607 #endif
    56085509
    56095510        /*
    5610          * Actual work.
     5511         * Emit the epilog code.
    56115512         */
    5612         if (pfnRecom) /** @todo stats on this.   */
    5613         {
    5614             //STAM_COUNTER_INC()
    5615             off = pfnRecom(pReNative, off, pCallEntry);
    5616         }
    5617         else
    5618             off = iemNativeEmitThreadedCall(pReNative, off, pCallEntry);
    5619         AssertReturn(off != UINT32_MAX, pTb);
    5620         Assert(pReNative->cCondDepth == 0);
     5513        uint32_t idxReturnLabel;
     5514        off = iemNativeEmitEpilog(pReNative, off, &idxReturnLabel);
    56215515
    56225516        /*
    5623          * Advance.
     5517         * Generate special jump labels.
    56245518         */
    5625         pCallEntry++;
     5519        if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_ReturnBreak))
     5520            off = iemNativeEmitReturnBreak(pReNative, off, idxReturnLabel);
     5521        if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_RaiseGp0))
     5522            off = iemNativeEmitRaiseGp0(pReNative, off, idxReturnLabel);
    56265523    }
    5627 
    5628     /*
    5629      * Emit the epilog code.
    5630      */
    5631     uint32_t idxReturnLabel;
    5632     off = iemNativeEmitEpilog(pReNative, off, &idxReturnLabel);
    5633     AssertReturn(off != UINT32_MAX, pTb);
    5634 
    5635     /*
    5636      * Generate special jump labels.
    5637      */
    5638     if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_ReturnBreak))
    5639     {
    5640         off = iemNativeEmitReturnBreak(pReNative, off, idxReturnLabel);
    5641         AssertReturn(off != UINT32_MAX, pTb);
     5524    IEMNATIVE_CATCH_LONGJMP_BEGIN(pReNative, rc);
     5525    {
     5526        Log(("iemNativeRecompile: Caught %Rrc while recompiling!\n", rc));
     5527        return pTb;
    56425528    }
    5643     if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_RaiseGp0))
    5644     {
    5645         off = iemNativeEmitRaiseGp0(pReNative, off, idxReturnLabel);
    5646         AssertReturn(off != UINT32_MAX, pTb);
    5647     }
     5529    IEMNATIVE_CATCH_LONGJMP_END(pReNative);
     5530    Assert(off <= pReNative->cInstrBufAlloc);
    56485531
    56495532    /*
  • trunk/src/VBox/VMM/include/IEMInternal.h

    r101640 r101682  
    18451845#  define IEM_TRY_SETJMP(a_pVCpu, a_rcTarget) \
    18461846        jmp_buf  JmpBuf; \
    1847         jmp_buf * volatile pSavedJmpBuf = pVCpu->iem.s.CTX_SUFF(pJmpBuf); \
    1848         pVCpu->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
     1847        jmp_buf * volatile pSavedJmpBuf = (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf); \
     1848        (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
    18491849        if ((rcStrict = setjmp(JmpBuf)) == 0)
    18501850#  define IEM_TRY_SETJMP_AGAIN(a_pVCpu, a_rcTarget) \
    1851         pSavedJmpBuf = pVCpu->iem.s.CTX_SUFF(pJmpBuf); \
    1852         pVCpu->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
     1851        pSavedJmpBuf = (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf); \
     1852        (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
    18531853        if ((rcStrict = setjmp(JmpBuf)) == 0)
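The only change to these two macros is that the body now expands the (parenthesised) a_pVCpu parameter instead of referencing a literal pVCpu; presumably plain macro hygiene, so the caller is no longer forced to pass a variable named exactly pVCpu. A hypothetical invocation that only works with the new form:

    /* Previously this would have silently used some other 'pVCpu' in scope
       (or failed to compile); now the argument is what actually gets used: */
    IEM_TRY_SETJMP(pVCpu2, rcStrict)
    { /* ... */ }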
    18541854#  define IEM_CATCH_LONGJMP_BEGIN(a_pVCpu, a_rcTarget) \
     
    55905590
    55915591/* Native recompiler public bits: */
    5592 PIEMTB iemNativeRecompile(PVMCPUCC pVCpu, PIEMTB pTb);
    5593 int    iemExecMemAllocatorInit(PVMCPU pVCpu, uint64_t cbMax, uint64_t cbInitial, uint32_t cbChunk);
    5594 void   iemExecMemAllocatorFree(PVMCPU pVCpu, void *pv, size_t cb);
     5592DECLHIDDEN(PIEMTB)  iemNativeRecompile(PVMCPUCC pVCpu, PIEMTB pTb) RT_NOEXCEPT;
     5593int                 iemExecMemAllocatorInit(PVMCPU pVCpu, uint64_t cbMax, uint64_t cbInitial, uint32_t cbChunk);
     5594void                iemExecMemAllocatorFree(PVMCPU pVCpu, void *pv, size_t cb);
    55955595
    55965596
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r101661 r101682  
    621621    /** The condition nesting stack. */
    622622    IEMNATIVECOND               aCondStack[2];
     623
     624#ifndef IEM_WITH_THROW_CATCH
      625    /** The setjmp/longjmp buffer used when we're not using C++ exceptions
      626     *  for recompilation error handling. */
     627    jmp_buf                     JmpBuf;
     628#endif
    623629} IEMRECOMPILERSTATE;
    624630/** Pointer to a native recompiler state. */
     
    626632
    627633
     634/** @def IEMNATIVE_TRY_SETJMP
     635 * Wrapper around setjmp / try, hiding all the ugly differences.
     636 *
     637 * @note Use with extreme care as this is a fragile macro.
     638 * @param   a_pReNative The native recompile state.
     639 * @param   a_rcTarget  The variable that should receive the status code in case
     640 *                      of a longjmp/throw.
     641 */
     642/** @def IEMNATIVE_CATCH_LONGJMP_BEGIN
     643 * Start wrapper for catch / setjmp-else.
     644 *
     645 * This will set up a scope.
     646 *
     647 * @note Use with extreme care as this is a fragile macro.
     648 * @param   a_pReNative The native recompile state.
     649 * @param   a_rcTarget  The variable that should receive the status code in case
     650 *                      of a longjmp/throw.
     651 */
     652/** @def IEMNATIVE_CATCH_LONGJMP_END
     653 * End wrapper for catch / setjmp-else.
     654 *
     655 * This will close the scope set up by IEMNATIVE_CATCH_LONGJMP_BEGIN and clean
     656 * up the state.
     657 *
     658 * @note Use with extreme care as this is a fragile macro.
     659 * @param   a_pReNative The native recompile state.
     660 */
     661/** @def IEMNATIVE_DO_LONGJMP
     662 *
     663 * Wrapper around longjmp / throw.
     664 *
     665 * @param   a_pReNative The native recompile state.
      666 * @param   a_rc        The status code to jump back with / throw.
     667 */
     668#ifdef IEM_WITH_THROW_CATCH
     669# define IEMNATIVE_TRY_SETJMP(a_pReNative, a_rcTarget) \
     670       a_rcTarget = VINF_SUCCESS; \
     671       try
     672# define IEMNATIVE_CATCH_LONGJMP_BEGIN(a_pReNative, a_rcTarget) \
     673       catch (int rcThrown) \
     674       { \
     675           a_rcTarget = rcThrown
     676# define IEMNATIVE_CATCH_LONGJMP_END(a_pReNative) \
     677       } \
     678       ((void)0)
     679# define IEMNATIVE_DO_LONGJMP(a_pReNative, a_rc)  throw int(a_rc)
     680#else  /* !IEM_WITH_THROW_CATCH */
     681# define IEMNATIVE_TRY_SETJMP(a_pReNative, a_rcTarget) \
     682       if ((a_rcTarget = setjmp((a_pReNative)->JmpBuf)) == 0)
     683# define IEMNATIVE_CATCH_LONGJMP_BEGIN(a_pReNative, a_rcTarget) \
     684       else \
     685       { \
     686           ((void)0)
     687# define IEMNATIVE_CATCH_LONGJMP_END(a_pReNative) \
     688       }
     689# define IEMNATIVE_DO_LONGJMP(a_pReNative, a_rc)  longjmp((a_pReNative)->JmpBuf, (a_rc))
     690#endif /* !IEM_WITH_THROW_CATCH */
     691
     692
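The intended usage mirrors what iemNativeRecompile does in the first file of this changeset; a minimal sketch (error handling only):

    int rc = VINF_SUCCESS;
    IEMNATIVE_TRY_SETJMP(pReNative, rc)
    {
        /* Emitters invoked in here abort via IEMNATIVE_DO_LONGJMP(pReNative, VERR_...). */
        off = iemNativeEmitProlog(pReNative, off);
    }
    IEMNATIVE_CATCH_LONGJMP_BEGIN(pReNative, rc);
    {
        Log(("recompilation aborted: %Rrc\n", rc));
    }
    IEMNATIVE_CATCH_LONGJMP_END(pReNative);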
    628693/**
    629694 * Native recompiler worker for a threaded function.
    630695 *
    631  * @returns New code buffer offset, UINT32_MAX in case of failure.
     696 * @returns New code buffer offset; throws VBox status code in case of a failure.
    632697 * @param   pReNative   The native recompiler state.
    633698 * @param   off         The current code buffer offset.
    634699 * @param   pCallEntry  The threaded call entry.
    635700 *
    636  * @note    This is not allowed to throw anything atm.
    637  */
    638 typedef DECLCALLBACKTYPE(uint32_t, FNIEMNATIVERECOMPFUNC,(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    639                                                           PCIEMTHRDEDCALLENTRY pCallEntry));
     701 * @note    This may throw/longjmp VBox status codes (int) to abort compilation, so no RT_NOEXCEPT!
     702 */
     703typedef uint32_t (VBOXCALL FNIEMNATIVERECOMPFUNC)(PIEMRECOMPILERSTATE pReNative, uint32_t off, PCIEMTHRDEDCALLENTRY pCallEntry);
    640704/** Pointer to a native recompiler worker for a threaded function. */
    641705typedef FNIEMNATIVERECOMPFUNC *PFNIEMNATIVERECOMPFUNC;
    642706
    643 /** Defines a native recompiler worker for a threaded function. */
     707/** Defines a native recompiler worker for a threaded function.
     708 * @see FNIEMNATIVERECOMPFUNC  */
    644709#define IEM_DECL_IEMNATIVERECOMPFUNC_DEF(a_Name) \
    645     DECLCALLBACK(uint32_t) a_Name(PIEMRECOMPILERSTATE pReNative, uint32_t off, PCIEMTHRDEDCALLENTRY pCallEntry)
    646 /** Prototypes a native recompiler function for a threaded function. */
     710    uint32_t VBOXCALL a_Name(PIEMRECOMPILERSTATE pReNative, uint32_t off, PCIEMTHRDEDCALLENTRY pCallEntry)
     711
     712/** Prototypes a native recompiler function for a threaded function.
     713 * @see FNIEMNATIVERECOMPFUNC  */
    647714#define IEM_DECL_IEMNATIVERECOMPFUNC_PROTO(a_Name) FNIEMNATIVERECOMPFUNC a_Name
    648715
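As a rough illustration of the new contract (the worker name below is hypothetical), a recompiler worker can now simply use the throwing allocators/emitters and return the updated offset, with no UINT32_MAX bookkeeping:

    IEM_DECL_IEMNATIVERECOMPFUNC_DEF(iemNativeRecompFunc_Example)
    {
        RT_NOREF(pCallEntry);
        uint8_t const idxTmpReg = iemNativeRegAllocTmp(pReNative, &off); /* throws on failure */
        off = iemNativeEmitGprZero(pReNative, off, idxTmpReg);           /* throws on failure */
        iemNativeRegFreeTmp(pReNative, idxTmpReg);
        return off;
    }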
    649 DECLHIDDEN(uint32_t)        iemNativeLabelCreate(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType,
    650                                                  uint32_t offWhere = UINT32_MAX, uint16_t uData = 0) RT_NOEXCEPT;
    651 DECLHIDDEN(void)            iemNativeLabelDefine(PIEMRECOMPILERSTATE pReNative, uint32_t idxLabel, uint32_t offWhere) RT_NOEXCEPT;
    652 DECLHIDDEN(bool)            iemNativeAddFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, uint32_t idxLabel,
    653                                               IEMNATIVEFIXUPTYPE enmType, int8_t offAddend = 0) RT_NOEXCEPT;
    654 DECLHIDDEN(PIEMNATIVEINSTR) iemNativeInstrBufEnsureSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    655                                                         uint32_t cInstrReq) RT_NOEXCEPT;
    656 
    657 DECLHIDDEN(uint8_t)         iemNativeRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    658                                                  bool fPreferVolatile = true) RT_NOEXCEPT;
    659 DECLHIDDEN(uint8_t)         iemNativeRegAllocTmpImm(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t uImm,
    660                                                     bool fPreferVolatile = true) RT_NOEXCEPT;
    661 DECLHIDDEN(uint8_t)         iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    662                                                             IEMNATIVEGSTREG enmGstReg,
    663                                                             IEMNATIVEGSTREGUSE enmIntendedUse) RT_NOEXCEPT;
    664 DECLHIDDEN(uint8_t)         iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    665                                                                             IEMNATIVEGSTREG enmGstReg) RT_NOEXCEPT;
    666 
    667 DECLHIDDEN(uint8_t)         iemNativeRegAllocVar(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint8_t idxVar) RT_NOEXCEPT;
    668 DECLHIDDEN(uint32_t)        iemNativeRegAllocArgs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs) RT_NOEXCEPT;
    669 DECLHIDDEN(uint8_t)         iemNativeRegAssignRc(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
     716DECL_HIDDEN_THROW(uint32_t) iemNativeLabelCreate(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType,
     717                                                 uint32_t offWhere = UINT32_MAX, uint16_t uData = 0);
     718DECL_HIDDEN_THROW(void)     iemNativeLabelDefine(PIEMRECOMPILERSTATE pReNative, uint32_t idxLabel, uint32_t offWhere);
     719DECL_HIDDEN_THROW(void)     iemNativeAddFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, uint32_t idxLabel,
     720                                              IEMNATIVEFIXUPTYPE enmType, int8_t offAddend = 0);
     721DECL_HIDDEN_THROW(PIEMNATIVEINSTR) iemNativeInstrBufEnsureSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t cInstrReq);
     722
     723DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, bool fPreferVolatile = true);
     724DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpImm(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t uImm,
     725                                                    bool fPreferVolatile = true);
     726DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     727                                                            IEMNATIVEGSTREG enmGstReg, IEMNATIVEGSTREGUSE enmIntendedUse);
     728DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     729                                                                            IEMNATIVEGSTREG enmGstReg);
     730
     731DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocVar(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint8_t idxVar);
     732DECL_HIDDEN_THROW(uint32_t) iemNativeRegAllocArgs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs);
     733DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAssignRc(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg);
    670734DECLHIDDEN(void)            iemNativeRegFree(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
    671735DECLHIDDEN(void)            iemNativeRegFreeTmp(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
    672736DECLHIDDEN(void)            iemNativeRegFreeTmpImm(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
    673737DECLHIDDEN(void)            iemNativeRegFreeAndFlushMask(PIEMRECOMPILERSTATE pReNative, uint32_t fHstRegMask) RT_NOEXCEPT;
    674 DECLHIDDEN(uint32_t)        iemNativeRegFlushPendingWrites(PIEMRECOMPILERSTATE pReNative, uint32_t off) RT_NOEXCEPT;
    675 
    676 DECLHIDDEN(uint32_t)        iemNativeEmitLoadGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    677                                                                  uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg) RT_NOEXCEPT;
    678 DECLHIDDEN(uint32_t)        iemNativeEmitCheckCallRetAndPassUp(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    679                                                                uint8_t idxInstr) RT_NOEXCEPT;
     738DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushPendingWrites(PIEMRECOMPILERSTATE pReNative, uint32_t off);
     739
     740DECL_HIDDEN_THROW(uint32_t) iemNativeEmitLoadGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     741                                                                 uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg);
     742DECL_HIDDEN_THROW(uint32_t) iemNativeEmitCheckCallRetAndPassUp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr);
    680743
    681744
     
    688751 *          allocation size.
    689752 *
    690  * @returns Pointer to the instruction output buffer on success, NULL on
    691  *          failure.
     753 * @returns Pointer to the instruction output buffer on success; throws VBox
     754 *          status code on failure, so no need to check it.
    692755 * @param   pReNative   The native recompile state.
    693756 * @param   off         Current instruction offset.  Works safely for UINT32_MAX
     
    696759 *                      overestimate this a bit.
    697760 */
    698 DECL_FORCE_INLINE(PIEMNATIVEINSTR) iemNativeInstrBufEnsure(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t cInstrReq)
    699 {
    700     uint64_t const offChecked = off + (uint64_t)cInstrReq;
     761DECL_FORCE_INLINE_THROW(PIEMNATIVEINSTR)
     762iemNativeInstrBufEnsure(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t cInstrReq)
     763{
     764    uint64_t const offChecked = off + (uint64_t)cInstrReq; /** @todo may reconsider the need for UINT32_MAX safety... */
    701765    if (RT_LIKELY(offChecked <= pReNative->cInstrBufAlloc))
    702766    {
     
    721785 * in the disassembly.
    722786 */
    723 DECLINLINE(uint32_t) iemNativeEmitMarker(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t uInfo)
     787DECL_INLINE_THROW(uint32_t)
     788iemNativeEmitMarker(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t uInfo)
    724789{
    725790#ifdef RT_ARCH_AMD64
    726791    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    727     AssertReturn(pbCodeBuf, UINT32_MAX);
    728792    if (uInfo == 0)
    729793    {
     
    743807    }
    744808#elif RT_ARCH_ARM64
     809    /* nop */
    745810    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    746     AssertReturn(pu32CodeBuf, UINT32_MAX);
    747     /* nop */
    748811    pu32CodeBuf[off++] = 0xd503201f;
    749812
     
    764827 * Emits setting a GPR to zero.
    765828 */
    766 DECLINLINE(uint32_t) iemNativeEmitGprZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr)
    767 {
    768 #ifdef RT_ARCH_AMD64
     829DECL_INLINE_THROW(uint32_t)
     830iemNativeEmitGprZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr)
     831{
     832#ifdef RT_ARCH_AMD64
     833    /* xor gpr32, gpr32 */
    769834    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    770     AssertReturn(pbCodeBuf, UINT32_MAX);
    771     /* xor gpr32, gpr32 */
    772835    if (iGpr >= 8)
    773836        pbCodeBuf[off++] = X86_OP_REX_R | X86_OP_REX_B;
     
    776839
    777840#elif RT_ARCH_ARM64
     841    /* mov gpr, #0x0 */
    778842    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    779     AssertReturn(pu32CodeBuf, UINT32_MAX);
    780     /* mov gpr, #0x0 */
    781843    pu32CodeBuf[off++] = UINT32_C(0xd2800000) | iGpr;
    782844
     
    792854 * Emits loading a constant into a 64-bit GPR
    793855 */
    794 DECLINLINE(uint32_t) iemNativeEmitLoadGprImm64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint64_t uImm64)
     856DECL_INLINE_THROW(uint32_t)
     857iemNativeEmitLoadGprImm64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint64_t uImm64)
    795858{
    796859    if (!uImm64)
     
    802865        /* mov gpr, imm32 */
    803866        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    804         AssertReturn(pbCodeBuf, UINT32_MAX);
    805867        if (iGpr >= 8)
    806868            pbCodeBuf[off++] = X86_OP_REX_B;
     
    815877        /* mov gpr, imm64 */
    816878        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10);
    817         AssertReturn(pbCodeBuf, UINT32_MAX);
    818879        if (iGpr < 8)
    819880            pbCodeBuf[off++] = X86_OP_REX_W;
     
    833894#elif RT_ARCH_ARM64
    834895    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    835     AssertReturn(pu32CodeBuf, UINT32_MAX);
    836896
    837897    /*
     
    891951 *       only the ARM64 version does that.
    892952 */
    893 DECLINLINE(uint32_t) iemNativeEmitLoadGpr8Imm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint8_t uImm8)
     953DECL_INLINE_THROW(uint32_t)
     954iemNativeEmitLoadGpr8Imm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint8_t uImm8)
    894955{
    895956#ifdef RT_ARCH_AMD64
    896957    /* mov gpr, imm8 */
    897958    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    898     AssertReturn(pbCodeBuf, UINT32_MAX);
    899959    if (iGpr >= 8)
    900960        pbCodeBuf[off++] = X86_OP_REX_B;
     
    907967    /* movz gpr, imm16, lsl #0 */
    908968    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    909     AssertReturn(pu32CodeBuf, UINT32_MAX);
    910969    pu32CodeBuf[off++] = UINT32_C(0xd2800000) | (UINT32_C(0) << 21) | ((uint32_t)uImm8 << 5) | iGpr;
    911970
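A hedged usage sketch (not from the changeset) of the immediate-load emitters above; iemNativeEmitLoadGprImm64 already special-cases zero and 32-bit values internally, so the three calls merely illustrate the call shapes. The wrapper name is hypothetical.

    DECL_INLINE_THROW(uint32_t)
    iemNativeEmitExampleLoadImms(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprTmp)
    {
        off = iemNativeEmitGprZero(pReNative, off, iGprTmp);                    /* xor reg32,reg32 / movz #0 */
        off = iemNativeEmitLoadGpr8Imm(pReNative, off, iGprTmp, 0x42);          /* 8-bit immediate */
        off = iemNativeEmitLoadGprImm64(pReNative, off, iGprTmp, UINT64_C(0x123456789a)); /* general 64-bit case */
        return off;
    }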
     
    922981 * Common bit of iemNativeEmitLoadGprFromVCpuU64 and friends.
    923982 */
    924 DECL_FORCE_INLINE(uint32_t) iemNativeEmitGprByVCpuDisp(uint8_t *pbCodeBuf, uint32_t off, uint8_t iGprReg, uint32_t offVCpu)
     983DECL_FORCE_INLINE(uint32_t)
     984iemNativeEmitGprByVCpuDisp(uint8_t *pbCodeBuf, uint32_t off, uint8_t iGprReg, uint32_t offVCpu)
    925985{
    926986    if (offVCpu < 128)
     
    9431003 * Common bit of iemNativeEmitLoadGprFromVCpuU64 and friends.
    9441004 */
    945 DECL_FORCE_INLINE(uint32_t) iemNativeEmitGprByVCpuLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
    946                                                        uint32_t offVCpu, ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
     1005DECL_FORCE_INLINE_THROW(uint32_t)
     1006iemNativeEmitGprByVCpuLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
     1007                           uint32_t offVCpu, ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
    9471008{
    9481009    /*
     
    9551016        /* Use the unsigned variant of ldr Wt, [<Xn|SP>, #off]. */
    9561017        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    957         AssertReturn(pu32CodeBuf, UINT32_MAX);
    9581018        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRUOff(enmOperation, iGpr, IEMNATIVE_REG_FIXED_PVMCPU, offVCpu / cbData);
    9591019    }
     
    9611021    {
    9621022        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    963         AssertReturn(pu32CodeBuf, UINT32_MAX);
    9641023        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRUOff(enmOperation, iGpr, IEMNATIVE_REG_FIXED_PCPUMCTX,
    9651024                                                      (offVCpu - RT_UOFFSETOF(VMCPU, cpum.GstCtx)) / cbData);
     
    9731032
    9741033        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    975         AssertReturn(pu32CodeBuf, UINT32_MAX);
    9761034        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(enmOperation, iGpr, IEMNATIVE_REG_FIXED_PVMCPU, IEMNATIVE_REG_FIXED_TMP);
    9771035    }
     
    9851043 * Emits a 64-bit GPR load of a VCpu value.
    9861044 */
    987 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1045DECL_INLINE_THROW(uint32_t)
     1046iemNativeEmitLoadGprFromVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    9881047{
    9891048#ifdef RT_ARCH_AMD64
    9901049    /* mov reg64, mem64 */
    9911050    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    992     AssertReturn(pbCodeBuf, UINT32_MAX);
    9931051    if (iGpr < 8)
    9941052        pbCodeBuf[off++] = X86_OP_REX_W;
     
    10131071 * @note Bits 32 thru 63 in the GPR will be zero after the operation.
    10141072 */
    1015 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromVCpuU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1073DECL_INLINE_THROW(uint32_t)
     1074iemNativeEmitLoadGprFromVCpuU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    10161075{
    10171076#ifdef RT_ARCH_AMD64
    10181077    /* mov reg32, mem32 */
    10191078    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1020     AssertReturn(pbCodeBuf, UINT32_MAX);
    10211079    if (iGpr >= 8)
    10221080        pbCodeBuf[off++] = X86_OP_REX_R;
     
    10391097 * @note Bits 16 thru 63 in the GPR will be zero after the operation.
    10401098 */
    1041 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1099DECL_INLINE_THROW(uint32_t)
     1100iemNativeEmitLoadGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    10421101{
    10431102#ifdef RT_ARCH_AMD64
    10441103    /* movzx reg32, mem16 */
    10451104    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1046     AssertReturn(pbCodeBuf, UINT32_MAX);
    10471105    if (iGpr >= 8)
    10481106        pbCodeBuf[off++] = X86_OP_REX_R;
     
    10661124 * @note Bits 8 thru 63 in the GPR will be zero after the operation.
    10671125 */
    1068 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromVCpuU8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1126DECL_INLINE_THROW(uint32_t)
     1127iemNativeEmitLoadGprFromVCpuU8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    10691128{
    10701129#ifdef RT_ARCH_AMD64
    10711130    /* movzx reg32, mem8 */
    10721131    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1073     AssertReturn(pbCodeBuf, UINT32_MAX);
    10741132    if (iGpr >= 8)
    10751133        pbCodeBuf[off++] = X86_OP_REX_R;
     
    10921150 * Emits a store of a GPR value to a 64-bit VCpu field.
    10931151 */
    1094 DECLINLINE(uint32_t) iemNativeEmitStoreGprToVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1152DECL_INLINE_THROW(uint32_t)
     1153iemNativeEmitStoreGprToVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    10951154{
    10961155#ifdef RT_ARCH_AMD64
    10971156    /* mov mem64, reg64 */
    10981157    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1099     AssertReturn(pbCodeBuf, UINT32_MAX);
    11001158    if (iGpr < 8)
    11011159        pbCodeBuf[off++] = X86_OP_REX_W;
     
    11191177 * Emits a store of a GPR value to a 32-bit VCpu field.
    11201178 */
    1121 DECLINLINE(uint32_t) iemNativeEmitStoreGprFromVCpuU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1179DECL_INLINE_THROW(uint32_t)
     1180iemNativeEmitStoreGprFromVCpuU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    11221181{
    11231182#ifdef RT_ARCH_AMD64
    11241183    /* mov mem32, reg32 */
    11251184    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1126     AssertReturn(pbCodeBuf, UINT32_MAX);
    11271185    if (iGpr >= 8)
    11281186        pbCodeBuf[off++] = X86_OP_REX_R;
     
    11441202 * Emits a store of a GPR value to a 16-bit VCpu field.
    11451203 */
    1146 DECLINLINE(uint32_t) iemNativeEmitStoreGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1204DECL_INLINE_THROW(uint32_t)
     1205iemNativeEmitStoreGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    11471206{
    11481207#ifdef RT_ARCH_AMD64
    11491208    /* mov mem16, reg16 */
    11501209    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1151     AssertReturn(pbCodeBuf, UINT32_MAX);
    11521210    pbCodeBuf[off++] = X86_OP_PRF_SIZE_OP;
    11531211    if (iGpr >= 8)
     
     11701228 * Emits a store of a GPR value to an 8-bit VCpu field.
    11711229 */
    1172 DECLINLINE(uint32_t) iemNativeEmitStoreGprFromVCpuU8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     1230DECL_INLINE_THROW(uint32_t)
     1231iemNativeEmitStoreGprFromVCpuU8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    11731232{
    11741233#ifdef RT_ARCH_AMD64
    11751234    /* mov mem8, reg8 */
    11761235    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1177     AssertReturn(pbCodeBuf, UINT32_MAX);
    11781236    if (iGpr >= 8)
    11791237        pbCodeBuf[off++] = X86_OP_REX_R;
     
    11951253 * Emits a gprdst = gprsrc load.
    11961254 */
    1197 DECLINLINE(uint32_t) iemNativeEmitLoadGprFromGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
     1255DECL_INLINE_THROW(uint32_t)
     1256iemNativeEmitLoadGprFromGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
    11981257{
    11991258#ifdef RT_ARCH_AMD64
    12001259    /* mov gprdst, gprsrc */
    1201     uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1202     AssertReturn(pbCodeBuf, UINT32_MAX);
     1260    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    12031261    if ((iGprDst | iGprSrc) >= 8)
    12041262        pbCodeBuf[off++] = iGprDst < 8  ? X86_OP_REX_W | X86_OP_REX_B
     
    12111269
    12121270#elif RT_ARCH_ARM64
     1271    /* mov dst, src;   alias for: orr dst, xzr, src */
    12131272    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1214     AssertReturn(pu32CodeBuf, UINT32_MAX);
    1215     /* mov dst, src;   alias for: orr dst, xzr, src */
    12161273    pu32CodeBuf[off++] = UINT32_C(0xaa000000) | ((uint32_t)iGprSrc << 16) | ((uint32_t)ARMV8_A64_REG_XZR << 5) | iGprDst;
    12171274
     
     12531310 * Emits a 64-bit GPR load instruction with a BP relative source address.
    12541311 */
    1255 DECLINLINE(uint32_t) iemNativeEmitLoadGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
     1312DECL_INLINE_THROW(uint32_t)
     1313iemNativeEmitLoadGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
    12561314{
    12571315    /* mov gprdst, qword [rbp + offDisp]  */
    12581316    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1259     AssertReturn(pbCodeBuf, UINT32_MAX);
    12601317    if (iGprDst < 8)
    12611318        pbCodeBuf[off++] = X86_OP_REX_W;
     
     12721329 * Emits a 32-bit GPR load instruction with a BP relative source address.
    12731330 */
    1274 DECLINLINE(uint32_t) iemNativeEmitLoadGprByBpU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
     1331DECL_INLINE_THROW(uint32_t)
     1332iemNativeEmitLoadGprByBpU32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
    12751333{
    12761334    /* mov gprdst, dword [rbp + offDisp]  */
    12771335    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1278     AssertReturn(pbCodeBuf, UINT32_MAX);
    12791336    if (iGprDst >= 8)
    12801337        pbCodeBuf[off++] = X86_OP_REX_R;
     
     12891346 * Emits a load effective address to a GPR with a BP relative source address.
    12901347 */
    1291 DECLINLINE(uint32_t) iemNativeEmitLeaGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
     1348DECL_INLINE_THROW(uint32_t)
     1349iemNativeEmitLeaGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t offDisp)
    12921350{
    12931351    /* lea gprdst, [rbp + offDisp] */
    12941352    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1295     AssertReturn(pbCodeBuf, UINT32_MAX);
    12961353    if (iGprDst < 8)
    12971354        pbCodeBuf[off++] = X86_OP_REX_W;
     
    13091366 * @note May trash IEMNATIVE_REG_FIXED_TMP0.
    13101367 */
    1311 DECLINLINE(uint32_t) iemNativeEmitStoreGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offDisp, uint8_t iGprSrc)
     1368DECL_INLINE_THROW(uint32_t)
     1369iemNativeEmitStoreGprByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offDisp, uint8_t iGprSrc)
    13121370{
    13131371#ifdef RT_ARCH_AMD64
     13141372    /* mov qword [rbp + offDisp], gprsrc */
    13151373    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1316     AssertReturn(pbCodeBuf, UINT32_MAX);
    13171374    if (iGprSrc < 8)
    13181375        pbCodeBuf[off++] = X86_OP_REX_W;
     
    13271384        /* str w/ unsigned imm12 (scaled) */
    13281385        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1329         AssertReturn(pu32CodeBuf, UINT32_MAX);
    13301386        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_St_Dword, iGprSrc,
    13311387                                                      ARMV8_A64_REG_BP, (uint32_t)offDisp / 8);
     
    13351391        /* stur w/ signed imm9 (unscaled) */
    13361392        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1337         AssertReturn(pu32CodeBuf, UINT32_MAX);
    13381393        pu32CodeBuf[off++] = Armv8A64MkInstrSturLdur(kArmv8A64InstrLdStType_St_Dword, iGprSrc, ARMV8_A64_REG_BP, offDisp);
    13391394    }
     
    13431398        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_REG_FIXED_TMP0, (uint32_t)offDisp);
    13441399        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1345         AssertReturn(pu32CodeBuf, UINT32_MAX);
    13461400        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(kArmv8A64InstrLdStType_St_Dword, iGprSrc, ARMV8_A64_REG_BP,
    13471401                                                       IEMNATIVE_REG_FIXED_TMP0, kArmv8A64InstrLdStExtend_Sxtw);
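Hedged sketch (not from the changeset) of spilling and reloading a host register via the BP-relative emitters; the -0x20 frame displacement is a made-up slot, not a real frame-layout constant, and the helper name is invented.

    DECL_INLINE_THROW(uint32_t)
    iemNativeEmitExampleSpillReload(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprTmp)
    {
        int32_t const offFrameSlot = -0x20;                                     /* hypothetical, 8-byte aligned */
        off = iemNativeEmitStoreGprByBp(pReNative, off, offFrameSlot, iGprTmp); /* spill  */
        off = iemNativeEmitLoadGprByBp(pReNative, off, iGprTmp, offFrameSlot);  /* reload */
        return off;
    }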
     
    13611415 * @note May trash IEMNATIVE_REG_FIXED_TMP0.
    13621416 */
    1363 DECLINLINE(uint32_t) iemNativeEmitStoreImm64ByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offDisp, uint64_t uImm64)
     1417DECL_INLINE_THROW(uint32_t)
     1418iemNativeEmitStoreImm64ByBp(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offDisp, uint64_t uImm64)
    13641419{
    13651420#ifdef RT_ARCH_AMD64
     
    13671422    {
    13681423        /* mov qword [rbp + offDisp], imm32 - sign extended */
    1369         uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 11);
    1370         AssertReturn(pbCodeBuf, UINT32_MAX);
    1371 
     1424        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 11);
    13721425        pbCodeBuf[off++] = X86_OP_REX_W;
    13731426        pbCodeBuf[off++] = 0xc7;
     
    14041457 * Common bit of iemNativeEmitLoadGprByGpr and friends.
    14051458 */
    1406 DECL_FORCE_INLINE(uint32_t) iemNativeEmitGprByGprDisp(uint8_t *pbCodeBuf, uint32_t off,
    1407                                                       uint8_t iGprReg, uint8_t iGprBase, int32_t offDisp)
     1459DECL_FORCE_INLINE(uint32_t)
     1460iemNativeEmitGprByGprDisp(uint8_t *pbCodeBuf, uint32_t off, uint8_t iGprReg, uint8_t iGprBase, int32_t offDisp)
    14081461{
    14091462    if (offDisp == 0 && (iGprBase & 7) != X86_GREG_xBP) /* Can use encoding w/o displacement field. */
     
     14361489 * Common bit of iemNativeEmitLoadGprByGpr and friends.
    14371490 */
    1438 DECL_FORCE_INLINE(uint32_t) iemNativeEmitGprByGprLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
    1439                                                       uint8_t iGprBase, int32_t offDisp,
    1440                                                       ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
     1491DECL_FORCE_INLINE_THROW(uint32_t)
     1492iemNativeEmitGprByGprLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
     1493                          uint8_t iGprBase, int32_t offDisp, ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
    14411494{
    14421495    /*
     
    14491502        /* Use the unsigned variant of ldr Wt, [<Xn|SP>, #off]. */
    14501503        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1451         AssertReturn(pu32CodeBuf, UINT32_MAX);
    14521504        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRUOff(enmOperation, iGprReg, iGprBase, (uint32_t)offDisp / cbData);
    14531505    }
     
    14581510        /** @todo reduce by offVCpu by >> 3 or >> 2? if it saves instructions? */
     14591511        uint8_t const idxTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, (uint64_t)offDisp);
    1460         AssertReturn(idxTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    14611512
    14621513        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1463         AssertReturn(pu32CodeBuf, UINT32_MAX);
    14641514        pu32CodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(enmOperation, iGprReg, iGprBase, idxTmpReg);
    14651515
     
    14751525 * Emits a 64-bit GPR load via a GPR base address with a displacement.
    14761526 */
    1477 DECLINLINE(uint32_t) iemNativeEmitLoadGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    1478                                               uint8_t iGprDst, uint8_t iGprBase, int32_t offDisp)
     1527DECL_INLINE_THROW(uint32_t)
     1528iemNativeEmitLoadGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprBase, int32_t offDisp)
    14791529{
    14801530#ifdef RT_ARCH_AMD64
    14811531    /* mov reg64, mem64 */
    14821532    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1483     AssertReturn(pbCodeBuf, UINT32_MAX);
    14841533    pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprBase < 8 ? 0 : X86_OP_REX_B);
    14851534    pbCodeBuf[off++] = 0x8b;
     
    15011550 * @note Bits 63 thru 32 in @a iGprDst will be cleared.
    15021551 */
    1503 DECLINLINE(uint32_t) iemNativeEmitLoadGpr32ByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    1504                                                 uint8_t iGprDst, uint8_t iGprBase, int32_t offDisp)
     1552DECL_INLINE_THROW(uint32_t)
     1553iemNativeEmitLoadGpr32ByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprBase, int32_t offDisp)
    15051554{
    15061555#ifdef RT_ARCH_AMD64
    15071556    /* mov reg32, mem32 */
    15081557    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    1509     AssertReturn(pbCodeBuf, UINT32_MAX);
    15101558    if (iGprDst >= 8 || iGprBase >= 8)
    15111559        pbCodeBuf[off++] = (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprBase < 8 ? 0 : X86_OP_REX_B);
     
    15331581 * Emits a 64-bit GPR subtract with a signed immediate subtrahend.
    15341582 */
    1535 DECLINLINE(uint32_t) iemNativeEmitSubGprImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t iSubtrahend)
     1583DECL_INLINE_THROW(uint32_t)
     1584iemNativeEmitSubGprImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t iSubtrahend)
    15361585{
    15371586    /* sub gprdst, imm8/imm32 */
    15381587    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1539     AssertReturn(pbCodeBuf, UINT32_MAX);
    15401588    if (iGprDst < 8)
    15411589        pbCodeBuf[off++] = X86_OP_REX_W;
     
    15671615 * @note The AMD64 version sets flags.
    15681616 */
    1569 DECLINLINE(uint32_t ) iemNativeEmitAddTwoGprs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprAddend)
     1617DECL_INLINE_THROW(uint32_t)
     1618iemNativeEmitAddTwoGprs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprAddend)
    15701619{
    15711620#if defined(RT_ARCH_AMD64)
    15721621    /* add Gv,Ev */
    15731622    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1574     AssertReturn(pbCodeBuf, UINT32_MAX);
    15751623    pbCodeBuf[off++] = (iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_R)
    15761624                     | (iGprAddend < 8 ? 0 : X86_OP_REX_B);
     
    15801628#elif defined(RT_ARCH_ARM64)
    15811629    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1582     AssertReturn(pu32CodeBuf, UINT32_MAX);
    15831630    pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(false /*fSub*/, iGprDst, iGprDst, iGprAddend);
    15841631
     
     15941641 * Emits a 64-bit GPR addition with an 8-bit signed immediate.
    15951642 */
    1596 DECLINLINE(uint32_t ) iemNativeEmitAddGprImm8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int8_t iImm8)
     1643DECL_INLINE_THROW(uint32_t)
     1644iemNativeEmitAddGprImm8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int8_t iImm8)
    15971645{
    15981646#if defined(RT_ARCH_AMD64)
     1647    /* add or inc */
    15991648    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    1600     AssertReturn(pbCodeBuf, UINT32_MAX);
    1601     /* add or inc */
    16021649    pbCodeBuf[off++] = iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_B;
    16031650    if (iImm8 != 1)
     
    16151662#elif defined(RT_ARCH_ARM64)
    16161663    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1617     AssertReturn(pu32CodeBuf, UINT32_MAX);
    16181664    if (iImm8 >= 0)
    16191665        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iGprDst, iGprDst, (uint8_t)iImm8);
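Hedged sketch (not part of the changeset): a read-modify-write of a 64-bit VCpu field using the load, add-immediate and store emitters; advancing guest RIP by a small instruction length is just a plausible example, and the helper name is invented.

    DECL_INLINE_THROW(uint32_t)
    iemNativeEmitExampleAdvanceRip(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprTmp, uint8_t cbInstr)
    {
        off = iemNativeEmitLoadGprFromVCpuU64(pReNative, off, iGprTmp, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));
        off = iemNativeEmitAddGprImm8(pReNative, off, iGprTmp, (int8_t)cbInstr); /* cbInstr is 1..15 */
        off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, iGprTmp, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));
        return off;
    }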
     
    16331679 * @note Bits 32 thru 63 in the GPR will be zero after the operation.
    16341680 */
    1635 DECLINLINE(uint32_t ) iemNativeEmitAddGpr32Imm8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int8_t iImm8)
     1681DECL_INLINE_THROW(uint32_t)
     1682iemNativeEmitAddGpr32Imm8(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int8_t iImm8)
    16361683{
    16371684#if defined(RT_ARCH_AMD64)
     1685    /* add or inc */
    16381686    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    1639     AssertReturn(pbCodeBuf, UINT32_MAX);
    1640     /* add or inc */
    16411687    if (iGprDst >= 8)
    16421688        pbCodeBuf[off++] = X86_OP_REX_B;
     
    16551701#elif defined(RT_ARCH_ARM64)
    16561702    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1657     AssertReturn(pu32CodeBuf, UINT32_MAX);
    16581703    if (iImm8 >= 0)
    16591704        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iGprDst, iGprDst, (uint8_t)iImm8, false /*f64Bit*/);
     
     16721717 * Emits a 64-bit GPR addition with a 64-bit signed addend.
    16731718 */
    1674 DECLINLINE(uint32_t ) iemNativeEmitAddGprImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int64_t iAddend)
     1719DECL_INLINE_THROW(uint32_t)
     1720iemNativeEmitAddGprImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int64_t iAddend)
    16751721{
    16761722#if defined(RT_ARCH_AMD64)
     
    16821728        /* add grp, imm32 */
    16831729        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1684         AssertReturn(pbCodeBuf, UINT32_MAX);
    16851730        pbCodeBuf[off++] = iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_B;
    16861731        pbCodeBuf[off++] = 0x81;
     
    16951740        /* Best to use a temporary register to deal with this in the simplest way: */
    16961741        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, (uint64_t)iAddend);
    1697         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    16981742
    16991743        /* add dst, tmpreg  */
    17001744        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1701         AssertReturn(pbCodeBuf, UINT32_MAX);
    17021745        pbCodeBuf[off++] = (iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_R)
    17031746                         | (iTmpReg < 8 ? 0 : X86_OP_REX_B);
     
    17121755    {
    17131756        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1714         AssertReturn(pu32CodeBuf, UINT32_MAX);
    17151757        if (iAddend >= 0)
    17161758            pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iGprDst, iGprDst, (uint32_t)iAddend);
     
    17221764        /* Use temporary register for the immediate. */
    17231765        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, (uint64_t)iAddend);
    1724         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    17251766
    17261767        /* add gprdst, gprdst, tmpreg */
    17271768        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1728         AssertReturn(pu32CodeBuf, UINT32_MAX);
    17291769        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(false /*fSub*/, iGprDst, iGprDst, iTmpReg);
    17301770
     
    17441784 * @note Bits 32 thru 63 in the GPR will be zero after the operation.
    17451785 */
    1746 DECLINLINE(uint32_t ) iemNativeEmitAddGpr32Imm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t iAddend)
     1786DECL_INLINE_THROW(uint32_t)
     1787iemNativeEmitAddGpr32Imm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, int32_t iAddend)
    17471788{
    17481789#if defined(RT_ARCH_AMD64)
     
    17521793    /* add grp, imm32 */
    17531794    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1754     AssertReturn(pbCodeBuf, UINT32_MAX);
    17551795    if (iGprDst >= 8)
    17561796        pbCodeBuf[off++] = X86_OP_REX_B;
     
    17661806    {
    17671807        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1768         AssertReturn(pu32CodeBuf, UINT32_MAX);
    17691808        if (iAddend >= 0)
    17701809            pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iGprDst, iGprDst, (uint32_t)iAddend, false /*f64Bit*/);
     
    17761815        /* Use temporary register for the immediate. */
    17771816        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, (uint32_t)iAddend);
    1778         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    17791817
    17801818        /* add gprdst, gprdst, tmpreg */
    17811819        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1782         AssertReturn(pu32CodeBuf, UINT32_MAX);
    17831820        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(false /*fSub*/, iGprDst, iGprDst, iTmpReg, false /*f64Bit*/);
    17841821
     
    18021839 * Emits code for clearing bits 16 thru 63 in the GPR.
    18031840 */
    1804 DECLINLINE(uint32_t ) iemNativeEmitClear16UpGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst)
     1841DECL_INLINE_THROW(uint32_t)
     1842iemNativeEmitClear16UpGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst)
    18051843{
    18061844#if defined(RT_ARCH_AMD64)
    18071845    /* movzx reg32, reg16 */
    18081846    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    1809     AssertReturn(pbCodeBuf, UINT32_MAX);
    18101847    if (iGprDst >= 8)
    18111848        pbCodeBuf[off++] = X86_OP_REX_B | X86_OP_REX_R;
     
    18161853#elif defined(RT_ARCH_ARM64)
    18171854    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1818     AssertReturn(pu32CodeBuf, UINT32_MAX);
    18191855# if 1
    18201856    pu32CodeBuf[off++] = Armv8A64MkInstrUxth(iGprDst, iGprDst);
     
    18371873 *       and ARM64 hosts.
    18381874 */
    1839 DECLINLINE(uint32_t ) iemNativeEmitAndGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc,
    1840                                               bool fSetFlags = false)
     1875DECL_INLINE_THROW(uint32_t)
     1876iemNativeEmitAndGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc, bool fSetFlags = false)
    18411877{
    18421878#if defined(RT_ARCH_AMD64)
    18431879    /* and Gv, Ev */
    18441880    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1845     AssertReturn(pbCodeBuf, UINT32_MAX);
    18461881    pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B);
    18471882    pbCodeBuf[off++] = 0x23;
     
    18511886#elif defined(RT_ARCH_ARM64)
    18521887    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1853     AssertReturn(pu32CodeBuf, UINT32_MAX);
    18541888    if (!fSetFlags)
    18551889        pu32CodeBuf[off++] = Armv8A64MkInstrAnd(iGprDst, iGprDst, iGprSrc);
     
    18681902 * Emits code for AND'ing two 32-bit GPRs.
    18691903 */
    1870 DECLINLINE(uint32_t ) iemNativeEmitAndGpr32ByGpr32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
     1904DECL_INLINE_THROW(uint32_t)
     1905iemNativeEmitAndGpr32ByGpr32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
    18711906{
    18721907#if defined(RT_ARCH_AMD64)
    18731908    /* and Gv, Ev */
    18741909    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    1875     AssertReturn(pbCodeBuf, UINT32_MAX);
    18761910    if (iGprDst >= 8 || iGprSrc >= 8)
    18771911        pbCodeBuf[off++] = (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B);
     
    18811915#elif defined(RT_ARCH_ARM64)
    18821916    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1883     AssertReturn(pu32CodeBuf, UINT32_MAX);
    18841917    pu32CodeBuf[off++] = Armv8A64MkInstrAnd(iGprDst, iGprDst, iGprSrc, false /*f64Bit*/);
    18851918
     
    18981931 *       and ARM64 hosts.
    18991932 */
    1900 DECLINLINE(uint32_t ) iemNativeEmitAndGprByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint64_t uImm,
    1901                                               bool fSetFlags = false)
     1933DECL_INLINE_THROW(uint32_t)
     1934iemNativeEmitAndGprByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint64_t uImm, bool fSetFlags = false)
    19021935{
    19031936#if defined(RT_ARCH_AMD64)
     
    19061939        /* and Ev, imm8 */
    19071940        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    1908         AssertReturn(pbCodeBuf, UINT32_MAX);
    19091941        pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R);
    19101942        pbCodeBuf[off++] = 0x83;
     
    19161948        /* and Ev, imm32 */
    19171949        uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1918         AssertReturn(pbCodeBuf, UINT32_MAX);
    19191950        pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R);
    19201951        pbCodeBuf[off++] = 0x81;
     
    19291960        /* Use temporary register for the 64-bit immediate. */
    19301961        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    1931         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    19321962        off = iemNativeEmitAndGprByGpr(pReNative, off, iGprDst, iTmpReg);
    19331963        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
     
    19411971    {
    19421972        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    1943         AssertReturn(pu32CodeBuf, UINT32_MAX);
    19441973        if (!fSetFlags)
    19451974            pu32CodeBuf[off++] = Armv8A64MkInstrAndImm(iGprDst, iGprDst, uImmNandS, uImmR);
     
    19511980        /* Use temporary register for the 64-bit immediate. */
    19521981        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    1953         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    19541982        off = iemNativeEmitAndGprByGpr(pReNative, off, iGprDst, iTmpReg, fSetFlags);
    19551983        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
     
     19671995 * Emits code for AND'ing a 32-bit GPR with a constant.
    19681996 */
    1969 DECLINLINE(uint32_t ) iemNativeEmitAndGpr32ByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint32_t uImm,
    1970                                                 bool fSetFlags = false)
     1997DECL_INLINE_THROW(uint32_t)
     1998iemNativeEmitAndGpr32ByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint32_t uImm, bool fSetFlags = false)
    19711999{
    19722000#if defined(RT_ARCH_AMD64)
    19732001    /* and Ev, imm */
    19742002    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    1975     AssertReturn(pbCodeBuf, UINT32_MAX);
    19762003    if (iGprDst >= 8)
    19772004        pbCodeBuf[off++] = X86_OP_REX_R;
     
    19992026    {
    20002027        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2001         AssertReturn(pu32CodeBuf, UINT32_MAX);
    20022028        if (!fSetFlags)
    20032029            pu32CodeBuf[off++] = Armv8A64MkInstrAndImm(iGprDst, iGprDst, uImmNandS, uImmR, false /*f64Bit*/);
     
     20092035        /* Use temporary register for the 32-bit immediate. */
    20102036        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    2011         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    20122037        if (!fSetFlags)
    20132038            off = iemNativeEmitAndGpr32ByGpr32(pReNative, off, iGprDst, iTmpReg);
     
    20282053 * Emits code for XOR'ing two 64-bit GPRs.
    20292054 */
    2030 DECLINLINE(uint32_t ) iemNativeEmitXorGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
     2055DECL_INLINE_THROW(uint32_t)
     2056iemNativeEmitXorGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
    20312057{
    20322058#if defined(RT_ARCH_AMD64)
     20332059    /* xor Gv, Ev */
    20342060    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    2035     AssertReturn(pbCodeBuf, UINT32_MAX);
    20362061    pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B);
    20372062    pbCodeBuf[off++] = 0x33;
     
    20402065#elif defined(RT_ARCH_ARM64)
    20412066    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2042     AssertReturn(pu32CodeBuf, UINT32_MAX);
    20432067    pu32CodeBuf[off++] = Armv8A64MkInstrEor(iGprDst, iGprDst, iGprSrc);
    20442068
     
    20542078 * Emits code for XOR'ing two 32-bit GPRs.
    20552079 */
    2056 DECLINLINE(uint32_t ) iemNativeEmitXorGpr32ByGpr32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
     2080DECL_INLINE_THROW(uint32_t)
     2081iemNativeEmitXorGpr32ByGpr32(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc)
    20572082{
    20582083#if defined(RT_ARCH_AMD64)
     20592084    /* xor Gv, Ev */
    20602085    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    2061     AssertReturn(pbCodeBuf, UINT32_MAX);
    20622086    if (iGprDst >= 8 || iGprSrc >= 8)
    20632087        pbCodeBuf[off++] = (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B);
     
    20672091#elif defined(RT_ARCH_ARM64)
    20682092    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2069     AssertReturn(pu32CodeBuf, UINT32_MAX);
    20702093    pu32CodeBuf[off++] = Armv8A64MkInstrEor(iGprDst, iGprDst, iGprSrc, false /*f64Bit*/);
    20712094
     
    20852108 * Emits code for shifting a GPR a fixed number of bits to the left.
    20862109 */
    2087 DECLINLINE(uint32_t ) iemNativeEmitShiftGprLeft(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
     2110DECL_INLINE_THROW(uint32_t)
     2111iemNativeEmitShiftGprLeft(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
    20882112{
    20892113    Assert(cShift > 0 && cShift < 64);
     
    20922116    /* shl dst, cShift */
    20932117    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2094     AssertReturn(pbCodeBuf, UINT32_MAX);
    20952118    pbCodeBuf[off++] = iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_B;
    20962119    if (cShift != 1)
     
    21082131#elif defined(RT_ARCH_ARM64)
    21092132    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2110     AssertReturn(pu32CodeBuf, UINT32_MAX);
    21112133    pu32CodeBuf[off++] = Armv8A64MkInstrLslImm(iGprDst, iGprDst, cShift);
    21122134
     
    21222144 * Emits code for shifting a 32-bit GPR a fixed number of bits to the left.
    21232145 */
    2124 DECLINLINE(uint32_t ) iemNativeEmitShiftGpr32Left(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
     2146DECL_INLINE_THROW(uint32_t)
     2147iemNativeEmitShiftGpr32Left(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
    21252148{
    21262149    Assert(cShift > 0 && cShift < 32);
     
    21292152    /* shl dst, cShift */
    21302153    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2131     AssertReturn(pbCodeBuf, UINT32_MAX);
    21322154    if (iGprDst >= 8)
    21332155        pbCodeBuf[off++] = X86_OP_REX_B;
     
    21462168#elif defined(RT_ARCH_ARM64)
    21472169    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2148     AssertReturn(pu32CodeBuf, UINT32_MAX);
    21492170    pu32CodeBuf[off++] = Armv8A64MkInstrLslImm(iGprDst, iGprDst, cShift, false /*64Bit*/);
    21502171
     
    21602181 * Emits code for (unsigned) shifting a GPR a fixed number of bits to the right.
    21612182 */
    2162 DECLINLINE(uint32_t ) iemNativeEmitShiftGprRight(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
     2183DECL_INLINE_THROW(uint32_t)
     2184iemNativeEmitShiftGprRight(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
    21632185{
    21642186    Assert(cShift > 0 && cShift < 64);
     
    21672189    /* shr dst, cShift */
    21682190    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2169     AssertReturn(pbCodeBuf, UINT32_MAX);
    21702191    pbCodeBuf[off++] = iGprDst < 8 ? X86_OP_REX_W : X86_OP_REX_W | X86_OP_REX_B;
    21712192    if (cShift != 1)
     
    21832204#elif defined(RT_ARCH_ARM64)
    21842205    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2185     AssertReturn(pu32CodeBuf, UINT32_MAX);
    21862206    pu32CodeBuf[off++] = Armv8A64MkInstrLsrImm(iGprDst, iGprDst, cShift);
    21872207
     
    21982218 * right.
    21992219 */
    2200 DECLINLINE(uint32_t ) iemNativeEmitShiftGpr32Right(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
     2220DECL_INLINE_THROW(uint32_t)
     2221iemNativeEmitShiftGpr32Right(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t cShift)
    22012222{
    22022223    Assert(cShift > 0 && cShift < 32);
     
    22052226    /* shr dst, cShift */
    22062227    uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2207     AssertReturn(pbCodeBuf, UINT32_MAX);
    22082228    if (iGprDst >= 8)
    22092229        pbCodeBuf[off++] = X86_OP_REX_B;
     
    22222242#elif defined(RT_ARCH_ARM64)
    22232243    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2224     AssertReturn(pu32CodeBuf, UINT32_MAX);
    22252244    pu32CodeBuf[off++] = Armv8A64MkInstrLsrImm(iGprDst, iGprDst, cShift, false /*64Bit*/);
    22262245
     
    22432262 * Emits an ARM64 compare instruction.
    22442263 */
    2245 DECLINLINE(uint32_t) iemNativeEmitCmpArm64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight,
    2246                                            bool f64Bit = true, uint32_t cShift = 0,
    2247                                           ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsr)
     2264DECL_INLINE_THROW(uint32_t)
     2265iemNativeEmitCmpArm64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight,
     2266                      bool f64Bit = true, uint32_t cShift = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsr)
    22482267{
    22492268    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2250     AssertReturn(pu32CodeBuf, UINT32_MAX);
    22512269    pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(true /*fSub*/, ARMV8_A64_REG_XZR /*iRegResult*/, iGprLeft, iGprRight,
    22522270                                                  f64Bit, true /*fSetFlags*/, cShift, enmShift);
     
     22612279 * with a conditional instruction.
    22622280 */
    2263 DECLINLINE(uint32_t) iemNativeEmitCmpGprWithGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight)
     2281DECL_INLINE_THROW(uint32_t)
     2282iemNativeEmitCmpGprWithGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight)
    22642283{
    22652284#ifdef RT_ARCH_AMD64
    22662285    /* cmp Gv, Ev */
    22672286    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    2268     AssertReturn(pbCodeBuf, UINT32_MAX);
    22692287    pbCodeBuf[off++] = X86_OP_REX_W | (iGprLeft >= 8 ? X86_OP_REX_R : 0) | (iGprRight >= 8 ? X86_OP_REX_B : 0);
    22702288    pbCodeBuf[off++] = 0x3b;
     
     22862304 * with a conditional instruction.
    22872305 */
    2288 DECLINLINE(uint32_t) iemNativeEmitCmpGpr32WithGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2289                                                   uint8_t iGprLeft, uint8_t iGprRight)
     2306DECL_INLINE_THROW(uint32_t)
     2307iemNativeEmitCmpGpr32WithGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight)
    22902308{
    22912309#ifdef RT_ARCH_AMD64
    22922310    /* cmp Gv, Ev */
    22932311    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    2294     AssertReturn(pbCodeBuf, UINT32_MAX);
    22952312    if (iGprLeft >= 8 || iGprRight >= 8)
    22962313        pbCodeBuf[off++] = (iGprLeft >= 8 ? X86_OP_REX_R : 0) | (iGprRight >= 8 ? X86_OP_REX_B : 0);
     
     23132331 * flags/whatever for use with a conditional instruction.
    23142331 */
    2315 DECLINLINE(uint32_t) iemNativeEmitCmpGprWithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint64_t uImm)
     2332DECL_INLINE_THROW(uint32_t)
     2333iemNativeEmitCmpGprWithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint64_t uImm)
    23162334{
    23172335#ifdef RT_ARCH_AMD64
     
    23202338        /* cmp Ev, Ib */
    23212339        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4);
    2322         AssertReturn(pbCodeBuf, UINT32_MAX);
    23232340        pbCodeBuf[off++] = X86_OP_REX_W | (iGprLeft >= 8 ? X86_OP_REX_B : 0);
    23242341        pbCodeBuf[off++] = 0x83;
     
    23302347        /* cmp Ev, imm */
    23312348        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    2332         AssertReturn(pbCodeBuf, UINT32_MAX);
    23332349        pbCodeBuf[off++] = X86_OP_REX_W | (iGprLeft >= 8 ? X86_OP_REX_B : 0);
    23342350        pbCodeBuf[off++] = 0x81;
     
    23432359    {
    23442360        /* Use temporary register for the immediate. */
    2345         uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    2346         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    2347 
     2361        uint8_t const iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    23482362        off = iemNativeEmitCmpGprWithGpr(pReNative, off, iGprLeft, iTmpReg);
    2349 
    23502363        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
    23512364    }
     
    23562369    {
    23572370        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2358         AssertReturn(pu32CodeBuf, UINT32_MAX);
    23592371        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
    23602372                                                         true /*64Bit*/, true /*fSetFlags*/);
     
    23632375    {
    23642376        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2365         AssertReturn(pu32CodeBuf, UINT32_MAX);
    23662377        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
    23672378                                                         true /*64Bit*/, true /*fSetFlags*/, true /*fShift12*/);
     
    23712382        /* Use temporary register for the immediate. */
    23722383        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    2373         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    2374 
    23752384        off = iemNativeEmitCmpGprWithGpr(pReNative, off, iGprLeft, iTmpReg);
    2376 
    23772385        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
    23782386    }
     
     23912399 * flags/whatever for use with a conditional instruction.
    23922400 */
    2393 DECLINLINE(uint32_t) iemNativeEmitCmpGpr32WithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint32_t uImm)
     2401DECL_INLINE_THROW(uint32_t)
     2402iemNativeEmitCmpGpr32WithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint32_t uImm)
    23942403{
    23952404#ifdef RT_ARCH_AMD64
    23962405    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    2397     AssertReturn(pbCodeBuf, UINT32_MAX);
    23982406    if (iGprLeft >= 8)
    23992407        pbCodeBuf[off++] = X86_OP_REX_B;
     
    24222430    {
    24232431        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2424         AssertReturn(pu32CodeBuf, UINT32_MAX);
    24252432        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
    24262433                                                         false /*64Bit*/, true /*fSetFlags*/);
     
    24292436    {
    24302437        uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2431         AssertReturn(pu32CodeBuf, UINT32_MAX);
    24322438        pu32CodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
    24332439                                                         false /*64Bit*/, true /*fSetFlags*/, true /*fShift12*/);
     
    24372443        /* Use temporary register for the immediate. */
    24382444        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm);
    2439         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    2440 
    24412445        off = iemNativeEmitCmpGpr32WithGpr(pReNative, off, iGprLeft, iTmpReg);
    2442 
    24432446        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
    24442447    }
     
    24612464 * Emits a JMP rel32 / B imm19 to the given label.
    24622465 */
    2463 DECLINLINE(uint32_t) iemNativeEmitJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2466DECL_INLINE_THROW(uint32_t)
     2467iemNativeEmitJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    24642468{
    24652469    Assert(idxLabel < pReNative->cLabels);
     
    24672471#ifdef RT_ARCH_AMD64
    24682472    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    2469     AssertReturn(pbCodeBuf, UINT32_MAX);
    24702473    if (pReNative->paLabels[idxLabel].off != UINT32_MAX)
    24712474    {
     
    24892492    {
    24902493        pbCodeBuf[off++] = 0xe9;                    /* jmp rel32 */
    2491         AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_Rel32, -4), UINT32_MAX);
     2494        iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_Rel32, -4);
    24922495        pbCodeBuf[off++] = 0xfe;
    24932496        pbCodeBuf[off++] = 0xff;
     
    24992502#elif defined(RT_ARCH_ARM64)
    25002503    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2501     AssertReturn(pu32CodeBuf, UINT32_MAX);
    25022504    if (pReNative->paLabels[idxLabel].off != UINT32_MAX)
     25032505        pu32CodeBuf[off++] = Armv8A64MkInstrB(pReNative->paLabels[idxLabel].off - off);
    25042506    else
    25052507    {
    2506         AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5), UINT32_MAX);
     2508        iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5);
    25072509        pu32CodeBuf[off++] = Armv8A64MkInstrB(-1);
    25082510    }
     
    25192521 * Emits a JMP rel32 / B imm19 to a new undefined label.
    25202522 */
    2521 DECLINLINE(uint32_t) iemNativeEmitJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2522                                                 IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2523DECL_INLINE_THROW(uint32_t)
     2524iemNativeEmitJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    25232525{
    25242526    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    2525     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    25262527    return iemNativeEmitJmpToLabel(pReNative, off, idxLabel);
    25272528}
     
    25582559 * Emits a Jcc rel32 / B.cc imm19 to the given label (ASSUMED requiring fixup).
    25592560 */
    2560 DECLINLINE(uint32_t) iemNativeEmitJccToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2561                                             uint32_t idxLabel, IEMNATIVEINSTRCOND enmCond)
     2561DECL_INLINE_THROW(uint32_t)
     2562iemNativeEmitJccToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel, IEMNATIVEINSTRCOND enmCond)
    25622563{
    25632564    Assert(idxLabel < pReNative->cLabels);
     
    25662567    /* jcc rel32 */
    25672568    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    2568     AssertReturn(pbCodeBuf, UINT32_MAX);
    25692569    pbCodeBuf[off++] = 0x0f;
    25702570    pbCodeBuf[off++] = (uint8_t)enmCond | 0x80;
    2571     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_Rel32, -4), UINT32_MAX);
     2571    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_Rel32, -4);
    25722572    pbCodeBuf[off++] = 0x00;
    25732573    pbCodeBuf[off++] = 0x00;
     
    25772577#elif defined(RT_ARCH_ARM64)
    25782578    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2579     AssertReturn(pu32CodeBuf, UINT32_MAX);
    2580     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5), UINT32_MAX);
     2579    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5);
    25812580    pu32CodeBuf[off++] = Armv8A64MkInstrBCond(enmCond, -1);
    25822581
     
    25922591 * Emits a Jcc rel32 / B.cc imm19 to a new label.
    25932592 */
    2594 DECLINLINE(uint32_t) iemNativeEmitJccToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2595                                                 IEMNATIVELABELTYPE enmLabelType, uint16_t uData, IEMNATIVEINSTRCOND enmCond)
     2593DECL_INLINE_THROW(uint32_t)
     2594iemNativeEmitJccToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2595                           IEMNATIVELABELTYPE enmLabelType, uint16_t uData, IEMNATIVEINSTRCOND enmCond)
    25962596{
    25972597    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    2598     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    25992598    return iemNativeEmitJccToLabel(pReNative, off, idxLabel, enmCond);
    26002599}
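Hedged sketch (not part of the changeset) pairing the compare and conditional-branch emitters; idxLabel is assumed to come from iemNativeLabelCreate() and enmCond from the IEMNATIVEINSTRCOND set used above. The wrapper name is invented.

    DECL_INLINE_THROW(uint32_t)
    iemNativeEmitExampleCmpImmAndJcc(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft,
                                     uint32_t uImm, uint32_t idxLabel, IEMNATIVEINSTRCOND enmCond)
    {
        off = iemNativeEmitCmpGpr32WithImm(pReNative, off, iGprLeft, uImm);
        off = iemNativeEmitJccToLabel(pReNative, off, idxLabel, enmCond);
        return off;
    }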
     
    26042603 * Emits a JZ/JE rel32 / B.EQ imm19 to the given label.
    26052604 */
    2606 DECLINLINE(uint32_t) iemNativeEmitJzToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2605DECL_INLINE_THROW(uint32_t) iemNativeEmitJzToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    26072606{
    26082607#ifdef RT_ARCH_AMD64
     
    26182617 * Emits a JZ/JE rel32 / B.EQ imm19 to a new label.
    26192618 */
    2620 DECLINLINE(uint32_t) iemNativeEmitJzToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2621                                                IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2619DECL_INLINE_THROW(uint32_t) iemNativeEmitJzToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2620                                                      IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    26222621{
    26232622#ifdef RT_ARCH_AMD64
     
    26342633 * Emits a JNZ/JNE rel32 / B.NE imm19 to the given label.
    26352634 */
    2636 DECLINLINE(uint32_t) iemNativeEmitJnzToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2635DECL_INLINE_THROW(uint32_t) iemNativeEmitJnzToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    26372636{
    26382637#ifdef RT_ARCH_AMD64
     
    26482647 * Emits a JNZ/JNE rel32 / B.NE imm19 to a new label.
    26492648 */
    2650 DECLINLINE(uint32_t) iemNativeEmitJnzToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2651                                                 IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2649DECL_INLINE_THROW(uint32_t) iemNativeEmitJnzToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2650                                                       IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    26522651{
    26532652#ifdef RT_ARCH_AMD64
     
    26642663 * Emits a JBE/JNA rel32 / B.LS imm19 to the given label.
    26652664 */
    2666 DECLINLINE(uint32_t) iemNativeEmitJbeToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2665DECL_INLINE_THROW(uint32_t) iemNativeEmitJbeToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    26672666{
    26682667#ifdef RT_ARCH_AMD64
     
    26782677 * Emits a JBE/JNA rel32 / B.LS imm19 to a new label.
    26792678 */
    2680 DECLINLINE(uint32_t) iemNativeEmitJbeToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2681                                                 IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2679DECL_INLINE_THROW(uint32_t) iemNativeEmitJbeToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2680                                                       IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    26822681{
    26832682#ifdef RT_ARCH_AMD64
     
    26942693 * Emits a JA/JNBE rel32 / B.HI imm19 to the given label.
    26952694 */
    2696 DECLINLINE(uint32_t) iemNativeEmitJaToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
     2695DECL_INLINE_THROW(uint32_t) iemNativeEmitJaToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxLabel)
    26972696{
    26982697#ifdef RT_ARCH_AMD64
     
    27082707 * Emits a JA/JNBE rel32 / B.HI imm19 to a new label.
    27092708 */
    2710 DECLINLINE(uint32_t) iemNativeEmitJaToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2711                                                IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     2709DECL_INLINE_THROW(uint32_t) iemNativeEmitJaToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2710                                                      IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    27122711{
    27132712#ifdef RT_ARCH_AMD64
     
     27252724 * How @a offTarget is applied is target specific.
    27262725 */
    2727 DECLINLINE(uint32_t) iemNativeEmitJccToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2728                                             int32_t offTarget, IEMNATIVEINSTRCOND enmCond)
     2726DECL_INLINE_THROW(uint32_t)
     2727iemNativeEmitJccToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget, IEMNATIVEINSTRCOND enmCond)
    27292728{
    27302729#ifdef RT_ARCH_AMD64
    27312730    /* jcc rel32 */
    27322731    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    2733     AssertReturn(pbCodeBuf, UINT32_MAX);
    27342732    if (offTarget < 128 && offTarget >= -128)
    27352733    {
     
    27492747#elif defined(RT_ARCH_ARM64)
    27502748    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2751     AssertReturn(pu32CodeBuf, UINT32_MAX);
    27522749    pu32CodeBuf[off++] = Armv8A64MkInstrBCond(enmCond, offTarget);
    27532750
     
     27642761 * How @a offTarget is applied is target specific.
    27652762 */
    2766 DECLINLINE(uint32_t) iemNativeEmitJzToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
     2763DECL_INLINE_THROW(uint32_t) iemNativeEmitJzToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
    27672764{
    27682765#ifdef RT_ARCH_AMD64
     
     27802777 * How @a offTarget is applied is target specific.
    27812778 */
    2782 DECLINLINE(uint32_t) iemNativeEmitJnzToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
     2779DECL_INLINE_THROW(uint32_t) iemNativeEmitJnzToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
    27832780{
    27842781#ifdef RT_ARCH_AMD64
     
     27962793 * How @a offTarget is applied is target specific.
    27972794 */
    2798 DECLINLINE(uint32_t) iemNativeEmitJbeToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
     2795DECL_INLINE_THROW(uint32_t) iemNativeEmitJbeToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
    27992796{
    28002797#ifdef RT_ARCH_AMD64
     
    28122809 * How @a offJmp is applied is target specific.
    28132810 */
    2814 DECLINLINE(uint32_t) iemNativeEmitJaToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
     2811DECL_INLINE_THROW(uint32_t) iemNativeEmitJaToFixed(PIEMRECOMPILERSTATE pReNative, uint32_t off, int32_t offTarget)
    28152812{
    28162813#ifdef RT_ARCH_AMD64
     
    28612858 * Internal helper, don't call directly.
    28622859 */
    2863 DECLINLINE(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfCc(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2864                                                                 uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel,
    2865                                                                bool fJmpIfSet)
     2860DECL_INLINE_THROW(uint32_t)
     2861iemNativeEmitTestBitInGprAndJmpToLabelIfCc(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc,
     2862                                           uint8_t iBitNo, uint32_t idxLabel, bool fJmpIfSet)
    28662863{
    28672864    Assert(iBitNo < 64);
    28682865#ifdef RT_ARCH_AMD64
    28692866    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5);
    2870     AssertReturn(pbCodeBuf, UINT32_MAX);
    28712867    if (iBitNo < 8)
    28722868    {
     
    28962892    /* Use the TBNZ instruction here. */
    28972893    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    2898     AssertReturn(pu32CodeBuf, UINT32_MAX);
    2899     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm14At5), UINT32_MAX);
     2894    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm14At5);
    29002895    pu32CodeBuf[off++] = Armv8A64MkInstrTbzTbnz(fJmpIfSet, 0, iGprSrc, iBitNo);
    29012896
     
    29142909 * @note On ARM64 the range is only +/-8191 instructions.
    29152910 */
    2916 DECLINLINE(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2917                                                                  uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel)
     2911DECL_INLINE_THROW(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2912                                                                        uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel)
    29182913{
    29192914    return iemNativeEmitTestBitInGprAndJmpToLabelIfCc(pReNative, off, iGprSrc, iBitNo, idxLabel, true /*fJmpIfSet*/);
     
    29272922 * @note On ARM64 the range is only +/-8191 instructions.
    29282923 */
    2929 DECLINLINE(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    2930                                                                     uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel)
     2924DECL_INLINE_THROW(uint32_t) iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     2925                                                                           uint8_t iGprSrc, uint8_t iBitNo, uint32_t idxLabel)
    29312926{
    29322927    return iemNativeEmitTestBitInGprAndJmpToLabelIfCc(pReNative, off, iGprSrc, iBitNo, idxLabel, false /*fJmpIfSet*/);
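
A short usage sketch for the two wrappers above (illustrative only; pReNative and off are the usual emitter state, while iGprEfl, idxLabelRaise and the bit numbers are placeholder values not taken from the changeset). Since the helpers now throw on buffer or fixup failure, the caller simply chains the calls without checking for UINT32_MAX:

    /* Hypothetical caller: branch to an existing label depending on single bits
     * in a host register.  All names and values here are placeholders. */
    off = iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(pReNative, off, iGprEfl,
                                                         9 /* example bit */, idxLabelRaise);
    off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, iGprEfl,
                                                      63 /* example bit */, idxLabelRaise);
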
     
    29382933 * flags accordingly.
    29392934 */
    2940 DECLINLINE(uint32_t) iemNativeEmitTestAnyBitsInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t fBits)
     2935DECL_INLINE_THROW(uint32_t)
     2936iemNativeEmitTestAnyBitsInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t fBits)
    29412937{
    29422938    Assert(fBits != 0);
     
    29462942    {
    29472943        uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBits);
    2948         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    29492944
    29502945        /* test Ev,Gv */
    29512946        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5);
    2952         AssertReturn(pbCodeBuf, UINT32_MAX);
    29532947        pbCodeBuf[off++] = X86_OP_REX_W | (iGprSrc < 8 ? 0 : X86_OP_REX_R) | (iTmpReg < 8 ? 0 : X86_OP_REX_B);
    29542948        pbCodeBuf[off++] = 0x85;
     
    29612955        /* test Eb, imm8 or test Ev, imm32 */
    29622956        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    2963         AssertReturn(pbCodeBuf, UINT32_MAX);
    29642957        if (fBits <= UINT8_MAX)
    29652958        {
     
    29842977    /** @todo implement me. */
    29852978    else
    2986         AssertFailedReturn(UINT32_MAX);
     2979        AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_CASE_NOT_IMPLEMENTED_1));
    29872980
    29882981#elif defined(RT_ARCH_ARM64)
     
    29942987    else
    29952988    {
    2996         uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBits);
    2997         AssertReturn(iTmpReg < RT_ELEMENTS(pReNative->Core.aHstRegs), UINT32_MAX);
    2998 
    29992989        /* ands Zr, iGprSrc, iTmpReg */
     2990        uint8_t const iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBits);
    30002991        uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    3001         AssertReturn(pu32CodeBuf, UINT32_MAX);
    30022992        pu32CodeBuf[off++] = Armv8A64MkInstrAnds(ARMV8_A64_REG_XZR, iGprSrc, iTmpReg);
    3003 
    30042993        iemNativeRegFreeTmpImm(pReNative, iTmpReg);
    30052994    }
     
    30173006 * are set in @a iGprSrc.
    30183007 */
    3019 DECLINLINE(uint32_t) iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3020                                                                         uint8_t iGprSrc, uint64_t fBits, uint32_t idxLabel)
     3008DECL_INLINE_THROW(uint32_t)
     3009iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3010                                                   uint8_t iGprSrc, uint64_t fBits, uint32_t idxLabel)
    30213011{
    30223012    Assert(fBits); Assert(!RT_IS_POWER_OF_TWO(fBits));
     
    30333023 * are set in @a iGprSrc.
    30343024 */
    3035 DECLINLINE(uint32_t) iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfNoneSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3036                                                                          uint8_t iGprSrc, uint64_t fBits, uint32_t idxLabel)
     3025DECL_INLINE_THROW(uint32_t)
     3026iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfNoneSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3027                                                    uint8_t iGprSrc, uint64_t fBits, uint32_t idxLabel)
    30373028{
    30383029    Assert(fBits); Assert(!RT_IS_POWER_OF_TWO(fBits));
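
And similarly for the any-bits pair (again a hedged sketch with placeholder names; note the asserted contract that the mask is neither zero nor a single bit - single-bit tests should use the bit-test emitters above):

    /* Hypothetical caller: jump to idxLabelSlowPath if any of two example bits
     * is set in iGprFlags.  Mask, register and label names are placeholders. */
    off = iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(pReNative, off, iGprFlags,
                                                             RT_BIT_64(3) | RT_BIT_64(17),
                                                             idxLabelSlowPath);
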
     
    30503041 * The operand size is given by @a f64Bit.
    30513042 */
    3052 DECLINLINE(uint32_t) iemNativeEmitTestIfGprIsZeroAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3053                                                                uint8_t iGprSrc, bool f64Bit, uint32_t idxLabel)
     3043DECL_INLINE_THROW(uint32_t) iemNativeEmitTestIfGprIsZeroAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3044                                                                      uint8_t iGprSrc, bool f64Bit, uint32_t idxLabel)
    30543045{
    30553046    Assert(idxLabel < pReNative->cLabels);
     
    30583049    /* test reg32,reg32  / test reg64,reg64 */
    30593050    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    3060     AssertReturn(pbCodeBuf, UINT32_MAX);
    30613051    if (f64Bit)
    30623052        pbCodeBuf[off++] = X86_OP_REX_W | (iGprSrc < 8 ? 0 : X86_OP_REX_R | X86_OP_REX_B);
     
    30723062#elif defined(RT_ARCH_ARM64)
    30733063    uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    3074     AssertReturn(pu32CodeBuf, UINT32_MAX);
    3075     AssertReturn(iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5), UINT32_MAX);
     3064    iemNativeAddFixup(pReNative, off, idxLabel, kIemNativeFixupType_RelImm19At5);
    30763065    pu32CodeBuf[off++] = Armv8A64MkInstrCbzCbnz(false /*fJmpIfNotZero*/, 0, iGprSrc, f64Bit);
    30773066    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     
    30893078 * The operand size is given by @a f64Bit.
    30903079 */
    3091 DECLINLINE(uint32_t) iemNativeEmitTestIfGprIsZeroAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc,
    3092                                                                   bool f64Bit, IEMNATIVELABELTYPE enmLabelType,
    3093                                                                  uint16_t uData = 0)
     3080DECL_INLINE_THROW(uint32_t)
     3081iemNativeEmitTestIfGprIsZeroAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, bool f64Bit,
     3082                                             IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    30943083{
    30953084    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    3096     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    30973085    return iemNativeEmitTestIfGprIsZeroAndJmpToLabel(pReNative, off, iGprSrc, f64Bit, idxLabel);
    30983086}
     
    31033091 * differs.
    31043092 */
    3105 DECLINLINE(uint32_t) iemNativeEmitTestIfGprNotEqualGprAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3106                                                                      uint8_t iGprLeft, uint8_t iGprRight, uint32_t idxLabel)
     3093DECL_INLINE_THROW(uint32_t)
     3094iemNativeEmitTestIfGprNotEqualGprAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3095                                               uint8_t iGprLeft, uint8_t iGprRight, uint32_t idxLabel)
    31073096{
    31083097    off = iemNativeEmitCmpGprWithGpr(pReNative, off, iGprLeft, iGprRight);
     
    31153104 * Emits code that jumps to a new label if @a iGprLeft and @a iGprRight differs.
    31163105 */
    3117 DECLINLINE(uint32_t) iemNativeEmitTestIfGprNotEqualGprAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3118                                                                        uint8_t iGprLeft, uint8_t iGprRight,
    3119                                                                        IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     3106DECL_INLINE_THROW(uint32_t)
     3107iemNativeEmitTestIfGprNotEqualGprAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3108                                                  uint8_t iGprLeft, uint8_t iGprRight,
     3109                                                  IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    31203110{
    31213111    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    3122     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    31233112    return iemNativeEmitTestIfGprNotEqualGprAndJmpToLabel(pReNative, off, iGprLeft, iGprRight, idxLabel);
    31243113}
     
    31283117 * Emits code that jumps to the given label if @a iGprSrc differs from @a uImm.
    31293118 */
    3130 DECLINLINE(uint32_t) iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3131                                                                     uint8_t iGprSrc, uint64_t uImm, uint32_t idxLabel)
     3119DECL_INLINE_THROW(uint32_t)
     3120iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3121                                               uint8_t iGprSrc, uint64_t uImm, uint32_t idxLabel)
    31323122{
    31333123    off = iemNativeEmitCmpGprWithImm(pReNative, off, iGprSrc, uImm);
     
    31403130 * Emits code that jumps to a new label if @a iGprSrc differs from @a uImm.
    31413131 */
    3142 DECLINLINE(uint32_t) iemNativeEmitTestIfGprNotEqualImmAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3143                                                                        uint8_t iGprSrc, uint64_t uImm,
    3144                                                                        IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     3132DECL_INLINE_THROW(uint32_t)
     3133iemNativeEmitTestIfGprNotEqualImmAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3134                                                  uint8_t iGprSrc, uint64_t uImm,
     3135                                                  IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    31453136{
    31463137    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    3147     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    31483138    return iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(pReNative, off, iGprSrc, uImm, idxLabel);
    31493139}
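
The *ToNewLabel variants in these hunks all share the same post-refactoring shape: iemNativeLabelCreate() is assumed to throw on failure, so its result feeds straight into the corresponding *ToLabel emitter without the old idxLabel != UINT32_MAX check. A hedged usage sketch (iGprValue, the immediate and enmLabelExample are placeholders, and uData is left at its default of 0):

    /* Hypothetical usage: create a new label of an assumed type and jump to it
     * when iGprValue differs from an expected constant. */
    off = iemNativeEmitTestIfGprNotEqualImmAndJmpToNewLabel(pReNative, off, iGprValue,
                                                            UINT64_C(0x1000) /* example */,
                                                            enmLabelExample);
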
     
    31543144 * @a uImm.
    31553145 */
    3156 DECLINLINE(uint32_t) iemNativeEmitTestIfGpr32NotEqualImmAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3157                                                                       uint8_t iGprSrc, uint32_t uImm, uint32_t idxLabel)
     3146DECL_INLINE_THROW(uint32_t) iemNativeEmitTestIfGpr32NotEqualImmAndJmpToLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3147                                                                             uint8_t iGprSrc, uint32_t uImm, uint32_t idxLabel)
    31583148{
    31593149    off = iemNativeEmitCmpGpr32WithImm(pReNative, off, iGprSrc, uImm);
     
    31673157 * @a uImm.
    31683158 */
    3169 DECLINLINE(uint32_t) iemNativeEmitTestIfGpr32NotEqualImmAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3170                                                                          uint8_t iGprSrc, uint32_t uImm,
    3171                                                                          IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
     3159DECL_INLINE_THROW(uint32_t)
     3160iemNativeEmitTestIfGpr32NotEqualImmAndJmpToNewLabel(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3161                                                    uint8_t iGprSrc, uint32_t uImm,
     3162                                                    IEMNATIVELABELTYPE enmLabelType, uint16_t uData = 0)
    31723163{
    31733164    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmLabelType, UINT32_MAX /*offWhere*/, uData);
    3174     AssertReturn(idxLabel != UINT32_MAX, UINT32_MAX);
    31753165    return iemNativeEmitTestIfGpr32NotEqualImmAndJmpToLabel(pReNative, off, iGprSrc, uImm, idxLabel);
    31763166}
     
    31813171 * Emits a call to a 64-bit address.
    31823172 */
    3183 DECLINLINE(uint32_t) iemNativeEmitCallImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uintptr_t uPfn)
     3173DECL_INLINE_THROW(uint32_t) iemNativeEmitCallImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uintptr_t uPfn)
    31843174{
    31853175#ifdef RT_ARCH_AMD64
     
    31883178    /* call rax */
    31893179    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
    3190     AssertReturn(pbCodeBuf, UINT32_MAX);
    31913180    pbCodeBuf[off++] = 0xff;
    31923181    pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 2, X86_GREG_xAX);
     
    31963185
    31973186    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    3198     AssertReturn(pu32CodeBuf, UINT32_MAX);
    31993187    pu32CodeBuf[off++] = Armv8A64MkInstrBlr(IEMNATIVE_REG_FIXED_TMP0);
     3188
    32003189#else
    32013190# error "port me"
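
Finally, a usage sketch for the call emitter (the helper name iemNativeHlpExample is made up; as the call rax / blr IEMNATIVE_REG_FIXED_TMP0 tails above suggest, the emitter materialises the target address in a scratch register itself, so the caller only supplies the flat 64-bit address):

    /* Hypothetical usage: emit a call to a C helper by flat 64-bit address. */
    off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)iemNativeHlpExample);
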