VirtualBox

Changeset 106315 in vbox for trunk/src/VBox/VMM


Timestamp: Oct 15, 2024 1:05:43 AM
Author: vboxsync
Message:

VMM/IEM: Reduce the number of parameters passed to the shadowed-guest-register allocator. bugref:10720
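The change replaces the allocator's runtime enmIntendedUse/fNoVolatileRegs parameters with template parameters on a shared worker, plus thin intent-named wrappers (...ReadOnly, ...Update, ...FullWrite, ...Calculation, and NoVolatile variants), so call sites pass fewer arguments and the compiler can discard the branches that do not apply to a given instantiation. A minimal sketch of the pattern follows; all names are hypothetical stand-ins, not the actual VMM code:

    // Sketch only: hypothetical types and names, none of the real allocator logic.
    #include <cstdint>

    enum class RegUse { ReadOnly, ForUpdate };

    struct State;   // stand-in for PIEMRECOMPILERSTATE

    // Before: intent and register-mask policy arrive as runtime arguments.
    uint8_t allocTmpForGuestRegOld(State *pState, uint32_t *poff, unsigned idxGstReg,
                                   RegUse enmUse, bool fNoVolatileRegs);

    // After: intent and mask become template parameters of a shared worker...
    template<RegUse a_enmUse, uint32_t a_fRegMask>
    static uint8_t allocTmpForGuestRegCommon(State *pState, uint32_t *poff, unsigned idxGstReg)
    {
        static_assert(a_fRegMask != 0, "need at least one allocatable register");
        if constexpr (a_enmUse == RegUse::ForUpdate)
        {
            /* dirty-tracking bookkeeping: compiled only into the ForUpdate instance */
        }
        (void)pState; (void)poff; (void)idxGstReg;
        uint8_t idxReg = 0;                     // pretend the lowest register
        while (!(a_fRegMask & (1u << idxReg)))  // permitted by the mask is free
            idxReg++;
        return idxReg;
    }

    // ...and callers use thin, intent-named wrappers with fewer parameters.
    uint8_t allocTmpForGuestRegReadOnly(State *pState, uint32_t *poff, unsigned idxGstReg)
    {
        return allocTmpForGuestRegCommon<RegUse::ReadOnly, 0xffffu>(pState, poff, idxGstReg);
    }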

Location: trunk/src/VBox/VMM
Files: 4 edited

  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompBltIn.cpp

--- r106187
+++ r106315
@@ -210,6 +210,6 @@
 {
     uint8_t const         idxEflReg  = !a_fCheckIrqs ? UINT8_MAX
-                                     : iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly,
-                                                                          RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER));
+                                     : iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off,
+                                                                                  RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER));
     uint8_t const         idxTmpReg1 = iemNativeRegAllocTmp(pReNative, &off);
     uint8_t const         idxTmpReg2 = a_fCheckIrqs ? iemNativeRegAllocTmp(pReNative, &off) : UINT8_MAX;
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h

--- r106196
+++ r106315
@@ -440,7 +440,7 @@
     off = iemNativeRegFlushPendingWrites(pReNative, off);
 
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ForUpdate,
-                                                                 RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER),
-                                                                 RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER));
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsForUpdate(pReNative, &off,
+                                                                          RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER),
+                                                                          RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER));
     off = iemNativeEmitTbExitIfAnyBitsSetInGpr<kIemNativeLabelType_ReturnWithFlags>(pReNative, off, idxEflReg,
                                                                                       X86_EFL_TF
@@ -3427,5 +3427,5 @@
 
     /* Get the eflags. */
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBits);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off, fLivenessEflBits);
 
     /* Test and jump. */
@@ -3455,5 +3455,5 @@
 
     /* Get the eflags. */
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBits);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off, fLivenessEflBits);
 
     /* Test and jump. */
@@ -3484,5 +3484,5 @@
 
     /* Get the eflags. */
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBit);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off, fLivenessEflBit);
 
     /* Test and jump. */
@@ -3513,5 +3513,5 @@
 
     /* Get the eflags. */
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBit);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off, fLivenessEflBit);
 
     /* Test and jump. */
@@ -3553,5 +3553,5 @@
 
     /* Get the eflags. */
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBits);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off, fLivenessEflBits);
 
 #ifdef RT_ARCH_AMD64
@@ -3635,5 +3635,5 @@
 
     /* Get the eflags. */
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBits);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off, fLivenessEflBits);
 
 #ifdef RT_ARCH_AMD64
@@ -3812,5 +3812,5 @@
        register allocator state.
        Doing EFLAGS first as it's more likely to be loaded, right? */
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBit);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off, fLivenessEflBit);
     uint8_t const idxGstRcxReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, IEMNATIVEGSTREG_GPR(X86_GREG_xCX),
                                                                  kIemNativeGstRegUse_ReadOnly);
@@ -3881,5 +3881,5 @@
        register allocator state.
        Doing EFLAGS first as it's more likely to be loaded, right? */
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEFlBit);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsReadOnly(pReNative, &off, fLivenessEFlBit);
     uint8_t const idxGstRcxReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, IEMNATIVEGSTREG_GPR(X86_GREG_xCX),
                                                                  kIemNativeGstRegUse_ReadOnly);
@@ -5962,6 +5962,6 @@
          *        zero, but since iemNativeVarRegisterSet clears the shadowing,
          *        that's counter productive... */
-        uint8_t const idxGstReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ForUpdate,
-                                                                     a_fLivenessEflInput, a_fLivenessEflOutput);
+        uint8_t const idxGstReg = iemNativeRegAllocTmpForGuestEFlagsForUpdate(pReNative, &off,
+                                                                              a_fLivenessEflInput, a_fLivenessEflOutput);
         iemNativeVarRegisterSet(pReNative, idxVarEFlags, idxGstReg, off, true /*fAllocated*/);
     }
@@ -6103,7 +6103,8 @@
 DECL_INLINE_THROW(uint32_t) iemNativeEmitModifyEFlagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off)
 {
-    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ForUpdate,
-                                                                 a_enmOp == kIemNativeEmitEflOp_Flip ? a_fLivenessEflBit : 0,
-                                                                 a_fLivenessEflBit);
+    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlagsForUpdate(pReNative, &off,
+                                                                            a_enmOp == kIemNativeEmitEflOp_Flip
+                                                                          ? a_fLivenessEflBit : 0,
+                                                                          a_fLivenessEflBit);
 
     /* Using 'if constexpr' forces code elimination in debug builds with VC. */
@@ -9797,5 +9798,6 @@
 
         uint8_t const idxRegTmp = iemNativeRegAllocTmp(pReNative, &off, false /*fPreferVolatile*/);
-        uint8_t const idxRegMxCsr = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_MxCsr, kIemNativeGstRegUse_ReadOnly);
+        uint8_t const idxRegMxCsr = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_MxCsr,
+                                                                    kIemNativeGstRegUse_ReadOnly);
 
         /*
@@ -9849,5 +9851,6 @@
         /** @todo Check the host supported flags (needs additional work to get the host features from CPUM)
          *        and implement alternate handling if FEAT_AFP is present. */
-        uint8_t const idxRegMxCsr = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_MxCsr, kIemNativeGstRegUse_ReadOnly);
+        uint8_t const idxRegMxCsr = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_MxCsr,
+                                                                    kIemNativeGstRegUse_ReadOnly);
 
         PIEMNATIVEINSTR pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10);
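In the EFlags call sites above, the RT_BIT_64(IEMLIVENESSBIT_...) arguments are liveness masks. Judging by the declarations in IEMN8veRecompiler.h further down, they only feed liveness validation in strict builds and are RT_NOREF'ed otherwise. A hedged sketch of that validation-only-parameter pattern, with hypothetical names standing in for the VBOX_STRICT plumbing:

    // Sketch: parameters that exist purely for strict-build validation.
    #include <cstdint>
    #include <cassert>

    #define MY_STRICT 1     // stand-in for IEMNATIVE_WITH_LIVENESS_ANALYSIS + VBOX_STRICT

    inline uint8_t allocEFlagsWorker(uint32_t *poff) { (void)poff; return 0; }

    #if MY_STRICT
    // Strict build: the masks drive assertions before delegating to the worker.
    inline uint8_t allocEFlagsReadOnly(uint32_t *poff, uint64_t fRead, uint64_t fWrite = 0)
    {
        assert(fRead || fWrite);    // callers must state what they touch
        return allocEFlagsWorker(poff);
    }
    #else
    // Non-strict build: same signature, masks ignored (RT_NOREF equivalent).
    inline uint8_t allocEFlagsReadOnly(uint32_t *poff, uint64_t fRead, uint64_t fWrite = 0)
    {
        (void)fRead; (void)fWrite;
        return allocEFlagsWorker(poff);
    }
    #endif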
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

--- r106314
+++ r106315
@@ -3594,9 +3594,8 @@
  * iemNativeRegAllocTmpForGuestEFlags().
  *
- * See iemNativeRegAllocTmpForGuestReg() for details.
- */
-static uint8_t
-iemNativeRegAllocTmpForGuestRegCommon(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg,
-                                      IEMNATIVEGSTREGUSE enmIntendedUse, bool fNoVolatileRegs)
+ * See iemNativeRegAllocTmpForGuestRegInt() for details.
+ */
+template<IEMNATIVEGSTREGUSE const a_enmIntendedUse, uint32_t const a_fRegMask>
+static uint8_t iemNativeRegAllocTmpForGuestRegCommon(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
 {
     Assert(enmGstReg < kIemNativeGstReg_End && g_aGstShadowInfo[enmGstReg].cb != 0);
@@ -3604,7 +3603,4 @@
     static const char * const s_pszIntendedUse[] = { "fetch", "update", "full write", "destructive calc" };
 #endif
-    uint32_t const fRegMask = !fNoVolatileRegs
-                            ? IEMNATIVE_HST_GREG_MASK & ~IEMNATIVE_REG_FIXED_MASK
-                            : IEMNATIVE_HST_GREG_MASK & ~IEMNATIVE_REG_FIXED_MASK & ~IEMNATIVE_CALL_VOLATILE_GREG_MASK;
 
     /*
@@ -3629,10 +3625,10 @@
             /* If the purpose is calculations, try duplicate the register value as
                we'll be clobbering the shadow. */
-            if (   enmIntendedUse == kIemNativeGstRegUse_Calculation
+            if (   a_enmIntendedUse == kIemNativeGstRegUse_Calculation
                 && (  ~pReNative->Core.bmHstRegs
                     & ~pReNative->Core.bmHstRegsWithGstShadow
                     & (~IEMNATIVE_REG_FIXED_MASK & IEMNATIVE_HST_GREG_MASK)))
             {
-                uint8_t const idxRegNew = iemNativeRegAllocTmpEx(pReNative, poff, fRegMask);
+                uint8_t const idxRegNew = iemNativeRegAllocTmpEx(pReNative, poff, a_fRegMask);
 
                 *poff = iemNativeEmitLoadGprFromGpr(pReNative, *poff, idxRegNew, idxReg);
@@ -3645,12 +3641,12 @@
             /* If the current register matches the restrictions, go ahead and allocate
                it for the caller. */
-            else if (fRegMask & RT_BIT_32(idxReg))
+            else if (a_fRegMask & RT_BIT_32(idxReg))
             {
                 pReNative->Core.bmHstRegs |= RT_BIT_32(idxReg);
                 pReNative->Core.aHstRegs[idxReg].enmWhat = kIemNativeWhat_Tmp;
                 pReNative->Core.aHstRegs[idxReg].idxVar  = UINT8_MAX;
-                if (enmIntendedUse != kIemNativeGstRegUse_Calculation)
-                    Log12(("iemNativeRegAllocTmpForGuestReg: Reusing %s for guest %s %s\n",
-                           g_apszIemNativeHstRegNames[idxReg], g_aGstShadowInfo[enmGstReg].pszName, s_pszIntendedUse[enmIntendedUse]));
+                if RT_CONSTEXPR_IF(a_enmIntendedUse != kIemNativeGstRegUse_Calculation)
+                    Log12(("iemNativeRegAllocTmpForGuestReg: Reusing %s for guest %s %s\n", g_apszIemNativeHstRegNames[idxReg],
+                           g_aGstShadowInfo[enmGstReg].pszName, s_pszIntendedUse[a_enmIntendedUse]));
                 else
                 {
@@ -3665,15 +3661,15 @@
             else
             {
-                Assert(fNoVolatileRegs);
-                uint8_t const idxRegNew = iemNativeRegAllocTmpEx(pReNative, poff, fRegMask & ~RT_BIT_32(idxReg),
-                                                                    !fNoVolatileRegs
-                                                                 && enmIntendedUse == kIemNativeGstRegUse_Calculation);
+                Assert(!(a_fRegMask & IEMNATIVE_CALL_VOLATILE_GREG_MASK));
+                uint8_t const idxRegNew = iemNativeRegAllocTmpEx(pReNative, poff, a_fRegMask & ~RT_BIT_32(idxReg),
+                                                                    (a_fRegMask & IEMNATIVE_CALL_VOLATILE_GREG_MASK)
+                                                                 && a_enmIntendedUse == kIemNativeGstRegUse_Calculation);
                 *poff = iemNativeEmitLoadGprFromGpr(pReNative, *poff, idxRegNew, idxReg);
-                if (enmIntendedUse != kIemNativeGstRegUse_Calculation)
+                if RT_CONSTEXPR_IF(a_enmIntendedUse != kIemNativeGstRegUse_Calculation)
                 {
                     iemNativeRegTransferGstRegShadowing(pReNative, idxReg, idxRegNew, enmGstReg, *poff);
                     Log12(("iemNativeRegAllocTmpForGuestReg: Transfering %s to %s for guest %s %s\n",
                            g_apszIemNativeHstRegNames[idxReg], g_apszIemNativeHstRegNames[idxRegNew],
-                           g_aGstShadowInfo[enmGstReg].pszName, s_pszIntendedUse[enmIntendedUse]));
+                           g_aGstShadowInfo[enmGstReg].pszName, s_pszIntendedUse[a_enmIntendedUse]));
                 }
                 else
@@ -3692,21 +3688,21 @@
              * guest shadow copy assignment to the new register.
              */
-            AssertMsg(   enmIntendedUse != kIemNativeGstRegUse_ForUpdate
-                      && enmIntendedUse != kIemNativeGstRegUse_ForFullWrite,
-                      ("This shouldn't happen: idxReg=%d enmGstReg=%d enmIntendedUse=%s\n",
-                       idxReg, enmGstReg, s_pszIntendedUse[enmIntendedUse]));
+            AssertMsg(   a_enmIntendedUse != kIemNativeGstRegUse_ForUpdate
+                      && a_enmIntendedUse != kIemNativeGstRegUse_ForFullWrite,
+                      ("This shouldn't happen: idxReg=%d enmGstReg=%d a_enmIntendedUse=%s\n",
+                       idxReg, enmGstReg, s_pszIntendedUse[a_enmIntendedUse]));
 
             /** @todo share register for readonly access. */
-            uint8_t const idxRegNew = iemNativeRegAllocTmpEx(pReNative, poff, fRegMask,
-                                                             enmIntendedUse == kIemNativeGstRegUse_Calculation);
-
-            if (enmIntendedUse != kIemNativeGstRegUse_ForFullWrite)
+            uint8_t const idxRegNew = iemNativeRegAllocTmpEx(pReNative, poff, a_fRegMask,
+                                                             a_enmIntendedUse == kIemNativeGstRegUse_Calculation);
+
+            if RT_CONSTEXPR_IF(a_enmIntendedUse != kIemNativeGstRegUse_ForFullWrite)
                 *poff = iemNativeEmitLoadGprFromGpr(pReNative, *poff, idxRegNew, idxReg);
 
-            if (   enmIntendedUse != kIemNativeGstRegUse_ForUpdate
-                && enmIntendedUse != kIemNativeGstRegUse_ForFullWrite)
+            if RT_CONSTEXPR_IF(   a_enmIntendedUse != kIemNativeGstRegUse_ForUpdate
+                               && a_enmIntendedUse != kIemNativeGstRegUse_ForFullWrite)
                 Log12(("iemNativeRegAllocTmpForGuestReg: Duplicated %s for guest %s into %s for %s\n",
                        g_apszIemNativeHstRegNames[idxReg], g_aGstShadowInfo[enmGstReg].pszName,
-                       g_apszIemNativeHstRegNames[idxRegNew], s_pszIntendedUse[enmIntendedUse]));
+                       g_apszIemNativeHstRegNames[idxRegNew], s_pszIntendedUse[a_enmIntendedUse]));
             else
             {
@@ -3714,9 +3710,9 @@
                 Log12(("iemNativeRegAllocTmpForGuestReg: Moved %s for guest %s into %s for %s\n",
                        g_apszIemNativeHstRegNames[idxReg], g_aGstShadowInfo[enmGstReg].pszName,
-                       g_apszIemNativeHstRegNames[idxRegNew], s_pszIntendedUse[enmIntendedUse]));
+                       g_apszIemNativeHstRegNames[idxRegNew], s_pszIntendedUse[a_enmIntendedUse]));
             }
             idxReg = idxRegNew;
         }
-        Assert(RT_BIT_32(idxReg) & fRegMask); /* See assumption in fNoVolatileRegs docs. */
+        Assert(RT_BIT_32(idxReg) & a_fRegMask); /* See assumption in fNoVolatileRegs docs. */
 
 #ifdef VBOX_STRICT
@@ -3727,48 +3723,49 @@
 #ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
         /** @todo r=aeichner Implement for registers other than GPR as well. */
-        if (   (   enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
-                || enmIntendedUse == kIemNativeGstRegUse_ForUpdate)
-            && (   (   enmGstReg >= kIemNativeGstReg_GprFirst
+        if RT_CONSTEXPR_IF(   a_enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
+                           || a_enmIntendedUse == kIemNativeGstRegUse_ForUpdate)
+            if (   (   enmGstReg >= kIemNativeGstReg_GprFirst
                     && enmGstReg <= kIemNativeGstReg_GprLast)
-                || enmGstReg == kIemNativeGstReg_MxCsr))
+                || enmGstReg == kIemNativeGstReg_MxCsr)
+            {
+# ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+                iemNativeDbgInfoAddNativeOffset(pReNative, *poff);
+                iemNativeDbgInfoAddGuestRegDirty(pReNative, false /*fSimdReg*/, enmGstReg, idxReg);
+# endif
+                pReNative->Core.bmGstRegShadowDirty |= RT_BIT_64(enmGstReg);
+            }
+#endif
+
+        return idxReg;
+    }
+
+    /*
+     * Allocate a new register, load it with the guest value and designate it as a copy of the
+     */
+    uint8_t const idxRegNew = iemNativeRegAllocTmpEx(pReNative, poff, a_fRegMask,
+                                                     a_enmIntendedUse == kIemNativeGstRegUse_Calculation);
+
+    if RT_CONSTEXPR_IF(a_enmIntendedUse != kIemNativeGstRegUse_ForFullWrite)
+        *poff = iemNativeEmitLoadGprWithGstShadowReg(pReNative, *poff, idxRegNew, enmGstReg);
+
+    if RT_CONSTEXPR_IF(a_enmIntendedUse != kIemNativeGstRegUse_Calculation)
+        iemNativeRegMarkAsGstRegShadow(pReNative, idxRegNew, enmGstReg, *poff);
+    Log12(("iemNativeRegAllocTmpForGuestReg: Allocated %s for guest %s %s\n",
+           g_apszIemNativeHstRegNames[idxRegNew], g_aGstShadowInfo[enmGstReg].pszName, s_pszIntendedUse[a_enmIntendedUse]));
+
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    /** @todo r=aeichner Implement for registers other than GPR as well. */
+    if RT_CONSTEXPR_IF(   a_enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
+                       || a_enmIntendedUse == kIemNativeGstRegUse_ForUpdate)
+        if (   (   enmGstReg >= kIemNativeGstReg_GprFirst
+                && enmGstReg <= kIemNativeGstReg_GprLast)
+            || enmGstReg == kIemNativeGstReg_MxCsr)
         {
 # ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
             iemNativeDbgInfoAddNativeOffset(pReNative, *poff);
-            iemNativeDbgInfoAddGuestRegDirty(pReNative, false /*fSimdReg*/, enmGstReg, idxReg);
+            iemNativeDbgInfoAddGuestRegDirty(pReNative, false /*fSimdReg*/, enmGstReg, idxRegNew);
 # endif
             pReNative->Core.bmGstRegShadowDirty |= RT_BIT_64(enmGstReg);
         }
-#endif
-
-        return idxReg;
-    }
-
-    /*
-     * Allocate a new register, load it with the guest value and designate it as a copy of the
-     */
-    uint8_t const idxRegNew = iemNativeRegAllocTmpEx(pReNative, poff, fRegMask, enmIntendedUse == kIemNativeGstRegUse_Calculation);
-
-    if (enmIntendedUse != kIemNativeGstRegUse_ForFullWrite)
-        *poff = iemNativeEmitLoadGprWithGstShadowReg(pReNative, *poff, idxRegNew, enmGstReg);
-
-    if (enmIntendedUse != kIemNativeGstRegUse_Calculation)
-        iemNativeRegMarkAsGstRegShadow(pReNative, idxRegNew, enmGstReg, *poff);
-    Log12(("iemNativeRegAllocTmpForGuestReg: Allocated %s for guest %s %s\n",
-           g_apszIemNativeHstRegNames[idxRegNew], g_aGstShadowInfo[enmGstReg].pszName, s_pszIntendedUse[enmIntendedUse]));
-
-#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
-    /** @todo r=aeichner Implement for registers other than GPR as well. */
-    if (   (   enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
-            || enmIntendedUse == kIemNativeGstRegUse_ForUpdate)
-        && (   (   enmGstReg >= kIemNativeGstReg_GprFirst
-                && enmGstReg <= kIemNativeGstReg_GprLast)
-            || enmGstReg == kIemNativeGstReg_MxCsr))
-    {
-# ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
-        iemNativeDbgInfoAddNativeOffset(pReNative, *poff);
-        iemNativeDbgInfoAddGuestRegDirty(pReNative, false /*fSimdReg*/, enmGstReg, idxRegNew);
-# endif
-        pReNative->Core.bmGstRegShadowDirty |= RT_BIT_64(enmGstReg);
-    }
 #endif
 
@@ -3793,33 +3790,90 @@
  *                          the request.
  * @param   enmGstReg       The guest register that will is to be updated.
- * @param   enmIntendedUse How the caller will be using the host register.
- * @param   fNoVolatileRegs Set if no volatile register allowed, clear if any
+ * @param   a_enmIntendedUse How the caller will be using the host register.
+ * @param   a_fNonVolatileRegs Set if no volatile register allowed, clear if any
  *                          register is okay (default).  The ASSUMPTION here is
  *                          that the caller has already flushed all volatile
  *                          registers, so this is only applied if we allocate a
 *                          new register.
- * @param   fSkipLivenessAssert     Hack for liveness input validation of EFLAGS.
 * @sa      iemNativeRegAllocTmpForGuestRegIfAlreadyPresent
 */
-DECL_HIDDEN_THROW(uint8_t)
-iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg,
-                                IEMNATIVEGSTREGUSE enmIntendedUse /*= kIemNativeGstRegUse_ReadOnly*/,
-                                bool fNoVolatileRegs /*= false*/, bool fSkipLivenessAssert /*= false*/)
+template<IEMNATIVEGSTREGUSE const a_enmIntendedUse, bool const a_fNonVolatileRegs>
+DECL_FORCE_INLINE_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegInt(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
 {
 #ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
-    AssertMsg(   fSkipLivenessAssert
-              || pReNative->idxCurCall == 0
+    AssertMsg(   pReNative->idxCurCall == 0
               || enmGstReg == kIemNativeGstReg_Pc
-              || (enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
+              || (a_enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
                   ? IEMLIVENESS_STATE_IS_CLOBBER_EXPECTED(iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg))
-                  : enmIntendedUse == kIemNativeGstRegUse_ForUpdate
+                  : a_enmIntendedUse == kIemNativeGstRegUse_ForUpdate
                   ? IEMLIVENESS_STATE_IS_MODIFY_EXPECTED( iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg))
                   : IEMLIVENESS_STATE_IS_INPUT_EXPECTED(  iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg)) ),
              ("%s - %u\n", g_aGstShadowInfo[enmGstReg].pszName, iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg)));
 #endif
-    RT_NOREF(fSkipLivenessAssert);
-
-    return iemNativeRegAllocTmpForGuestRegCommon(pReNative, poff, enmGstReg, enmIntendedUse, fNoVolatileRegs);
-}
+
+    if RT_CONSTEXPR_IF(!a_fNonVolatileRegs)
+        return iemNativeRegAllocTmpForGuestRegCommon<a_enmIntendedUse,
+                                                       IEMNATIVE_HST_GREG_MASK
+                                                     & ~IEMNATIVE_REG_FIXED_MASK>(pReNative, poff, enmGstReg);
+    else /* keep else, is required by MSC */
+        return iemNativeRegAllocTmpForGuestRegCommon<a_enmIntendedUse,
+                                                       IEMNATIVE_HST_GREG_MASK
+                                                     & ~IEMNATIVE_REG_FIXED_MASK
+                                                     & ~IEMNATIVE_CALL_VOLATILE_GREG_MASK>(pReNative, poff, enmGstReg);
+}
+
+/* Variants including volatile registers: */
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegReadOnly(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeRegAllocTmpForGuestRegInt<kIemNativeGstRegUse_ReadOnly, false>(pReNative, poff, enmGstReg);
+}
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegUpdate(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeRegAllocTmpForGuestRegInt<kIemNativeGstRegUse_ForUpdate, false>(pReNative, poff, enmGstReg);
+}
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegFullWrite(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeRegAllocTmpForGuestRegInt<kIemNativeGstRegUse_ForFullWrite, false>(pReNative, poff, enmGstReg);
+}
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegCalculation(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeRegAllocTmpForGuestRegInt<kIemNativeGstRegUse_Calculation, false>(pReNative, poff, enmGstReg);
+}
+
+/* Variants excluding any volatile registers: */
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegReadOnlyNoVolatile(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeRegAllocTmpForGuestRegInt<kIemNativeGstRegUse_ReadOnly, true>(pReNative, poff, enmGstReg);
+}
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegUpdateNoVolatile(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeRegAllocTmpForGuestRegInt<kIemNativeGstRegUse_ForUpdate, true>(pReNative, poff, enmGstReg);
+}
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegFullWriteNoVolatile(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeRegAllocTmpForGuestRegInt<kIemNativeGstRegUse_ForFullWrite, true>(pReNative, poff, enmGstReg);
+}
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestRegCalculationNoVolatile(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeRegAllocTmpForGuestRegInt<kIemNativeGstRegUse_Calculation, true>(pReNative, poff, enmGstReg);
+}
+
 
 
@@ -3832,7 +3886,8 @@
  * kIemNativeGstReg_EFlags as argument.
  */
-DECL_HIDDEN_THROW(uint8_t)
-iemNativeRegAllocTmpForGuestEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREGUSE enmIntendedUse,
-                                   uint64_t fRead, uint64_t fWrite /*= 0*/, uint64_t fPotentialCall /*= 0*/)
+template<IEMNATIVEGSTREGUSE const a_enmIntendedUse>
+DECL_FORCE_INLINE_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t fRead,
+                                   uint64_t fWrite /*= 0*/, uint64_t fPotentialCall /*= 0*/)
 {
     if (pReNative->idxCurCall != 0 && (fRead || fWrite /*|| fPotentialCall*/))
@@ -3863,7 +3918,33 @@
     }
     RT_NOREF(fPotentialCall);
-    return iemNativeRegAllocTmpForGuestRegCommon(pReNative, poff, kIemNativeGstReg_EFlags,
-                                                 enmIntendedUse, false /*fNoVolatileRegs*/);
-}
+
+    AssertCompile(a_enmIntendedUse == kIemNativeGstRegUse_ReadOnly || a_enmIntendedUse == kIemNativeGstRegUse_ForUpdate);
+    if RT_CONSTEXPR_IF(a_enmIntendedUse == kIemNativeGstRegUse_ReadOnly)
+        return iemNativeRegAllocTmpForGuestRegCommon<kIemNativeGstRegUse_ReadOnly,
+                                                       IEMNATIVE_CALL_VOLATILE_GREG_MASK
+                                                     & IEMNATIVE_HST_GREG_MASK
+                                                     & ~IEMNATIVE_REG_FIXED_MASK>(pReNative, poff, kIemNativeGstReg_EFlags);
+    else /* keep else, is required by MSC */
+        return iemNativeRegAllocTmpForGuestRegCommon<kIemNativeGstRegUse_ForUpdate,
+                                                       IEMNATIVE_CALL_VOLATILE_GREG_MASK
+                                                     & IEMNATIVE_HST_GREG_MASK
+                                                     & ~IEMNATIVE_REG_FIXED_MASK>(pReNative, poff, kIemNativeGstReg_EFlags);
+}
+
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestEFlagsReadOnly(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
+                                           uint64_t fRead, uint64_t fWrite /*= 0*/, uint64_t fPotentialCall /*= 0*/)
+{
+    return iemNativeRegAllocTmpForGuestEFlags<kIemNativeGstRegUse_ReadOnly>(pReNative, poff, fRead, fWrite, fPotentialCall);
+}
+
+DECL_HIDDEN_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestEFlagsForUpdate(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t fRead,
+                                            uint64_t fWrite /*= 0*/, uint64_t fPotentialCall /*= 0*/)
+{
+    return iemNativeRegAllocTmpForGuestEFlags<kIemNativeGstRegUse_ForUpdate>(pReNative, poff, fRead, fWrite, fPotentialCall);
+}
+
 #endif
 
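The reworked worker leans on RT_CONSTEXPR_IF, which by its name and the "Using 'if constexpr' forces code elimination" comment in IEMAllN8veRecompFuncs.h appears to wrap C++17 if constexpr; the "/* keep else, is required by MSC */" notes suggest MSVC wants both branches spelled out so every instantiation visibly returns. A small sketch of that shape, under those assumptions:

    // Sketch of the compile-time mask selection, assuming RT_CONSTEXPR_IF
    // expands to C++17 'if constexpr' (inferred from the name, not verified).
    #include <cstdint>

    template<bool a_fNonVolatileRegs>
    uint32_t pickRegMask(uint32_t fAllRegs, uint32_t fFixed, uint32_t fVolatile)
    {
        if constexpr (!a_fNonVolatileRegs)
            return fAllRegs & ~fFixed;              // volatile registers allowed
        else /* keep else: lets MSC see a return on every path */
            return fAllRegs & ~fFixed & ~fVolatile; // exclude call-volatile regs
    }

    // pickRegMask<false>(m, f, v) and pickRegMask<true>(m, f, v) each compile
    // down to a single expression; the discarded branch is dropped at instantiation.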
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

--- r106203
+++ r106315
@@ -2230,20 +2230,19 @@
 DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpImm(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t uImm,
                                                     bool fPreferVolatile = true);
-DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
-                                                            IEMNATIVEGSTREG enmGstReg,
-                                                            IEMNATIVEGSTREGUSE enmIntendedUse = kIemNativeGstRegUse_ReadOnly,
-                                                            bool fNoVolatileRegs = false, bool fSkipLivenessAssert = false);
+
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegReadOnly(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegUpdate(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegFullWrite(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegCalculation(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegReadOnlyNoVolatile(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegUpdateNoVolatile(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegFullWriteNoVolatile(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegCalculationNoVolatile(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg);
+
 #if defined(IEMNATIVE_WITH_LIVENESS_ANALYSIS) && defined(VBOX_STRICT)
-DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
-                                                               IEMNATIVEGSTREGUSE enmIntendedUse, uint64_t fRead,
-                                                               uint64_t fWrite = 0, uint64_t fPotentialCall = 0);
-#else
-DECL_FORCE_INLINE_THROW(uint8_t)
-iemNativeRegAllocTmpForGuestEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREGUSE enmIntendedUse,
-                                   uint64_t fRead, uint64_t fWrite = 0, uint64_t fPotentialCall = 0)
-{
-    RT_NOREF(fRead, fWrite, fPotentialCall);
-    return iemNativeRegAllocTmpForGuestReg(pReNative, poff, kIemNativeGstReg_EFlags, enmIntendedUse);
-}
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestEFlagsReadOnly(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
+                                                                       uint64_t fRead, uint64_t fWrite = 0, uint64_t fPotentialCall = 0);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestEFlagsForUpdate(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
+                                                                        uint64_t fRead, uint64_t fWrite = 0, uint64_t fPotentialCall = 0);
 #endif
 
@@ -2845,4 +2844,9 @@
 *********************************************************************************************************************************/
 
+#ifdef RT_ARCH_ARM64
+# include <iprt/armv8.h>
+#endif
+
+
 /**
  * Marks host register @a idxHstReg as containing a shadow copy of guest
@@ -3066,4 +3070,74 @@
 
 
+/**
+ * Allocates a temporary host general purpose register for keeping a guest
+ * register value.
+ *
+ * Since we may already have a register holding the guest register value,
+ * code will be emitted to do the loading if that's not the case. Code may also
+ * be emitted if we have to free up a register to satify the request.
+ *
+ * @returns The host register number; throws VBox status code on failure, so no
+ *          need to check the return value.
+ * @param   pReNative       The native recompile state.
+ * @param   poff            Pointer to the variable with the code buffer
+ *                          position. This will be update if we need to move a
+ *                          variable from register to stack in order to satisfy
+ *                          the request.
+ * @param   enmGstReg       The guest register that will is to be updated.
+ * @param   enmIntendedUse  How the caller will be using the host register.
+ * @param   fNoVolatileRegs Set if no volatile register allowed, clear if any
+ *                          register is okay (default).  The ASSUMPTION here is
+ *                          that the caller has already flushed all volatile
+ *                          registers, so this is only applied if we allocate a
+ *                          new register.
+ * @sa      iemNativeRegAllocTmpForGuestEFlags
+ *          iemNativeRegAllocTmpForGuestRegIfAlreadyPresent
+ *          iemNativeRegAllocTmpForGuestRegInt
+ */
+DECL_FORCE_INLINE_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg,
+                                IEMNATIVEGSTREGUSE const enmIntendedUse = kIemNativeGstRegUse_ReadOnly,
+                                bool const fNoVolatileRegs = false)
+{
+    if (enmIntendedUse == kIemNativeGstRegUse_ReadOnly)
+        return !fNoVolatileRegs
+             ? iemNativeRegAllocTmpForGuestRegReadOnly(pReNative, poff, enmGstReg)
+             : iemNativeRegAllocTmpForGuestRegReadOnlyNoVolatile(pReNative, poff, enmGstReg);
+    if (enmIntendedUse == kIemNativeGstRegUse_ForUpdate)
+        return !fNoVolatileRegs
+             ? iemNativeRegAllocTmpForGuestRegUpdate(pReNative, poff, enmGstReg)
+             : iemNativeRegAllocTmpForGuestRegUpdateNoVolatile(pReNative, poff, enmGstReg);
+    if (enmIntendedUse == kIemNativeGstRegUse_ForFullWrite)
+        return !fNoVolatileRegs
+             ? iemNativeRegAllocTmpForGuestRegFullWrite(pReNative, poff, enmGstReg)
+             : iemNativeRegAllocTmpForGuestRegFullWriteNoVolatile(pReNative, poff, enmGstReg);
+    Assert(enmIntendedUse == kIemNativeGstRegUse_Calculation);
+    return !fNoVolatileRegs
+         ? iemNativeRegAllocTmpForGuestRegCalculation(pReNative, poff, enmGstReg)
+         : iemNativeRegAllocTmpForGuestRegCalculationNoVolatile(pReNative, poff, enmGstReg);
+}
+
+#if !defined(IEMNATIVE_WITH_LIVENESS_ANALYSIS) || !defined(VBOX_STRICT)
+
+DECL_FORCE_INLINE_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestEFlagsReadOnly(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t fRead,
+                                           uint64_t fWrite = 0, uint64_t fPotentialCall = 0)
+{
+    RT_NOREF(fRead, fWrite, fPotentialCall);
+    return iemNativeRegAllocTmpForGuestRegReadOnly(pReNative, poff, kIemNativeGstReg_EFlags);
+}
+
+DECL_FORCE_INLINE_THROW(uint8_t)
+iemNativeRegAllocTmpForGuestEFlagsForUpdate(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t fRead,
+                                            uint64_t fWrite = 0, uint64_t fPotentialCall = 0)
+{
+    RT_NOREF(fRead, fWrite, fPotentialCall);
+    return iemNativeRegAllocTmpForGuestRegUpdate(pReNative, poff, kIemNativeGstReg_EFlags);
+}
+
+#endif
+
+
 
 /*********************************************************************************************************************************
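The doc comment added to the header above describes the allocator's contract: reuse an existing shadow copy when a host register already holds the guest value, otherwise allocate a register and emit a load. A toy model of just that reuse-or-load decision (all names hypothetical; the real worker also handles eviction, intent-specific duplication, and dirty tracking):

    // Toy model of guest-register shadowing; not the actual allocator.
    #include <cstdint>

    struct ToyState
    {
        uint64_t bmGstShadowed    = 0;     // which guest regs have a shadow copy
        uint8_t  aidxGstToHst[64] = {};    // guest reg -> host reg holding it
    };

    // Returns a host register holding the guest register's value, "emitting"
    // a load only when no shadow copy exists yet.
    inline uint8_t toyAllocForGuestReg(ToyState &rState, unsigned idxGstReg,
                                       uint8_t (*pfnAllocHstReg)(ToyState &),
                                       void (*pfnEmitLoad)(uint8_t, unsigned))
    {
        if (rState.bmGstShadowed & (uint64_t(1) << idxGstReg))
            return rState.aidxGstToHst[idxGstReg];          // reuse the shadow

        uint8_t const idxHstReg = pfnAllocHstReg(rState);   // grab a host reg
        pfnEmitLoad(idxHstReg, idxGstReg);                  // load guest value
        rState.bmGstShadowed |= uint64_t(1) << idxGstReg;   // record the shadow
        rState.aidxGstToHst[idxGstReg] = idxHstReg;
        return idxHstReg;
    }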