VirtualBox

Changeset 106090 in vbox for trunk/src/VBox/VMM


Timestamp: Sep 19, 2024 9:13:54 AM
Author: vboxsync
Message: VMM/IEM: More liveness work for delayed eflags updating. bugref:10720 bugref:10372

Location: trunk/src/VBox/VMM
Files: 7 edited

  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veLiveness.h

    r106078 r106090  
    6565        IEMLIVENESSENTRY LiveState       = { { 0, 0, 0, 0 } }; \
    6666        IEMLIVENESSBIT   LiveMask        = { 0 }; \
    67         bool             fDoneXpctOrCall = false
     67        bool             fNoInherit      = false
    6868#endif
    6969
     
    8585#else
    8686# define IEM_LIVENESS_MARK_POTENTIAL_CALL() do { \
    87             if (!fDoneXpctOrCall) \
    88             { \
    89                 LiveState.aBits[IEMLIVENESS_BIT_READ].bm64 |= pIncoming->aBits[IEMLIVENESS_BIT_READ].bm64 & ~LiveMask.bm64; \
    90                 LiveMask.bm64   |= IEMLIVENESSBIT_MASK; \
    91                 fDoneXpctOrCall  = true; \
    92             } \
    9387            LiveState.aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 |= IEMLIVENESSBIT_MASK; \
    9488        } while (0)
    9589# define IEM_LIVENESS_MARK_CALL() do { \
    96             if (!fDoneXpctOrCall) \
    97             { \
    98                 LiveState.aBits[IEMLIVENESS_BIT_READ].bm64 |= pIncoming->aBits[IEMLIVENESS_BIT_READ].bm64 & ~LiveMask.bm64; \
    99                 LiveMask.bm64   |= IEMLIVENESSBIT_MASK; \
    100                 fDoneXpctOrCall  = true; \
    101             } \
    10290            LiveState.aBits[IEMLIVENESS_BIT_CALL].bm64 |= IEMLIVENESSBIT_MASK; \
     91            LiveMask.bm64                              |= IEMLIVENESSBIT_MASK; \
     92            fNoInherit                                  = true; /* full mask */ \
    10393        } while (0)
    10494#endif
     
    285275#else
    286276# define IEM_MC_END() \
    287         /* Combine the incoming state with what we've accumulated in this block. */ \
    288         /* We can help the compiler by skipping OR'ing when having applied XPCT_OR_CALL, */ \
    289         /* since that already imports all the incoming state. Saves a lot with cl.exe. */ \
    290         if (!fDoneXpctOrCall) \
     277        /* Use the mask to effect inheriting. */ \
     278        if (!fNoInherit) \
    291279        { \
     280            uint64_t const fInhMask = ~LiveMask.bm64; \
    292281            pOutgoing->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 = LiveState.aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 \
    293                                                          | (~LiveMask.bm64 & pIncoming->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64); \
    294             pOutgoing->aBits[IEMLIVENESS_BIT_READ].bm64  = LiveState.aBits[IEMLIVENESS_BIT_READ].bm64 \
    295                                                          | (~LiveMask.bm64 & pIncoming->aBits[IEMLIVENESS_BIT_READ].bm64); \
     282                                                                | (pIncoming->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 & fInhMask); \
     283            pOutgoing->aBits[IEMLIVENESS_BIT_READ ].bm64 = LiveState.aBits[IEMLIVENESS_BIT_READ ].bm64 \
     284                                                       | (pIncoming->aBits[IEMLIVENESS_BIT_READ ].bm64 & fInhMask); \
    296285            pOutgoing->aBits[IEMLIVENESS_BIT_WRITE].bm64 = LiveState.aBits[IEMLIVENESS_BIT_WRITE].bm64 \
    297                                                          | (~LiveMask.bm64 & pIncoming->aBits[IEMLIVENESS_BIT_WRITE].bm64); \
    298             pOutgoing->aBits[IEMLIVENESS_BIT_CALL].bm64  = LiveState.aBits[IEMLIVENESS_BIT_CALL].bm64 \
    299                                                          | (~LiveMask.bm64 & pIncoming->aBits[IEMLIVENESS_BIT_CALL].bm64); \
     286                                                       | (pIncoming->aBits[IEMLIVENESS_BIT_WRITE].bm64 & fInhMask); \
     287            pOutgoing->aBits[IEMLIVENESS_BIT_CALL ].bm64 = LiveState.aBits[IEMLIVENESS_BIT_CALL ].bm64 \
     288                                                       | (pIncoming->aBits[IEMLIVENESS_BIT_CALL ].bm64 & fInhMask); \
    300289        } \
    301290        else \
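
The net effect of this hunk: instead of eagerly OR'ing the incoming READ bits into LiveState whenever a (potential) call is marked (the old fDoneXpctOrCall path), IEM_MC_END() now does one inherit step driven by the accumulated LiveMask, and IEM_LIVENESS_MARK_CALL merely sets fNoInherit because it covers the full mask. A minimal sketch of that merge, using simplified stand-in types rather than the real IEMLIVENESSENTRY/IEMLIVENESSBIT structures (the fNoInherit branch here is an assumption, as the matching else body lies outside the displayed hunk):

    #include <stdbool.h>
    #include <stdint.h>

    /* Simplified stand-in for the four extended-layout bitmaps (illustration only). */
    typedef struct MINILIVENESS
    {
        uint64_t fPotentialCall;
        uint64_t fRead;
        uint64_t fWrite;
        uint64_t fCall;
    } MINILIVENESS;

    /* Bits set in fLiveMask were decided by the current instruction and come from
       pLiveState; everything else is inherited from pIncoming.  When an
       unconditional call covered the full mask, fNoInherit skips the inherit step
       (taking pLiveState as-is here is an assumption). */
    static void iemMcEndMergeSketch(MINILIVENESS *pOutgoing, const MINILIVENESS *pIncoming,
                                    const MINILIVENESS *pLiveState, uint64_t fLiveMask, bool fNoInherit)
    {
        if (!fNoInherit)
        {
            uint64_t const fInhMask   = ~fLiveMask;
            pOutgoing->fPotentialCall = pLiveState->fPotentialCall | (pIncoming->fPotentialCall & fInhMask);
            pOutgoing->fRead          = pLiveState->fRead          | (pIncoming->fRead          & fInhMask);
            pOutgoing->fWrite         = pLiveState->fWrite         | (pIncoming->fWrite         & fInhMask);
            pOutgoing->fCall          = pLiveState->fCall          | (pIncoming->fCall          & fInhMask);
        }
        else
            *pOutgoing = *pLiveState;
    }
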
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompBltIn.cpp

    r106078 r106090  
    190190IEM_DECL_IEMNATIVELIVENESSFUNC_DEF(iemNativeLivenessFunc_BltIn_DeferToCImpl0)
    191191{
    192     IEM_LIVENESS_RAW_INIT_WITH_CALL_AND_POTENTIAL_CALL(pOutgoing, pIncoming);
     192    IEM_LIVENESS_RAW_INIT_WITH_CALL(pOutgoing, pIncoming);
    193193    RT_NOREF(pCallEntry);
    194194}
     
    393393    IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    394394    IEM_LIVENESS_RAW_EFLAGS_ONE_INPUT(pOutgoing, fEflOther);
     395    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    395396    RT_NOREF(pCallEntry);
    396397}
     
    411412{
    412413    IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
     414    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    413415    RT_NOREF(pCallEntry);
    414416}
     
    429431    IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    430432    IEM_LIVENESS_RAW_EFLAGS_ONE_INPUT(pOutgoing, fEflOther);
     433    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    431434    RT_NOREF(pCallEntry);
    432435}
     
    455458{
    456459    IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
     460    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    457461    RT_NOREF(pCallEntry);
    458462}
     
    17911795    IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    17921796    LIVENESS_CHECK_CS_LIM(pOutgoing);
     1797    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    17931798    RT_NOREF(pCallEntry);
    17941799}
     
    18191824    LIVENESS_CHECK_CS_LIM(pOutgoing);
    18201825    LIVENESS_CHECK_OPCODES(pOutgoing);
     1826    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    18211827    RT_NOREF(pCallEntry);
    18221828}
     
    18451851    IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    18461852    LIVENESS_CHECK_OPCODES(pOutgoing);
     1853    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    18471854    RT_NOREF(pCallEntry);
    18481855}
     
    18731880    LIVENESS_CONSIDER_CS_LIM_CHECKING(pOutgoing);
    18741881    LIVENESS_CHECK_OPCODES(pOutgoing);
     1882    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    18751883    RT_NOREF(pCallEntry);
    18761884}
     
    19111919    LIVENESS_CHECK_PC_AFTER_BRANCH(pOutgoing, pCallEntry);
    19121920    LIVENESS_CHECK_OPCODES(pOutgoing);
     1921    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    19131922    RT_NOREF(pCallEntry);
    19141923}
     
    19431952    LIVENESS_CHECK_PC_AFTER_BRANCH(pOutgoing, pCallEntry);
    19441953    LIVENESS_CHECK_OPCODES(pOutgoing);
     1954    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    19451955    RT_NOREF(pCallEntry);
    19461956}
     
    19781988    LIVENESS_CHECK_PC_AFTER_BRANCH(pOutgoing, pCallEntry);
    19791989    LIVENESS_CHECK_OPCODES(pOutgoing);
     1990    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    19801991    RT_NOREF(pCallEntry);
    19811992}
     
    20162027    LIVENESS_LOAD_TLB_AFTER_BRANCH(pOutgoing, pCallEntry);
    20172028    LIVENESS_CHECK_OPCODES(pOutgoing);
     2029    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    20182030    RT_NOREF(pCallEntry);
    20192031}
     
    20522064    LIVENESS_LOAD_TLB_AFTER_BRANCH(pOutgoing, pCallEntry);
    20532065    LIVENESS_CHECK_OPCODES(pOutgoing);
     2066    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    20542067    RT_NOREF(pCallEntry);
    20552068}
     
    20902103    LIVENESS_LOAD_TLB_AFTER_BRANCH(pOutgoing, pCallEntry);
    20912104    LIVENESS_CHECK_OPCODES(pOutgoing);
     2105    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    20922106    RT_NOREF(pCallEntry);
    20932107}
     
    21342148    LIVENESS_CHECK_OPCODES(pOutgoing);
    21352149    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
     2150    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    21362151    RT_NOREF(pCallEntry);
    21372152}
     
    21712186    LIVENESS_CHECK_OPCODES(pOutgoing);
    21722187    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
     2188    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    21732189    RT_NOREF(pCallEntry);
    21742190}
     
    22112227    LIVENESS_CHECK_OPCODES(pOutgoing);
    22122228    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
     2229    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    22132230    RT_NOREF(pCallEntry);
    22142231}
     
    22472264    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
    22482265    LIVENESS_CHECK_OPCODES(pOutgoing);
     2266    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    22492267    RT_NOREF(pCallEntry);
    22502268}
     
    22812299    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
    22822300    LIVENESS_CHECK_OPCODES(pOutgoing);
     2301    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    22832302    RT_NOREF(pCallEntry);
    22842303}
     
    23172336    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
    23182337    LIVENESS_CHECK_OPCODES(pOutgoing);
     2338    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    23192339    RT_NOREF(pCallEntry);
    23202340}
     
    23492369    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
    23502370    LIVENESS_CHECK_OPCODES(pOutgoing);
     2371    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    23512372    RT_NOREF(pCallEntry);
    23522373}
     
    23792400    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
    23802401    LIVENESS_CHECK_OPCODES(pOutgoing);
     2402    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    23812403    RT_NOREF(pCallEntry);
    23822404}
     
    24122434    LIVENESS_LOAD_TLB_FOR_NEW_PAGE(pOutgoing, pCallEntry);
    24132435    LIVENESS_CHECK_OPCODES(pOutgoing);
     2436    IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    24142437    RT_NOREF(pCallEntry);
    24152438}
     
    24592482IEM_DECL_IEMNATIVELIVENESSFUNC_DEF(iemNativeLivenessFunc_BltIn_Jump)
    24602483{
    2461     /** @todo This isn't right:    */
    2462     IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);
    2463     RT_NOREF(pCallEntry);
    2464 }
    2465 
     2484    /* We could also use UNUSED here, but this is equivalent (at the moment). */
     2485    IEM_LIVENESS_RAW_INIT_WITH_CALL(pOutgoing, pIncoming);
     2486    RT_NOREF(pCallEntry);
     2487}
     2488
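
The repetitive edits above all follow one pattern: IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL no longer imports the incoming READ state by itself, so every built-in liveness function now closes with IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL once it has recorded its own accesses (and the plain-call cases switch to IEM_LIVENESS_RAW_INIT_WITH_CALL, which stands alone). A schematic example of the resulting shape; the function name is made up for illustration, the macros are the real ones used in the hunks:

    /* Schematic only: the real functions are declared with IEM_DECL_IEMNATIVELIVENESSFUNC_DEF. */
    IEM_DECL_IEMNATIVELIVENESSFUNC_DEF(iemNativeLivenessFunc_BltIn_ExampleCheck)
    {
        IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(pOutgoing, pIncoming);   /* 1. potential call/exception baseline      */
        LIVENESS_CHECK_CS_LIM(pOutgoing);                                  /* 2. record what this built-in reads        */
        LIVENESS_CHECK_OPCODES(pOutgoing);
        IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(pOutgoing, pIncoming); /* 3. new: inherit untouched incoming state  */
        RT_NOREF(pCallEntry);
    }
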
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h

    r106078 r106090  
    58125812        {
    58135813              pLivenessEntry->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64
     5814            & pLivenessEntry->aBits[IEMLIVENESS_BIT_WRITE].bm64
    58145815            & ~(  pLivenessEntry->aBits[IEMLIVENESS_BIT_READ].bm64
    58155816                | pLivenessEntry->aBits[IEMLIVENESS_BIT_CALL].bm64)
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r106078 r106090  
    31733173#ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
    31743174        /*
    3175          * When we have livness information, we use it to kick out all shadowed
     3175         * When we have liveness information, we use it to kick out all shadowed
    31763176         * guest register that will not be needed any more in this TB.  If we're
    31773177         * lucky, this may prevent us from ending up here again.
     
    31853185        {
    31863186            PCIEMLIVENESSENTRY const pLivenessEntry = &pReNative->paLivenessEntries[idxCurCall - 1];
    3187 
    3188 # ifndef IEMLIVENESS_EXTENDED_LAYOUT
    3189             /* Construct a mask of the guest registers in the UNUSED and XCPT_OR_CALL state. */
    3190             AssertCompile(IEMLIVENESS_STATE_UNUSED == 1 && IEMLIVENESS_STATE_XCPT_OR_CALL == 2);
    3191             uint64_t fToFreeMask = pLivenessEntry->Bit0.bm64 ^ pLivenessEntry->Bit1.bm64; /* mask of regs in either UNUSED */
    3192 #else
    3193             /* Construct a mask of the registers not in the read or write state.
    3194                Note! We could skips writes, if they aren't from us, as this is just
    3195                      a hack to prevent trashing registers that have just been written
    3196                      or will be written when we retire the current instruction. */
    3197             uint64_t fToFreeMask = ~pLivenessEntry->aBits[IEMLIVENESS_BIT_READ].bm64
    3198                                  & ~pLivenessEntry->aBits[IEMLIVENESS_BIT_WRITE].bm64
    3199                                  & IEMLIVENESSBIT_MASK;
    3200 #endif
     3187            uint64_t                 fToFreeMask    = IEMLIVENESS_STATE_GET_CAN_BE_FREED_SET(pLivenessEntry);
     3188
    32013189            /* Merge EFLAGS. */
    32023190            uint64_t fTmp = fToFreeMask & (fToFreeMask >> 3);   /* AF2,PF2,CF2,Other2 = AF,PF,CF,Other & OF,SF,ZF,AF */
     
    49444932        {
    49454933            PCIEMLIVENESSENTRY const pLivenessEntry = &pReNative->paLivenessEntries[idxCurCall - 1];
    4946 
    4947 # ifndef IEMLIVENESS_EXTENDED_LAYOUT
    4948             /* Construct a mask of the guest registers in the UNUSED and XCPT_OR_CALL state. */
    4949             AssertCompile(IEMLIVENESS_STATE_UNUSED == 1 && IEMLIVENESS_STATE_XCPT_OR_CALL == 2);
    4950             uint64_t fToFreeMask = pLivenessEntry->Bit0.bm64 ^ pLivenessEntry->Bit1.bm64; /* mask of regs in either UNUSED */
    4951 #else
    4952             /* Construct a mask of the registers not in the read or write state.
    4953                Note! We could skips writes, if they aren't from us, as this is just
    4954                      a hack to prevent trashing registers that have just been written
    4955                      or will be written when we retire the current instruction. */
    4956             uint64_t fToFreeMask = ~pLivenessEntry->aBits[IEMLIVENESS_BIT_READ].bm64
    4957                                  & ~pLivenessEntry->aBits[IEMLIVENESS_BIT_WRITE].bm64
    4958                                  & IEMLIVENESSBIT_MASK;
    4959 #endif
     4934            uint64_t const           fToFreeMask    = IEMLIVENESS_STATE_GET_CAN_BE_FREED_SET(pLivenessEntry);
     4935
    49604936            /* If it matches any shadowed registers. */
    49614937            if (pReNative->Core.bmGstRegShadows & fToFreeMask)
     
    65376513
    65386514    return off;
     6515}
     6516
     6517
     6518/**
     6519 * The default liveness function, matching iemNativeEmitThreadedCall.
     6520 */
     6521IEM_DECL_IEMNATIVELIVENESSFUNC_DEF(iemNativeLivenessFunc_ThreadedCall)
     6522{
     6523    IEM_LIVENESS_RAW_INIT_WITH_CALL(pOutgoing, pIncoming);
     6524    RT_NOREF(pCallEntry);
    65396525}
    65406526
     
    100129998        {
    100139999            PFNIEMNATIVELIVENESSFUNC const pfnLiveness = g_apfnIemNativeLivenessFunctions[pCallEntry->enmFunction];
    10014             if (pfnLiveness)
    10015                 pfnLiveness(pCallEntry, &paLivenessEntries[idxCall], &paLivenessEntries[idxCall - 1]);
    10016             else
    10017                 IEM_LIVENESS_RAW_INIT_WITH_CALL_AND_POTENTIAL_CALL(&paLivenessEntries[idxCall - 1], &paLivenessEntries[idxCall]);
     10000            Assert(pfnLiveness);
     10001            pfnLiveness(pCallEntry, &paLivenessEntries[idxCall], &paLivenessEntries[idxCall - 1]);
    1001810002            pCallEntry--;
    1001910003            idxCall--;
    1002010004        }
    10021 
    10022 # ifdef VBOX_WITH_STATISTICS
    10023         /* Check if there are any EFLAGS optimization to be had here.  This requires someone settings them
    10024            to 'clobbered' rather that 'input'.  */
    10025         /** @todo */
    10026 # endif
    1002710005    }
    1002810006#endif
     
    1014310121                static const char s_achState[] = "CUXI";
    1014410122# else
    10145                 static const char s_achState[] = "UxRrWwMmCcQqKkNn";
     10123                /*                                0123   4567   89ab   cdef */
     10124                /*                                              CCCC   CCCC */
     10125                /*                                       WWWW          WWWW */
     10126                /*                                  RR     RR     RR     RR */
     10127                /*                                 P P    P P    P P    P P */
     10128                static const char s_achState[] = "UxRr" "WwMm" "CcQq" "KkNn";
    1014610129# endif
    1014710130
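
Both register-allocator call sites above now use the shared IEMLIVENESS_STATE_GET_CAN_BE_FREED_SET macro instead of open-coding the mask. In the compact layout the trick is that IEMLIVENESS_STATE_UNUSED == 1 and IEMLIVENESS_STATE_XCPT_OR_CALL == 2 are the only states with exactly one of the two per-register bits set, so Bit0.bm64 ^ Bit1.bm64 flags precisely the registers that can be freed. A small self-contained demonstration of that bit trick (the CLOBBERED == 0 and INPUT == 3 encodings are inferred from the "CUXI" legend and the ARE_STATUS_EFL_TO_BE_CLOBBERED test, i.e. an assumption):

    #include <stdint.h>
    #include <stdio.h>

    /* Compact-layout toy model: each guest register has a 2-bit liveness state,
       stored as one bit in bmBit0 and one bit in bmBit1.
       0 = CLOBBERED, 1 = UNUSED, 2 = XCPT_OR_CALL, 3 = INPUT (0/3 are assumptions). */
    int main(void)
    {
        uint64_t bmBit0 = 0, bmBit1 = 0;
        bmBit0 |= UINT64_C(1) << 0;                               /* reg0: UNUSED       (bits 01) */
        bmBit0 |= UINT64_C(1) << 1; bmBit1 |= UINT64_C(1) << 1;   /* reg1: INPUT        (bits 11) */
        bmBit1 |= UINT64_C(1) << 2;                               /* reg2: XCPT_OR_CALL (bits 10) */
        /* reg3: CLOBBERED (bits 00) - nothing to set. */

        /* IEMLIVENESS_STATE_GET_CAN_BE_FREED_SET: one bit per register in UNUSED or XCPT_OR_CALL. */
        uint64_t const fToFreeMask = bmBit0 ^ bmBit1;
        printf("fToFreeMask = %#llx\n", (unsigned long long)fToFreeMask);  /* 0x5 -> reg0 and reg2 */
        return 0;
    }
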
  • trunk/src/VBox/VMM/VMMAll/IEMAllThrdPython.py

    r106061 r106090  
    35873587                    oOut.write('    iemNativeLivenessFunc_BltIn_%s,\n' % (sFuncNm,))
    35883588                else:
    3589                     oOut.write('    NULL, /*BltIn_%s*/\n' % (sFuncNm,))
     3589                    oOut.write('    iemNativeLivenessFunc_ThreadedCall, /*BltIn_%s*/\n' % (sFuncNm,))
    35903590
    35913591            iThreadedFunction = 1 + len(self.katBltIns);
     
    36033603                            oOut.write('    /*%4u*/ %s,\n' % (iThreadedFunction, sName,));
    36043604                        else:
    3605                             oOut.write('    /*%4u*/ NULL /*%s*/,\n' % (iThreadedFunction, sName,));
     3605                            oOut.write('    /*%4u*/ iemNativeLivenessFunc_ThreadedCall /*%s*/,\n' % (iThreadedFunction, sName,));
    36063606
    36073607            oOut.write(  '};\n'
  • trunk/src/VBox/VMM/VMMR3/IEMR3.cpp

    r106061 r106090  
    738738                        "/IEM/CPU%u/re/NativeEFlagsSkippedLogical", idCpu);
    739739
    740         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.CF updating",       "/IEM/CPU%u/re/NativeLivenessEFlagsCfSkippable", idCpu);
    741         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.PF updating",       "/IEM/CPU%u/re/NativeLivenessEFlagsPfSkippable", idCpu);
    742         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.AF updating",       "/IEM/CPU%u/re/NativeLivenessEFlagsAfSkippable", idCpu);
    743         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.ZF updating",       "/IEM/CPU%u/re/NativeLivenessEFlagsZfSkippable", idCpu);
    744         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.SF updating",       "/IEM/CPU%u/re/NativeLivenessEFlagsSfSkippable", idCpu);
    745         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.OF updating",       "/IEM/CPU%u/re/NativeLivenessEFlagsOfSkippable", idCpu);
    746 
    747         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.CF updating",        "/IEM/CPU%u/re/NativeLivenessEFlagsCfRequired", idCpu);
    748         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.PF updating",        "/IEM/CPU%u/re/NativeLivenessEFlagsPfRequired", idCpu);
    749         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.AF updating",        "/IEM/CPU%u/re/NativeLivenessEFlagsAfRequired", idCpu);
    750         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.ZF updating",        "/IEM/CPU%u/re/NativeLivenessEFlagsZfRequired", idCpu);
    751         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.SF updating",        "/IEM/CPU%u/re/NativeLivenessEFlagsSfRequired", idCpu);
    752         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.OF updating",        "/IEM/CPU%u/re/NativeLivenessEFlagsOfRequired", idCpu);
     740        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.CF updating",       "/IEM/CPU%u/re/NativeLivenessEFlags/CfSkippable", idCpu);
     741        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.PF updating",       "/IEM/CPU%u/re/NativeLivenessEFlags/PfSkippable", idCpu);
     742        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.AF updating",       "/IEM/CPU%u/re/NativeLivenessEFlags/AfSkippable", idCpu);
     743        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.ZF updating",       "/IEM/CPU%u/re/NativeLivenessEFlags/ZfSkippable", idCpu);
     744        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.SF updating",       "/IEM/CPU%u/re/NativeLivenessEFlags/SfSkippable", idCpu);
     745        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfSkippable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.OF updating",       "/IEM/CPU%u/re/NativeLivenessEFlags/OfSkippable", idCpu);
     746
     747        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.CF updating",        "/IEM/CPU%u/re/NativeLivenessEFlags/CfRequired", idCpu);
     748        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.PF updating",        "/IEM/CPU%u/re/NativeLivenessEFlags/PfRequired", idCpu);
     749        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.AF updating",        "/IEM/CPU%u/re/NativeLivenessEFlags/AfRequired", idCpu);
     750        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.ZF updating",        "/IEM/CPU%u/re/NativeLivenessEFlags/ZfRequired", idCpu);
     751        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.SF updating",        "/IEM/CPU%u/re/NativeLivenessEFlags/SfRequired", idCpu);
     752        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfRequired,     STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.OF updating",        "/IEM/CPU%u/re/NativeLivenessEFlags/OfRequired", idCpu);
    753753
    754754#   ifdef IEMLIVENESS_EXTENDED_LAYOUT
    755         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.CF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsCfDelayable", idCpu);
    756         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.PF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsPfDelayable", idCpu);
    757         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.AF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsAfDelayable", idCpu);
    758         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.ZF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsZfDelayable", idCpu);
    759         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.SF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsSfDelayable", idCpu);
    760         STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.OF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsOfDelayable", idCpu);
     755        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.CF updating", "/IEM/CPU%u/re/NativeLivenessEFlags/CfDelayable", idCpu);
     756        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.PF updating", "/IEM/CPU%u/re/NativeLivenessEFlags/PfDelayable", idCpu);
     757        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.AF updating", "/IEM/CPU%u/re/NativeLivenessEFlags/AfDelayable", idCpu);
     758        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.ZF updating", "/IEM/CPU%u/re/NativeLivenessEFlags/ZfDelayable", idCpu);
     759        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.SF updating", "/IEM/CPU%u/re/NativeLivenessEFlags/SfDelayable", idCpu);
     760        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfDelayable,    STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.OF updating", "/IEM/CPU%u/re/NativeLivenessEFlags/OfDelayable", idCpu);
    761761#   endif
    762762
    763763        /* Sum up all status bits ('_' is a sorting hack). */
    764         RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags?fSkippable*", idCpu);
     764        RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags/?fSkippable*", idCpu);
    765765        STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Total skippable EFLAGS status bit updating",
    766                           "/IEM/CPU%u/re/NativeLivenessEFlags_StatusSkippable", idCpu);
    767 
    768         RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags?fRequired*", idCpu);
     766                          "/IEM/CPU%u/re/NativeLivenessEFlags/totalSkippable", idCpu);
     767
     768        RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags/?fRequired*", idCpu);
    769769        STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Total required STATUS status bit updating",
    770                           "/IEM/CPU%u/re/NativeLivenessEFlags_StatusRequired", idCpu);
     770                          "/IEM/CPU%u/re/NativeLivenessEFlags/totalRequired", idCpu);
    771771
    772772#   ifdef IEMLIVENESS_EXTENDED_LAYOUT
    773         RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags?fDelayable*", idCpu);
     773        RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags/?fDelayable*", idCpu);
    774774        STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Total potentially delayable STATUS status bit updating",
    775                           "/IEM/CPU%u/re/NativeLivenessEFlags_StatusDelayable", idCpu);
     775                          "/IEM/CPU%u/re/NativeLivenessEFlags/totalDelayable", idCpu);
    776776#   endif
    777777
    778         RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags?f*", idCpu);
     778        RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags/?f*", idCpu);
    779779        STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Total STATUS status bit events of any kind",
    780                           "/IEM/CPU%u/re/NativeLivenessEFlags_StatusTotal", idCpu);
    781 
    782         /* Ratio of the status bit skippables. */
    783         RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags_StatusTotal", idCpu);
    784         RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlags_StatusSkippable", idCpu);
     780                          "/IEM/CPU%u/re/NativeLivenessEFlags/totalTotal", idCpu);
     781
     782        /* Corresponding ratios / percentages of the totals. */
     783        RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags/totalTotal", idCpu);
     784        RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlags/totalSkippable", idCpu);
    785785        STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, false, szPat,
    786786                               "Total skippable EFLAGS status bit updating percentage",
    787                                "/IEM/CPU%u/re/NativeLivenessEFlags_StatusSkippablePct", idCpu);
     787                               "/IEM/CPU%u/re/NativeLivenessEFlags/totalSkippablePct", idCpu);
     788
     789        RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags/totalTotal", idCpu);
     790        RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlags/totalRequired", idCpu);
     791        STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, false, szPat,
     792                               "Total required EFLAGS status bit updating percentage",
     793                               "/IEM/CPU%u/re/NativeLivenessEFlags/totalRequiredPct", idCpu);
    788794
    789795#   ifdef IEMLIVENESS_EXTENDED_LAYOUT
    790         /* Ratio of the status bit skippables. */
    791         RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlags_StatusDelayable", idCpu);
     796        RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlags/totalDelayable", idCpu);
    792797        STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, false, szPat,
    793798                               "Total potentially delayable EFLAGS status bit updating percentage",
    794                                "/IEM/CPU%u/re/NativeLivenessEFlags_StatusDelayablePct", idCpu);
     799                               "/IEM/CPU%u/re/NativeLivenessEFlags/totalDelayablePct", idCpu);
    795800#   endif
    796801
    797802        /* Ratios of individual bits. */
    798         size_t const offFlagChar = RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlagsCf*", idCpu) - 3;
     803        size_t const offFlagChar = RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags/Cf*", idCpu) - 3;
    799804        Assert(szPat[offFlagChar] == 'C');
    800         RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlagsCfSkippable", idCpu);
     805        RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlags/CfSkippable", idCpu);
    801806        Assert(szVal[offFlagChar] == 'C');
    802         szPat[offFlagChar] = szVal[offFlagChar] = 'C'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.CF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsCfSkippablePct", idCpu);
    803         szPat[offFlagChar] = szVal[offFlagChar] = 'P'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.PF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsPfSkippablePct", idCpu);
    804         szPat[offFlagChar] = szVal[offFlagChar] = 'A'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.AF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsAfSkippablePct", idCpu);
    805         szPat[offFlagChar] = szVal[offFlagChar] = 'Z'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.ZF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsZfSkippablePct", idCpu);
    806         szPat[offFlagChar] = szVal[offFlagChar] = 'S'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.SF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsSfSkippablePct", idCpu);
    807         szPat[offFlagChar] = szVal[offFlagChar] = 'O'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.OF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsOfSkippablePct", idCpu);
    808 
     807        szPat[offFlagChar] = szVal[offFlagChar] = 'C'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.CF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlags/CfSkippablePct", idCpu);
     808        szPat[offFlagChar] = szVal[offFlagChar] = 'P'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.PF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlags/PfSkippablePct", idCpu);
     809        szPat[offFlagChar] = szVal[offFlagChar] = 'A'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.AF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlags/AfSkippablePct", idCpu);
     810        szPat[offFlagChar] = szVal[offFlagChar] = 'Z'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.ZF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlags/ZfSkippablePct", idCpu);
     811        szPat[offFlagChar] = szVal[offFlagChar] = 'S'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.SF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlags/SfSkippablePct", idCpu);
     812        szPat[offFlagChar] = szVal[offFlagChar] = 'O'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.OF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlags/OfSkippablePct", idCpu);
     813
     814        /* PC updates total and skipped, with PCT ratio. */
    809815        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativePcUpdateTotal,   STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Total RIP updates",   "/IEM/CPU%u/re/NativePcUpdateTotal", idCpu);
    810816        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativePcUpdateDelayed, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Delayed RIP updates", "/IEM/CPU%u/re/NativePcUpdateDelayed", idCpu);
     817        RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativePcUpdateTotal", idCpu);
     818        RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativePcUpdateDelayed", idCpu);
     819        STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, false, szPat,
     820                               "Delayed RIP updating percentage",
     821                               "/IEM/CPU%u/re/NativePcUpdateDelayed_StatusDelayedPct", idCpu);
     822
    811823#  endif /* VBOX_WITH_STATISTICS */
    812824#  ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
     
    851863                        "/IEM/CPU%u/re/NativeMaybeAvxXcptCheckOmitted", idCpu);
    852864#   endif
    853 
    854         /* Ratio of the status bit skippables. */
    855         RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativePcUpdateTotal", idCpu);
    856         RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativePcUpdateDelayed", idCpu);
    857         STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, false, szPat,
    858                                "Delayed RIP updating percentage",
    859                                "/IEM/CPU%u/re/NativePcUpdateDelayed_StatusDelayedPct", idCpu);
    860865
    861866        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbFinished, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r106078 r106090  
    714714/** @name 64-bit value masks for IEMLIVENESSENTRY.
    715715 * @{ */                                      /*         0xzzzzyyyyxxxxwwww */
     716/** @todo Changing this to 0x003ffffffffffffe would reduce the liveness code
     717 * size by 3.2% on arm in extended layout.  That means moving kIemNativeGstReg_Pc
     718 * to zero, which may have other consequences so needs to be tested in full first. */
    716719#define IEMLIVENESSBIT_MASK                     UINT64_C(0x003ffffffffeffff)
    717720
     
    749752 * state and when merging in incoming state for registers not touched by the
    750753 * current call.
     754 *
     755 *
     756 * Extended Layout:
     757 *
     758 * The extended layout variation differs from the above as it records the
     759 * different register accesses as individual bits, and it is currently used for
     760 * the delayed EFLAGS calculation experiments.   The latter means that
     761 * calls/tb-exits and potential calls/exceptions/tb-exits are recorded
     762 * separately so the latter can be checked for in combination with clobbering.
    751763 *
    752764 * @{ */
     
    796808# define IEMLIVENESS_STATE_BIT_COUNT    2
    797809
    798 /** Check if we're expecting read & write accesses to a register with the given (previous) liveness state. */
     810/** Check if we're expecting read & write accesses to a register with the given (previous) liveness state.
     811 * @note only used in assertions. */
    799812# define IEMLIVENESS_STATE_IS_MODIFY_EXPECTED(a_uState)  ((uint32_t)((a_uState) - 1U) >= (uint32_t)(IEMLIVENESS_STATE_INPUT - 1U))
    800 /** Check if we're expecting read accesses to a register with the given (previous) liveness state. */
     813/** Check if we're expecting read accesses to a register with the given (previous) liveness state.
     814 * @note only used in assertions. */
    801815# define IEMLIVENESS_STATE_IS_INPUT_EXPECTED(a_uState)   IEMLIVENESS_STATE_IS_MODIFY_EXPECTED(a_uState)
    802816/** Check if a register clobbering is expected given the (previous) liveness state.
    803817 * The state must be either CLOBBERED or XCPT_OR_CALL, but it may also
    804  * include INPUT if the register is used in more than one place. */
     818 * include INPUT if the register is used in more than one place.
     819 * @note only used in assertions. */
    805820# define IEMLIVENESS_STATE_IS_CLOBBER_EXPECTED(a_uState) ((uint32_t)(a_uState) != IEMLIVENESS_STATE_UNUSED)
    806821
    807822/** Check if all status flags are going to be clobbered and doesn't need
    808823 *  calculating in the current step.
    809  * @param a_pCurEntry  The current liveness entry. */
    810 # define IEMLIVENESS_STATE_ARE_STATUS_EFL_TO_BE_CLOBBERED(a_pCurEntry)  \
     824 * @param a_pCurEntry  The current liveness entry.
     825 * @note  Used by actual code. */
     826# define IEMLIVENESS_STATE_ARE_STATUS_EFL_TO_BE_CLOBBERED(a_pCurEntry) \
    811827    ( (((a_pCurEntry)->Bit0.bm64 | (a_pCurEntry)->Bit1.bm64) & IEMLIVENESSBIT_STATUS_EFL_MASK) == 0 )
     828
     829/** Construct a mask of the guest registers in the UNUSED and XCPT_OR_CALL
     830 *  states, as these are no longer needed.
     831 * @param a_pCurEntry  The current liveness entry.
     832 * @note  Used by actual code. */
     833AssertCompile(IEMLIVENESS_STATE_UNUSED == 1 && IEMLIVENESS_STATE_XCPT_OR_CALL == 2);
     834# define IEMLIVENESS_STATE_GET_CAN_BE_FREED_SET(a_pCurEntry) \
     835    ( (a_pCurEntry)->Bit0.bm64 ^ (a_pCurEntry)->Bit1.bm64 )
     836
    812837
    813838#else  /* IEMLIVENESS_EXTENDED_LAYOUT */
     
    839864          & IEMLIVENESSBIT_STATUS_EFL_MASK) )
    840865
     866/** Construct a mask of the registers not in the read or write state.
     867 * @note  We could skip writes, if they aren't from us, as this is just a hack
     868 *        to prevent trashing registers that have just been written or will be
     869 *        written when we retire the current instruction.
     870 * @param a_pCurEntry  The current liveness entry.
     871 * @note  Used by actual code. */
     872# define IEMLIVENESS_STATE_GET_CAN_BE_FREED_SET(a_pCurEntry) \
     873            (  ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
     874             & ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
     875             & IEMLIVENESSBIT_MASK )
     876
     877
    841878#endif /* IEMLIVENESS_EXTENDED_LAYOUT */
    842879/** @} */
     
    866903
    867904/** Initializing the outgoing state with a potential xcpt or call state.
    868  * This only works when all later changes will be IEMLIVENESS_STATE_INPUT. */
     905 * This only works when all later changes will be IEMLIVENESS_STATE_INPUT.
     906 *
     907 * @note Must invoke IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL when done!
     908 */
    869909#ifndef IEMLIVENESS_EXTENDED_LAYOUT
    870910# define IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL(a_pOutgoing, a_pIncoming) \
     
    877917    do { \
    878918        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 = IEMLIVENESSBIT_MASK; \
    879         (a_pOutgoing)->aBits[IEMLIVENESS_BIT_READ          ].bm64 = (a_pIncoming)->aBits[IEMLIVENESS_BIT_READ].bm64; \
     919        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_READ          ].bm64 = 0; \
    880920        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_WRITE         ].bm64 = 0; \
    881921        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_CALL          ].bm64 = 0; \
     
    883923#endif
    884924
     925/** Completes IEM_LIVENESS_RAW_INIT_WITH_POTENTIAL_CALL after applying any
     926 * other state modifications.
     927 */
     928#ifndef IEMLIVENESS_EXTENDED_LAYOUT
     929# define IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(a_pOutgoing, a_pIncoming) ((void)0)
     930#else
     931# define IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL(a_pOutgoing, a_pIncoming) \
     932    do { \
     933        uint64_t const fInhMask = ~(  (a_pOutgoing)->aBits[IEMLIVENESS_BIT_CALL].bm64 \
     934                                    | (a_pOutgoing)->aBits[IEMLIVENESS_BIT_WRITE].bm64); \
     935        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 |= (a_pIncoming)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 & fInhMask; \
     936        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_READ          ].bm64 |= (a_pIncoming)->aBits[IEMLIVENESS_BIT_READ].bm64  & fInhMask; \
     937        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_WRITE         ].bm64 |= (a_pIncoming)->aBits[IEMLIVENESS_BIT_WRITE].bm64 & fInhMask; \
     938        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_CALL          ].bm64 |= (a_pIncoming)->aBits[IEMLIVENESS_BIT_CALL].bm64  & fInhMask; \
     939    } while (0)
     940#endif
     941
    885942/** Initializing the outgoing state with an unconditional call state.
    886  * This only works when all later changes will be IEMLIVENESS_STATE_INPUT. */
     943 * This should only really be used alone. */
    887944#ifndef IEMLIVENESS_EXTENDED_LAYOUT
    888945# define IEM_LIVENESS_RAW_INIT_WITH_CALL(a_pOutgoing, a_pIncoming) \
     
    894951# define IEM_LIVENESS_RAW_INIT_WITH_CALL(a_pOutgoing, a_pIncoming) \
    895952    do { \
     953        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_CALL          ].bm64 = IEMLIVENESSBIT_MASK; \
    896954        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 = 0; \
    897         (a_pOutgoing)->aBits[IEMLIVENESS_BIT_READ          ].bm64 = (a_pIncoming)->aBits[IEMLIVENESS_BIT_READ].bm64; \
     955        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_READ          ].bm64 = 0; \
    898956        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_WRITE         ].bm64 = 0; \
    899         (a_pOutgoing)->aBits[IEMLIVENESS_BIT_CALL          ].bm64 = IEMLIVENESSBIT_MASK; \
     957        RT_NOREF(a_pIncoming); \
    900958    } while (0)
    901959#endif
    902960
     961#if 0 /* unused */
    903962/** Initializing the outgoing state with an unconditional call state as well as
    904963 *  an potential call/exception preceeding it.
    905  * This should only be used alone, really. */
     964 * This should only really be used alone. */
    906965#ifndef IEMLIVENESS_EXTENDED_LAYOUT
    907966# define IEM_LIVENESS_RAW_INIT_WITH_CALL_AND_POTENTIAL_CALL(a_pOutgoing, a_pIncoming) \
     
    914973    do { \
    915974        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 = IEMLIVENESSBIT_MASK; \
    916         (a_pOutgoing)->aBits[IEMLIVENESS_BIT_READ          ].bm64 = (a_pIncoming)->aBits[IEMLIVENESS_BIT_READ].bm64; \
     975        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_CALL          ].bm64 = IEMLIVENESSBIT_MASK; \
     976        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_READ          ].bm64 = 0; \
    917977        (a_pOutgoing)->aBits[IEMLIVENESS_BIT_WRITE         ].bm64 = 0; \
    918         (a_pOutgoing)->aBits[IEMLIVENESS_BIT_CALL          ].bm64 = IEMLIVENESSBIT_MASK; \
    919978    } while (0)
     979#endif
    920980#endif
    921981
     
    18631923DECL_HIDDEN_THROW(uint32_t) iemNativeEmitThreadedCall(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    18641924                                                      PCIEMTHRDEDCALLENTRY pCallEntry);
     1925IEM_DECL_IEMNATIVELIVENESSFUNC_PROTO(iemNativeLivenessFunc_ThreadedCall);
    18651926DECL_HIDDEN_THROW(uint32_t) iemNativeEmitLeaGprByGstRegRef(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxGprDst,
    18661927                                                           IEMNATIVEGSTREGREF enmClass, uint8_t idxRegInClass);
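
For the extended layout, the new IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL inherits incoming state only for registers the current entry neither writes nor covers with an unconditional call (fInhMask = ~(CALL | WRITE) of the outgoing state). A small sketch under those definitions, using a hypothetical trimmed-down entry type with just the four bitmaps and an 8-bit toy register set:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical trimmed-down extended-layout entry (illustration only). */
    typedef struct MINILIVENESS
    {
        uint64_t fPotentialCall;
        uint64_t fRead;
        uint64_t fWrite;
        uint64_t fCall;
    } MINILIVENESS;

    /* Mirrors the extended-layout IEM_LIVENESS_RAW_FINISH_WITH_POTENTIAL_CALL:
       inherit incoming bits only for registers this entry neither writes nor
       covers with an unconditional call. */
    static void finishWithPotentialCall(MINILIVENESS *pOutgoing, const MINILIVENESS *pIncoming)
    {
        uint64_t const fInhMask    = ~(pOutgoing->fCall | pOutgoing->fWrite);
        pOutgoing->fPotentialCall |= pIncoming->fPotentialCall & fInhMask;
        pOutgoing->fRead          |= pIncoming->fRead          & fInhMask;
        pOutgoing->fWrite         |= pIncoming->fWrite         & fInhMask;
        pOutgoing->fCall          |= pIncoming->fCall          & fInhMask;
    }

    int main(void)
    {
        /* INIT_WITH_POTENTIAL_CALL on the toy set: potential-call covers
           everything, the rest starts cleared. */
        MINILIVENESS Out = { 0xff, 0, 0, 0 };
        Out.fRead  |= 0x01;                         /* this entry reads  "register 0" */
        Out.fWrite |= 0x08;                         /* this entry writes "register 3" */
        MINILIVENESS const In = { 0, 0x0e, 0, 0 };  /* later code reads registers 1, 2 and 3 */
        finishWithPotentialCall(&Out, &In);
        /* Register 3's incoming READ bit is not inherited, since this entry writes it first. */
        printf("read=%#llx write=%#llx\n",
               (unsigned long long)Out.fRead, (unsigned long long)Out.fWrite);  /* read=0x7 write=0x8 */
        return 0;
    }
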