VirtualBox

Changeset 19803 in vbox for trunk/src/VBox/VMM/TM.cpp


Timestamp:
May 19, 2009 8:33:18 AM
Author:
vboxsync
Message:

TM: More SMP hacking on the virtual sync clock.

File:
1 edited

Legend:

Unmodified lines carry no prefix; lines removed in r19803 are prefixed with '-'; lines added are prefixed with '+'.
  • trunk/src/VBox/VMM/TM.cpp

--- trunk/src/VBox/VMM/TM.cpp (r19753)
+++ trunk/src/VBox/VMM/TM.cpp (r19803)
@@ -589,7 +589,9 @@
     STAM_REG(pVM, &pVM->tm.s.StatVirtualGetSetFF,                     STAMTYPE_COUNTER, "/TM/VirtualGetSetFF",                 STAMUNIT_OCCURENCES, "Times we set the FF when calling TMTimerGet.");
     STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGet,                      STAMTYPE_COUNTER, "/TM/VirtualSyncGet",                  STAMUNIT_OCCURENCES, "The number of times tmVirtualSyncGetEx was called.");
-    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetSetFF,                 STAMTYPE_COUNTER, "/TM/VirtualSyncGetSetFF",             STAMUNIT_OCCURENCES, "Times we set the FF when calling tmVirtualSyncGetEx.");
-    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetELoop,                 STAMTYPE_COUNTER, "/TM/VirtualSyncGetELoop",             STAMUNIT_OCCURENCES, "Times we give up because too many loops in tmVirtualSyncGetEx.");
-    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetLocked,                STAMTYPE_COUNTER, "/TM/VirtualSyncGetLocked",            STAMUNIT_OCCURENCES, "Times we successfully acquired the lock in tmVirtualSyncGetEx.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetELoop,                 STAMTYPE_COUNTER, "/TM/VirtualSyncGet/ELoop",            STAMUNIT_OCCURENCES, "Times we give up because too many loops in tmVirtualSyncGetEx.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetExpired,               STAMTYPE_COUNTER, "/TM/VirtualSyncGet/Expired",          STAMUNIT_OCCURENCES, "Times tmVirtualSyncGetEx encountered an expired timer stopping the clock.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetLocked,                STAMTYPE_COUNTER, "/TM/VirtualSyncGet/Locked",           STAMUNIT_OCCURENCES, "Times we successfully acquired the lock in tmVirtualSyncGetEx.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetLockless,              STAMTYPE_COUNTER, "/TM/VirtualSyncGet/Lockless",         STAMUNIT_OCCURENCES, "Times tmVirtualSyncGetEx returned without needing to take the lock.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetSetFF,                 STAMTYPE_COUNTER, "/TM/VirtualSyncGet/SetFF",            STAMUNIT_OCCURENCES, "Times we set the FF when calling tmVirtualSyncGetEx.");
     STAM_REG(pVM, &pVM->tm.s.StatVirtualPause,                        STAMTYPE_COUNTER, "/TM/VirtualPause",                    STAMUNIT_OCCURENCES, "The number of times TMR3TimerPause was called.");
     STAM_REG(pVM, &pVM->tm.s.StatVirtualResume,                       STAMTYPE_COUNTER, "/TM/VirtualResume",                   STAMUNIT_OCCURENCES, "The number of times TMR3TimerResume was called.");
     
@@ -968,7 +970,7 @@
             const uint64_t offNew = offVirtualNow - offVirtualSyncNow;
             Assert(offOld <= offNew);
-            ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
-            ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSync, offNew);
-            ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
+            ASMAtomicWriteU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
+            ASMAtomicWriteU64((uint64_t volatile *)&pVM->tm.s.offVirtualSync, offNew);
+            ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
             LogRel(("TM: Aborting catch-up attempt on reset with a %RU64 ns lag on reset; new total: %RU64 ns\n", offNew - offOld, offNew));
         }
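The substitution above recurs throughout this changeset: ASMAtomicXchgU64/ASMAtomicXchgBool calls whose return value was discarded become ASMAtomicWriteU64/ASMAtomicWriteBool. An exchange is a full read-modify-write (implicitly locked XCHG on x86) that hands back the previous value; when nobody looks at that value, a plain atomic store suffices and is typically cheaper. A minimal sketch of the distinction, using C11 atomics as a stand-in for the IPRT primitives (the field name is illustrative, not the TM structure):

    #include <stdatomic.h>
    #include <stdint.h>

    static _Atomic uint64_t g_offVirtualSync;  /* hypothetical shared field */

    void update_with_xchg(uint64_t offNew)
    {
        /* Exchange: an atomic read-modify-write that returns the previous
         * value, even though this caller throws it away. */
        (void)atomic_exchange(&g_offVirtualSync, offNew);
    }

    void update_with_write(uint64_t offNew)
    {
        /* Write: a plain atomic store; sufficient whenever the previous
         * value is not needed. */
        atomic_store(&g_offVirtualSync, offNew);
    }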
     
@@ -1862,4 +1864,6 @@
  *
  * @param   pVM             The VM to run the timers for.
+ *
+ * @remarks The caller must own both the TM/EMT and the Virtual Sync locks.
  */
 static void tmR3TimerQueueRunVirtualSync(PVM pVM)
     
@@ -1891,6 +1895,7 @@
  * and 2) lag behind at a steady rate.
  */
-    const uint64_t u64VirtualNow = TMVirtualGetNoCheck(pVM);
-    uint64_t u64Now;
+    const uint64_t  u64VirtualNow  = TMVirtualGetNoCheck(pVM);
+    uint64_t const  offSyncGivenUp = pVM->tm.s.offVirtualSyncGivenUp;
+    uint64_t        u64Now;
     if (!pVM->tm.s.fVirtualSyncTicking)
     {
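The new offSyncGivenUp local reads pVM->tm.s.offVirtualSyncGivenUp once at the top of tmR3TimerQueueRunVirtualSync, and all later arithmetic in the function uses that local, so every computation sees one consistent snapshot instead of re-reading a field another CPU could in principle touch; the Assert added further down (new line 1962) records the expectation that the field does not actually change while the locks are held. A sketch of the snapshot idiom, with illustrative names and a C11 atomic standing in for the IPRT field:

    #include <stdatomic.h>
    #include <stdint.h>

    static _Atomic uint64_t g_offGivenUp;  /* hypothetical shared field */

    /* Clamp the end of the timer frame, reading the shared offset once. */
    uint64_t frame_end(uint64_t u64VirtualNow, uint64_t u64Now, uint32_t cNsSlack)
    {
        uint64_t const offSyncGivenUp = atomic_load(&g_offGivenUp); /* one read */
        uint64_t u64Max = u64Now + cNsSlack;
        if (u64Max > u64VirtualNow - offSyncGivenUp)  /* both uses see the   */
            u64Max = u64VirtualNow - offSyncGivenUp;  /* same snapshot value */
        return u64Max;
    }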
     
@@ -1901,6 +1906,8 @@
     else
     {
-        /* Calc 'now'. (update order doesn't really matter here) */
-        uint64_t off = pVM->tm.s.offVirtualSync;
+        /* Calc 'now'. */
+        bool     fStopCatchup  = false;
+        bool     fUpdateStuff  = false;
+        uint64_t off           = pVM->tm.s.offVirtualSync;
         if (pVM->tm.s.fVirtualSyncCatchUp)
         {
     
@@ -1909,31 +1916,36 @@
             {
                 uint64_t u64Sub = ASMMultU64ByU32DivByU32(u64Delta, pVM->tm.s.u32VirtualSyncCatchUpPercentage, 100);
-                if (off > u64Sub + pVM->tm.s.offVirtualSyncGivenUp)
+                if (off > u64Sub + offSyncGivenUp)
                 {
                     off -= u64Sub;
-                    Log4(("TM: %RU64/%RU64: sub %RU64 (run)\n", u64VirtualNow - off, off - pVM->tm.s.offVirtualSyncGivenUp, u64Sub));
+                    Log4(("TM: %RU64/%RU64: sub %RU64 (run)\n", u64VirtualNow - off, off - offSyncGivenUp, u64Sub));
                 }
                 else
                 {
                     STAM_PROFILE_ADV_STOP(&pVM->tm.s.StatVirtualSyncCatchup, c);
-                    ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
-                    off = pVM->tm.s.offVirtualSyncGivenUp;
+                    fStopCatchup = true;
+                    off = offSyncGivenUp;
                     Log4(("TM: %RU64/0: caught up (run)\n", u64VirtualNow));
                 }
             }
-            ASMAtomicXchgU64(&pVM->tm.s.offVirtualSync, off);
-            pVM->tm.s.u64VirtualSyncCatchUpPrev = u64VirtualNow;
         }
         u64Now = u64VirtualNow - off;

         /* Check if stopped by expired timer. */
+        uint64_t u64Expire = pNext->u64Expire;
         if (u64Now >= pNext->u64Expire)
         {
             STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncRunStop);
             u64Now = pNext->u64Expire;
-            ASMAtomicXchgU64(&pVM->tm.s.u64VirtualSync, u64Now);
-            ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncTicking, false);
-            Log4(("TM: %RU64/%RU64: exp tmr (run)\n", u64Now, u64VirtualNow - u64Now - pVM->tm.s.offVirtualSyncGivenUp));
-
+            ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSync, u64Now);
+            ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, false);
+            Log4(("TM: %RU64/%RU64: exp tmr (run)\n", u64Now, u64VirtualNow - u64Now - offSyncGivenUp));
+        }
+        else if (fUpdateStuff)
+        {
+            ASMAtomicWriteU64(&pVM->tm.s.offVirtualSync, off);
+            ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSyncCatchUpPrev, u64VirtualNow);
+            if (fStopCatchup)
+                ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
         }
     }
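The largest structural change is in how results are published. The old code stored offVirtualSync and u64VirtualSyncCatchUpPrev in the middle of computing 'now' and cleared the catch-up flag inline; the new code records the decisions in locals (fStopCatchup, fUpdateStuff) and performs all the shared-state writes afterwards in one branch, skipping them entirely on the path where the clock was stopped by an expired timer. A C11 sketch of the compute-first, publish-last shape, with illustrative names, and assuming fUpdateStuff is set on the catch-up path (the line doing so falls in context not shown in this hunk):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical stand-ins for the TM fields published here. */
    struct syncstate
    {
        _Atomic uint64_t offVirtualSync;
        _Atomic uint64_t u64CatchUpPrev;
        _Atomic bool     fCatchUp;
    };

    /* Shared state is written once, in a fixed order, only after all
     * decisions have been taken in locals. */
    void publish(struct syncstate *pState, uint64_t off,
                 uint64_t u64VirtualNow, bool fStopCatchup)
    {
        atomic_store(&pState->offVirtualSync, off);
        atomic_store(&pState->u64CatchUpPrev, u64VirtualNow);
        if (fStopCatchup)
            atomic_store(&pState->fCatchUp, false);
    }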
     
@@ -1941,11 +1953,12 @@
     /* calc end of frame. */
     uint64_t u64Max = u64Now + pVM->tm.s.u32VirtualSyncScheduleSlack;
-    if (u64Max > u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp)
-        u64Max = u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp;
+    if (u64Max > u64VirtualNow - offSyncGivenUp)
+        u64Max = u64VirtualNow - offSyncGivenUp;

     /* assert sanity */
-    Assert(u64Now <= u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp);
-    Assert(u64Max <= u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp);
+    Assert(u64Now <= u64VirtualNow - offSyncGivenUp);
+    Assert(u64Max <= u64VirtualNow - offSyncGivenUp);
     Assert(u64Now <= u64Max);
+    Assert(offSyncGivenUp == pVM->tm.s.offVirtualSyncGivenUp);

     /*
     
@@ -1984,6 +1997,6 @@
             u64Prev = pTimer->u64Expire;
 #endif
-            ASMAtomicXchgSize(&pVM->tm.s.fVirtualSyncTicking, false);
-            ASMAtomicXchgU64(&pVM->tm.s.u64VirtualSync, pTimer->u64Expire);
+            ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSync, pTimer->u64Expire);
+            ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, false);

             /* fire */
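Besides moving from the size-generic ASMAtomicXchgSize to the type-safe ASMAtomicWriteBool, this hunk also swaps the order of the two stores: the final u64VirtualSync value is now written before fVirtualSyncTicking is cleared. Plausibly this is for the benefit of lockless readers: one that observes the clock as stopped then reads a timestamp that is already final. A C11 sketch of that ordering argument, with illustrative names (the default sequentially consistent stores preserve the order):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    static _Atomic uint64_t g_u64VirtualSync;
    static _Atomic bool     g_fTicking = true;

    void stop_clock(uint64_t u64Final)
    {
        atomic_store(&g_u64VirtualSync, u64Final); /* 1: publish final value */
        atomic_store(&g_fTicking, false);          /* 2: then stop the clock */
    }

    /* Returns the final timestamp once the clock has stopped, else 0 to
     * signal that the caller must derive the time some other way. */
    uint64_t try_read_stopped_clock(void)
    {
        if (!atomic_load(&g_fTicking))             /* saw "stopped"...       */
            return atomic_load(&g_u64VirtualSync); /* ...value is complete   */
        return 0;
    }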
     
@@ -2032,5 +2045,5 @@
         /* Let the time run a little bit while we were busy running timers(?). */
         uint64_t u64Elapsed;
-#define MAX_ELAPSED 30000 /* ns */
+#define MAX_ELAPSED 30000U /* ns */
         if (offSlack > MAX_ELAPSED)
             u64Elapsed = 0;
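A small type-hygiene fix: offSlack is an unsigned 64-bit quantity, and the U suffix keeps the MAX_ELAPSED constant unsigned so the comparison stays within one signedness, likely to placate compilers that warn about mixed signed/unsigned comparisons. Illustrative sketch:

    #include <stdint.h>

    #define MAX_ELAPSED 30000U /* ns; unsigned, matching the operand below */

    /* An unsigned constant keeps the comparison unsigned throughout and
     * avoids -Wsign-compare-style diagnostics on some compilers. */
    int slack_too_large(uint64_t offSlack)
    {
        return offSlack > MAX_ELAPSED;
    }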
     
@@ -2059,5 +2072,5 @@
                 /* stop */
                 STAM_PROFILE_ADV_STOP(&pVM->tm.s.StatVirtualSyncCatchup, c);
-                ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
+                ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
                 Log4(("TM: %RU64/%RU64: caught up\n", u64VirtualNow2 - offNew, offLag));
             }
     
@@ -2072,5 +2085,5 @@
                 {
                     STAM_COUNTER_INC(&pVM->tm.s.aStatVirtualSyncCatchupAdjust[i]);
-                    ASMAtomicXchgU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage, pVM->tm.s.aVirtualSyncCatchUpPeriods[i].u32Percentage);
+                    ASMAtomicWriteU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage, pVM->tm.s.aVirtualSyncCatchUpPeriods[i].u32Percentage);
                     Log4(("TM: %RU64/%RU64: adj %u%%\n", u64VirtualNow2 - offNew, offLag, pVM->tm.s.u32VirtualSyncCatchUpPercentage));
                 }
     
@@ -2082,6 +2095,6 @@
                 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGiveUp);
                 STAM_PROFILE_ADV_STOP(&pVM->tm.s.StatVirtualSyncCatchup, c);
-                ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
-                ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
+                ASMAtomicWriteU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
+                ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
                 Log4(("TM: %RU64/%RU64: give up %u%%\n", u64VirtualNow2 - offNew, offLag, pVM->tm.s.u32VirtualSyncCatchUpPercentage));
                 LogRel(("TM: Giving up catch-up attempt at a %RU64 ns lag; new total: %RU64 ns\n", offLag, offNew));
     
@@ -2099,6 +2112,6 @@
                     i++;
                 STAM_COUNTER_INC(&pVM->tm.s.aStatVirtualSyncCatchupInitial[i]);
-                ASMAtomicXchgU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage, pVM->tm.s.aVirtualSyncCatchUpPeriods[i].u32Percentage);
-                ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, true);
+                ASMAtomicWriteU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage, pVM->tm.s.aVirtualSyncCatchUpPeriods[i].u32Percentage);
+                ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, true);
                 Log4(("TM: %RU64/%RU64: catch-up %u%%\n", u64VirtualNow2 - offNew, offLag, pVM->tm.s.u32VirtualSyncCatchUpPercentage));
             }
     
@@ -2107,5 +2120,5 @@
                 /* don't bother */
                 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGiveUpBeforeStarting);
-                ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
+                ASMAtomicWriteU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
                 Log4(("TM: %RU64/%RU64: give up\n", u64VirtualNow2 - offNew, offLag));
                 LogRel(("TM: Not bothering to attempt catching up a %RU64 ns lag; new total: %RU64\n", offLag, offNew));
     
@@ -2117,6 +2130,6 @@
          */
         Assert(!(offNew & RT_BIT_64(63)));
-        ASMAtomicXchgU64(&pVM->tm.s.offVirtualSync, offNew);
-        ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncTicking, true);
+        ASMAtomicWriteU64(&pVM->tm.s.offVirtualSync, offNew);
+        ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, true);
     }
 }
     
@@ -2307,5 +2320,5 @@
 {
     RTTimeNow(pTime);
-    RTTimeSpecSubNano(pTime, pVM->tm.s.offVirtualSync - pVM->tm.s.offVirtualSyncGivenUp);
+    RTTimeSpecSubNano(pTime, ASMAtomicReadU64(&pVM->tm.s.offVirtualSync) - ASMAtomicReadU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp));
     RTTimeSpecAddNano(pTime, pVM->tm.s.offUTC);
     return pTime;
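In this UTC conversion helper, reached without any of the writer's context, the two 64-bit offsets are now read with ASMAtomicReadU64. On a 32-bit host a plain uint64_t load can be split into two 32-bit loads and observe a torn value while another CPU updates the field; an atomic 64-bit read rules that out. A C11 sketch of the same idea, with illustrative names:

    #include <stdatomic.h>
    #include <stdint.h>

    static _Atomic uint64_t g_offVirtualSync;
    static _Atomic uint64_t g_offGivenUp;

    /* Read both offsets atomically so that on 32-bit hosts neither load
     * can tear into two halves mid-update. */
    uint64_t current_lag(void)
    {
        uint64_t const off     = atomic_load(&g_offVirtualSync);
        uint64_t const givenUp = atomic_load(&g_offGivenUp);
        return off - givenUp;
    }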