Changeset 19803 in vbox for trunk/src/VBox/VMM/TM.cpp
Timestamp: May 19, 2009 8:33:18 AM
File: trunk/src/VBox/VMM/TM.cpp (1 edited)
trunk/src/VBox/VMM/TM.cpp
--- trunk/src/VBox/VMM/TM.cpp (r19753)
+++ trunk/src/VBox/VMM/TM.cpp (r19803)
@@ -589,7 +589,9 @@
     STAM_REG(pVM, &pVM->tm.s.StatVirtualGetSetFF, STAMTYPE_COUNTER, "/TM/VirtualGetSetFF", STAMUNIT_OCCURENCES, "Times we set the FF when calling TMTimerGet.");
     STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGet, STAMTYPE_COUNTER, "/TM/VirtualSyncGet", STAMUNIT_OCCURENCES, "The number of times tmVirtualSyncGetEx was called.");
-    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetSetFF, STAMTYPE_COUNTER, "/TM/VirtualSyncGetSetFF", STAMUNIT_OCCURENCES, "Times we set the FF when calling tmVirtualSyncGetEx.");
-    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetELoop, STAMTYPE_COUNTER, "/TM/VirtualSyncGetELoop", STAMUNIT_OCCURENCES, "Times we give up because too many loops in tmVirtualSyncGetEx.");
-    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetLocked, STAMTYPE_COUNTER, "/TM/VirtualSyncGetLocked", STAMUNIT_OCCURENCES, "Times we successfully acquired the lock in tmVirtualSyncGetEx.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetELoop, STAMTYPE_COUNTER, "/TM/VirtualSyncGet/ELoop", STAMUNIT_OCCURENCES, "Times we give up because too many loops in tmVirtualSyncGetEx.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetExpired, STAMTYPE_COUNTER, "/TM/VirtualSyncGet/Expired", STAMUNIT_OCCURENCES, "Times tmVirtualSyncGetEx encountered an expired timer stopping the clock.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetLocked, STAMTYPE_COUNTER, "/TM/VirtualSyncGet/Locked", STAMUNIT_OCCURENCES, "Times we successfully acquired the lock in tmVirtualSyncGetEx.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetLockless, STAMTYPE_COUNTER, "/TM/VirtualSyncGet/Lockless", STAMUNIT_OCCURENCES, "Times tmVirtualSyncGetEx returned without needing to take the lock.");
+    STAM_REG(pVM, &pVM->tm.s.StatVirtualSyncGetSetFF, STAMTYPE_COUNTER, "/TM/VirtualSyncGet/SetFF", STAMUNIT_OCCURENCES, "Times we set the FF when calling tmVirtualSyncGetEx.");
     STAM_REG(pVM, &pVM->tm.s.StatVirtualPause, STAMTYPE_COUNTER, "/TM/VirtualPause", STAMUNIT_OCCURENCES, "The number of times TMR3TimerPause was called.");
     STAM_REG(pVM, &pVM->tm.s.StatVirtualResume, STAMTYPE_COUNTER, "/TM/VirtualResume", STAMUNIT_OCCURENCES, "The number of times TMR3TimerResume was called.");
@@ -968,7 +970,7 @@
         const uint64_t offNew = offVirtualNow - offVirtualSyncNow;
         Assert(offOld <= offNew);
-        ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
-        ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSync, offNew);
-        ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
+        ASMAtomicWriteU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
+        ASMAtomicWriteU64((uint64_t volatile *)&pVM->tm.s.offVirtualSync, offNew);
+        ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
         LogRel(("TM: Aborting catch-up attempt on reset with a %RU64 ns lag on reset; new total: %RU64 ns\n", offNew - offOld, offNew));
     }
@@ -1862,4 +1864,6 @@
  *
  * @param pVM The VM to run the timers for.
+ *
+ * @remarks The caller must own both the TM/EMT and the Virtual Sync locks.
  */
 static void tmR3TimerQueueRunVirtualSync(PVM pVM)
@@ -1891,6 +1895,7 @@
      * and 2) lag behind at a steady rate.
      */
-    const uint64_t u64VirtualNow = TMVirtualGetNoCheck(pVM);
-    uint64_t u64Now;
+    const uint64_t u64VirtualNow  = TMVirtualGetNoCheck(pVM);
+    uint64_t const offSyncGivenUp = pVM->tm.s.offVirtualSyncGivenUp;
+    uint64_t       u64Now;
     if (!pVM->tm.s.fVirtualSyncTicking)
     {
@@ -1901,6 +1906,8 @@
     else
     {
-        /* Calc 'now'. (update order doesn't really matter here) */
-        uint64_t off = pVM->tm.s.offVirtualSync;
+        /* Calc 'now'. */
+        bool fStopCatchup = false;
+        bool fUpdateStuff = false;
+        uint64_t off = pVM->tm.s.offVirtualSync;
         if (pVM->tm.s.fVirtualSyncCatchUp)
         {
@@ -1909,31 +1916,36 @@
             {
                 uint64_t u64Sub = ASMMultU64ByU32DivByU32(u64Delta, pVM->tm.s.u32VirtualSyncCatchUpPercentage, 100);
-                if (off > u64Sub + pVM->tm.s.offVirtualSyncGivenUp)
+                if (off > u64Sub + offSyncGivenUp)
                 {
                     off -= u64Sub;
-                    Log4(("TM: %RU64/%RU64: sub %RU64 (run)\n", u64VirtualNow - off, off - pVM->tm.s.offVirtualSyncGivenUp, u64Sub));
+                    Log4(("TM: %RU64/%RU64: sub %RU64 (run)\n", u64VirtualNow - off, off - offSyncGivenUp, u64Sub));
                 }
                 else
                 {
                     STAM_PROFILE_ADV_STOP(&pVM->tm.s.StatVirtualSyncCatchup, c);
-                    ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
-                    off = pVM->tm.s.offVirtualSyncGivenUp;
+                    fStopCatchup = true;
+                    off = offSyncGivenUp;
                     Log4(("TM: %RU64/0: caught up (run)\n", u64VirtualNow));
                 }
             }
-            ASMAtomicXchgU64(&pVM->tm.s.offVirtualSync, off);
-            pVM->tm.s.u64VirtualSyncCatchUpPrev = u64VirtualNow;
         }
         u64Now = u64VirtualNow - off;

         /* Check if stopped by expired timer. */
+        uint64_t u64Expire = pNext->u64Expire;
         if (u64Now >= pNext->u64Expire)
         {
             STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncRunStop);
             u64Now = pNext->u64Expire;
-            ASMAtomicXchgU64(&pVM->tm.s.u64VirtualSync, u64Now);
-            ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncTicking, false);
-            Log4(("TM: %RU64/%RU64: exp tmr (run)\n", u64Now, u64VirtualNow - u64Now - pVM->tm.s.offVirtualSyncGivenUp));
-
+            ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSync, u64Now);
+            ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, false);
+            Log4(("TM: %RU64/%RU64: exp tmr (run)\n", u64Now, u64VirtualNow - u64Now - offSyncGivenUp));
+        }
+        else if (fUpdateStuff)
+        {
+            ASMAtomicWriteU64(&pVM->tm.s.offVirtualSync, off);
+            ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSyncCatchUpPrev, u64VirtualNow);
+            if (fStopCatchup)
+                ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
         }
     }
@@ -1941,11 +1953,12 @@
     /* calc end of frame. */
     uint64_t u64Max = u64Now + pVM->tm.s.u32VirtualSyncScheduleSlack;
-    if (u64Max > u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp)
-        u64Max = u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp;
+    if (u64Max > u64VirtualNow - offSyncGivenUp)
+        u64Max = u64VirtualNow - offSyncGivenUp;

     /* assert sanity */
-    Assert(u64Now <= u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp);
-    Assert(u64Max <= u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp);
+    Assert(u64Now <= u64VirtualNow - offSyncGivenUp);
+    Assert(u64Max <= u64VirtualNow - offSyncGivenUp);
     Assert(u64Now <= u64Max);
+    Assert(offSyncGivenUp == pVM->tm.s.offVirtualSyncGivenUp);

     /*
@@ -1984,6 +1997,6 @@
         u64Prev = pTimer->u64Expire;
 #endif
-        ASMAtomicXchgSize(&pVM->tm.s.fVirtualSyncTicking, false);
-        ASMAtomicXchgU64(&pVM->tm.s.u64VirtualSync, pTimer->u64Expire);
+        ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSync, pTimer->u64Expire);
+        ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, false);

         /* fire */
@@ -2032,5 +2045,5 @@
     /* Let the time run a little bit while we were busy running timers(?). */
     uint64_t u64Elapsed;
-#define MAX_ELAPSED 30000 /* ns */
+#define MAX_ELAPSED 30000U /* ns */
     if (offSlack > MAX_ELAPSED)
         u64Elapsed = 0;
@@ -2059,5 +2072,5 @@
             /* stop */
             STAM_PROFILE_ADV_STOP(&pVM->tm.s.StatVirtualSyncCatchup, c);
-            ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
+            ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
             Log4(("TM: %RU64/%RU64: caught up\n", u64VirtualNow2 - offNew, offLag));
         }
@@ -2072,5 +2085,5 @@
                 {
                     STAM_COUNTER_INC(&pVM->tm.s.aStatVirtualSyncCatchupAdjust[i]);
-                    ASMAtomicXchgU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage, pVM->tm.s.aVirtualSyncCatchUpPeriods[i].u32Percentage);
+                    ASMAtomicWriteU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage, pVM->tm.s.aVirtualSyncCatchUpPeriods[i].u32Percentage);
                     Log4(("TM: %RU64/%RU64: adj %u%%\n", u64VirtualNow2 - offNew, offLag, pVM->tm.s.u32VirtualSyncCatchUpPercentage));
                 }
@@ -2082,6 +2095,6 @@
                 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGiveUp);
                 STAM_PROFILE_ADV_STOP(&pVM->tm.s.StatVirtualSyncCatchup, c);
-                ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
-                ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
+                ASMAtomicWriteU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
+                ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
                 Log4(("TM: %RU64/%RU64: give up %u%%\n", u64VirtualNow2 - offNew, offLag, pVM->tm.s.u32VirtualSyncCatchUpPercentage));
                 LogRel(("TM: Giving up catch-up attempt at a %RU64 ns lag; new total: %RU64 ns\n", offLag, offNew));
@@ -2099,6 +2112,6 @@
                 i++;
             STAM_COUNTER_INC(&pVM->tm.s.aStatVirtualSyncCatchupInitial[i]);
-            ASMAtomicXchgU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage, pVM->tm.s.aVirtualSyncCatchUpPeriods[i].u32Percentage);
-            ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncCatchUp, true);
+            ASMAtomicWriteU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage, pVM->tm.s.aVirtualSyncCatchUpPeriods[i].u32Percentage);
+            ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, true);
             Log4(("TM: %RU64/%RU64: catch-up %u%%\n", u64VirtualNow2 - offNew, offLag, pVM->tm.s.u32VirtualSyncCatchUpPercentage));
         }
@@ -2107,5 +2120,5 @@
             /* don't bother */
             STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGiveUpBeforeStarting);
-            ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
+            ASMAtomicWriteU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew);
             Log4(("TM: %RU64/%RU64: give up\n", u64VirtualNow2 - offNew, offLag));
             LogRel(("TM: Not bothering to attempt catching up a %RU64 ns lag; new total: %RU64\n", offLag, offNew));
@@ -2117,6 +2130,6 @@
          */
         Assert(!(offNew & RT_BIT_64(63)));
-        ASMAtomicXchgU64(&pVM->tm.s.offVirtualSync, offNew);
-        ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncTicking, true);
+        ASMAtomicWriteU64(&pVM->tm.s.offVirtualSync, offNew);
+        ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, true);
     }
 }
@@ -2307,5 +2320,5 @@
 {
     RTTimeNow(pTime);
-    RTTimeSpecSubNano(pTime, pVM->tm.s.offVirtualSync - pVM->tm.s.offVirtualSyncGivenUp);
+    RTTimeSpecSubNano(pTime, ASMAtomicReadU64(&pVM->tm.s.offVirtualSync) - ASMAtomicReadU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp));
     RTTimeSpecAddNano(pTime, pVM->tm.s.offUTC);
     return pTime;
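A note on the main pattern in this changeset: wherever the old value returned by an exchange was being discarded, ASMAtomicXchg* (a full read-modify-write swap) is replaced with ASMAtomicWrite* (a plain atomic store). The sketch below illustrates the difference using C11 <stdatomic.h> as a portable stand-in for the IPRT primitives; the IPRT names come from the diff above, everything else is illustrative and not VirtualBox code.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

static _Atomic uint64_t g_offVirtualSync; /* stand-in for pVM->tm.s.offVirtualSync */
static atomic_bool      g_fCatchUp;       /* stand-in for pVM->tm.s.fVirtualSyncCatchUp */

/* Old pattern: an atomic exchange is a bus-locked read-modify-write
 * (LOCK XCHG on x86) even when the returned previous value is thrown away. */
static void tmStopCatchUpXchg(uint64_t offNew)
{
    (void)atomic_exchange(&g_offVirtualSync, offNew); /* old value discarded */
    (void)atomic_exchange(&g_fCatchUp, false);
}

/* New pattern: a plain atomic store. Readers still observe the whole 64-bit
 * value or none of it, but no locked swap is paid for a result nobody reads. */
static void tmStopCatchUpWrite(uint64_t offNew)
{
    atomic_store_explicit(&g_offVirtualSync, offNew, memory_order_release);
    atomic_store_explicit(&g_fCatchUp, false, memory_order_release);
}

The same reasoning explains the new offSyncGivenUp local in tmR3TimerQueueRunVirtualSync: the volatile field is read once so all arithmetic in the function works from a single consistent snapshot, and the added Assert(offSyncGivenUp == pVM->tm.s.offVirtualSyncGivenUp) ties that to the locking rule stated in the new @remarks.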
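For readers unfamiliar with the catch-up logic being touched here: while catching up, each elapsed slice of real virtual time shrinks the lag off by u32VirtualSyncCatchUpPercentage percent of that slice, so the virtual sync clock (u64VirtualNow - off) ticks at (100 + percentage)% speed until off reaches offSyncGivenUp. A minimal worked sketch, with all numbers invented for illustration; only the u64Sub formula comes from the diff, and the real ASMMultU64ByU32DivByU32 also guards against 64-bit overflow, which this plain expression does not:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t off        = 5000000; /* 5 ms behind the virtual clock */
    uint64_t offGivenUp = 1000000; /* 1 ms of lag permanently written off */
    uint32_t u32Pct     = 25;      /* run the sync clock at 125% */
    uint64_t u64Delta   = 1000000; /* 1 ms elapsed since the last update */

    /* Mirrors u64Sub = ASMMultU64ByU32DivByU32(u64Delta, u32Pct, 100). */
    uint64_t u64Sub = u64Delta * u32Pct / 100; /* 250000 ns */

    if (off > u64Sub + offGivenUp)
        off -= u64Sub;    /* still behind: off becomes 4750000 ns */
    else
        off = offGivenUp; /* caught up; the caller then clears fVirtualSyncCatchUp */

    printf("off=%llu u64Sub=%llu\n", (unsigned long long)off, (unsigned long long)u64Sub);
    return 0;
}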