Timestamp: May 29, 2007 11:22:57 AM (18 years ago)
Location: trunk/src/VBox/VMM
Files: 3 edited
Legend: Unmodified | Added | Removed
trunk/src/VBox/VMM/TM.cpp
r2904 r2908 1408 1408 const uint64_t u64VirtualNow = TMVirtualGetEx(pVM, false /* don't check timers */); 1409 1409 uint64_t u64Now; 1410 uint64_t off = 0, u64Delta = 0, u64Sub = 0; /* debugging - to be removed */1411 bool fWasInCatchup = false; /* debugging - to be removed */1412 bool fWasTicking = pVM->tm.s.fVirtualSyncTicking; /* debugging - to be removed*/1413 1410 if (!pVM->tm.s.fVirtualSyncTicking) 1414 1411 { … … 1420 1417 { 1421 1418 /* Calc 'now'. (update order doesn't really matter here) */ 1422 /*uint64_t*/off = pVM->tm.s.offVirtualSync;1419 uint64_t off = pVM->tm.s.offVirtualSync; 1423 1420 if (pVM->tm.s.fVirtualSyncCatchUp) 1424 1421 { 1425 fWasInCatchup = pVM->tm.s.fVirtualSyncCatchUp; /* debugging - to be removed */ 1426 /*uint64_t*/ u64Delta = u64VirtualNow - pVM->tm.s.u64VirtualSyncCatchUpPrev; 1422 uint64_t u64Delta = u64VirtualNow - pVM->tm.s.u64VirtualSyncCatchUpPrev; 1427 1423 if (RT_LIKELY(!(u64Delta >> 32))) 1428 1424 { 1429 /*uint64_t*/u64Sub = ASMMultU64ByU32DivByU32(u64Delta, pVM->tm.s.u32VirtualSyncCatchUpPercentage, 100);1425 uint64_t u64Sub = ASMMultU64ByU32DivByU32(u64Delta, pVM->tm.s.u32VirtualSyncCatchUpPercentage, 100); 1430 1426 if (off > u64Sub + pVM->tm.s.offVirtualSyncGivenUp) 1431 1427 { … … 1464 1460 1465 1461 /* assert sanity */ 1466 if (RT_UNLIKELY( !(u64Now <= u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp)1467 || !(u64Max <= u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp)1468 || !(u64Now <= u64Max)))1469 {1470 LogRel(("TM: Add the following to defect #1414:\n"1471 " u64Now=%016RX64\n"1472 " u64Max=%016RX64\n"1473 " pNext->u64Expire=%016RX64\n"1474 " u64VirtualSync=%016RX64\n"1475 " u64VirtualNow=%016RX64\n"1476 " off=%016RX64\n"1477 " u64Delta=%016RX64\n"1478 " u64Sub=%016RX64\n"1479 " offVirtualSync=%016RX64\n"1480 " offVirtualSyncGivenUp=%016RX64\n"1481 " u64VirtualSyncCatchUpPrev=%016RX64\n"1482 " u64VirtualSyncStoppedTS=%016RX64\n"1483 "u32VirtualSyncCatchUpPercentage=%08RX32\n"1484 " fVirtualSyncTicking=%RTbool 
(prev=%RTbool)\n"1485 " fVirtualSyncCatchUp=%RTbool (prev=%RTbool)\n",1486 u64Now,1487 u64Max,1488 pNext->u64Expire,1489 pVM->tm.s.u64VirtualSync,1490 u64VirtualNow,1491 off,1492 u64Delta,1493 u64Sub,1494 pVM->tm.s.offVirtualSync,1495 pVM->tm.s.offVirtualSyncGivenUp,1496 pVM->tm.s.u64VirtualSyncCatchUpPrev,1497 pVM->tm.s.u64VirtualSyncStoppedTS,1498 pVM->tm.s.u32VirtualSyncCatchUpPercentage,1499 pVM->tm.s.fVirtualSyncTicking, fWasTicking,1500 pVM->tm.s.fVirtualSyncCatchUp, fWasInCatchup));1501 1462 Assert(u64Now <= u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp); 1502 1463 Assert(u64Max <= u64VirtualNow - pVM->tm.s.offVirtualSyncGivenUp); 1503 1464 Assert(u64Now <= u64Max); 1504 }1505 1465 1506 1466 /* … … 1572 1532 const uint64_t u64VirtualNow2 = TMVirtualGetEx(pVM, false /* don't check timers */); 1573 1533 Assert(u64VirtualNow2 >= u64VirtualNow); 1574 if (RT_UNLIKELY(u64VirtualNow2 < u64VirtualNow)) LogRel(("TM: u64VirtualNow2=%#RX64 < u64VirtualNow=%#RX64\n", u64VirtualNow2, u64VirtualNow)); /* debugging - to be removed. */1575 1534 AssertMsg(pVM->tm.s.u64VirtualSync >= u64Now, ("%RU64 < %RU64\n", pVM->tm.s.u64VirtualSync, u64Now)); 1576 1535 const uint64_t offSlack = pVM->tm.s.u64VirtualSync - u64Now; 1577 if (RT_UNLIKELY(offSlack & BIT64(63))) LogRel(("TM: pVM->tm.s.u64VirtualSync=%#RX64 - u64Now=%#RX64 -> %#RX64\n", pVM->tm.s.u64VirtualSync, u64Now, offSlack)); /* debugging - to be removed. */1578 1536 STAM_STATS({ 1579 1537 if (offSlack) … … 1663 1621 { 1664 1622 /* don't bother */ 1665 if (offLag & BIT64(63)) //debugging - remove.1666 LogRel(("TM: offLag is negative! 
offLag=%RI64 (%#RX64) offNew=%#RX64 u64Elapsed=%#RX64 offSlack=%#RX64 u64VirtualNow2=%#RX64 u64VirtualNow=%#RX64 u64VirtualSync=%#RX64 offVirtualSyncGivenUp=%#RX64 u64Now=%#RX64 u64Max=%#RX64\n",1667 offLag, offLag, offNew, u64Elapsed, offSlack, u64VirtualNow2, u64VirtualNow, pVM->tm.s.u64VirtualSync, pVM->tm.s.offVirtualSyncGivenUp, u64Now, u64Max));1668 1623 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGiveUpBeforeStarting); 1669 1624 ASMAtomicXchgU64((uint64_t volatile *)&pVM->tm.s.offVirtualSyncGivenUp, offNew); -
trunk/src/VBox/VMM/TMInternal.h
r2869 r2908 349 349 * Thus the current lag is offVirtualSync - offVirtualSyncGivenUp. */ 350 350 uint64_t offVirtualSyncGivenUp; 351 uint64_t volatile u64VirtualSyncStoppedTS;/**< for debugging - remove later */352 351 /** The TMCLOCK_VIRTUAL at the previous TMVirtualGetSync call when catch-up is active. */ 353 352 uint64_t volatile u64VirtualSyncCatchUpPrev; -
trunk/src/VBox/VMM/VMMAll/TMAllVirtual.cpp
r2885 r2908 210 210 return u64NanoTS; 211 211 } 212 213 212 214 213 … … 349 348 Assert(pVM->tm.s.fVirtualTicking); 350 349 u64 = tmVirtualGetRaw(pVM); 351 const uint64_t u64VirtualNow = u64;352 350 if ( fCheckTimers 353 351 && !VM_FF_ISSET(pVM, VM_FF_TIMER) … … 428 426 ASMAtomicXchgU64(&pVM->tm.s.u64VirtualSync, u64); 429 427 ASMAtomicXchgBool(&pVM->tm.s.fVirtualSyncTicking, false); 430 pVM->tm.s.u64VirtualSyncStoppedTS = u64VirtualNow;431 428 if ( fCheckTimers 432 429 && !VM_FF_ISSET(pVM, VM_FF_TIMER))
Note: See TracChangeset for help on using the changeset viewer.