Changeset 5509 in vbox
- Timestamp: Oct 26, 2007 11:20:37 AM
- svn:sync-xref-src-repo-rev: 25713
- Location: trunk
- Files: 3 edited
trunk/include/VBox/tm.h
(changed from r5167 to r5509)

 /**
+ * Finalizes the TM initialization.
+ *
+ * @returns VBox status code.
+ * @param pVM The VM to operate on.
+ */
+TMR3DECL(int) TMR3InitFinalize(PVM pVM);
+
+/**
  * Applies relocations to data and code managed by this
  * component. This function will be called at init and
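For context, the header change introduces a two-phase init entry point: TMR3Init runs early, while TMR3InitFinalize runs once the components it depends on are available. Below is a minimal sketch of that split using hypothetical names (DEMOCOMPONENT, DemoInit, DemoInitFinalize); only the Init/InitFinalize convention itself comes from the changeset.

/* Hypothetical sketch of a two-phase init: early init sets safe defaults,
 * late init resolves what could not be resolved earlier. Not VirtualBox API. */
#include <stdio.h>

typedef struct DEMOCOMPONENT
{
    void (*pfnWorker)(void);   /* resolved late, like pfnVirtualGetRawR0/GC */
    int   fReady;
} DEMOCOMPONENT;

static void demoWorker(void) { printf("worker running\n"); }

/* Early init: safe defaults only. */
static int DemoInit(DEMOCOMPONENT *pThis)
{
    pThis->pfnWorker = NULL;
    pThis->fReady    = 0;
    return 0;                   /* 0 plays the role of VINF_SUCCESS here */
}

/* Late init: now it is safe to resolve the worker. */
static int DemoInitFinalize(DEMOCOMPONENT *pThis)
{
    pThis->pfnWorker = demoWorker;
    pThis->fReady    = 1;
    return 0;
}

int main(void)
{
    DEMOCOMPONENT Demo;
    if (DemoInit(&Demo) == 0 && DemoInitFinalize(&Demo) == 0)
        Demo.pfnWorker();
    return 0;
}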
trunk/src/VBox/VMM/TM.cpp
(changed from r5505 to r5509)

     pVM->tm.s.VirtualGetRawDataR3.pfnBad = tmVirtualNanoTSBad;
     pVM->tm.s.VirtualGetRawDataR3.pfnRediscover = tmVirtualNanoTSRediscover;
+    if (ASMCpuId_EDX(1) & X86_CPUID_FEATURE_EDX_SSE2)
+    {
+        if (g_pSUPGlobalInfoPage->u32Mode == SUPGIPMODE_SYNC_TSC)
+            pVM->tm.s.pfnVirtualGetRawR3 = RTTimeNanoTSLFenceSync;
+        else
+            pVM->tm.s.pfnVirtualGetRawR3 = RTTimeNanoTSLFenceAsync;
+    }
+    else
+    {
+        if (g_pSUPGlobalInfoPage->u32Mode == SUPGIPMODE_SYNC_TSC)
+            pVM->tm.s.pfnVirtualGetRawR3 = RTTimeNanoTSLegacySync;
+        else
+            pVM->tm.s.pfnVirtualGetRawR3 = RTTimeNanoTSLegacyAsync;
+    }

     pVM->tm.s.VirtualGetRawDataGC.pu64Prev = MMHyperR3ToGC(pVM, (void *)&pVM->tm.s.u64VirtualRawPrev);
-#if 0 /* too early */
-    rc = PDMR3GetSymbolGCLazy(pVM, NULL, "tmVirtualNanoTSBad", &pVM->tm.s.VirtualGetRawDataGC.pfnBad);
-    AssertRCReturn(rc, rc);
-    rc = PDMR3GetSymbolGCLazy(pVM, NULL, "tmVirtualNanoTSRediscover", &pVM->tm.s.VirtualGetRawDataGC.pfnRediscover);
-    AssertRCReturn(rc, rc);
-#endif
-
     pVM->tm.s.VirtualGetRawDataR0.pu64Prev = MMHyperR3ToR0(pVM, (void *)&pVM->tm.s.u64VirtualRawPrev);
     AssertReturn(pVM->tm.s.VirtualGetRawDataR0.pu64Prev, VERR_INTERNAL_ERROR);
-    rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "tmVirtualNanoTSBad", &pVM->tm.s.VirtualGetRawDataR0.pfnBad);
-    AssertRCReturn(rc, rc);
-    rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "tmVirtualNanoTSRediscover", &pVM->tm.s.VirtualGetRawDataR0.pfnRediscover);
-    AssertRCReturn(rc, rc);
-
-    if (ASMCpuId_EDX(1) & X86_CPUID_FEATURE_EDX_SSE2)
-    {
-        if (g_pSUPGlobalInfoPage->u32Mode == SUPGIPMODE_SYNC_TSC)
-        {
-            pVM->tm.s.pfnVirtualGetRawR3 = RTTimeNanoTSLFenceSync;
-            rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "RTTimeNanoTSLFenceSync", &pVM->tm.s.pfnVirtualGetRawR0);
-            AssertRCReturn(rc, rc);
-        }
-        else
-        {
-            pVM->tm.s.pfnVirtualGetRawR3 = RTTimeNanoTSLFenceAsync;
-            rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "RTTimeNanoTSLFenceAsync", &pVM->tm.s.pfnVirtualGetRawR0);
-            AssertRCReturn(rc, rc);
-        }
-    }
-    else
-    {
-        if (g_pSUPGlobalInfoPage->u32Mode == SUPGIPMODE_SYNC_TSC)
-        {
-            pVM->tm.s.pfnVirtualGetRawR3 = RTTimeNanoTSLegacySync;
-            rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "RTTimeNanoTSLegacySync", &pVM->tm.s.pfnVirtualGetRawR0);
-            AssertRCReturn(rc, rc);
-        }
-        else
-        {
-            pVM->tm.s.pfnVirtualGetRawR3 = RTTimeNanoTSLegacyAsync;
-            rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "RTTimeNanoTSLegacyAsync", &pVM->tm.s.pfnVirtualGetRawR0);
-            AssertRCReturn(rc, rc);
-        }
-    }
+    /* The rest is done in TMR3InitFinalize since it's too early to call PDM. */

     /*
…

 /**
- * Applies relocations to data and code managed by this
- * component. This function will be called at init and
- * whenever the VMM need to relocate it self inside the GC.
- *
- * @param pVM The VM.
- * @param offDelta Relocation delta relative to old location.
- */
-TMR3DECL(void) TMR3Relocate(PVM pVM, RTGCINTPTR offDelta)
+ * Finalizes the TM initialization.
+ *
+ * @returns VBox status code.
+ * @param pVM The VM to operate on.
+ */
+TMR3DECL(int) TMR3InitFinalize(PVM pVM)
 {
     int rc;
-    LogFlow(("TMR3Relocate\n"));
-
-    pVM->tm.s.pvGIPGC = MMHyperR3ToGC(pVM, pVM->tm.s.pvGIPR3);
-    pVM->tm.s.paTimerQueuesGC = MMHyperR3ToGC(pVM, pVM->tm.s.paTimerQueuesR3);
-    pVM->tm.s.paTimerQueuesR0 = MMHyperR3ToR0(pVM, pVM->tm.s.paTimerQueuesR3);
-
-    pVM->tm.s.VirtualGetRawDataGC.pu64Prev = MMHyperR3ToGC(pVM, (void *)&pVM->tm.s.u64VirtualRawPrev);
-    AssertFatal(pVM->tm.s.VirtualGetRawDataGC.pu64Prev);
+
     rc = PDMR3GetSymbolGCLazy(pVM, NULL, "tmVirtualNanoTSBad", &pVM->tm.s.VirtualGetRawDataGC.pfnBad);
-    AssertFatalRC(rc);
+    AssertRCReturn(rc, rc);
     rc = PDMR3GetSymbolGCLazy(pVM, NULL, "tmVirtualNanoTSRediscover", &pVM->tm.s.VirtualGetRawDataGC.pfnRediscover);
-    AssertFatalRC(rc);
-
+    AssertRCReturn(rc, rc);
     if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLFenceSync)
         rc = PDMR3GetSymbolGCLazy(pVM, NULL, "RTTimeNanoTSLFenceSync", &pVM->tm.s.pfnVirtualGetRawGC);
…
     else
         AssertFatalFailed();
+    AssertRCReturn(rc, rc);
+
+    rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "tmVirtualNanoTSBad", &pVM->tm.s.VirtualGetRawDataR0.pfnBad);
+    AssertRCReturn(rc, rc);
+    rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "tmVirtualNanoTSRediscover", &pVM->tm.s.VirtualGetRawDataR0.pfnRediscover);
+    AssertRCReturn(rc, rc);
+    if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLFenceSync)
+        rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "RTTimeNanoTSLFenceSync", &pVM->tm.s.pfnVirtualGetRawR0);
+    else if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLFenceAsync)
+        rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "RTTimeNanoTSLFenceAsync", &pVM->tm.s.pfnVirtualGetRawR0);
+    else if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLegacySync)
+        rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "RTTimeNanoTSLegacySync", &pVM->tm.s.pfnVirtualGetRawR0);
+    else if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLegacyAsync)
+        rc = PDMR3GetSymbolR0Lazy(pVM, NULL, "RTTimeNanoTSLegacyAsync", &pVM->tm.s.pfnVirtualGetRawR0);
+    else
+        AssertFatalFailed();
+    AssertRCReturn(rc, rc);
+
+    return VINF_SUCCESS;
+}
+
+
+/**
+ * Applies relocations to data and code managed by this
+ * component. This function will be called at init and
+ * whenever the VMM need to relocate it self inside the GC.
+ *
+ * @param pVM The VM.
+ * @param offDelta Relocation delta relative to old location.
+ */
+TMR3DECL(void) TMR3Relocate(PVM pVM, RTGCINTPTR offDelta)
+{
+    int rc;
+    LogFlow(("TMR3Relocate\n"));
+
+    pVM->tm.s.pvGIPGC = MMHyperR3ToGC(pVM, pVM->tm.s.pvGIPR3);
+    pVM->tm.s.paTimerQueuesGC = MMHyperR3ToGC(pVM, pVM->tm.s.paTimerQueuesR3);
+    pVM->tm.s.paTimerQueuesR0 = MMHyperR3ToR0(pVM, pVM->tm.s.paTimerQueuesR3);
+
+    pVM->tm.s.VirtualGetRawDataGC.pu64Prev = MMHyperR3ToGC(pVM, (void *)&pVM->tm.s.u64VirtualRawPrev);
+    AssertFatal(pVM->tm.s.VirtualGetRawDataGC.pu64Prev);
+    rc = PDMR3GetSymbolGCLazy(pVM, NULL, "tmVirtualNanoTSBad", &pVM->tm.s.VirtualGetRawDataGC.pfnBad);
+    AssertFatalRC(rc);
+    rc = PDMR3GetSymbolGCLazy(pVM, NULL, "tmVirtualNanoTSRediscover", &pVM->tm.s.VirtualGetRawDataGC.pfnRediscover);
+    AssertFatalRC(rc);
+
+    if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLFenceSync)
+        rc = PDMR3GetSymbolGCLazy(pVM, NULL, "RTTimeNanoTSLFenceSync", &pVM->tm.s.pfnVirtualGetRawGC);
+    else if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLFenceAsync)
+        rc = PDMR3GetSymbolGCLazy(pVM, NULL, "RTTimeNanoTSLFenceAsync", &pVM->tm.s.pfnVirtualGetRawGC);
+    else if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLegacySync)
+        rc = PDMR3GetSymbolGCLazy(pVM, NULL, "RTTimeNanoTSLegacySync", &pVM->tm.s.pfnVirtualGetRawGC);
+    else if (pVM->tm.s.pfnVirtualGetRawR3 == RTTimeNanoTSLegacyAsync)
+        rc = PDMR3GetSymbolGCLazy(pVM, NULL, "RTTimeNanoTSLegacyAsync", &pVM->tm.s.pfnVirtualGetRawGC);
+    else
+        AssertFatalFailed();
+    AssertFatalRC(rc);

     /*
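The heart of the TM.cpp change is picking one of four nanosecond-timestamp workers: SSE2 support selects the LFENCE family over the legacy family, and the GIP mode (whether the TSC is synchronous across CPUs) selects Sync over Async; the symbol lookups for the R0 and GC contexts are then deferred to TMR3InitFinalize. The sketch below reproduces just that two-by-two selection with stand-in workers; demoPickNanoTSWorker and the demo* functions are hypothetical, while the RTTimeNanoTS* names, ASMCpuId_EDX, and SUPGIPMODE_SYNC_TSC mentioned in the comments are the ones appearing in the diff.

/* Minimal sketch of the ring-3 worker selection, under the assumption that
 * two booleans stand in for the CPUID and GIP checks done in TM.cpp. */
#include <stdint.h>
#include <stdio.h>

typedef uint64_t (*PFNNANOTS)(void);

static uint64_t demoLFenceSync(void)  { return 1; }  /* stands in for RTTimeNanoTSLFenceSync */
static uint64_t demoLFenceAsync(void) { return 2; }  /* stands in for RTTimeNanoTSLFenceAsync */
static uint64_t demoLegacySync(void)  { return 3; }  /* stands in for RTTimeNanoTSLegacySync */
static uint64_t demoLegacyAsync(void) { return 4; }  /* stands in for RTTimeNanoTSLegacyAsync */

/* Pick the worker the same way the changeset does, from two booleans. */
static PFNNANOTS demoPickNanoTSWorker(int fSse2, int fSyncTsc)
{
    if (fSse2)
        return fSyncTsc ? demoLFenceSync : demoLFenceAsync;
    return fSyncTsc ? demoLegacySync : demoLegacyAsync;
}

int main(void)
{
    /* In TM.cpp these flags come from ASMCpuId_EDX(1) & X86_CPUID_FEATURE_EDX_SSE2
     * and g_pSUPGlobalInfoPage->u32Mode == SUPGIPMODE_SYNC_TSC. */
    PFNNANOTS pfn = demoPickNanoTSWorker(1 /*fSse2*/, 0 /*fSyncTsc*/);
    printf("selected worker returns %llu\n", (unsigned long long)pfn());
    return 0;
}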
trunk/src/VBox/VMM/VM.cpp
(changed from r5119 to r5509)

         rc = SELMR3InitFinalize(pVM);
     if (VBOX_SUCCESS(rc))
+        rc = TMR3InitFinalize(pVM);
+    if (VBOX_SUCCESS(rc))
         rc = VMMR3InitFinalize(pVM);
     if (VBOX_SUCCESS(rc))
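The VM.cpp hunk slots TMR3InitFinalize into the existing VBOX_SUCCESS-guarded chain, so each finalize step runs only if every earlier one succeeded and the first failure status is the one that propagates. A minimal sketch of that chaining style, with hypothetical component names and a stand-in DEMO_SUCCESS macro:

/* Sketch of status-chained init: nothing here is VirtualBox API, only the
 * if-guarded chaining pattern mirrors the changeset. */
#include <stdio.h>

static int DemoSelmInitFinalize(void) { return 0; }
static int DemoTmInitFinalize(void)   { return 0; }
static int DemoVmmInitFinalize(void)  { return 0; }

#define DEMO_SUCCESS(rc) ((rc) >= 0)   /* stand-in for VBOX_SUCCESS */

int main(void)
{
    int rc = DemoSelmInitFinalize();
    if (DEMO_SUCCESS(rc))
        rc = DemoTmInitFinalize();     /* the step this changeset inserts */
    if (DEMO_SUCCESS(rc))
        rc = DemoVmmInitFinalize();
    printf("init chain rc=%d\n", rc);
    return DEMO_SUCCESS(rc) ? 0 : 1;
}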