VirtualBox

Changeset 73250 in vbox for trunk/src/VBox/VMM/VMMAll


Timestamp: Jul 19, 2018 5:57:31 PM (7 years ago)
Author: vboxsync
svn:sync-xref-src-repo-rev: 123856
Message:

PGM: Working on eliminating PGMMODEDATA and the corresponding PGMCPU section so we can do mode switching in ring-0. This fourth part deals with the shadow+guest paging pointers and finally removes the PGMMODEDATA type. bugref:9044
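The hunks below all follow one pattern: instead of resolving the shadow+guest ("both") handler through the PGM_BTH_PFN() macro and the per-VCPU mode data that PGMMODEDATA used to populate, each call site reads pVCpu->pgm.s.idxBothModeData, validates it, and dispatches through the global g_aPgmBothModeData function-pointer table. The following is a minimal, self-contained sketch of that dispatch shape; the MY* types, names and error code are simplified stand-ins for illustration, not the real PGM structures.

    #include <stddef.h>
    #include <stdint.h>

    #define MY_ELEMENTS(a)   (sizeof(a) / sizeof((a)[0]))
    #define MY_ERR_MODE_IPE  (-22)   /* stand-in for VERR_PGM_MODE_IPE */

    typedef struct MYVCPU { uintptr_t idxBothModeData; } MYVCPU;

    /* One slot per shadow+guest paging mode combination. */
    typedef struct MYBOTHMODEDATA
    {
        int (*pfnTrap0eHandler)(MYVCPU *pVCpu, uint32_t uErr);
    } MYBOTHMODEDATA;

    static int myNestedTrap0e(MYVCPU *pVCpu, uint32_t uErr)
    {
        (void)pVCpu; (void)uErr;
        return 0;
    }

    /* Global, read-only table; unused combinations keep NULL members. */
    static const MYBOTHMODEDATA g_aMyBothModeData[] =
    {
        { NULL },            /* index 0: invalid / not yet initialized  */
        { myNestedTrap0e },  /* index 1: some shadow+guest combination  */
    };

    static int myTrap0eHandler(MYVCPU *pVCpu, uint32_t uErr)
    {
        /* Validate the per-VCPU index and the member pointer before the
         * indirect call, mirroring the AssertReturn() checks in the diff. */
        uintptr_t const idxBth = pVCpu->idxBothModeData;
        if (idxBth >= MY_ELEMENTS(g_aMyBothModeData))
            return MY_ERR_MODE_IPE;
        if (!g_aMyBothModeData[idxBth].pfnTrap0eHandler)
            return MY_ERR_MODE_IPE;
        return g_aMyBothModeData[idxBth].pfnTrap0eHandler(pVCpu, uErr);
    }

Keeping only an integer index in the per-VCPU state, rather than context-specific function pointers, presumably lets ring-3, ring-0 and raw-mode each resolve the handlers from their own instance of the table, which fits the stated goal of doing mode switching in ring-0.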

File: 1 edited

Legend:
    ' '  Unmodified
    '+'  Added
    '-'  Removed
  • trunk/src/VBox/VMM/VMMAll/PGMAll.cpp

--- r73249
+++ r73250

@@ -898,6 +898,9 @@
      * Call the worker.
      */
+    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
+    AssertReturn(g_aPgmBothModeData[idxBth].pfnTrap0eHandler, VERR_PGM_MODE_IPE);
     bool fLockTaken = false;
-    int rc = PGM_BTH_PFN(Trap0eHandler, pVCpu)(pVCpu, uErr, pRegFrame, pvFault, &fLockTaken);
+    int rc = g_aPgmBothModeData[idxBth].pfnTrap0eHandler(pVCpu, uErr, pRegFrame, pvFault, &fLockTaken);
     if (fLockTaken)
     {
     
@@ -953,5 +956,10 @@
 {
     STAM_PROFILE_START(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,Prefetch), a);
-    int rc = PGM_BTH_PFN(PrefetchPage, pVCpu)(pVCpu, GCPtrPage);
+
+    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
+    AssertReturn(g_aPgmBothModeData[idxBth].pfnPrefetchPage, VERR_PGM_MODE_IPE);
+    int rc = g_aPgmBothModeData[idxBth].pfnPrefetchPage(pVCpu, GCPtrPage);
+
     STAM_PROFILE_STOP(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,Prefetch), a);
     AssertMsg(rc == VINF_SUCCESS || rc == VINF_PGM_SYNC_CR3 || RT_FAILURE(rc), ("rc=%Rrc\n", rc));
     
@@ -1095,6 +1103,9 @@
              */
             Assert(X86_TRAP_PF_RW == X86_PTE_RW && X86_TRAP_PF_US == X86_PTE_US);
-            uint32_t uErr = fAccess & (X86_TRAP_PF_RW | X86_TRAP_PF_US);
-            rc = PGM_BTH_PFN(VerifyAccessSyncPage, pVCpu)(pVCpu, Addr, fPageGst, uErr);
+            uint32_t  const uErr   = fAccess & (X86_TRAP_PF_RW | X86_TRAP_PF_US);
+            uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+            AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
+            AssertReturn(g_aPgmBothModeData[idxBth].pfnVerifyAccessSyncPage, VERR_PGM_MODE_IPE);
+            rc = g_aPgmBothModeData[idxBth].pfnVerifyAccessSyncPage(pVCpu, Addr, fPageGst, uErr);
             if (rc != VINF_SUCCESS)
                 return rc;
     
@@ -1205,5 +1216,10 @@
     STAM_PROFILE_START(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,InvalidatePage), a);
     pgmLock(pVM);
-    rc = PGM_BTH_PFN(InvalidatePage, pVCpu)(pVCpu, GCPtrPage);
+
+    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+    AssertReturnStmt(idxBth < RT_ELEMENTS(g_aPgmBothModeData), pgmUnlock(pVM), VERR_PGM_MODE_IPE);
+    AssertReturnStmt(g_aPgmBothModeData[idxBth].pfnInvalidatePage, pgmUnlock(pVM), VERR_PGM_MODE_IPE);
+    rc = g_aPgmBothModeData[idxBth].pfnInvalidatePage(pVCpu, GCPtrPage);
+
     pgmUnlock(pVM);
     STAM_PROFILE_STOP(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,InvalidatePage), a);
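One detail worth noting in the InvalidatePage hunk above: the checks run while the PGM lock is held, so the changeset uses AssertReturnStmt() with pgmUnlock(pVM) so that the lock is released on the failure path. Below is a minimal sketch of that shape under the same caveat as before: the MY* macro, lock, and function are hypothetical stand-ins, not the real IPRT/PGM APIs.

    #include <stdint.h>

    #define MY_ERR_MODE_IPE  (-22)   /* stand-in for VERR_PGM_MODE_IPE */

    /* Stand-in for AssertReturnStmt(expr, stmt, rcRet): if the check fails,
     * execute the cleanup statement, then return the status code. */
    #define MY_ASSERT_RETURN_STMT(expr, stmt, rcRet) \
        do { \
            if (!(expr)) \
            { \
                stmt; \
                return (rcRet); \
            } \
        } while (0)

    typedef struct MYLOCK { int cHolds; } MYLOCK;
    static void myLock(MYLOCK *pLock)   { pLock->cHolds++; }
    static void myUnlock(MYLOCK *pLock) { pLock->cHolds--; }

    static int myInvalidatePage(MYLOCK *pLock, uintptr_t idxBth, uintptr_t cModes)
    {
        myLock(pLock);
        /* The failure path must not leak the lock, hence the unlock statement. */
        MY_ASSERT_RETURN_STMT(idxBth < cModes, myUnlock(pLock), MY_ERR_MODE_IPE);
        /* ... dispatch through the mode-data table, then unlock normally ... */
        myUnlock(pLock);
        return 0;
    }

The final hunk handles the same concern differently: it performs the checks before pgmLock(pVM), so the plain AssertReturn() form is enough there.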
     
@@ -2558,7 +2574,12 @@
     if (pVCpu->pgm.s.GCPhysCR3 != GCPhysCR3)
     {
-        RTGCPHYS GCPhysOldCR3 = pVCpu->pgm.s.GCPhysCR3;
-        pVCpu->pgm.s.GCPhysCR3  = GCPhysCR3;
-        rc = PGM_BTH_PFN(MapCR3, pVCpu)(pVCpu, GCPhysCR3);
+        RTGCPHYS const GCPhysOldCR3 = pVCpu->pgm.s.GCPhysCR3;
+
+        uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+        AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
+        AssertReturn(g_aPgmBothModeData[idxBth].pfnMapCR3, VERR_PGM_MODE_IPE);
+
+        pVCpu->pgm.s.GCPhysCR3 = GCPhysCR3;
+        rc = g_aPgmBothModeData[idxBth].pfnMapCR3(pVCpu, GCPhysCR3);
         if (RT_LIKELY(rc == VINF_SUCCESS))
         {
     
@@ -2662,6 +2683,11 @@
     if (pVCpu->pgm.s.GCPhysCR3 != GCPhysCR3)
     {
+        uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+        AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
+        AssertReturn(g_aPgmBothModeData[idxBth].pfnMapCR3, VERR_PGM_MODE_IPE);
+
         pVCpu->pgm.s.GCPhysCR3 = GCPhysCR3;
-        rc = PGM_BTH_PFN(MapCR3, pVCpu)(pVCpu, GCPhysCR3);
+        rc = g_aPgmBothModeData[idxBth].pfnMapCR3(pVCpu, GCPhysCR3);
+
         AssertRCSuccess(rc); /* Assumes VINF_PGM_SYNC_CR3 doesn't apply to nested paging. */ /** @todo this isn't true for the mac, but we need hw to test/fix this. */
     }
     
@@ -2752,6 +2778,9 @@
         if (pVCpu->pgm.s.GCPhysCR3 != GCPhysCR3)
         {
+            uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+            AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
+            AssertReturn(g_aPgmBothModeData[idxBth].pfnMapCR3, VERR_PGM_MODE_IPE);
             pVCpu->pgm.s.GCPhysCR3 = GCPhysCR3;
-            rc = PGM_BTH_PFN(MapCR3, pVCpu)(pVCpu, GCPhysCR3);
+            rc = g_aPgmBothModeData[idxBth].pfnMapCR3(pVCpu, GCPhysCR3);
         }
 
     
@@ -2777,5 +2806,10 @@
      */
     STAM_PROFILE_START(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,SyncCR3), a);
-    rc = PGM_BTH_PFN(SyncCR3, pVCpu)(pVCpu, cr0, cr3, cr4, fGlobal);
+
+    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
+    AssertReturn(g_aPgmBothModeData[idxBth].pfnSyncCR3, VERR_PGM_MODE_IPE);
+    rc = g_aPgmBothModeData[idxBth].pfnSyncCR3(pVCpu, cr0, cr3, cr4, fGlobal);
+
     STAM_PROFILE_STOP(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,SyncCR3), a);
     AssertMsg(rc == VINF_SUCCESS || rc == VINF_PGM_SYNC_CR3 || RT_FAILURE(rc), ("rc=%Rrc\n", rc));
     
@@ -3419,7 +3453,13 @@
 {
     STAM_PROFILE_START(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,SyncCR3), a);
+
+    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
+    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), -VERR_PGM_MODE_IPE);
+    AssertReturn(g_aPgmBothModeData[idxBth].pfnAssertCR3, -VERR_PGM_MODE_IPE);
+
     pgmLock(pVM);
-    unsigned cErrors = PGM_BTH_PFN(AssertCR3, pVCpu)(pVCpu, cr3, cr4, 0, ~(RTGCPTR)0);
+    unsigned cErrors = g_aPgmBothModeData[idxBth].pfnAssertCR3(pVCpu, cr3, cr4, 0, ~(RTGCPTR)0);
     pgmUnlock(pVM);
+
     STAM_PROFILE_STOP(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,SyncCR3), a);
     return cErrors;