VirtualBox

Changeset 17178 in vbox for trunk


Ignore:
Timestamp:
Feb 26, 2009 5:23:42 PM (16 years ago)
Author:
vboxsync
svn:sync-xref-src-repo-rev:
43457
Message:

VBOX_WITH_PGMPOOL_PAGING_ONLY: Properly flush the TLB for PAE PDPT entries.

Location:
trunk/src/VBox/VMM/VMMAll
Files:
2 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMAll/PGMAll.cpp

    r17166 r17178  
    876876    int            rc;
    877877
    878 # if defined(IN_RC) && defined(VBOX_WITH_PGMPOOL_PAGING_ONLY)
    879     /* Make sure the dynamic pPdeDst mapping will not be reused during this function. */
    880     PGMDynLockHCPage(pVM, (uint8_t *)pPdpe);
    881 # endif
    882 
    883878    /* Allocate page directory if not present. */
    884879    if (    !pPdpe->n.u1Present
     
    889884        RTGCPTR64   GCPdPt;
    890885        PGMPOOLKIND enmKind;
     886
     887# if defined(IN_RC) && defined(VBOX_WITH_PGMPOOL_PAGING_ONLY)
     888        /* Make sure the dynamic pPdeDst mapping will not be reused during this function. */
     889        PGMDynLockHCPage(pVM, (uint8_t *)pPdpe);
     890# endif
    891891
    892892        if (fNestedPaging || !fPaging)
     
    925925        }
    926926        AssertRCReturn(rc, rc);
     927
     928        /* The PD was cached or created; hook it up now. */
     929        pPdpe->u |= pShwPage->Core.Key
     930                 | (pGstPdpe->u & ~(X86_PDPE_PG_MASK | X86_PDPE_AVL_MASK | X86_PDPE_PCD | X86_PDPE_PWT));
     931
     932# if defined(IN_RC) && defined(VBOX_WITH_PGMPOOL_PAGING_ONLY)
     933        /* In 32-bit PAE mode we *must* invalidate the TLB when changing a PDPT entry;
     934         * the CPU fetches them only during cr3 load, so any non-present PDPT will
     935         * continue to cause page faults.
     936         */
     936        ASMReloadCR3();
     937        PGMDynUnlockHCPage(pVM, (uint8_t *)pPdpe);
     938# endif
    927939    }
    928940    else
     
    930942        pShwPage = pgmPoolGetPage(pPool, pPdpe->u & X86_PDPE_PG_MASK);
    931943        AssertReturn(pShwPage, VERR_INTERNAL_ERROR);
    932     }
    933     /* The PD was cached or created; hook it up now. */
    934     pPdpe->u |= pShwPage->Core.Key
    935              | (pGstPdpe->u & ~(X86_PDPE_PG_MASK | X86_PDPE_AVL_MASK | X86_PDPE_PCD | X86_PDPE_PWT));
    936 
    937 # if defined(IN_RC) && defined(VBOX_WITH_PGMPOOL_PAGING_ONLY)
    938     PGMDynUnlockHCPage(pVM, (uint8_t *)pPdpe);
    939 # endif
     944
     945        Assert((pPdpe->u & X86_PDPE_PG_MASK) == pShwPage->Core.Key);
     946    }
    940947    *ppPD = (PX86PDPAE)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
    941948    return VINF_SUCCESS;
  • trunk/src/VBox/VMM/VMMAll/PGMAllPool.cpp

    r17177 r17178  
    32033203        case PGMPOOLKIND_PAE_PD3_FOR_32BIT_PD:
    32043204        case PGMPOOLKIND_PAE_PD_FOR_PAE_PD:
     3205#if defined(IN_RC) && defined(VBOX_WITH_PGMPOOL_PAGING_ONLY)
      3206            /* In 32-bit PAE mode we *must* invalidate the TLB when changing a PDPT entry;
      3207             * the CPU fetches them only during cr3 load, so any non-present PDPT will
      3208             * continue to cause page faults.
      3209             */
     3209            ASMReloadCR3();
     3210#endif
     3211            /* no break */
    32053212        case PGMPOOLKIND_PAE_PD_PHYS:
    32063213        case PGMPOOLKIND_PAE_PDPT_PHYS:
     
    32103217        case PGMPOOLKIND_64BIT_PDPT_FOR_PHYS:
    32113218        case PGMPOOLKIND_64BIT_PD_FOR_PHYS:
    3212 # if !defined(VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0) && !defined(VBOX_WITH_PGMPOOL_PAGING_ONLY)
     3219#if !defined(VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0) && !defined(VBOX_WITH_PGMPOOL_PAGING_ONLY)
    32133220        case PGMPOOLKIND_ROOT_PAE_PD:
    32143221#endif
Note: See TracChangeset for help on using the changeset viewer.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette