
Changeset 31170 in vbox for trunk/src/VBox/VMM/VMMAll


Timestamp: Jul 28, 2010 3:20:02 PM
Author: vboxsync
svn:sync-xref-src-repo-rev: 64156
Message: PGM: More inline cleanup (prep for tracking dynamic mappings in raw-mode).

Location: trunk/src/VBox/VMM/VMMAll
Files: 5 edited
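
The recurring edit in this changeset is that shadow page-pool pages are now translated through PGMPOOL_PAGE_2_PTR_V2, which takes the VCPU in addition to the VM, in line with the commit message's goal of tracking dynamic mappings in raw-mode. As rough orientation only, a macro pair of this shape could look like the sketch below; the real definitions are not part of this changeset and the inline helper name used here is an assumption:

    /* Hedged sketch, not the VirtualBox source: the _V2 variant threads the
     * per-VCPU context through so raw-mode / 2x4GB ring-0 builds can record
     * which dynamic mapping set the pool page lands in, while other builds
     * simply fall back to the VM-only translation.  pgmPoolMapPageV2Inlined
     * is an assumed helper name, used for illustration. */
    #if defined(IN_RC) || defined(VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0)
    # define PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pPage)  pgmPoolMapPageV2Inlined((pVM), (pVCpu), (pPage))
    #else
    # define PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pPage)  PGMPOOL_PAGE_2_PTR((pVM), (pPage))
    #endif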

  • trunk/src/VBox/VMM/VMMAll/PGMAll.cpp (r31167 → r31170)

         pgmPoolCacheUsed(pPool, pShwPage);
     }
-    *ppPD = (PX86PDPAE)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+    *ppPD = (PX86PDPAE)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
     return VINF_SUCCESS;
 }
…
     const unsigned iPdPt = (GCPtr >> X86_PDPT_SHIFT) & X86_PDPT_MASK_AMD64;
-    PX86PDPT  pPdpt = (PX86PDPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+    PX86PDPT  pPdpt = (PX86PDPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
     PX86PDPE  pPdpe = &pPdpt->a[iPdPt];
…
              | (uGstPdpe & ~(X86_PDPE_PG_MASK | X86_PDPE_AVL_MASK | X86_PDPE_PCD | X86_PDPE_PWT));

-    *ppPD = (PX86PDPAE)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+    *ppPD = (PX86PDPAE)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
     return VINF_SUCCESS;
 }
…
     const unsigned  iPdPt = (GCPtr >> X86_PDPT_SHIFT) & X86_PDPT_MASK_AMD64;
-    PCX86PDPT       pPdpt = *ppPdpt = (PX86PDPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+    PCX86PDPT       pPdpt = *ppPdpt = (PX86PDPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
     if (!pPdpt->a[iPdPt].n.u1Present)
         return VERR_PAGE_DIRECTORY_PTR_NOT_PRESENT;
…
     AssertReturn(pShwPage, VERR_INTERNAL_ERROR);

-    *ppPD = (PX86PDPAE)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+    *ppPD = (PX86PDPAE)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
     return VINF_SUCCESS;
 }
…
     Assert(PGMIsLockOwner(pVM));

-    pPml4 = (PEPTPML4)PGMPOOL_PAGE_2_PTR(pVM, pVCpu->pgm.s.CTX_SUFF(pShwPageCR3));
+    pPml4 = (PEPTPML4)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pVCpu->pgm.s.CTX_SUFF(pShwPageCR3));
     Assert(pPml4);
…
     const unsigned iPdPt = (GCPtr >> EPT_PDPT_SHIFT) & EPT_PDPT_MASK;
-    PEPTPDPT  pPdpt = (PEPTPDPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+    PEPTPDPT  pPdpt = (PEPTPDPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
     PEPTPDPTE pPdpe = &pPdpt->a[iPdPt];
…
     pPdpe->n.u1Execute  = 1;

-    *ppPD = (PEPTPD)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+    *ppPD = (PEPTPD)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
     return VINF_SUCCESS;
 }
…
             //Log(("PGMDynMapGCPage: GCPhys=%RGp pPage=%R[pgmpage]\n", GCPhys, pPage));
 #ifdef VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0
-            rc = pgmR0DynMapHCPageInlined(&pVM->pgm.s, PGM_PAGE_GET_HCPHYS(pPage), ppv);
+            rc = pgmR0DynMapHCPageInlined(VMMGetCpu(pVM), PGM_PAGE_GET_HCPHYS(pPage), ppv);
 #else
             rc = PGMDynMapHCPage(pVM, PGM_PAGE_GET_HCPHYS(pPage), ppv);
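
Besides the PGMPOOL_PAGE_2_PTR_V2 substitutions, the last hunk above changes pgmR0DynMapHCPageInlined to take the cross-context VMCPU (obtained with VMMGetCpu(pVM)) instead of a pointer to the VM-wide PGM state. A minimal sketch of the resulting ring-0 calling pattern, assuming the caller only has the VM handle; the wrapper function itself is hypothetical and not taken from the changeset:

    #ifdef VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0
    /* Hypothetical wrapper, for illustration only: map a guest page by its
     * host-physical address into the dynamic mapping area of the VCPU that
     * is making the call. */
    static int pgmSketchMapPageR0(PVM pVM, PPGMPAGE pPage, void **ppv)
    {
        PVMCPU pVCpu = VMMGetCpu(pVM);   /* resolve the calling thread's VCPU */
        return pgmR0DynMapHCPageInlined(pVCpu, PGM_PAGE_GET_HCPHYS(pPage), ppv);
    }
    #endif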

  • trunk/src/VBox/VMM/VMMAll/PGMAllBth.h (r31167 → r31170)

     Assert(pShwPde);

-    pPDDst             = (PX86PDPAE)PGMPOOL_PAGE_2_PTR(pVM, pShwPde);
+    pPDDst             = (PX86PDPAE)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPde);
     PX86PDEPAE pPdeDst = &pPDDst->a[iPDDst];
…
 # if 0 /* likely cause of a major performance regression; must be SyncPageWorkerTrackDeref then */
                 const unsigned iPTEDst = (GCPtrPage >> SHW_PT_SHIFT) & SHW_PT_MASK;
-                PSHWPT pPT = (PSHWPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+                PSHWPT pPT = (PSHWPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
                 if (pPT->a[iPTEDst].n.u1Present)
                 {
…
     Assert(pShwPde);

-    pPDDst             = (PX86PDPAE)PGMPOOL_PAGE_2_PTR(pVM, pShwPde);
+    pPDDst             = (PX86PDPAE)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPde);
     PX86PDEPAE pPdeDst = &pPDDst->a[iPDDst];
…
         if (PdeSrc.n.u1Accessed)
         {
-            PSHWPT pPTDst = (PSHWPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+            PSHWPT pPTDst = (PSHWPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
             if (!fBigPage)
             {
…
     PPGMPOOLPAGE  pShwPage = pgmPoolGetPage(pPool, PdeDst.u & SHW_PDE_PG_MASK);
-    PSHWPT        pPTDst   = (PSHWPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+    PSHWPT        pPTDst   = (PSHWPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);

     Assert(cPages == 1 || !(uErr & X86_TRAP_PF_P));
…
             if (pShwPage)
             {
-                PSHWPT      pPTDst   = (PSHWPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+                PSHWPT      pPTDst   = (PSHWPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
                 PSHWPTE     pPteDst  = &pPTDst->a[(GCPtrPage >> SHW_PT_SHIFT) & SHW_PT_MASK];
                 if (    pPteDst->n.u1Present
…
         if (pShwPage)
         {
-            PSHWPT      pPTDst   = (PSHWPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+            PSHWPT      pPTDst   = (PSHWPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
             PSHWPTE     pPteDst  = &pPTDst->a[(GCPtrPage >> SHW_PT_SHIFT) & SHW_PT_MASK];
             if (pPteDst->n.u1Present)    /** @todo Optimize accessed bit emulation? */
…
     Assert(pShwPde);

-    pPDDst  = (PX86PDPAE)PGMPOOL_PAGE_2_PTR(pVM, pShwPde);
+    pPDDst  = (PX86PDPAE)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPde);
     pPdeDst = &pPDDst->a[iPDDst];
…
         }
         if (rc == VINF_SUCCESS)
-            pPTDst = (PSHWPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+            pPTDst = (PSHWPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
         else if (rc == VINF_PGM_CACHED_PAGE)
         {
…
     Assert(pShwPde);

-    pPDDst  = (PX86PDPAE)PGMPOOL_PAGE_2_PTR(pVM, pShwPde);
+    pPDDst  = (PX86PDPAE)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPde);
     pPdeDst = &pPDDst->a[iPDDst];
…
     if (    rc == VINF_SUCCESS
         ||  rc == VINF_PGM_CACHED_PAGE)
-        pPTDst = (PSHWPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPage);
+        pPTDst = (PSHWPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPage);
     else
         AssertMsgFailedReturn(("rc=%Rrc\n", rc), VERR_INTERNAL_ERROR);
…
                         continue;
                     }
-                    const SHWPT *pPTDst = (const SHWPT *)PGMPOOL_PAGE_2_PTR(pVM, pPoolPage);
+                    const SHWPT *pPTDst = (const SHWPT *)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pPoolPage);

                     if (PdeDst.u & (X86_PDE4M_PWT | X86_PDE4M_PCD))

  • trunk/src/VBox/VMM/VMMAll/PGMAllMap.cpp (r31167 → r31170)

             case PGMMODE_32_BIT:
             {
-                PX86PD          pShw32BitPd = (PX86PD)PGMPOOL_PAGE_2_PTR(pVM, pShwPageCR3);
+                PX86PD          pShw32BitPd = (PX86PD)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPageCR3);
                 AssertFatal(pShw32BitPd);
…
                 const unsigned  iPdpt     = iOldPDE / 256;      /* iOldPDE * 2 / 512; iOldPDE is in 4 MB pages */
                 unsigned        iPaePde   = iOldPDE * 2 % 512;
-                PX86PDPT        pShwPdpt  = (PX86PDPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPageCR3);
+                PX86PDPT        pShwPdpt  = (PX86PDPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPageCR3);
                 PX86PDPAE       pShwPaePd = pgmShwGetPaePDPtr(pVCpu, pShwPdpt, (iPdpt << X86_PDPT_SHIFT));
…
             case PGMMODE_32_BIT:
             {
-                PCX86PD         pShw32BitPd = (PCX86PD)PGMPOOL_PAGE_2_PTR(pVM, pShwPageCR3);
+                PCX86PD         pShw32BitPd = (PCX86PD)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu2, pShwPageCR3);
                 AssertFatal(pShw32BitPd);
…
                 const unsigned  iPdpt     = iPDE / 256;         /* iPDE * 2 / 512; iPDE is in 4 MB pages */
                 unsigned        iPaePDE   = iPDE * 2 % 512;
-                PX86PDPT        pShwPdpt  = (PX86PDPT)PGMPOOL_PAGE_2_PTR(pVM, pShwPageCR3);
+                PX86PDPT        pShwPdpt  = (PX86PDPT)PGMPOOL_PAGE_2_PTR_V2(pVM, pVCpu, pShwPageCR3);
                 PCX86PDPAE      pShwPaePd = pgmShwGetPaePDPtr(pVCpu, pShwPdpt, iPdpt << X86_PDPT_SHIFT);
                 AssertFatal(pShwPaePd);

  • trunk/src/VBox/VMM/VMMAll/PGMAllPhys.cpp (r31136 → r31170)

      * Map it by HCPhys.
      */
-    return pgmR0DynMapHCPageInlined(&pVM->pgm.s, HCPhys, ppv);
+    return pgmR0DynMapHCPageInlined(VMMGetCpu(pVM), HCPhys, ppv);

 #else
…
     Assert(HCPhys != pVM->pgm.s.HCPhysZeroPg);
 # ifdef VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0
-    pgmR0DynMapHCPageInlined(&pVM->pgm.s, HCPhys, ppv);
+    pgmR0DynMapHCPageInlined(VMMGetCpu(pVM), HCPhys, ppv);
 # else
     PGMDynMapHCPage(pVM, HCPhys, ppv);
…
      */
 #if defined(IN_RC) || defined(VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0)
-    *ppv = pgmDynMapHCPageOff(&pVM->pgm.s, PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK));
+    *ppv = pgmDynMapHCPageOff(pVM, PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK));
 #else
     PPGMPAGEMAPTLBE pTlbe;
…
      */
 #if defined(IN_RC) || defined(VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0)
-    *ppv = pgmDynMapHCPageOff(&pVM->pgm.s, PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK)); /** @todo add a read only flag? */
+    *ppv = pgmDynMapHCPageOff(pVM, PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK)); /** @todo add a read only flag? */
 #else
     PPGMPAGEMAPTLBE pTlbe;
…
         if (RT_SUCCESS(rc))
         {
-            *ppv = pgmDynMapHCPageOff(&pVM->pgm.s, PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK)); /** @todo add a read only flag? */
+            *ppv = pgmDynMapHCPageOff(pVM, PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK)); /** @todo add a read only flag? */
 # if 0
             pLock->pvMap = 0;
…
         else
         {
-            *ppv = pgmDynMapHCPageOff(&pVM->pgm.s, PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK)); /** @todo add a read only flag? */
+            *ppv = pgmDynMapHCPageOff(pVM, PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK)); /** @todo add a read only flag? */
 # if 0
             pLock->pvMap = 0;
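
The PGMAllPhys.cpp hunks also switch pgmDynMapHCPageOff to taking the VM handle directly. The second argument is unchanged: the page-aligned host-physical address is ORed with the low bits of the guest-physical address so the returned pointer refers to the exact byte rather than the page start. A self-contained toy illustration of that offset arithmetic (plain C, not VirtualBox code; the 4 KiB mask is an assumption mirroring PAGE_OFFSET_MASK):

    #include <stdint.h>
    #include <stdio.h>

    #define TOY_PAGE_OFFSET_MASK UINT64_C(0xfff)   /* low 12 bits of a 4 KiB page */

    int main(void)
    {
        uint64_t HCPhysPage = UINT64_C(0x123456000);  /* page-aligned host-physical address */
        uint64_t GCPhys     = UINT64_C(0x7abc);       /* guest-physical address being accessed */

        /* Same combination the changed call sites use:
         * PGM_PAGE_GET_HCPHYS(pPage) | (GCPhys & PAGE_OFFSET_MASK). */
        uint64_t HCPhys = HCPhysPage | (GCPhys & TOY_PAGE_OFFSET_MASK);

        printf("%#llx\n", (unsigned long long)HCPhys);  /* prints 0x123456abc */
        return 0;
    }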

  • trunk/src/VBox/VMM/VMMAll/PGMAllPool.cpp (r31123 → r31170)

 DECLINLINE(void *) PGMPOOL_PAGE_2_LOCKED_PTR(PVM pVM, PPGMPOOLPAGE pPage)
 {
-    void *pv = pgmPoolMapPageInlined(&pVM->pgm.s, pPage);
+    void *pv = pgmPoolMapPageInlined(pVM, pPage);

     /* Make sure the dynamic mapping will not be reused. */