VirtualBox

Changeset 8557 in vbox for trunk/src/VBox


Timestamp:
May 5, 2008 10:00:33 AM
Author:
vboxsync
Message:

Updates for 64-bit paging. Removed the unused ShwGet/Set/ModifyPDEByIndex functions.

Location:
trunk/src/VBox/VMM
Files:
5 edited

  • trunk/src/VBox/VMM/PGM.cpp

    r8536 r8557  
    24852485    Assert(pVM->pgm.s.pfnR3ShwGetPage);
    24862486    pVM->pgm.s.pfnR3ShwModifyPage           = pModeData->pfnR3ShwModifyPage;
    2487     pVM->pgm.s.pfnR3ShwGetPDEByIndex        = pModeData->pfnR3ShwGetPDEByIndex;
    2488     pVM->pgm.s.pfnR3ShwSetPDEByIndex        = pModeData->pfnR3ShwSetPDEByIndex;
    2489     pVM->pgm.s.pfnR3ShwModifyPDEByIndex     = pModeData->pfnR3ShwModifyPDEByIndex;
    24902487
    24912488    pVM->pgm.s.pfnGCShwGetPage              = pModeData->pfnGCShwGetPage;
    24922489    pVM->pgm.s.pfnGCShwModifyPage           = pModeData->pfnGCShwModifyPage;
    2493     pVM->pgm.s.pfnGCShwGetPDEByIndex        = pModeData->pfnGCShwGetPDEByIndex;
    2494     pVM->pgm.s.pfnGCShwSetPDEByIndex        = pModeData->pfnGCShwSetPDEByIndex;
    2495     pVM->pgm.s.pfnGCShwModifyPDEByIndex     = pModeData->pfnGCShwModifyPDEByIndex;
    24962490
    24972491    pVM->pgm.s.pfnR0ShwGetPage              = pModeData->pfnR0ShwGetPage;
    24982492    pVM->pgm.s.pfnR0ShwModifyPage           = pModeData->pfnR0ShwModifyPage;
    2499     pVM->pgm.s.pfnR0ShwGetPDEByIndex        = pModeData->pfnR0ShwGetPDEByIndex;
    2500     pVM->pgm.s.pfnR0ShwSetPDEByIndex        = pModeData->pfnR0ShwSetPDEByIndex;
    2501     pVM->pgm.s.pfnR0ShwModifyPDEByIndex     = pModeData->pfnR0ShwModifyPDEByIndex;
    25022493
    25032494
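
    The hunk above drops the three PDE-by-index entry points from the routine that installs a paging mode's callback table into the VM state. A minimal sketch of that dispatch pattern, with hypothetical type and member names standing in for the real PGM structures:

        /* Hedged sketch of the mode-data dispatch pattern (hypothetical names, not
         * the real PGM structures): each shadow paging mode supplies a table of
         * function pointers that is copied into the VM state when the mode is
         * selected, and all later shadow-page operations go through those pointers. */
        #include <stdint.h>
        #include <stddef.h>

        typedef struct VMSTUB VMSTUB;

        typedef struct SHWMODEDATA
        {
            /* Only the ring-3 slots are shown; the real table has GC and R0 rows too. */
            int (*pfnShwGetPage)(VMSTUB *pVM, uint64_t GCPtr, uint64_t *pfFlags, uint64_t *pHCPhys);
            int (*pfnShwModifyPage)(VMSTUB *pVM, uint64_t GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask);
        } SHWMODEDATA;

        struct VMSTUB
        {
            SHWMODEDATA Shw;   /* currently installed shadow-mode callbacks */
        };

        /* Install the callbacks of the selected shadow mode; the PDE-by-index slots
         * removed by this changeset would have been copied here as well. */
        static void vmInstallShadowMode(VMSTUB *pVM, const SHWMODEDATA *pModeData)
        {
            pVM->Shw.pfnShwGetPage    = pModeData->pfnShwGetPage;
            pVM->Shw.pfnShwModifyPage = pModeData->pfnShwModifyPage;
        }
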
  • trunk/src/VBox/VMM/PGMInternal.h

    r8533 r8557  
    17721772    DECLR3CALLBACKMEMBER(int,  pfnR3ShwGetPage,(PVM pVM, RTGCUINTPTR GCPtr, uint64_t *pfFlags, PRTHCPHYS pHCPhys));
    17731773    DECLR3CALLBACKMEMBER(int,  pfnR3ShwModifyPage,(PVM pVM, RTGCUINTPTR GCPtr, size_t cbPages, uint64_t fFlags, uint64_t fMask));
    1774     DECLR3CALLBACKMEMBER(int,  pfnR3ShwGetPDEByIndex,(PVM pVM, uint32_t iPD, PX86PDEPAE pPde));
    1775     DECLR3CALLBACKMEMBER(int,  pfnR3ShwSetPDEByIndex,(PVM pVM, uint32_t iPD, X86PDEPAE Pde));
    1776     DECLR3CALLBACKMEMBER(int,  pfnR3ShwModifyPDEByIndex,(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask));
    17771774
    17781775    DECLGCCALLBACKMEMBER(int,  pfnGCShwGetPage,(PVM pVM, RTGCUINTPTR GCPtr, uint64_t *pfFlags, PRTHCPHYS pHCPhys));
    17791776    DECLGCCALLBACKMEMBER(int,  pfnGCShwModifyPage,(PVM pVM, RTGCUINTPTR GCPtr, size_t cbPages, uint64_t fFlags, uint64_t fMask));
    1780     DECLGCCALLBACKMEMBER(int,  pfnGCShwGetPDEByIndex,(PVM pVM, uint32_t iPD, PX86PDEPAE pPde));
    1781     DECLGCCALLBACKMEMBER(int,  pfnGCShwSetPDEByIndex,(PVM pVM, uint32_t iPD, X86PDEPAE Pde));
    1782     DECLGCCALLBACKMEMBER(int,  pfnGCShwModifyPDEByIndex,(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask));
    17831777
    17841778    DECLR0CALLBACKMEMBER(int,  pfnR0ShwGetPage,(PVM pVM, RTGCUINTPTR GCPtr, uint64_t *pfFlags, PRTHCPHYS pHCPhys));
    17851779    DECLR0CALLBACKMEMBER(int,  pfnR0ShwModifyPage,(PVM pVM, RTGCUINTPTR GCPtr, size_t cbPages, uint64_t fFlags, uint64_t fMask));
    1786     DECLR0CALLBACKMEMBER(int,  pfnR0ShwGetPDEByIndex,(PVM pVM, uint32_t iPD, PX86PDEPAE pPde));
    1787     DECLR0CALLBACKMEMBER(int,  pfnR0ShwSetPDEByIndex,(PVM pVM, uint32_t iPD, X86PDEPAE Pde));
    1788     DECLR0CALLBACKMEMBER(int,  pfnR0ShwModifyPDEByIndex,(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask));
    17891780    /** @} */
    17901781
     
    20001991    DECLR3CALLBACKMEMBER(int,  pfnR3ShwGetPage,(PVM pVM, RTGCUINTPTR GCPtr, uint64_t *pfFlags, PRTHCPHYS pHCPhys));
    20011992    DECLR3CALLBACKMEMBER(int,  pfnR3ShwModifyPage,(PVM pVM, RTGCUINTPTR GCPtr, size_t cbPages, uint64_t fFlags, uint64_t fMask));
    2002     DECLR3CALLBACKMEMBER(int,  pfnR3ShwGetPDEByIndex,(PVM pVM, uint32_t iPD, PX86PDEPAE pPde));
    2003     DECLR3CALLBACKMEMBER(int,  pfnR3ShwSetPDEByIndex,(PVM pVM, uint32_t iPD, X86PDEPAE Pde));
    2004     DECLR3CALLBACKMEMBER(int,  pfnR3ShwModifyPDEByIndex,(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask));
    20051993
    20061994    DECLGCCALLBACKMEMBER(int,  pfnGCShwGetPage,(PVM pVM, RTGCUINTPTR GCPtr, uint64_t *pfFlags, PRTHCPHYS pHCPhys));
    20071995    DECLGCCALLBACKMEMBER(int,  pfnGCShwModifyPage,(PVM pVM, RTGCUINTPTR GCPtr, size_t cbPages, uint64_t fFlags, uint64_t fMask));
    2008     DECLGCCALLBACKMEMBER(int,  pfnGCShwGetPDEByIndex,(PVM pVM, uint32_t iPD, PX86PDEPAE pPde));
    2009     DECLGCCALLBACKMEMBER(int,  pfnGCShwSetPDEByIndex,(PVM pVM, uint32_t iPD, X86PDEPAE Pde));
    2010     DECLGCCALLBACKMEMBER(int,  pfnGCShwModifyPDEByIndex,(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask));
    2011 #if GC_ARCH_BITS == 32 && HC_ARCH_BITS == 64
    2012     RTGCPTR                    alignment0; /**< structure size alignment. */
    2013 #endif
    20141996
    20151997    DECLR0CALLBACKMEMBER(int,  pfnR0ShwGetPage,(PVM pVM, RTGCUINTPTR GCPtr, uint64_t *pfFlags, PRTHCPHYS pHCPhys));
    20161998    DECLR0CALLBACKMEMBER(int,  pfnR0ShwModifyPage,(PVM pVM, RTGCUINTPTR GCPtr, size_t cbPages, uint64_t fFlags, uint64_t fMask));
    2017     DECLR0CALLBACKMEMBER(int,  pfnR0ShwGetPDEByIndex,(PVM pVM, uint32_t iPD, PX86PDEPAE pPde));
    2018     DECLR0CALLBACKMEMBER(int,  pfnR0ShwSetPDEByIndex,(PVM pVM, uint32_t iPD, X86PDEPAE Pde));
    2019     DECLR0CALLBACKMEMBER(int,  pfnR0ShwModifyPDEByIndex,(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask));
    20201999
    20212000    /** @} */
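
    In the hunk above, the removed guest-context callback members were followed by a conditional alignment member. When the guest-context build uses 32-bit pointers and the host build uses 64-bit ones, such a pad keeps the two layouts in step. A hedged sketch of the idea only, with hypothetical macro and member names:

        /* Hedged sketch (hypothetical names): on a 32-bit x86 compile a uint64_t
         * member may be 4-byte aligned, while the 64-bit host compile aligns it
         * to 8 bytes, so an explicit pad is needed to make both builds agree on
         * the member offsets and the structure size. */
        #include <stdint.h>

        typedef struct MODEDATASTUB
        {
            uint64_t   u64EntryBefore;     /* 8-byte member                          */
            uint32_t   offGCEntryPoint;    /* guest-context address, always 32-bit   */
        #if defined(GC_BITS_32) && defined(HC_BITS_64)
            uint32_t   u32Alignment;       /* keep the next member 8-byte aligned in */
                                           /* both the GC and the HC compilation     */
        #endif
            uint64_t   u64EntryAfter;      /* would otherwise land at different offsets */
        } MODEDATASTUB;
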
  • trunk/src/VBox/VMM/PGMShw.h

    r8415 r8557  
    8787PGM_SHW_DECL(int, GetPage)(PVM pVM, RTGCUINTPTR GCPtr, uint64_t *pfFlags, PRTHCPHYS pHCPhys);
    8888PGM_SHW_DECL(int, ModifyPage)(PVM pVM, RTGCUINTPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask);
    89 PGM_SHW_DECL(int, GetPDEByIndex)(PVM pVM, uint32_t iPD, PX86PDEPAE pPde);
    90 PGM_SHW_DECL(int, SetPDEByIndex)(PVM pVM, uint32_t iPD, X86PDEPAE Pde);
    91 PGM_SHW_DECL(int, ModifyPDEByIndex)(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask);
    9289__END_DECLS
    9390
     
    111108    pModeData->pfnR3ShwGetPage           = PGM_SHW_NAME(GetPage);
    112109    pModeData->pfnR3ShwModifyPage        = PGM_SHW_NAME(ModifyPage);
    113     pModeData->pfnR3ShwGetPDEByIndex     = PGM_SHW_NAME(GetPDEByIndex);
    114     pModeData->pfnR3ShwSetPDEByIndex     = PGM_SHW_NAME(SetPDEByIndex);
    115     pModeData->pfnR3ShwModifyPDEByIndex  = PGM_SHW_NAME(ModifyPDEByIndex);
    116110
    117111    if (fResolveGCAndR0)
     
    125119        rc = PDMR3GetSymbolGC(pVM, NULL, PGM_SHW_NAME_GC_STR(ModifyPage),  &pModeData->pfnGCShwModifyPage);
    126120        AssertMsgRCReturn(rc, ("%s -> rc=%Vrc\n", PGM_SHW_NAME_GC_STR(ModifyPage),  rc), rc);
    127         rc = PDMR3GetSymbolGC(pVM, NULL, PGM_SHW_NAME_GC_STR(GetPDEByIndex),  &pModeData->pfnGCShwGetPDEByIndex);
    128         AssertMsgRCReturn(rc, ("%s -> rc=%Vrc\n", PGM_SHW_NAME_GC_STR(GetPDEByIndex),  rc), rc);
    129         rc = PDMR3GetSymbolGC(pVM, NULL, PGM_SHW_NAME_GC_STR(SetPDEByIndex),  &pModeData->pfnGCShwSetPDEByIndex);
    130         AssertMsgRCReturn(rc, ("%s -> rc=%Vrc\n", PGM_SHW_NAME_GC_STR(SetPDEByIndex),  rc), rc);
    131         rc = PDMR3GetSymbolGC(pVM, NULL, PGM_SHW_NAME_GC_STR(ModifyPDEByIndex),  &pModeData->pfnGCShwModifyPDEByIndex);
    132         AssertMsgRCReturn(rc, ("%s -> rc=%Vrc\n", PGM_SHW_NAME_GC_STR(ModifyPDEByIndex),  rc), rc);
    133121#endif /* Not AMD64 shadow paging. */
    134122
     
    138126        rc = PDMR3GetSymbolR0(pVM, NULL, PGM_SHW_NAME_R0_STR(ModifyPage),  &pModeData->pfnR0ShwModifyPage);
    139127        AssertMsgRCReturn(rc, ("%s -> rc=%Vrc\n", PGM_SHW_NAME_R0_STR(ModifyPage),  rc), rc);
    140         rc = PDMR3GetSymbolR0(pVM, NULL, PGM_SHW_NAME_R0_STR(GetPDEByIndex),  &pModeData->pfnR0ShwGetPDEByIndex);
    141         AssertMsgRCReturn(rc, ("%s -> rc=%Vrc\n", PGM_SHW_NAME_R0_STR(GetPDEByIndex),  rc), rc);
    142         rc = PDMR3GetSymbolR0(pVM, NULL, PGM_SHW_NAME_R0_STR(SetPDEByIndex),  &pModeData->pfnR0ShwSetPDEByIndex);
    143         AssertMsgRCReturn(rc, ("%s -> rc=%Vrc\n", PGM_SHW_NAME_R0_STR(SetPDEByIndex),  rc), rc);
    144         rc = PDMR3GetSymbolR0(pVM, NULL, PGM_SHW_NAME_R0_STR(ModifyPDEByIndex),  &pModeData->pfnR0ShwModifyPDEByIndex);
    145         AssertMsgRCReturn(rc, ("%s -> rc=%Vrc\n", PGM_SHW_NAME_R0_STR(ModifyPDEByIndex),  rc), rc);
    146128    }
    147129    return VINF_SUCCESS;
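
    PGMShw.h is a template header that is instantiated once per shadow paging mode; the ring-3 entry points are taken directly as function addresses, while the guest-context and ring-0 ones live in separate modules and are resolved by symbol name, as the PDMR3GetSymbolGC/PDMR3GetSymbolR0 calls above show. A hedged sketch of that lookup step, with a hypothetical resolver type:

        /* Hedged sketch (hypothetical helper and names): GC and R0 entry points
         * cannot be taken as plain function addresses from the ring-3 build, so
         * they are looked up by their decorated symbol name and stored as raw
         * addresses. */
        #include <stdio.h>

        typedef int (*PFNGETSYMBOL)(void *pVM, const char *pszModule,
                                    const char *pszSymbol, void **ppvValue);

        static int resolveModeEntryPoint(void *pVM, PFNGETSYMBOL pfnGetSymbol,
                                         const char *pszDecoratedName, void **ppfn)
        {
            /* NULL selects the default module, as in the hunk above. */
            int rc = pfnGetSymbol(pVM, NULL, pszDecoratedName, ppfn);
            if (rc != 0)
                fprintf(stderr, "failed to resolve %s (rc=%d)\n", pszDecoratedName, rc);
            return rc;
        }

    The hunk's "#endif /* Not AMD64 shadow paging. */" marker also shows that the guest-context lookups are compiled out entirely for the AMD64 shadow mode.
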
  • trunk/src/VBox/VMM/VMMAll/PGMAllShw.h

    r8155 r8557  
    9595PGM_SHW_DECL(int, GetPage)(PVM pVM, RTGCUINTPTR GCPtr, uint64_t *pfFlags, PRTHCPHYS pHCPhys);
    9696PGM_SHW_DECL(int, ModifyPage)(PVM pVM, RTGCUINTPTR GCPtr, size_t cbPages, uint64_t fFlags, uint64_t fMask);
    97 PGM_SHW_DECL(int, GetPDEByIndex)(PVM pVM, uint32_t iPD, PX86PDEPAE pPde);
    98 PGM_SHW_DECL(int, SetPDEByIndex)(PVM pVM, uint32_t iPD, X86PDEPAE Pde);
    99 PGM_SHW_DECL(int, ModifyPDEByIndex)(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask);
    10097__END_DECLS
    10198
     
    119116     */
    120117#if PGM_SHW_TYPE == PGM_TYPE_AMD64
    121     /*
    122      * For the first 4G we have preallocated page directories.
    123      * Since the two upper levels contains only fixed flags, we skip those when possible.
    124      */
     118    bool      fNoExecuteBitValid = !!(CPUMGetGuestEFER(pVM) & MSR_K6_EFER_NXE);
    125119    X86PDEPAE Pde;
    126 #if GC_ARCH_BITS == 64
    127     if (GCPtr < _4G)
    128 #endif
    129     {
    130         const unsigned iPDPT = (GCPtr >> SHW_PDPT_SHIFT)  & SHW_PDPT_MASK;
    131         const unsigned iPd    = (GCPtr >> X86_PD_PAE_SHIFT) & X86_PD_PAE_MASK;
    132         Pde = CTXMID(pVM->pgm.s.ap,PaePDs)[iPDPT]->a[iPd];
    133     }
    134 #if GC_ARCH_BITS == 64
    135     else
    136     {
    137         /* PML4 */
    138         const unsigned iPml4  = (GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK;
    139         X86PML4E Pml4e = CTXMID(pVM->pgm.s.p,PaePML4)->a[iPml4];
    140         if (!Pml4e.n.u1Present)
    141             return VERR_PAGE_TABLE_NOT_PRESENT;
    142 
    143         /* PDPT */
    144         PX86PDPT pPDPT;
    145         int rc = PGM_HCPHYS_2_PTR(pVM, Pml4e.u & X86_PML4E_PG_MASK, &pPDPT);
    146         if (VBOX_FAILURE(rc))
    147             return rc;
    148         const unsigned iPDPT = (GCPtr >> SHW_PDPT_SHIFT) & SHW_PDPT_MASK;
    149         X86PDPE Pdpe = pPDPT->a[iPDPT];
    150         if (!Pdpe.n.u1Present)
    151             return VERR_PAGE_TABLE_NOT_PRESENT;
    152 
    153         /* PD */
    154         PX86PDPAE pPd;
    155         rc = PGM_HCPHYS_2_PTR(pVM, Pdpe.u & X86_PDPE_PG_MASK, &pPd);
    156         if (VBOX_FAILURE(rc))
    157             return rc;
    158         const unsigned iPd = (GCPtr >> X86_PD_PAE_SHIFT) & X86_PD_PAE_MASK;
    159         Pdpe = pPDPT->a[iPd];
    160     }
    161 #endif /* GC_ARCH_BITS == 64 */
     120
     121    /* PML4 */
     122    const unsigned iPml4  = ((RTGCUINTPTR64)GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK;
     123    X86PML4E Pml4e = CTXMID(pVM->pgm.s.p,PaePML4)->a[iPml4];
     124    if (!Pml4e.n.u1Present)
     125        return VERR_PAGE_TABLE_NOT_PRESENT;
     126
     127    /* PDPT */
     128    PX86PDPT pPDPT;
     129    int rc = PGM_HCPHYS_2_PTR(pVM, Pml4e.u & X86_PML4E_PG_MASK, &pPDPT);
     130    if (VBOX_FAILURE(rc))
     131        return rc;
     132    const unsigned iPDPT = (GCPtr >> SHW_PDPT_SHIFT) & SHW_PDPT_MASK;
     133    X86PDPE Pdpe = pPDPT->a[iPDPT];
     134    if (!Pdpe.n.u1Present)
     135        return VERR_PAGE_TABLE_NOT_PRESENT;
     136
     137    /* PD */
     138    PX86PDPAE pPd;
     139    rc = PGM_HCPHYS_2_PTR(pVM, Pdpe.u & X86_PDPE_PG_MASK, &pPd);
     140    if (VBOX_FAILURE(rc))
     141        return rc;
     142    const unsigned iPd = (GCPtr >> SHW_PD_SHIFT) & SHW_PD_MASK;
     143    Pde = pPd->a[iPd];
     144
     145    /* Merge accessed, write, user and no-execute bits into the PDE. */
     146    Pde.n.u1Accessed  &= Pml4e.n.u1Accessed & Pdpe.lm.u1Accessed;
     147    Pde.n.u1Write     &= Pml4e.n.u1Write & Pdpe.lm.u1Write;
     148    Pde.n.u1User      &= Pml4e.n.u1User & Pdpe.lm.u1User;
     149    Pde.n.u1NoExecute &= Pml4e.n.u1NoExecute & Pdpe.lm.u1NoExecute;
    162150
    163151#elif PGM_SHW_TYPE == PGM_TYPE_PAE
     152    bool           fNoExecuteBitValid = !!(CPUMGetGuestEFER(pVM) & MSR_K6_EFER_NXE);
    164153    const unsigned iPDPT = (GCPtr >> SHW_PDPT_SHIFT) & SHW_PDPT_MASK;
    165154    const unsigned iPd = (GCPtr >> X86_PD_PAE_SHIFT) & X86_PD_PAE_MASK;
    166     X86PDEPAE Pde = CTXMID(pVM->pgm.s.ap,PaePDs)[iPDPT]->a[iPd];
     155    X86PDEPAE      Pde = CTXMID(pVM->pgm.s.ap,PaePDs)[iPDPT]->a[iPd];
    167156
    168157#else /* PGM_TYPE_32BIT */
     
    172161    if (!Pde.n.u1Present)
    173162        return VERR_PAGE_TABLE_NOT_PRESENT;
     163
     164    Assert(!Pde.b.u1Size);
    174165
    175166    /*
     
    185176    else /* mapping: */
    186177    {
     178#if PGM_SHW_TYPE == PGM_TYPE_AMD64
     179        AssertFailed(); /* can't happen */
     180#else
    187181        Assert(pgmMapAreMappingsEnabled(&pVM->pgm.s));
    188182
    189183        PPGMMAPPING pMap = pgmGetMapping(pVM, (RTGCPTR)GCPtr);
    190184        AssertMsgReturn(pMap, ("GCPtr=%VGv\n", GCPtr), VERR_INTERNAL_ERROR);
    191 #if PGM_SHW_TYPE == PGM_TYPE_32BIT
     185# if PGM_SHW_TYPE == PGM_TYPE_32BIT
    192186        pPT = pMap->aPTs[(GCPtr - pMap->GCPtr) >> X86_PD_SHIFT].CTXALLSUFF(pPT);
    193 #else /* PAE and AMD64: */
     187# else /* PAE */
    194188        pPT = pMap->aPTs[(GCPtr - pMap->GCPtr) >> X86_PD_SHIFT].CTXALLSUFF(paPaePTs);
    195 #endif
     189# endif
     190#endif
    196191    }
    197192    const unsigned iPt = (GCPtr >> SHW_PT_SHIFT) & SHW_PT_MASK;
     
    202197    /*
    203198     * Store the results.
    204      * RW and US flags depend on the entire page transation hierarchy - except for
     199     * RW and US flags depend on the entire page translation hierarchy - except for
    205200     * legacy PAE which has a simplified PDPE.
    206201     */
    207202    if (pfFlags)
     203    {
    208204        *pfFlags = (Pte.u & ~SHW_PTE_PG_MASK)
    209205                 & ((Pde.u & (X86_PTE_RW | X86_PTE_US)) | ~(uint64_t)(X86_PTE_RW | X86_PTE_US));
     206# if PGM_WITH_NX(PGM_SHW_TYPE)
     207        /* The NX bit is determined by a bitwise OR between the PT and PD */
     208        if (fNoExecuteBitValid)
     209            *pfFlags |= (Pte.u & Pde.u & X86_PTE_PAE_NX);
     210# endif
     211    }
     212
    210213    if (pHCPhys)
    211214        *pHCPhys = Pte.u & SHW_PTE_PG_MASK;
     
    231234PGM_SHW_DECL(int, ModifyPage)(PVM pVM, RTGCUINTPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
    232235{
     236    int rc;
     237
    233238    /*
    234239     * Walk page tables and pages till we're done.
     
    240245         */
    241246#if PGM_SHW_TYPE == PGM_TYPE_AMD64
    242         /*
    243          * For the first 4G we have preallocated page directories.
    244          * Since the two upper levels contains only fixed flags, we skip those when possible.
    245          */
    246247        X86PDEPAE Pde;
    247 #if GC_ARCH_BITS == 64
    248         if (GCPtr < _4G)
    249 #endif
    250         {
    251             const unsigned iPDPT = (GCPtr >> SHW_PDPT_SHIFT)  & SHW_PDPT_MASK;
    252             const unsigned iPd    = (GCPtr >> X86_PD_PAE_SHIFT) & X86_PD_PAE_MASK;
    253             Pde = CTXMID(pVM->pgm.s.ap,PaePDs)[iPDPT]->a[iPd];
    254         }
    255 #if GC_ARCH_BITS == 64
    256         else
    257         {
    258             /* PML4 */
    259             const unsigned iPml4  = (GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK;
    260             X86PML4E Pml4e = CTXMID(pVM->pgm.s.p,PaePML4)->a[iPml4];
    261             if (!Pml4e.n.u1Present)
    262                 return VERR_PAGE_TABLE_NOT_PRESENT;
    263 
    264             /* PDPT */
    265             PX86PDPT pPDPT;
    266             int rc = PGM_HCPHYS_2_PTR(pVM, Pml4e.u & X86_PML4E_PG_MASK, &pPDPT);
    267             if (VBOX_FAILURE(rc))
    268                 return rc;
    269             const unsigned iPDPT = (GCPtr >> SHW_PDPT_SHIFT) & SHW_PDPT_MASK;
    270             X86PDPE Pdpe = pPDPT->a[iPDPT];
    271             if (!Pdpe.n.u1Present)
    272                 return VERR_PAGE_TABLE_NOT_PRESENT;
    273 
    274             /* PD */
    275             PX86PDPAE pPd;
    276             rc = PGM_HCPHYS_2_PTR(pVM, Pdpe.u & X86_PDPE_PG_MASK, &pPd);
    277             if (VBOX_FAILURE(rc))
    278                 return rc;
    279             const unsigned iPd = (GCPtr >> X86_PD_PAE_SHIFT) & X86_PD_PAE_MASK;
    280             Pdpe = pPDPT->a[iPd];
    281         }
    282 #endif /* GC_ARCH_BITS == 64 */
     248        /* PML4 */
     249        const unsigned iPml4  = ((RTGCUINTPTR64)GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK;
     250        X86PML4E Pml4e = CTXMID(pVM->pgm.s.p,PaePML4)->a[iPml4];
     251        if (!Pml4e.n.u1Present)
     252            return VERR_PAGE_TABLE_NOT_PRESENT;
     253
     254        /* PDPT */
     255        PX86PDPT pPDPT;
     256        rc = PGM_HCPHYS_2_PTR(pVM, Pml4e.u & X86_PML4E_PG_MASK, &pPDPT);
     257        if (VBOX_FAILURE(rc))
     258            return rc;
     259        const unsigned iPDPT = (GCPtr >> SHW_PDPT_SHIFT) & SHW_PDPT_MASK;
     260        X86PDPE Pdpe = pPDPT->a[iPDPT];
     261        if (!Pdpe.n.u1Present)
     262            return VERR_PAGE_TABLE_NOT_PRESENT;
     263
     264        /* PD */
     265        PX86PDPAE pPd;
     266        rc = PGM_HCPHYS_2_PTR(pVM, Pdpe.u & X86_PDPE_PG_MASK, &pPd);
     267        if (VBOX_FAILURE(rc))
     268            return rc;
     269        const unsigned iPd = (GCPtr >> SHW_PD_SHIFT) & SHW_PD_MASK;
     270        Pde = pPd->a[iPd];
    283271
    284272#elif PGM_SHW_TYPE == PGM_TYPE_PAE
     
    294282            return VERR_PAGE_TABLE_NOT_PRESENT;
    295283
    296 
    297284        /*
    298285         * Map the page table.
    299286         */
    300287        PSHWPT pPT;
    301         int rc = PGM_HCPHYS_2_PTR(pVM, Pde.u & SHW_PDE_PG_MASK, &pPT);
     288        rc = PGM_HCPHYS_2_PTR(pVM, Pde.u & SHW_PDE_PG_MASK, &pPT);
    302289        if (VBOX_FAILURE(rc))
    303290            return rc;
     
    323310}
    324311
    325 /**
    326  * Retrieve shadow PDE
    327  *
    328  * @returns VBox status code.
    329  * @param   pVM         The virtual machine.
    330  * @param   iPD         Shadow PDE index.
    331  * @param   pPde        Where to store the shadow PDE entry.
    332  */
    333 PGM_SHW_DECL(int, GetPDEByIndex)(PVM pVM, unsigned iPD, PX86PDEPAE pPde)
    334 {
    335 #if PGM_SHW_TYPE == PGM_TYPE_32BIT || PGM_SHW_TYPE == PGM_TYPE_PAE
    336     /*
    337      * Get page directory addresses.
    338      */
    339     Assert(iPD < SHW_TOTAL_PD_ENTRIES);
    340 # if PGM_SHW_TYPE == PGM_TYPE_32BIT
    341     PX86PDE pPdeSrc = &CTXMID(pVM->pgm.s.p,32BitPD)->a[iPD];
    342 # else
    343     PX86PDEPAE pPdeSrc = &CTXMID(pVM->pgm.s.ap,PaePDs)[0]->a[iPD];    /* We treat this as a PD with 2048 entries. */
    344 # endif
    345 
    346     pPde->u = (X86PGPAEUINT)pPdeSrc->u;
    347     return VINF_SUCCESS;
    348 
    349 #else
    350     AssertFailed();
    351     return VERR_NOT_IMPLEMENTED;
    352 #endif
    353 }
    354 
    355 /**
    356  * Set shadow PDE
    357  *
    358  * @returns VBox status code.
    359  * @param   pVM         The virtual machine.
    360  * @param   iPD         Shadow PDE index.
    361  * @param   Pde         Shadow PDE.
    362  */
    363 PGM_SHW_DECL(int, SetPDEByIndex)(PVM pVM, unsigned iPD, X86PDEPAE Pde)
    364 {
    365 #if PGM_SHW_TYPE == PGM_TYPE_32BIT || PGM_SHW_TYPE == PGM_TYPE_PAE
    366     /*
    367      * Get page directory addresses and update the specified entry.
    368      */
    369     Assert(iPD < SHW_TOTAL_PD_ENTRIES);
    370 # if PGM_SHW_TYPE == PGM_TYPE_32BIT
    371     Assert(Pde.au32[1] == 0); /* First uint32_t is backwards compatible. */
    372     Assert(Pde.n.u1Size == 0);
    373     PX86PDE pPdeDst = &CTXMID(pVM->pgm.s.p,32BitPD)->a[iPD];
    374     pPdeDst->u = Pde.au32[0];
    375 # else
    376     PX86PDEPAE pPdeDst = &CTXMID(pVM->pgm.s.ap,PaePDs)[0]->a[iPD];  /* We treat this as a PD with 2048 entries. */
    377     pPdeDst->u = Pde.u;
    378 # endif
    379     Assert(pPdeDst->n.u1Present);
    380 
    381     return VINF_SUCCESS;
    382 #else
    383     AssertFailed();
    384     return VERR_NOT_IMPLEMENTED;
    385 #endif
    386 }
    387 
    388 /**
    389  * Modify shadow PDE
    390  *
    391  * @returns VBox status code.
    392  * @param   pVM         The virtual machine.
    393  * @param   iPD         Shadow PDE index.
    394  * @param   fFlags      The OR  mask - page flags X86_PDE_*, excluding the page mask of course.
    395  * @param   fMask       The AND mask - page flags X86_PDE_*.
    396  *                      Be extremely CAREFUL with ~'ing values because they can be 32-bit!
    397  */
    398 PGM_SHW_DECL(int, ModifyPDEByIndex)(PVM pVM, uint32_t iPD, uint64_t fFlags, uint64_t fMask)
    399 {
    400 #if PGM_SHW_TYPE == PGM_TYPE_32BIT || PGM_SHW_TYPE == PGM_TYPE_PAE
    401     /*
    402      * Get page directory addresses and update the specified entry.
    403      */
    404     Assert(iPD < SHW_TOTAL_PD_ENTRIES);
    405 # if PGM_SHW_TYPE == PGM_TYPE_32BIT
    406     PX86PDE pPdeDst = &CTXMID(pVM->pgm.s.p,32BitPD)->a[iPD];
    407 
    408     pPdeDst->u = ((pPdeDst->u & ((X86PGUINT)fMask | SHW_PDE_PG_MASK)) | ((X86PGUINT)fFlags & ~SHW_PDE_PG_MASK));
    409     Assert(!pPdeDst->n.u1Size);
    410 # else
    411     PX86PDEPAE pPdeDst = &CTXMID(pVM->pgm.s.ap,PaePDs)[0]->a[iPD];      /* We treat this as a PD with 2048 entries. */
    412 
    413     pPdeDst->u = (pPdeDst->u & (fMask | SHW_PDE_PG_MASK)) | (fFlags & ~SHW_PDE_PG_MASK);
    414 # endif
    415     Assert(pPdeDst->n.u1Present);
    416 
    417     return VINF_SUCCESS;
    418 #else
    419     AssertFailed();
    420     return VERR_NOT_IMPLEMENTED;
    421 #endif
    422 }
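
    The bulk of this file's change replaces the old "first 4G preallocated page directories" shortcut with a full PML4 -> PDPT -> PD walk in both GetPage and ModifyPage, and drops the three PDE-by-index helpers at the end. A simplified, self-contained sketch of that walk follows; the stub types and the physToPtr() helper are hypothetical, not the real VBox structures, while the level order, present checks and bit merging follow the added code above:

        #include <stdint.h>

        typedef struct { uint64_t u; } Entry;          /* stub for PML4E/PDPE/PDE      */
        #define ENTRY_P    0x0000000000000001ULL       /* present                      */
        #define ENTRY_RW   0x0000000000000002ULL       /* writable                     */
        #define ENTRY_US   0x0000000000000004ULL       /* user                         */
        #define ENTRY_A    0x0000000000000020ULL       /* accessed                     */
        #define ENTRY_NX   0x8000000000000000ULL       /* no-execute                   */
        #define ADDR_MASK  0x000FFFFFFFFFF000ULL       /* physical address bits        */

        /* Hypothetical helper: map the physical address of a paging structure to a
         * host-accessible pointer (the real code uses PGM_HCPHYS_2_PTR for this). */
        extern Entry *physToPtr(uint64_t physAddr);

        /* Walk PML4 -> PDPT -> PD for a 64-bit address and return the PDE with its
         * accessed/write/user/no-execute bits narrowed by the upper levels, as the
         * new GetPage code does before descending into the page table. */
        static int walkToPde(Entry *pml4, uint64_t addr, Entry *pPdeOut)
        {
            Entry pml4e = pml4[(addr >> 39) & 0x1ff];
            if (!(pml4e.u & ENTRY_P))
                return -1;                             /* VERR_PAGE_TABLE_NOT_PRESENT */

            Entry pdpe = physToPtr(pml4e.u & ADDR_MASK)[(addr >> 30) & 0x1ff];
            if (!(pdpe.u & ENTRY_P))
                return -1;

            Entry pde = physToPtr(pdpe.u & ADDR_MASK)[(addr >> 21) & 0x1ff];

            /* Mirror the merge in the hunk: a flag is reported only when every
             * level of the hierarchy has it set. */
            pde.u &= (pml4e.u & pdpe.u) | ~(ENTRY_A | ENTRY_RW | ENTRY_US | ENTRY_NX);

            *pPdeOut = pde;
            return 0;                                  /* VINF_SUCCESS                 */
        }

    The present check on the PDE itself, the page-size assertion and the page-table lookup then continue exactly as in the PAE and 32-bit paths of the hunk.
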
  • trunk/src/VBox/VMM/testcase/tstVMStructGC.cpp

    r8454 r8557  
    399399    GEN_CHECK_OFF(PGM, pfnR3ShwGetPage);
    400400    GEN_CHECK_OFF(PGM, pfnR3ShwModifyPage);
    401     GEN_CHECK_OFF(PGM, pfnR3ShwGetPDEByIndex);
    402     GEN_CHECK_OFF(PGM, pfnR3ShwSetPDEByIndex);
    403     GEN_CHECK_OFF(PGM, pfnR3ShwModifyPDEByIndex);
    404401    GEN_CHECK_OFF(PGM, pfnGCShwGetPage);
    405402    GEN_CHECK_OFF(PGM, pfnGCShwModifyPage);
    406     GEN_CHECK_OFF(PGM, pfnGCShwGetPDEByIndex);
    407     GEN_CHECK_OFF(PGM, pfnGCShwSetPDEByIndex);
    408     GEN_CHECK_OFF(PGM, pfnGCShwModifyPDEByIndex);
    409403    GEN_CHECK_OFF(PGM, pfnR3GstRelocate);
    410404    GEN_CHECK_OFF(PGM, pfnR3GstExit);
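
    tstVMStructGC.cpp only loses the GEN_CHECK_OFF lines for the removed members; the testcase is used to check that the guest-context and host builds agree on the member offsets of shared structures. A hedged sketch of what such an offset check boils down to, with hypothetical names:

        #include <stddef.h>

        /* Hypothetical stand-in for the shared structure whose layout is checked. */
        typedef struct PGMSTUB
        {
            void *pfnR3ShwGetPage;
            void *pfnR3ShwModifyPage;
        } PGMSTUB;

        /* Compile-time offset check in the C89 style: the array size becomes
         * negative, and the build fails, if the member is not at the expected
         * offset. */
        #define CHECK_OFF(type, member, expected) \
            typedef char check_off_##member[(offsetof(type, member) == (expected)) ? 1 : -1]

        CHECK_OFF(PGMSTUB, pfnR3ShwGetPage,    0);
        CHECK_OFF(PGMSTUB, pfnR3ShwModifyPage, sizeof(void *));
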