VirtualBox

Changeset 73250 in vbox for trunk


Ignore:
Timestamp:
Jul 19, 2018 5:57:31 PM (7 years ago)
Author:
vboxsync
svn:sync-xref-src-repo-rev:
123856
Message:

PGM: Working on eliminating PGMMODEDATA and the corresponding PGMCPU section so we can do mode switching in ring-0. This fourth part deals with shadow+guest paging pointers and finally removes the PGMMODEDATA type. bugref:9044

Location:
trunk/src/VBox/VMM
Files:
6 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMAll/PGMAll.cpp

    r73249 r73250  
    898898     * Call the worker.
    899899     */
     900    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     901    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     902    AssertReturn(g_aPgmBothModeData[idxBth].pfnTrap0eHandler, VERR_PGM_MODE_IPE);
    900903    bool fLockTaken = false;
    901     int rc = PGM_BTH_PFN(Trap0eHandler, pVCpu)(pVCpu, uErr, pRegFrame, pvFault, &fLockTaken);
     904    int rc = g_aPgmBothModeData[idxBth].pfnTrap0eHandler(pVCpu, uErr, pRegFrame, pvFault, &fLockTaken);
    902905    if (fLockTaken)
    903906    {
     
    953956{
    954957    STAM_PROFILE_START(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,Prefetch), a);
    955     int rc = PGM_BTH_PFN(PrefetchPage, pVCpu)(pVCpu, GCPtrPage);
     958
     959    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     960    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     961    AssertReturn(g_aPgmBothModeData[idxBth].pfnPrefetchPage, VERR_PGM_MODE_IPE);
     962    int rc = g_aPgmBothModeData[idxBth].pfnPrefetchPage(pVCpu, GCPtrPage);
     963
    956964    STAM_PROFILE_STOP(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,Prefetch), a);
    957965    AssertMsg(rc == VINF_SUCCESS || rc == VINF_PGM_SYNC_CR3 || RT_FAILURE(rc), ("rc=%Rrc\n", rc));
     
    10951103             */
    10961104            Assert(X86_TRAP_PF_RW == X86_PTE_RW && X86_TRAP_PF_US == X86_PTE_US);
    1097             uint32_t uErr = fAccess & (X86_TRAP_PF_RW | X86_TRAP_PF_US);
    1098             rc = PGM_BTH_PFN(VerifyAccessSyncPage, pVCpu)(pVCpu, Addr, fPageGst, uErr);
     1105            uint32_t  const uErr   = fAccess & (X86_TRAP_PF_RW | X86_TRAP_PF_US);
     1106            uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     1107            AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     1108            AssertReturn(g_aPgmBothModeData[idxBth].pfnVerifyAccessSyncPage, VERR_PGM_MODE_IPE);
     1109            rc = g_aPgmBothModeData[idxBth].pfnVerifyAccessSyncPage(pVCpu, Addr, fPageGst, uErr);
    10991110            if (rc != VINF_SUCCESS)
    11001111                return rc;
     
    12051216    STAM_PROFILE_START(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,InvalidatePage), a);
    12061217    pgmLock(pVM);
    1207     rc = PGM_BTH_PFN(InvalidatePage, pVCpu)(pVCpu, GCPtrPage);
     1218
     1219    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     1220    AssertReturnStmt(idxBth < RT_ELEMENTS(g_aPgmBothModeData), pgmUnlock(pVM), VERR_PGM_MODE_IPE);
     1221    AssertReturnStmt(g_aPgmBothModeData[idxBth].pfnInvalidatePage, pgmUnlock(pVM), VERR_PGM_MODE_IPE);
     1222    rc = g_aPgmBothModeData[idxBth].pfnInvalidatePage(pVCpu, GCPtrPage);
     1223
    12081224    pgmUnlock(pVM);
    12091225    STAM_PROFILE_STOP(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,InvalidatePage), a);
     
    25582574    if (pVCpu->pgm.s.GCPhysCR3 != GCPhysCR3)
    25592575    {
    2560         RTGCPHYS GCPhysOldCR3 = pVCpu->pgm.s.GCPhysCR3;
    2561         pVCpu->pgm.s.GCPhysCR3  = GCPhysCR3;
    2562         rc = PGM_BTH_PFN(MapCR3, pVCpu)(pVCpu, GCPhysCR3);
     2576        RTGCPHYS const GCPhysOldCR3 = pVCpu->pgm.s.GCPhysCR3;
     2577
     2578        uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     2579        AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     2580        AssertReturn(g_aPgmBothModeData[idxBth].pfnMapCR3, VERR_PGM_MODE_IPE);
     2581
     2582        pVCpu->pgm.s.GCPhysCR3 = GCPhysCR3;
     2583        rc = g_aPgmBothModeData[idxBth].pfnMapCR3(pVCpu, GCPhysCR3);
    25632584        if (RT_LIKELY(rc == VINF_SUCCESS))
    25642585        {
     
    26622683    if (pVCpu->pgm.s.GCPhysCR3 != GCPhysCR3)
    26632684    {
     2685        uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     2686        AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     2687        AssertReturn(g_aPgmBothModeData[idxBth].pfnMapCR3, VERR_PGM_MODE_IPE);
     2688
    26642689        pVCpu->pgm.s.GCPhysCR3 = GCPhysCR3;
    2665         rc = PGM_BTH_PFN(MapCR3, pVCpu)(pVCpu, GCPhysCR3);
     2690        rc = g_aPgmBothModeData[idxBth].pfnMapCR3(pVCpu, GCPhysCR3);
     2691
    26662692        AssertRCSuccess(rc); /* Assumes VINF_PGM_SYNC_CR3 doesn't apply to nested paging. */ /** @todo this isn't true for the mac, but we need hw to test/fix this. */
    26672693    }
     
    27522778        if (pVCpu->pgm.s.GCPhysCR3 != GCPhysCR3)
    27532779        {
     2780            uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     2781            AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     2782            AssertReturn(g_aPgmBothModeData[idxBth].pfnMapCR3, VERR_PGM_MODE_IPE);
    27542783            pVCpu->pgm.s.GCPhysCR3 = GCPhysCR3;
    2755             rc = PGM_BTH_PFN(MapCR3, pVCpu)(pVCpu, GCPhysCR3);
     2784            rc = g_aPgmBothModeData[idxBth].pfnMapCR3(pVCpu, GCPhysCR3);
    27562785        }
    27572786
     
    27772806     */
    27782807    STAM_PROFILE_START(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,SyncCR3), a);
    2779     rc = PGM_BTH_PFN(SyncCR3, pVCpu)(pVCpu, cr0, cr3, cr4, fGlobal);
     2808
     2809    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     2810    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     2811    AssertReturn(g_aPgmBothModeData[idxBth].pfnSyncCR3, VERR_PGM_MODE_IPE);
     2812    rc = g_aPgmBothModeData[idxBth].pfnSyncCR3(pVCpu, cr0, cr3, cr4, fGlobal);
     2813
    27802814    STAM_PROFILE_STOP(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,SyncCR3), a);
    27812815    AssertMsg(rc == VINF_SUCCESS || rc == VINF_PGM_SYNC_CR3 || RT_FAILURE(rc), ("rc=%Rrc\n", rc));
     
    34193453{
    34203454    STAM_PROFILE_START(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,SyncCR3), a);
     3455
     3456    uintptr_t const idxBth = pVCpu->pgm.s.idxBothModeData;
     3457    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), -VERR_PGM_MODE_IPE);
     3458    AssertReturn(g_aPgmBothModeData[idxBth].pfnAssertCR3, -VERR_PGM_MODE_IPE);
     3459
    34213460    pgmLock(pVM);
    3422     unsigned cErrors = PGM_BTH_PFN(AssertCR3, pVCpu)(pVCpu, cr3, cr4, 0, ~(RTGCPTR)0);
     3461    unsigned cErrors = g_aPgmBothModeData[idxBth].pfnAssertCR3(pVCpu, cr3, cr4, 0, ~(RTGCPTR)0);
    34233462    pgmUnlock(pVM);
     3463
    34243464    STAM_PROFILE_STOP(&pVCpu->pgm.s.CTX_SUFF(pStats)->CTX_MID_Z(Stat,SyncCR3), a);
    34253465    return cErrors;
  • trunk/src/VBox/VMM/VMMR3/PGM.cpp

    r73249 r73250  
    681681static FNVMATSTATE        pgmR3ResetNoMorePhysWritesFlag;
    682682#endif
    683 static int                pgmR3ModeDataInit(PVM pVM, bool fResolveGCAndR0);
    684 static void               pgmR3ModeDataSwitch(PVM pVM, PVMCPU pVCpu, PGMMODE enmShw, PGMMODE enmGst);
     683static void               pgmR3ModeDataSwitch(PVMCPU pVCpu, PGMMODE enmShw, PGMMODE enmGst);
    685684static PGMMODE            pgmR3CalcShadowMode(PVM pVM, PGMMODE enmGuestMode, SUPPAGINGMODE enmHostMode, PGMMODE enmShadowMode, VMMSWITCHER *penmSwitcher);
    686685
     
    19241923            return VERR_PGM_UNSUPPORTED_HOST_PAGING_MODE;
    19251924    }
    1926     rc = pgmR3ModeDataInit(pVM, false /* don't resolve GC and R0 syms yet */);
    1927     if (RT_SUCCESS(rc))
    1928     {
    1929         LogFlow(("pgmR3InitPaging: returns successfully\n"));
     1925
     1926    LogFlow(("pgmR3InitPaging: returns successfully\n"));
    19301927#if HC_ARCH_BITS == 64
    1931         LogRel(("PGM: HCPhysInterPD=%RHp HCPhysInterPaePDPT=%RHp HCPhysInterPaePML4=%RHp\n",
    1932                 pVM->pgm.s.HCPhysInterPD, pVM->pgm.s.HCPhysInterPaePDPT, pVM->pgm.s.HCPhysInterPaePML4));
    1933         LogRel(("PGM: apInterPTs={%RHp,%RHp} apInterPaePTs={%RHp,%RHp} apInterPaePDs={%RHp,%RHp,%RHp,%RHp} pInterPaePDPT64=%RHp\n",
    1934                 MMPage2Phys(pVM, pVM->pgm.s.apInterPTs[0]),    MMPage2Phys(pVM, pVM->pgm.s.apInterPTs[1]),
    1935                 MMPage2Phys(pVM, pVM->pgm.s.apInterPaePTs[0]), MMPage2Phys(pVM, pVM->pgm.s.apInterPaePTs[1]),
    1936                 MMPage2Phys(pVM, pVM->pgm.s.apInterPaePDs[0]), MMPage2Phys(pVM, pVM->pgm.s.apInterPaePDs[1]), MMPage2Phys(pVM, pVM->pgm.s.apInterPaePDs[2]), MMPage2Phys(pVM, pVM->pgm.s.apInterPaePDs[3]),
    1937                 MMPage2Phys(pVM, pVM->pgm.s.pInterPaePDPT64)));
     1928    LogRel(("PGM: HCPhysInterPD=%RHp HCPhysInterPaePDPT=%RHp HCPhysInterPaePML4=%RHp\n",
     1929            pVM->pgm.s.HCPhysInterPD, pVM->pgm.s.HCPhysInterPaePDPT, pVM->pgm.s.HCPhysInterPaePML4));
     1930    LogRel(("PGM: apInterPTs={%RHp,%RHp} apInterPaePTs={%RHp,%RHp} apInterPaePDs={%RHp,%RHp,%RHp,%RHp} pInterPaePDPT64=%RHp\n",
     1931            MMPage2Phys(pVM, pVM->pgm.s.apInterPTs[0]),    MMPage2Phys(pVM, pVM->pgm.s.apInterPTs[1]),
     1932            MMPage2Phys(pVM, pVM->pgm.s.apInterPaePTs[0]), MMPage2Phys(pVM, pVM->pgm.s.apInterPaePTs[1]),
     1933            MMPage2Phys(pVM, pVM->pgm.s.apInterPaePDs[0]), MMPage2Phys(pVM, pVM->pgm.s.apInterPaePDs[1]), MMPage2Phys(pVM, pVM->pgm.s.apInterPaePDs[2]), MMPage2Phys(pVM, pVM->pgm.s.apInterPaePDs[3]),
     1934            MMPage2Phys(pVM, pVM->pgm.s.pInterPaePDPT64)));
    19381935#endif
    19391936
    1940         /*
    1941          * Log the host paging mode. It may come in handy.
    1942          */
    1943         const char *pszHostMode;
    1944         switch (pVM->pgm.s.enmHostMode)
    1945         {
    1946             case SUPPAGINGMODE_32_BIT:              pszHostMode = "32-bit"; break;
    1947             case SUPPAGINGMODE_32_BIT_GLOBAL:       pszHostMode = "32-bit+PGE"; break;
    1948             case SUPPAGINGMODE_PAE:                 pszHostMode = "PAE"; break;
    1949             case SUPPAGINGMODE_PAE_GLOBAL:          pszHostMode = "PAE+PGE"; break;
    1950             case SUPPAGINGMODE_PAE_NX:              pszHostMode = "PAE+NXE"; break;
    1951             case SUPPAGINGMODE_PAE_GLOBAL_NX:       pszHostMode = "PAE+PGE+NXE"; break;
    1952             case SUPPAGINGMODE_AMD64:               pszHostMode = "AMD64"; break;
    1953             case SUPPAGINGMODE_AMD64_GLOBAL:        pszHostMode = "AMD64+PGE"; break;
    1954             case SUPPAGINGMODE_AMD64_NX:            pszHostMode = "AMD64+NX"; break;
    1955             case SUPPAGINGMODE_AMD64_GLOBAL_NX:     pszHostMode = "AMD64+PGE+NX"; break;
    1956             default:                                pszHostMode = "???"; break;
    1957         }
    1958         LogRel(("PGM: Host paging mode: %s\n", pszHostMode));
    1959 
    1960         return VINF_SUCCESS;
    1961     }
    1962 
    1963     LogFlow(("pgmR3InitPaging: returns %Rrc\n", rc));
    1964     return rc;
     1937    /*
     1938     * Log the host paging mode. It may come in handy.
     1939     */
     1940    const char *pszHostMode;
     1941    switch (pVM->pgm.s.enmHostMode)
     1942    {
     1943        case SUPPAGINGMODE_32_BIT:              pszHostMode = "32-bit"; break;
     1944        case SUPPAGINGMODE_32_BIT_GLOBAL:       pszHostMode = "32-bit+PGE"; break;
     1945        case SUPPAGINGMODE_PAE:                 pszHostMode = "PAE"; break;
     1946        case SUPPAGINGMODE_PAE_GLOBAL:          pszHostMode = "PAE+PGE"; break;
     1947        case SUPPAGINGMODE_PAE_NX:              pszHostMode = "PAE+NXE"; break;
     1948        case SUPPAGINGMODE_PAE_GLOBAL_NX:       pszHostMode = "PAE+PGE+NXE"; break;
     1949        case SUPPAGINGMODE_AMD64:               pszHostMode = "AMD64"; break;
     1950        case SUPPAGINGMODE_AMD64_GLOBAL:        pszHostMode = "AMD64+PGE"; break;
     1951        case SUPPAGINGMODE_AMD64_NX:            pszHostMode = "AMD64+NX"; break;
     1952        case SUPPAGINGMODE_AMD64_GLOBAL_NX:     pszHostMode = "AMD64+PGE+NX"; break;
     1953        default:                                pszHostMode = "???"; break;
     1954    }
     1955    LogRel(("PGM: Host paging mode: %s\n", pszHostMode));
     1956
     1957    return VINF_SUCCESS;
    19651958}
    19661959
     
    25922585    pVM->pgm.s.GCPtrCR3Mapping += offDelta;
    25932586
    2594     pgmR3ModeDataInit(pVM, true /* resolve GC/R0 symbols */);
    2595 
    25962587    /* Shadow, guest and both mode switch & relocation for each VCPU. */
    25972588    for (VMCPUID i = 0; i < pVM->cCpus; i++)
     
    25992590        PVMCPU  pVCpu = &pVM->aCpus[i];
    26002591
    2601         pgmR3ModeDataSwitch(pVM, pVCpu, pVCpu->pgm.s.enmShadowMode, pVCpu->pgm.s.enmGuestMode);
     2592        pgmR3ModeDataSwitch(pVCpu, pVCpu->pgm.s.enmShadowMode, pVCpu->pgm.s.enmGuestMode);
    26022593
    26032594        uintptr_t idxShw = pVCpu->pgm.s.idxShadowModeData;
     
    26112602            g_aPgmGuestModeData[idxGst].pfnRelocate(pVCpu, offDelta);
    26122603
    2613         PGM_BTH_PFN(Relocate, pVCpu)(pVCpu, offDelta);
     2604        uintptr_t idxBth = pVCpu->pgm.s.idxBothModeData;
     2605        if (    idxBth < RT_ELEMENTS(g_aPgmBothModeData)
     2606            && g_aPgmBothModeData[idxBth].pfnRelocate)
     2607            g_aPgmBothModeData[idxBth].pfnRelocate(pVCpu, offDelta);
    26142608    }
    26152609
     
    33023296
    33033297/**
    3304  * Initializes the paging mode data kept in PGM::paModeData.
    3305  *
    3306  * @param   pVM             The cross context VM structure.
    3307  * @param   fResolveGCAndR0 Indicate whether or not GC and Ring-0 symbols can be resolved now.
    3308  *                          This is used early in the init process to avoid trouble with PDM
    3309  *                          not being initialized yet.
    3310  */
    3311 static int pgmR3ModeDataInit(PVM pVM, bool fResolveGCAndR0)
    3312 {
    3313     PPGMMODEDATA pModeData;
    3314     int rc;
    3315 
    3316     /*
    3317      * Allocate the array on the first call.
    3318      */
    3319     if (!pVM->pgm.s.paModeData)
    3320     {
    3321         pVM->pgm.s.paModeData = (PPGMMODEDATA)MMR3HeapAllocZ(pVM, MM_TAG_PGM, sizeof(PGMMODEDATA) * pgmModeDataMaxIndex());
    3322         AssertReturn(pVM->pgm.s.paModeData, VERR_NO_MEMORY);
    3323     }
    3324 
    3325     /*
    3326      * Initialize the array entries.
    3327      */
    3328     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_32BIT, PGM_TYPE_REAL)];
    3329     pModeData->uShwType = PGM_TYPE_32BIT;
    3330     pModeData->uGstType = PGM_TYPE_REAL;
    3331     rc = PGM_BTH_NAME_32BIT_REAL(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3332 
    3333     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_32BIT, PGMMODE_PROTECTED)];
    3334     pModeData->uShwType = PGM_TYPE_32BIT;
    3335     pModeData->uGstType = PGM_TYPE_PROT;
    3336     rc = PGM_BTH_NAME_32BIT_PROT(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3337 
    3338     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_32BIT, PGM_TYPE_32BIT)];
    3339     pModeData->uShwType = PGM_TYPE_32BIT;
    3340     pModeData->uGstType = PGM_TYPE_32BIT;
    3341     rc = PGM_BTH_NAME_32BIT_32BIT(InitData)(pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3342 
    3343     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_PAE, PGM_TYPE_REAL)];
    3344     pModeData->uShwType = PGM_TYPE_PAE;
    3345     pModeData->uGstType = PGM_TYPE_REAL;
    3346     rc = PGM_BTH_NAME_PAE_REAL(InitData)(   pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3347 
    3348     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_PAE, PGM_TYPE_PROT)];
    3349     pModeData->uShwType = PGM_TYPE_PAE;
    3350     pModeData->uGstType = PGM_TYPE_PROT;
    3351     rc = PGM_BTH_NAME_PAE_PROT(InitData)(   pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3352 
    3353     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_PAE, PGM_TYPE_32BIT)];
    3354     pModeData->uShwType = PGM_TYPE_PAE;
    3355     pModeData->uGstType = PGM_TYPE_32BIT;
    3356     rc = PGM_BTH_NAME_PAE_32BIT(InitData)(  pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3357 
    3358     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_PAE, PGM_TYPE_PAE)];
    3359     pModeData->uShwType = PGM_TYPE_PAE;
    3360     pModeData->uGstType = PGM_TYPE_PAE;
    3361     rc = PGM_BTH_NAME_PAE_PAE(InitData)(    pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3362 
    3363 #ifdef VBOX_WITH_64_BITS_GUESTS
    3364     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_AMD64, PGM_TYPE_AMD64)];
    3365     pModeData->uShwType = PGM_TYPE_AMD64;
    3366     pModeData->uGstType = PGM_TYPE_AMD64;
    3367     rc = PGM_BTH_NAME_AMD64_AMD64(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3368 #endif
    3369 
    3370     /* The 32-bit nested paging mode. */
    3371     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_32BIT, PGM_TYPE_REAL)];
    3372     pModeData->uShwType = PGM_TYPE_NESTED_32BIT;
    3373     pModeData->uGstType = PGM_TYPE_REAL;
    3374     rc = PGM_BTH_NAME_NESTED_32BIT_REAL(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3375 
    3376     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_32BIT, PGMMODE_PROTECTED)];
    3377     pModeData->uShwType = PGM_TYPE_NESTED_32BIT;
    3378     pModeData->uGstType = PGM_TYPE_PROT;
    3379     rc = PGM_BTH_NAME_NESTED_32BIT_PROT(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3380 
    3381     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_32BIT, PGM_TYPE_32BIT)];
    3382     pModeData->uShwType = PGM_TYPE_NESTED_32BIT;
    3383     pModeData->uGstType = PGM_TYPE_32BIT;
    3384     rc = PGM_BTH_NAME_NESTED_32BIT_32BIT(InitData)(pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3385 
    3386     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_32BIT, PGM_TYPE_PAE)];
    3387     pModeData->uShwType = PGM_TYPE_NESTED_32BIT;
    3388     pModeData->uGstType = PGM_TYPE_PAE;
    3389     rc = PGM_BTH_NAME_NESTED_32BIT_PAE(InitData)(  pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3390 
    3391 #ifdef VBOX_WITH_64_BITS_GUESTS
    3392     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_32BIT, PGM_TYPE_AMD64)];
    3393     pModeData->uShwType = PGM_TYPE_NESTED_32BIT;
    3394     pModeData->uGstType = PGM_TYPE_AMD64;
    3395     rc = PGM_BTH_NAME_NESTED_32BIT_AMD64(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3396 #endif
    3397 
    3398     /* The PAE nested paging mode. */
    3399     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_PAE, PGM_TYPE_REAL)];
    3400     pModeData->uShwType = PGM_TYPE_NESTED_PAE;
    3401     pModeData->uGstType = PGM_TYPE_REAL;
    3402     rc = PGM_BTH_NAME_NESTED_PAE_REAL(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3403 
    3404     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_PAE, PGMMODE_PROTECTED)];
    3405     pModeData->uShwType = PGM_TYPE_NESTED_PAE;
    3406     pModeData->uGstType = PGM_TYPE_PROT;
    3407     rc = PGM_BTH_NAME_NESTED_PAE_PROT(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3408 
    3409     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_PAE, PGM_TYPE_32BIT)];
    3410     pModeData->uShwType = PGM_TYPE_NESTED_PAE;
    3411     pModeData->uGstType = PGM_TYPE_32BIT;
    3412     rc = PGM_BTH_NAME_NESTED_PAE_32BIT(InitData)(pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3413 
    3414     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_PAE, PGM_TYPE_PAE)];
    3415     pModeData->uShwType = PGM_TYPE_NESTED_PAE;
    3416     pModeData->uGstType = PGM_TYPE_PAE;
    3417     rc = PGM_BTH_NAME_NESTED_PAE_PAE(InitData)(  pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3418 
    3419 #ifdef VBOX_WITH_64_BITS_GUESTS
    3420     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_PAE, PGM_TYPE_AMD64)];
    3421     pModeData->uShwType = PGM_TYPE_NESTED_PAE;
    3422     pModeData->uGstType = PGM_TYPE_AMD64;
    3423     rc = PGM_BTH_NAME_NESTED_PAE_AMD64(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3424 #endif
    3425 
    3426     /* The AMD64 nested paging mode. */
    3427     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_AMD64, PGM_TYPE_REAL)];
    3428     pModeData->uShwType = PGM_TYPE_NESTED_AMD64;
    3429     pModeData->uGstType = PGM_TYPE_REAL;
    3430     rc = PGM_BTH_NAME_NESTED_AMD64_REAL(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3431 
    3432     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_AMD64, PGMMODE_PROTECTED)];
    3433     pModeData->uShwType = PGM_TYPE_NESTED_AMD64;
    3434     pModeData->uGstType = PGM_TYPE_PROT;
    3435     rc = PGM_BTH_NAME_NESTED_AMD64_PROT(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3436 
    3437     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_AMD64, PGM_TYPE_32BIT)];
    3438     pModeData->uShwType = PGM_TYPE_NESTED_AMD64;
    3439     pModeData->uGstType = PGM_TYPE_32BIT;
    3440     rc = PGM_BTH_NAME_NESTED_AMD64_32BIT(InitData)(pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3441 
    3442     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_AMD64, PGM_TYPE_PAE)];
    3443     pModeData->uShwType = PGM_TYPE_NESTED_AMD64;
    3444     pModeData->uGstType = PGM_TYPE_PAE;
    3445     rc = PGM_BTH_NAME_NESTED_AMD64_PAE(InitData)(  pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3446 
    3447 #ifdef VBOX_WITH_64_BITS_GUESTS
    3448     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_NESTED_AMD64, PGM_TYPE_AMD64)];
    3449     pModeData->uShwType = PGM_TYPE_NESTED_AMD64;
    3450     pModeData->uGstType = PGM_TYPE_AMD64;
    3451     rc = PGM_BTH_NAME_NESTED_AMD64_AMD64(InitData)( pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3452 #endif
    3453 
    3454     /* Extended paging (EPT) / Intel VT-x */
    3455     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_EPT, PGM_TYPE_REAL)];
    3456     pModeData->uShwType = PGM_TYPE_EPT;
    3457     pModeData->uGstType = PGM_TYPE_REAL;
    3458     rc = PGM_BTH_NAME_EPT_REAL(InitData)(   pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3459 
    3460     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_EPT, PGM_TYPE_PROT)];
    3461     pModeData->uShwType = PGM_TYPE_EPT;
    3462     pModeData->uGstType = PGM_TYPE_PROT;
    3463     rc = PGM_BTH_NAME_EPT_PROT(InitData)(   pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3464 
    3465     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_EPT, PGM_TYPE_32BIT)];
    3466     pModeData->uShwType = PGM_TYPE_EPT;
    3467     pModeData->uGstType = PGM_TYPE_32BIT;
    3468     rc = PGM_BTH_NAME_EPT_32BIT(InitData)(  pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3469 
    3470     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_EPT, PGM_TYPE_PAE)];
    3471     pModeData->uShwType = PGM_TYPE_EPT;
    3472     pModeData->uGstType = PGM_TYPE_PAE;
    3473     rc = PGM_BTH_NAME_EPT_PAE(InitData)(    pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3474 
    3475 #ifdef VBOX_WITH_64_BITS_GUESTS
    3476     pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndex(PGM_TYPE_EPT, PGM_TYPE_AMD64)];
    3477     pModeData->uShwType = PGM_TYPE_EPT;
    3478     pModeData->uGstType = PGM_TYPE_AMD64;
    3479     rc = PGM_BTH_NAME_EPT_AMD64(InitData)(  pVM, pModeData, fResolveGCAndR0); AssertRCReturn(rc, rc);
    3480 #endif
    3481     return VINF_SUCCESS;
    3482 }
    3483 
    3484 
    3485 /**
    34863298 * Switch to different (or relocated in the relocate case) mode data.
    34873299 *
    3488  * @param   pVM         The cross context VM structure.
    34893300 * @param   pVCpu       The cross context virtual CPU structure.
    34903301 * @param   enmShw      The shadow paging mode.
    34913302 * @param   enmGst      The guest paging mode.
    34923303 */
    3493 static void pgmR3ModeDataSwitch(PVM pVM, PVMCPU pVCpu, PGMMODE enmShw, PGMMODE enmGst)
     3304static void pgmR3ModeDataSwitch(PVMCPU pVCpu, PGMMODE enmShw, PGMMODE enmGst)
    34943305{
    34953306    /*
     
    35303341#endif
    35313342    NOREF(idxBth);
    3532 
    3533     /*
    3534      * The following code will be gradually reduced and finally removed:
    3535      */
    3536     PPGMMODEDATA pModeData = &pVM->pgm.s.paModeData[pgmModeDataIndexByMode(enmShw, enmGst)];
    3537 
    3538     Assert(pModeData->uGstType == pgmModeToType(enmGst));
    3539     Assert(pModeData->uShwType == pgmModeToType(enmShw));
    3540 
    3541     /* both */
    3542     pVCpu->pgm.s.pfnR3BthRelocate             = pModeData->pfnR3BthRelocate;
    3543     pVCpu->pgm.s.pfnR3BthInvalidatePage       = pModeData->pfnR3BthInvalidatePage;
    3544     pVCpu->pgm.s.pfnR3BthSyncCR3              = pModeData->pfnR3BthSyncCR3;
    3545     Assert(pVCpu->pgm.s.pfnR3BthSyncCR3);
    3546     pVCpu->pgm.s.pfnR3BthPrefetchPage         = pModeData->pfnR3BthPrefetchPage;
    3547     pVCpu->pgm.s.pfnR3BthVerifyAccessSyncPage = pModeData->pfnR3BthVerifyAccessSyncPage;
    3548 #ifdef VBOX_STRICT
    3549     pVCpu->pgm.s.pfnR3BthAssertCR3            = pModeData->pfnR3BthAssertCR3;
    3550 #endif
    3551     pVCpu->pgm.s.pfnR3BthMapCR3               = pModeData->pfnR3BthMapCR3;
    3552     pVCpu->pgm.s.pfnR3BthUnmapCR3             = pModeData->pfnR3BthUnmapCR3;
    3553 
    3554     pVCpu->pgm.s.pfnRCBthTrap0eHandler        = pModeData->pfnRCBthTrap0eHandler;
    3555     pVCpu->pgm.s.pfnRCBthInvalidatePage       = pModeData->pfnRCBthInvalidatePage;
    3556     pVCpu->pgm.s.pfnRCBthSyncCR3              = pModeData->pfnRCBthSyncCR3;
    3557     pVCpu->pgm.s.pfnRCBthPrefetchPage         = pModeData->pfnRCBthPrefetchPage;
    3558     pVCpu->pgm.s.pfnRCBthVerifyAccessSyncPage = pModeData->pfnRCBthVerifyAccessSyncPage;
    3559 #ifdef VBOX_STRICT
    3560     pVCpu->pgm.s.pfnRCBthAssertCR3            = pModeData->pfnRCBthAssertCR3;
    3561 #endif
    3562     pVCpu->pgm.s.pfnRCBthMapCR3               = pModeData->pfnRCBthMapCR3;
    3563     pVCpu->pgm.s.pfnRCBthUnmapCR3             = pModeData->pfnRCBthUnmapCR3;
    3564 
    3565     pVCpu->pgm.s.pfnR0BthTrap0eHandler        = pModeData->pfnR0BthTrap0eHandler;
    3566     pVCpu->pgm.s.pfnR0BthInvalidatePage       = pModeData->pfnR0BthInvalidatePage;
    3567     pVCpu->pgm.s.pfnR0BthSyncCR3              = pModeData->pfnR0BthSyncCR3;
    3568     pVCpu->pgm.s.pfnR0BthPrefetchPage         = pModeData->pfnR0BthPrefetchPage;
    3569     pVCpu->pgm.s.pfnR0BthVerifyAccessSyncPage = pModeData->pfnR0BthVerifyAccessSyncPage;
    3570 #ifdef VBOX_STRICT
    3571     pVCpu->pgm.s.pfnR0BthAssertCR3            = pModeData->pfnR0BthAssertCR3;
    3572 #endif
    3573     pVCpu->pgm.s.pfnR0BthMapCR3               = pModeData->pfnR0BthMapCR3;
    3574     pVCpu->pgm.s.pfnR0BthUnmapCR3             = pModeData->pfnR0BthUnmapCR3;
    35753343}
    35763344
     
    38873655     * Load new paging mode data.
    38883656     */
    3889     pgmR3ModeDataSwitch(pVM, pVCpu, enmShadowMode, enmGuestMode);
     3657    pgmR3ModeDataSwitch(pVCpu, enmShadowMode, enmGuestMode);
    38903658
    38913659    /*
     
    41463914{
    41473915    /* Unmap the old CR3 value before flushing everything. */
    4148     int rc = PGM_BTH_PFN(UnmapCR3, pVCpu)(pVCpu);
    4149     AssertRC(rc);
     3916    int       rc     = VINF_SUCCESS;
     3917    uintptr_t idxBth = pVCpu->pgm.s.idxBothModeData;
     3918    if (   idxBth < RT_ELEMENTS(g_aPgmBothModeData)
     3919        && g_aPgmBothModeData[idxBth].pfnMapCR3)
     3920    {
     3921        rc = g_aPgmBothModeData[idxBth].pfnUnmapCR3(pVCpu);
     3922        AssertRC(rc);
     3923    }
    41503924
    41513925    /* Exit the current shadow paging mode as well; nested paging and EPT use a root CR3 which will get flushed here. */
  • trunk/src/VBox/VMM/VMMR3/PGMBth.h

    r73246 r73250  
    2121*******************************************************************************/
    2222RT_C_DECLS_BEGIN
    23 PGM_BTH_DECL(int, InitData)(PVM pVM, PPGMMODEDATA pModeData, bool fResolveGCAndR0);
    2423PGM_BTH_DECL(int, Enter)(PVMCPU pVCpu, RTGCPHYS GCPhysCR3);
    2524PGM_BTH_DECL(int, Relocate)(PVMCPU pVCpu, RTGCPTR offDelta);
     
    3534RT_C_DECLS_END
    3635
    37 
    38 /**
    39  * Initializes the both bit of the paging mode data.
    40  *
    41  * @returns VBox status code.
    42  * @param   pVM             The cross context VM structure.
    43  * @param   pModeData       The pointer table to initialize.
    44  * @param   fResolveGCAndR0 Indicate whether or not GC and Ring-0 symbols can be resolved now.
    45  *                          This is used early in the init process to avoid trouble with PDM
    46  *                          not being initialized yet.
    47  */
    48 PGM_BTH_DECL(int, InitData)(PVM pVM, PPGMMODEDATA pModeData, bool fResolveGCAndR0)
    49 {
           /* Sanity: this shadow+guest template instantiation must match the mode-data
              entry it is asked to fill in (tags set by the caller). */
    50     Assert(pModeData->uShwType == PGM_SHW_TYPE); Assert(pModeData->uGstType == PGM_GST_TYPE);
    51 
    52     /* Ring 3 */
           /* Ring-3 pointers are direct function addresses in this image — no loader
              lookup needed. */
    53     pModeData->pfnR3BthRelocate             = PGM_BTH_NAME(Relocate);
    54     pModeData->pfnR3BthSyncCR3              = PGM_BTH_NAME(SyncCR3);
    55     pModeData->pfnR3BthInvalidatePage       = PGM_BTH_NAME(InvalidatePage);
    56     pModeData->pfnR3BthPrefetchPage         = PGM_BTH_NAME(PrefetchPage);
    57     pModeData->pfnR3BthVerifyAccessSyncPage = PGM_BTH_NAME(VerifyAccessSyncPage);
    58 #ifdef VBOX_STRICT
    59     pModeData->pfnR3BthAssertCR3            = PGM_BTH_NAME(AssertCR3);
    60 #endif
    61     pModeData->pfnR3BthMapCR3               = PGM_BTH_NAME(MapCR3);
    62     pModeData->pfnR3BthUnmapCR3             = PGM_BTH_NAME(UnmapCR3);
    63 
           /* RC and R0 symbols live in separate modules and must be resolved through
              the PDM loader; per the function's doc comment this is skipped when those
              modules are not initialized yet (fResolveGCAndR0 == false). */
    64     if (fResolveGCAndR0)
    65     {
    66         int rc;
    67 
               /* The raw-mode context image only exists (and is only needed) when the
                  VM actually runs in raw mode. */
    68         if (VM_IS_RAW_MODE_ENABLED(pVM))
    69         {
    70 #if PGM_SHW_TYPE != PGM_TYPE_AMD64 && !PGM_TYPE_IS_NESTED_OR_EPT(PGM_SHW_TYPE) /* No AMD64 for traditional virtualization, only VT-x and AMD-V. */
    71             /* RC */
                   /* Each lookup asserts and returns the failure status immediately, so a
                      missing symbol leaves pModeData partially filled — callers treat any
                      non-success return as fatal. */
    72             rc = PDMR3LdrGetSymbolRC(pVM, NULL,       PGM_BTH_NAME_RC_STR(Trap0eHandler),       &pModeData->pfnRCBthTrap0eHandler);
    73             AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_RC_STR(Trap0eHandler),  rc), rc);
    74             rc = PDMR3LdrGetSymbolRC(pVM, NULL,       PGM_BTH_NAME_RC_STR(InvalidatePage),      &pModeData->pfnRCBthInvalidatePage);
    75             AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_RC_STR(InvalidatePage), rc), rc);
    76             rc = PDMR3LdrGetSymbolRC(pVM, NULL,       PGM_BTH_NAME_RC_STR(SyncCR3),             &pModeData->pfnRCBthSyncCR3);
    77             AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_RC_STR(SyncCR3), rc), rc);
    78             rc = PDMR3LdrGetSymbolRC(pVM, NULL,       PGM_BTH_NAME_RC_STR(PrefetchPage),        &pModeData->pfnRCBthPrefetchPage);
    79             AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_RC_STR(PrefetchPage), rc), rc);
    80             rc = PDMR3LdrGetSymbolRC(pVM, NULL,       PGM_BTH_NAME_RC_STR(VerifyAccessSyncPage),&pModeData->pfnRCBthVerifyAccessSyncPage);
    81             AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_RC_STR(VerifyAccessSyncPage), rc), rc);
    82 # ifdef VBOX_STRICT
    83             rc = PDMR3LdrGetSymbolRC(pVM, NULL,       PGM_BTH_NAME_RC_STR(AssertCR3),           &pModeData->pfnRCBthAssertCR3);
    84             AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_RC_STR(AssertCR3), rc), rc);
    85 # endif
    86             rc = PDMR3LdrGetSymbolRC(pVM, NULL,       PGM_BTH_NAME_RC_STR(MapCR3),              &pModeData->pfnRCBthMapCR3);
    87             AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_RC_STR(MapCR3), rc), rc);
    88             rc = PDMR3LdrGetSymbolRC(pVM, NULL,       PGM_BTH_NAME_RC_STR(UnmapCR3),            &pModeData->pfnRCBthUnmapCR3);
    89             AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_RC_STR(UnmapCR3), rc), rc);
    90 #endif /* Not AMD64 or nested/ept shadow paging. */
    91         }
    92 
    93         /* Ring 0 */
               /* Ring-0 symbols are resolved unconditionally (R0 is used by both raw-mode
                  and hardware-assisted execution). */
    94         rc = PDMR3LdrGetSymbolR0(pVM, NULL,       PGM_BTH_NAME_R0_STR(Trap0eHandler),       &pModeData->pfnR0BthTrap0eHandler);
    95         AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_R0_STR(Trap0eHandler),  rc), rc);
    96         rc = PDMR3LdrGetSymbolR0(pVM, NULL,       PGM_BTH_NAME_R0_STR(InvalidatePage),      &pModeData->pfnR0BthInvalidatePage);
    97         AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_R0_STR(InvalidatePage), rc), rc);
    98         rc = PDMR3LdrGetSymbolR0(pVM, NULL,       PGM_BTH_NAME_R0_STR(SyncCR3),             &pModeData->pfnR0BthSyncCR3);
    99         AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_R0_STR(SyncCR3), rc), rc);
    100         rc = PDMR3LdrGetSymbolR0(pVM, NULL,       PGM_BTH_NAME_R0_STR(PrefetchPage),        &pModeData->pfnR0BthPrefetchPage);
    101         AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_R0_STR(PrefetchPage), rc), rc);
    102         rc = PDMR3LdrGetSymbolR0(pVM, NULL,       PGM_BTH_NAME_R0_STR(VerifyAccessSyncPage),&pModeData->pfnR0BthVerifyAccessSyncPage);
    103         AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_R0_STR(VerifyAccessSyncPage), rc), rc);
    104 #ifdef VBOX_STRICT
    105         rc = PDMR3LdrGetSymbolR0(pVM, NULL,       PGM_BTH_NAME_R0_STR(AssertCR3),           &pModeData->pfnR0BthAssertCR3);
    106         AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_R0_STR(AssertCR3), rc), rc);
    107 #endif
    108         rc = PDMR3LdrGetSymbolR0(pVM, NULL,       PGM_BTH_NAME_R0_STR(MapCR3),              &pModeData->pfnR0BthMapCR3);
    109         AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_R0_STR(MapCR3), rc), rc);
    110         rc = PDMR3LdrGetSymbolR0(pVM, NULL,       PGM_BTH_NAME_R0_STR(UnmapCR3),            &pModeData->pfnR0BthUnmapCR3);
    111         AssertMsgRCReturn(rc, ("%s -> rc=%Rrc\n", PGM_BTH_NAME_R0_STR(UnmapCR3), rc), rc);
    112     }
    113     return VINF_SUCCESS;
    114 }
    11536
    11637
  • trunk/src/VBox/VMM/VMMR3/PGMGst.h

    r73199 r73250  
    4545     * Map and monitor CR3
    4646     */
    47     int rc = PGM_BTH_PFN(MapCR3, pVCpu)(pVCpu, GCPhysCR3);
    48     return rc;
     47    uintptr_t idxBth = pVCpu->pgm.s.idxBothModeData;
     48    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     49    AssertReturn(g_aPgmBothModeData[idxBth].pfnMapCR3, VERR_PGM_MODE_IPE);
     50    return g_aPgmBothModeData[idxBth].pfnMapCR3(pVCpu, GCPhysCR3);
    4951}
    5052
     
    7981PGM_GST_DECL(int, Exit)(PVMCPU pVCpu)
    8082{
    81     int rc;
    82 
    83     rc = PGM_BTH_PFN(UnmapCR3, pVCpu)(pVCpu);
    84     return rc;
     83    uintptr_t idxBth = pVCpu->pgm.s.idxBothModeData;
     84    AssertReturn(idxBth < RT_ELEMENTS(g_aPgmBothModeData), VERR_PGM_MODE_IPE);
     85    AssertReturn(g_aPgmBothModeData[idxBth].pfnUnmapCR3, VERR_PGM_MODE_IPE);
     86    return g_aPgmBothModeData[idxBth].pfnUnmapCR3(pVCpu);
    8587}
    8688
  • trunk/src/VBox/VMM/include/PGMInternal.h

    r73249 r73250  
    31163116
    31173117#define PGM_BTH_DECL(type, name)        PGM_CTX_DECL(type) PGM_BTH_NAME(name)
    3118 #define PGM_BTH_PFN(name, pVCpu)        ((pVCpu)->pgm.s.PGM_CTX(pfn,Bth##name))
    31193118/** @} */
    31203119
     
    32053204/** The guest+shadow mode data array. */
    32063205extern PGMMODEDATABTH const g_aPgmBothModeData[PGM_BOTH_MODE_DATA_ARRAY_SIZE];
    3207 
    3208 
    3209 
    3210 /**
    3211  * Data for each paging mode.
    3212  */
    3213 typedef struct PGMMODEDATA
    3214 {
    3215     /** The guest mode type. */
    3216     uint32_t                        uGstType;
    3217     /** The shadow mode type. */
    3218     uint32_t                        uShwType;
    3219 
    3220     /** @name Function pointers for Both Shadow and Guest paging.
    3221      * @{
    3222      */
             /* Ring-3 callbacks (direct addresses in the R3 image). */
    3223     DECLR3CALLBACKMEMBER(int,       pfnR3BthRelocate,(PVMCPU pVCpu, RTGCPTR offDelta));
    3224     /*                           no pfnR3BthTrap0eHandler */
    3225     DECLR3CALLBACKMEMBER(int,       pfnR3BthInvalidatePage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    3226     DECLR3CALLBACKMEMBER(int,       pfnR3BthSyncCR3,(PVMCPU pVCpu, uint64_t cr0, uint64_t cr3, uint64_t cr4, bool fGlobal));
    3227     DECLR3CALLBACKMEMBER(int,       pfnR3BthPrefetchPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    3228     DECLR3CALLBACKMEMBER(int,       pfnR3BthVerifyAccessSyncPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage, unsigned fFlags, unsigned uError));
    3229 #ifdef VBOX_STRICT
    3230     DECLR3CALLBACKMEMBER(unsigned,  pfnR3BthAssertCR3,(PVMCPU pVCpu, uint64_t cr3, uint64_t cr4, RTGCPTR GCPtr, RTGCPTR cb));
    3231 #endif
    3232     DECLR3CALLBACKMEMBER(int,       pfnR3BthMapCR3,(PVMCPU pVCpu, RTGCPHYS GCPhysCR3));
    3233     DECLR3CALLBACKMEMBER(int,       pfnR3BthUnmapCR3,(PVMCPU pVCpu));
    3234 
             /* Raw-mode-context (RC) callbacks; resolved via the PDM loader and only
              * filled in when raw-mode execution is enabled for the VM. */
    3235     DECLRCCALLBACKMEMBER(int,       pfnRCBthTrap0eHandler,(PVMCPU pVCpu, RTGCUINT uErr, PCPUMCTXCORE pRegFrame, RTGCPTR pvFault, bool *pfLockTaken));
    3236     DECLRCCALLBACKMEMBER(int,       pfnRCBthInvalidatePage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    3237     DECLRCCALLBACKMEMBER(int,       pfnRCBthSyncCR3,(PVMCPU pVCpu, uint64_t cr0, uint64_t cr3, uint64_t cr4, bool fGlobal));
    3238     DECLRCCALLBACKMEMBER(int,       pfnRCBthPrefetchPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    3239     DECLRCCALLBACKMEMBER(int,       pfnRCBthVerifyAccessSyncPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage, unsigned fFlags, unsigned uError));
    3240 #ifdef VBOX_STRICT
    3241     DECLRCCALLBACKMEMBER(unsigned,  pfnRCBthAssertCR3,(PVMCPU pVCpu, uint64_t cr3, uint64_t cr4, RTGCPTR GCPtr, RTGCPTR cb));
    3242 #endif
    3243     DECLRCCALLBACKMEMBER(int,       pfnRCBthMapCR3,(PVMCPU pVCpu, RTGCPHYS GCPhysCR3));
    3244     DECLRCCALLBACKMEMBER(int,       pfnRCBthUnmapCR3,(PVMCPU pVCpu));
    3245 
             /* Ring-0 callbacks; resolved via the PDM loader unconditionally. */
    3246     DECLR0CALLBACKMEMBER(int,       pfnR0BthTrap0eHandler,(PVMCPU pVCpu, RTGCUINT uErr, PCPUMCTXCORE pRegFrame, RTGCPTR pvFault, bool *pfLockTaken));
    3247     DECLR0CALLBACKMEMBER(int,       pfnR0BthInvalidatePage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    3248     DECLR0CALLBACKMEMBER(int,       pfnR0BthSyncCR3,(PVMCPU pVCpu, uint64_t cr0, uint64_t cr3, uint64_t cr4, bool fGlobal));
    3249     DECLR0CALLBACKMEMBER(int,       pfnR0BthPrefetchPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    3250     DECLR0CALLBACKMEMBER(int,       pfnR0BthVerifyAccessSyncPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage, unsigned fFlags, unsigned uError));
    3251 #ifdef VBOX_STRICT
    3252     DECLR0CALLBACKMEMBER(unsigned,  pfnR0BthAssertCR3,(PVMCPU pVCpu, uint64_t cr3, uint64_t cr4, RTGCPTR GCPtr, RTGCPTR cb));
    3253 #endif
    3254     DECLR0CALLBACKMEMBER(int,       pfnR0BthMapCR3,(PVMCPU pVCpu, RTGCPHYS GCPhysCR3));
    3255     DECLR0CALLBACKMEMBER(int,       pfnR0BthUnmapCR3,(PVMCPU pVCpu));
    3256     /** @} */
    3257 } PGMMODEDATA, *PPGMMODEDATA;
    32583206
    32593207
     
    34753423     * Registration order. */
    34763424    R3PTRTYPE(PPGMREGMMIORANGE)     pRegMmioRangesR3;
    3477     /** Pointer to SHW+GST mode data (function pointers).
    3478      * The index into this table is made up from */
    3479     R3PTRTYPE(PPGMMODEDATA)         paModeData;
    3480     RTR3PTR                         R3PtrAlignment0;
    34813425    /** MMIO2 lookup array for ring-3.  Indexed by idMmio2 minus 1. */
    34823426    R3PTRTYPE(PPGMREGMMIORANGE)     apMmio2RangesR3[PGM_MMIO2_MAX_RANGES];
     
    41534097    /** @} */
    41544098
    4155     /** @name Function pointers for Both Shadow and Guest paging.
    4156      * @{
    4157      */
    4158     DECLR3CALLBACKMEMBER(int,       pfnR3BthRelocate,(PVMCPU pVCpu, RTGCPTR offDelta));
    4159     /*                           no pfnR3BthTrap0eHandler */
    4160     DECLR3CALLBACKMEMBER(int,       pfnR3BthInvalidatePage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    4161     DECLR3CALLBACKMEMBER(int,       pfnR3BthSyncCR3,(PVMCPU pVCpu, uint64_t cr0, uint64_t cr3, uint64_t cr4, bool fGlobal));
    4162     DECLR3CALLBACKMEMBER(int,       pfnR3BthPrefetchPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    4163     DECLR3CALLBACKMEMBER(int,       pfnR3BthVerifyAccessSyncPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage, unsigned fFlags, unsigned uError));
    4164     DECLR3CALLBACKMEMBER(unsigned,  pfnR3BthAssertCR3,(PVMCPU pVCpu, uint64_t cr3, uint64_t cr4, RTGCPTR GCPtr, RTGCPTR cb));
    4165     DECLR3CALLBACKMEMBER(int,       pfnR3BthMapCR3,(PVMCPU pVCpu, RTGCPHYS GCPhysCR3));
    4166     DECLR3CALLBACKMEMBER(int,       pfnR3BthUnmapCR3,(PVMCPU pVCpu));
    4167 
    4168     DECLR0CALLBACKMEMBER(int,       pfnR0BthTrap0eHandler,(PVMCPU pVCpu, RTGCUINT uErr, PCPUMCTXCORE pRegFrame, RTGCPTR pvFault, bool *pfLockTaken));
    4169     DECLR0CALLBACKMEMBER(int,       pfnR0BthInvalidatePage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    4170     DECLR0CALLBACKMEMBER(int,       pfnR0BthSyncCR3,(PVMCPU pVCpu, uint64_t cr0, uint64_t cr3, uint64_t cr4, bool fGlobal));
    4171     DECLR0CALLBACKMEMBER(int,       pfnR0BthPrefetchPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    4172     DECLR0CALLBACKMEMBER(int,       pfnR0BthVerifyAccessSyncPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage, unsigned fFlags, unsigned uError));
    4173     DECLR0CALLBACKMEMBER(unsigned,  pfnR0BthAssertCR3,(PVMCPU pVCpu, uint64_t cr3, uint64_t cr4, RTGCPTR GCPtr, RTGCPTR cb));
    4174     DECLR0CALLBACKMEMBER(int,       pfnR0BthMapCR3,(PVMCPU pVCpu, RTGCPHYS GCPhysCR3));
    4175     DECLR0CALLBACKMEMBER(int,       pfnR0BthUnmapCR3,(PVMCPU pVCpu));
    4176 
    4177     DECLRCCALLBACKMEMBER(int,       pfnRCBthTrap0eHandler,(PVMCPU pVCpu, RTGCUINT uErr, PCPUMCTXCORE pRegFrame, RTGCPTR pvFault, bool *pfLockTaken));
    4178     DECLRCCALLBACKMEMBER(int,       pfnRCBthInvalidatePage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    4179     DECLRCCALLBACKMEMBER(int,       pfnRCBthSyncCR3,(PVMCPU pVCpu, uint64_t cr0, uint64_t cr3, uint64_t cr4, bool fGlobal));
    4180     DECLRCCALLBACKMEMBER(int,       pfnRCBthPrefetchPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage));
    4181     DECLRCCALLBACKMEMBER(int,       pfnRCBthVerifyAccessSyncPage,(PVMCPU pVCpu, RTGCPTR GCPtrPage, unsigned fFlags, unsigned uError));
    4182     DECLRCCALLBACKMEMBER(unsigned,  pfnRCBthAssertCR3,(PVMCPU pVCpu, uint64_t cr3, uint64_t cr4, RTGCPTR GCPtr, RTGCPTR cb));
    4183     DECLRCCALLBACKMEMBER(int,       pfnRCBthMapCR3,(PVMCPU pVCpu, RTGCPHYS GCPhysCR3));
    4184     DECLRCCALLBACKMEMBER(int,       pfnRCBthUnmapCR3,(PVMCPU pVCpu));
    4185 #if 0
    4186     RTRCPTR                         alignment2; /**< structure size alignment. */
    4187 #endif
    4188     /** @} */
    4189 
    41904099    /** For saving stack space, the disassembler state is allocated here instead of
    41914100     * on the stack. */
  • trunk/src/VBox/VMM/testcase/tstVMStruct.h

    r73246 r73250  
    667667    GEN_CHECK_OFF(PGMCPU, pShwPageCR3R0);
    668668    GEN_CHECK_OFF(PGMCPU, pShwPageCR3RC);
    669     GEN_CHECK_OFF(PGMCPU, pfnR3BthMapCR3);
    670     GEN_CHECK_OFF(PGMCPU, pfnR3BthUnmapCR3);
    671     GEN_CHECK_OFF(PGMCPU, pfnR3BthRelocate);
    672     GEN_CHECK_OFF(PGMCPU, pfnR3BthSyncCR3);
    673     GEN_CHECK_OFF(PGMCPU, pfnR3BthInvalidatePage);
    674     GEN_CHECK_OFF(PGMCPU, pfnR3BthPrefetchPage);
    675     GEN_CHECK_OFF(PGMCPU, pfnR3BthVerifyAccessSyncPage);
    676     GEN_CHECK_OFF(PGMCPU, pfnR3BthAssertCR3);
    677     GEN_CHECK_OFF(PGMCPU, pfnRCBthTrap0eHandler);
    678     GEN_CHECK_OFF(PGMCPU, pfnRCBthInvalidatePage);
    679     GEN_CHECK_OFF(PGMCPU, pfnRCBthPrefetchPage);
    680     GEN_CHECK_OFF(PGMCPU, pfnRCBthVerifyAccessSyncPage);
    681     GEN_CHECK_OFF(PGMCPU, pfnRCBthAssertCR3);
    682     GEN_CHECK_OFF(PGMCPU, pfnR0BthTrap0eHandler);
    683     GEN_CHECK_OFF(PGMCPU, pfnR0BthInvalidatePage);
    684     GEN_CHECK_OFF(PGMCPU, pfnR0BthPrefetchPage);
    685     GEN_CHECK_OFF(PGMCPU, pfnR0BthVerifyAccessSyncPage);
    686     GEN_CHECK_OFF(PGMCPU, pfnR0BthAssertCR3);
    687669    GEN_CHECK_OFF(PGMCPU, DisState);
    688670    GEN_CHECK_OFF(PGMCPU, cGuestModeChanges);
Note: See TracChangeset for help on using the changeset viewer.

© 2025 Oracle · Support · Privacy / Do Not Sell My Info · Terms of Use · Trademark Policy · Automated Access Etiquette