VirtualBox

Changeset 80118 in vbox for trunk/src/VBox/VMM/VMMAll


Ignore: (whitespace option)
Timestamp: Aug 4, 2019 2:39:54 AM (6 years ago)
Author: vboxsync
svn:sync-xref-src-repo-rev: 132559
Message: VMM: Kicking out raw-mode and 32-bit hosts - MM, PGM, ++. bugref:9517 bugref:9511

Location: trunk/src/VBox/VMM/VMMAll
Files: 4 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMAll/HMAll.cpp

    r79828 r80118  
    449449
    450450#endif /* IN_RING0 */
    451 #ifndef IN_RC
    452451
    453452/**
     
    695694}
    696695
    697 #endif /* !IN_RC */
    698696
    699697/**
     
    744742
    745743
    746 #ifndef IN_RC
    747744/**
    748745 * Notification callback which is called whenever there is a chance that a CR3
     
    758755VMM_INT_DECL(void) HMHCChangedPagingMode(PVM pVM, PVMCPU pVCpu, PGMMODE enmShadowMode, PGMMODE enmGuestMode)
    759756{
    760 # ifdef IN_RING3
     757#ifdef IN_RING3
    761758    /* Ignore page mode changes during state loading. */
    762759    if (VMR3GetState(pVM) == VMSTATE_LOADING)
    763760        return;
    764 # endif
     761#endif
    765762
    766763    pVCpu->hm.s.enmShadowMode = enmShadowMode;
     
    776773    }
    777774
    778 # ifdef IN_RING0
     775#ifdef IN_RING0
    779776    /*
    780777     * We need to tickle SVM and VT-x state updates.
     
    792789        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, fChanged);
    793790    }
    794 # endif
     791#endif
    795792
    796793    Log4(("HMHCChangedPagingMode: Guest paging mode '%s', shadow paging mode '%s'\n", PGMGetModeName(enmGuestMode),
    797794          PGMGetModeName(enmShadowMode)));
    798795}
    799 #endif /* !IN_RC */
    800796
    801797
  • trunk/src/VBox/VMM/VMMAll/PGMAll.cpp

    r80080 r80118  
    10391039
    10401040
     1041#ifndef PGM_WITHOUT_MAPPINGS
    10411042/**
    10421043 * Gets the mapping corresponding to the specified address (if any).
     
    10611062    return NULL;
    10621063}
     1064#endif
    10631065
    10641066
     
    23382340        AssertRC(rc);
    23392341#endif
    2340         if (RT_SUCCESS(rc) && fChanged)
    2341         {
    2342             RCPtr = (RTRCPTR)(RTRCUINTPTR)(pVM->pgm.s.GCPtrCR3Mapping + (1 + iPdpt) * PAGE_SIZE);
    2343             rc = PGMMap(pVM, (RTRCUINTPTR)RCPtr, PGM_PAGE_GET_HCPHYS(pPage), PAGE_SIZE, 0);
    2344         }
    23452342        if (RT_SUCCESS(rc))
    23462343        {
     
    24792476{
    24802477    PPGMPOOLPAGE pPoolPage = pVCpu->pgm.s.CTX_SUFF(pShwPageCR3);
    2481     AssertPtrReturn(pPoolPage, 0);
     2478    AssertPtrReturn(pPoolPage, NIL_RTHCPHYS);
    24822479    return pPoolPage->Core.Key;
    2483 }
    2484 
    2485 
    2486 /**
    2487  * Gets the current CR3 register value for the nested memory context.
    2488  * @returns CR3 value.
    2489  * @param   pVCpu           The cross context virtual CPU structure.
    2490  * @param   enmShadowMode   The shadow paging mode.
    2491  */
    2492 VMMDECL(RTHCPHYS) PGMGetNestedCR3(PVMCPU pVCpu, PGMMODE enmShadowMode)
    2493 {
    2494     NOREF(enmShadowMode);
    2495     Assert(pVCpu->pgm.s.CTX_SUFF(pShwPageCR3));
    2496     return pVCpu->pgm.s.CTX_SUFF(pShwPageCR3)->Core.Key;
    2497 }
    2498 
    2499 
    2500 /**
    2501  * Gets the current CR3 register value for the HC intermediate memory context.
    2502  * @returns CR3 value.
    2503  * @param   pVM         The cross context VM structure.
    2504  */
    2505 VMMDECL(RTHCPHYS) PGMGetInterHCCR3(PVM pVM)
    2506 {
    2507     switch (pVM->pgm.s.enmHostMode)
    2508     {
    2509         case SUPPAGINGMODE_32_BIT:
    2510         case SUPPAGINGMODE_32_BIT_GLOBAL:
    2511             return pVM->pgm.s.HCPhysInterPD;
    2512 
    2513         case SUPPAGINGMODE_PAE:
    2514         case SUPPAGINGMODE_PAE_GLOBAL:
    2515         case SUPPAGINGMODE_PAE_NX:
    2516         case SUPPAGINGMODE_PAE_GLOBAL_NX:
    2517             return pVM->pgm.s.HCPhysInterPaePDPT;
    2518 
    2519         case SUPPAGINGMODE_AMD64:
    2520         case SUPPAGINGMODE_AMD64_GLOBAL:
    2521         case SUPPAGINGMODE_AMD64_NX:
    2522         case SUPPAGINGMODE_AMD64_GLOBAL_NX:
    2523             return pVM->pgm.s.HCPhysInterPaePDPT;
    2524 
    2525         default:
    2526             AssertMsgFailed(("enmHostMode=%d\n", pVM->pgm.s.enmHostMode));
    2527             return NIL_RTHCPHYS;
    2528     }
    2529 }
    2530 
    2531 
    2532 /**
    2533  * Gets the current CR3 register value for the RC intermediate memory context.
    2534  * @returns CR3 value.
    2535  * @param   pVM         The cross context VM structure.
    2536  * @param   pVCpu       The cross context virtual CPU structure.
    2537  */
    2538 VMMDECL(RTHCPHYS) PGMGetInterRCCR3(PVM pVM, PVMCPU pVCpu)
    2539 {
    2540     switch (pVCpu->pgm.s.enmShadowMode)
    2541     {
    2542         case PGMMODE_32_BIT:
    2543             return pVM->pgm.s.HCPhysInterPD;
    2544 
    2545         case PGMMODE_PAE:
    2546         case PGMMODE_PAE_NX:
    2547             return pVM->pgm.s.HCPhysInterPaePDPT;
    2548 
    2549         case PGMMODE_AMD64:
    2550         case PGMMODE_AMD64_NX:
    2551             return pVM->pgm.s.HCPhysInterPaePML4;
    2552 
    2553         case PGMMODE_NESTED_32BIT:
    2554         case PGMMODE_NESTED_PAE:
    2555         case PGMMODE_NESTED_AMD64:
    2556         case PGMMODE_EPT:
    2557             return 0; /* not relevant */
    2558 
    2559         default:
    2560             AssertMsgFailed(("enmShadowMode=%d\n", pVCpu->pgm.s.enmShadowMode));
    2561             return NIL_RTHCPHYS;
    2562     }
    2563 }
    2564 
    2565 
    2566 /**
    2567  * Gets the CR3 register value for the 32-Bit intermediate memory context.
    2568  * @returns CR3 value.
    2569  * @param   pVM         The cross context VM structure.
    2570  */
    2571 VMMDECL(RTHCPHYS) PGMGetInter32BitCR3(PVM pVM)
    2572 {
    2573     return pVM->pgm.s.HCPhysInterPD;
    2574 }
    2575 
    2576 
    2577 /**
    2578  * Gets the CR3 register value for the PAE intermediate memory context.
    2579  * @returns CR3 value.
    2580  * @param   pVM         The cross context VM structure.
    2581  */
    2582 VMMDECL(RTHCPHYS) PGMGetInterPaeCR3(PVM pVM)
    2583 {
    2584     return pVM->pgm.s.HCPhysInterPaePDPT;
    2585 }
    2586 
    2587 
    2588 /**
    2589  * Gets the CR3 register value for the AMD64 intermediate memory context.
    2590  * @returns CR3 value.
    2591  * @param   pVM         The cross context VM structure.
    2592  */
    2593 VMMDECL(RTHCPHYS) PGMGetInterAmd64CR3(PVM pVM)
    2594 {
    2595     return pVM->pgm.s.HCPhysInterPaePML4;
    25962480}
    25972481
     
    32423126     * Calc the shadow mode and switcher.
    32433127     */
    3244     PGMMODE     enmShadowMode = PGMMODE_INVALID;
    3245     enmShadowMode = pgmCalcShadowMode(pVM, enmGuestMode, pVM->pgm.s.enmHostMode, pVCpu->pgm.s.enmShadowMode);
     3128    PGMMODE enmShadowMode = pgmCalcShadowMode(pVM, enmGuestMode, pVM->pgm.s.enmHostMode, pVCpu->pgm.s.enmShadowMode);
    32463129
    32473130    /*
     
    38653748#ifdef VBOX_STRICT
    38663749
     3750# ifndef PGM_WITHOUT_MAPPINGS
    38673751/**
    38683752 * Asserts that there are no mapping conflicts.
     
    38873771    {
    38883772        /** @todo This is slow and should be optimized, but since it's just assertions I don't care now. */
    3889         for (RTGCPTR GCPtr = pMapping->GCPtr;
    3890               GCPtr <= pMapping->GCPtrLast;
    3891               GCPtr += PAGE_SIZE)
     3773        for (RTGCPTR GCPtr = pMapping->GCPtr; GCPtr <= pMapping->GCPtrLast; GCPtr += PAGE_SIZE)
    38923774        {
    38933775            int rc = PGMGstGetPage(pVCpu, (RTGCPTR)GCPtr, NULL, NULL);
     
    39033785    return cErrors;
    39043786}
     3787# endif
    39053788
    39063789
  • trunk/src/VBox/VMM/VMMAll/PGMAllBth.h

    r80014 r80118  
    46944694    if (RT_SUCCESS(rc))
    46954695    {
    4696         rc = PGMMap(pVM, (RTGCPTR)pVM->pgm.s.GCPtrCR3Mapping, HCPhysGuestCR3, PAGE_SIZE, 0);
    4697         if (RT_SUCCESS(rc))
    4698         {
    4699 # ifdef IN_RC
    4700             PGM_INVL_PG(pVCpu, pVM->pgm.s.GCPtrCR3Mapping);
    4701 # endif
    47024696# if PGM_GST_TYPE == PGM_TYPE_32BIT
    4703             pVCpu->pgm.s.pGst32BitPdR3 = (R3PTRTYPE(PX86PD))HCPtrGuestCR3;
     4697        pVCpu->pgm.s.pGst32BitPdR3 = (R3PTRTYPE(PX86PD))HCPtrGuestCR3;
    47044698#  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4705             pVCpu->pgm.s.pGst32BitPdR0 = (R0PTRTYPE(PX86PD))HCPtrGuestCR3;
    4706 #  endif
    4707             pVCpu->pgm.s.pGst32BitPdRC = (RCPTRTYPE(PX86PD))(RTRCUINTPTR)pVM->pgm.s.GCPtrCR3Mapping;
     4699        pVCpu->pgm.s.pGst32BitPdR0 = (R0PTRTYPE(PX86PD))HCPtrGuestCR3;
     4700#  endif
    47084701
    47094702# elif PGM_GST_TYPE == PGM_TYPE_PAE
    4710             unsigned off = GCPhysCR3 & GST_CR3_PAGE_MASK & PAGE_OFFSET_MASK;
    4711             pVCpu->pgm.s.pGstPaePdptR3 = (R3PTRTYPE(PX86PDPT))HCPtrGuestCR3;
     4703        pVCpu->pgm.s.pGstPaePdptR3 = (R3PTRTYPE(PX86PDPT))HCPtrGuestCR3;
    47124704#  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4713             pVCpu->pgm.s.pGstPaePdptR0 = (R0PTRTYPE(PX86PDPT))HCPtrGuestCR3;
    4714 #  endif
    4715             pVCpu->pgm.s.pGstPaePdptRC = (RCPTRTYPE(PX86PDPT))((RTRCUINTPTR)pVM->pgm.s.GCPtrCR3Mapping + off);
    4716             LogFlow(("Cached mapping %RRv\n", pVCpu->pgm.s.pGstPaePdptRC));
    4717 
    4718             /*
    4719              * Map the 4 PDs too.
    4720              */
    4721             PX86PDPT pGuestPDPT = pgmGstGetPaePDPTPtr(pVCpu);
    4722             RTGCPTR  GCPtr      = pVM->pgm.s.GCPtrCR3Mapping + PAGE_SIZE;
    4723             for (unsigned i = 0; i < X86_PG_PAE_PDPE_ENTRIES; i++, GCPtr += PAGE_SIZE)
     4705        pVCpu->pgm.s.pGstPaePdptR0 = (R0PTRTYPE(PX86PDPT))HCPtrGuestCR3;
     4706#  endif
     4707
     4708        /*
     4709         * Map the 4 PDs too.
     4710         */
     4711        PX86PDPT pGuestPDPT = pgmGstGetPaePDPTPtr(pVCpu);
     4712        for (unsigned i = 0; i < X86_PG_PAE_PDPE_ENTRIES; i++)
     4713        {
     4714            pVCpu->pgm.s.aGstPaePdpeRegs[i].u = pGuestPDPT->a[i].u;
     4715            if (pGuestPDPT->a[i].n.u1Present)
    47244716            {
    4725                 pVCpu->pgm.s.aGstPaePdpeRegs[i].u = pGuestPDPT->a[i].u;
    4726                 if (pGuestPDPT->a[i].n.u1Present)
     4717                RTHCPTR     HCPtr;
     4718                RTHCPHYS    HCPhys;
     4719                RTGCPHYS    GCPhys = PGM_A20_APPLY(pVCpu, pGuestPDPT->a[i].u & X86_PDPE_PG_MASK);
     4720                pgmLock(pVM);
     4721                PPGMPAGE    pPage  = pgmPhysGetPage(pVM, GCPhys);
     4722                AssertReturn(pPage, VERR_PGM_INVALID_PDPE_ADDR);
     4723                HCPhys = PGM_PAGE_GET_HCPHYS(pPage);
     4724#  if defined(IN_RC) || defined(VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0)
     4725                HCPtr = NIL_RTHCPTR;
     4726                int rc2 = VINF_SUCCESS;
     4727#  else
     4728                int rc2 = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhys, (void **)&HCPtr);
     4729#  endif
     4730                pgmUnlock(pVM);
     4731                if (RT_SUCCESS(rc2))
    47274732                {
    4728                     RTHCPTR     HCPtr;
    4729                     RTHCPHYS    HCPhys;
    4730                     RTGCPHYS    GCPhys = PGM_A20_APPLY(pVCpu, pGuestPDPT->a[i].u & X86_PDPE_PG_MASK);
    4731                     pgmLock(pVM);
    4732                     PPGMPAGE    pPage  = pgmPhysGetPage(pVM, GCPhys);
    4733                     AssertReturn(pPage, VERR_PGM_INVALID_PDPE_ADDR);
    4734                     HCPhys = PGM_PAGE_GET_HCPHYS(pPage);
    4735 #  if defined(IN_RC) || defined(VBOX_WITH_2X_4GB_ADDR_SPACE_IN_R0)
    4736                     HCPtr = NIL_RTHCPTR;
    4737                     int rc2 = VINF_SUCCESS;
    4738 #  else
    4739                     int rc2 = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhys, (void **)&HCPtr);
    4740 #  endif
    4741                     pgmUnlock(pVM);
    4742                     if (RT_SUCCESS(rc2))
    4743                     {
    4744                         rc = PGMMap(pVM, GCPtr, HCPhys, PAGE_SIZE, 0);
    4745                         AssertRCReturn(rc, rc);
    4746 
    4747                         pVCpu->pgm.s.apGstPaePDsR3[i]     = (R3PTRTYPE(PX86PDPAE))HCPtr;
     4733                    pVCpu->pgm.s.apGstPaePDsR3[i]     = (R3PTRTYPE(PX86PDPAE))HCPtr;
    47484734#  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4749                         pVCpu->pgm.s.apGstPaePDsR0[i]     = (R0PTRTYPE(PX86PDPAE))HCPtr;
    4750 #  endif
    4751                         pVCpu->pgm.s.apGstPaePDsRC[i]     = (RCPTRTYPE(PX86PDPAE))(RTRCUINTPTR)GCPtr;
    4752                         pVCpu->pgm.s.aGCPhysGstPaePDs[i]  = GCPhys;
     4735                    pVCpu->pgm.s.apGstPaePDsR0[i]     = (R0PTRTYPE(PX86PDPAE))HCPtr;
     4736#  endif
     4737                    pVCpu->pgm.s.aGCPhysGstPaePDs[i]  = GCPhys;
     4738                    continue;
     4739                }
     4740                AssertMsgFailed(("pgmR3Gst32BitMapCR3: rc2=%d GCPhys=%RGp i=%d\n", rc2, GCPhys, i));
     4741            }
     4742
     4743            pVCpu->pgm.s.apGstPaePDsR3[i]     = 0;
     4744#  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
     4745            pVCpu->pgm.s.apGstPaePDsR0[i]     = 0;
     4746#  endif
     4747            pVCpu->pgm.s.apGstPaePDsRC[i]     = 0;
     4748            pVCpu->pgm.s.aGCPhysGstPaePDs[i]  = NIL_RTGCPHYS;
    47534749#  ifdef IN_RC
    4754                         PGM_INVL_PG(pVCpu, GCPtr);
    4755 #  endif
    4756                         continue;
    4757                     }
    4758                     AssertMsgFailed(("pgmR3Gst32BitMapCR3: rc2=%d GCPhys=%RGp i=%d\n", rc2, GCPhys, i));
    4759                 }
    4760 
    4761                 pVCpu->pgm.s.apGstPaePDsR3[i]     = 0;
     4750            PGM_INVL_PG(pVCpu, GCPtr); /** @todo this shouldn't be necessary? */
     4751#  endif
     4752        }
     4753
     4754# elif PGM_GST_TYPE == PGM_TYPE_AMD64
     4755        pVCpu->pgm.s.pGstAmd64Pml4R3 = (R3PTRTYPE(PX86PML4))HCPtrGuestCR3;
    47624756#  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4763                 pVCpu->pgm.s.apGstPaePDsR0[i]     = 0;
    4764 #  endif
    4765                 pVCpu->pgm.s.apGstPaePDsRC[i]     = 0;
    4766                 pVCpu->pgm.s.aGCPhysGstPaePDs[i]  = NIL_RTGCPHYS;
    4767 #  ifdef IN_RC
    4768                 PGM_INVL_PG(pVCpu, GCPtr); /** @todo this shouldn't be necessary? */
    4769 #  endif
    4770             }
    4771 
    4772 # elif PGM_GST_TYPE == PGM_TYPE_AMD64
    4773             pVCpu->pgm.s.pGstAmd64Pml4R3 = (R3PTRTYPE(PX86PML4))HCPtrGuestCR3;
    4774 #  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4775             pVCpu->pgm.s.pGstAmd64Pml4R0 = (R0PTRTYPE(PX86PML4))HCPtrGuestCR3;
    4776 #  endif
    4777 # endif
    4778         }
    4779         else
    4780             AssertMsgFailed(("rc=%Rrc GCPhysGuestPD=%RGp\n", rc, GCPhysCR3));
     4757        pVCpu->pgm.s.pGstAmd64Pml4R0 = (R0PTRTYPE(PX86PML4))HCPtrGuestCR3;
     4758#  endif
     4759# endif
    47814760    }
    47824761    else
  • trunk/src/VBox/VMM/VMMAll/PGMAllMap.cpp

    r80024 r80118  
    3131
    3232
     33#ifndef PGM_WITHOUT_MAPPINGS
     34
    3335/**
    3436 * Maps a range of physical pages at a given virtual address
     
    260262}
    261263
    262 #ifndef PGM_WITHOUT_MAPPINGS
    263264
    264265/**
     
    544545}
    545546
    546 #endif /* PGM_WITHOUT_MAPPINGS */
    547 #if defined(VBOX_STRICT) && !defined(IN_RING0)
     547# if defined(VBOX_STRICT) && !defined(IN_RING0)
    548548
    549549/**
     
    656656}
    657657
    658 #endif /* defined(VBOX_STRICT) && !defined(IN_RING0) */
    659 #ifndef PGM_WITHOUT_MAPPINGS
     658
     659# endif /* defined(VBOX_STRICT) && !defined(IN_RING0) */
    660660
    661661/**
     
    682682    Log4(("pgmMapActivateCR3: fixed mappings=%RTbool idxShwPageCR3=%#x\n", pVM->pgm.s.fMappingsFixed, pShwPageCR3 ? pShwPageCR3->idx : NIL_PGMPOOL_IDX));
    683683
    684 #ifdef VBOX_STRICT
     684# ifdef VBOX_STRICT
    685685    PVMCPU pVCpu = VMMGetCpu0(pVM);
    686686    Assert(pShwPageCR3 && pShwPageCR3 == pVCpu->pgm.s.CTX_SUFF(pShwPageCR3));
    687 #endif
     687# endif
    688688
    689689    /*
     
    903903                {
    904904                    STAM_COUNTER_INC(&pVM->pgm.s.CTX_SUFF(pStats)->StatR3DetectedConflicts);
    905 #ifdef IN_RING3
     905# ifdef IN_RING3
    906906                    Log(("PGMHasMappingConflicts: Conflict was detected at %RGv for mapping %s (PAE)\n"
    907907                         "                        PDE=%016RX64.\n",
     
    910910                    AssertRCReturn(rc, rc);
    911911                    break;
    912 #else
     912# else
    913913                    Log(("PGMHasMappingConflicts: Conflict was detected at %RGv for mapping (PAE)\n"
    914914                         "                        PDE=%016RX64.\n",
    915915                         GCPtr, Pde.u));
    916916                    return VINF_PGM_SYNC_CR3;
    917 #endif
     917# endif
    918918                }
    919919                GCPtr += (1 << X86_PD_PAE_SHIFT);
     
    929929}
    930930
    931 #endif /* PGM_WITHOUT_MAPPINGS */
    932 
     931#endif /* !PGM_WITHOUT_MAPPINGS */
     932
Note: See TracChangeset for help on using the changeset viewer.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette