VirtualBox

Changeset 91014 in vbox


Timestamp: Aug 31, 2021 1:03:39 AM (3 years ago)
Author: vboxsync
Message: VMM: Made VBOX_WITH_RAM_IN_KERNEL non-optional, removing all the tests for it. bugref:9627
Location: trunk
Files: 12 edited
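
For orientation before the per-file diffs: the edit is mechanical across all twelve files. With the feature made mandatory, every test of VBOX_WITH_RAM_IN_KERNEL is resolved as if the macro were defined, so compound guards such as "#if defined(IN_RING0) && defined(VBOX_WITH_RAM_IN_KERNEL)" collapse to "#ifdef IN_RING0", and the #else fallbacks that still mapped guest RAM through ring-3 are deleted outright. The sketch below is a minimal stand-alone illustration of that pattern, not an excerpt from the tree; only the macro names match the changeset, the function names and strings are invented.

    #include <stdio.h>

    /* Stand-ins for defines normally injected by the build system
     * (Config.kmk / VMM_COMMON_DEFS); both names appear in the changeset. */
    #define IN_RING0                1
    #define VBOX_WITH_RAM_IN_KERNEL 1  /* previously set per host in Config.kmk; r91014 makes the
                                          feature mandatory and drops both the define and its tests */

    /* Pre-r91014 shape: the context test is combined with the feature test. */
    static const char *mapModeBefore(void)
    {
    #if defined(IN_RING0) && defined(VBOX_WITH_RAM_IN_KERNEL)
        return "guest RAM mapped into host kernel space";
    #else
        return "fallback path that the changeset removes";
    #endif
    }

    /* r91014 shape: only the context test remains. */
    static const char *mapModeAfter(void)
    {
    #ifdef IN_RING0
        return "guest RAM mapped into host kernel space";
    #else
        return "ring-3 mapping";
    #endif
    }

    int main(void)
    {
        printf("before: %s\nafter:  %s\n", mapModeBefore(), mapModeAfter());
        return 0;
    }

Only the VBOX_WITH_RAM_IN_KERNEL term goes away; guards on IN_RING0/IN_RING3 and on VBOX_WITH_LINEAR_HOST_PHYS_MEM remain as before.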

  • trunk/Config.kmk

    r91008 r91014  
    455455if1of ($(KBUILD_TARGET), win)
    456456 VBOX_WITH_NATIVE_NEM = 1
    457 endif
    458 # Enables mapping guest RAM into host kernel space.
    459 if1of ($(KBUILD_TARGET), darwin linux solaris win)
    460  VBOX_WITH_RAM_IN_KERNEL := 1
    461457endif
    462458# Enables the new breakpoint handling code, see @bugref{8650}
  • trunk/src/VBox/VMM/Config.kmk

    r86653 r91014  
    6969 endif
    7070endif
    71 ifdef VBOX_WITH_RAM_IN_KERNEL
    72  VMM_COMMON_DEFS += VBOX_WITH_RAM_IN_KERNEL
    73  if1of ($(KBUILD_TARGET), linux solaris) # Hosts that implements SUPR0HCPhysToVirt.
    74   VMM_COMMON_DEFS += VBOX_WITH_LINEAR_HOST_PHYS_MEM
    75  endif
     71if1of ($(KBUILD_TARGET), linux solaris) # Hosts that implements SUPR0HCPhysToVirt.
     72 VMM_COMMON_DEFS += VBOX_WITH_LINEAR_HOST_PHYS_MEM
    7673endif
    7774
  • trunk/src/VBox/VMM/VMMAll/PGMAll.cpp

    r90997 r91014  
    22062206    if (RT_SUCCESS(rc))
    22072207    {
    2208 # ifdef VBOX_WITH_RAM_IN_KERNEL
    22092208        rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhysCR3, (void **)ppPd);
    22102209        if (RT_SUCCESS(rc))
    22112210        {
    2212 #  ifdef IN_RING3
     2211# ifdef IN_RING3
    22132212            pVCpu->pgm.s.pGst32BitPdR0 = NIL_RTR0PTR;
    22142213            pVCpu->pgm.s.pGst32BitPdR3 = *ppPd;
    2215 #  else
     2214# else
    22162215            pVCpu->pgm.s.pGst32BitPdR3 = NIL_RTR0PTR;
    22172216            pVCpu->pgm.s.pGst32BitPdR0 = *ppPd;
    2218 #  endif
     2217# endif
    22192218            PGM_UNLOCK(pVM);
    22202219            return VINF_SUCCESS;
    22212220        }
    2222 # else
    2223         RTHCPTR HCPtrGuestCR3;
    2224         rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhysCR3, (void **)&HCPtrGuestCR3);
    2225         if (RT_SUCCESS(rc))
    2226         {
    2227             pVCpu->pgm.s.pGst32BitPdR3 = (R3PTRTYPE(PX86PD))HCPtrGuestCR3;
    2228 #  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    2229             pVCpu->pgm.s.pGst32BitPdR0 = (R0PTRTYPE(PX86PD))HCPtrGuestCR3;
    2230 #  endif
    2231             *ppPd = (PX86PD)HCPtrGuestCR3;
    2232 
    2233             PGM_UNLOCK(pVM);
    2234             return VINF_SUCCESS;
    2235         }
    2236 # endif
    22372221        AssertRC(rc);
    22382222    }
     
    22632247    if (RT_SUCCESS(rc))
    22642248    {
    2265 # ifdef VBOX_WITH_RAM_IN_KERNEL
    22662249        rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhysCR3, (void **)ppPdpt);
    22672250        if (RT_SUCCESS(rc))
    22682251        {
    2269 #  ifdef IN_RING3
     2252# ifdef IN_RING3
    22702253            pVCpu->pgm.s.pGstPaePdptR0 = NIL_RTR0PTR;
    22712254            pVCpu->pgm.s.pGstPaePdptR3 = *ppPdpt;
    2272 #  else
     2255# else
    22732256            pVCpu->pgm.s.pGstPaePdptR3 = NIL_RTR3PTR;
    22742257            pVCpu->pgm.s.pGstPaePdptR0 = *ppPdpt;
    2275 #  endif
     2258# endif
    22762259            PGM_UNLOCK(pVM);
    22772260            return VINF_SUCCESS;
    22782261        }
    2279 # else
    2280         RTHCPTR HCPtrGuestCR3;
    2281         rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhysCR3, (void **)&HCPtrGuestCR3);
    2282         if (RT_SUCCESS(rc))
    2283         {
    2284             pVCpu->pgm.s.pGstPaePdptR3 = (R3PTRTYPE(PX86PDPT))HCPtrGuestCR3;
    2285 #  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    2286             pVCpu->pgm.s.pGstPaePdptR0 = (R0PTRTYPE(PX86PDPT))HCPtrGuestCR3;
    2287 #  endif
    2288             *ppPdpt = (PX86PDPT)HCPtrGuestCR3;
    2289 
    2290             PGM_UNLOCK(pVM);
    2291             return VINF_SUCCESS;
    2292         }
    2293 # endif
    22942262        AssertRC(rc);
    22952263    }
     
    23262294    if (RT_SUCCESS(rc))
    23272295    {
    2328 # ifdef VBOX_WITH_RAM_IN_KERNEL
    23292296        rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhys, (void **)ppPd);
    23302297        AssertRC(rc);
    23312298        if (RT_SUCCESS(rc))
    23322299        {
    2333 #  ifdef IN_RING3
     2300# ifdef IN_RING3
    23342301            pVCpu->pgm.s.apGstPaePDsR0[iPdpt]          = NIL_RTR0PTR;
    23352302            pVCpu->pgm.s.apGstPaePDsR3[iPdpt]          = *ppPd;
    2336 #  else
     2303# else
    23372304            pVCpu->pgm.s.apGstPaePDsR3[iPdpt]          = NIL_RTR3PTR;
    23382305            pVCpu->pgm.s.apGstPaePDsR0[iPdpt]          = *ppPd;
    2339 #  endif
     2306# endif
    23402307            if (fChanged)
    23412308                pVCpu->pgm.s.aGCPhysGstPaePDs[iPdpt]   = GCPhys;
     
    23432310            return VINF_SUCCESS;
    23442311        }
    2345 # else
    2346         RTHCPTR     HCPtr       = NIL_RTHCPTR;
    2347         rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhys, &HCPtr);
    2348         AssertRC(rc);
    2349         if (RT_SUCCESS(rc))
    2350         {
    2351             pVCpu->pgm.s.apGstPaePDsR3[iPdpt]          = (R3PTRTYPE(PX86PDPAE))HCPtr;
    2352 #  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    2353             pVCpu->pgm.s.apGstPaePDsR0[iPdpt]          = (R0PTRTYPE(PX86PDPAE))HCPtr;
    2354 #  endif
    2355             if (fChanged)
    2356                 pVCpu->pgm.s.aGCPhysGstPaePDs[iPdpt]   = GCPhys;
    2357 
    2358             *ppPd = pVCpu->pgm.s.CTX_SUFF(apGstPaePDs)[iPdpt];
    2359             PGM_UNLOCK(pVM);
    2360             return VINF_SUCCESS;
    2361         }
    2362 # endif
    23632312    }
    23642313
     
    23942343    if (RT_SUCCESS(rc))
    23952344    {
    2396 # ifdef VBOX_WITH_RAM_IN_KERNEL
    23972345        rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhysCR3, (void **)ppPml4);
    23982346        if (RT_SUCCESS(rc))
    23992347        {
    2400 #  ifdef IN_RING3
     2348# ifdef IN_RING3
    24012349            pVCpu->pgm.s.pGstAmd64Pml4R0 = NIL_RTR0PTR;
    24022350            pVCpu->pgm.s.pGstAmd64Pml4R3 = *ppPml4;
    2403 #  else
     2351# else
    24042352            pVCpu->pgm.s.pGstAmd64Pml4R3 = NIL_RTR3PTR;
    24052353            pVCpu->pgm.s.pGstAmd64Pml4R0 = *ppPml4;
    2406 #  endif
     2354# endif
    24072355            PGM_UNLOCK(pVM);
    24082356            return VINF_SUCCESS;
    24092357        }
    2410 # else
    2411         RTHCPTR HCPtrGuestCR3;
    2412         rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhysCR3, (void **)&HCPtrGuestCR3);
    2413         if (RT_SUCCESS(rc))
    2414         {
    2415             pVCpu->pgm.s.pGstAmd64Pml4R3 = (R3PTRTYPE(PX86PML4))HCPtrGuestCR3;
    2416 #  ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    2417             pVCpu->pgm.s.pGstAmd64Pml4R0 = (R0PTRTYPE(PX86PML4))HCPtrGuestCR3;
    2418 #  endif
    2419             *ppPml4 = (PX86PML4)HCPtrGuestCR3;
    2420 
    2421             PGM_UNLOCK(pVM);
    2422             return VINF_SUCCESS;
    2423         }
    2424 # endif
    24252358    }
    24262359
  • trunk/src/VBox/VMM/VMMAll/PGMAllBth.h

    r90439 r91014  
    43234323    {
    43244324# if PGM_GST_TYPE == PGM_TYPE_32BIT
    4325 #  ifdef VBOX_WITH_RAM_IN_KERNEL
    4326 #   ifdef IN_RING3
     4325#  ifdef IN_RING3
    43274326        pVCpu->pgm.s.pGst32BitPdR3 = (PX86PD)HCPtrGuestCR3;
    43284327        pVCpu->pgm.s.pGst32BitPdR0 = NIL_RTR0PTR;
    4329 #   else
     4328#  else
    43304329        pVCpu->pgm.s.pGst32BitPdR3 = NIL_RTR3PTR;
    43314330        pVCpu->pgm.s.pGst32BitPdR0 = (PX86PD)HCPtrGuestCR3;
    4332 #   endif
    4333 #  else
    4334         pVCpu->pgm.s.pGst32BitPdR3 = (R3PTRTYPE(PX86PD))HCPtrGuestCR3;
    4335 #   ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4336         pVCpu->pgm.s.pGst32BitPdR0 = (R0PTRTYPE(PX86PD))HCPtrGuestCR3;
    4337 #   endif
    43384331#  endif
    43394332
    43404333# elif PGM_GST_TYPE == PGM_TYPE_PAE
    4341 #  ifdef VBOX_WITH_RAM_IN_KERNEL
    4342 #   ifdef IN_RING3
     4334#  ifdef IN_RING3
    43434335        pVCpu->pgm.s.pGstPaePdptR3 = (PX86PDPT)HCPtrGuestCR3;
    43444336        pVCpu->pgm.s.pGstPaePdptR0 = NIL_RTR0PTR;
    4345 #   else
     4337#  else
    43464338        pVCpu->pgm.s.pGstPaePdptR3 = NIL_RTR3PTR;
    43474339        pVCpu->pgm.s.pGstPaePdptR0 = (PX86PDPT)HCPtrGuestCR3;
    4348 #   endif
    4349 #  else
    4350         pVCpu->pgm.s.pGstPaePdptR3 = (R3PTRTYPE(PX86PDPT))HCPtrGuestCR3;
    4351 #   ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4352         pVCpu->pgm.s.pGstPaePdptR0 = (R0PTRTYPE(PX86PDPT))HCPtrGuestCR3;
    4353 #   endif
    43544340#  endif
    43554341
     
    43724358                if (RT_SUCCESS(rc2))
    43734359                {
    4374 #  ifdef VBOX_WITH_RAM_IN_KERNEL
    4375 #   ifdef IN_RING3
     4360#  ifdef IN_RING3
    43764361                    pVCpu->pgm.s.apGstPaePDsR3[i]     = (PX86PDPAE)HCPtr;
    43774362                    pVCpu->pgm.s.apGstPaePDsR0[i]     = NIL_RTR0PTR;
    4378 #   else
     4363#  else
    43794364                    pVCpu->pgm.s.apGstPaePDsR3[i]     = NIL_RTR3PTR;
    43804365                    pVCpu->pgm.s.apGstPaePDsR0[i]     = (PX86PDPAE)HCPtr;
    4381 #   endif
    4382 #  else
    4383                     pVCpu->pgm.s.apGstPaePDsR3[i]     = (R3PTRTYPE(PX86PDPAE))HCPtr;
    4384 #   ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4385                     pVCpu->pgm.s.apGstPaePDsR0[i]     = (R0PTRTYPE(PX86PDPAE))HCPtr;
    4386 #   endif
    43874366#  endif
    43884367                    pVCpu->pgm.s.aGCPhysGstPaePDs[i]  = GCPhys;
     
    44004379
    44014380# elif PGM_GST_TYPE == PGM_TYPE_AMD64
    4402 #  ifdef VBOX_WITH_RAM_IN_KERNEL
    4403 #   ifdef IN_RING3
     4381#  ifdef IN_RING3
    44044382        pVCpu->pgm.s.pGstAmd64Pml4R3 = (PX86PML4)HCPtrGuestCR3;
    44054383        pVCpu->pgm.s.pGstAmd64Pml4R0 = NIL_RTR0PTR;
    4406 #   else
     4384#  else
    44074385        pVCpu->pgm.s.pGstAmd64Pml4R3 = NIL_RTR3PTR;
    44084386        pVCpu->pgm.s.pGstAmd64Pml4R0 = (PX86PML4)HCPtrGuestCR3;
    4409 #   endif
    4410 #  else
    4411         pVCpu->pgm.s.pGstAmd64Pml4R3 = (R3PTRTYPE(PX86PML4))HCPtrGuestCR3;
    4412 #   ifndef VBOX_WITH_2X_4GB_ADDR_SPACE
    4413         pVCpu->pgm.s.pGstAmd64Pml4R0 = (R0PTRTYPE(PX86PML4))HCPtrGuestCR3;
    4414 #   endif
    44154387#  endif
    44164388# endif
  • trunk/src/VBox/VMM/VMMAll/PGMAllPhys.cpp

    r90439 r91014  
    573573        pVM->pgm.s.PhysTlbR0.aEntries[i].GCPhys = NIL_RTGCPHYS;
    574574        pVM->pgm.s.PhysTlbR0.aEntries[i].pPage = 0;
    575 #ifndef VBOX_WITH_RAM_IN_KERNEL
    576         pVM->pgm.s.PhysTlbR0.aEntries[i].pMap = 0;
    577 #endif
    578575        pVM->pgm.s.PhysTlbR0.aEntries[i].pv = 0;
    579576    }
     
    607604    pVM->pgm.s.PhysTlbR0.aEntries[idx].GCPhys = NIL_RTGCPHYS;
    608605    pVM->pgm.s.PhysTlbR0.aEntries[idx].pPage = 0;
    609 #ifndef VBOX_WITH_RAM_IN_KERNEL
    610     pVM->pgm.s.PhysTlbR0.aEntries[idx].pMap = 0;
    611 #endif
    612606    pVM->pgm.s.PhysTlbR0.aEntries[idx].pv = 0;
    613607
     
    11511145    AssertReturn(idChunk != NIL_GMM_CHUNKID, VERR_INVALID_PARAMETER);
    11521146
    1153 #if defined(IN_RING0) && defined(VBOX_WITH_RAM_IN_KERNEL)
     1147#ifdef IN_RING0
    11541148# ifdef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    11551149    return SUPR0HCPhysToVirt(HCPhys & ~(RTHCPHYS)PAGE_OFFSET_MASK, ppv);
     
    11811175        else
    11821176        {
    1183 # ifdef IN_RING0
    1184             int rc = VMMRZCallRing3NoCpu(pVM, VMMCALLRING3_PGM_MAP_CHUNK, idChunk);
    1185             AssertRCReturn(rc, rc);
    1186             pMap = (PPGMCHUNKR3MAP)RTAvlU32Get(&pVM->pgm.s.ChunkR3Map.pTree, idChunk);
    1187             Assert(pMap);
    1188 # else
    11891177            int rc = pgmR3PhysChunkMap(pVM, idChunk, &pMap);
    11901178            if (RT_FAILURE(rc))
    11911179                return rc;
    1192 # endif
    11931180        }
    11941181
     
    12461233        AssertLogRelReturn(iPage < (pMmio2Range->RamRange.cb >> PAGE_SHIFT), VERR_PGM_PHYS_PAGE_MAP_MMIO2_IPE);
    12471234        *ppMap = NULL;
    1248 # if   defined(IN_RING0) && defined(VBOX_WITH_RAM_IN_KERNEL) && defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     1235# if   defined(IN_RING0) && defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
    12491236        return SUPR0HCPhysToVirt(PGM_PAGE_GET_HCPHYS(pPage), ppv);
    1250 # elif defined(IN_RING0) && defined(VBOX_WITH_RAM_IN_KERNEL)
     1237# elif defined(IN_RING0)
    12511238        *ppv = (uint8_t *)pMmio2Range->pvR0 + ((uintptr_t)iPage << PAGE_SHIFT);
    12521239        return VINF_SUCCESS;
     
    12761263    }
    12771264
    1278 # if defined(IN_RING0) && defined(VBOX_WITH_RAM_IN_KERNEL) && defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     1265# if defined(IN_RING0) && defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
    12791266    /*
    12801267     * Just use the physical address.
     
    12831270    return SUPR0HCPhysToVirt(PGM_PAGE_GET_HCPHYS(pPage), ppv);
    12841271
    1285 # elif defined(IN_RING0) && defined(VBOX_WITH_RAM_IN_KERNEL)
     1272# elif defined(IN_RING0)
    12861273    /*
    12871274     * Go by page ID thru GMMR0.
     
    13171304        else
    13181305        {
    1319 # ifdef IN_RING0
    1320             int rc = VMMRZCallRing3NoCpu(pVM, VMMCALLRING3_PGM_MAP_CHUNK, idChunk);
    1321             AssertRCReturn(rc, rc);
    1322             pMap = (PPGMCHUNKR3MAP)RTAvlU32Get(&pVM->pgm.s.ChunkR3Map.pTree, idChunk);
    1323             Assert(pMap);
    1324 # else
    13251306            int rc = pgmR3PhysChunkMap(pVM, idChunk, &pMap);
    13261307            if (RT_FAILURE(rc))
    13271308                return rc;
    1328 # endif
    13291309            AssertPtr(pMap->pv);
    13301310        }
     
    13401320    *ppMap = pMap;
    13411321    return VINF_SUCCESS;
    1342 # endif /* !IN_RING0 || !VBOX_WITH_RAM_IN_KERNEL */
     1322# endif /* !IN_RING0 */
    13431323}
    13441324
     
    14931473        if (RT_FAILURE(rc))
    14941474            return rc;
    1495 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1475# ifndef IN_RING0
    14961476        pTlbe->pMap = pMap;
    14971477# endif
     
    15021482    {
    15031483        AssertMsg(PGM_PAGE_GET_HCPHYS(pPage) == pVM->pgm.s.HCPhysZeroPg, ("%RGp/%R[pgmpage]\n", GCPhys, pPage));
    1504 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1484# ifndef IN_RING0
    15051485        pTlbe->pMap = NULL;
    15061486# endif
     
    15791559DECLINLINE(void) pgmPhysPageMapLockForWriting(PVM pVM, PPGMPAGE pPage, PPGMPAGEMAPTLBE pTlbe, PPGMPAGEMAPLOCK pLock)
    15801560{
    1581 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1561# ifndef IN_RING0
    15821562    PPGMPAGEMAP pMap = pTlbe->pMap;
    15831563    if (pMap)
     
    15981578        PGM_PAGE_INC_WRITE_LOCKS(pPage);
    15991579        AssertMsgFailed(("%R[pgmpage] is entering permanent write locked state!\n", pPage));
    1600 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1580# ifndef IN_RING0
    16011581        if (pMap)
    16021582            pMap->cRefs++; /* Extra ref to prevent it from going away. */
     
    16051585
    16061586    pLock->uPageAndType = (uintptr_t)pPage | PGMPAGEMAPLOCK_TYPE_WRITE;
    1607 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1587# ifndef IN_RING0
    16081588    pLock->pvMap = pMap;
    16091589# else
     
    16221602DECLINLINE(void) pgmPhysPageMapLockForReading(PVM pVM, PPGMPAGE pPage, PPGMPAGEMAPTLBE pTlbe, PPGMPAGEMAPLOCK pLock)
    16231603{
    1624 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1604# ifndef IN_RING0
    16251605    PPGMPAGEMAP pMap = pTlbe->pMap;
    16261606    if (pMap)
     
    16411621        PGM_PAGE_INC_READ_LOCKS(pPage);
    16421622        AssertMsgFailed(("%R[pgmpage] is entering permanent read locked state!\n", pPage));
    1643 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1623# ifndef IN_RING0
    16441624        if (pMap)
    16451625            pMap->cRefs++; /* Extra ref to prevent it from going away. */
     
    16481628
    16491629    pLock->uPageAndType = (uintptr_t)pPage | PGMPAGEMAPLOCK_TYPE_READ;
    1650 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1630# ifndef IN_RING0
    16511631    pLock->pvMap = pMap;
    16521632# else
     
    19601940VMMDECL(void) PGMPhysReleasePageMappingLock(PVMCC pVM, PPGMPAGEMAPLOCK pLock)
    19611941{
    1962 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1942# ifndef IN_RING0
    19631943    PPGMPAGEMAP pMap       = (PPGMPAGEMAP)pLock->pvMap;
    19641944# endif
     
    20041984    }
    20051985
    2006 # if !defined(IN_RING0) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     1986# ifndef IN_RING0
    20071987    if (pMap)
    20081988    {
  • trunk/src/VBox/VMM/VMMR0/GMMR0.cpp

    r90784 r91014  
    194194#endif
    195195
    196 #if (!defined(VBOX_WITH_RAM_IN_KERNEL) || defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)) \
    197  && !defined(RT_OS_DARWIN)
     196#if defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM) && !defined(RT_OS_DARWIN)
    198197/** Enable the legacy mode code (will be dropped soon). */
    199198# define GMM_WITH_LEGACY_MODE
     
    413412     * and related frees.) */
    414413    RTR0MEMOBJ          hMemObj;
    415 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     414#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    416415    /** Pointer to the kernel mapping. */
    417416    uint8_t            *pbMapping;
     
    22382237#endif
    22392238
    2240 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     2239#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    22412240    /*
    22422241     * Get a ring-0 mapping of the object.
     
    22702269         */
    22712270        pChunk->hMemObj     = hMemObj;
    2272 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     2271#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    22732272        pChunk->pbMapping   = pbMapping;
    22742273#endif
     
    35213520    RTMemFree(pChunk);
    35223521
    3523 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     3522#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    35243523    int rc = RTR0MemObjFree(hMemObj, true /* fFreeMappings */);
    35253524#else
     
    45244523}
    45254524
    4526 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
    4527 
     4525
     4526#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    45284527/**
    45294528 * Gets the ring-0 virtual address for the given page.
     
    46014600    return VERR_GMM_NOT_PAGE_OWNER;
    46024601}
    4603 
    4604 #endif
     4602#endif /* !VBOX_WITH_LINEAR_HOST_PHYS_MEM */
    46054603
    46064604#ifdef VBOX_WITH_PAGE_SHARING
  • trunk/src/VBox/VMM/VMMR0/GVMMR0.cpp

    r90897 r91014  
    107107 * be logged, written to the VMs assertion text buffer, and @a a_BadExpr is
    108108 * executed. */
    109 #if (defined(VBOX_STRICT) || 1) && !defined(VBOX_WITH_RAM_IN_KERNEL)
    110 # define GVMM_CHECK_SMAP_SETUP() uint32_t const fKernelFeatures = SUPR0GetKernelFeatures()
    111 # define GVMM_CHECK_SMAP_CHECK(a_BadExpr) \
    112     do { \
    113         if (fKernelFeatures & SUPKERNELFEATURES_SMAP) \
    114         { \
    115             RTCCUINTREG fEflCheck = ASMGetFlags(); \
    116             if (RT_LIKELY(fEflCheck & X86_EFL_AC)) \
    117             { /* likely */ } \
    118             else \
    119             { \
    120                 SUPR0Printf("%s, line %d: EFLAGS.AC is clear! (%#x)\n", __FUNCTION__, __LINE__, (uint32_t)fEflCheck); \
    121                 a_BadExpr; \
    122             } \
    123         } \
    124     } while (0)
    125 # define GVMM_CHECK_SMAP_CHECK2(a_pGVM, a_BadExpr) \
    126     do { \
    127         if (fKernelFeatures & SUPKERNELFEATURES_SMAP) \
    128         { \
    129             RTCCUINTREG fEflCheck = ASMGetFlags(); \
    130             if (RT_LIKELY(fEflCheck & X86_EFL_AC)) \
    131             { /* likely */ } \
    132             else \
    133             { \
    134                 SUPR0BadContext((a_pGVM) ? (a_pGVM)->pSession : NULL, __FILE__, __LINE__, "EFLAGS.AC is zero!"); \
    135                 a_BadExpr; \
    136             } \
    137         } \
    138     } while (0)
    139 #else
    140 # define GVMM_CHECK_SMAP_SETUP()           uint32_t const fKernelFeatures = 0
    141 # define GVMM_CHECK_SMAP_CHECK(a_BadExpr)           NOREF(fKernelFeatures)
    142 # define GVMM_CHECK_SMAP_CHECK2(a_pGVM, a_BadExpr)   NOREF(fKernelFeatures)
    143 #endif
     109#define GVMM_CHECK_SMAP_SETUP()           uint32_t const fKernelFeatures = 0
     110#define GVMM_CHECK_SMAP_CHECK(a_BadExpr)           NOREF(fKernelFeatures)
     111#define GVMM_CHECK_SMAP_CHECK2(a_pGVM, a_BadExpr)   NOREF(fKernelFeatures)
    144112
    145113/** Special value that GVMMR0DeregisterVCpu sets. */
  • trunk/src/VBox/VMM/VMMR0/PGMR0.cpp

    r90439 r91014  
    376376    PPGMREGMMIO2RANGE pFirstRegMmio = pgmR0PhysMMIOExFind(pGVM, pDevIns, hMmio2);
    377377    AssertReturn(pFirstRegMmio, VERR_NOT_FOUND);
    378 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     378#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    379379    uint8_t * const pvR0  = (uint8_t *)pFirstRegMmio->pvR0;
    380380#else
     
    394394     * Do the mapping.
    395395     */
    396 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     396#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    397397    AssertPtr(pvR0);
    398398    *ppvMapping = pvR0 + offSub;
  • trunk/src/VBox/VMM/VMMR0/VMMR0.cpp

    r90999 r91014  
    9292 * be logged, written to the VMs assertion text buffer, and @a a_BadExpr is
    9393 * executed. */
    94 #if (defined(VBOX_STRICT) || 1) && !defined(VBOX_WITH_RAM_IN_KERNEL)
    95 # define VMM_CHECK_SMAP_SETUP() uint32_t const fKernelFeatures = SUPR0GetKernelFeatures()
    96 # define VMM_CHECK_SMAP_CHECK(a_BadExpr) \
    97     do { \
    98         if (fKernelFeatures & SUPKERNELFEATURES_SMAP) \
    99         { \
    100             RTCCUINTREG fEflCheck = ASMGetFlags(); \
    101             if (RT_LIKELY(fEflCheck & X86_EFL_AC)) \
    102             { /* likely */ } \
    103             else \
    104             { \
    105                 SUPR0Printf("%s, line %d: EFLAGS.AC is clear! (%#x)\n", __FUNCTION__, __LINE__, (uint32_t)fEflCheck); \
    106                 a_BadExpr; \
    107             } \
    108         } \
    109     } while (0)
    110 # define VMM_CHECK_SMAP_CHECK2(a_pGVM, a_BadExpr) \
    111     do { \
    112         if (fKernelFeatures & SUPKERNELFEATURES_SMAP) \
    113         { \
    114             RTCCUINTREG fEflCheck = ASMGetFlags(); \
    115             if (RT_LIKELY(fEflCheck & X86_EFL_AC)) \
    116             { /* likely */ } \
    117             else if (a_pGVM) \
    118             { \
    119                 SUPR0BadContext((a_pGVM)->pSession, __FILE__, __LINE__, "EFLAGS.AC is zero!"); \
    120                 RTStrPrintf((a_pGVM)->vmm.s.szRing0AssertMsg1, sizeof((a_pGVM)->vmm.s.szRing0AssertMsg1), \
    121                             "%s, line %d: EFLAGS.AC is clear! (%#x)\n", __FUNCTION__, __LINE__, (uint32_t)fEflCheck); \
    122                 a_BadExpr; \
    123             } \
    124             else \
    125             { \
    126                 SUPR0Printf("%s, line %d: EFLAGS.AC is clear! (%#x)\n", __FUNCTION__, __LINE__, (uint32_t)fEflCheck); \
    127                 a_BadExpr; \
    128             } \
    129         } \
    130     } while (0)
    131 #else
    132 # define VMM_CHECK_SMAP_SETUP()                         uint32_t const fKernelFeatures = 0
    133 # define VMM_CHECK_SMAP_CHECK(a_BadExpr)                NOREF(fKernelFeatures)
    134 # define VMM_CHECK_SMAP_CHECK2(a_pGVM, a_BadExpr)       NOREF(fKernelFeatures)
    135 #endif
     94#define VMM_CHECK_SMAP_SETUP()                          uint32_t const fKernelFeatures = 0
     95#define VMM_CHECK_SMAP_CHECK(a_BadExpr)                 NOREF(fKernelFeatures)
     96#define VMM_CHECK_SMAP_CHECK2(a_pGVM, a_BadExpr)        NOREF(fKernelFeatures)
    13697
    13798
  • trunk/src/VBox/VMM/VMMR3/PGMPhys.cpp

    r90439 r91014  
    30373037        {
    30383038            void *pvPages;
    3039 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     3039#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    30403040            RTR0PTR pvPagesR0;
    30413041            rc = SUPR3PageAllocEx(cPages, 0 /*fFlags*/, &pvPages, &pvPagesR0, paPages);
     
    30623062                    {
    30633063                        pCur->pvR3          = pbCurPages;
    3064 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     3064#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    30653065                        pCur->pvR0          = pvPagesR0 + (iSrcPage << PAGE_SHIFT);
    30663066#endif
     
    46404640#endif
    46414641
    4642 #ifndef VBOX_WITH_RAM_IN_KERNEL
    4643     for (unsigned i = 0; i < RT_ELEMENTS(pVM->pgm.s.PhysTlbR0.aEntries); i++)
    4644         if (pVM->pgm.s.PhysTlbR0.aEntries[i].pMap == pChunk)
    4645             return 0;
    4646 #endif
    46474642    for (unsigned i = 0; i < RT_ELEMENTS(pVM->pgm.s.PhysTlbR3.aEntries); i++)
    46484643        if (pVM->pgm.s.PhysTlbR3.aEntries[i].pMap == pChunk)
  • trunk/src/VBox/VMM/include/PGMInline.h

    r87141 r91014  
    271271        rc = VINF_SUCCESS;
    272272        AssertPtr(pTlbe->pv);
    273 #if defined(IN_RING3) || !defined(VBOX_WITH_RAM_IN_KERNEL)
     273#ifdef IN_RING3
    274274        Assert(!pTlbe->pMap || RT_VALID_PTR(pTlbe->pMap->pv));
    275275#endif
  • trunk/src/VBox/VMM/include/PGMInternal.h

    r90439 r91014  
    14761476    /** Pointer to the ring-3 mapping of the allocation. */
    14771477    RTR3PTR                             pvR3;
    1478 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     1478#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    14791479    /** Pointer to the ring-0 mapping of the allocation. */
    14801480    RTR0PTR                             pvR0;
     
    14931493    uint8_t                             idMmio2;
    14941494    /** Alignment padding for putting the ram range on a PGMPAGE alignment boundary. */
    1495 #if defined(VBOX_WITH_RAM_IN_KERNEL) && !defined(VBOX_WITH_LINEAR_HOST_PHYS_MEM)
     1495#ifndef VBOX_WITH_LINEAR_HOST_PHYS_MEM
    14961496    uint8_t                             abAlignment[HC_ARCH_BITS == 32 ? 6 + 4 : 2];
    14971497#else
     
    16101610#endif
    16111611    /** The chunk map. */
    1612 #if defined(VBOX_WITH_2X_4GB_ADDR_SPACE) || defined(VBOX_WITH_RAM_IN_KERNEL)
    16131612    R3PTRTYPE(PPGMCHUNKR3MAP) volatile  pChunk;
    1614 #else
    1615     R3R0PTRTYPE(PPGMCHUNKR3MAP) volatile  pChunk;
    1616 #endif
    16171613} PGMCHUNKR3MAPTLBE;
    16181614/** Pointer to the an allocation chunk ring-3 mapping TLB entry. */
     
    16621658    RTGCPHYS volatile                   GCPhys;
    16631659    /** The guest page. */
    1664 #if defined(VBOX_WITH_2X_4GB_ADDR_SPACE) || defined(VBOX_WITH_RAM_IN_KERNEL)
    16651660    R3PTRTYPE(PPGMPAGE) volatile        pPage;
    1666 #else
    1667     R3R0PTRTYPE(PPGMPAGE) volatile      pPage;
    1668 #endif
    16691661    /** Pointer to the page mapping tracking structure, PGMCHUNKR3MAP. */
    1670 #if defined(VBOX_WITH_2X_4GB_ADDR_SPACE) || defined(VBOX_WITH_RAM_IN_KERNEL)
    16711662    R3PTRTYPE(PPGMCHUNKR3MAP) volatile  pMap;
    1672 #else
    1673     R3R0PTRTYPE(PPGMCHUNKR3MAP) volatile pMap;
    1674 #endif
    16751663    /** The address */
    1676 #if defined(VBOX_WITH_2X_4GB_ADDR_SPACE) || defined(VBOX_WITH_RAM_IN_KERNEL)
    16771664    R3PTRTYPE(void *) volatile          pv;
    1678 #else
    1679     R3R0PTRTYPE(void *) volatile        pv;
    1680 #endif
    16811665#if HC_ARCH_BITS == 32
    16821666    uint32_t                            u32Padding; /**< alignment padding. */
     
    17121696/** @} */
    17131697
    1714 #if defined(VBOX_WITH_RAM_IN_KERNEL) || defined(DOXYGEN_RUNNING)
     1698
    17151699/** @name Ring-0 page mapping TLB
    17161700 * @{  */
     
    17541738#define PGM_PAGER0MAPTLB_IDX(GCPhys)    ( ((GCPhys) >> PAGE_SHIFT) & (PGM_PAGER0MAPTLB_ENTRIES - 1) )
    17551739/** @} */
    1756 #endif /* VBOX_WITH_RAM_IN_KERNEL || DOXYGEN_RUNNING */
     1740
    17571741
    17581742/**
     
    19181902/** @typedef PPPGMPAGEMAP
    19191903 * Pointer to a page mapper unit pointer for current context. */
    1920 #if defined(IN_RING0) && defined(VBOX_WITH_RAM_IN_KERNEL)
     1904#if defined(IN_RING0)
    19211905typedef PPGMPAGER0MAPTLB                PPGMPAGEMAPTLB;
    19221906typedef PPGMPAGER0MAPTLBE               PPGMPAGEMAPTLBE;
     
    32743258        PGMCHUNKR3MAPTLB            Tlb;
    32753259        /** The chunk tree, ordered by chunk id. */
    3276 #if defined(VBOX_WITH_2X_4GB_ADDR_SPACE) || defined(VBOX_WITH_RAM_IN_KERNEL)
    32773260        R3PTRTYPE(PAVLU32NODECORE)  pTree;
    3278 #else
    3279         R3R0PTRTYPE(PAVLU32NODECORE) pTree;
    3280 #endif
    32813261#if HC_ARCH_BITS == 32
    32823262        uint32_t                    u32Alignment0;
     
    32963276    /** The page mapping TLB for ring-3. */
    32973277    PGMPAGER3MAPTLB                 PhysTlbR3;
    3298 #ifdef VBOX_WITH_RAM_IN_KERNEL
    32993278    /** The page mapping TLB for ring-0. */
    33003279    PGMPAGER0MAPTLB                 PhysTlbR0;
    3301 #else
    3302     /** The page mapping TLB for ring-0 (still using ring-3 mappings). */
    3303     PGMPAGER3MAPTLB                 PhysTlbR0;
    3304 #endif
    33053280
    33063281    /** @name   The zero page.