VirtualBox

Timestamp: Mar 5, 2009 1:37:58 AM
Author: vboxsync
Message: PGM,GMM: Hacking on the new phys code.

File: 1 edited

Legend:

    ' ' Unmodified
    '+' Added
    '-' Removed
  • trunk/src/VBox/VMM/VMMR0/GMMR0.cpp

--- trunk/src/VBox/VMM/VMMR0/GMMR0.cpp (r14299)
+++ trunk/src/VBox/VMM/VMMR0/GMMR0.cpp (r17371)

@@ -196,6 +196,6 @@
     struct GMMPAGECOMMON
     {
-        uint32_t uStuff1 : 32;
-        uint32_t uStuff2 : 20;
+        uint32_t    uStuff1 : 32;
+        uint32_t    uStuff2 : 30;
         /** The page state. */
         uint32_t    u2State : 2;
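The widened uStuff2 field makes the common view span all 64 bits of a GMMPAGE (32 + 30 + 2); the old 32 + 20 + 2 layout left 10 bits unaccounted for. A minimal stand-alone sketch of the resulting layout with a compile-time size check (illustrative C11, not code from this changeset; VBox itself would use AssertCompile):

    #include <stdint.h>

    /* Mirrors the GMMPAGECOMMON layout after r17371. */
    struct PageCommon
    {
        uint32_t uStuff1 : 32;
        uint32_t uStuff2 : 30;
        uint32_t u2State : 2;   /* the page state */
    };

    /* 32 + 30 + 2 bits == one 64-bit page descriptor. */
    _Static_assert(sizeof(struct PageCommon) == sizeof(uint64_t),
                   "common view must cover the whole 64-bit GMMPAGE");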
     
@@ -346,10 +346,18 @@
 #endif
 
-/** @def GMM_GCPHYS_END
- * The end of the valid guest physical address as it applies to GMM pages.
+/** @def GMM_GCPHYS_LAST
+ * The last of the valid guest physical address as it applies to GMM pages.
  *
  * This must reflect the constraints imposed by the RTGCPHYS type and
  * the guest page frame number used internally in GMMPAGE. */
-#define GMM_GCPHYS_END              UINT32_C(0xfffff000)
+#if 1
+# define GMM_GCPHYS_LAST            UINT32_C(0xfffff000)            /* 2^32 (4GB)  - 0x1000 */
+#else /** @todo enable this after changing NIL_RTHCPHYS to ~(RTHCPHYS)0! */
+#if HC_ARCH_BITS == 64
+# define GMM_GCPHYS_LAST            UINT64_C(0x00000fffffff0000)    /* 2^44 (16TB) - 0x10000 */
+#else
+# define GMM_GCPHYS_LAST            UINT64_C(0x0000000fffff0000)    /* 2^36 (64GB) - 0x10000 */
+#endif
+#endif
 
 
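Note that GMM_GCPHYS_END was an exclusive upper bound while GMM_GCPHYS_LAST is the highest valid guest physical address, so the range checks later in this changeset flip from '<' to '<='; the larger 2^44/2^36 limits stay disabled behind the '#if 1' until NIL_RTHCPHYS is changed to ~(RTHCPHYS)0, as the @todo notes. A small stand-alone illustration of the boundary behaviour (constant value taken from the hunk above; the program itself is illustrative):

    #include <stdint.h>
    #include <stdio.h>

    #define GMM_GCPHYS_LAST  UINT32_C(0xfffff000)   /* last valid GC physical address */
    #define PAGE_SHIFT       12

    int main(void)
    {
        uint64_t HCPhysGCPhys = UINT32_C(0xfffff000);

        /* Old code: "HCPhysGCPhys < GMM_GCPHYS_END" rejected this address.
         * New code: "HCPhysGCPhys <= GMM_GCPHYS_LAST" accepts it. */
        if (HCPhysGCPhys <= GMM_GCPHYS_LAST)
            printf("pfn = %#llx\n", (unsigned long long)(HCPhysGCPhys >> PAGE_SHIFT));
        else
            printf("out of range\n");
        return 0;
    }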
     
@@ -603,5 +611,5 @@
          * Check and see if RTR0MemObjAllocPhysNC works.
          */
-#if 0 /* later */
+#if 0 /* later, see #3170. */
         RTR0MEMOBJ MemObj;
         rc = RTR0MemObjAllocPhysNC(&MemObj, _64K, NIL_RTHCPHYS);
     
@@ -618,4 +626,9 @@
         pGMM->fLegacyMode = true;
 #endif
+
+        /*
+         * Query system page count and guess a reasonable cMaxPages value.
+         */
+        pGMM->cMaxPages = UINT32_MAX; /** @todo IPRT function for query ram size and such. */
 
         g_pGMM = pGMM;
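The new cMaxPages field only gets a UINT32_MAX placeholder because, per the @todo, IPRT offered no RAM-size query at this point. A hedged sketch of what that query might look like, assuming a call such as RTSystemQueryTotalRam were usable from this ring-0 context (an assumption, not something this changeset provides):

    #include <iprt/types.h>
    #include <iprt/err.h>       /* RT_SUCCESS */
    #include <iprt/param.h>     /* PAGE_SHIFT */
    #include <iprt/system.h>    /* RTSystemQueryTotalRam - assumed available here */

    /* Hypothetical replacement for the UINT32_MAX placeholder: guess cMaxPages
     * from the host RAM size and fall back to the placeholder on failure. */
    static uint32_t gmmR0GuessMaxPages(void)
    {
        uint64_t cbHostRam = 0;
        if (RT_SUCCESS(RTSystemQueryTotalRam(&cbHostRam)))
            return (uint32_t)RT_MIN(cbHostRam >> PAGE_SHIFT, UINT32_MAX);
        return UINT32_MAX;
    }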
     
@@ -1314,4 +1327,5 @@
     if (pChunk->cFree > 0)
     {
+        pChunk->pSet = pSet;
         pChunk->pFreePrev = NULL;
         unsigned iList = (pChunk->cFree - 1) >> GMM_CHUNK_FREE_SET_SHIFT;
     
@@ -1426,4 +1440,5 @@
             pGMM->cChunks++;
             gmmR0LinkChunk(pChunk, pSet);
+            LogFlow(("gmmR0RegisterChunk: pChunk=%p id=%#x cChunks=%d\n", pChunk, pChunk->Core.Key, pGMM->cChunks));
             return VINF_SUCCESS;
         }
     
@@ -1527,4 +1542,5 @@
     AssertReleaseMsg(iPage < RT_ELEMENTS(pChunk->aPages), ("%d\n", iPage));
     PGMMPAGE pPage = &pChunk->aPages[iPage];
+    Log3(("pPage=%x iPage=%#x iFreeHead=%#x iNext=%#x u2State=%d\n", pPage, iPage, pChunk->iFreeHead, pPage->Free.iNext, pPage->Common.u2State));
     Assert(GMM_PAGE_IS_FREE(pPage));
     pChunk->iFreeHead = pPage->Free.iNext;
     
@@ -1534,7 +1550,7 @@
     AssertCompile(GMM_PAGE_STATE_PRIVATE == 0);
     pPage->Private.hGVM = hGVM;
-    AssertCompile(NIL_RTHCPHYS >= GMM_GCPHYS_END);
-    AssertCompile(GMM_GCPHYS_UNSHAREABLE >= GMM_GCPHYS_END);
-    if (pPageDesc->HCPhysGCPhys < GMM_GCPHYS_END)
+    AssertCompile(NIL_RTHCPHYS >= GMM_GCPHYS_LAST);
+    AssertCompile(GMM_GCPHYS_UNSHAREABLE >= GMM_GCPHYS_LAST);
+    if (pPageDesc->HCPhysGCPhys <= GMM_GCPHYS_LAST)
         pPage->Private.pfn = pPageDesc->HCPhysGCPhys >> PAGE_SHIFT;
     else
     
@@ -1645,5 +1661,5 @@
 
             if (    pCur->hGVM == hGVM
-                &&  (   pCur->cFree < GMM_CHUNK_NUM_PAGES
+                &&  (   pCur->cFree <= GMM_CHUNK_NUM_PAGES
                      || pGMM->fLegacyMode))
             {
     
@@ -1680,7 +1696,7 @@
     switch (enmAccount)
     {
-        case GMMACCOUNT_BASE:   pGVM->gmm.s.Allocated.cBasePages   += iPage;
-        case GMMACCOUNT_SHADOW: pGVM->gmm.s.Allocated.cShadowPages += iPage;
-        case GMMACCOUNT_FIXED:  pGVM->gmm.s.Allocated.cFixedPages  += iPage;
+        case GMMACCOUNT_BASE:   pGVM->gmm.s.Allocated.cBasePages   += iPage; break;
+        case GMMACCOUNT_SHADOW: pGVM->gmm.s.Allocated.cShadowPages += iPage; break;
+        case GMMACCOUNT_FIXED:  pGVM->gmm.s.Allocated.cFixedPages  += iPage; break;
         default:
             AssertMsgFailedReturn(("enmAccount=%d\n", enmAccount), VERR_INTERNAL_ERROR);
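Without the added 'break' statements the old switch fell through every case, so a GMMACCOUNT_BASE allocation also bumped the shadow and fixed counters and then hit the default assertion. A stand-alone illustration of the difference (hypothetical names, same control flow):

    #include <stdio.h>

    /* Stand-alone illustration: without the 'break's the BASE case fell
     * through SHADOW, FIXED and the default. */
    enum Account { ACC_BASE, ACC_SHADOW, ACC_FIXED };

    static unsigned s_cBase, s_cShadow, s_cFixed;

    static int account(enum Account enmAccount, unsigned cPages)
    {
        switch (enmAccount)
        {
            case ACC_BASE:   s_cBase   += cPages; break; /* 'break' missing before r17371 */
            case ACC_SHADOW: s_cShadow += cPages; break;
            case ACC_FIXED:  s_cFixed  += cPages; break;
            default:
                return -1; /* the real code asserts and returns VERR_INTERNAL_ERROR */
        }
        return 0;
    }

    int main(void)
    {
        account(ACC_BASE, 16);
        printf("base=%u shadow=%u fixed=%u\n", s_cBase, s_cShadow, s_cFixed); /* 16 0 0 */
        return 0;
    }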
     
@@ -1741,5 +1757,5 @@
     for (; iPage < cPagesToUpdate; iPage++)
     {
-        AssertMsgReturn(    (    paPages[iPage].HCPhysGCPhys < GMM_GCPHYS_END
+        AssertMsgReturn(    (    paPages[iPage].HCPhysGCPhys <= GMM_GCPHYS_LAST
                              && !(paPages[iPage].HCPhysGCPhys & PAGE_OFFSET_MASK))
                         ||  paPages[iPage].HCPhysGCPhys == NIL_RTHCPHYS
     
@@ -1785,6 +1801,6 @@
                         if (RT_LIKELY(pPage->Private.hGVM == pGVM->hSelf))
                         {
-                            AssertCompile(NIL_RTHCPHYS > GMM_GCPHYS_END && GMM_GCPHYS_UNSHAREABLE > GMM_GCPHYS_END);
-                            if (RT_LIKELY(paPages[iPage].HCPhysGCPhys < GMM_GCPHYS_END))
+                            AssertCompile(NIL_RTHCPHYS > GMM_GCPHYS_LAST && GMM_GCPHYS_UNSHAREABLE > GMM_GCPHYS_LAST);
+                            if (RT_LIKELY(paPages[iPage].HCPhysGCPhys <= GMM_GCPHYS_LAST))
                                 pPage->Private.pfn = paPages[iPage].HCPhysGCPhys >> PAGE_SHIFT;
                             else if (paPages[iPage].HCPhysGCPhys == GMM_GCPHYS_UNSHAREABLE)
     
@@ -1825,5 +1841,5 @@
                     if (RT_LIKELY(GMM_PAGE_IS_SHARED(pPage)))
                     {
-                        AssertCompile(NIL_RTHCPHYS > GMM_GCPHYS_END && GMM_GCPHYS_UNSHAREABLE > GMM_GCPHYS_END);
+                        AssertCompile(NIL_RTHCPHYS > GMM_GCPHYS_LAST && GMM_GCPHYS_UNSHAREABLE > GMM_GCPHYS_LAST);
                         Assert(pPage->Shared.cRefs);
                         Assert(pGVM->gmm.s.cSharedPages);
     
@@ -1908,5 +1924,5 @@
                         ||  paPages[iPage].HCPhysGCPhys == GMM_GCPHYS_UNSHAREABLE
                         ||  (    enmAccount == GMMACCOUNT_BASE
-                             &&  paPages[iPage].HCPhysGCPhys < GMM_GCPHYS_END
+                             &&  paPages[iPage].HCPhysGCPhys <= GMM_GCPHYS_LAST
                              && !(paPages[iPage].HCPhysGCPhys & PAGE_OFFSET_MASK)),
                         ("#%#x: %RHp enmAccount=%d\n", iPage, paPages[iPage].HCPhysGCPhys, enmAccount),
     
@@ -1928,5 +1944,5 @@
 
     RTSemFastMutexRelease(pGMM->Mtx);
-    LogFlow(("GMMR0UpdateReservation: returns %Rrc\n", rc));
+    LogFlow(("GMMR0AllocatePages: returns %Rrc\n", rc));
     return rc;
 }
     
@@ -2517,26 +2533,31 @@
 static int gmmR0UnmapChunk(PGMM pGMM, PGVM pGVM, PGMMCHUNK pChunk)
 {
-    /*
-     * Find the mapping and try unmapping it.
-     */
-    for (uint32_t i = 0; i < pChunk->cMappings; i++)
-    {
-        Assert(pChunk->paMappings[i].pGVM && pChunk->paMappings[i].MapObj != NIL_RTR0MEMOBJ);
-        if (pChunk->paMappings[i].pGVM == pGVM)
+    if (!pGMM->fLegacyMode)
+    {
+        /*
+         * Find the mapping and try unmapping it.
+         */
+        for (uint32_t i = 0; i < pChunk->cMappings; i++)
         {
-            /* unmap */
-            int rc = RTR0MemObjFree(pChunk->paMappings[i].MapObj, false /* fFreeMappings (NA) */);
-            if (RT_SUCCESS(rc))
+            Assert(pChunk->paMappings[i].pGVM && pChunk->paMappings[i].MapObj != NIL_RTR0MEMOBJ);
+            if (pChunk->paMappings[i].pGVM == pGVM)
             {
-                /* update the record. */
-                pChunk->cMappings--;
-                if (i < pChunk->cMappings)
-                    pChunk->paMappings[i] = pChunk->paMappings[pChunk->cMappings];
-                pChunk->paMappings[pChunk->cMappings].MapObj = NIL_RTR0MEMOBJ;
-                pChunk->paMappings[pChunk->cMappings].pGVM = NULL;
+                /* unmap */
+                int rc = RTR0MemObjFree(pChunk->paMappings[i].MapObj, false /* fFreeMappings (NA) */);
+                if (RT_SUCCESS(rc))
+                {
+                    /* update the record. */
+                    pChunk->cMappings--;
+                    if (i < pChunk->cMappings)
+                        pChunk->paMappings[i] = pChunk->paMappings[pChunk->cMappings];
+                    pChunk->paMappings[pChunk->cMappings].MapObj = NIL_RTR0MEMOBJ;
+                    pChunk->paMappings[pChunk->cMappings].pGVM = NULL;
+                }
+                return rc;
             }
-            return rc;
         }
     }
+    else if (pChunk->hGVM == pGVM->hSelf)
+        return VINF_SUCCESS;
 
     Log(("gmmR0MapChunk: Chunk %#x is not mapped into pGVM=%p/%#x\n", pChunk->Core.Key, pGVM, pGVM->hSelf));
     
@@ -2558,4 +2579,19 @@
 static int gmmR0MapChunk(PGMM pGMM, PGVM pGVM, PGMMCHUNK pChunk, PRTR3PTR ppvR3)
 {
+    /*
+     * If we're in legacy mode this is simple.
+     */
+    if (pGMM->fLegacyMode)
+    {
+        if (pChunk->hGVM != pGVM->hSelf)
+        {
+            Log(("gmmR0MapChunk: chunk %#x is already mapped at %p!\n", pChunk->Core.Key, *ppvR3));
+            return VERR_GMM_CHUNK_NOT_FOUND;
+        }
+
+        *ppvR3 = RTR0MemObjAddressR3(pChunk->MemObj);
+        return VINF_SUCCESS;
+    }
+
     /*
      * Check to see if the chunk is already mapped.
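In legacy mode the chunk memory is allocated and locked in the owning VM process (see GMMR0SeedChunk further down), so it is already accessible there: gmmR0MapChunk simply hands back the recorded ring-3 address to the owner, and gmmR0UnmapChunk above has nothing to tear down. A simplified stand-alone model of that contract (hypothetical names; the real code uses RTR0MemObjAddressR3 on pChunk->MemObj):

    #include <stdio.h>

    /* Simplified model of the legacy-mode map/unmap behaviour added here:
     * only the owning VM may "map" (it just gets the existing ring-3 address
     * back) and "unmap" is a no-op for the owner, an error for anyone else. */
    typedef struct CHUNK { unsigned hOwner; void *pvR3; } CHUNK;

    static int mapChunkLegacy(CHUNK *pChunk, unsigned hSelf, void **ppvR3)
    {
        if (pChunk->hOwner != hSelf)
            return -1;              /* VERR_GMM_CHUNK_NOT_FOUND in the real code */
        *ppvR3 = pChunk->pvR3;      /* RTR0MemObjAddressR3(pChunk->MemObj) in the real code */
        return 0;                   /* VINF_SUCCESS */
    }

    static int unmapChunkLegacy(CHUNK *pChunk, unsigned hSelf)
    {
        return pChunk->hOwner == hSelf ? 0 : -1;    /* nothing to actually unmap */
    }

    int main(void)
    {
        static char s_abBacking[4096];
        CHUNK chunk = { 1, s_abBacking };
        void *pv = NULL;
        printf("map:   %d (pv=%p)\n", mapChunkLegacy(&chunk, 1, &pv), pv);
        printf("unmap: %d\n", unmapChunkLegacy(&chunk, 1));
        return 0;
    }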
     
@@ -2649,10 +2685,4 @@
     }
 
-    if (pGMM->fLegacyMode)
-    {
-        Log(("GMMR0MapUnmapChunk: legacy mode!\n"));
-        return VERR_NOT_SUPPORTED;
-    }
-
     /*
      * Take the semaphore and do the work.
     
@@ -2775,4 +2805,5 @@
     }
 
+    LogFlow(("GMMR0SeedChunk: rc=%d (pvR3=%p)\n", rc, pvR3));
     return rc;
 }