VirtualBox

Timestamp: Apr 10, 2025 9:21:50 AM (12 days ago)
Author: vboxsync
svn:sync-xref-src-repo-rev: 168431
Message:

VMM/PGMAllGst-armv8.cpp.h: MSC buildfix (complains about unreachable code in templated code), bugref:10388
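For readers who want the gist of the warning being silenced: in the old layout every granule-size instantiation shared the code after the granule if/else chain, but an instantiation with an unsupported granule size keeps only the unconditional AssertReleaseFailedReturn, so MSC flags the shared tail as unreachable. The changeset moves the whole walk into a single constexpr branch so nothing follows the error return. Below is a minimal, hypothetical C++17 sketch of that pattern (not VirtualBox code; plain if constexpr and return stand in for IPRT's RT_CONSTEXPR_IF and AssertReleaseFailedReturn, and whether MSC warns on this exact reduction may vary):

    // Minimal sketch of the pattern, assuming MSC's C4702 "unreachable code" warning.
    // Names (Granule, walkBefore, walkAfter) are hypothetical.
    #include <cstdint>

    enum class Granule { Invalid, Sz4K, Sz16K, Sz64K };

    // Before: for the Granule::Invalid instantiation only the error return survives,
    // so the code after the if/else chain is unreachable and MSC complains.
    template<Granule a_GranuleSz>
    int walkBefore(uint64_t uAddr)
    {
        unsigned cPageShift = 0;
        if constexpr (a_GranuleSz == Granule::Sz4K)       cPageShift = 12;
        else if constexpr (a_GranuleSz == Granule::Sz16K) cPageShift = 14;
        else if constexpr (a_GranuleSz == Granule::Sz64K) cPageShift = 16;
        else
            return -1;                      /* stands in for AssertReleaseFailedReturn() */

        return (int)(uAddr >> cPageShift);  /* unreachable when a_GranuleSz == Invalid */
    }

    // After (the shape of this changeset): the whole walk lives inside one constexpr
    // branch, so the invalid instantiation ends at the error return and nothing follows.
    template<Granule a_GranuleSz>
    int walkAfter(uint64_t uAddr)
    {
        if constexpr (a_GranuleSz != Granule::Invalid)
        {
            unsigned cPageShift = 12;
            if constexpr (a_GranuleSz == Granule::Sz16K)      cPageShift = 14;
            else if constexpr (a_GranuleSz == Granule::Sz64K) cPageShift = 16;
            return (int)(uAddr >> cPageShift);
        }
        else
            return -1;                      /* stands in for AssertReleaseFailedReturn() */
    }

    template int walkBefore<Granule::Sz4K>(uint64_t);   // fine
    template int walkAfter<Granule::Invalid>(uint64_t); // no unreachable-code warning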

File: 1 edited

Legend: lines prefixed '+' were added, lines prefixed '-' were removed, unprefixed lines are unmodified context.
  • trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h

--- trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h (r108924)
+++ trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h (r108925)
@@ -532,177 +532,180 @@
     AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_64KB    == ARMV8_TCR_EL1_AARCH64_TG1_64KB);
 
-    uint64_t fLookupMaskFull;
-    RTGCPTR  offPageMask;
-
-    RTGCPTR offLvl1BlockMask;
-    RTGCPTR offLvl2BlockMask;
-
-    uint64_t fNextTableOrPageMask;
-    uint8_t  cLvl0Shift;
-    uint8_t  cLvl1Shift;
-    uint8_t  cLvl2Shift;
-    uint8_t  cLvl3Shift;
-
-    RTGCPHYS fGCPhysLvl1BlockBase;
-    RTGCPHYS fGCPhysLvl2BlockBase;
-
-    /** @todo This needs to go into defines in armv8.h if final. */
-    if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_4KB)
+    if RT_CONSTEXPR_IF(a_GranuleSz != ARMV8_TCR_EL1_AARCH64_TG0_INVALID)
     {
-        fLookupMaskFull      = RT_BIT_64(9) - 1;
-        offLvl1BlockMask     = (RTGCPTR)(_1G - 1);
-        offLvl2BlockMask     = (RTGCPTR)(_2M - 1);
-        offPageMask          = (RTGCPTR)(_4K - 1);
-        fNextTableOrPageMask = UINT64_C(0xfffffffff000);
-        cLvl0Shift           = 39;
-        cLvl1Shift           = 30;
-        cLvl2Shift           = 21;
-        cLvl3Shift           = 12;
-        fGCPhysLvl1BlockBase = UINT64_C(0xffffc0000000);
-        fGCPhysLvl2BlockBase = UINT64_C(0xffffffe00000);
-    }
-    else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
-    {
-        fLookupMaskFull      = RT_BIT_64(11) - 1;
-        offLvl1BlockMask     = 0; /** @todo TCR_EL1.DS support. */
-        offLvl2BlockMask     = (RTGCPTR)(_32M - 1);
-        offPageMask          = (RTGCPTR)(_16K - 1);
-        fNextTableOrPageMask = UINT64_C(0xffffffffc000);
-        cLvl0Shift           = 47;
-        cLvl1Shift           = 36;
-        cLvl2Shift           = 25;
-        cLvl3Shift           = 14;
-        fGCPhysLvl1BlockBase = 0; /* Not supported. */
-        fGCPhysLvl2BlockBase = UINT64_C(0xfffffe000000);
-    }
-    else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
-    {
-        Assert(a_InitialLookupLvl > 0);
-
-        fLookupMaskFull      = RT_BIT_64(13)   - 1;
-        offLvl1BlockMask     = 0; /** @todo FEAT_LPA (RTGCPTR)(4*_1T - 1) */
-        offLvl2BlockMask     = (RTGCPTR)(_512M - 1);
-        offPageMask          = (RTGCPTR)(_64K  - 1);
-        fNextTableOrPageMask = UINT64_C(0xffffffff0000);
-        cLvl0Shift           = 0; /* No Level 0 with 64KiB granules. */
-        cLvl1Shift           = 42;
-        cLvl2Shift           = 29;
-        cLvl3Shift           = 16;
-        fGCPhysLvl1BlockBase = 0; /* Not supported. */
-        fGCPhysLvl2BlockBase = UINT64_C(0xffffe0000000);
+        uint64_t fLookupMaskFull;
+        RTGCPTR  offPageMask;
+
+        RTGCPTR offLvl1BlockMask;
+        RTGCPTR offLvl2BlockMask;
+
+        uint64_t fNextTableOrPageMask;
+        uint8_t  cLvl0Shift;
+        uint8_t  cLvl1Shift;
+        uint8_t  cLvl2Shift;
+        uint8_t  cLvl3Shift;
+
+        RTGCPHYS fGCPhysLvl1BlockBase;
+        RTGCPHYS fGCPhysLvl2BlockBase;
+
+        /** @todo This needs to go into defines in armv8.h if final. */
+        if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_4KB)
+        {
+            fLookupMaskFull      = RT_BIT_64(9) - 1;
+            offLvl1BlockMask     = (RTGCPTR)(_1G - 1);
+            offLvl2BlockMask     = (RTGCPTR)(_2M - 1);
+            offPageMask          = (RTGCPTR)(_4K - 1);
+            fNextTableOrPageMask = UINT64_C(0xfffffffff000);
+            cLvl0Shift           = 39;
+            cLvl1Shift           = 30;
+            cLvl2Shift           = 21;
+            cLvl3Shift           = 12;
+            fGCPhysLvl1BlockBase = UINT64_C(0xffffc0000000);
+            fGCPhysLvl2BlockBase = UINT64_C(0xffffffe00000);
+        }
+        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
+        {
+            fLookupMaskFull      = RT_BIT_64(11) - 1;
+            offLvl1BlockMask     = 0; /** @todo TCR_EL1.DS support. */
+            offLvl2BlockMask     = (RTGCPTR)(_32M - 1);
+            offPageMask          = (RTGCPTR)(_16K - 1);
+            fNextTableOrPageMask = UINT64_C(0xffffffffc000);
+            cLvl0Shift           = 47;
+            cLvl1Shift           = 36;
+            cLvl2Shift           = 25;
+            cLvl3Shift           = 14;
+            fGCPhysLvl1BlockBase = 0; /* Not supported. */
+            fGCPhysLvl2BlockBase = UINT64_C(0xfffffe000000);
+        }
+        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
+        {
+            Assert(a_InitialLookupLvl > 0);
+
+            fLookupMaskFull      = RT_BIT_64(13)   - 1;
+            offLvl1BlockMask     = 0; /** @todo FEAT_LPA (RTGCPTR)(4*_1T - 1) */
+            offLvl2BlockMask     = (RTGCPTR)(_512M - 1);
+            offPageMask          = (RTGCPTR)(_64K  - 1);
+            fNextTableOrPageMask = UINT64_C(0xffffffff0000);
+            cLvl0Shift           = 0; /* No Level 0 with 64KiB granules. */
+            cLvl1Shift           = 42;
+            cLvl2Shift           = 29;
+            cLvl3Shift           = 16;
+            fGCPhysLvl1BlockBase = 0; /* Not supported. */
+            fGCPhysLvl2BlockBase = UINT64_C(0xffffe0000000);
+        }
+
+        /* Get the initial lookup mask. */
+        uint8_t const bEl = (fFlags & PGMQPAGE_F_USER_MODE) ? 0 : 1; /** @todo EL2 support */
+        uint64_t fLookupMask;
+        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
+            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
+        else
+            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];
+
+        RTGCPHYS         GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
+        PARMV8VMSA64DESC paDesc   = NULL;
+        ARMV8VMSA64DESC  Desc;
+        int rc;
+        if RT_CONSTEXPR_IF(a_InitialLookupLvl == 0)
+        {
+            Assert(cLvl0Shift != 0);
+            uint8_t const uLvl = 0;
+
+            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
+            if (RT_SUCCESS(rc)) { /* probable */ }
+            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);
+
+            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl0Shift) & fLookupMask]);
+            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
+            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);
+
+            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
+            else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /** @todo Only supported if TCR_EL1.DS is set. */
+
+            /* Full lookup mask from now on. */
+            fLookupMask = fLookupMaskFull;
+            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
+        }
+
+        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 1)
+        {
+            uint8_t const uLvl = 1;
+
+            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
+            if (RT_SUCCESS(rc)) { /* probable */ }
+            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);
+
+            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl1Shift) & fLookupMask]);
+            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
+            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);
+
+            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
+            else
+            {
+                if (offLvl1BlockMask != 0)
+                {
+                    /* Block descriptor. */
+                    pWalk->GCPtr      = GCPtr;
+                    pWalk->fInfo      = PGM_WALKINFO_GIGANTIC_PAGE;
+                    pWalk->GCPhys     = (RTGCPHYS)(Desc & fGCPhysLvl1BlockBase) | (GCPtr & offLvl1BlockMask);
+                    return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
+                }
+                else
+                    return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl);
+            }
+
+            /* Full lookup mask from now on. */
+            fLookupMask = fLookupMaskFull;
+            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
+        }
+
+        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 2)
+        {
+            uint8_t const uLvl = 2;
+
+            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
+            if (RT_SUCCESS(rc)) { /* probable */ }
+            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);
+
+            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl2Shift) & fLookupMask]);
+            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
+            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);
+
+            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
+            else
+            {
+                /* Block descriptor. */
+                pWalk->GCPtr      = GCPtr;
+                pWalk->fInfo      = PGM_WALKINFO_BIG_PAGE;
+                pWalk->GCPhys     = (RTGCPHYS)(Desc & fGCPhysLvl2BlockBase) | (GCPtr & offLvl2BlockMask);
+                return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
+            }
+
+            /* Full lookup mask from now on. */
+            fLookupMask = fLookupMaskFull;
+            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
+        }
+
+        AssertCompile(a_InitialLookupLvl <= 3);
+        uint8_t const uLvl = 3;
+
+        /* Next level. */
+        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
+        if (RT_SUCCESS(rc)) { /* probable */ }
+        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);
+
+        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl3Shift) & fLookupMask]);
+        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
+        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);
+
+        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
+        else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /* No block descriptors. */
+
+        pWalk->GCPtr  = GCPtr;
+        pWalk->GCPhys = (RTGCPHYS)(Desc & fNextTableOrPageMask) | (GCPtr & offPageMask);
+        return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
     }
     else
         AssertReleaseFailedReturn(VERR_PGM_MODE_IPE);
-
-    /* Get the initial lookup mask. */
-    uint8_t const bEl = (fFlags & PGMQPAGE_F_USER_MODE) ? 0 : 1; /** @todo EL2 support */
-    uint64_t fLookupMask;
-    if RT_CONSTEXPR_IF(a_fTtbr0 == true)
-        fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
-    else
-        fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];
-
-    RTGCPHYS         GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
-    PARMV8VMSA64DESC paDesc   = NULL;
-    ARMV8VMSA64DESC  Desc;
-    int rc;
-    if RT_CONSTEXPR_IF(a_InitialLookupLvl == 0)
-    {
-        Assert(cLvl0Shift != 0);
-        uint8_t const uLvl = 0;
-
-        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
-        if (RT_SUCCESS(rc)) { /* probable */ }
-        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);
-
-        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl0Shift) & fLookupMask]);
-        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
-        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);
-
-        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
-        else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /** @todo Only supported if TCR_EL1.DS is set. */
-
-        /* Full lookup mask from now on. */
-        fLookupMask = fLookupMaskFull;
-        GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
-    }
-
-    if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 1)
-    {
-        uint8_t const uLvl = 1;
-
-        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
-        if (RT_SUCCESS(rc)) { /* probable */ }
-        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);
-
-        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl1Shift) & fLookupMask]);
-        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
-        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);
-
-        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
-        else
-        {
-            if (offLvl1BlockMask != 0)
-            {
-                /* Block descriptor. */
-                pWalk->GCPtr      = GCPtr;
-                pWalk->fInfo      = PGM_WALKINFO_GIGANTIC_PAGE;
-                pWalk->GCPhys     = (RTGCPHYS)(Desc & fGCPhysLvl1BlockBase) | (GCPtr & offLvl1BlockMask);
-                return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
-            }
-            else
-                return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl);
-        }
-
-        /* Full lookup mask from now on. */
-        fLookupMask = fLookupMaskFull;
-        GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
-    }
-
-    if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 2)
-    {
-        uint8_t const uLvl = 2;
-
-        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
-        if (RT_SUCCESS(rc)) { /* probable */ }
-        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);
-
-        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl2Shift) & fLookupMask]);
-        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
-        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);
-
-        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
-        else
-        {
-            /* Block descriptor. */
-            pWalk->GCPtr      = GCPtr;
-            pWalk->fInfo      = PGM_WALKINFO_BIG_PAGE;
-            pWalk->GCPhys     = (RTGCPHYS)(Desc & fGCPhysLvl2BlockBase) | (GCPtr & offLvl2BlockMask);
-            return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
-        }
-
-        /* Full lookup mask from now on. */
-        fLookupMask = fLookupMaskFull;
-        GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
-    }
-
-    AssertCompile(a_InitialLookupLvl <= 3);
-    uint8_t const uLvl = 3;
-
-    /* Next level. */
-    rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
-    if (RT_SUCCESS(rc)) { /* probable */ }
-    else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);
-
-    Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl3Shift) & fLookupMask]);
-    if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
-    else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);
-
-    if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
-    else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /* No block descriptors. */
-
-    pWalk->GCPtr  = GCPtr;
-    pWalk->GCPhys = (RTGCPHYS)(Desc & fNextTableOrPageMask) | (GCPtr & offPageMask);
-    return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
 }
 
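As an aside for following the walk itself: with the 4KiB granule, the constants in the hunk (level shifts 39/30/21/12, a 9-bit lookup mask and a 12-bit page offset) slice a 48-bit virtual address into four table indices plus the page offset; the real code narrows the first index with the TCR-derived afLookupMaskTtbr0/1 mask and only then switches to fLookupMaskFull, which this simplified, hypothetical sketch (not VirtualBox code) ignores by applying the full mask at every level:

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        /* Constants from the 4KiB-granule branch of the hunk above. */
        const unsigned cLvl0Shift = 39, cLvl1Shift = 30, cLvl2Shift = 21, cLvl3Shift = 12;
        const uint64_t fLookupMaskFull = (UINT64_C(1) << 9) - 1;   /* RT_BIT_64(9) - 1 */
        const uint64_t offPageMask     = UINT64_C(0x1000) - 1;     /* _4K - 1          */

        const uint64_t uAddr = UINT64_C(0x00007f123456789a);       /* arbitrary example VA */

        /* Each level consumes 9 bits of the address, top to bottom, then the page offset. */
        printf("lvl0=%llu lvl1=%llu lvl2=%llu lvl3=%llu off=0x%llx\n",
               (unsigned long long)((uAddr >> cLvl0Shift) & fLookupMaskFull),
               (unsigned long long)((uAddr >> cLvl1Shift) & fLookupMaskFull),
               (unsigned long long)((uAddr >> cLvl2Shift) & fLookupMaskFull),
               (unsigned long long)((uAddr >> cLvl3Shift) & fLookupMaskFull),
               (unsigned long long)(uAddr & offPageMask));
        return 0;
    }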