VirtualBox changeset r104939

Timestamp: Jun 15, 2024 11:10:19 AM (6 months ago)
Author: vboxsync
Message:

VMM/PGM: Eliminate the GstWalk local from PGM_GST_NAME(WalkFast). bugref:10687

File: 1 edited

  • trunk/src/VBox/VMM/VMMAll/PGMAllGst.h

--- trunk/src/VBox/VMM/VMMAll/PGMAllGst.h  (r104938)
+++ trunk/src/VBox/VMM/VMMAll/PGMAllGst.h  (r104939)
@@ -609,5 +609,7 @@
 # endif
 
-    GSTPTWALK GstWalk = {0};
+# if PGM_GST_TYPE == PGM_TYPE_AMD64 || PGM_GST_TYPE == PGM_TYPE_PAE
+    PX86PDPT  pPdpt;
+# endif
     uint64_t fEffective;
     {
@@ -616,12 +618,13 @@
          * The PML4 table.
          */
-        rc = pgmGstGetLongModePML4PtrEx(pVCpu, &GstWalk.pPml4);
+        PX86PML4 pPml4;
+        rc = pgmGstGetLongModePML4PtrEx(pVCpu, &pPml4);
         if (RT_SUCCESS(rc)) { /* probable */ }
         else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 4, rc);
 
         PX86PML4E pPml4e;
-        GstWalk.pPml4e  = pPml4e  = &GstWalk.pPml4->a[(GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK];
+        pPml4e  = &pPml4->a[(GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK];
         X86PML4E  Pml4e;
-        GstWalk.Pml4e.u = Pml4e.u = ASMAtomicUoReadU64(&pPml4e->u);
+        Pml4e.u = ASMAtomicUoReadU64(&pPml4e->u);
 
         if (GST_IS_PGENTRY_PRESENT(pVCpu, Pml4e)) { /* probable */ }
@@ -641,20 +644,21 @@
         RTGCPHYS GCPhysPdpt = Pml4e.u & X86_PML4E_PG_MASK;
         PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPdpt, false /*a_fFinal*/, GCPhysPdpt, pWalk);
-        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPdpt, (void **)&GstWalk.pPdpt);
+        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPdpt, (void **)&pPdpt);
         if (RT_SUCCESS(rc)) { /* probable */ }
         else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 3, rc);
 
 # elif PGM_GST_TYPE == PGM_TYPE_PAE
-        rc = pgmGstGetPaePDPTPtrEx(pVCpu, &GstWalk.pPdpt);
+        rc = pgmGstGetPaePDPTPtrEx(pVCpu, &pPdpt);
         if (RT_SUCCESS(rc)) { /* probable */ }
         else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 8, rc);
 # endif
     }
+    PGSTPD pPd;
     {
 # if PGM_GST_TYPE == PGM_TYPE_AMD64 || PGM_GST_TYPE == PGM_TYPE_PAE
         PX86PDPE pPdpe;
-        GstWalk.pPdpe  = pPdpe  = &GstWalk.pPdpt->a[(GCPtr >> GST_PDPT_SHIFT) & GST_PDPT_MASK];
+        pPdpe  = &pPdpt->a[(GCPtr >> GST_PDPT_SHIFT) & GST_PDPT_MASK];
         X86PDPE  Pdpe;
-        GstWalk.Pdpe.u = Pdpe.u = ASMAtomicUoReadU64(&pPdpe->u);
+        Pdpe.u = ASMAtomicUoReadU64(&pPdpe->u);
 
         if (GST_IS_PGENTRY_PRESENT(pVCpu, Pdpe)) { /* probable */ }
@@ -685,22 +689,23 @@
         RTGCPHYS GCPhysPd = Pdpe.u & X86_PDPE_PG_MASK;
         PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPd, false /*a_fFinal*/, GCPhysPd, pWalk);
-        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPd, (void **)&GstWalk.pPd);
+        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPd, (void **)&pPd);
         if (RT_SUCCESS(rc)) { /* probable */ }
         else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 2, rc);
 
 # elif PGM_GST_TYPE == PGM_TYPE_32BIT
-        rc = pgmGstGet32bitPDPtrEx(pVCpu, &GstWalk.pPd);
+        rc = pgmGstGet32bitPDPtrEx(pVCpu, &pPd);
         if (RT_SUCCESS(rc)) { /* probable */ }
         else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 8, rc);
 # endif
     }
+    PGSTPT pPt;
     {
         PGSTPDE pPde;
-        GstWalk.pPde  = pPde  = &GstWalk.pPd->a[(GCPtr >> GST_PD_SHIFT) & GST_PD_MASK];
+        pPde  = &pPd->a[(GCPtr >> GST_PD_SHIFT) & GST_PD_MASK];
         GSTPDE  Pde;
 # if PGM_GST_TYPE != PGM_TYPE_32BIT
-        GstWalk.Pde.u = Pde.u = ASMAtomicUoReadU64(&pPde->u);
+        Pde.u = ASMAtomicUoReadU64(&pPde->u);
 # else
-        GstWalk.Pde.u = Pde.u = ASMAtomicUoReadU32(&pPde->u);
+        Pde.u = ASMAtomicUoReadU32(&pPde->u);
 # endif
         if (GST_IS_PGENTRY_PRESENT(pVCpu, Pde)) { /* probable */ }
@@ -789,5 +794,5 @@
         RTGCPHYS GCPhysPt = GST_GET_PDE_GCPHYS(Pde);
         PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPt, false /*a_fFinal*/, GCPhysPt, pWalk);
-        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&GstWalk.pPt);
+        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pPt);
         if (RT_SUCCESS(rc)) { /* probable */ }
         else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 1, rc);
@@ -795,10 +800,10 @@
     {
         PGSTPTE pPte;
-        GstWalk.pPte  = pPte  = &GstWalk.pPt->a[(GCPtr >> GST_PT_SHIFT) & GST_PT_MASK];
+        pPte  = &pPt->a[(GCPtr >> GST_PT_SHIFT) & GST_PT_MASK];
         GSTPTE  Pte;
 # if PGM_GST_TYPE != PGM_TYPE_32BIT
-        GstWalk.Pte.u = Pte.u = ASMAtomicUoReadU64(&pPte->u);
+        Pte.u = ASMAtomicUoReadU64(&pPte->u);
 # else
-        GstWalk.Pte.u = Pte.u = ASMAtomicUoReadU32(&pPte->u);
+        Pte.u = ASMAtomicUoReadU32(&pPte->u);
 # endif
 
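
For readers skimming the diff, the following is a minimal standalone C sketch of the pattern the changeset applies; it is not VirtualBox code, and the types and names (ENTRY, TABLE, WALKSTATE, walkWithAggregate, walkWithLocals) are simplified stand-ins. Before the change, each level of the guest page-table walk wrote its table pointer and entry both into the GSTPTWALK aggregate (GstWalk) and into a plain local; after the change only the plain locals remain, declared just before the block that needs them.

#include <stdint.h>
#include <stdio.h>

typedef struct { uint64_t u; } ENTRY;     /* stand-in for X86PML4E/X86PDPE/GSTPDE/... */
typedef struct { ENTRY a[4]; } TABLE;     /* stand-in for one paging structure */

/* Stand-in for the GSTPTWALK aggregate that the changeset removes. */
typedef struct
{
    TABLE *pPd;
    ENTRY  Pde;
} WALKSTATE;

/* Before: the table pointer and the entry are written twice, once into the
   aggregate and once into the local that is actually used afterwards. */
static uint64_t walkWithAggregate(TABLE *pPd, unsigned iPde)
{
    WALKSTATE Walk = {0};
    ENTRY Pde;
    Walk.pPd   = pPd;
    Walk.Pde.u = Pde.u = pPd->a[iPde].u;
    return Pde.u;
}

/* After: only the plain local remains, so each value is loaded and stored once. */
static uint64_t walkWithLocals(TABLE *pPd, unsigned iPde)
{
    ENTRY Pde;
    Pde.u = pPd->a[iPde].u;
    return Pde.u;
}

int main(void)
{
    TABLE Pd = { .a = { { 0x1003 }, { 0 }, { 0 }, { 0 } } };
    printf("aggregate: %#llx\n", (unsigned long long)walkWithAggregate(&Pd, 0));
    printf("locals:    %#llx\n", (unsigned long long)walkWithLocals(&Pd, 0));
    return 0;
}

In the actual WalkFast path the same idea drops the duplicate stores into GstWalk for every level (PML4E, PDPE, PDE, PTE), while the pPdpt, pPd and pPt locals that still have to span several blocks are hoisted only as far as needed, with pPdpt guarded by the same PGM_GST_TYPE preprocessor condition as the code that uses it.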