Changeset 91580 in vbox for trunk/src/VBox/VMM/VMMAll/PGMAllBth.h
- Timestamp: Oct 6, 2021 7:22:04 AM (3 years ago)
- Files: 1 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMAll/PGMAllBth.h
r91271 r91580 53 53 PGM_BTH_DECL(unsigned, AssertCR3)(PVMCPUCC pVCpu, uint64_t cr3, uint64_t cr4, RTGCPTR GCPtr = 0, RTGCPTR cb = ~(RTGCPTR)0); 54 54 #endif 55 PGM_BTH_DECL(int, MapCR3)(PVMCPUCC pVCpu, RTGCPHYS GCPhysCR3 );55 PGM_BTH_DECL(int, MapCR3)(PVMCPUCC pVCpu, RTGCPHYS GCPhysCR3, bool fPdpesMapped); 56 56 PGM_BTH_DECL(int, UnmapCR3)(PVMCPUCC pVCpu); 57 57 … … 4295 4295 * 4296 4296 * @param pVCpu The cross context virtual CPU structure. 4297 * @param GCPhysCR3 The physical address in the CR3 register. (A20 4298 * mask already applied.) 4297 * @param GCPhysCR3 The physical address in the CR3 register. (A20 mask 4298 * already applied.) 4299 * @param fPdpesMapped Whether the PAE PDPEs (and PDPT) have been mapped. 4299 4300 */ 4300 PGM_BTH_DECL(int, MapCR3)(PVMCPUCC pVCpu, RTGCPHYS GCPhysCR3 )4301 PGM_BTH_DECL(int, MapCR3)(PVMCPUCC pVCpu, RTGCPHYS GCPhysCR3, bool fPdpesMapped) 4301 4302 { 4302 4303 PVMCC pVM = pVCpu->CTX_SUFF(pVM); NOREF(pVM); 4304 int rc = VINF_SUCCESS; 4303 4305 4304 4306 /* Update guest paging info. */ … … 4310 4312 PGM_A20_ASSERT_MASKED(pVCpu, GCPhysCR3); 4311 4313 4312 /* 4313 * Map the page CR3 points at. 4314 */ 4315 RTHCPTR HCPtrGuestCR3; 4316 PGM_LOCK_VOID(pVM); 4317 PPGMPAGE pPageCR3 = pgmPhysGetPage(pVM, GCPhysCR3); 4318 AssertReturn(pPageCR3, VERR_PGM_INVALID_CR3_ADDR); 4319 /** @todo this needs some reworking wrt. locking? */ 4320 int rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPageCR3, GCPhysCR3 & GST_CR3_PAGE_MASK, (void **)&HCPtrGuestCR3); /** @todo r=bird: This GCPhysCR3 masking isn't necessary. */ 4321 PGM_UNLOCK(pVM); 4322 if (RT_SUCCESS(rc)) 4323 { 4314 # if PGM_GST_TYPE == PGM_TYPE_PAE 4315 if (!fPdpesMapped) 4316 # else 4317 NOREF(fPdpesMapped); 4318 #endif 4319 { 4320 /* 4321 * Map the page CR3 points at. 
4322 */ 4323 RTHCPTR HCPtrGuestCR3; 4324 PGM_LOCK_VOID(pVM); 4325 PPGMPAGE pPageCR3 = pgmPhysGetPage(pVM, GCPhysCR3); 4326 AssertReturnStmt(pPageCR3, PGM_UNLOCK(pVM), VERR_PGM_INVALID_CR3_ADDR); 4327 /** @todo this needs some reworking wrt. locking? */ 4328 rc = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPageCR3, GCPhysCR3 & GST_CR3_PAGE_MASK, (void **)&HCPtrGuestCR3); /** @todo r=bird: This GCPhysCR3 masking isn't necessary. */ 4329 PGM_UNLOCK(pVM); 4330 if (RT_SUCCESS(rc)) 4331 { 4324 4332 # if PGM_GST_TYPE == PGM_TYPE_32BIT 4325 4333 # ifdef IN_RING3 4326 pVCpu->pgm.s.pGst32BitPdR3 = (PX86PD)HCPtrGuestCR3;4327 pVCpu->pgm.s.pGst32BitPdR0 = NIL_RTR0PTR;4334 pVCpu->pgm.s.pGst32BitPdR3 = (PX86PD)HCPtrGuestCR3; 4335 pVCpu->pgm.s.pGst32BitPdR0 = NIL_RTR0PTR; 4328 4336 # else 4329 pVCpu->pgm.s.pGst32BitPdR3 = NIL_RTR3PTR;4330 pVCpu->pgm.s.pGst32BitPdR0 = (PX86PD)HCPtrGuestCR3;4337 pVCpu->pgm.s.pGst32BitPdR3 = NIL_RTR3PTR; 4338 pVCpu->pgm.s.pGst32BitPdR0 = (PX86PD)HCPtrGuestCR3; 4331 4339 # endif 4332 4340 4333 4341 # elif PGM_GST_TYPE == PGM_TYPE_PAE 4334 4342 # ifdef IN_RING3 4335 pVCpu->pgm.s.pGstPaePdptR3 = (PX86PDPT)HCPtrGuestCR3;4336 pVCpu->pgm.s.pGstPaePdptR0 = NIL_RTR0PTR;4343 pVCpu->pgm.s.pGstPaePdptR3 = (PX86PDPT)HCPtrGuestCR3; 4344 pVCpu->pgm.s.pGstPaePdptR0 = NIL_RTR0PTR; 4337 4345 # else 4338 pVCpu->pgm.s.pGstPaePdptR3 = NIL_RTR3PTR; 4339 pVCpu->pgm.s.pGstPaePdptR0 = (PX86PDPT)HCPtrGuestCR3; 4340 # endif 4341 4342 /* 4343 * Map the 4 PDs too. 
4344 */ 4345 X86PDPE aGstPaePdpes[X86_PG_PAE_PDPE_ENTRIES]; 4346 memcpy(&aGstPaePdpes, HCPtrGuestCR3, sizeof(aGstPaePdpes)); 4347 CPUMSetGuestPaePdpes(pVCpu, &aGstPaePdpes[0]); 4348 for (unsigned i = 0; i < X86_PG_PAE_PDPE_ENTRIES; i++) 4349 { 4350 X86PDPE PaePdpe = aGstPaePdpes[i]; 4351 if (PaePdpe.u & X86_PDPE_P) 4352 { 4353 RTHCPTR HCPtr; 4354 RTGCPHYS GCPhys = PGM_A20_APPLY(pVCpu, PaePdpe.u & X86_PDPE_PG_MASK); 4355 PGM_LOCK_VOID(pVM); 4356 PPGMPAGE pPage = pgmPhysGetPage(pVM, GCPhys); 4357 AssertReturn(pPage, VERR_PGM_INVALID_PDPE_ADDR); 4358 int rc2 = pgmPhysGCPhys2CCPtrInternalDepr(pVM, pPage, GCPhys, (void **)&HCPtr); 4359 PGM_UNLOCK(pVM); 4360 if (RT_SUCCESS(rc2)) 4361 { 4362 # ifdef IN_RING3 4363 pVCpu->pgm.s.apGstPaePDsR3[i] = (PX86PDPAE)HCPtr; 4364 pVCpu->pgm.s.apGstPaePDsR0[i] = NIL_RTR0PTR; 4365 # else 4366 pVCpu->pgm.s.apGstPaePDsR3[i] = NIL_RTR3PTR; 4367 pVCpu->pgm.s.apGstPaePDsR0[i] = (PX86PDPAE)HCPtr; 4368 # endif 4369 pVCpu->pgm.s.aGCPhysGstPaePDs[i] = GCPhys; 4370 continue; 4371 } 4372 AssertMsgFailed(("pgmR3Gst32BitMapCR3: rc2=%d GCPhys=%RGp i=%d\n", rc2, GCPhys, i)); 4373 } 4374 4375 pVCpu->pgm.s.apGstPaePDsR3[i] = 0; 4376 pVCpu->pgm.s.apGstPaePDsR0[i] = 0; 4377 pVCpu->pgm.s.aGCPhysGstPaePDs[i] = NIL_RTGCPHYS; 4378 } 4346 pVCpu->pgm.s.pGstPaePdptR3 = NIL_RTR3PTR; 4347 pVCpu->pgm.s.pGstPaePdptR0 = (PX86PDPT)HCPtrGuestCR3; 4348 # endif 4349 4350 /* 4351 * Update CPUM and map the 4 PDs too. 
4352 */ 4353 X86PDPE aGstPaePdpes[X86_PG_PAE_PDPE_ENTRIES]; 4354 memcpy(&aGstPaePdpes, HCPtrGuestCR3, sizeof(aGstPaePdpes)); 4355 CPUMSetGuestPaePdpes(pVCpu, &aGstPaePdpes[0]); 4356 PGMGstMapPaePdpes(pVCpu, &aGstPaePdpes[0]); 4379 4357 4380 4358 # elif PGM_GST_TYPE == PGM_TYPE_AMD64 4381 4359 # ifdef IN_RING3 4382 pVCpu->pgm.s.pGstAmd64Pml4R3 = (PX86PML4)HCPtrGuestCR3;4383 pVCpu->pgm.s.pGstAmd64Pml4R0 = NIL_RTR0PTR;4360 pVCpu->pgm.s.pGstAmd64Pml4R3 = (PX86PML4)HCPtrGuestCR3; 4361 pVCpu->pgm.s.pGstAmd64Pml4R0 = NIL_RTR0PTR; 4384 4362 # else 4385 pVCpu->pgm.s.pGstAmd64Pml4R3 = NIL_RTR3PTR;4386 pVCpu->pgm.s.pGstAmd64Pml4R0 = (PX86PML4)HCPtrGuestCR3;4387 # endif 4388 # endif 4389 }4390 else4391 AssertMsgFailed(("rc=%Rrc GCPhysGuestPD=%RGp\n", rc, GCPhysCR3));4392 4363 pVCpu->pgm.s.pGstAmd64Pml4R3 = NIL_RTR3PTR; 4364 pVCpu->pgm.s.pGstAmd64Pml4R0 = (PX86PML4)HCPtrGuestCR3; 4365 # endif 4366 # endif 4367 } 4368 else 4369 AssertMsgFailed(("rc=%Rrc GCPhysGuestPD=%RGp\n", rc, GCPhysCR3)); 4370 } 4393 4371 #else /* prot/real stub */ 4394 int rc = VINF_SUCCESS;4372 NOREF(fPdpesMapped); 4395 4373 #endif 4396 4374 … … 4422 4400 4423 4401 Assert(!(GCPhysCR3 >> (PAGE_SHIFT + 32))); 4424 rc = pgmPoolAlloc(pVM, GCPhysCR3 & GST_CR3_PAGE_MASK, BTH_PGMPOOLKIND_ROOT, PGMPOOLACCESS_DONTCARE, PGM_A20_IS_ENABLED(pVCpu), 4425 NIL_PGMPOOL_IDX, UINT32_MAX, true /*fLockPage*/, 4426 &pNewShwPageCR3); 4427 AssertFatalRC(rc); 4428 rc = VINF_SUCCESS; 4402 int const rc2 = pgmPoolAlloc(pVM, GCPhysCR3 & GST_CR3_PAGE_MASK, BTH_PGMPOOLKIND_ROOT, PGMPOOLACCESS_DONTCARE, 4403 PGM_A20_IS_ENABLED(pVCpu), NIL_PGMPOOL_IDX, UINT32_MAX, true /*fLockPage*/, &pNewShwPageCR3); 4404 AssertFatalRC(rc2); 4429 4405 4430 4406 pVCpu->pgm.s.CTX_SUFF(pShwPageCR3) = pNewShwPageCR3; … … 4444 4420 Assert(VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_PGM_SYNC_CR3_NON_GLOBAL) || VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_PGM_SYNC_CR3)); 4445 4421 # endif 4446 rc= pgmMapActivateCR3(pVM, pNewShwPageCR3);4447 AssertRCReturn(rc , rc);4422 int const rc3 = 
pgmMapActivateCR3(pVM, pNewShwPageCR3); 4423 AssertRCReturn(rc3, rc3); 4448 4424 # endif 4449 4425
Note: See TracChangeset for help on using the changeset viewer.