Changeset 65280 in vbox for trunk/src/VBox/VMM
- Timestamp: Jan 13, 2017 12:19:22 PM (8 years ago)
- File: 1 edited
trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp
--- r65276
+++ r65280

     Assert(!VMMRZCallRing3IsEnabled(pVCpu));
 
-#define VMXLOCAL_LAZY_LOAD_GUEST_MSR(uMsr, a_GuestMsr, a_HostMsr) \
-    do { \
-        if (pMixedCtx->msr##a_GuestMsr != pVCpu->hm.s.vmx.u64Host##a_HostMsr##Msr) \
-            ASMWrMsr(uMsr, pMixedCtx->msr##a_GuestMsr); \
-        else \
-            Assert(ASMRdMsr(uMsr) == pVCpu->hm.s.vmx.u64Host##a_HostMsr##Msr); \
-    } while (0)
-
     Assert(pVCpu->hm.s.vmx.fLazyMsrs & VMX_LAZY_MSRS_SAVED_HOST);
-    if (!(pVCpu->hm.s.vmx.fLazyMsrs & VMX_LAZY_MSRS_LOADED_GUEST))
-    {
 #if HC_ARCH_BITS == 64
-        if (pVCpu->CTX_SUFF(pVM)->hm.s.fAllow64BitGuests)
-        {
-            VMXLOCAL_LAZY_LOAD_GUEST_MSR(MSR_K8_LSTAR, LSTAR, LStar);
-            VMXLOCAL_LAZY_LOAD_GUEST_MSR(MSR_K6_STAR, STAR, Star);
-            VMXLOCAL_LAZY_LOAD_GUEST_MSR(MSR_K8_SF_MASK, SFMASK, SFMask);
-            VMXLOCAL_LAZY_LOAD_GUEST_MSR(MSR_K8_KERNEL_GS_BASE, KERNELGSBASE, KernelGSBase);
-        }
+    if (pVCpu->CTX_SUFF(pVM)->hm.s.fAllow64BitGuests)
+    {
+        /*
+         * If the guest MSRs are not loaded -and- if all the guest MSRs are identical
+         * to the MSRs on the CPU (which are the saved host MSRs, see assertion above) then
+         * we can skip a few MSR writes.
+         *
+         * Otherwise, it implies either 1. they're not loaded, or 2. they're loaded but the
+         * guest MSR values in the guest-CPU context might be different to what's currently
+         * loaded in the CPU. In either case, we need to write the new guest MSR values to the
+         * CPU, see @bugref{8728}.
+         */
+        if (   !(pVCpu->hm.s.vmx.fLazyMsrs & VMX_LAZY_MSRS_LOADED_GUEST)
+            && pMixedCtx->msrKERNELGSBASE == pVCpu->hm.s.vmx.u64HostKernelGSBaseMsr
+            && pMixedCtx->msrLSTAR        == pVCpu->hm.s.vmx.u64HostLStarMsr
+            && pMixedCtx->msrSTAR         == pVCpu->hm.s.vmx.u64HostStarMsr
+            && pMixedCtx->msrSFMASK       == pVCpu->hm.s.vmx.u64HostSFMaskMsr)
+        {
+#ifdef VBOX_STRICT
+            Assert(ASMRdMsr(MSR_K8_KERNEL_GS_BASE) == pMixedCtx->msrKERNELGSBASE);
+            Assert(ASMRdMsr(MSR_K8_LSTAR)          == pMixedCtx->msrLSTAR);
+            Assert(ASMRdMsr(MSR_K6_STAR)           == pMixedCtx->msrSTAR);
+            Assert(ASMRdMsr(MSR_K8_SF_MASK)        == pMixedCtx->msrSFMASK);
+#endif
+        }
+        else
+        {
+            ASMWrMsr(MSR_K8_KERNEL_GS_BASE, pMixedCtx->msrKERNELGSBASE);
+            ASMWrMsr(MSR_K8_LSTAR,          pMixedCtx->msrLSTAR);
+            ASMWrMsr(MSR_K6_STAR,           pMixedCtx->msrSTAR);
+            ASMWrMsr(MSR_K8_SF_MASK,        pMixedCtx->msrSFMASK);
+        }
+    }
 #else
     RT_NOREF(pMixedCtx);
 #endif
-        pVCpu->hm.s.vmx.fLazyMsrs |= VMX_LAZY_MSRS_LOADED_GUEST;
-    }
-
-#undef VMXLOCAL_LAZY_LOAD_GUEST_MSR
+    pVCpu->hm.s.vmx.fLazyMsrs |= VMX_LAZY_MSRS_LOADED_GUEST;
 }
 
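In essence, the change replaces the per-MSR write-if-different macro with a single combined check: when the lazy-load flag is still clear and every guest MSR value already equals the saved host value (i.e. what is currently on the CPU), all four WRMSRs are skipped; in every other case the guest values are written unconditionally. The standalone sketch below illustrates that pattern with two MSRs. It is illustrative only: the simulated rdmsr/wrmsr store and the names simRdMsr, simWrMsr, GUESTCTX, VCPUSTATE and lazyLoadGuestMsrs are invented for the example and do not exist in the VirtualBox sources, which use ASMRdMsr/ASMWrMsr and the per-VCPU HM state instead.

/*
 * Illustrative sketch of the "skip identical MSR writes" idea from this
 * changeset, reduced to two MSRs and a simulated MSR store.  All names here
 * are stand-ins, not VirtualBox APIs.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { SIM_MSR_LSTAR = 0, SIM_MSR_KERNEL_GS_BASE = 1, SIM_MSR_COUNT = 2 };

/* Stand-in for the physical CPU's MSRs (the real code uses ASMRdMsr/ASMWrMsr). */
static uint64_t g_aSimMsrs[SIM_MSR_COUNT];

static uint64_t simRdMsr(unsigned iMsr)                { return g_aSimMsrs[iMsr]; }
static void     simWrMsr(unsigned iMsr, uint64_t uVal) { g_aSimMsrs[iMsr] = uVal; }

typedef struct GUESTCTX
{
    uint64_t msrLSTAR;              /* Guest value kept in the guest-CPU context. */
    uint64_t msrKERNELGSBASE;
} GUESTCTX;

typedef struct VCPUSTATE
{
    uint64_t u64HostLStarMsr;        /* Host values saved before loading guest MSRs. */
    uint64_t u64HostKernelGSBaseMsr;
    bool     fGuestMsrsLoaded;       /* Plays the role of VMX_LAZY_MSRS_LOADED_GUEST. */
    unsigned cWrMsrs;                /* Counts simulated WRMSRs, for demonstration. */
} VCPUSTATE;

/* Lazily load the guest MSRs: skip the writes only when the guest MSRs are not
   yet loaded and their values match what is already on the (simulated) CPU,
   i.e. the saved host values; otherwise write them unconditionally. */
static void lazyLoadGuestMsrs(VCPUSTATE *pVCpu, const GUESTCTX *pCtx)
{
    if (   !pVCpu->fGuestMsrsLoaded
        && pCtx->msrLSTAR        == pVCpu->u64HostLStarMsr
        && pCtx->msrKERNELGSBASE == pVCpu->u64HostKernelGSBaseMsr)
    {
        /* Nothing to do: the CPU already holds the wanted values. */
    }
    else
    {
        simWrMsr(SIM_MSR_LSTAR,          pCtx->msrLSTAR);
        simWrMsr(SIM_MSR_KERNEL_GS_BASE, pCtx->msrKERNELGSBASE);
        pVCpu->cWrMsrs += 2;
    }
    pVCpu->fGuestMsrsLoaded = true;
}

int main(void)
{
    VCPUSTATE VCpu = { .u64HostLStarMsr = 0x1000, .u64HostKernelGSBaseMsr = 0x2000 };
    g_aSimMsrs[SIM_MSR_LSTAR]          = 0x1000;  /* CPU currently holds the host values. */
    g_aSimMsrs[SIM_MSR_KERNEL_GS_BASE] = 0x2000;

    GUESTCTX CtxSame = { .msrLSTAR = 0x1000, .msrKERNELGSBASE = 0x2000 };
    lazyLoadGuestMsrs(&VCpu, &CtxSame);           /* Identical values: no WRMSRs issued. */

    VCpu.fGuestMsrsLoaded = false;
    GUESTCTX CtxDiff = { .msrLSTAR = 0x3000, .msrKERNELGSBASE = 0x4000 };
    lazyLoadGuestMsrs(&VCpu, &CtxDiff);           /* Different values: both MSRs written. */

    printf("WRMSRs issued: %u, LSTAR=%#llx, KERNEL_GS_BASE=%#llx\n",
           VCpu.cWrMsrs,
           (unsigned long long)simRdMsr(SIM_MSR_LSTAR),
           (unsigned long long)simRdMsr(SIM_MSR_KERNEL_GS_BASE));
    return 0;
}

The first call skips both writes and the second performs them, so the sketch prints "WRMSRs issued: 2, LSTAR=0x3000, KERNEL_GS_BASE=0x4000"; the saving comes from the common case where the guest has never touched these MSRs and they still hold the host values.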