Changeset 54201 in vbox for trunk/src/VBox/VMM/VMMR0
Timestamp:
    Feb 13, 2015, 5:13:28 PM (10 years ago)
svn:sync-xref-src-repo-rev:
    98263
Files:
    1 edited
Legend:
    Unmodified
    Added
    Removed
trunk/src/VBox/VMM/VMMR0/HMR0.cpp
r54153 → r54201

@@ hmR0InitIntelCpu @@
     NOREF(idCpu); NOREF(pvUser2);

-    uint64_t u64FeatMsr      = ASMRdMsr(MSR_IA32_FEATURE_CONTROL);
-    bool const fMaybeSmxMode = RT_BOOL(ASMGetCR4() & X86_CR4_SMXE);
-    bool fMsrLocked          = RT_BOOL(u64FeatMsr & MSR_IA32_FEATURE_CONTROL_LOCK);
-    bool fSmxVmxAllowed      = RT_BOOL(u64FeatMsr & MSR_IA32_FEATURE_CONTROL_SMX_VMXON);
-    bool fVmxAllowed         = RT_BOOL(u64FeatMsr & MSR_IA32_FEATURE_CONTROL_VMXON);
-
-    /* Check if the LOCK bit is set but excludes the required VMXON bit. */
-    int rc = VERR_HM_IPE_1;
-    if (fMsrLocked)
-    {
-        if (fVmxAllowed && fSmxVmxAllowed)
-            rc = VINF_SUCCESS;
-        else if (!fVmxAllowed && !fSmxVmxAllowed)
-            rc = VERR_VMX_MSR_ALL_VMXON_DISABLED;
-        else if (!fMaybeSmxMode)
-        {
-            if (fVmxAllowed)
-                rc = VINF_SUCCESS;
-            else
-                rc = VERR_VMX_MSR_VMXON_DISABLED;
-        }
-        else
-        {
-            /*
-             * CR4.SMXE is set but this doesn't mean the CPU is necessarily in SMX mode. We shall assume
-             * that it is -not- and that it is a stupid BIOS/OS setting CR4.SMXE for no good reason.
-             * See @bugref{6873}.
-             */
-            Assert(fMaybeSmxMode == true);
-            rc = VINF_SUCCESS;
-        }
-    }
-    else
-    {
-        /*
-         * MSR is not yet locked; we can change it ourselves here.
-         * Once the lock bit is set, this MSR can no longer be modified.
-         *
-         * Set both the VMXON and SMX_VMXON bits as we can't determine SMX mode
-         * accurately. See @bugref{6873}.
-         */
-        u64FeatMsr |= MSR_IA32_FEATURE_CONTROL_LOCK
-                    | MSR_IA32_FEATURE_CONTROL_SMX_VMXON
-                    | MSR_IA32_FEATURE_CONTROL_VMXON;
-        ASMWrMsr(MSR_IA32_FEATURE_CONTROL, u64FeatMsr);
-
-        /* Verify. */
-        u64FeatMsr     = ASMRdMsr(MSR_IA32_FEATURE_CONTROL);
-        fMsrLocked     = RT_BOOL(u64FeatMsr & MSR_IA32_FEATURE_CONTROL_LOCK);
-        fSmxVmxAllowed = fMsrLocked && RT_BOOL(u64FeatMsr & MSR_IA32_FEATURE_CONTROL_SMX_VMXON);
-        fVmxAllowed    = fMsrLocked && RT_BOOL(u64FeatMsr & MSR_IA32_FEATURE_CONTROL_VMXON);
-        if (fSmxVmxAllowed && fVmxAllowed)
-            rc = VINF_SUCCESS;
-        else
-            rc = VERR_VMX_MSR_LOCKING_FAILED;
-    }
+    int rc = SUPR0GetVmxUsability(NULL /* pfIsSmxModeAmbiguous */);
     hmR0FirstRcSetStatus(pFirstRc, rc);
 }

@@ hmR0InitAmdCpu @@
     NOREF(idCpu); NOREF(pvUser2);

-    /* Check if SVM is disabled. */
-    int rc;
-    uint64_t fVmCr = ASMRdMsr(MSR_K8_VM_CR);
-    if (!(fVmCr & MSR_K8_VM_CR_SVM_DISABLE))
-    {
-        /* Turn on SVM in the EFER MSR. */
-        uint64_t fEfer = ASMRdMsr(MSR_K6_EFER);
-        if (fEfer & MSR_K6_EFER_SVME)
-            rc = VERR_SVM_IN_USE;
-        else
-        {
-            ASMWrMsr(MSR_K6_EFER, fEfer | MSR_K6_EFER_SVME);
-
-            /* Paranoia. */
-            fEfer = ASMRdMsr(MSR_K6_EFER);
-            if (fEfer & MSR_K6_EFER_SVME)
-            {
-                /* Restore previous value. */
-                ASMWrMsr(MSR_K6_EFER, fEfer & ~MSR_K6_EFER_SVME);
-                rc = VINF_SUCCESS;
-            }
-            else
-                rc = VERR_SVM_ILLEGAL_EFER_MSR;
-        }
-    }
-    else
-        rc = VERR_SVM_DISABLED;
-
+    int rc = SUPR0GetSvmUsability(true /* fInitSvm */);
     hmR0FirstRcSetStatus(pFirstRc, rc);
 }
Note:
See TracChangeset
for help on using the changeset viewer.