Changeset 98150 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: Jan 20, 2023 6:46:21 AM (2 years ago)
- svn:sync-xref-src-repo-rev: 155323
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 3 edited
Legend:
- Unmodified (context lines, prefixed with a space)
- Added (prefixed with +)
- Removed (prefixed with -)
trunk/src/VBox/VMM/VMMAll/IEMAllCImpl.cpp
r98103 → r98150

@@ r98103:5927 r98150:5927 @@
             }

+#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
             /* Check for bits that must remain set or cleared in VMX operation,
                see Intel spec. 23.8 "Restrictions on VMX operation". */
             if (IEM_VMX_IS_ROOT_MODE(pVCpu))
             {
-#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
-                uint64_t const uCr0Fixed0 = IEM_VMX_IS_NON_ROOT_MODE(pVCpu) ? iemVmxGetCr0Fixed0(pVCpu) : VMX_V_CR0_FIXED0;
-#else
-                uint64_t const uCr0Fixed0 = VMX_V_CR0_FIXED0;
-#endif
+                uint64_t const uCr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu, IEM_VMX_IS_NON_ROOT_MODE(pVCpu));
                 if ((uNewCrX & uCr0Fixed0) != uCr0Fixed0)
                 {
…
                 }
             }
+#endif

             /*
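The IEMAllCImpl.cpp hunk replaces the old #ifdef/ternary with a single call to the new two-argument iemVmxGetCr0Fixed0() and moves the whole fixed-bit check under VBOX_WITH_NESTED_HWVIRT_VMX. The helper's body is not part of this changeset; the sketch below is only an assumed shape, inferred from the fVmxNonRootMode parameter and the unrestricted-guest flag that appears in the IEMAllCImplVmxInstr.cpp assertions (in VMX non-root operation with unrestricted guest enabled, CR0.PE and CR0.PG need not be 1). It presumes the VBox VMM headers that define PCVMCPUCC, VMX_V_CR0_FIXED0, VMX_PROC_CTLS2_UNRESTRICTED_GUEST and the guest context layout are in scope.

    /* Hedged sketch only -- not the actual r98150 body of iemVmxGetCr0Fixed0(). */
    static uint64_t iemVmxGetCr0Fixed0Sketch(PCVMCPUCC pVCpu, bool fVmxNonRootMode)
    {
        uint64_t fCr0Fixed0 = VMX_V_CR0_FIXED0;     /* architectural must-be-one CR0 bits (PE, NE, PG) */
        if (   fVmxNonRootMode
            && (pVCpu->cpum.GstCtx.hwvirt.vmx.Vmcs.u32ProcCtls2 & VMX_PROC_CTLS2_UNRESTRICTED_GUEST))
            fCr0Fixed0 &= ~(uint64_t)(X86_CR0_PE | X86_CR0_PG); /* unrestricted guest: PE/PG may be 0 */
        return fCr0Fixed0;
    }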
trunk/src/VBox/VMM/VMMAll/IEMAllCImplVmxInstr.cpp
r98103 → r98150

@@ r98103:1740 r98150:1740 @@
     {
         /* Bits 63:32, 28:19, 17, 15:6, ET, CD, NW and CR0 fixed bits are not modified. */
-        uint64_t const uCr0Mb1      = iemVmxGetCr0Fixed0(pVCpu);
-        uint64_t const uCr0Mb0      = VMX_V_CR0_FIXED1;
-        uint64_t const fCr0IgnMask  = VMX_EXIT_HOST_CR0_IGNORE_MASK | uCr0Mb1 | ~uCr0Mb0;
+        uint64_t const fCr0IgnMask  = VMX_EXIT_HOST_CR0_IGNORE_MASK;
         uint64_t const uHostCr0     = pVmcs->u64HostCr0.u;
         uint64_t const uGuestCr0    = pVCpu->cpum.GstCtx.cr0;
         uint64_t const uValidHostCr0 = (uHostCr0 & ~fCr0IgnMask) | (uGuestCr0 & fCr0IgnMask);

-        /* Verify we have not modified CR0 fixed bits in VMX non-root operation. */
-        Assert((uGuestCr0 & uCr0Mb1) == uCr0Mb1);
-        Assert((uGuestCr0 & ~uCr0Mb0) == 0);
+        /* Verify we have not modified CR0 fixed bits in VMX operation. */
+#ifdef VBOX_STRICT
+        uint64_t const uCr0Mb1 = iemVmxGetCr0Fixed0(pVCpu, true /* fVmxNonRootMode */);
+        bool const fUx         = RT_BOOL(pVmcs->u32ProcCtls2 & VMX_PROC_CTLS2_UNRESTRICTED_GUEST);
+        AssertMsg(   (uValidHostCr0 & uCr0Mb1) == uCr0Mb1
+                  && (uValidHostCr0 & ~VMX_V_CR0_FIXED1) == 0,
+                  ("host=%#RX64 guest=%#RX64 mb1=%#RX64 valid_host_cr0=%#RX64 fUx=%RTbool\n",
+                   uHostCr0, uGuestCr0, uCr0Mb1, uValidHostCr0, fUx));
+#endif
+        Assert(!(uValidHostCr0 >> 32));
         CPUMSetGuestCR0(pVCpu, uValidHostCr0);
     }
…
@@ r98103:1758 r98150:1763 @@
         uint64_t const uCr4Mb1      = pVCpu->cpum.GstCtx.hwvirt.vmx.Msrs.u64Cr4Fixed0;
         uint64_t const uCr4Mb0      = pVCpu->cpum.GstCtx.hwvirt.vmx.Msrs.u64Cr4Fixed1;
-        uint64_t const fCr4IgnMask  = uCr4Mb1 | ~uCr4Mb0;
         uint64_t const uHostCr4     = pVmcs->u64HostCr4.u;
-        uint64_t const uGuestCr4    = pVCpu->cpum.GstCtx.cr4;
-        uint64_t       uValidHostCr4 = (uHostCr4 & ~fCr4IgnMask) | (uGuestCr4 & fCr4IgnMask);
+        uint64_t       uValidHostCr4 = (uHostCr4 & uCr4Mb0) | uCr4Mb1;
         if (fHostInLongMode)
             uValidHostCr4 |= X86_CR4_PAE;
…
@@ r98103:1768 r98150:1771 @@

         /* Verify we have not modified CR4 fixed bits in VMX non-root operation. */
-        Assert((uGuestCr4 & uCr4Mb1) == uCr4Mb1);
-        Assert((uGuestCr4 & ~uCr4Mb0) == 0);
+        AssertMsg(   (uValidHostCr4 & uCr4Mb1) == uCr4Mb1
+                  && (uValidHostCr4 & ~uCr4Mb0) == 0,
+                  ("host=%#RX64 guest=%#RX64, uCr4Mb1=%#RX64 uCr4Mb0=%#RX64 valid_host_cr4=%#RX64\n",
+                   uHostCr4, pVCpu->cpum.GstCtx.cr4, uCr4Mb1, uCr4Mb0, uValidHostCr4));
         CPUMSetGuestCR4(pVCpu, uValidHostCr4);
     }
…
@@ r98103:5135 r98150:5140 @@
     {
         /* CR0 MB1 bits. */
-        uint64_t const u64Cr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu);
+        uint64_t const u64Cr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu, true /* fVmxNonRootMode */);
         if ((pVmcs->u64GuestCr0.u & u64Cr0Fixed0) == u64Cr0Fixed0)
         { /* likely */ }
…
@@ r98103:6079 r98150:6084 @@
     {
         /* CR0 MB1 bits. */
-        uint64_t const u64Cr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu);
+        uint64_t const u64Cr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu, true /* fVmxNonRootMode */);
         if ((pVmcs->u64HostCr0.u & u64Cr0Fixed0) == u64Cr0Fixed0)
         { /* likely */ }
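The recurring pattern in this file is the VMX fixed-bit rule: the IA32_VMX_CR0/CR4_FIXED0 MSRs (uCr0Mb1/uCr4Mb1 above) give bits that must be 1, while the FIXED1 MSRs (uCr0Mb0/uCr4Mb0) give the bits that may be 1. The new uValidHostCr4 line sanitises the host value with exactly that rule instead of asserting on the guest CR4. Below is a small standalone illustration of the rule; it is not VirtualBox API and the names and masks are made up for the example.

    #include <assert.h>
    #include <stdint.h>

    /* Force the must-be-one bits (FIXED0 MSR) and clear everything outside the
       may-be-one mask (FIXED1 MSR) -- the same shape as the new uValidHostCr4 line. */
    static uint64_t ApplyVmxFixedBits(uint64_t uRaw, uint64_t uFixed0Mb1, uint64_t uFixed1Mb0)
    {
        return (uRaw & uFixed1Mb0) | uFixed0Mb1;
    }

    int main(void)
    {
        /* Example values in the spirit of CR4: bit 13 (VMXE) must be one,
           only the low may-be-one bits survive. */
        uint64_t const uFixed0 = UINT64_C(0x2000);      /* must-be-one: VMXE */
        uint64_t const uFixed1 = UINT64_C(0x003767ff);  /* may-be-one mask (illustrative) */
        uint64_t const uValid  = ApplyVmxFixedBits(UINT64_C(0xffffffff00000020), uFixed0, uFixed1);
        assert((uValid & uFixed0) == uFixed0);   /* fixed-to-one bits are set */
        assert((uValid & ~uFixed1) == 0);        /* nothing outside the allowed mask */
        return 0;
    }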
trunk/src/VBox/VMM/VMMAll/VMXAllTemplate.cpp.h
r98103 → r98150

@@ r98103:734 r98150:734 @@
      * and @bugref{6944}. */
     PCVMCC pVM = pVCpu->CTX_SUFF(pVM);
+    AssertCompile(RT_HI_U32(VMX_EXIT_HOST_CR0_IGNORE_MASK) == UINT32_C(0xffffffff)); /* Paranoia. */
     return (  X86_CR0_PE
             | X86_CR0_NE
…
@@ r98103:2139 r98150:2140 @@
        u64GuestCr0 &= ~(uint64_t)(X86_CR0_CD | X86_CR0_NW);

+       Assert(!RT_HI_U32(u64GuestCr0));
+       Assert(u64GuestCr0 & X86_CR0_NE);
+
        /* Commit the CR0 and related fields to the guest VMCS. */
        int rc = VMX_VMCS_WRITE_NW(pVCpu, VMX_VMCS_GUEST_CR0, u64GuestCr0);   AssertRC(rc);
…
@@ r98103:2171 r98150:2175 @@
        uint64_t       u64GuestCr0  = pVCpu->cpum.GstCtx.cr0;
        uint64_t const u64ShadowCr0 = CPUMGetGuestVmxMaskedCr0(&pVCpu->cpum.GstCtx, pVmcsInfo->u64Cr0Mask);
-       Assert(!RT_HI_U32(u64GuestCr0));
-       Assert(u64GuestCr0 & X86_CR0_NE);

        /* Apply the hardware specified CR0 fixed bits and enable caching. */
…
@@ r98103:2179 r98150:2181 @@
        u64GuestCr0 &= ~(uint64_t)(X86_CR0_CD | X86_CR0_NW);

+       Assert(!RT_HI_U32(u64GuestCr0));
+       Assert(u64GuestCr0 & X86_CR0_NE);
+
        /* Commit the CR0 and CR0 read-shadow to the nested-guest VMCS. */
        int rc = VMX_VMCS_WRITE_NW(pVCpu, VMX_VMCS_GUEST_CR0, u64GuestCr0);   AssertRC(rc);
        rc     = VMX_VMCS_WRITE_NW(pVCpu, VMX_VMCS_CTRL_CR0_READ_SHADOW, u64ShadowCr0);   AssertRC(rc);

-       Log4Func(("cr0=%#RX64 shadow=%#RX64 (set=%#RX64 zap=%#RX64)\n", u64GuestCr0, u64ShadowCr0, fSetCr0, fZapCr0));
+       Log4Func(("cr0=%#RX64 shadow=%#RX64 vmcs_read_shw=%#RX64 (set=%#RX64 zap=%#RX64)\n", u64GuestCr0, u64ShadowCr0,
+                 pVCpu->cpum.GstCtx.hwvirt.vmx.Vmcs.u64Cr0ReadShadow.u, fSetCr0, fZapCr0));
    }
…
@@ r98103:2416 r98150:2422 @@
        u64GuestCr4 |= fSetCr4;
        u64GuestCr4 &= fZapCr4;
+
+       Assert(!RT_HI_U32(u64GuestCr4));
+       Assert(u64GuestCr4 & X86_CR4_VMXE);

        /* Commit the CR4 and CR4 read-shadow to the guest VMCS. */
…
@@ r98103:4089 r98150:4098 @@
        PCVMXVMCSINFO const pVmcsInfoGst = &pVCpu->hmr0.s.vmx.VmcsInfo;
        PVMXVVMCS const     pVmcsNstGst  = &pVCpu->cpum.GstCtx.hwvirt.vmx.Vmcs;
-       u64Cr0 = (u64Cr0                     & ~pVmcsInfo->u64Cr0Mask)
-              | (pVmcsNstGst->u64GuestCr0.u &  pVmcsNstGst->u64Cr0Mask.u)
-              | (u64Shadow                  & (pVmcsInfoGst->u64Cr0Mask & ~pVmcsNstGst->u64Cr0Mask.u));
+       u64Cr0 = (u64Cr0                     & ~(pVmcsInfoGst->u64Cr0Mask & pVmcsNstGst->u64Cr0Mask.u))
+              | (pVmcsNstGst->u64GuestCr0.u &   pVmcsNstGst->u64Cr0Mask.u)
+              | (u64Shadow                  &  (pVmcsInfoGst->u64Cr0Mask & ~pVmcsNstGst->u64Cr0Mask.u));
+       Assert(u64Cr0 & X86_CR0_NE);
    }
 #endif
…
@@ r98103:4125 r98150:4135 @@
        PCVMXVMCSINFO const pVmcsInfoGst = &pVCpu->hmr0.s.vmx.VmcsInfo;
        PVMXVVMCS const     pVmcsNstGst  = &pVCpu->cpum.GstCtx.hwvirt.vmx.Vmcs;
-       u64Cr4 = (u64Cr4                     & ~pVmcsInfo->u64Cr4Mask)
-              | (pVmcsNstGst->u64GuestCr4.u &  pVmcsNstGst->u64Cr4Mask.u)
-              | (u64Shadow                  & (pVmcsInfoGst->u64Cr4Mask & ~pVmcsNstGst->u64Cr4Mask.u));
+       u64Cr4 = (u64Cr4                     & ~(pVmcsInfo->u64Cr4Mask & pVmcsNstGst->u64Cr4Mask.u))
+              | (pVmcsNstGst->u64GuestCr4.u &   pVmcsNstGst->u64Cr4Mask.u)
+              | (u64Shadow                  &  (pVmcsInfoGst->u64Cr4Mask & ~pVmcsNstGst->u64Cr4Mask.u));
+       Assert(u64Cr4 & X86_CR4_VMXE);
    }
 #endif
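The reworked u64Cr0/u64Cr4 imports merge three sources when a nested guest is running: bits intercepted by both VirtualBox's hardware CR mask and the nested hypervisor's mask are taken from the nested-guest VMCS, bits intercepted only by VirtualBox come from the read shadow, and everything else comes from the value hardware delivered. The sketch below merely restates that formula with illustrative names; it is not the VMM's API.

    #include <stdint.h>

    /* Hedged restatement of the new u64Cr0 merge; names are illustrative. */
    static uint64_t MergeNestedGuestCr(uint64_t uHwCr,      /* CR value read from the hardware VMCS */
                                       uint64_t uNstGstCr,  /* guest CR field of the nested-guest VMCS */
                                       uint64_t uShadowCr,  /* read shadow programmed by VirtualBox */
                                       uint64_t fOuterMask, /* bits VirtualBox intercepts */
                                       uint64_t fNstMask)   /* bits the nested hypervisor intercepts */
    {
        return (uHwCr     & ~(fOuterMask &  fNstMask))   /* bits not intercepted by both: hardware value */
             | (uNstGstCr &                 fNstMask)    /* bits the nested hypervisor owns: its VMCS */
             | (uShadowCr &  (fOuterMask & ~fNstMask));  /* bits only VirtualBox owns: the read shadow */
    }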