VirtualBox

Changeset 98150 in vbox for trunk/src/VBox/VMM/VMMAll


Timestamp: Jan 20, 2023 6:46:21 AM
Author: vboxsync
svn:sync-xref-src-repo-rev: 155323
Message: VMM: Nested VMX: bugref:10318 Fix VMX CR0/CR4 fixed bits masking.
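
For background, the "fixed bits" the message refers to are the constraints that the IA32_VMX_CR0_FIXED0/FIXED1 and IA32_VMX_CR4_FIXED0/FIXED1 MSRs place on CR0 and CR4 while the CPU is in VMX operation: every bit set in the FIXED0 MSR must be 1, and every bit clear in the FIXED1 MSR must be 0. The standalone C sketch below (illustrative only, not the VirtualBox API) states the rule the changed code enforces:

    #include <stdbool.h>
    #include <stdint.h>

    /* VMX fixed-bits rule: a candidate CR0/CR4 value is legal in VMX operation
       only if all must-be-one bits (set in FIXED0) are set and all must-be-zero
       bits (clear in FIXED1) are clear. */
    bool isCrValidForVmx(uint64_t uCr, uint64_t uFixed0, uint64_t uFixed1)
    {
        bool const fMb1Ok = (uCr & uFixed0) == uFixed0;   /* must-be-one bits all set    */
        bool const fMb0Ok = (uCr & ~uFixed1) == 0;        /* must-be-zero bits all clear */
        return fMb1Ok && fMb0Ok;
    }

The hunks below change how the must-be-one mask is obtained (iemVmxGetCr0Fixed0() now takes a flag indicating VMX non-root operation) and how the masks are applied when loading host CR0/CR4 on VM-exit.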

Location: trunk/src/VBox/VMM/VMMAll
Files: 3 edited

Legend: lines prefixed with + were added, lines prefixed with - were removed; unprefixed lines are unchanged context. Hunk headers (@@ -old +new @@) give the starting line numbers before and after the change.
  • trunk/src/VBox/VMM/VMMAll/IEMAllCImpl.cpp (r98103 → r98150)

    @@ -5927 +5927 @@
                 }

    +#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
                 /* Check for bits that must remain set or cleared in VMX operation,
                    see Intel spec. 23.8 "Restrictions on VMX operation". */
                 if (IEM_VMX_IS_ROOT_MODE(pVCpu))
                 {
    -#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
    -                uint64_t const uCr0Fixed0 = IEM_VMX_IS_NON_ROOT_MODE(pVCpu) ? iemVmxGetCr0Fixed0(pVCpu) : VMX_V_CR0_FIXED0;
    -#else
    -                uint64_t const uCr0Fixed0 = VMX_V_CR0_FIXED0;
    -#endif
    +                uint64_t const uCr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu, IEM_VMX_IS_NON_ROOT_MODE(pVCpu));
                     if ((uNewCrX & uCr0Fixed0) != uCr0Fixed0)
                     {
    @@ -5949 +5946 @@
                     }
                 }
    +#endif

                 /*
  • trunk/src/VBox/VMM/VMMAll/IEMAllCImplVmxInstr.cpp (r98103 → r98150)

    @@ -1740 +1740 @@
         {
             /* Bits 63:32, 28:19, 17, 15:6, ET, CD, NW and CR0 fixed bits are not modified. */
    -        uint64_t const uCr0Mb1       = iemVmxGetCr0Fixed0(pVCpu);
    -        uint64_t const uCr0Mb0       = VMX_V_CR0_FIXED1;
    -        uint64_t const fCr0IgnMask   = VMX_EXIT_HOST_CR0_IGNORE_MASK | uCr0Mb1 | ~uCr0Mb0;
    +        uint64_t const fCr0IgnMask   = VMX_EXIT_HOST_CR0_IGNORE_MASK;
             uint64_t const uHostCr0      = pVmcs->u64HostCr0.u;
             uint64_t const uGuestCr0     = pVCpu->cpum.GstCtx.cr0;
             uint64_t const uValidHostCr0 = (uHostCr0 & ~fCr0IgnMask) | (uGuestCr0 & fCr0IgnMask);

    -        /* Verify we have not modified CR0 fixed bits in VMX non-root operation. */
    -        Assert((uGuestCr0 &  uCr0Mb1) == uCr0Mb1);
    -        Assert((uGuestCr0 & ~uCr0Mb0) == 0);
    +        /* Verify we have not modified CR0 fixed bits in VMX operation. */
    +#ifdef VBOX_STRICT
    +        uint64_t const uCr0Mb1 = iemVmxGetCr0Fixed0(pVCpu, true /* fVmxNonRootMode */);
    +        bool const fUx         = RT_BOOL(pVmcs->u32ProcCtls2 & VMX_PROC_CTLS2_UNRESTRICTED_GUEST);
    +        AssertMsg(   (uValidHostCr0 & uCr0Mb1) == uCr0Mb1
    +                  && (uValidHostCr0 & ~VMX_V_CR0_FIXED1) == 0,
    +                  ("host=%#RX64 guest=%#RX64 mb1=%#RX64 valid_host_cr0=%#RX64 fUx=%RTbool\n",
    +                   uHostCr0, uGuestCr0, uCr0Mb1, uValidHostCr0, fUx));
    +#endif
    +        Assert(!(uValidHostCr0 >> 32));
             CPUMSetGuestCR0(pVCpu, uValidHostCr0);
         }
    @@ -1758 +1763 @@
             uint64_t const uCr4Mb1       = pVCpu->cpum.GstCtx.hwvirt.vmx.Msrs.u64Cr4Fixed0;
             uint64_t const uCr4Mb0       = pVCpu->cpum.GstCtx.hwvirt.vmx.Msrs.u64Cr4Fixed1;
    -        uint64_t const fCr4IgnMask   = uCr4Mb1 | ~uCr4Mb0;
             uint64_t const uHostCr4      = pVmcs->u64HostCr4.u;
    -        uint64_t const uGuestCr4     = pVCpu->cpum.GstCtx.cr4;
    -        uint64_t       uValidHostCr4 = (uHostCr4 & ~fCr4IgnMask) | (uGuestCr4 & fCr4IgnMask);
    +        uint64_t       uValidHostCr4 = (uHostCr4 & uCr4Mb0) | uCr4Mb1;
             if (fHostInLongMode)
                 uValidHostCr4 |= X86_CR4_PAE;
    @@ -1768 +1771 @@

             /* Verify we have not modified CR4 fixed bits in VMX non-root operation. */
    -        Assert((uGuestCr4 &  uCr4Mb1) == uCr4Mb1);
    -        Assert((uGuestCr4 & ~uCr4Mb0) == 0);
    +        AssertMsg(   (uValidHostCr4 &  uCr4Mb1) == uCr4Mb1
    +                  && (uValidHostCr4 & ~uCr4Mb0) == 0,
    +                  ("host=%#RX64 guest=%#RX64, uCr4Mb1=%#RX64 uCr4Mb0=%#RX64 valid_host_cr4=%#RX64\n",
    +                   uHostCr4, pVCpu->cpum.GstCtx.cr4, uCr4Mb1, uCr4Mb0, uValidHostCr4));
             CPUMSetGuestCR4(pVCpu, uValidHostCr4);
         }
    @@ -5135 +5140 @@
         {
             /* CR0 MB1 bits. */
    -        uint64_t const u64Cr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu);
    +        uint64_t const u64Cr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu, true /* fVmxNonRootMode */);
             if ((pVmcs->u64GuestCr0.u & u64Cr0Fixed0) == u64Cr0Fixed0)
             { /* likely */ }
    @@ -6079 +6084 @@
         {
             /* CR0 MB1 bits. */
    -        uint64_t const u64Cr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu);
    +        uint64_t const u64Cr0Fixed0 = iemVmxGetCr0Fixed0(pVCpu, true /* fVmxNonRootMode */);
             if ((pVmcs->u64HostCr0.u & u64Cr0Fixed0) == u64Cr0Fixed0)
             { /* likely */ }
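
In the VM-exit host-state loading above, the valid host CR4 is now derived directly from the CR4 fixed MSR values, (uHostCr4 & uCr4Mb0) | uCr4Mb1, while for CR0 only the architecturally ignored bits are taken from the current guest value and the fixed-bits property is checked with strict-build assertions. The small self-contained program below (illustrative MSR and CR4 values, not taken from real hardware) shows that the CR4 expression satisfies the fixed-bits rule by construction:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* Illustrative values only. */
        uint64_t const uCr4Mb1  = UINT64_C(0x00002000);  /* must-be-one mask (IA32_VMX_CR4_FIXED0): CR4.VMXE */
        uint64_t const uCr4Mb0  = UINT64_C(0x003727ff);  /* allowed-one mask (IA32_VMX_CR4_FIXED1)           */
        uint64_t const uHostCr4 = UINT64_C(0x000406a0);  /* host CR4 value from the VMCS (example)           */

        /* Same shape as the new code: keep only bits the CPU allows to be one,
           then force the must-be-one bits. */
        uint64_t const uValidHostCr4 = (uHostCr4 & uCr4Mb0) | uCr4Mb1;

        /* The result satisfies the VMX fixed-bits rule by construction
           (given that the must-be-one bits are a subset of the allowed-one bits). */
        assert((uValidHostCr4 &  uCr4Mb1) == uCr4Mb1);
        assert((uValidHostCr4 & ~uCr4Mb0) == 0);

        printf("valid host CR4 = %#llx\n", (unsigned long long)uValidHostCr4);
        return 0;
    }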
  • trunk/src/VBox/VMM/VMMAll/VMXAllTemplate.cpp.h (r98103 → r98150)

    @@ -734 +734 @@
          *        and @bugref{6944}. */
         PCVMCC pVM = pVCpu->CTX_SUFF(pVM);
    +    AssertCompile(RT_HI_U32(VMX_EXIT_HOST_CR0_IGNORE_MASK) == UINT32_C(0xffffffff));    /* Paranoia. */
         return (  X86_CR0_PE
                 | X86_CR0_NE
    @@ -2139 +2140 @@
                 u64GuestCr0 &= ~(uint64_t)(X86_CR0_CD | X86_CR0_NW);

    +            Assert(!RT_HI_U32(u64GuestCr0));
    +            Assert(u64GuestCr0 & X86_CR0_NE);
    +
                 /* Commit the CR0 and related fields to the guest VMCS. */
                 int rc = VMX_VMCS_WRITE_NW(pVCpu, VMX_VMCS_GUEST_CR0, u64GuestCr0);               AssertRC(rc);
    @@ -2171 +2175 @@
                 uint64_t       u64GuestCr0  = pVCpu->cpum.GstCtx.cr0;
                 uint64_t const u64ShadowCr0 = CPUMGetGuestVmxMaskedCr0(&pVCpu->cpum.GstCtx, pVmcsInfo->u64Cr0Mask);
    -            Assert(!RT_HI_U32(u64GuestCr0));
    -            Assert(u64GuestCr0 & X86_CR0_NE);

                 /* Apply the hardware specified CR0 fixed bits and enable caching. */
    @@ -2179 +2181 @@
                 u64GuestCr0 &= ~(uint64_t)(X86_CR0_CD | X86_CR0_NW);

    +            Assert(!RT_HI_U32(u64GuestCr0));
    +            Assert(u64GuestCr0 & X86_CR0_NE);
    +
                 /* Commit the CR0 and CR0 read-shadow to the nested-guest VMCS. */
                 int rc = VMX_VMCS_WRITE_NW(pVCpu, VMX_VMCS_GUEST_CR0, u64GuestCr0);               AssertRC(rc);
                 rc     = VMX_VMCS_WRITE_NW(pVCpu, VMX_VMCS_CTRL_CR0_READ_SHADOW, u64ShadowCr0);   AssertRC(rc);

    -            Log4Func(("cr0=%#RX64 shadow=%#RX64 (set=%#RX64 zap=%#RX64)\n", u64GuestCr0, u64ShadowCr0, fSetCr0, fZapCr0));
    +            Log4Func(("cr0=%#RX64 shadow=%#RX64 vmcs_read_shw=%#RX64 (set=%#RX64 zap=%#RX64)\n", u64GuestCr0, u64ShadowCr0,
    +                      pVCpu->cpum.GstCtx.hwvirt.vmx.Vmcs.u64Cr0ReadShadow.u, fSetCr0, fZapCr0));
             }

    @@ -2416 +2422 @@
             u64GuestCr4 |= fSetCr4;
             u64GuestCr4 &= fZapCr4;
    +
    +        Assert(!RT_HI_U32(u64GuestCr4));
    +        Assert(u64GuestCr4 & X86_CR4_VMXE);

             /* Commit the CR4 and CR4 read-shadow to the guest VMCS. */
    @@ -4089 +4098 @@
                 PCVMXVMCSINFO const pVmcsInfoGst = &pVCpu->hmr0.s.vmx.VmcsInfo;
                 PVMXVVMCS const     pVmcsNstGst  = &pVCpu->cpum.GstCtx.hwvirt.vmx.Vmcs;
    -            u64Cr0 = (u64Cr0                     & ~pVmcsInfo->u64Cr0Mask)
    -                   | (pVmcsNstGst->u64GuestCr0.u &  pVmcsNstGst->u64Cr0Mask.u)
    -                   | (u64Shadow                  & (pVmcsInfoGst->u64Cr0Mask & ~pVmcsNstGst->u64Cr0Mask.u));
    +            u64Cr0 = (u64Cr0                     & ~(pVmcsInfoGst->u64Cr0Mask & pVmcsNstGst->u64Cr0Mask.u))
    +                   | (pVmcsNstGst->u64GuestCr0.u &   pVmcsNstGst->u64Cr0Mask.u)
    +                   | (u64Shadow                  &  (pVmcsInfoGst->u64Cr0Mask & ~pVmcsNstGst->u64Cr0Mask.u));
    +            Assert(u64Cr0 & X86_CR0_NE);
             }
     #endif
    @@ -4125 +4135 @@
                 PCVMXVMCSINFO const pVmcsInfoGst = &pVCpu->hmr0.s.vmx.VmcsInfo;
                 PVMXVVMCS const     pVmcsNstGst  = &pVCpu->cpum.GstCtx.hwvirt.vmx.Vmcs;
    -            u64Cr4 = (u64Cr4                     & ~pVmcsInfo->u64Cr4Mask)
    -                   | (pVmcsNstGst->u64GuestCr4.u &  pVmcsNstGst->u64Cr4Mask.u)
    -                   | (u64Shadow                  & (pVmcsInfoGst->u64Cr4Mask & ~pVmcsNstGst->u64Cr4Mask.u));
    +            u64Cr4 = (u64Cr4                     & ~(pVmcsInfo->u64Cr4Mask & pVmcsNstGst->u64Cr4Mask.u))
    +                   | (pVmcsNstGst->u64GuestCr4.u &   pVmcsNstGst->u64Cr4Mask.u)
    +                   | (u64Shadow                  &  (pVmcsInfoGst->u64Cr4Mask & ~pVmcsNstGst->u64Cr4Mask.u));
    +            Assert(u64Cr4 & X86_CR4_VMXE);
             }
     #endif
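
The VMXAllTemplate.cpp.h hunks above add sanity assertions when exporting guest CR0/CR4 and rework how guest CR0/CR4 are reconstructed when importing state for a nested guest: the hardware value is now stripped only of bits covered by both guest/host masks rather than of the whole outer mask. A hedged re-statement of the new CR0 merge with descriptive stand-in names (fOuterMask for pVmcsInfoGst->u64Cr0Mask, fNestedMask for pVmcsNstGst->u64Cr0Mask.u, uNestedGuestCr0 for pVmcsNstGst->u64GuestCr0.u, uReadShadow for the CR0 read shadow; the helper itself is hypothetical):

    #include <stdint.h>

    /* Each term contributes the bits its mask selects:
       - bits not intercepted by both masks come from the hardware guest CR0,
       - bits in the nested hypervisor's guest/host mask come from its VMCS,
       - bits intercepted only by the outer VMM come from the read shadow. */
    uint64_t exampleMergeNestedGuestCr0(uint64_t uHwGuestCr0, uint64_t uReadShadow,
                                        uint64_t uNestedGuestCr0,
                                        uint64_t fOuterMask, uint64_t fNestedMask)
    {
        return (uHwGuestCr0     & ~(fOuterMask & fNestedMask))
             | (uNestedGuestCr0 &   fNestedMask)
             | (uReadShadow     &  (fOuterMask & ~fNestedMask));
    }

The CR4 merge in the last hunk follows the same pattern with the CR4 masks.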