Changeset 72852 in vbox for trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp
- Timestamp: Jul 4, 2018 5:41:23 AM (6 years ago)
- Files: 1 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp
--- trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp (r72851)
+++ trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp (r72852)
@@ -3523,21 +3523,26 @@
     if (ASMAtomicUoReadU64(&pVCpu->hm.s.fCtxChanged) & HM_CHANGED_VMX_GUEST_XCPT_INTERCEPTS)
     {
+        uint32_t uXcptBitmap = pVCpu->hm.s.vmx.u32XcptBitmap;
+
         /* The remaining exception intercepts are handled elsewhere, e.g. in hmR0VmxExportSharedCR0(). */
         if (pVCpu->hm.s.fGIMTrapXcptUD)
-            pVCpu->hm.s.vmx.u32XcptBitmap |= RT_BIT(X86_XCPT_UD);
+            uXcptBitmap |= RT_BIT(X86_XCPT_UD);
 #ifndef HMVMX_ALWAYS_TRAP_ALL_XCPTS
         else
-            pVCpu->hm.s.vmx.u32XcptBitmap &= ~RT_BIT(X86_XCPT_UD);
+            uXcptBitmap &= ~RT_BIT(X86_XCPT_UD);
 #endif
 
-        Assert(pVCpu->hm.s.vmx.u32XcptBitmap & RT_BIT_32(X86_XCPT_AC));
-        Assert(pVCpu->hm.s.vmx.u32XcptBitmap & RT_BIT_32(X86_XCPT_DB));
+        Assert(uXcptBitmap & RT_BIT_32(X86_XCPT_AC));
+        Assert(uXcptBitmap & RT_BIT_32(X86_XCPT_DB));
 
-        /** @todo Optimize by checking cache before writing to VMCS. */
-        int rc = VMXWriteVmcs32(VMX_VMCS32_CTRL_EXCEPTION_BITMAP, pVCpu->hm.s.vmx.u32XcptBitmap);
-        AssertRCReturn(rc, rc);
+        if (uXcptBitmap != pVCpu->hm.s.vmx.u32XcptBitmap)
+        {
+            int rc = VMXWriteVmcs32(VMX_VMCS32_CTRL_EXCEPTION_BITMAP, uXcptBitmap);
+            AssertRCReturn(rc, rc);
+            pVCpu->hm.s.vmx.u32XcptBitmap = uXcptBitmap;
+        }
 
         ASMAtomicUoAndU64(&pVCpu->hm.s.fCtxChanged, ~HM_CHANGED_VMX_GUEST_XCPT_INTERCEPTS);
-        Log4Func(("VMX_VMCS32_CTRL_EXCEPTION_BITMAP=%#RX64\n", pVCpu->hm.s.vmx.u32XcptBitmap));
+        Log4Func(("VMX_VMCS32_CTRL_EXCEPTION_BITMAP=%#RX64\n", uXcptBitmap));
     }
     return VINF_SUCCESS;
Note: See TracChangeset for help on using the changeset viewer.