VirtualBox

Changeset 46391 in vbox


Ignore:
Timestamp:
Jun 5, 2013 9:43:58 AM (12 years ago)
Author:
vboxsync
Message:

VMM/HMVMXR0: Use log level 4 by default so that instrumenting logging (with lower levels) is much easier.

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp

    r46381 r46391  
    15661566        {
    15671567            /*  Shouldn't happen. VPID is supported but INVVPID is not supported by the CPU. Ignore VPID capability. */
    1568             Log(("hmR0VmxSetupTaggedTlb: VPID supported without INVEPT support. Ignoring VPID.\n"));
     1568            Log4(("hmR0VmxSetupTaggedTlb: VPID supported without INVEPT support. Ignoring VPID.\n"));
    15691569            pVM->hm.s.vmx.enmFlushVpid = VMX_FLUSH_VPID_NOT_SUPPORTED;
    15701570            pVM->hm.s.vmx.fVpid = false;
     
    27142714        rc = VMXWriteVmcsGstN(VMX_VMCS_GUEST_RIP, pMixedCtx->rip);
    27152715        AssertRCReturn(rc, rc);
    2716         Log(("Load: VMX_VMCS_GUEST_RIP=%#RX64\n", pMixedCtx->rip));
     2716        Log4(("Load: VMX_VMCS_GUEST_RIP=%#RX64\n", pMixedCtx->rip));
    27172717        pVCpu->hm.s.fContextUseFlags &= ~HM_CHANGED_GUEST_RIP;
    27182718    }
     
    27842784        AssertRCReturn(rc, rc);
    27852785
    2786         Log(("Load: VMX_VMCS_GUEST_RFLAGS=%#RX32\n", uEFlags.u32));
     2786        Log4(("Load: VMX_VMCS_GUEST_RFLAGS=%#RX32\n", uEFlags.u32));
    27872787        pVCpu->hm.s.fContextUseFlags &= ~HM_CHANGED_GUEST_RFLAGS;
    27882788    }
     
    28442844        rc  = VMXWriteVmcs32(VMX_VMCS_CTRL_CR0_READ_SHADOW, u32GuestCR0);
    28452845        AssertRCReturn(rc, rc);
    2846         Log(("Load: VMX_VMCS_CTRL_CR0_READ_SHADOW=%#RX32\n", u32GuestCR0));
     2846        Log4(("Load: VMX_VMCS_CTRL_CR0_READ_SHADOW=%#RX32\n", u32GuestCR0));
    28472847
    28482848        /* Setup VT-x's view of the guest CR0. */
     
    29552955        rc = VMXWriteVmcs32(VMX_VMCS32_CTRL_EXCEPTION_BITMAP, pVCpu->hm.s.vmx.u32XcptBitmap);
    29562956        AssertRCReturn(rc, rc);
    2957         Log(("Load: VMX_VMCS_GUEST_CR0=%#RX32 (uSetCR0=%#RX32 uZapCR0=%#RX32)\n", u32GuestCR0, uSetCR0, uZapCR0));
     2957        Log4(("Load: VMX_VMCS_GUEST_CR0=%#RX32 (uSetCR0=%#RX32 uZapCR0=%#RX32)\n", u32GuestCR0, uSetCR0, uZapCR0));
    29582958
    29592959        /*
     
    30203020            rc = VMXWriteVmcs64(VMX_VMCS64_CTRL_EPTP_FULL, pVCpu->hm.s.vmx.HCPhysEPTP);
    30213021            AssertRCReturn(rc, rc);
    3022             Log(("Load: VMX_VMCS64_CTRL_EPTP_FULL=%#RX64\n", pVCpu->hm.s.vmx.HCPhysEPTP));
     3022            Log4(("Load: VMX_VMCS64_CTRL_EPTP_FULL=%#RX64\n", pVCpu->hm.s.vmx.HCPhysEPTP));
    30233023
    30243024            if (   pVM->hm.s.vmx.fUnrestrictedGuest
     
    30573057            }
    30583058
    3059             Log(("Load: VMX_VMCS_GUEST_CR3=%#RGv (GstN)\n", GCPhysGuestCR3));
     3059            Log4(("Load: VMX_VMCS_GUEST_CR3=%#RGv (GstN)\n", GCPhysGuestCR3));
    30603060            rc = VMXWriteVmcsGstN(VMX_VMCS_GUEST_CR3, GCPhysGuestCR3);
    30613061        }
     
    30653065            RTHCPHYS HCPhysGuestCR3 = PGMGetHyperCR3(pVCpu);
    30663066
    3067             Log(("Load: VMX_VMCS_GUEST_CR3=%#RHv (HstN)\n", HCPhysGuestCR3));
     3067            Log4(("Load: VMX_VMCS_GUEST_CR3=%#RHv (HstN)\n", HCPhysGuestCR3));
    30683068            rc = VMXWriteVmcsHstN(VMX_VMCS_GUEST_CR3, HCPhysGuestCR3);
    30693069        }
     
    30843084        rc = VMXWriteVmcs32(VMX_VMCS_CTRL_CR4_READ_SHADOW, u32GuestCR4);
    30853085        AssertRCReturn(rc, rc);
    3086         Log(("Load: VMX_VMCS_CTRL_CR4_READ_SHADOW=%#RX32\n", u32GuestCR4));
     3086        Log4(("Load: VMX_VMCS_CTRL_CR4_READ_SHADOW=%#RX32\n", u32GuestCR4));
    30873087
    30883088        /* Setup VT-x's view of the guest CR4. */
     
    31523152
    31533153        /* Write VT-x's view of the guest CR4 into the VMCS. */
    3154         Log(("Load: VMX_VMCS_GUEST_CR4=%#RX32 (Set=%#RX32 Zap=%#RX32)\n", u32GuestCR4, uSetCR4, uZapCR4));
     3154        Log4(("Load: VMX_VMCS_GUEST_CR4=%#RX32 (Set=%#RX32 Zap=%#RX32)\n", u32GuestCR4, uSetCR4, uZapCR4));
    31553155        rc = VMXWriteVmcs32(VMX_VMCS_GUEST_CR4, u32GuestCR4);
    31563156        AssertRCReturn(rc, rc);
     
    35543554                   in real-mode (e.g. OpenBSD 4.0) */
    35553555                REMFlushTBs(pVM);
    3556                 Log(("Load: Switch to protected mode detected!\n"));
     3556                Log4(("Load: Switch to protected mode detected!\n"));
    35573557                pVCpu->hm.s.vmx.fWasInRealMode = false;
    35583558            }
     
    35783578        AssertRCReturn(rc, rc);
    35793579
    3580         Log(("Load: CS=%#RX16 Base=%#RX64 Limit=%#RX32 Attr=%#RX32\n", pMixedCtx->cs.Sel, pMixedCtx->cs.u64Base,
     3580        Log4(("Load: CS=%#RX16 Base=%#RX64 Limit=%#RX32 Attr=%#RX32\n", pMixedCtx->cs.Sel, pMixedCtx->cs.u64Base,
    35813581             pMixedCtx->cs.u32Limit, pMixedCtx->cs.Attr.u));
    35823582#ifdef VBOX_STRICT
     
    36483648        rc = VMXWriteVmcs32(VMX_VMCS32_GUEST_TR_ACCESS_RIGHTS, u32AccessRights);       AssertRCReturn(rc, rc);
    36493649
    3650         Log(("Load: VMX_VMCS_GUEST_TR_BASE=%#RX64\n", u64Base));
     3650        Log4(("Load: VMX_VMCS_GUEST_TR_BASE=%#RX64\n", u64Base));
    36513651        pVCpu->hm.s.fContextUseFlags &= ~HM_CHANGED_GUEST_TR;
    36523652    }
     
    36613661
    36623662        Assert(!(pMixedCtx->gdtr.cbGdt & UINT64_C(0xffff0000)));       /* Bits 31:16 MBZ. */
    3663         Log(("Load: VMX_VMCS_GUEST_GDTR_BASE=%#RX64\n", pMixedCtx->gdtr.pGdt));
     3663        Log4(("Load: VMX_VMCS_GUEST_GDTR_BASE=%#RX64\n", pMixedCtx->gdtr.pGdt));
    36643664        pVCpu->hm.s.fContextUseFlags &= ~HM_CHANGED_GUEST_GDTR;
    36653665    }
     
    36973697        }
    36983698
    3699         Log(("Load: VMX_VMCS_GUEST_LDTR_BASE=%#RX64\n",  pMixedCtx->ldtr.u64Base));
     3699        Log4(("Load: VMX_VMCS_GUEST_LDTR_BASE=%#RX64\n",  pMixedCtx->ldtr.u64Base));
    37003700        pVCpu->hm.s.fContextUseFlags &= ~HM_CHANGED_GUEST_LDTR;
    37013701    }
     
    37103710
    37113711        Assert(!(pMixedCtx->idtr.cbIdt & UINT64_C(0xffff0000)));       /* Bits 31:16 MBZ. */
    3712         Log(("Load: VMX_VMCS_GUEST_IDTR_BASE=%#RX64\n", pMixedCtx->idtr.pIdt));
     3712        Log4(("Load: VMX_VMCS_GUEST_IDTR_BASE=%#RX64\n", pMixedCtx->idtr.pIdt));
    37133713        pVCpu->hm.s.fContextUseFlags &= ~HM_CHANGED_GUEST_IDTR;
    37143714    }
     
    39333933    Assert(!RTThreadPreemptIsEnabled(NIL_RTTHREAD));
    39343934
    3935     Log(("VM-entry failure: %Rrc\n", rcVMRun));
     3935    Log4(("VM-entry failure: %Rrc\n", rcVMRun));
    39363936    switch (rcVMRun)
    39373937    {
     
    39483948
    39493949#ifdef VBOX_STRICT
    3950                 Log(("uExitReason        %#RX32 (VmxTransient %#RX16)\n", pVCpu->hm.s.vmx.lasterror.u32ExitReason,
     3950                Log4(("uExitReason        %#RX32 (VmxTransient %#RX16)\n", pVCpu->hm.s.vmx.lasterror.u32ExitReason,
    39513951                     pVmxTransient->uExitReason));
    3952                 Log(("Exit Qualification %#RX64\n", pVmxTransient->uExitQualification));
    3953                 Log(("InstrError         %#RX32\n", pVCpu->hm.s.vmx.lasterror.u32InstrError));
     3952                Log4(("Exit Qualification %#RX64\n", pVmxTransient->uExitQualification));
     3953                Log4(("InstrError         %#RX32\n", pVCpu->hm.s.vmx.lasterror.u32InstrError));
    39543954                if (pVCpu->hm.s.vmx.lasterror.u32InstrError <= HMVMX_INSTR_ERROR_MAX)
    3955                     Log(("InstrError Desc.  \"%s\"\n", g_apszVmxInstrErrors[pVCpu->hm.s.vmx.lasterror.u32InstrError]));
     3955                    Log4(("InstrError Desc.  \"%s\"\n", g_apszVmxInstrErrors[pVCpu->hm.s.vmx.lasterror.u32InstrError]));
    39563956                else
    3957                     Log(("InstrError Desc.    Range exceeded %u\n", HMVMX_INSTR_ERROR_MAX));
     3957                    Log4(("InstrError Desc.    Range exceeded %u\n", HMVMX_INSTR_ERROR_MAX));
    39583958
    39593959                /* VMX control bits. */
     
    39623962                HMVMXHCUINTREG  uHCReg;
    39633963                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_PIN_EXEC, &u32Val);                  AssertRC(rc);
    3964                 Log(("VMX_VMCS32_CTRL_PIN_EXEC                %#RX32\n", u32Val));
     3964                Log4(("VMX_VMCS32_CTRL_PIN_EXEC                %#RX32\n", u32Val));
    39653965                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_PROC_EXEC, &u32Val);                 AssertRC(rc);
    3966                 Log(("VMX_VMCS32_CTRL_PROC_EXEC               %#RX32\n", u32Val));
     3966                Log4(("VMX_VMCS32_CTRL_PROC_EXEC               %#RX32\n", u32Val));
    39673967                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_PROC_EXEC2, &u32Val);                AssertRC(rc);
    3968                 Log(("VMX_VMCS32_CTRL_PROC_EXEC2              %#RX32\n", u32Val));
     3968                Log4(("VMX_VMCS32_CTRL_PROC_EXEC2              %#RX32\n", u32Val));
    39693969                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_ENTRY, &u32Val);                     AssertRC(rc);
    3970                 Log(("VMX_VMCS32_CTRL_ENTRY                   %#RX32\n", u32Val));
     3970                Log4(("VMX_VMCS32_CTRL_ENTRY                   %#RX32\n", u32Val));
    39713971                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_EXIT, &u32Val);                      AssertRC(rc);
    3972                 Log(("VMX_VMCS32_CTRL_EXIT                    %#RX32\n", u32Val));
     3972                Log4(("VMX_VMCS32_CTRL_EXIT                    %#RX32\n", u32Val));
    39733973                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_CR3_TARGET_COUNT, &u32Val);          AssertRC(rc);
    3974                 Log(("VMX_VMCS32_CTRL_CR3_TARGET_COUNT        %#RX32\n", u32Val));
     3974                Log4(("VMX_VMCS32_CTRL_CR3_TARGET_COUNT        %#RX32\n", u32Val));
    39753975                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_ENTRY_INTERRUPTION_INFO, &u32Val);   AssertRC(rc);
    3976                 Log(("VMX_VMCS32_CTRL_ENTRY_INTERRUPTION_INFO %#RX32\n", u32Val));
     3976                Log4(("VMX_VMCS32_CTRL_ENTRY_INTERRUPTION_INFO %#RX32\n", u32Val));
    39773977                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_ENTRY_EXCEPTION_ERRCODE, &u32Val);   AssertRC(rc);
    3978                 Log(("VMX_VMCS32_CTRL_ENTRY_EXCEPTION_ERRCODE %#RX32\n", u32Val));
     3978                Log4(("VMX_VMCS32_CTRL_ENTRY_EXCEPTION_ERRCODE %#RX32\n", u32Val));
    39793979                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_ENTRY_INSTR_LENGTH, &u32Val);        AssertRC(rc);
    3980                 Log(("VMX_VMCS32_CTRL_ENTRY_INSTR_LENGTH      %u\n", u32Val));
     3980                Log4(("VMX_VMCS32_CTRL_ENTRY_INSTR_LENGTH      %u\n", u32Val));
    39813981                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_TPR_THRESHOLD, &u32Val);             AssertRC(rc);
    3982                 Log(("VMX_VMCS32_CTRL_TPR_THRESHOLD           %u\n", u32Val));
     3982                Log4(("VMX_VMCS32_CTRL_TPR_THRESHOLD           %u\n", u32Val));
    39833983                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_STORE_COUNT, &u32Val);      AssertRC(rc);
    3984                 Log(("VMX_VMCS32_CTRL_EXIT_MSR_STORE_COUNT    %u (guest MSRs)\n", u32Val));
     3984                Log4(("VMX_VMCS32_CTRL_EXIT_MSR_STORE_COUNT    %u (guest MSRs)\n", u32Val));
    39853985                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_LOAD_COUNT, &u32Val);       AssertRC(rc);
    3986                 Log(("VMX_VMCS32_CTRL_EXIT_MSR_LOAD_COUNT     %u (host MSRs)\n", u32Val));
     3986                Log4(("VMX_VMCS32_CTRL_EXIT_MSR_LOAD_COUNT     %u (host MSRs)\n", u32Val));
    39873987                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_ENTRY_MSR_LOAD_COUNT, &u32Val);      AssertRC(rc);
    3988                 Log(("VMX_VMCS32_CTRL_ENTRY_MSR_LOAD_COUNT    %u (guest MSRs)\n", u32Val));
     3988                Log4(("VMX_VMCS32_CTRL_ENTRY_MSR_LOAD_COUNT    %u (guest MSRs)\n", u32Val));
    39893989                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_EXCEPTION_BITMAP, &u32Val);          AssertRC(rc);
    3990                 Log(("VMX_VMCS32_CTRL_EXCEPTION_BITMAP        %#RX32\n", u32Val));
     3990                Log4(("VMX_VMCS32_CTRL_EXCEPTION_BITMAP        %#RX32\n", u32Val));
    39913991                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_PAGEFAULT_ERROR_MASK, &u32Val);      AssertRC(rc);
    3992                 Log(("VMX_VMCS32_CTRL_PAGEFAULT_ERROR_MASK    %#RX32\n", u32Val));
     3992                Log4(("VMX_VMCS32_CTRL_PAGEFAULT_ERROR_MASK    %#RX32\n", u32Val));
    39933993                rc = VMXReadVmcs32(VMX_VMCS32_CTRL_PAGEFAULT_ERROR_MATCH, &u32Val);     AssertRC(rc);
    3994                 Log(("VMX_VMCS32_CTRL_PAGEFAULT_ERROR_MATCH   %#RX32\n", u32Val));
     3994                Log4(("VMX_VMCS32_CTRL_PAGEFAULT_ERROR_MATCH   %#RX32\n", u32Val));
    39953995                rc = VMXReadVmcsHstN(VMX_VMCS_CTRL_CR0_MASK, &uHCReg);                  AssertRC(rc);
    3996                 Log(("VMX_VMCS_CTRL_CR0_MASK                  %#RHr\n", uHCReg));
     3996                Log4(("VMX_VMCS_CTRL_CR0_MASK                  %#RHr\n", uHCReg));
    39973997                rc = VMXReadVmcsHstN(VMX_VMCS_CTRL_CR0_READ_SHADOW, &uHCReg);           AssertRC(rc);
    3998                 Log(("VMX_VMCS_CTRL_CR4_READ_SHADOW           %#RHr\n", uHCReg));
     3998                Log4(("VMX_VMCS_CTRL_CR4_READ_SHADOW           %#RHr\n", uHCReg));
    39993999                rc = VMXReadVmcsHstN(VMX_VMCS_CTRL_CR4_MASK, &uHCReg);                  AssertRC(rc);
    4000                 Log(("VMX_VMCS_CTRL_CR4_MASK                  %#RHr\n", uHCReg));
     4000                Log4(("VMX_VMCS_CTRL_CR4_MASK                  %#RHr\n", uHCReg));
    40014001                rc = VMXReadVmcsHstN(VMX_VMCS_CTRL_CR4_READ_SHADOW, &uHCReg);           AssertRC(rc);
    4002                 Log(("VMX_VMCS_CTRL_CR4_READ_SHADOW           %#RHr\n", uHCReg));
     4002                Log4(("VMX_VMCS_CTRL_CR4_READ_SHADOW           %#RHr\n", uHCReg));
    40034003                rc = VMXReadVmcs64(VMX_VMCS64_CTRL_EPTP_FULL, &u64Val);                 AssertRC(rc);
    4004                 Log(("VMX_VMCS64_CTRL_EPTP_FULL               %#RX64\n", u64Val));
     4004                Log4(("VMX_VMCS64_CTRL_EPTP_FULL               %#RX64\n", u64Val));
    40054005
    40064006                /* Guest bits. */
    40074007                rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_RIP, &u64Val);          AssertRC(rc);
    4008                 Log(("Old Guest Rip %#RX64 New %#RX64\n", pCtx->rip, u64Val));
     4008                Log4(("Old Guest Rip %#RX64 New %#RX64\n", pCtx->rip, u64Val));
    40094009                rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_RSP, &u64Val);          AssertRC(rc);
    4010                 Log(("Old Guest Rsp %#RX64 New %#RX64\n", pCtx->rsp, u64Val));
     4010                Log4(("Old Guest Rsp %#RX64 New %#RX64\n", pCtx->rsp, u64Val));
    40114011                rc = VMXReadVmcs32(VMX_VMCS_GUEST_RFLAGS, &u32Val);         AssertRC(rc);
    4012                 Log(("Old Guest Rflags %#RX32 New %#RX32\n", pCtx->eflags.u32, u32Val));
     4012                Log4(("Old Guest Rflags %#RX32 New %#RX32\n", pCtx->eflags.u32, u32Val));
    40134013                rc = VMXReadVmcs32(VMX_VMCS16_GUEST_FIELD_VPID, &u32Val);   AssertRC(rc);
    4014                 Log(("VMX_VMCS16_GUEST_FIELD_VPID %u\n", u32Val));
     4014                Log4(("VMX_VMCS16_GUEST_FIELD_VPID %u\n", u32Val));
    40154015
    40164016                /* Host bits. */
    40174017                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_CR0, &uHCReg);           AssertRC(rc);
    4018                 Log(("Host CR0 %#RHr\n", uHCReg));
     4018                Log4(("Host CR0 %#RHr\n", uHCReg));
    40194019                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_CR3, &uHCReg);           AssertRC(rc);
    4020                 Log(("Host CR3 %#RHr\n", uHCReg));
     4020                Log4(("Host CR3 %#RHr\n", uHCReg));
    40214021                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_CR4, &uHCReg);           AssertRC(rc);
    4022                 Log(("Host CR4 %#RHr\n", uHCReg));
     4022                Log4(("Host CR4 %#RHr\n", uHCReg));
    40234023
    40244024                RTGDTR      HostGdtr;
     
    40264026                ASMGetGDTR(&HostGdtr);
    40274027                rc = VMXReadVmcs32(VMX_VMCS16_HOST_FIELD_CS, &u32Val);      AssertRC(rc);
    4028                 Log(("Host CS %#08x\n", u32Val));
     4028                Log4(("Host CS %#08x\n", u32Val));
    40294029                if (u32Val < HostGdtr.cbGdt)
    40304030                {
     
    40344034
    40354035                rc = VMXReadVmcs32(VMX_VMCS16_HOST_FIELD_DS, &u32Val);      AssertRC(rc);
    4036                 Log(("Host DS %#08x\n", u32Val));
     4036                Log4(("Host DS %#08x\n", u32Val));
    40374037                if (u32Val < HostGdtr.cbGdt)
    40384038                {
     
    40424042
    40434043                rc = VMXReadVmcs32(VMX_VMCS16_HOST_FIELD_ES, &u32Val);      AssertRC(rc);
    4044                 Log(("Host ES %#08x\n", u32Val));
     4044                Log4(("Host ES %#08x\n", u32Val));
    40454045                if (u32Val < HostGdtr.cbGdt)
    40464046                {
     
    40504050
    40514051                rc = VMXReadVmcs32(VMX_VMCS16_HOST_FIELD_FS, &u32Val);      AssertRC(rc);
    4052                 Log(("Host FS %#08x\n", u32Val));
     4052                Log4(("Host FS %#08x\n", u32Val));
    40534053                if (u32Val < HostGdtr.cbGdt)
    40544054                {
     
    40584058
    40594059                rc = VMXReadVmcs32(VMX_VMCS16_HOST_FIELD_GS, &u32Val);      AssertRC(rc);
    4060                 Log(("Host GS %#08x\n", u32Val));
     4060                Log4(("Host GS %#08x\n", u32Val));
    40614061                if (u32Val < HostGdtr.cbGdt)
    40624062                {
     
    40664066
    40674067                rc = VMXReadVmcs32(VMX_VMCS16_HOST_FIELD_SS, &u32Val);      AssertRC(rc);
    4068                 Log(("Host SS %#08x\n", u32Val));
     4068                Log4(("Host SS %#08x\n", u32Val));
    40694069                if (u32Val < HostGdtr.cbGdt)
    40704070                {
     
    40744074
    40754075                rc = VMXReadVmcs32(VMX_VMCS16_HOST_FIELD_TR,  &u32Val);     AssertRC(rc);
    4076                 Log(("Host TR %#08x\n", u32Val));
     4076                Log4(("Host TR %#08x\n", u32Val));
    40774077                if (u32Val < HostGdtr.cbGdt)
    40784078                {
     
    40824082
    40834083                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_TR_BASE, &uHCReg);       AssertRC(rc);
    4084                 Log(("Host TR Base %#RHv\n", uHCReg));
     4084                Log4(("Host TR Base %#RHv\n", uHCReg));
    40854085                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_GDTR_BASE, &uHCReg);     AssertRC(rc);
    4086                 Log(("Host GDTR Base %#RHv\n", uHCReg));
     4086                Log4(("Host GDTR Base %#RHv\n", uHCReg));
    40874087                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_IDTR_BASE, &uHCReg);     AssertRC(rc);
    4088                 Log(("Host IDTR Base %#RHv\n", uHCReg));
     4088                Log4(("Host IDTR Base %#RHv\n", uHCReg));
    40894089                rc = VMXReadVmcs32(VMX_VMCS32_HOST_SYSENTER_CS, &u32Val);   AssertRC(rc);
    4090                 Log(("Host SYSENTER CS  %#08x\n", u32Val));
     4090                Log4(("Host SYSENTER CS  %#08x\n", u32Val));
    40914091                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_SYSENTER_EIP, &uHCReg);  AssertRC(rc);
    4092                 Log(("Host SYSENTER EIP %#RHv\n", uHCReg));
     4092                Log4(("Host SYSENTER EIP %#RHv\n", uHCReg));
    40934093                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_SYSENTER_ESP, &uHCReg);  AssertRC(rc);
    4094                 Log(("Host SYSENTER ESP %#RHv\n", uHCReg));
     4094                Log4(("Host SYSENTER ESP %#RHv\n", uHCReg));
    40954095                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_RSP, &uHCReg);           AssertRC(rc);
    4096                 Log(("Host RSP %#RHv\n", uHCReg));
     4096                Log4(("Host RSP %#RHv\n", uHCReg));
    40974097                rc = VMXReadVmcsHstN(VMX_VMCS_HOST_RIP, &uHCReg);           AssertRC(rc);
    4098                 Log(("Host RIP %#RHv\n", uHCReg));
     4098                Log4(("Host RIP %#RHv\n", uHCReg));
    40994099# if HC_ARCH_BITS == 64 || defined(VBOX_WITH_HYBRID_32BIT_KERNEL)
    41004100                if (HMVMX_IS_64BIT_HOST_MODE())
    41014101                {
    4102                     Log(("MSR_K6_EFER            = %#RX64\n", ASMRdMsr(MSR_K6_EFER)));
    4103                     Log(("MSR_K6_STAR            = %#RX64\n", ASMRdMsr(MSR_K6_STAR)));
    4104                     Log(("MSR_K8_LSTAR           = %#RX64\n", ASMRdMsr(MSR_K8_LSTAR)));
    4105                     Log(("MSR_K8_CSTAR           = %#RX64\n", ASMRdMsr(MSR_K8_CSTAR)));
    4106                     Log(("MSR_K8_SF_MASK         = %#RX64\n", ASMRdMsr(MSR_K8_SF_MASK)));
    4107                     Log(("MSR_K8_KERNEL_GS_BASE  = %#RX64\n", ASMRdMsr(MSR_K8_KERNEL_GS_BASE)));
     4102                    Log4(("MSR_K6_EFER            = %#RX64\n", ASMRdMsr(MSR_K6_EFER)));
     4103                    Log4(("MSR_K6_STAR            = %#RX64\n", ASMRdMsr(MSR_K6_STAR)));
     4104                    Log4(("MSR_K8_LSTAR           = %#RX64\n", ASMRdMsr(MSR_K8_LSTAR)));
     4105                    Log4(("MSR_K8_CSTAR           = %#RX64\n", ASMRdMsr(MSR_K8_CSTAR)));
     4106                    Log4(("MSR_K8_SF_MASK         = %#RX64\n", ASMRdMsr(MSR_K8_SF_MASK)));
     4107                    Log4(("MSR_K8_KERNEL_GS_BASE  = %#RX64\n", ASMRdMsr(MSR_K8_KERNEL_GS_BASE)));
    41084108                }
    41094109# endif
     
    47664766                && uExitVector == X86_XCPT_PF)
    47674767            {
    4768                 Log(("IDT: Contributory #PF uCR2=%#RX64\n", pMixedCtx->cr2));
     4768                Log4(("IDT: Contributory #PF uCR2=%#RX64\n", pMixedCtx->cr2));
    47694769            }
    47704770#endif
     
    47734773            {
    47744774                pVmxTransient->fVectoringPF = true;
    4775                 Log(("IDT: Vectoring #PF uCR2=%#RX64\n", pMixedCtx->cr2));
     4775                Log4(("IDT: Vectoring #PF uCR2=%#RX64\n", pMixedCtx->cr2));
    47764776            }
    47774777            else if (   (pVCpu->hm.s.vmx.u32XcptBitmap & HMVMX_CONTRIBUTORY_XCPT_MASK)
     
    48124812                                       0 /* cbInstr */,  u32ErrCode, pMixedCtx->cr2);
    48134813                rc = VINF_SUCCESS;
    4814                 Log(("IDT: Pending vectoring event %#RX64 Err=%#RX32\n", pVCpu->hm.s.Event.u64IntrInfo,
     4814                Log4(("IDT: Pending vectoring event %#RX64 Err=%#RX32\n", pVCpu->hm.s.Event.u64IntrInfo,
    48154815                     pVCpu->hm.s.Event.u32ErrCode));
    48164816                break;
     
    48214821                hmR0VmxSetPendingXcptDF(pVCpu, pMixedCtx);
    48224822                rc = VINF_VMX_DOUBLE_FAULT;
    4823                 Log(("IDT: Pending vectoring #DF %#RX64 uIdtVector=%#x uExitVector=%#x\n", pVCpu->hm.s.Event.u64IntrInfo,
     4823                Log4(("IDT: Pending vectoring #DF %#RX64 uIdtVector=%#x uExitVector=%#x\n", pVCpu->hm.s.Event.u64IntrInfo,
    48244824                     uIdtVector, uExitVector));
    48254825                break;
     
    48284828            case VMXREFLECTXCPT_TF:
    48294829            {
    4830                 Log(("IDT: Pending vectoring triple-fault uIdt=%#x uExit=%#x\n", uIdtVector, uExitVector));
     4830                Log4(("IDT: Pending vectoring triple-fault uIdt=%#x uExit=%#x\n", uIdtVector, uExitVector));
    48314831                rc = VINF_EM_RESET;
    48324832                break;
     
    49814981        {
    49824982            Assert(pVCpu->CTX_SUFF(pVM)->hm.s.vmx.pRealModeTSS);
    4983             Log(("Saving real-mode EFLAGS VT-x view=%#RX32\n", pMixedCtx->eflags.u32));
     4983            Log4(("Saving real-mode EFLAGS VT-x view=%#RX32\n", pMixedCtx->eflags.u32));
    49844984
    49854985            pMixedCtx->eflags.Bits.u1VM   = 0;
     
    56435643            {
    56445644                AssertRC(rc);
    5645                 Log(("hmR0VmxCheckForceFlags: PGMSyncCR3 forcing us back to ring-3. rc=%d\n", rc));
     5645                Log4(("hmR0VmxCheckForceFlags: PGMSyncCR3 forcing us back to ring-3. rc=%d\n", rc));
    56465646                return rc;
    56475647            }
     
    56555655            STAM_COUNTER_INC(&pVCpu->hm.s.StatSwitchHmToR3FF);
    56565656            rc = RT_UNLIKELY(VM_FF_IS_PENDING(pVM, VM_FF_PGM_NO_MEMORY)) ? VINF_EM_NO_MEMORY : VINF_EM_RAW_TO_R3;
    5657             Log(("hmR0VmxCheckForceFlags: HM_TO_R3 forcing us back to ring-3. rc=%d\n", rc));
     5657            Log4(("hmR0VmxCheckForceFlags: HM_TO_R3 forcing us back to ring-3. rc=%d\n", rc));
    56585658            return rc;
    56595659        }
     
    56635663            || VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_REQUEST))
    56645664        {
    5665             Log(("hmR0VmxCheckForceFlags: Pending VM request forcing us back to ring-3\n"));
     5665            Log4(("hmR0VmxCheckForceFlags: Pending VM request forcing us back to ring-3\n"));
    56665666            return VINF_EM_PENDING_REQUEST;
    56675667        }
     
    56705670        if (VM_FF_IS_PENDING(pVM, VM_FF_PGM_POOL_FLUSH_PENDING))
    56715671        {
    5672             Log(("hmR0VmxCheckForceFlags: PGM pool flush pending forcing us back to ring-3\n"));
     5672            Log4(("hmR0VmxCheckForceFlags: PGM pool flush pending forcing us back to ring-3\n"));
    56735673            return VINF_PGM_POOL_FLUSH_PENDING;
    56745674        }
     
    56775677        if (VM_FF_IS_PENDING(pVM, VM_FF_PDM_DMA))
    56785678        {
    5679             Log(("hmR0VmxCheckForceFlags: Pending DMA request forcing us back to ring-3\n"));
     5679            Log4(("hmR0VmxCheckForceFlags: Pending DMA request forcing us back to ring-3\n"));
    56805680            return VINF_EM_RAW_TO_R3;
    56815681        }
     
    57515751    rc = TRPMResetTrap(pVCpu);
    57525752    AssertRC(rc);
    5753     Log(("TRPM->HM event: u32IntrInfo=%#RX32 enmTrpmEvent=%d cbInstr=%u uErrCode=%#RX32 GCPtrFaultAddress=%#RGv\n",
     5753    Log4(("TRPM->HM event: u32IntrInfo=%#RX32 enmTrpmEvent=%d cbInstr=%u uErrCode=%#RX32 GCPtrFaultAddress=%#RGv\n",
    57545754         u32IntrInfo, enmTrpmEvent, cbInstr, uErrCode, GCPtrFaultAddress));
    57555755    hmR0VmxSetPendingEvent(pVCpu, u32IntrInfo, cbInstr, uErrCode, GCPtrFaultAddress);
     
    57965796    }
    57975797
    5798     Log(("HM event->TRPM: uVector=%#x enmTrapType=%d\n", uVector, enmTrapType));
     5798    Log4(("HM event->TRPM: uVector=%#x enmTrapType=%d\n", uVector, enmTrapType));
    57995799
    58005800    int rc = TRPMAssertTrap(pVCpu, uVector, enmTrapType);
     
    59135913    /* Please, no longjumps here (any logging shouldn't flush jump back to ring-3). NO LOGGING BEFORE THIS POINT! */
    59145914    VMMRZCallRing3Disable(pVCpu);
    5915     Log(("hmR0VmxExitToRing3: rcExit=%d\n", rcExit));
     5915    Log4(("hmR0VmxExitToRing3: rcExit=%d\n", rcExit));
    59165916
    59175917    /* We need to do this only while truly exiting the "inner loop" back to ring-3 and -not- for any longjmp to ring3. */
     
    59685968    VMMRZCallRing3Disable(pVCpu);
    59695969    Assert(VMMR0IsLogFlushDisabled(pVCpu));
    5970     Log(("hmR0VmxCallRing3Callback->hmR0VmxLongJmpToRing3\n"));
     5970    Log4(("hmR0VmxCallRing3Callback->hmR0VmxLongJmpToRing3\n"));
    59715971    hmR0VmxLongJmpToRing3(pVCpu->CTX_SUFF(pVM), pVCpu, (PCPUMCTX)pvUser, VINF_VMM_UNKNOWN_RING3_CALL);
    59725972    VMMRZCallRing3Enable(pVCpu);
     
    60456045        if (fInject)
    60466046        {
    6047             Log(("Injecting pending event\n"));
     6047            Log4(("Injecting pending event\n"));
    60486048            rc = hmR0VmxInjectEventVmcs(pVCpu, pMixedCtx, pVCpu->hm.s.Event.u64IntrInfo, pVCpu->hm.s.Event.cbInstr,
    60496049                                        pVCpu->hm.s.Event.u32ErrCode, pVCpu->hm.s.Event.GCPtrFaultAddress, &uIntrState);
     
    60616061            && !fBlockSti)
    60626062        {
    6063             Log(("Injecting NMI\n"));
     6063            Log4(("Injecting NMI\n"));
    60646064            uint32_t u32IntrInfo = X86_XCPT_NMI | VMX_EXIT_INTERRUPTION_INFO_VALID;
    60656065            u32IntrInfo         |= (VMX_EXIT_INTERRUPTION_INFO_TYPE_NMI << VMX_EXIT_INTERRUPTION_INFO_TYPE_SHIFT);
     
    60866086            if (RT_SUCCESS(rc))
    60876087            {
    6088                 Log(("Injecting interrupt u8Interrupt=%#x\n", u8Interrupt));
     6088                Log4(("Injecting interrupt u8Interrupt=%#x\n", u8Interrupt));
    60896089                uint32_t u32IntrInfo = u8Interrupt | VMX_EXIT_INTERRUPTION_INFO_VALID;
    60906090                u32IntrInfo         |= (VMX_EXIT_INTERRUPTION_INFO_TYPE_EXT_INT << VMX_EXIT_INTERRUPTION_INFO_TYPE_SHIFT);
     
    64216421                    Assert(   uIntrType != VMX_EXIT_INTERRUPTION_INFO_TYPE_NMI
    64226422                           && uIntrType != VMX_EXIT_INTERRUPTION_INFO_TYPE_EXT_INT);
    6423                     Log(("Clearing inhibition due to STI.\n"));
     6423                    Log4(("Clearing inhibition due to STI.\n"));
    64246424                    *puIntrState &= ~VMX_VMCS_GUEST_INTERRUPTIBILITY_STATE_BLOCK_STI;
    64256425                }
    6426                 Log(("Injecting real-mode: u32IntrInfo=%#x u32ErrCode=%#x instrlen=%#x\n", u32IntrInfo, u32ErrCode, cbInstr));
     6426                Log4(("Injecting real-mode: u32IntrInfo=%#x u32ErrCode=%#x instrlen=%#x\n", u32IntrInfo, u32ErrCode, cbInstr));
    64276427            }
    64286428            Assert(rc == VINF_SUCCESS || rc == VINF_EM_RESET);
     
    64556455        pMixedCtx->cr2 = GCPtrFaultAddress;
    64566456    }
    6457     Log(("Injecting u32IntrInfo=%#x u32ErrCode=%#x cbInstr=%#x uCR2=%#RX64\n", u32IntrInfo, u32ErrCode, cbInstr, pMixedCtx->cr2));
     6457
     6458    Log4(("Injecting u32IntrInfo=%#x u32ErrCode=%#x cbInstr=%#x pMixedCtx->uCR2=%#RX64\n", u32IntrInfo, u32ErrCode, cbInstr,
     6459          pMixedCtx->cr2));
    64586460
    64596461    AssertRCReturn(rc, rc);
     
    67226724
    67236725        /* Map the HC APIC-access page into the GC space, this also updates the shadow page tables if necessary. */
    6724         Log(("Mapped HC APIC-access page into GC: GCPhysApicBase=%#RGv\n", GCPhysApicBase));
     6726        Log4(("Mapped HC APIC-access page into GC: GCPhysApicBase=%#RGv\n", GCPhysApicBase));
    67256727        rc = IOMMMIOMapMMIOHCPage(pVM, pVCpu, GCPhysApicBase, pVM->hm.s.vmx.HCPhysApicAccess, X86_PTE_RW | X86_PTE_P);
    67266728        AssertRCReturn(rc, rc);
     
    67866788    /* Load the required guest state bits (for guest-state changes in the inner execution loop). */
    67876789    Assert(!(pVCpu->hm.s.fContextUseFlags & HM_CHANGED_HOST_CONTEXT));
    6788     Log4(("LoadFlags=%#RX32\n", pVCpu->hm.s.fContextUseFlags));
     6790    Log5(("LoadFlags=%#RX32\n", pVCpu->hm.s.fContextUseFlags));
    67896791#ifdef HMVMX_SYNC_FULL_GUEST_STATE
    67906792    pVCpu->hm.s.fContextUseFlags |= HM_CHANGED_ALL_GUEST;
     
    69286930    if (RT_UNLIKELY(rcVMRun != VINF_SUCCESS))
    69296931    {
    6930         Log(("VM-entry failure: rcVMRun=%Rrc fVMEntryFailed=%RTbool\n", rcVMRun, pVmxTransient->fVMEntryFailed));
     6932        Log4(("VM-entry failure: rcVMRun=%Rrc fVMEntryFailed=%RTbool\n", rcVMRun, pVmxTransient->fVMEntryFailed));
    69316933        return;
    69326934    }
     
    77527754    AssertRCReturn(rc, rc);
    77537755
    7754     Log(("VMX_VMCS32_CTRL_ENTRY_INTERRUPTION_INFO    %#RX32\n", pVmxTransient->uEntryIntrInfo));
    7755     Log(("VMX_VMCS32_CTRL_ENTRY_EXCEPTION_ERRCODE    %#RX32\n", pVmxTransient->uEntryXcptErrorCode));
    7756     Log(("VMX_VMCS32_CTRL_ENTRY_INSTR_LENGTH         %#RX32\n", pVmxTransient->cbEntryInstr));
    7757     Log(("VMX_VMCS32_GUEST_INTERRUPTIBILITY_STATE    %#RX32\n", uIntrState));
     7756    Log4(("VMX_VMCS32_CTRL_ENTRY_INTERRUPTION_INFO    %#RX32\n", pVmxTransient->uEntryIntrInfo));
     7757    Log4(("VMX_VMCS32_CTRL_ENTRY_EXCEPTION_ERRCODE    %#RX32\n", pVmxTransient->uEntryXcptErrorCode));
     7758    Log4(("VMX_VMCS32_CTRL_ENTRY_INSTR_LENGTH         %#RX32\n", pVmxTransient->cbEntryInstr));
     7759    Log4(("VMX_VMCS32_GUEST_INTERRUPTIBILITY_STATE    %#RX32\n", uIntrState));
    77587760
    77597761    rc = VMXReadVmcs32(VMX_VMCS_GUEST_CR0, &u32Val);                        AssertRC(rc);
    7760     Log(("VMX_VMCS_GUEST_CR0                         %#RX32\n", u32Val));
     7762    Log4(("VMX_VMCS_GUEST_CR0                         %#RX32\n", u32Val));
    77617763    rc = VMXReadVmcsHstN(VMX_VMCS_CTRL_CR0_MASK, &uHCReg);                  AssertRC(rc);
    7762     Log(("VMX_VMCS_CTRL_CR0_MASK                     %#RHr\n", uHCReg));
     7764    Log4(("VMX_VMCS_CTRL_CR0_MASK                     %#RHr\n", uHCReg));
    77637765    rc = VMXReadVmcsHstN(VMX_VMCS_CTRL_CR0_READ_SHADOW, &uHCReg);           AssertRC(rc);
    7764     Log(("VMX_VMCS_CTRL_CR4_READ_SHADOW              %#RHr\n", uHCReg));
      7766    Log4(("VMX_VMCS_CTRL_CR0_READ_SHADOW              %#RHr\n", uHCReg));  /* label fixed: value read is CR0_READ_SHADOW, not CR4 */
    77657767    rc = VMXReadVmcsHstN(VMX_VMCS_CTRL_CR4_MASK, &uHCReg);                  AssertRC(rc);
    7766     Log(("VMX_VMCS_CTRL_CR4_MASK                     %#RHr\n", uHCReg));
     7768    Log4(("VMX_VMCS_CTRL_CR4_MASK                     %#RHr\n", uHCReg));
    77677769    rc = VMXReadVmcsHstN(VMX_VMCS_CTRL_CR4_READ_SHADOW, &uHCReg);           AssertRC(rc);
    7768     Log(("VMX_VMCS_CTRL_CR4_READ_SHADOW              %#RHr\n", uHCReg));
     7770    Log4(("VMX_VMCS_CTRL_CR4_READ_SHADOW              %#RHr\n", uHCReg));
    77697771    rc = VMXReadVmcs64(VMX_VMCS64_CTRL_EPTP_FULL, &u64Val);                 AssertRC(rc);
    7770     Log(("VMX_VMCS64_CTRL_EPTP_FULL                  %#RX64\n", u64Val));
     7772    Log4(("VMX_VMCS64_CTRL_EPTP_FULL                  %#RX64\n", u64Val));
    77717773
    77727774    PVM pVM = pVCpu->CTX_SUFF(pVM);
     
    79037905    rc |= hmR0VmxSaveGuestSegmentRegs(pVCpu, pMixedCtx);
    79047906    AssertRCReturn(rc, rc);
    7905     Log(("ecx=%#RX32\n", pMixedCtx->ecx));
     7907    Log4(("ecx=%#RX32\n", pMixedCtx->ecx));
    79067908
    79077909    rc = EMInterpretWrmsr(pVM, pVCpu, CPUMCTX2CORE(pMixedCtx));
     
    80548056            {
    80558057                case 0: /* CR0 */
    8056                     Log(("CRX CR0 write rc=%d CR0=%#RX64\n", rc, pMixedCtx->cr0));
     8058                    Log4(("CRX CR0 write rc=%d CR0=%#RX64\n", rc, pMixedCtx->cr0));
    80578059                    pVCpu->hm.s.fContextUseFlags |= HM_CHANGED_GUEST_CR0;
    80588060                    break;
     
    80628064                case 3: /* CR3 */
    80638065                    Assert(!pVM->hm.s.fNestedPaging || !CPUMIsGuestPagingEnabledEx(pMixedCtx));
    8064                     Log(("CRX CR3 write rc=%d CR3=%#RX64\n", rc, pMixedCtx->cr3));
     8066                    Log4(("CRX CR3 write rc=%d CR3=%#RX64\n", rc, pMixedCtx->cr3));
    80658067                    pVCpu->hm.s.fContextUseFlags |= HM_CHANGED_GUEST_CR3;
    80668068                    break;
    80678069                case 4: /* CR4 */
    8068                     Log(("CRX CR4 write rc=%d CR4=%#RX64\n", rc, pMixedCtx->cr4));
     8070                    Log4(("CRX CR4 write rc=%d CR4=%#RX64\n", rc, pMixedCtx->cr4));
    80698071                    pVCpu->hm.s.fContextUseFlags |= HM_CHANGED_GUEST_CR4;
    80708072                    break;
     
    81018103            Assert(rc == VINF_SUCCESS || rc == VERR_EM_INTERPRETER);
    81028104            STAM_COUNTER_INC(&pVCpu->hm.s.StatExitCRxRead[VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification)]);
    8103             Log(("CRX CR%d Read access rc=%d\n", VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification), rc));
     8105            Log4(("CRX CR%d Read access rc=%d\n", VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification), rc));
    81048106            break;
    81058107        }
     
    81138115            pVCpu->hm.s.fContextUseFlags |= HM_CHANGED_GUEST_CR0;
    81148116            STAM_COUNTER_INC(&pVCpu->hm.s.StatExitClts);
    8115             Log(("CRX CLTS write rc=%d\n", rc));
     8117            Log4(("CRX CLTS write rc=%d\n", rc));
    81168118            break;
    81178119        }
     
    81258127                pVCpu->hm.s.fContextUseFlags |= HM_CHANGED_GUEST_CR0;
    81268128            STAM_COUNTER_INC(&pVCpu->hm.s.StatExitLmsw);
    8127             Log(("CRX LMSW write rc=%d\n", rc));
     8129            Log4(("CRX LMSW write rc=%d\n", rc));
    81288130            break;
    81298131        }
     
    81678169    /* EFER also required for longmode checks in EMInterpretDisasCurrent(), but it's always up-to-date. */
    81688170    AssertRCReturn(rc, rc);
    8169     Log(("CS:RIP=%04x:%#RX64\n", pMixedCtx->cs.Sel, pMixedCtx->rip));
     8171    Log4(("CS:RIP=%04x:%#RX64\n", pMixedCtx->cs.Sel, pMixedCtx->rip));
    81708172
    81718173    /* Refer Intel spec. 27-5. "Exit Qualifications for I/O Instructions" for the format. */
     
    83638365                    pVCpu->hm.s.Event.GCPtrFaultAddress = pMixedCtx->cr2;
    83648366                }
    8365                 Log(("Pending event on TaskSwitch uIntType=%#x uVector=%#x\n", uIntType, uVector));
     8367                Log4(("Pending event on TaskSwitch uIntType=%#x uVector=%#x\n", uIntType, uVector));
    83668368            }
    83678369        }
     
    84338435            GCPhys += VMX_EXIT_QUALIFICATION_APIC_ACCESS_OFFSET(pVmxTransient->uExitQualification);
    84348436            PVM pVM = pVCpu->CTX_SUFF(pVM);
    8435             Log(("ApicAccess uAccessType=%#x GCPhys=%#RGv Off=%#x\n", uAccessType, GCPhys,
      6437            Log4(("ApicAccess uAccessType=%#x GCPhys=%#RGp Off=%#x\n", uAccessType, GCPhys,  /* %RGp for guest-physical GCPhys */
    84368438                 VMX_EXIT_QUALIFICATION_APIC_ACCESS_OFFSET(pVmxTransient->uExitQualification)));
    84378439
     
    84408442                                                  CPUMCTX2CORE(pMixedCtx), GCPhys);
    84418443            rc = VBOXSTRICTRC_VAL(rc2);
    8442             Log(("ApicAccess rc=%d\n", rc));
     8444            Log4(("ApicAccess rc=%d\n", rc));
    84438445            if (   rc == VINF_SUCCESS
    84448446                || rc == VERR_PAGE_TABLE_NOT_PRESENT
     
    84538455
    84548456        default:
    8455             Log(("ApicAccess uAccessType=%#x\n", uAccessType));
     8457            Log4(("ApicAccess uAccessType=%#x\n", uAccessType));
    84568458            rc = VINF_EM_RAW_EMULATE_INSTR;
    84578459            break;
     
    85808582    VBOXSTRICTRC rc2 = PGMR0Trap0eHandlerNPMisconfig(pVM, pVCpu, PGMMODE_EPT, CPUMCTX2CORE(pMixedCtx), GCPhys, UINT32_MAX);
    85818583    rc = VBOXSTRICTRC_VAL(rc2);
    8582     Log(("EPT misconfig at %#RGv RIP=%#RX64 rc=%d\n", GCPhys, pMixedCtx->rip, rc));
      6584    Log4(("EPT misconfig at %#RGp RIP=%#RX64 rc=%d\n", GCPhys, pMixedCtx->rip, rc));  /* %RGp for guest-physical GCPhys */
    85838585    if (   rc == VINF_SUCCESS
    85848586        || rc == VERR_PAGE_TABLE_NOT_PRESENT
     
    86358637    TRPMAssertXcptPF(pVCpu, GCPhys, uErrorCode);
    86368638
    8637     Log(("EPT violation %#x at %#RX64 ErrorCode %#x CS:EIP=%04x:%#RX64\n", pVmxTransient->uExitQualification, GCPhys,
     8639    Log4(("EPT violation %#x at %#RX64 ErrorCode %#x CS:EIP=%04x:%#RX64\n", pVmxTransient->uExitQualification, GCPhys,
    86388640         uErrorCode, pMixedCtx->cs.Sel, pMixedCtx->rip));
    86398641
     
    86558657    }
    86568658
    8657     Log(("EPT return to ring-3 rc=%d\n"));
     8659    Log4(("EPT return to ring-3 rc=%d\n", rc));  /* fixed: format string had %d but no matching argument */
    86588660    return rc;
    86598661}
     
    88268828        rc |= hmR0VmxSaveGuestState(pVCpu, pMixedCtx);
    88278829        AssertRCReturn(rc, rc);
    8828         Log(("#GP Gst: RIP %#RX64 ErrorCode=%#x CR0=%#RX64 CPL=%u\n", pMixedCtx->rip, pVmxTransient->uExitIntrErrorCode,
     8830        Log4(("#GP Gst: RIP %#RX64 ErrorCode=%#x CR0=%#RX64 CPL=%u\n", pMixedCtx->rip, pVmxTransient->uExitIntrErrorCode,
    88298831             pMixedCtx->cr0, CPUMGetGuestCPL(pVCpu)));
    88308832        hmR0VmxSetPendingEvent(pVCpu, VMX_VMCS_CTRL_ENTRY_IRQ_INFO_FROM_EXIT_INT_INFO(pVmxTransient->uExitIntrInfo),
     
    88538855        rc = VINF_SUCCESS;
    88548856        Assert(cbOp == pDis->cbInstr);
    8855         Log(("#GP Disas OpCode=%u CS:EIP %04x:%#RX64\n", pDis->pCurInstr->uOpcode, pMixedCtx->cs.Sel, pMixedCtx->rip));
     8857        Log4(("#GP Disas OpCode=%u CS:EIP %04x:%#RX64\n", pDis->pCurInstr->uOpcode, pMixedCtx->cs.Sel, pMixedCtx->rip));
    88568858        switch (pDis->pCurInstr->uOpcode)
    88578859        {
     
    88818883            case OP_POPF:
    88828884            {
    8883                 Log(("POPF CS:RIP %04x:%#RX64\n", pMixedCtx->cs.Sel, pMixedCtx->rip));
     8885                Log4(("POPF CS:RIP %04x:%#RX64\n", pMixedCtx->cs.Sel, pMixedCtx->rip));
    88848886                uint32_t cbParm = 0;
    88858887                uint32_t uMask  = 0;
     
    89118913                    break;
    89128914                }
    8913                 Log(("POPF %x -> %#RX64 mask=%x RIP=%#RX64\n", uEflags.u, pMixedCtx->rsp, uMask, pMixedCtx->rip));
     8915                Log4(("POPF %x -> %#RX64 mask=%x RIP=%#RX64\n", uEflags.u, pMixedCtx->rsp, uMask, pMixedCtx->rip));
    89148916                pMixedCtx->eflags.u32 =   (pMixedCtx->eflags.u32 & ~(X86_EFL_POPF_BITS & uMask))
    89158917                                        | (uEflags.u32 & X86_EFL_POPF_BITS & uMask);
     
    89608962                    break;
    89618963                }
    8962                 Log(("PUSHF %x -> %#RGv\n", uEflags.u, GCPtrStack));
     8964                Log4(("PUSHF %x -> %#RGv\n", uEflags.u, GCPtrStack));
    89638965                pMixedCtx->esp               -= cbParm;
    89648966                pMixedCtx->esp               &= uMask;
     
    90009002                pVCpu->hm.s.fContextUseFlags |=   HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_SEGMENT_REGS | HM_CHANGED_GUEST_RSP
    90019003                                                | HM_CHANGED_GUEST_RFLAGS;
    9002                 Log(("IRET %#RX32 to %04x:%x\n", GCPtrStack, pMixedCtx->cs.Sel, pMixedCtx->ip));
     9004                Log4(("IRET %#RX32 to %04x:%x\n", GCPtrStack, pMixedCtx->cs.Sel, pMixedCtx->ip));
    90039005                STAM_COUNTER_INC(&pVCpu->hm.s.StatExitIret);
    90049006                break;
     
    90299031                rc = VBOXSTRICTRC_VAL(rc2);
    90309032                pVCpu->hm.s.fContextUseFlags |= HM_CHANGED_ALL_GUEST;
    9031                 Log(("#GP rc=%Rrc\n", rc));
     9033                Log4(("#GP rc=%Rrc\n", rc));
    90329034                break;
    90339035            }
     
    90949096            pVCpu->hm.s.Event.fPending = false;     /* A vectoring #PF. */
    90959097            hmR0VmxSetPendingXcptDF(pVCpu, pMixedCtx);
    9096             Log(("Pending #DF due to vectoring #PF. NP\n"));
     9098            Log4(("Pending #DF due to vectoring #PF. NP\n"));
    90979099        }
    90989100        STAM_COUNTER_INC(&pVCpu->hm.s.StatExitGuestPF);
     
    91379139    AssertRCReturn(rc, rc);
    91389140
    9139     Log(("#PF: cr2=%#RX64 cs:rip=%#04x:%#RX64 uErrCode %#RX32 cr3=%#RX64\n", pVmxTransient->uExitQualification, pMixedCtx->cs.Sel,
    9140          pMixedCtx->rip, pVmxTransient->uExitIntrErrorCode, pMixedCtx->cr3));
     9141    Log4(("#PF: cr2=%#RX64 cs:rip=%#04x:%#RX64 uErrCode %#RX32 cr3=%#RX64\n", pVmxTransient->uExitQualification,
     9142          pMixedCtx->cs.Sel, pMixedCtx->rip, pVmxTransient->uExitIntrErrorCode, pMixedCtx->cr3));
    91419143
    91429144    TRPMAssertXcptPF(pVCpu, pVmxTransient->uExitQualification, (RTGCUINT)pVmxTransient->uExitIntrErrorCode);
     
    91449146                          (RTGCPTR)pVmxTransient->uExitQualification);
    91459147
    9146     Log(("#PF: rc=%Rrc\n", rc));
     9148    Log4(("#PF: rc=%Rrc\n", rc));
    91479149    if (rc == VINF_SUCCESS)
    91489150    {
     
    91749176            pVCpu->hm.s.Event.fPending = false;     /* Clear pending #PF for replace it with #DF. */
    91759177            hmR0VmxSetPendingXcptDF(pVCpu, pMixedCtx);
    9176             Log(("#PF: Pending #DF due to vectoring #PF\n"));
     9178            Log4(("#PF: Pending #DF due to vectoring #PF\n"));
    91779179        }
    91789180
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette