Changeset 13825 in vbox
- Timestamp: Nov 5, 2008 1:12:46 AM (16 years ago)
- svn:sync-xref-src-repo-rev: 38816
- Location: trunk/src/VBox/VMM
- Files: 8 edited
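Every hunk in this changeset makes the same mechanical change: the old IPRT format specifier %VX64 in Log/LogRel statements is replaced with %RX64, the specifier for printing a 64-bit value in hexadecimal. As a minimal sketch of the resulting style (the helper function and header choices below are illustrative, not part of the changeset):

    #include <iprt/types.h> /* uint64_t */
    #include <VBox/log.h>   /* Log / LogRel macros */

    /* Illustrative only: log a 64-bit MSR value the way the changed
     * statements do after r13825, i.e. with %RX64 rather than %VX64. */
    static void logFeatureControl(uint64_t uFeatureCtrl)
    {
        LogRel(("HWACCM: VMX MSR_IA32_FEATURE_CONTROL=%RX64\n", uFeatureCtrl));
    }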
trunk/src/VBox/VMM/HWACCM.cpp
(diff: r13824 to r13825)

@@ 270-274 @@
  {
  LogRel(("HWACCM: No VMX or SVM CPU extension found. Reason %Rrc\n", pVM->hwaccm.s.lLastError));
- LogRel(("HWACCM: VMX MSR_IA32_FEATURE_CONTROL=%VX64\n", pVM->hwaccm.s.vmx.msr.feature_ctrl));
+ LogRel(("HWACCM: VMX MSR_IA32_FEATURE_CONTROL=%RX64\n", pVM->hwaccm.s.vmx.msr.feature_ctrl));
  return VINF_SUCCESS;
  }

@@ 319-324 @@

  LogRel(("HWACCM: Host CR4=%08X\n", pVM->hwaccm.s.vmx.hostCR4));
- LogRel(("HWACCM: MSR_IA32_FEATURE_CONTROL = %VX64\n", pVM->hwaccm.s.vmx.msr.feature_ctrl));
- LogRel(("HWACCM: MSR_IA32_VMX_BASIC_INFO = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_basic_info));
+ LogRel(("HWACCM: MSR_IA32_FEATURE_CONTROL = %RX64\n", pVM->hwaccm.s.vmx.msr.feature_ctrl));
+ LogRel(("HWACCM: MSR_IA32_VMX_BASIC_INFO = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_basic_info));
  LogRel(("HWACCM: VMCS id = %x\n", MSR_IA32_VMX_BASIC_INFO_VMCS_ID(pVM->hwaccm.s.vmx.msr.vmx_basic_info)));
  LogRel(("HWACCM: VMCS size = %x\n", MSR_IA32_VMX_BASIC_INFO_VMCS_SIZE(pVM->hwaccm.s.vmx.msr.vmx_basic_info)));

@@ 327-331 @@
  LogRel(("HWACCM: Dual monitor treatment = %d\n", MSR_IA32_VMX_BASIC_INFO_VMCS_DUAL_MON(pVM->hwaccm.s.vmx.msr.vmx_basic_info)));

- LogRel(("HWACCM: MSR_IA32_VMX_PINBASED_CTLS = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_pin_ctls.u));
+ LogRel(("HWACCM: MSR_IA32_VMX_PINBASED_CTLS = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_pin_ctls.u));
  val = pVM->hwaccm.s.vmx.msr.vmx_pin_ctls.n.allowed1;
  if (val & VMX_VMCS_CTRL_PIN_EXEC_CONTROLS_EXT_INT_EXIT)

@@ 339-343 @@
  LogRel(("HWACCM: VMX_VMCS_CTRL_PIN_EXEC_CONTROLS_NMI_EXIT *must* be set\n"));

- LogRel(("HWACCM: MSR_IA32_VMX_PROCBASED_CTLS = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_proc_ctls.u));
+ LogRel(("HWACCM: MSR_IA32_VMX_PROCBASED_CTLS = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_proc_ctls.u));
  val = pVM->hwaccm.s.vmx.msr.vmx_proc_ctls.n.allowed1;
  if (val & VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_IRQ_WINDOW_EXIT)

@@ 426-430 @@
  if (pVM->hwaccm.s.vmx.msr.vmx_proc_ctls.n.allowed1 & VMX_VMCS_CTRL_PROC_EXEC_USE_SECONDARY_EXEC_CTRL)
  {
- LogRel(("HWACCM: MSR_IA32_VMX_PROCBASED_CTLS2 = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_proc_ctls2.u));
+ LogRel(("HWACCM: MSR_IA32_VMX_PROCBASED_CTLS2 = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_proc_ctls2.u));
  val = pVM->hwaccm.s.vmx.msr.vmx_proc_ctls2.n.allowed1;
  if (val & VMX_VMCS_CTRL_PROC_EXEC2_VIRT_APIC)

@@ 448-452 @@
  }

- LogRel(("HWACCM: MSR_IA32_VMX_ENTRY_CTLS = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_entry.u));
+ LogRel(("HWACCM: MSR_IA32_VMX_ENTRY_CTLS = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_entry.u));
  val = pVM->hwaccm.s.vmx.msr.vmx_entry.n.allowed1;
  if (val & VMX_VMCS_CTRL_ENTRY_CONTROLS_LOAD_DEBUG)

@@ 480-484 @@
  LogRel(("HWACCM: VMX_VMCS_CTRL_ENTRY_CONTROLS_LOAD_GUEST_EFER_MSR *must* be set\n"));

- LogRel(("HWACCM: MSR_IA32_VMX_EXIT_CTLS = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_exit.u));
+ LogRel(("HWACCM: MSR_IA32_VMX_EXIT_CTLS = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_exit.u));
  val = pVM->hwaccm.s.vmx.msr.vmx_exit.n.allowed1;
  if (val & VMX_VMCS_CTRL_EXIT_CONTROLS_SAVE_DEBUG)

@@ 518-522 @@
  if (pVM->hwaccm.s.vmx.msr.vmx_eptcaps)
  {
- LogRel(("HWACCM: MSR_IA32_VMX_EPT_VPID_CAPS = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_eptcaps));
+ LogRel(("HWACCM: MSR_IA32_VMX_EPT_VPID_CAPS = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_eptcaps));

  if (pVM->hwaccm.s.vmx.msr.vmx_eptcaps & MSR_IA32_VMX_EPT_CAPS_RWX_X_ONLY)

@@ 574-578 @@
  }

- LogRel(("HWACCM: MSR_IA32_VMX_MISC = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_misc));
+ LogRel(("HWACCM: MSR_IA32_VMX_MISC = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_misc));
  LogRel(("HWACCM: MSR_IA32_VMX_MISC_ACTIVITY_STATES %x\n", MSR_IA32_VMX_MISC_ACTIVITY_STATES(pVM->hwaccm.s.vmx.msr.vmx_misc)));
  LogRel(("HWACCM: MSR_IA32_VMX_MISC_CR3_TARGET %x\n", MSR_IA32_VMX_MISC_CR3_TARGET(pVM->hwaccm.s.vmx.msr.vmx_misc)));

@@ 580-588 @@
  LogRel(("HWACCM: MSR_IA32_VMX_MISC_MSEG_ID %x\n", MSR_IA32_VMX_MISC_MSEG_ID(pVM->hwaccm.s.vmx.msr.vmx_misc)));

- LogRel(("HWACCM: MSR_IA32_VMX_CR0_FIXED0 = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_cr0_fixed0));
- LogRel(("HWACCM: MSR_IA32_VMX_CR0_FIXED1 = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_cr0_fixed1));
- LogRel(("HWACCM: MSR_IA32_VMX_CR4_FIXED0 = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_cr4_fixed0));
- LogRel(("HWACCM: MSR_IA32_VMX_CR4_FIXED1 = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_cr4_fixed1));
- LogRel(("HWACCM: MSR_IA32_VMX_VMCS_ENUM = %VX64\n", pVM->hwaccm.s.vmx.msr.vmx_vmcs_enum));
+ LogRel(("HWACCM: MSR_IA32_VMX_CR0_FIXED0 = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_cr0_fixed0));
+ LogRel(("HWACCM: MSR_IA32_VMX_CR0_FIXED1 = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_cr0_fixed1));
+ LogRel(("HWACCM: MSR_IA32_VMX_CR4_FIXED0 = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_cr4_fixed0));
+ LogRel(("HWACCM: MSR_IA32_VMX_CR4_FIXED1 = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_cr4_fixed1));
+ LogRel(("HWACCM: MSR_IA32_VMX_VMCS_ENUM = %RX64\n", pVM->hwaccm.s.vmx.msr.vmx_vmcs_enum));

  LogRel(("HWACCM: VMCS physaddr = %RHp\n", pVM->hwaccm.s.vmx.pVMCSPhys));
trunk/src/VBox/VMM/PATM/CSAM.cpp
(diff: r13824 to r13825)

@@ 1803-1807 @@
  // AssertMsg( (rc == VERR_PAGE_NOT_PRESENT || rc == VERR_PAGE_TABLE_NOT_PRESENT)
  // || !(fPageShw & X86_PTE_RW)
- // || (pPageRec->page.GCPhys == 0), ("Shadow page flags for %RRv (%RHp) aren't readonly (%VX64)!!\n", pPageAddrGC, GCPhys, fPageShw));
+ // || (pPageRec->page.GCPhys == 0), ("Shadow page flags for %RRv (%RHp) aren't readonly (%RX64)!!\n", pPageAddrGC, GCPhys, fPageShw));
  }
  #endif
trunk/src/VBox/VMM/VMMAll/EMAll.cpp
(diff: r13823 to r13825)

@@ 1125-1129 @@
  }

- Log2(("emInterpretLockBitTest %s: GCPtrPar1=%RGv imm=%VX64 CF=%d\n", emGetMnemonic(pCpu), GCPtrPar1, ValPar2, !!(eflags & X86_EFL_CF)));
+ Log2(("emInterpretLockBitTest %s: GCPtrPar1=%RGv imm=%RX64 CF=%d\n", emGetMnemonic(pCpu), GCPtrPar1, ValPar2, !!(eflags & X86_EFL_CF)));

  /* Update guest's eflags and finish. */

@@ 1951-1955 @@
  if(RT_SUCCESS(rc))
  {
- LogFlow(("MOV_CR: gen32=%d CR=%d val=%VX64\n", DestRegGen, SrcRegCrx, val64));
+ LogFlow(("MOV_CR: gen32=%d CR=%d val=%RX64\n", DestRegGen, SrcRegCrx, val64));
  return VINF_SUCCESS;
  }

@@ 2000-2004 @@

  /** @todo Clean up this mess. */
- LogFlow(("EMInterpretCRxWrite at %RGv CR%d <- %VX64\n", (RTGCPTR)pRegFrame->rip, DestRegCrx, val));
+ LogFlow(("EMInterpretCRxWrite at %RGv CR%d <- %RX64\n", (RTGCPTR)pRegFrame->rip, DestRegCrx, val));
  switch (DestRegCrx)
  {

@@ 2679-2683 @@
  break;
  }
- Log(("EMInterpretRdmsr %s (%x) -> val=%VX64\n", emMSRtoString(pRegFrame->ecx), pRegFrame->ecx, val));
+ Log(("EMInterpretRdmsr %s (%x) -> val=%RX64\n", emMSRtoString(pRegFrame->ecx), pRegFrame->ecx, val));
  if (rc == VINF_SUCCESS)
  {

@@ 2726-2730 @@

  val = RT_MAKE_U64(pRegFrame->eax, pRegFrame->edx);
- Log(("EMInterpretWrmsr %s (%x) val=%VX64\n", emMSRtoString(pRegFrame->ecx), pRegFrame->ecx, val));
+ Log(("EMInterpretWrmsr %s (%x) val=%RX64\n", emMSRtoString(pRegFrame->ecx), pRegFrame->ecx, val));
  switch (pRegFrame->ecx)
  {
trunk/src/VBox/VMM/VMMAll/PGMAll.cpp
(diff: r13824 to r13825)

@@ 1457-1461 @@
  if (fGlobal)
  VM_FF_SET(pVM, VM_FF_PGM_SYNC_CR3);
- LogFlow(("PGMFlushTLB: cr3=%VX64 OldCr3=%VX64 fGlobal=%d\n", cr3, pVM->pgm.s.GCPhysCR3, fGlobal));
+ LogFlow(("PGMFlushTLB: cr3=%RX64 OldCr3=%RX64 fGlobal=%d\n", cr3, pVM->pgm.s.GCPhysCR3, fGlobal));

  /*

@@ 1524-1528 @@
  VMMDECL(int) PGMUpdateCR3(PVM pVM, uint64_t cr3)
  {
- LogFlow(("PGMUpdateCR3: cr3=%VX64 OldCr3=%VX64\n", cr3, pVM->pgm.s.GCPhysCR3));
+ LogFlow(("PGMUpdateCR3: cr3=%RX64 OldCr3=%RX64\n", cr3, pVM->pgm.s.GCPhysCR3));

  /* We assume we're only called in nested paging mode. */

@@ 1588-1592 @@
  if (!(cr4 & X86_CR4_PGE))
  fGlobal = true;
- LogFlow(("PGMSyncCR3: cr0=%VX64 cr3=%VX64 cr4=%VX64 fGlobal=%d[%d,%d]\n", cr0, cr3, cr4, fGlobal,
+ LogFlow(("PGMSyncCR3: cr0=%RX64 cr3=%RX64 cr4=%RX64 fGlobal=%d[%d,%d]\n", cr0, cr3, cr4, fGlobal,
  VM_FF_ISSET(pVM, VM_FF_PGM_SYNC_CR3), VM_FF_ISSET(pVM, VM_FF_PGM_SYNC_CR3_NON_GLOBAL)));
trunk/src/VBox/VMM/VMMAll/PGMAllBth.h
(diff: r13824 to r13825)

@@ 759-763 @@
  uint64_t fPageShw;
  rc = PGMShwGetPage(pVM, pvFault, &fPageShw, NULL);
- AssertMsg(RT_SUCCESS(rc) && fPageShw & X86_PTE_RW, ("rc=%Rrc fPageShw=%VX64\n", rc, fPageShw));
+ AssertMsg(RT_SUCCESS(rc) && fPageShw & X86_PTE_RW, ("rc=%Rrc fPageShw=%RX64\n", rc, fPageShw));
  # endif /* VBOX_STRICT */
  STAM_PROFILE_STOP(&pVM->pgm.s.StatRZTrap0eTimeOutOfSync, c);

@@ 1242-1246 @@
  || (PdeDst.u & PGM_PDFLAGS_TRACK_DIRTY)))
  {
- LogFlow(("Skipping flush for big page containing %RGv (PD=%X .u=%VX64)-> nothing has changed!\n", GCPtrPage, iPDSrc, PdeSrc.u));
+ LogFlow(("Skipping flush for big page containing %RGv (PD=%X .u=%RX64)-> nothing has changed!\n", GCPtrPage, iPDSrc, PdeSrc.u));
  STAM_COUNTER_INC(&pVM->pgm.s.CTX_MID_Z(Stat,InvalidatePage4MBPagesSkip));
  return VINF_SUCCESS;
trunk/src/VBox/VMM/VMMR0/HWACCMR0.cpp
(diff: r13816 to r13825)

@@ 1119-1123 @@
  uint64_t u32Base = X86DESC64_BASE(*pDesc);

- Log(("%s %04x - %VX64 %VX64 - base=%VX64 limit=%08x dpl=%d %s\n", pszMsg,
+ Log(("%s %04x - %RX64 %RX64 - base=%RX64 limit=%08x dpl=%d %s\n", pszMsg,
  Sel, pDesc->au64[0], pDesc->au64[1], u32Base, u32Limit, pDesc->Gen.u2Dpl, szMsg));
  # else
trunk/src/VBox/VMM/VMMR0/HWSVMR0.cpp
(diff: r13824 to r13825)

@@ 1045-1051 @@
  Log(("ctrl.u32InterceptCtrl1 %x\n", pVMCB->ctrl.u32InterceptCtrl1));
  Log(("ctrl.u32InterceptCtrl2 %x\n", pVMCB->ctrl.u32InterceptCtrl2));
- Log(("ctrl.u64IOPMPhysAddr %VX64\n", pVMCB->ctrl.u64IOPMPhysAddr));
- Log(("ctrl.u64MSRPMPhysAddr %VX64\n", pVMCB->ctrl.u64MSRPMPhysAddr));
- Log(("ctrl.u64TSCOffset %VX64\n", pVMCB->ctrl.u64TSCOffset));
+ Log(("ctrl.u64IOPMPhysAddr %RX64\n", pVMCB->ctrl.u64IOPMPhysAddr));
+ Log(("ctrl.u64MSRPMPhysAddr %RX64\n", pVMCB->ctrl.u64MSRPMPhysAddr));
+ Log(("ctrl.u64TSCOffset %RX64\n", pVMCB->ctrl.u64TSCOffset));

  Log(("ctrl.TLBCtrl.u32ASID %x\n", pVMCB->ctrl.TLBCtrl.n.u32ASID));

@@ 1065-1072 @@
  Log(("ctrl.IntCtrl.u24Reserved %x\n", pVMCB->ctrl.IntCtrl.n.u24Reserved));

- Log(("ctrl.u64IntShadow %VX64\n", pVMCB->ctrl.u64IntShadow));
- Log(("ctrl.u64ExitCode %VX64\n", pVMCB->ctrl.u64ExitCode));
- Log(("ctrl.u64ExitInfo1 %VX64\n", pVMCB->ctrl.u64ExitInfo1));
- Log(("ctrl.u64ExitInfo2 %VX64\n", pVMCB->ctrl.u64ExitInfo2));
+ Log(("ctrl.u64IntShadow %RX64\n", pVMCB->ctrl.u64IntShadow));
+ Log(("ctrl.u64ExitCode %RX64\n", pVMCB->ctrl.u64ExitCode));
+ Log(("ctrl.u64ExitInfo1 %RX64\n", pVMCB->ctrl.u64ExitInfo1));
+ Log(("ctrl.u64ExitInfo2 %RX64\n", pVMCB->ctrl.u64ExitInfo2));
  Log(("ctrl.ExitIntInfo.u8Vector %x\n", pVMCB->ctrl.ExitIntInfo.n.u8Vector));
  Log(("ctrl.ExitIntInfo.u3Type %x\n", pVMCB->ctrl.ExitIntInfo.n.u3Type));

@@ 1075-1079 @@
  Log(("ctrl.ExitIntInfo.u1Valid %x\n", pVMCB->ctrl.ExitIntInfo.n.u1Valid));
  Log(("ctrl.ExitIntInfo.u32ErrorCode %x\n", pVMCB->ctrl.ExitIntInfo.n.u32ErrorCode));
- Log(("ctrl.NestedPaging %VX64\n", pVMCB->ctrl.NestedPaging.au64));
+ Log(("ctrl.NestedPaging %RX64\n", pVMCB->ctrl.NestedPaging.au64));
  Log(("ctrl.EventInject.u8Vector %x\n", pVMCB->ctrl.EventInject.n.u8Vector));
  Log(("ctrl.EventInject.u3Type %x\n", pVMCB->ctrl.EventInject.n.u3Type));

@@ 1083-1155 @@
  Log(("ctrl.EventInject.u32ErrorCode %x\n", pVMCB->ctrl.EventInject.n.u32ErrorCode));

- Log(("ctrl.u64NestedPagingCR3 %VX64\n", pVMCB->ctrl.u64NestedPagingCR3));
- Log(("ctrl.u64LBRVirt %VX64\n", pVMCB->ctrl.u64LBRVirt));
+ Log(("ctrl.u64NestedPagingCR3 %RX64\n", pVMCB->ctrl.u64NestedPagingCR3));
+ Log(("ctrl.u64LBRVirt %RX64\n", pVMCB->ctrl.u64LBRVirt));

  Log(("guest.CS.u16Sel %04X\n", pVMCB->guest.CS.u16Sel));
  Log(("guest.CS.u16Attr %04X\n", pVMCB->guest.CS.u16Attr));
  Log(("guest.CS.u32Limit %X\n", pVMCB->guest.CS.u32Limit));
- Log(("guest.CS.u64Base %VX64\n", pVMCB->guest.CS.u64Base));
+ Log(("guest.CS.u64Base %RX64\n", pVMCB->guest.CS.u64Base));
  Log(("guest.DS.u16Sel %04X\n", pVMCB->guest.DS.u16Sel));
  Log(("guest.DS.u16Attr %04X\n", pVMCB->guest.DS.u16Attr));
  Log(("guest.DS.u32Limit %X\n", pVMCB->guest.DS.u32Limit));
- Log(("guest.DS.u64Base %VX64\n", pVMCB->guest.DS.u64Base));
+ Log(("guest.DS.u64Base %RX64\n", pVMCB->guest.DS.u64Base));
  Log(("guest.ES.u16Sel %04X\n", pVMCB->guest.ES.u16Sel));
  Log(("guest.ES.u16Attr %04X\n", pVMCB->guest.ES.u16Attr));
  Log(("guest.ES.u32Limit %X\n", pVMCB->guest.ES.u32Limit));
- Log(("guest.ES.u64Base %VX64\n", pVMCB->guest.ES.u64Base));
+ Log(("guest.ES.u64Base %RX64\n", pVMCB->guest.ES.u64Base));
  Log(("guest.FS.u16Sel %04X\n", pVMCB->guest.FS.u16Sel));
  Log(("guest.FS.u16Attr %04X\n", pVMCB->guest.FS.u16Attr));
  Log(("guest.FS.u32Limit %X\n", pVMCB->guest.FS.u32Limit));
- Log(("guest.FS.u64Base %VX64\n", pVMCB->guest.FS.u64Base));
+ Log(("guest.FS.u64Base %RX64\n", pVMCB->guest.FS.u64Base));
  Log(("guest.GS.u16Sel %04X\n", pVMCB->guest.GS.u16Sel));
  Log(("guest.GS.u16Attr %04X\n", pVMCB->guest.GS.u16Attr));
  Log(("guest.GS.u32Limit %X\n", pVMCB->guest.GS.u32Limit));
- Log(("guest.GS.u64Base %VX64\n", pVMCB->guest.GS.u64Base));
+ Log(("guest.GS.u64Base %RX64\n", pVMCB->guest.GS.u64Base));

  Log(("guest.GDTR.u32Limit %X\n", pVMCB->guest.GDTR.u32Limit));
- Log(("guest.GDTR.u64Base %VX64\n", pVMCB->guest.GDTR.u64Base));
+ Log(("guest.GDTR.u64Base %RX64\n", pVMCB->guest.GDTR.u64Base));

  Log(("guest.LDTR.u16Sel %04X\n", pVMCB->guest.LDTR.u16Sel));
  Log(("guest.LDTR.u16Attr %04X\n", pVMCB->guest.LDTR.u16Attr));
  Log(("guest.LDTR.u32Limit %X\n", pVMCB->guest.LDTR.u32Limit));
- Log(("guest.LDTR.u64Base %VX64\n", pVMCB->guest.LDTR.u64Base));
+ Log(("guest.LDTR.u64Base %RX64\n", pVMCB->guest.LDTR.u64Base));

  Log(("guest.IDTR.u32Limit %X\n", pVMCB->guest.IDTR.u32Limit));
- Log(("guest.IDTR.u64Base %VX64\n", pVMCB->guest.IDTR.u64Base));
+ Log(("guest.IDTR.u64Base %RX64\n", pVMCB->guest.IDTR.u64Base));

  Log(("guest.TR.u16Sel %04X\n", pVMCB->guest.TR.u16Sel));
  Log(("guest.TR.u16Attr %04X\n", pVMCB->guest.TR.u16Attr));
  Log(("guest.TR.u32Limit %X\n", pVMCB->guest.TR.u32Limit));
- Log(("guest.TR.u64Base %VX64\n", pVMCB->guest.TR.u64Base));
+ Log(("guest.TR.u64Base %RX64\n", pVMCB->guest.TR.u64Base));

  Log(("guest.u8CPL %X\n", pVMCB->guest.u8CPL));
- Log(("guest.u64CR0 %VX64\n", pVMCB->guest.u64CR0));
- Log(("guest.u64CR2 %VX64\n", pVMCB->guest.u64CR2));
- Log(("guest.u64CR3 %VX64\n", pVMCB->guest.u64CR3));
- Log(("guest.u64CR4 %VX64\n", pVMCB->guest.u64CR4));
- Log(("guest.u64DR6 %VX64\n", pVMCB->guest.u64DR6));
- Log(("guest.u64DR7 %VX64\n", pVMCB->guest.u64DR7));
-
- Log(("guest.u64RIP %VX64\n", pVMCB->guest.u64RIP));
- Log(("guest.u64RSP %VX64\n", pVMCB->guest.u64RSP));
- Log(("guest.u64RAX %VX64\n", pVMCB->guest.u64RAX));
- Log(("guest.u64RFlags %VX64\n", pVMCB->guest.u64RFlags));
-
- Log(("guest.u64SysEnterCS %VX64\n", pVMCB->guest.u64SysEnterCS));
- Log(("guest.u64SysEnterEIP %VX64\n", pVMCB->guest.u64SysEnterEIP));
- Log(("guest.u64SysEnterESP %VX64\n", pVMCB->guest.u64SysEnterESP));
-
- Log(("guest.u64EFER %VX64\n", pVMCB->guest.u64EFER));
- Log(("guest.u64STAR %VX64\n", pVMCB->guest.u64STAR));
- Log(("guest.u64LSTAR %VX64\n", pVMCB->guest.u64LSTAR));
- Log(("guest.u64CSTAR %VX64\n", pVMCB->guest.u64CSTAR));
- Log(("guest.u64SFMASK %VX64\n", pVMCB->guest.u64SFMASK));
- Log(("guest.u64KernelGSBase %VX64\n", pVMCB->guest.u64KernelGSBase));
- Log(("guest.u64GPAT %VX64\n", pVMCB->guest.u64GPAT));
- Log(("guest.u64DBGCTL %VX64\n", pVMCB->guest.u64DBGCTL));
- Log(("guest.u64BR_FROM %VX64\n", pVMCB->guest.u64BR_FROM));
- Log(("guest.u64BR_TO %VX64\n", pVMCB->guest.u64BR_TO));
- Log(("guest.u64LASTEXCPFROM %VX64\n", pVMCB->guest.u64LASTEXCPFROM));
- Log(("guest.u64LASTEXCPTO %VX64\n", pVMCB->guest.u64LASTEXCPTO));
+ Log(("guest.u64CR0 %RX64\n", pVMCB->guest.u64CR0));
+ Log(("guest.u64CR2 %RX64\n", pVMCB->guest.u64CR2));
+ Log(("guest.u64CR3 %RX64\n", pVMCB->guest.u64CR3));
+ Log(("guest.u64CR4 %RX64\n", pVMCB->guest.u64CR4));
+ Log(("guest.u64DR6 %RX64\n", pVMCB->guest.u64DR6));
+ Log(("guest.u64DR7 %RX64\n", pVMCB->guest.u64DR7));
+
+ Log(("guest.u64RIP %RX64\n", pVMCB->guest.u64RIP));
+ Log(("guest.u64RSP %RX64\n", pVMCB->guest.u64RSP));
+ Log(("guest.u64RAX %RX64\n", pVMCB->guest.u64RAX));
+ Log(("guest.u64RFlags %RX64\n", pVMCB->guest.u64RFlags));
+
+ Log(("guest.u64SysEnterCS %RX64\n", pVMCB->guest.u64SysEnterCS));
+ Log(("guest.u64SysEnterEIP %RX64\n", pVMCB->guest.u64SysEnterEIP));
+ Log(("guest.u64SysEnterESP %RX64\n", pVMCB->guest.u64SysEnterESP));
+
+ Log(("guest.u64EFER %RX64\n", pVMCB->guest.u64EFER));
+ Log(("guest.u64STAR %RX64\n", pVMCB->guest.u64STAR));
+ Log(("guest.u64LSTAR %RX64\n", pVMCB->guest.u64LSTAR));
+ Log(("guest.u64CSTAR %RX64\n", pVMCB->guest.u64CSTAR));
+ Log(("guest.u64SFMASK %RX64\n", pVMCB->guest.u64SFMASK));
+ Log(("guest.u64KernelGSBase %RX64\n", pVMCB->guest.u64KernelGSBase));
+ Log(("guest.u64GPAT %RX64\n", pVMCB->guest.u64GPAT));
+ Log(("guest.u64DBGCTL %RX64\n", pVMCB->guest.u64DBGCTL));
+ Log(("guest.u64BR_FROM %RX64\n", pVMCB->guest.u64BR_FROM));
+ Log(("guest.u64BR_TO %RX64\n", pVMCB->guest.u64BR_TO));
+ Log(("guest.u64LASTEXCPFROM %RX64\n", pVMCB->guest.u64LASTEXCPFROM));
+ Log(("guest.u64LASTEXCPTO %RX64\n", pVMCB->guest.u64LASTEXCPTO));

  #endif

@@ 1227-1231 @@
  && pVMCB->ctrl.ExitIntInfo.n.u3Type != SVM_EVENT_SOFTWARE_INT /* we don't care about 'int xx' as the instruction will be restarted. */)
  {
- Log(("Pending inject %VX64 at %RGv exit=%08x\n", pVM->hwaccm.s.Event.intInfo, (RTGCPTR)pCtx->rip, exitCode));
+ Log(("Pending inject %RX64 at %RGv exit=%08x\n", pVM->hwaccm.s.Event.intInfo, (RTGCPTR)pCtx->rip, exitCode));

  #ifdef LOG_ENABLED
trunk/src/VBox/VMM/VMMR0/HWVMXR0.cpp
(diff: r13824 to r13825)

@@ 637-641 @@
  if (pVM->hwaccm.s.Event.fPending)
  {
- Log(("Reinjecting event %VX64 %08x at %RGv cr2=%RX64\n", pVM->hwaccm.s.Event.intInfo, pVM->hwaccm.s.Event.errCode, (RTGCPTR)pCtx->rip, pCtx->cr2));
+ Log(("Reinjecting event %RX64 %08x at %RGv cr2=%RX64\n", pVM->hwaccm.s.Event.intInfo, pVM->hwaccm.s.Event.errCode, (RTGCPTR)pCtx->rip, pCtx->cr2));
  STAM_COUNTER_INC(&pVM->hwaccm.s.StatIntReinject);
  rc = VMXR0InjectEvent(pVM, pCtx, pVM->hwaccm.s.Event.intInfo, 0, pVM->hwaccm.s.Event.errCode);

@@ 1986-1994 @@
  AssertRC(rc);
  pVM->hwaccm.s.Event.errCode = val;
- Log(("Pending inject %VX64 at %RGv exit=%08x intInfo=%08x exitQualification=%08x pending error=%RX64\n", pVM->hwaccm.s.Event.intInfo, (RTGCPTR)pCtx->rip, exitReason, intInfo, exitQualification, val));
+ Log(("Pending inject %RX64 at %RGv exit=%08x intInfo=%08x exitQualification=%08x pending error=%RX64\n", pVM->hwaccm.s.Event.intInfo, (RTGCPTR)pCtx->rip, exitReason, intInfo, exitQualification, val));
  }
  else
  {
- Log(("Pending inject %VX64 at %RGv exit=%08x intInfo=%08x exitQualification=%08x\n", pVM->hwaccm.s.Event.intInfo, (RTGCPTR)pCtx->rip, exitReason, intInfo, exitQualification));
+ Log(("Pending inject %RX64 at %RGv exit=%08x intInfo=%08x exitQualification=%08x\n", pVM->hwaccm.s.Event.intInfo, (RTGCPTR)pCtx->rip, exitReason, intInfo, exitQualification));
  pVM->hwaccm.s.Event.errCode = 0;
  }

@@ 2205-2209 @@
  AssertRC(rc);

- Log(("Trap %x (debug) at %RGv exit qualification %VX64\n", vector, (RTGCPTR)pCtx->rip, exitQualification));
+ Log(("Trap %x (debug) at %RGv exit qualification %RX64\n", vector, (RTGCPTR)pCtx->rip, exitQualification));
  rc = VMXR0InjectEvent(pVM, pCtx, VMX_VMCS_CTRL_ENTRY_IRQ_INFO_FROM_EXIT_INT_INFO(intInfo), cbInstr, errCode);
  AssertRC(rc);

@@ 3250-3258 @@

  # if HC_ARCH_BITS == 64
- Log(("MSR_K6_EFER = %VX64\n", ASMRdMsr(MSR_K6_EFER)));
- Log(("MSR_K6_STAR = %VX64\n", ASMRdMsr(MSR_K6_STAR)));
- Log(("MSR_K8_LSTAR = %VX64\n", ASMRdMsr(MSR_K8_LSTAR)));
- Log(("MSR_K8_CSTAR = %VX64\n", ASMRdMsr(MSR_K8_CSTAR)));
- Log(("MSR_K8_SF_MASK = %VX64\n", ASMRdMsr(MSR_K8_SF_MASK)));
+ Log(("MSR_K6_EFER = %RX64\n", ASMRdMsr(MSR_K6_EFER)));
+ Log(("MSR_K6_STAR = %RX64\n", ASMRdMsr(MSR_K6_STAR)));
+ Log(("MSR_K8_LSTAR = %RX64\n", ASMRdMsr(MSR_K8_LSTAR)));
+ Log(("MSR_K8_CSTAR = %RX64\n", ASMRdMsr(MSR_K8_CSTAR)));
+ Log(("MSR_K8_SF_MASK = %RX64\n", ASMRdMsr(MSR_K8_SF_MASK)));
  # endif
  #endif /* VBOX_STRICT */