Changeset 70352 in vbox

Timestamp:                  Dec 27, 2017 7:56:22 AM
svn:sync-xref-src-repo-rev: 119952
Location:                   trunk
Files:                      4 edited
trunk/include/VBox/vmm/hm_svm.h (r70301 → r70352)

The anonymous bit-field unions embedded in the VMCB control area (interrupt
shadow, nested paging/SEV, LBR virtualization) are hoisted out into named
union types, the compile-time offset checks are updated to match, and the
nested-guest VMCB cache gains LBR-virtualization and DBGCTL fields. A few
stray blank lines are dropped along the way.

New types, defined ahead of the control area:

+/**
+ * SVM Nested Paging struct.
+ */
+typedef union
+{
+    struct
+    {
+        uint32_t u1NestedPaging     : 1;
+        uint32_t u1Sev              : 1;
+        uint32_t u1SevEs            : 1;
+        uint32_t u29Reserved0       : 29;
+    } n;
+    uint64_t u;
+} SVMNP;
+AssertCompileSize(SVMNP, 8);
+
+/**
+ * SVM Interrupt shadow struct.
+ */
+typedef union
+{
+    struct
+    {
+        uint32_t u1IntShadow        : 1;
+        uint32_t u1GuestIntMask     : 1;
+        uint32_t u30Reserved0       : 30;
+    } n;
+    uint64_t u;
+} SVMINTSHADOW;
+AssertCompileSize(SVMINTSHADOW, 8);
+
+/**
+ * SVM LBR virtualization struct.
+ */
+typedef union
+{
+    struct
+    {
+        uint32_t u1LbrVirt          : 1;
+        uint32_t u1VirtVmsaveVmload : 1;
+        uint32_t u30Reserved1       : 30;
+    } n;
+    uint64_t u;
+} SVMLBRVIRT;
+AssertCompileSize(SVMLBRVIRT, 8);

In SVMVMCBCTRL the three members previously declared as in-place unions
(tagged RT_UNION_NM(intshadow), RT_UNION_NM(np) and RT_UNION_NM(lbrvirt),
with whole-value members u64IntShadowCtrl, u64NpSevCtrl and u64LbrVirtCtrl)
become:

     /** Offset 0x68 - Interrupt shadow. */
-    union
-    {
-        uint32_t u1IntShadow        : 1;
-        uint32_t u1GuestIntMask     : 1;
-        uint32_t u30Reserved0       : 30;
-        uint64_t u64IntShadowCtrl;
-    } RT_UNION_NM(intshadow);
+    SVMINTSHADOW IntShadow;
...
     /** Offset 0x90 - Nested Paging. */
-    union
-    {
-        uint32_t u1NestedPaging     : 1;
-        uint32_t u1Sev              : 1;
-        uint32_t u1SevEs            : 1;
-        uint32_t u29Reserved0       : 29;
-        uint64_t u64NpSevCtrl;
-    } RT_UNION_NM(np);
+    SVMNP NestedPaging;
...
     /** Offset 0xb8 - LBR Virtualization. */
-    union
-    {
-        uint32_t u1LbrVirt          : 1;
-        uint32_t u1VirtVmsaveVmload : 1;
-        uint32_t u30Reserved1       : 30;
-        uint64_t u64LbrVirtCtrl;
-    } RT_UNION_NM(lbrvirt);
+    SVMLBRVIRT LbrVirt;

The affected offset assertions now reference the named members (the rest of
the assertion list is only re-aligned):

-AssertCompileMemberOffset(SVMVMCBCTRL, RT_UNION_NM(intshadow.) u64IntShadowCtrl, 0x68);
+AssertCompileMemberOffset(SVMVMCBCTRL, IntShadow,    0x68);
-AssertCompileMemberOffset(SVMVMCBCTRL, RT_UNION_NM(np.) u64NpSevCtrl,            0x90);
+AssertCompileMemberOffset(SVMVMCBCTRL, NestedPaging, 0x90);
-AssertCompileMemberOffset(SVMVMCBCTRL, RT_UNION_NM(lbrvirt.) u64LbrVirtCtrl,     0xb8);
+AssertCompileMemberOffset(SVMVMCBCTRL, LbrVirt,      0xb8);

SVMNESTEDVMCBCACHE is extended to cache the LBR-virtualization control and
the DBGCTL MSR:

     /** Cache of the nested-paging control. */
     uint32_t u1NestedPaging : 1;
-    uint32_t u31Reserved0   : 31;
+    /** Cache of the LBR virtualization control. */
+    uint32_t u1LbrVirt      : 1;
+    uint32_t u31Reserved0   : 30;
     uint32_t u32Reserved1;
...
     /** Cache of EFER. */
     uint64_t u64EFER;
+    /** Cache of DBGCTL. */
+    uint64_t u64DBGCTL;
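The point of the named-union shape is that every consumer can address each
64-bit control word either bit-by-bit through the n view or wholesale through
u, with AssertCompileSize pinning the layout. A minimal standalone sketch of
the pattern in plain C — the DEMO_SVMNP type below mimics SVMNP rather than
including the VBox headers:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Mimics hm_svm.h's SVMNP: a 32-bit bit-field view 'n' overlaid on
       the raw 64-bit control qword 'u'. */
    typedef union
    {
        struct
        {
            uint32_t u1NestedPaging : 1;
            uint32_t u1Sev          : 1;
            uint32_t u1SevEs        : 1;
            uint32_t u29Reserved0   : 29;
        } n;
        uint64_t u;
    } DEMO_SVMNP;

    int main(void)
    {
        DEMO_SVMNP Np;
        Np.u = 0;                  /* clear the whole control qword at once */
        Np.n.u1NestedPaging = 1;   /* flip one bit through the named view   */

        assert(sizeof(Np) == 8);   /* what AssertCompileSize(SVMNP, 8)
                                      verifies at compile time */
        printf("control qword = %#llx\n", (unsigned long long)Np.u);
        return 0;
    }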
trunk/src/VBox/VMM/VMMAll/HMSVMAll.cpp (r70260 → r70352)

When the nested-guest VMCB is restored from the HM cache on #VMEXIT, the
newly cached DBGCTL value and LBR-virtualization bit are now written back as
well, and the nested-paging bit goes through the named union; the surrounding
assignments are merely re-aligned:

     PSVMNESTEDVMCBCACHE pNstGstVmcbCache = &pVCpu->hm.s.svm.NstGstVmcbCache;
...
     pVmcbNstGstState->u64EFER                      = pNstGstVmcbCache->u64EFER;
+    pVmcbNstGstState->u64DBGCTL                    = pNstGstVmcbCache->u64DBGCTL;
     pVmcbNstGstCtrl->u32VmcbCleanBits              = pNstGstVmcbCache->u32VmcbCleanBits;
...
     pVmcbNstGstCtrl->TLBCtrl                       = pNstGstVmcbCache->TLBCtrl;
-    pVmcbNstGstCtrl->u1NestedPaging                = pNstGstVmcbCache->u1NestedPaging;
+    pVmcbNstGstCtrl->NestedPaging.n.u1NestedPaging = pNstGstVmcbCache->u1NestedPaging;
+    pVmcbNstGstCtrl->LbrVirt.n.u1LbrVirt           = pNstGstVmcbCache->u1LbrVirt;
     pCtx->hwvirt.svm.fHMCachedVmcb = false;
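For orientation, the mechanism these assignments serve, reduced to a toy:
before HM merges its own settings into the nested-guest VMCB it caches every
field it may clobber, and on #VMEXIT it restores them so the nested
hypervisor sees its original values — which is why each newly touched field
(here DBGCTL and the LBR-virt bit) must also grow a cache slot. A hedged,
self-contained sketch; the Ctrl/Cache types and field set are invented
stand-ins, not the real SVMVMCBCTRL/SVMNESTEDVMCBCACHE:

    #include <stdbool.h>
    #include <stdint.h>

    /* Invented miniature stand-ins for the VMCB controls and their cache. */
    typedef struct { uint64_t u64TSCOffset; uint64_t u64DBGCTL;
                     uint32_t u1LbrVirt : 1; } Ctrl;
    typedef struct { uint64_t u64TSCOffset; uint64_t u64DBGCTL;
                     uint32_t u1LbrVirt : 1; bool fCached; } Cache;

    /* Before the first nested VM-run: remember the nested hypervisor's
       values. */
    static void cacheCtrl(Cache *pCache, const Ctrl *pCtrl)
    {
        pCache->u64TSCOffset = pCtrl->u64TSCOffset;
        pCache->u64DBGCTL    = pCtrl->u64DBGCTL;
        pCache->u1LbrVirt    = pCtrl->u1LbrVirt;
        pCache->fCached      = true;
    }

    /* On #VMEXIT: write the original values back into the VMCB. */
    static void restoreCtrl(Ctrl *pCtrl, Cache *pCache)
    {
        pCtrl->u64TSCOffset = pCache->u64TSCOffset;
        pCtrl->u64DBGCTL    = pCache->u64DBGCTL;
        pCtrl->u1LbrVirt    = pCache->u1LbrVirt;
        pCache->fCached     = false;
    }

    int main(void)
    {
        Ctrl  vmcb  = { 1000, 0, 1 };      /* nested hypervisor's setup  */
        Cache cache = { 0, 0, 0, false };

        cacheCtrl(&cache, &vmcb);          /* HM merges in its own state */
        vmcb.u64TSCOffset = 0;             /* e.g. HM swaps the offset   */
        restoreCtrl(&vmcb, &cache);        /* #VMEXIT: originals return  */
        return vmcb.u64TSCOffset == 1000 ? 0 : 1;
    }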
trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp (r70303 → r70352)

LBR virtualization is now enabled when the CPU supports it, all VMCB control
accesses move to the named unions, the nested-guest VMCB cache picks up the
DBGCTL and LBR-virt fields, and several Log4 state dumps are extended
(ESP/EBP, SS, and the last-branch MSRs). A disabled debug aid for a DOS6
triple-fault in a nested VM is also added.

VM setup detects the feature and turns it on per VCPU:

     bool const fUsePauseFilter = fPauseFilter && pVM->hm.s.svm.cPauseFilter && pVM->hm.s.svm.cPauseFilterThresholdTicks;
+
+    bool const fLbrVirt    = RT_BOOL(pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_LBR_VIRT);
+    bool const fUseLbrVirt = fLbrVirt; /** @todo CFGM etc. */

     for (VMCPUID i = 0; i < pVM->cCpus; i++)
...
         pVmcb->ctrl.u64MSRPMPhysAddr = pVCpu->hm.s.svm.HCPhysMsrBitmap;

-        /* No LBR virtualization. */
-        Assert(pVmcb->ctrl.u1LbrVirt == 0);
+        /* LBR virtualization. */
+        if (fUseLbrVirt)
+        {
+            pVmcb->ctrl.LbrVirt.n.u1LbrVirt = fUseLbrVirt;
+            pVmcb->guest.u64DBGCTL = MSR_IA32_DEBUGCTL_LBR;
+        }
+        else
+            Assert(pVmcb->ctrl.LbrVirt.n.u1LbrVirt == 0);
...
         /* Setup Nested Paging. This doesn't change throughout the execution time of the VM. */
-        pVmcb->ctrl.u1NestedPaging = pVM->hm.s.fNestedPaging;
+        pVmcb->ctrl.NestedPaging.n.u1NestedPaging = pVM->hm.s.fNestedPaging;

hmR0SvmVmRunCacheVmcb caches the new fields alongside the existing ones:

     pNstGstVmcbCache->u64EFER          = pVmcbNstGstState->u64EFER;
+    pNstGstVmcbCache->u64DBGCTL        = pVmcbNstGstState->u64DBGCTL;
...
     pNstGstVmcbCache->TLBCtrl          = pVmcbNstGstCtrl->TLBCtrl;
-    pNstGstVmcbCache->u1NestedPaging   = pVmcbNstGstCtrl->u1NestedPaging;
+    pNstGstVmcbCache->u1NestedPaging   = pVmcbNstGstCtrl->NestedPaging.n.u1NestedPaging;
+    pNstGstVmcbCache->u1LbrVirt        = pVmcbNstGstCtrl->LbrVirt.n.u1LbrVirt;
     pCtx->hwvirt.svm.fHMCachedVmcb = true;

and the nested-guest VMCB setup copies the LBR state from the outer guest
VMCB for now:

-        pVmcbNstGstCtrl->u1NestedPaging = pVCpu->CTX_SUFF(pVM)->hm.s.fNestedPaging;
+        pVmcbNstGstCtrl->NestedPaging.n.u1NestedPaging = pVCpu->CTX_SUFF(pVM)->hm.s.fNestedPaging;
+
+        /* For now copy the LBR info. from outer guest VMCB. */
+        /** @todo fix this later. */
+        PCSVMVMCB pVmcb = pVCpu->hm.s.svm.pVmcb;
+        pVmcbNstGstCtrl->LbrVirt.n.u1LbrVirt = pVmcb->ctrl.LbrVirt.n.u1LbrVirt;
+        pVmcbNstGst->guest.u64DBGCTL = pVmcb->guest.u64DBGCTL;
     }
     else
     {
         Assert(pVmcbNstGstCtrl->u64IOPMPhysAddr == g_HCPhysIOBitmap);
         Assert(pVmcbNstGstCtrl->u64MSRPMPhysAddr = g_HCPhysNstGstMsrBitmap);
-        Assert(RT_BOOL(pVmcbNstGstCtrl->u1NestedPaging) == pVCpu->CTX_SUFF(pVM)->hm.s.fNestedPaging);
+        Assert(RT_BOOL(pVmcbNstGstCtrl->NestedPaging.n.u1NestedPaging) == pVCpu->CTX_SUFF(pVM)->hm.s.fNestedPaging);
     }

When saving the guest state, the interrupt-shadow and nested-paging bits are
read through the named unions, and a CPL/SS.DPL mismatch is now logged before
SS.DPL is overwritten:

-    if (pVmcb->ctrl.u1IntShadow)
+    if (pVmcb->ctrl.IntShadow.n.u1IntShadow)
         EMSetInhibitInterruptsPC(pVCpu, pMixedCtx->rip);
...
     Assert(!(pVmcb->guest.u8CPL & ~0x3));
-    pMixedCtx->ss.Attr.n.u2Dpl = pVmcb->guest.u8CPL & 0x3;
+    uint8_t const uCpl = pVmcb->guest.u8CPL;
+    if (pMixedCtx->ss.Attr.n.u2Dpl != uCpl)
+    {
+        Log4(("hmR0SvmSaveGuestState: CPL differs. SS.DPL=%u, CPL=%u, overwriting SS.DPL!\n", pMixedCtx->ss.Attr.n.u2Dpl, uCpl));
+        pMixedCtx->ss.Attr.n.u2Dpl = pVmcb->guest.u8CPL & 0x3;
+    }
...
-    if (   pVmcb->ctrl.u1NestedPaging
+    if (   pVmcb->ctrl.NestedPaging.n.u1NestedPaging
         && pMixedCtx->cr3 != pVmcb->guest.u64CR3)

The closing hmR0SvmSaveGuestState dump is gated on nested hardware
virtualization and extended with ESP/EBP, SS and the last-branch MSRs:

-    Log4(("hmR0SvmSaveGuestState: CS:RIP=%04x:%RX64 EFL=%#x CR0=%#RX32 CR3=%#RX32 CR4=%#RX32\n", pMixedCtx->cs.Sel,
-          pMixedCtx->rip, pMixedCtx->eflags.u, pMixedCtx->cr0, pMixedCtx->cr3, pMixedCtx->cr4));
+    if (CPUMIsGuestInSvmNestedHwVirtMode(pMixedCtx))
+    {
+        Log4(("hmR0SvmSaveGuestState: CS:RIP=%04x:%RX64 EFL=%#x CR0=%#RX32 CR3=%#RX32 CR4=%#RX32 ESP=%#RX32 EBP=%#RX32\n",
+              pMixedCtx->cs.Sel, pMixedCtx->rip, pMixedCtx->eflags.u, pMixedCtx->cr0, pMixedCtx->cr3, pMixedCtx->cr4,
+              pMixedCtx->esp, pMixedCtx->ebp));
+        Log4(("hmR0SvmSaveGuestState: SS={%04x base=%016RX64 limit=%08x flags=%08x}\n", pMixedCtx->ss.Sel, pMixedCtx->ss.u64Base,
+              pMixedCtx->ss.u32Limit, pMixedCtx->ss.Attr.u));
+        Log4(("hmR0SvmSaveGuestState: DBGCTL BR_FROM=%#RX64 BR_TO=%#RX64 XcptFrom=%#RX64 XcptTo=%#RX64\n",
+              pVmcb->guest.u64BR_FROM, pVmcb->guest.u64BR_TO, pVmcb->guest.u64LASTEXCPFROM, pVmcb->guest.u64LASTEXCPTO));
+    }

The hmR0SvmLoadGuestState and hmR0SvmLoadGuestStateNested dumps gain the same
ESP/EBP and SS lines, the interrupt-shadow update in the intercept setup
becomes pVmcb->ctrl.IntShadow.n.u1IntShadow = fIntShadow, and the VMCB Log4
dump follows the renamed fields (ctrl.IntShadow.*, ctrl.NestedPaging.*,
ctrl.LbrVirt.*). A stale comment in the SVM_EXIT_IOIO nested-guest path is
dropped, the X86_XCPT_GP injection case gains braces, and
hmR0SvmExitXcptGeneric logs the vector and error code:

+    Log4(("hmR0SvmExitXcptGeneric: uVector=%#x uErrCode=%u\n", uVector, uErrCode));

Finally, a disabled (#if 0) snippet for chasing a DOS6 triple-fault in a
nested VM disassembles the faulting instruction and bails out to IEM when a
#GP hits an IRET:

+#if 0
+            /* Debugging DOS6 triple-fault nested-VM. */
+            unsigned cbInstr;
+            DISCPUSTATE Dis;
+            int rc = EMInterpretDisasCurrent(pVCpu->CTX_SUFF(pVM), pVCpu, &Dis, &cbInstr);
+            if (RT_SUCCESS(rc))
+            {
+                RT_NOREF(cbInstr);
+                if (   Dis.pCurInstr->uOpcode == OP_IRET
+                    && uVector == X86_XCPT_GP)
+                {
+                    Log4(("#GP on IRET detected!\n"));
+                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
+                }
+            }
+            else
+                Log4(("hmR0SvmExitXcptGeneric: failed to disassemble instr. rc=%Rrc\n", rc));
+#endif
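The feature test driving all of this is the SVM feature leaf:
X86_CPUID_SVM_FEATURE_EDX_LBR_VIRT corresponds to CPUID Fn8000_000A EDX bit 1
(LbrVirt) per the AMD APM, and DEBUGCTL bit 0 — the value MSR_IA32_DEBUGCTL_LBR
sets — is what actually switches on last-branch recording. A user-mode probe
for the same bit; an illustrative sketch using the GCC/Clang cpuid.h helper,
not VBox code:

    #include <stdio.h>
    #include <cpuid.h>   /* GCC/Clang CPUID wrapper */

    /* CPUID Fn8000_000A (SVM features), EDX: bit 0 = nested paging,
       bit 1 = LBR virtualization (AMD APM vol. 3, assumed encoding). */
    #define SVM_FEATURE_EDX_NP       (1u << 0)
    #define SVM_FEATURE_EDX_LBR_VIRT (1u << 1)

    int main(void)
    {
        unsigned eax, ebx, ecx, edx;
        if (!__get_cpuid(0x8000000a, &eax, &ebx, &ecx, &edx))
        {
            puts("CPUID leaf 0x8000000A not available (no SVM?)");
            return 1;
        }
        printf("nested paging: %s\n", (edx & SVM_FEATURE_EDX_NP)       ? "yes" : "no");
        printf("LBR virt:      %s\n", (edx & SVM_FEATURE_EDX_LBR_VIRT) ? "yes" : "no");
        return 0;
    }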
trunk/src/VBox/VMM/VMMR3/CPUM.cpp (r70323 → r70352)

The VMCB control-area debug dump is adjusted to the named unions; the
interrupt-shadow, nested-paging/SEV and LBR-virtualization bits are printed
under their own sub-headings:

     pHlp->pfnPrintf(pHlp, "%s  u8VIntrVector        = %#RX8\n",   pszPrefix, pVmcbCtrl->IntCtrl.n.u8VIntrVector);
-    pHlp->pfnPrintf(pHlp, "%su1IntShadow            = %RTbool\n", pszPrefix, pVmcbCtrl->u1IntShadow);
-    pHlp->pfnPrintf(pHlp, "%su1GuestIntMask         = %RTbool\n", pszPrefix, pVmcbCtrl->u1GuestIntMask);
+    pHlp->pfnPrintf(pHlp, "%sIntShadow\n", pszPrefix);
+    pHlp->pfnPrintf(pHlp, "%s  u1IntShadow          = %RTbool\n", pszPrefix, pVmcbCtrl->IntShadow.n.u1IntShadow);
+    pHlp->pfnPrintf(pHlp, "%s  u1GuestIntMask       = %RTbool\n", pszPrefix, pVmcbCtrl->IntShadow.n.u1GuestIntMask);
...
     pHlp->pfnPrintf(pHlp, "%sNestedPaging and SEV\n", pszPrefix);
-    pHlp->pfnPrintf(pHlp, "%s  u1NestedPaging       = %RTbool\n", pszPrefix, pVmcbCtrl->u1NestedPaging);
-    pHlp->pfnPrintf(pHlp, "%s  u1Sev                = %RTbool\n", pszPrefix, pVmcbCtrl->u1Sev);
-    pHlp->pfnPrintf(pHlp, "%s  u1SevEs              = %RTbool\n", pszPrefix, pVmcbCtrl->u1SevEs);
+    pHlp->pfnPrintf(pHlp, "%s  u1NestedPaging       = %RTbool\n", pszPrefix, pVmcbCtrl->NestedPaging.n.u1NestedPaging);
+    pHlp->pfnPrintf(pHlp, "%s  u1Sev                = %RTbool\n", pszPrefix, pVmcbCtrl->NestedPaging.n.u1Sev);
+    pHlp->pfnPrintf(pHlp, "%s  u1SevEs              = %RTbool\n", pszPrefix, pVmcbCtrl->NestedPaging.n.u1SevEs);
...
     pHlp->pfnPrintf(pHlp, "%su64NestedPagingCR3     = %#RX64\n",  pszPrefix, pVmcbCtrl->u64NestedPagingCR3);
-    pHlp->pfnPrintf(pHlp, "%su1LbrVirt              = %RTbool\n", pszPrefix, pVmcbCtrl->u1LbrVirt);
-    pHlp->pfnPrintf(pHlp, "%su1VirtVmsaveVmload     = %RTbool\n", pszPrefix, pVmcbCtrl->u1VirtVmsaveVmload);
+    pHlp->pfnPrintf(pHlp, "%sLBR virtualization\n", pszPrefix);
+    pHlp->pfnPrintf(pHlp, "%s  u1LbrVirt            = %RTbool\n", pszPrefix, pVmcbCtrl->LbrVirt.n.u1LbrVirt);
+    pHlp->pfnPrintf(pHlp, "%s  u1VirtVmsaveVmload   = %RTbool\n", pszPrefix, pVmcbCtrl->LbrVirt.n.u1VirtVmsaveVmload);
     pHlp->pfnPrintf(pHlp, "%su32VmcbCleanBits       = %#RX32\n",  pszPrefix, pVmcbCtrl->u32VmcbCleanBits);
     pHlp->pfnPrintf(pHlp, "%su64NextRIP             = %#RX64\n",  pszPrefix, pVmcbCtrl->u64NextRIP);