Changeset 106382 in vbox for trunk/src/VBox/VMM
- Timestamp: Oct 16, 2024 1:53:23 PM
- Location: trunk/src/VBox/VMM/VMMR3
- Files: 2 edited
trunk/src/VBox/VMM/VMMR3/PGM-armv8.cpp
--- trunk/src/VBox/VMM/VMMR3/PGM-armv8.cpp (r106061)
+++ trunk/src/VBox/VMM/VMMR3/PGM-armv8.cpp (r106382)

+DECLINLINE(int) pgmGstWalkReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
+{
+    NOREF(pVCpu);
+    pWalk->fNotPresent = true;
+    pWalk->uLevel      = uLevel;
+    pWalk->fFailed     = PGM_WALKFAIL_NOT_PRESENT
+                       | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
+    return VERR_PAGE_TABLE_NOT_PRESENT;
+}
+
+
+DECLINLINE(int) pgmGstWalkReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel, int rc)
+{
+    AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); NOREF(rc); NOREF(pVCpu);
+    pWalk->fBadPhysAddr = true;
+    pWalk->uLevel       = uLevel;
+    pWalk->fFailed      = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS
+                        | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
+    return VERR_PAGE_TABLE_NOT_PRESENT;
+}
+
+
+DECLINLINE(int) pgmGstWalkReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
+{
+    NOREF(pVCpu);
+    pWalk->fRsvdError = true;
+    pWalk->uLevel     = uLevel;
+    pWalk->fFailed    = PGM_WALKFAIL_RESERVED_BITS
+                      | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
+    return VERR_PAGE_TABLE_NOT_PRESENT;
+}
+
+
 VMMDECL(int) PGMGstGetPage(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
 {
     VMCPU_ASSERT_EMT(pVCpu);
     Assert(pWalk);
-    //AssertReleaseFailed();
+#ifndef DEBUG_aeichner
+    AssertReleaseFailed();
     RT_NOREF(pVCpu, GCPtr, pWalk);
     return VERR_NOT_IMPLEMENTED;
+#else
+    pWalk->fSucceeded = false;
+
+    RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
+    if (GCPhysPt == RTGCPHYS_MAX) /* MMU disabled? */
+    {
+        pWalk->GCPtr      = GCPtr;
+        pWalk->fSucceeded = true;
+        pWalk->GCPhys     = GCPtr;
+        return VINF_SUCCESS;
+    }
+
+    /* Do the translation. */
+    /** @todo This is just a sketch to get something working for debugging, assumes 4KiB granules and 48-bit output address.
+     *        Needs to be moved to PGMAllGst like on x86 and implemented for 16KiB and 64KiB granule sizes. */
+    uint64_t u64TcrEl1 = CPUMGetTcrEl1(pVCpu);
+    uint8_t u8TxSz = (GCPtr & RT_BIT_64(55))
+                   ? ARMV8_TCR_EL1_AARCH64_T1SZ_GET(u64TcrEl1)
+                   : ARMV8_TCR_EL1_AARCH64_T0SZ_GET(u64TcrEl1);
+    uint8_t uLookupLvl;
+    RTGCPHYS fLookupMask;
+
+    /*
+     * From: https://github.com/codingbelief/arm-architecture-reference-manual-for-armv8-a/blob/master/en/chapter_d4/d42_2_controlling_address_translation_stages.md
+     * For all translation stages
+     * The maximum TxSZ value is 39. If TxSZ is programmed to a value larger than 39 then it is IMPLEMENTATION DEFINED whether:
+     *     - The implementation behaves as if the field is programmed to 39 for all purposes other than reading back the value of the field.
+     *     - Any use of the TxSZ value generates a Level 0 Translation fault for the stage of translation at which TxSZ is used.
+     *
+     * For a stage 1 translation
+     * The minimum TxSZ value is 16. If TxSZ is programmed to a value smaller than 16 then it is IMPLEMENTATION DEFINED whether:
+     *     - The implementation behaves as if the field were programmed to 16 for all purposes other than reading back the value of the field.
+     *     - Any use of the TxSZ value generates a stage 1 Level 0 Translation fault.
+     *
+     * We currently choose the former for both.
+     */
+    if (/*u8TxSz >= 16 &&*/ u8TxSz <= 24)
+    {
+        uLookupLvl  = 0;
+        fLookupMask = RT_BIT_64(24 - u8TxSz + 1) - 1;
+    }
+    else if (u8TxSz >= 25 && u8TxSz <= 33)
+    {
+        uLookupLvl  = 1;
+        fLookupMask = RT_BIT_64(33 - u8TxSz + 1) - 1;
+    }
+    else /*if (u8TxSz >= 34 && u8TxSz <= 39)*/
+    {
+        uLookupLvl  = 2;
+        fLookupMask = RT_BIT_64(39 - u8TxSz + 1) - 1;
+    }
+    /*else
+        return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 0, VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS);*/ /** @todo Better status (Invalid TCR config). */
+
+    uint64_t *pu64Pt = NULL;
+    uint64_t  uPt;
+    int       rc;
+    if (uLookupLvl == 0)
+    {
+        rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
+        if (RT_SUCCESS(rc)) { /* probable */ }
+        else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 0, rc);
+
+        uPt = pu64Pt[(GCPtr >> 39) & fLookupMask];
+        if (uPt & RT_BIT_64(0)) { /* probable */ }
+        else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 0);
+
+        if (uPt & RT_BIT_64(1)) { /* probable */ }
+        else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 0); /** @todo Only supported if TCR_EL1.DS is set. */
+
+        /* All nine bits from now on. */
+        fLookupMask = RT_BIT_64(9) - 1;
+        GCPhysPt    = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
+    }
+
+    if (uLookupLvl <= 1)
+    {
+        rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
+        if (RT_SUCCESS(rc)) { /* probable */ }
+        else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 1, rc);
+
+        uPt = pu64Pt[(GCPtr >> 30) & fLookupMask];
+        if (uPt & RT_BIT_64(0)) { /* probable */ }
+        else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 1);
+
+        if (uPt & RT_BIT_64(1)) { /* probable */ }
+        else
+        {
+            /* Block descriptor (1G page). */
+            pWalk->GCPtr       = GCPtr;
+            pWalk->fSucceeded  = true;
+            pWalk->GCPhys      = (RTGCPHYS)(uPt & UINT64_C(0xffffc0000000)) | (GCPtr & (RTGCPTR)(_1G - 1));
+            pWalk->fGigantPage = true;
+            return VINF_SUCCESS;
+        }
+
+        /* All nine bits from now on. */
+        fLookupMask = RT_BIT_64(9) - 1;
+        GCPhysPt    = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
+    }
+
+    if (uLookupLvl <= 2)
+    {
+        rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
+        if (RT_SUCCESS(rc)) { /* probable */ }
+        else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 2, rc);
+
+        uPt = pu64Pt[(GCPtr >> 21) & fLookupMask];
+        if (uPt & RT_BIT_64(0)) { /* probable */ }
+        else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 2);
+
+        if (uPt & RT_BIT_64(1)) { /* probable */ }
+        else
+        {
+            /* Block descriptor (2M page). */
+            pWalk->GCPtr      = GCPtr;
+            pWalk->fSucceeded = true;
+            pWalk->GCPhys     = (RTGCPHYS)(uPt & UINT64_C(0xffffffe00000)) | (GCPtr & (RTGCPTR)(_2M - 1));
+            pWalk->fBigPage   = true;
+            return VINF_SUCCESS;
+        }
+
+        /* All nine bits from now on. */
+        fLookupMask = RT_BIT_64(9) - 1;
+        GCPhysPt    = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
+    }
+
+    Assert(uLookupLvl <= 3);
+
+    /* Next level. */
+    rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
+    if (RT_SUCCESS(rc)) { /* probable */ }
+    else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 3, rc);
+
+    uPt = pu64Pt[(GCPtr & UINT64_C(0x1ff000)) >> 12];
+    if (uPt & RT_BIT_64(0)) { /* probable */ }
+    else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 3);
+
+    if (uPt & RT_BIT_64(1)) { /* probable */ }
+    else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 3); /** No block descriptors. */
+
+    pWalk->GCPtr      = GCPtr;
+    pWalk->fSucceeded = true;
+    pWalk->GCPhys     = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000)) | (GCPtr & (RTGCPTR)(_4K - 1));
+    return VINF_SUCCESS;
+#endif
 }
…
 {
     VMCPU_ASSERT_EMT(pVCpu);
-    AssertReleaseFailed();
-    RT_NOREF(pVCpu, GCPtr, pWalk, pGstWalk);
-    return VERR_NOT_IMPLEMENTED;
+    RT_NOREF(pGstWalk);
+    return PGMGstGetPage(pVCpu, GCPtr, pWalk);
 }
…
 {
     VMCPU_ASSERT_EMT(pVCpu);
-    AssertReleaseFailed();
-    RT_NOREF(pVCpu, GCPtr, pWalk, pGstWalk);
-    return VERR_NOT_IMPLEMENTED;
+    return pgmGstPtWalk(pVCpu, GCPtr, pWalk, pGstWalk); /** @todo Always do full walk for now. */
 }
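To make the TxSZ handling in the hunk above easier to follow, here is a small, self-contained sketch that is not part of the changeset: it mirrors the if/else ladder that maps a TCR_EL1.T0SZ/T1SZ value to the starting lookup level and the index mask for the first table lookup, under the same assumptions the commit states (4KiB granule, 48-bit output addresses, out-of-range TxSZ values clamped rather than faulting). The helper name txszToStartLevel and the plain C types are made up for this illustration only.

/* Illustration only: TxSZ -> starting lookup level and top-level index mask,
 * mirroring the ladder in PGMGstGetPage above (4KiB granule assumed). */
#include <stdint.h>
#include <stdio.h>

static void txszToStartLevel(uint8_t u8TxSz, uint8_t *puLevel, uint64_t *pfIdxMask)
{
    if (u8TxSz <= 24)        /* 40..48 VA bits: bits 47:39 index level 0. */
    {
        *puLevel   = 0;
        *pfIdxMask = (UINT64_C(1) << (24 - u8TxSz + 1)) - 1;
    }
    else if (u8TxSz <= 33)   /* 31..39 VA bits: bits 38:30 index level 1. */
    {
        *puLevel   = 1;
        *pfIdxMask = (UINT64_C(1) << (33 - u8TxSz + 1)) - 1;
    }
    else                     /* 25..30 VA bits: bits 29:21 index level 2. */
    {
        *puLevel   = 2;
        *pfIdxMask = (UINT64_C(1) << (39 - u8TxSz + 1)) - 1;
    }
}

int main(void)
{
    for (unsigned u8TxSz = 16; u8TxSz <= 39; u8TxSz++)
    {
        uint8_t  uLevel;
        uint64_t fIdxMask;
        txszToStartLevel((uint8_t)u8TxSz, &uLevel, &fIdxMask);
        printf("TxSZ=%2u -> start level %u, index mask %#llx\n",
               u8TxSz, uLevel, (unsigned long long)fIdxMask);
    }
    return 0;
}

For example, a 39-bit virtual address space (TxSZ = 25, a common Linux/arm64 configuration) starts the walk at level 1 with a full 9-bit index mask (0x1ff), which is exactly what the commit's second branch computes.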
trunk/src/VBox/VMM/VMMR3/PGMDbg.cpp
--- trunk/src/VBox/VMM/VMMR3/PGMDbg.cpp (r106061)
+++ trunk/src/VBox/VMM/VMMR3/PGMDbg.cpp (r106382)

         else
         {
+#ifndef VBOX_VMM_TARGET_ARMV8
             Assert(WalkGst.enmType != PGMPTWALKGSTTYPE_INVALID);
+#endif
             Assert(!Walk.fSucceeded);
             cbPrev = 0; /* ignore error. */
…
              */
             uint64_t cPagesCanSkip;
+#ifndef VBOX_VMM_TARGET_ARMV8
             switch (Walk.uLevel)
             {
…
                     continue;
                 }
+#else
+            /** @todo Sketch, needs creating proper defines for constants in armv8.h and using these
+             *        instead of hardcoding these here. */
+            switch (Walk.uLevel)
+            {
+                case 0:
+                case 1:
+                    cPagesCanSkip = (512 - ((GCPtr >> 21) & 0x1ff)) * 512
+                                  - ((GCPtr >> 12) & 0x1ff);
+                    Assert(!((GCPtr + ((RTGCPTR)cPagesCanSkip << 12)) & (RT_BIT_64(21) - 1)));
+                    break;
+                case 2:
+                    cPagesCanSkip = 512 - ((GCPtr >> 12) & 0x1ff);
+                    Assert(!((GCPtr + ((RTGCPTR)cPagesCanSkip << 12)) & (RT_BIT_64(12) - 1)));
+                    break;
+                case 3:
+                    /* page level, use cIncPages */
+                    cPagesCanSkip = 1;
+                    break;
+                default:
+                    AssertMsgFailed(("%d\n", Walk.uLevel));
+                    cPagesCanSkip = 0;
+                    break;
+            }
+
+            if (cPages <= cPagesCanSkip)
+                break;
+            fFullWalk = true;
+            if (cPagesCanSkip >= cIncPages)
+            {
+                cPages -= cPagesCanSkip;
+                GCPtr += (RTGCPTR)cPagesCanSkip << 12;
+                continue;
+            }
+#endif
         }
…
             break;
         cPages -= cIncPages;
+#ifndef VBOX_VMM_TARGET_ARMV8
         GCPtr += (RTGCPTR)cIncPages << X86_PT_PAE_SHIFT;
+#else
+        GCPtr += (RTGCPTR)cIncPages << 12;
+#endif

         /* Yield the PGM lock every now and then. */
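As a companion to the ARMv8 branch above, the following self-contained sketch (again not part of the changeset) shows what the hard-coded skip computation amounts to for a 4KiB granule: a walk failure reported at level 0 or 1 skips to the next 1GiB boundary, a failure at level 2 skips to the next 2MiB boundary, and a failure at the leaf level advances one page at a time. The function name pagesCanSkip and the example address are invented for this illustration.

/* Illustration only: how many 4KiB pages the debugger scan loop can skip
 * after a failed walk at a given level, mirroring the switch added above. */
#include <stdint.h>
#include <stdio.h>

static uint64_t pagesCanSkip(uint64_t GCPtr, unsigned uLevel)
{
    switch (uLevel)
    {
        case 0:
        case 1:
            /* Skip to the next 1GiB boundary (conservative for a level 0 failure). */
            return (512 - ((GCPtr >> 21) & 0x1ff)) * 512
                 - ((GCPtr >> 12) & 0x1ff);
        case 2:
            /* Skip to the next 2MiB boundary. */
            return 512 - ((GCPtr >> 12) & 0x1ff);
        case 3:
        default:
            /* Leaf level: advance page by page. */
            return 1;
    }
}

int main(void)
{
    const uint64_t GCPtr = UINT64_C(0x0000000040123000); /* arbitrary example address */
    for (unsigned uLevel = 0; uLevel <= 3; uLevel++)
    {
        uint64_t cSkip = pagesCanSkip(GCPtr, uLevel);
        printf("failure at level %u: skip %llu pages -> next address %#llx\n",
               uLevel, (unsigned long long)cSkip,
               (unsigned long long)(GCPtr + (cSkip << 12)));
    }
    return 0;
}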