Changeset 108856 in vbox for trunk/src/VBox/VMM
- Timestamp:
- Apr 4, 2025 5:36:38 PM (5 weeks ago)
- svn:sync-xref-src-repo-rev:
- 168306
- Location:
- trunk/src/VBox/VMM/VMMAll
- Files:
-
- 2 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMAll/PGMAll.cpp
r108843 r108856 966 966 967 967 #elif defined(VBOX_VMM_TARGET_ARMV8) 968 969 /*970 *971 * Mode criteria:972 * - MMU enabled/disabled.973 * - TCR_EL1.TG0 (granule size for TTBR0_EL1).974 * - TCR_EL1.TG1 (granule size for TTBR1_EL1).975 * - TCR_EL1.T0SZ (address space size for TTBR0_EL1).976 * - TCR_EL1.T1SZ (address space size for TTBR1_EL1).977 * - TCR_EL1.IPS (intermediate physical address size).978 * - TCR_EL1.TBI0 (ignore top address byte for TTBR0_EL1).979 * - TCR_EL1.TBI1 (ignore top address byte for TTBR1_EL1).980 * - TCR_EL1.HPD0 (hierarchical permission disables for TTBR0_EL1).981 * - TCR_EL1.HPD1 (hierarchical permission disables for TTBR1_EL1).982 * - More ?983 *984 * Other relevant modifiers:985 * - TCR_EL1.HA - hardware access bit.986 * - TCR_EL1.HD - hardware dirty bit.987 * - ++988 *989 * Each privilege EL (1,2,3) has their own TCR_ELx and TTBR[01]_ELx registers,990 * so they should all have their own separate modes. To make it simpler,991 * why not do a separate mode for TTBR0_ELx and one for TTBR1_ELx. Top-level992 * functions determines which of the roots to use and call template (C++)993 * functions that take it from there. Using the preprocessor function template994 * approach is _not_ desirable here.995 *996 */997 998 /** @todo Proper ARMv8 guest support for PGM; The idea is to cache the999 * configuration register values and lazily reconfigure when they1000 * change. */1001 968 # include "PGMAllGst-armv8.cpp.h" 1002 1003 /**1004 * Guest mode data array.1005 */1006 PGMMODEDATAGST const g_aPgmGuestModeData[PGM_GUEST_MODE_DATA_ARRAY_SIZE] =1007 {1008 { UINT32_MAX, NULL, NULL, NULL, NULL, NULL }, /* 0 */1009 {1010 PGM_TYPE_NONE,1011 PGM_CTX(pgm,GstNoneGetPage),1012 PGM_CTX(pgm,GstNoneQueryPageFast),1013 PGM_CTX(pgm,GstNoneModifyPage),1014 PGM_CTX(pgm,GstNoneWalk),1015 PGM_CTX(pgm,GstNoneEnter),1016 PGM_CTX(pgm,GstNoneExit),1017 },1018 1019 #define PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \1020 (2 + ( (a_fEpd ? 
RT_BIT_32(6) : 0) \1021 | (a_fTbi ? RT_BIT_32(5) : 0) \1022 | (a_GranuleSz << 3) \1023 | (a_InitialLookupLvl << 1) \1024 | (a_fTtbr0 ? RT_BIT_32(0) : 0) ))1025 1026 #define PGM_MODE_CREATE_EX(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \1027 { \1028 PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd), \1029 PGM_CTX(pgm,GstGetPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \1030 PGM_CTX(pgm,GstQueryPageFast)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \1031 PGM_CTX(pgm,GstModifyPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \1032 PGM_CTX(pgm,GstWalk)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \1033 PGM_CTX(pgm,GstEnter)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \1034 PGM_CTX(pgm,GstExit)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd> \1035 }1036 1037 #define PGM_MODE_CREATE_TTBR(a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \1038 PGM_MODE_CREATE_EX(false, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd), \1039 PGM_MODE_CREATE_EX(true, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd)1040 1041 #define PGM_MODE_CREATE_LOOKUP_LVL(a_GranuleSz, a_fTbi, a_fEpd) \1042 PGM_MODE_CREATE_TTBR(0, a_GranuleSz, a_fTbi, a_fEpd ), \1043 PGM_MODE_CREATE_TTBR(1, a_GranuleSz, a_fTbi, a_fEpd ), \1044 PGM_MODE_CREATE_TTBR(2, a_GranuleSz, a_fTbi, a_fEpd ), \1045 PGM_MODE_CREATE_TTBR(3, a_GranuleSz, a_fTbi, a_fEpd ) /* Invalid */1046 1047 #define PGM_MODE_CREATE_GRANULE_SZ(a_fTbi, a_fEpd) \1048 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_INVALID, a_fTbi, a_fEpd), \1049 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_16KB, a_fTbi, a_fEpd), \1050 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_4KB, a_fTbi, a_fEpd), \1051 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_64KB, a_fTbi, a_fEpd)1052 1053 #define PGM_MODE_CREATE_TBI(a_fEpd) \1054 PGM_MODE_CREATE_GRANULE_SZ(false, a_fEpd), \1055 PGM_MODE_CREATE_GRANULE_SZ(true, 
a_fEpd)1056 1057 /* Recursive expansion for the win, this will blow up to 128 entries covering all possible modes. */1058 PGM_MODE_CREATE_TBI(false),1059 PGM_MODE_CREATE_TBI(true)1060 1061 #undef PGM_MODE_CREATE_TBI1062 #undef PGM_MODE_CREATE_GRANULE_SZ1063 #undef PGM_MODE_CREATE_LOOKUP_LVL1064 #undef PGM_MODE_CREATE_TTBR1065 #undef PGM_MODE_CREATE_EX1066 };1067 1068 969 #else 1069 970 # error "port me" … … 3656 3557 #elif defined(VBOX_VMM_TARGET_ARMV8) 3657 3558 3658 template<uint8_t a_offTsz, uint8_t a_offTg, uint8_t a_offTbi, uint8_t a_offEpd, bool a_fTtbr0>3659 DECLINLINE(uintptr_t) pgmR3DeduceTypeFromTcr(uint64_t u64RegSctlr, uint64_t u64RegTcr, uint64_t *pfInitialLookupMask)3660 {3661 uintptr_t idxNewGst = 0;3662 3663 /*3664 * MMU enabled at all?3665 * Technically this is incorrect as we use ARMV8_SCTLR_EL1_M regardless of the EL but the bit is the same3666 * for all exception levels.3667 */3668 if (u64RegSctlr & ARMV8_SCTLR_EL1_M)3669 {3670 uint64_t const u64Tsz = (u64RegTcr >> a_offTsz) & 0x1f;3671 uint64_t const u64Tg = (u64RegTcr >> a_offTg) & 0x3;3672 bool const fTbi = RT_BOOL(u64RegTcr & RT_BIT_64(a_offTbi));3673 bool const fEpd = RT_BOOL(u64RegTcr & RT_BIT_64(a_offEpd));3674 3675 /*3676 * From: https://github.com/codingbelief/arm-architecture-reference-manual-for-armv8-a/blob/master/en/chapter_d4/d42_2_controlling_address_translation_stages.md3677 * For all translation stages3678 * The maximum TxSZ value is 39. If TxSZ is programmed to a value larger than 39 then it is IMPLEMENTATION DEFINED whether:3679 * - The implementation behaves as if the field is programmed to 39 for all purposes other than reading back the value of the field.3680 * - Any use of the TxSZ value generates a Level 0 Translation fault for the stage of translation at which TxSZ is used.3681 *3682 * For a stage 1 translation3683 * The minimum TxSZ value is 16. 
If TxSZ is programmed to a value smaller than 16 then it is IMPLEMENTATION DEFINED whether:3684 * - The implementation behaves as if the field were programmed to 16 for all purposes other than reading back the value of the field.3685 * - Any use of the TxSZ value generates a stage 1 Level 0 Translation fault.3686 *3687 * We currently choose the former for both.3688 */3689 uint64_t uLookupLvl;3690 if (/*u64Tsz >= 16 &&*/ u64Tsz <= 24)3691 {3692 uLookupLvl = 0;3693 if (u64Tsz >= 16)3694 *pfInitialLookupMask = RT_BIT_64(24 - u64Tsz + 1) - 1;3695 else3696 *pfInitialLookupMask = RT_BIT_64(24 - 16 + 1) - 1;3697 }3698 else if (u64Tsz >= 25 && u64Tsz <= 33)3699 {3700 uLookupLvl = 1;3701 *pfInitialLookupMask = RT_BIT_64(33 - u64Tsz + 1) - 1;3702 }3703 else /*if (u64Tsz >= 34 && u64Tsz <= 39)*/3704 {3705 uLookupLvl = 2;3706 if (u64Tsz <= 39)3707 *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1;3708 else3709 *pfInitialLookupMask = RT_BIT_64(39 - 39 + 1) - 1;3710 }3711 3712 /* Build the index into the PGM mode callback table for the given config. */3713 idxNewGst = PGM_MODE_TYPE_CREATE(a_fTtbr0, uLookupLvl, u64Tg, fTbi, fEpd);3714 }3715 else3716 idxNewGst = PGM_TYPE_NONE;3717 3718 return idxNewGst;3719 }3720 3721 3722 3559 VMM_INT_DECL(int) PGMChangeMode(PVMCPUCC pVCpu, uint8_t bEl, uint64_t u64RegSctlr, uint64_t u64RegTcr) 3723 3560 { -
trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h
r108846 r108856 26 26 */ 27 27 28 29 /* 30 * 31 * Mode criteria: 32 * - MMU enabled/disabled. 33 * - TCR_EL1.TG0 (granule size for TTBR0_EL1). 34 * - TCR_EL1.TG1 (granule size for TTBR1_EL1). 35 * - TCR_EL1.T0SZ (address space size for TTBR0_EL1). 36 * - TCR_EL1.T1SZ (address space size for TTBR1_EL1). 37 * - TCR_EL1.IPS (intermediate physical address size). 38 * - TCR_EL1.TBI0 (ignore top address byte for TTBR0_EL1). 39 * - TCR_EL1.TBI1 (ignore top address byte for TTBR1_EL1). 40 * - TCR_EL1.HPD0 (hierarchical permission disables for TTBR0_EL1). 41 * - TCR_EL1.HPD1 (hierarchical permission disables for TTBR1_EL1). 42 * - More ? 43 * 44 * Other relevant modifiers: 45 * - TCR_EL1.HA - hardware access bit. 46 * - TCR_EL1.HD - hardware dirty bit. 47 * - ++ 48 * 49 * Each privilege EL (1,2,3) has their own TCR_ELx and TTBR[01]_ELx registers, 50 * so they should all have their own separate modes. To make it simpler, 51 * why not do a separate mode for TTBR0_ELx and one for TTBR1_ELx. Top-level 52 * functions determines which of the roots to use and call template (C++) 53 * functions that take it from there. Using the preprocessor function template 54 * approach is _not_ desirable here. 55 * 56 */ 28 57 29 58 … … 164 193 { 165 194 RT_NOREF(pGstWalk); /** @todo */ 166 uint8_t const bEl = CPUMGetGuestEL(pVCpu);167 168 195 169 196 /* … … 178 205 else 179 206 { 207 uint8_t const bEl = CPUMGetGuestEL(pVCpu); 208 180 209 uint64_t fLookupMask; 181 210 if RT_CONSTEXPR_IF(a_fTtbr0 == true) … … 195 224 196 225 uPt = pu64Pt[(GCPtr >> 39) & fLookupMask]; 197 if (uPt & RT_BIT_64(0)) { /* probable */ }226 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ } 198 227 else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 0); 199 228 200 if (uPt & RT_BIT_64(1)) { /* probable */ }229 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ } 201 230 else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 0); /** @todo Only supported if TCR_EL1.DS is set. 
*/ 202 231 … … 213 242 214 243 uPt = pu64Pt[(GCPtr >> 30) & fLookupMask]; 215 if (uPt & RT_BIT_64(0)) { /* probable */ }244 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ } 216 245 else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 1); 217 246 218 if (uPt & RT_BIT_64(1)) { /* probable */ }247 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ } 219 248 else 220 249 { … … 239 268 240 269 uPt = pu64Pt[(GCPtr >> 21) & fLookupMask]; 241 if (uPt & RT_BIT_64(0)) { /* probable */ }270 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ } 242 271 else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 2); 243 272 244 if (uPt & RT_BIT_64(1)) { /* probable */ }273 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ } 245 274 else 246 275 { … … 266 295 267 296 uPt = pu64Pt[(GCPtr & UINT64_C(0x1ff000)) >> 12]; 268 if (uPt & RT_BIT_64(0)) { /* probable */ }297 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ } 269 298 else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 3); 270 299 271 if (uPt & RT_BIT_64(1)) { /* probable */ }300 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ } 272 301 else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 3); /** No block descriptors. 
*/ 273 302 … … 291 320 { 292 321 RT_NOREF(fFlags); /** @todo */ 293 uint8_t const bEl = CPUMGetGuestEL(pVCpu);294 322 295 323 /* … … 304 332 else 305 333 { 334 uint8_t const bEl = CPUMGetGuestEL(pVCpu); 335 306 336 uint64_t fLookupMask; 307 337 if RT_CONSTEXPR_IF(a_fTtbr0 == true) … … 321 351 322 352 uPt = pu64Pt[(GCPtr >> 39) & fLookupMask]; 323 if (uPt & RT_BIT_64(0)) { /* probable */ }353 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ } 324 354 else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 0); 325 355 326 if (uPt & RT_BIT_64(1)) { /* probable */ }356 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ } 327 357 else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, 0); /** @todo Only supported if TCR_EL1.DS is set. */ 328 358 … … 339 369 340 370 uPt = pu64Pt[(GCPtr >> 30) & fLookupMask]; 341 if (uPt & RT_BIT_64(0)) { /* probable */ }371 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ } 342 372 else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 1); 343 373 344 if (uPt & RT_BIT_64(1)) { /* probable */ }374 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ } 345 375 else 346 376 { … … 364 394 365 395 uPt = pu64Pt[(GCPtr >> 21) & fLookupMask]; 366 if (uPt & RT_BIT_64(0)) { /* probable */ }396 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ } 367 397 else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 2); 368 398 369 if (uPt & RT_BIT_64(1)) { /* probable */ }399 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ } 370 400 else 371 401 { … … 390 420 391 421 uPt = pu64Pt[(GCPtr & UINT64_C(0x1ff000)) >> 12]; 392 if (uPt & RT_BIT_64(0)) { /* probable */ }422 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ } 393 423 else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 3); 394 424 395 if (uPt & RT_BIT_64(1)) { /* probable */ }425 if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ } 396 426 else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, 3); /** No 
block descriptors. */ 397 427 … … 437 467 return VINF_SUCCESS; 438 468 } 469 470 471 /** 472 * Guest mode data array. 473 */ 474 PGMMODEDATAGST const g_aPgmGuestModeData[PGM_GUEST_MODE_DATA_ARRAY_SIZE] = 475 { 476 { UINT32_MAX, NULL, NULL, NULL, NULL, NULL }, /* 0 */ 477 { 478 PGM_TYPE_NONE, 479 PGM_CTX(pgm,GstNoneGetPage), 480 PGM_CTX(pgm,GstNoneQueryPageFast), 481 PGM_CTX(pgm,GstNoneModifyPage), 482 PGM_CTX(pgm,GstNoneWalk), 483 PGM_CTX(pgm,GstNoneEnter), 484 PGM_CTX(pgm,GstNoneExit), 485 }, 486 487 #define PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \ 488 (2 + ( (a_fEpd ? RT_BIT_32(6) : 0) \ 489 | (a_fTbi ? RT_BIT_32(5) : 0) \ 490 | (a_GranuleSz << 3) \ 491 | (a_InitialLookupLvl << 1) \ 492 | (a_fTtbr0 ? RT_BIT_32(0) : 0) )) 493 494 #define PGM_MODE_CREATE_EX(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \ 495 { \ 496 PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd), \ 497 PGM_CTX(pgm,GstGetPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \ 498 PGM_CTX(pgm,GstQueryPageFast)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \ 499 PGM_CTX(pgm,GstModifyPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \ 500 PGM_CTX(pgm,GstWalk)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \ 501 PGM_CTX(pgm,GstEnter)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \ 502 PGM_CTX(pgm,GstExit)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd> \ 503 } 504 505 #define PGM_MODE_CREATE_TTBR(a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \ 506 PGM_MODE_CREATE_EX(false, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd), \ 507 PGM_MODE_CREATE_EX(true, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) 508 509 #define PGM_MODE_CREATE_LOOKUP_LVL(a_GranuleSz, a_fTbi, a_fEpd) \ 510 PGM_MODE_CREATE_TTBR(0, a_GranuleSz, a_fTbi, a_fEpd ), \ 511 PGM_MODE_CREATE_TTBR(1, a_GranuleSz, a_fTbi, a_fEpd ), \ 512 PGM_MODE_CREATE_TTBR(2, a_GranuleSz, 
a_fTbi, a_fEpd ), \ 513 PGM_MODE_CREATE_TTBR(3, a_GranuleSz, a_fTbi, a_fEpd ) /* Invalid */ 514 515 #define PGM_MODE_CREATE_GRANULE_SZ(a_fTbi, a_fEpd) \ 516 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_INVALID, a_fTbi, a_fEpd), \ 517 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_16KB, a_fTbi, a_fEpd), \ 518 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_4KB, a_fTbi, a_fEpd), \ 519 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_64KB, a_fTbi, a_fEpd) 520 521 #define PGM_MODE_CREATE_TBI(a_fEpd) \ 522 PGM_MODE_CREATE_GRANULE_SZ(false, a_fEpd), \ 523 PGM_MODE_CREATE_GRANULE_SZ(true, a_fEpd) 524 525 /* Recursive expansion for the win, this will blow up to 128 entries covering all possible modes. */ 526 PGM_MODE_CREATE_TBI(false), 527 PGM_MODE_CREATE_TBI(true) 528 529 #undef PGM_MODE_CREATE_TBI 530 #undef PGM_MODE_CREATE_GRANULE_SZ 531 #undef PGM_MODE_CREATE_LOOKUP_LVL 532 #undef PGM_MODE_CREATE_TTBR 533 #undef PGM_MODE_CREATE_EX 534 }; 535 536 537 template<uint8_t a_offTsz, uint8_t a_offTg, uint8_t a_offTbi, uint8_t a_offEpd, bool a_fTtbr0> 538 DECLINLINE(uintptr_t) pgmR3DeduceTypeFromTcr(uint64_t u64RegSctlr, uint64_t u64RegTcr, uint64_t *pfInitialLookupMask) 539 { 540 uintptr_t idxNewGst = 0; 541 542 /* 543 * MMU enabled at all? 544 * Technically this is incorrect as we use ARMV8_SCTLR_EL1_M regardless of the EL but the bit is the same 545 * for all exception levels. 546 */ 547 if (u64RegSctlr & ARMV8_SCTLR_EL1_M) 548 { 549 uint64_t const u64Tsz = (u64RegTcr >> a_offTsz) & 0x1f; 550 uint64_t const u64Tg = (u64RegTcr >> a_offTg) & 0x3; 551 bool const fTbi = RT_BOOL(u64RegTcr & RT_BIT_64(a_offTbi)); 552 bool const fEpd = RT_BOOL(u64RegTcr & RT_BIT_64(a_offEpd)); 553 554 /* 555 * From: https://github.com/codingbelief/arm-architecture-reference-manual-for-armv8-a/blob/master/en/chapter_d4/d42_2_controlling_address_translation_stages.md 556 * For all translation stages 557 * The maximum TxSZ value is 39. 
If TxSZ is programmed to a value larger than 39 then it is IMPLEMENTATION DEFINED whether: 558 * - The implementation behaves as if the field is programmed to 39 for all purposes other than reading back the value of the field. 559 * - Any use of the TxSZ value generates a Level 0 Translation fault for the stage of translation at which TxSZ is used. 560 * 561 * For a stage 1 translation 562 * The minimum TxSZ value is 16. If TxSZ is programmed to a value smaller than 16 then it is IMPLEMENTATION DEFINED whether: 563 * - The implementation behaves as if the field were programmed to 16 for all purposes other than reading back the value of the field. 564 * - Any use of the TxSZ value generates a stage 1 Level 0 Translation fault. 565 * 566 * We currently choose the former for both. 567 */ 568 uint64_t uLookupLvl; 569 if (/*u64Tsz >= 16 &&*/ u64Tsz <= 24) 570 { 571 uLookupLvl = 0; 572 if (u64Tsz >= 16) 573 *pfInitialLookupMask = RT_BIT_64(24 - u64Tsz + 1) - 1; 574 else 575 *pfInitialLookupMask = RT_BIT_64(24 - 16 + 1) - 1; 576 } 577 else if (u64Tsz >= 25 && u64Tsz <= 33) 578 { 579 uLookupLvl = 1; 580 *pfInitialLookupMask = RT_BIT_64(33 - u64Tsz + 1) - 1; 581 } 582 else /*if (u64Tsz >= 34 && u64Tsz <= 39)*/ 583 { 584 uLookupLvl = 2; 585 if (u64Tsz <= 39) 586 *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1; 587 else 588 *pfInitialLookupMask = RT_BIT_64(39 - 39 + 1) - 1; 589 } 590 591 /* Build the index into the PGM mode callback table for the given config. */ 592 idxNewGst = PGM_MODE_TYPE_CREATE(a_fTtbr0, uLookupLvl, u64Tg, fTbi, fEpd); 593 } 594 else 595 idxNewGst = PGM_TYPE_NONE; 596 597 return idxNewGst; 598 }
Note:
See TracChangeset
for help on using the changeset viewer.