VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h@ 108910

Last change on this file since 108910 was 108910, checked in by vboxsync, 4 weeks ago

VMM/testcase/tstPGMAllGst-armv8.cpp: Add support for testing QueryPageFast callbacks, bugref:10388

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 22.9 KB
Line 
1/* $Id: PGMAllGst-armv8.cpp.h 108910 2025-04-09 09:08:53Z vboxsync $ */
2/** @file
3 * PGM - Page Manager, ARMv8 Guest Paging Template - All context code.
4 */
5
6/*
7 * Copyright (C) 2023-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*
30 *
31 * Mode criteria:
32 * - MMU enabled/disabled.
33 * - TCR_EL1.TG0 (granule size for TTBR0_EL1).
34 * - TCR_EL1.TG1 (granule size for TTBR1_EL1).
35 * - TCR_EL1.T0SZ (address space size for TTBR0_EL1).
36 * - TCR_EL1.T1SZ (address space size for TTBR1_EL1).
37 * - TCR_EL1.IPS (intermediate physical address size).
38 * - TCR_EL1.TBI0 (ignore top address byte for TTBR0_EL1).
39 * - TCR_EL1.TBI1 (ignore top address byte for TTBR1_EL1).
 * - TCR_EL1.HPD0 (hierarchical permission disables for TTBR0_EL1).
 * - TCR_EL1.HPD1 (hierarchical permission disables for TTBR1_EL1).
42 * - More ?
43 *
44 * Other relevant modifiers:
45 * - TCR_EL1.HA - hardware access bit.
46 * - TCR_EL1.HD - hardware dirty bit.
47 * - ++
48 *
49 * Each privilege EL (1,2,3) has their own TCR_ELx and TTBR[01]_ELx registers,
50 * so they should all have their own separate modes. To make it simpler,
51 * why not do a separate mode for TTBR0_ELx and one for TTBR1_ELx. Top-level
 * functions determine which of the roots to use and call template (C++)
 * functions that take it from there. Using the preprocessor function template
54 * approach is _not_ desirable here.
55 *
56 */
57
58
59/*
60 * Common helpers.
61 * Common helpers.
62 * Common helpers.
63 */
64
65DECLINLINE(int) pgmGstWalkReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
66{
67 NOREF(pVCpu);
68 pWalk->fNotPresent = true;
69 pWalk->uLevel = uLevel;
70 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT
71 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
72 return VERR_PAGE_TABLE_NOT_PRESENT;
73}
74
75DECLINLINE(int) pgmGstWalkReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel, int rc)
76{
77 AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); NOREF(rc); NOREF(pVCpu);
78 pWalk->fBadPhysAddr = true;
79 pWalk->uLevel = uLevel;
80 pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS
81 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
82 return VERR_PAGE_TABLE_NOT_PRESENT;
83}
84
85
86DECLINLINE(int) pgmGstWalkReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
87{
88 NOREF(pVCpu);
89 pWalk->fRsvdError = true;
90 pWalk->uLevel = uLevel;
91 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS
92 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
93 return VERR_PAGE_TABLE_NOT_PRESENT;
94}
95
96
97DECLINLINE(int) pgmGstWalkFastReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
98{
99 RT_NOREF(pVCpu);
100 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
101 return VERR_PAGE_TABLE_NOT_PRESENT;
102}
103
104
105DECLINLINE(int) pgmGstWalkFastReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel, int rc)
106{
107 AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); RT_NOREF(pVCpu, rc);
108 pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
109 return VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS;
110}
111
112
113DECLINLINE(int) pgmGstWalkFastReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
114{
115 RT_NOREF(pVCpu);
116 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
117 return VERR_RESERVED_PAGE_TABLE_BITS;
118}
119
120
121/*
122 * Special no paging variant.
123 * Special no paging variant.
124 * Special no paging variant.
125 */
126
127static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
128{
129 RT_NOREF(pVCpu);
130
131 RT_ZERO(*pWalk);
132 pWalk->fSucceeded = true;
133 pWalk->GCPtr = GCPtr;
134 pWalk->GCPhys = GCPtr & ~(RTGCPHYS)GUEST_PAGE_OFFSET_MASK;
135 pWalk->fEffective = PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_PGCS_MASK
136 | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK | PGM_PTATTRS_UGCS_MASK;
137 return VINF_SUCCESS;
138}
139
140
141static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
142{
143 RT_NOREF(pVCpu, fFlags);
144
145 pWalk->GCPtr = GCPtr;
146 pWalk->GCPhys = GCPtr;
147 pWalk->GCPhysNested = 0;
148 pWalk->fInfo = PGM_WALKINFO_SUCCEEDED;
149 pWalk->fFailed = PGM_WALKFAIL_SUCCESS;
150 pWalk->fEffective = PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_PGCS_MASK
151 | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK | PGM_PTATTRS_UGCS_MASK;
152 return VINF_SUCCESS;
153}
154
155
156static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
157{
158 /* Ignore. */
159 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
160 return VINF_SUCCESS;
161}
162
163
164static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
165{
166 RT_NOREF(pVCpu, GCPtr, pWalk);
167 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
168 return VERR_PGM_NOT_USED_IN_MODE;
169}
170
171
172static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneEnter)(PVMCPUCC pVCpu)
173{
174 /* Nothing to do. */
175 RT_NOREF(pVCpu);
176 return VINF_SUCCESS;
177}
178
179
180static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneExit)(PVMCPUCC pVCpu)
181{
182 /* Nothing to do. */
183 RT_NOREF(pVCpu);
184 return VINF_SUCCESS;
185}
186
187
188/*
189 * Template variants for actual paging modes.
190 * Template variants for actual paging modes.
191 * Template variants for actual paging modes.
192 */
/**
 * Common worker for walking the guest (stage 1) translation tables.
 *
 * The shift values used (39/30/21/12, nine index bits per level) correspond to
 * the 4KiB granule layout; a_GranuleSz is not referenced in this body.
 * NOTE(review): effective permission attributes (pWalk->fEffective) are not
 * filled in by this worker - confirm callers don't rely on them yet.
 *
 * @returns VBox status code (VINF_SUCCESS, or VERR_PAGE_TABLE_NOT_PRESENT via
 *          the pgmGstWalkReturn* helpers with pWalk->fFailed detailing why).
 * @param   pVCpu       The cross context virtual CPU structure.
 * @param   GCPtr       The guest virtual address to translate.
 * @param   pWalk       Where to return the walk result.
 * @param   pGstWalk    Detailed walk output; currently unused.
 *
 * @tparam  a_fTtbr0            True for the TTBR0_ELx regime, false for TTBR1_ELx.
 * @tparam  a_InitialLookupLvl  First table level of the walk (0..2; 3 is an
 *                              invalid filler instantiation, see below).
 * @tparam  a_GranuleSz         Granule size encoding (not referenced here).
 * @tparam  a_fTbi              Top-byte-ignore setting (not referenced here).
 * @tparam  a_fEpd              EPD/walk-disable setting (not referenced here).
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
DECL_FORCE_INLINE(int) pgmGstWalkWorker(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
{
    RT_NOREF(pGstWalk); /** @todo */

    /*
     * Initial lookup level 3 is not valid and only instantiated because we need two
     * bits for the lookup level when creating the index and have to fill the slots.
     */
    if RT_CONSTEXPR_IF(a_InitialLookupLvl == 3)
    {
        AssertReleaseFailed();
        return VERR_PGM_MODE_IPE;
    }
    else
    {
        uint8_t const bEl = CPUMGetGuestEL(pVCpu);

        /* The initial-level index mask is precomputed per exception level (depends on TxSZ). */
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        /* Start of the walk: effective translation table base for this address. */
        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        uint64_t *pu64Pt = NULL;
        uint64_t uPt;
        int rc;
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == 0)
        {
            /* Level 0: index with VA bits 47:39 (clipped by the initial lookup mask). */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 0, rc);

            uPt = pu64Pt[(GCPtr >> 39) & fLookupMask];
            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 0);

            /* Only table descriptors are valid at level 0. */
            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ }
            else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 0); /** @todo Only supported if TCR_EL1.DS is set. */

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000)); /* next table: descriptor bits 47:12 */
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 1)
        {
            /* Level 1: index with VA bits 38:30; may terminate in a 1GiB block. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 1, rc);

            uPt = pu64Pt[(GCPtr >> 30) & fLookupMask];
            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 1);

            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor (1G page). */
                pWalk->GCPtr = GCPtr;
                pWalk->fSucceeded = true;
                pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xffffc0000000)) | (GCPtr & (RTGCPTR)(_1G - 1));
                pWalk->fGigantPage = true;
                return VINF_SUCCESS;
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 2)
        {
            /* Level 2: index with VA bits 29:21; may terminate in a 2MiB block. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 2, rc);

            uPt = pu64Pt[(GCPtr >> 21) & fLookupMask];
            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 2);

            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor (2M page). */
                pWalk->GCPtr = GCPtr;
                pWalk->fSucceeded = true;
                pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xffffffe00000)) | (GCPtr & (RTGCPTR)(_2M - 1));
                pWalk->fBigPage = true;
                return VINF_SUCCESS;
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        AssertCompile(a_InitialLookupLvl <= 3);

        /* Next level.  Level 3: index with VA bits 20:12, yielding a 4KiB page. */
        rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 3, rc);

        uPt = pu64Pt[(GCPtr & UINT64_C(0x1ff000)) >> 12];
        if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ }
        else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 3);

        if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ }
        else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 3); /* No block descriptors at level 3. */

        pWalk->GCPtr = GCPtr;
        pWalk->fSucceeded = true;
        pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000)) | (GCPtr & (RTGCPTR)(_4K - 1));
        return VINF_SUCCESS;
    }
}
311
312
313template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
314static PGM_CTX_DECL(int) PGM_CTX(pgm,GstGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
315{
316 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>(pVCpu, GCPtr, pWalk, NULL /*pGstWalk*/);
317}
318
319
/**
 * QueryPageFast callback: lockless walk of the guest translation tables.
 *
 * Mirrors pgmGstWalkWorker but uses pgmPhysGCPhys2CCPtrLockless for the table
 * mappings and reports results via PGMPTWALKFAST (fInfo flags instead of the
 * fSucceeded/fBigPage booleans).  NOTE(review): fFlags is ignored and no
 * access-permission checks are performed yet (see the @todo below); effective
 * attributes are likewise not filled in.
 *
 * @returns VBox status code (VINF_SUCCESS, or the pgmGstWalkFastReturn*
 *          helper statuses with pWalk->fFailed set).
 * @param   pVCpu   The cross context virtual CPU structure.
 * @param   GCPtr   The guest virtual address to translate.
 * @param   fFlags  PGMQPAGE_F_XXX query flags.  Unused so far.
 * @param   pWalk   Where to return the fast walk result.
 *
 * @tparam  a_fTtbr0            True for the TTBR0_ELx regime, false for TTBR1_ELx.
 * @tparam  a_InitialLookupLvl  First table level of the walk (0..2; 3 is an
 *                              invalid filler instantiation, see below).
 * @tparam  a_GranuleSz         Granule size encoding (not referenced here).
 * @tparam  a_fTbi              Top-byte-ignore setting (not referenced here).
 * @tparam  a_fEpd              EPD/walk-disable setting (not referenced here).
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
static PGM_CTX_DECL(int) PGM_CTX(pgm,GstQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
{
    RT_NOREF(fFlags); /** @todo */

    /*
     * Initial lookup level 3 is not valid and only instantiated because we need two
     * bits for the lookup level when creating the index and have to fill the slots.
     */
    if RT_CONSTEXPR_IF(a_InitialLookupLvl == 3)
    {
        AssertReleaseFailed();
        return VERR_PGM_MODE_IPE;
    }
    else
    {
        uint8_t const bEl = CPUMGetGuestEL(pVCpu);

        /* The initial-level index mask is precomputed per exception level (depends on TxSZ). */
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        /* Start of the walk: effective translation table base for this address. */
        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        uint64_t *pu64Pt = NULL;
        uint64_t uPt;
        int rc;
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == 0)
        {
            /* Level 0: index with VA bits 47:39 (clipped by the initial lookup mask). */
            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, 0, rc);

            uPt = pu64Pt[(GCPtr >> 39) & fLookupMask];
            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 0);

            /* Only table descriptors are valid at level 0. */
            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ }
            else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, 0); /** @todo Only supported if TCR_EL1.DS is set. */

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000)); /* next table: descriptor bits 47:12 */
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 1)
        {
            /* Level 1: index with VA bits 38:30; may terminate in a 1GiB block. */
            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, 1, rc);

            uPt = pu64Pt[(GCPtr >> 30) & fLookupMask];
            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 1);

            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor (1G page). */
                pWalk->GCPtr = GCPtr;
                pWalk->fInfo = PGM_WALKINFO_SUCCEEDED | PGM_WALKINFO_GIGANTIC_PAGE;
                pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xffffc0000000)) | (GCPtr & (RTGCPTR)(_1G - 1));
                return VINF_SUCCESS;
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 2)
        {
            /* Level 2: index with VA bits 29:21; may terminate in a 2MiB block. */
            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, 2, rc);

            uPt = pu64Pt[(GCPtr >> 21) & fLookupMask];
            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 2);

            if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor (2M page). */
                pWalk->GCPtr = GCPtr;
                pWalk->fInfo = PGM_WALKINFO_SUCCEEDED | PGM_WALKINFO_BIG_PAGE;
                pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xffffffe00000)) | (GCPtr & (RTGCPTR)(_2M - 1));
                return VINF_SUCCESS;
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        AssertCompile(a_InitialLookupLvl <= 3);

        /* Next level.  Level 3: index with VA bits 20:12, yielding a 4KiB page. */
        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pu64Pt);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, 3, rc);

        uPt = pu64Pt[(GCPtr & UINT64_C(0x1ff000)) >> 12];
        if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_VALID) { /* probable */ }
        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 3);

        if (uPt & ARMV8_VMSA64_TBL_ENTRY_F_TBL_OR_PG) { /* probable */ }
        else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, 3); /* No block descriptors at level 3. */

        pWalk->GCPtr = GCPtr;
        pWalk->fInfo = PGM_WALKINFO_SUCCEEDED;
        pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000)) | (GCPtr & (RTGCPTR)(_4K - 1));
        return VINF_SUCCESS;
    }
}
436
437
438template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
439static PGM_CTX_DECL(int) PGM_CTX(pgm,GstModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
440{
441 /** @todo Ignore for now. */
442 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
443 return VINF_SUCCESS;
444}
445
446
447template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
448static PGM_CTX_DECL(int) PGM_CTX(pgm,GstWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
449{
450 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
451 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>(pVCpu, GCPtr, pWalk, pGstWalk);
452}
453
454
455template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
456static PGM_CTX_DECL(int) PGM_CTX(pgm,GstEnter)(PVMCPUCC pVCpu)
457{
458 /* Nothing to do for now. */
459 RT_NOREF(pVCpu);
460 return VINF_SUCCESS;
461}
462
463
464template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
465static PGM_CTX_DECL(int) PGM_CTX(pgm,GstExit)(PVMCPUCC pVCpu)
466{
467 /* Nothing to do for now. */
468 RT_NOREF(pVCpu);
469 return VINF_SUCCESS;
470}
471
472
/**
 * Guest mode data array.
 *
 * Entry 0 is an invalid filler and entry 1 the MMU-off (PGM_TYPE_NONE) mode;
 * the remaining entries are template instantiations covering every combination
 * of TTBR selector, initial lookup level, granule size, TBI and EPD.  The
 * index is produced by PGM_MODE_TYPE_CREATE (used by pgmR3DeduceTypeFromTcr).
 */
PGMMODEDATAGST const g_aPgmGuestModeData[PGM_GUEST_MODE_DATA_ARRAY_SIZE] =
{
    { UINT32_MAX, NULL, NULL, NULL, NULL, NULL }, /* 0 */
    {
        PGM_TYPE_NONE,
        PGM_CTX(pgm,GstNoneGetPage),
        PGM_CTX(pgm,GstNoneQueryPageFast),
        PGM_CTX(pgm,GstNoneModifyPage),
        PGM_CTX(pgm,GstNoneWalk),
        PGM_CTX(pgm,GstNoneEnter),
        PGM_CTX(pgm,GstNoneExit),
    },

/** Builds the mode table index from the paging configuration.  The +2 skips
 * the invalid and NONE entries above.  Deliberately not #undef'ed below as
 * pgmR3DeduceTypeFromTcr uses it too. */
#define PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \
    (2 + (  (a_fEpd ? RT_BIT_32(6) : 0) \
          | (a_fTbi ? RT_BIT_32(5) : 0) \
          | (a_GranuleSz << 3) \
          | (a_InitialLookupLvl << 1) \
          | (a_fTtbr0 ? RT_BIT_32(0) : 0) ))

/** Emits one table entry: the mode type plus the template instantiations of
 * all six callbacks for the given configuration. */
#define PGM_MODE_CREATE_EX(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \
    { \
        PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd), \
        PGM_CTX(pgm,GstGetPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstQueryPageFast)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstModifyPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstWalk)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstEnter)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstExit)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd> \
    }

/** Expands to the TTBR1 (false) and TTBR0 (true) entries, in index order. */
#define PGM_MODE_CREATE_TTBR(a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \
    PGM_MODE_CREATE_EX(false, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd), \
    PGM_MODE_CREATE_EX(true, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd)

/** Expands to all four initial lookup levels (3 is an invalid filler slot). */
#define PGM_MODE_CREATE_LOOKUP_LVL(a_GranuleSz, a_fTbi, a_fEpd) \
    PGM_MODE_CREATE_TTBR(0, a_GranuleSz, a_fTbi, a_fEpd ), \
    PGM_MODE_CREATE_TTBR(1, a_GranuleSz, a_fTbi, a_fEpd ), \
    PGM_MODE_CREATE_TTBR(2, a_GranuleSz, a_fTbi, a_fEpd ), \
    PGM_MODE_CREATE_TTBR(3, a_GranuleSz, a_fTbi, a_fEpd ) /* Invalid */

/** Expands to all four granule size encodings (TG1 field order). */
#define PGM_MODE_CREATE_GRANULE_SZ(a_fTbi, a_fEpd) \
    PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_INVALID, a_fTbi, a_fEpd), \
    PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_16KB, a_fTbi, a_fEpd), \
    PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_4KB, a_fTbi, a_fEpd), \
    PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_64KB, a_fTbi, a_fEpd)

/** Expands to the TBI-off and TBI-on halves. */
#define PGM_MODE_CREATE_TBI(a_fEpd) \
    PGM_MODE_CREATE_GRANULE_SZ(false, a_fEpd), \
    PGM_MODE_CREATE_GRANULE_SZ(true, a_fEpd)

    /* Recursive expansion for the win, this will blow up to 128 entries covering all possible modes. */
    PGM_MODE_CREATE_TBI(false),
    PGM_MODE_CREATE_TBI(true)

#undef PGM_MODE_CREATE_TBI
#undef PGM_MODE_CREATE_GRANULE_SZ
#undef PGM_MODE_CREATE_LOOKUP_LVL
#undef PGM_MODE_CREATE_TTBR
#undef PGM_MODE_CREATE_EX
};
537
538
/**
 * Deduces the guest mode table index and initial lookup mask from SCTLR/TCR.
 *
 * @returns Index into g_aPgmGuestModeData for the configured regime
 *          (PGM_TYPE_NONE when the MMU is disabled).
 * @param   u64RegSctlr          The SCTLR_ELx value (only the M bit is used).
 * @param   u64RegTcr            The TCR_ELx value.
 * @param   pfInitialLookupMask  Where to return the index mask for the
 *                               initial table lookup, derived from TxSZ.
 *                               Only written when the MMU is enabled.
 *
 * @tparam  a_offTsz   Bit offset of the TxSZ field in TCR for this TTBR.
 * @tparam  a_offTg    Bit offset of the TGx (granule size) field.
 * @tparam  a_offTbi   Bit offset of the TBIx flag.
 * @tparam  a_offEpd   Bit offset of the EPDx flag.
 * @tparam  a_fTtbr0   True when deducing for TTBR0, false for TTBR1.
 */
template<uint8_t a_offTsz, uint8_t a_offTg, uint8_t a_offTbi, uint8_t a_offEpd, bool a_fTtbr0>
DECLINLINE(uintptr_t) pgmR3DeduceTypeFromTcr(uint64_t u64RegSctlr, uint64_t u64RegTcr, uint64_t *pfInitialLookupMask)
{
    uintptr_t idxNewGst = 0;

    /*
     * MMU enabled at all?
     * Technically this is incorrect as we use ARMV8_SCTLR_EL1_M regardless of the EL but the bit is the same
     * for all exception levels.
     */
    if (u64RegSctlr & ARMV8_SCTLR_EL1_M)
    {
        uint64_t const u64Tsz = (u64RegTcr >> a_offTsz) & 0x1f;                 /* Address space size: 2^(64 - TxSZ). */
        uint64_t const u64Tg  = (u64RegTcr >> a_offTg) & 0x3;                   /* Granule size encoding. */
        bool const fTbi = RT_BOOL(u64RegTcr & RT_BIT_64(a_offTbi));             /* Top byte ignore. */
        bool const fEpd = RT_BOOL(u64RegTcr & RT_BIT_64(a_offEpd));             /* Walk disable. */

        /*
         * From: https://github.com/codingbelief/arm-architecture-reference-manual-for-armv8-a/blob/master/en/chapter_d4/d42_2_controlling_address_translation_stages.md
         * For all translation stages
         * The maximum TxSZ value is 39. If TxSZ is programmed to a value larger than 39 then it is IMPLEMENTATION DEFINED whether:
         * - The implementation behaves as if the field is programmed to 39 for all purposes other than reading back the value of the field.
         * - Any use of the TxSZ value generates a Level 0 Translation fault for the stage of translation at which TxSZ is used.
         *
         * For a stage 1 translation
         * The minimum TxSZ value is 16. If TxSZ is programmed to a value smaller than 16 then it is IMPLEMENTATION DEFINED whether:
         * - The implementation behaves as if the field were programmed to 16 for all purposes other than reading back the value of the field.
         * - Any use of the TxSZ value generates a stage 1 Level 0 Translation fault.
         *
         * We currently choose the former for both.
         */

        /* Map TxSZ to the initial lookup level (16..24 -> 0, 25..33 -> 1, 34..39 -> 2)
           and the number of index bits used at that level, clamping as described above. */
        uint64_t uLookupLvl;
        if (/*u64Tsz >= 16 &&*/ u64Tsz <= 24)
        {
            uLookupLvl = 0;
            if (u64Tsz >= 16)
                *pfInitialLookupMask = RT_BIT_64(24 - u64Tsz + 1) - 1;
            else
                *pfInitialLookupMask = RT_BIT_64(24 - 16 + 1) - 1;              /* TxSZ clamped to 16. */
        }
        else if (u64Tsz >= 25 && u64Tsz <= 33)
        {
            uLookupLvl = 1;
            *pfInitialLookupMask = RT_BIT_64(33 - u64Tsz + 1) - 1;
        }
        else /*if (u64Tsz >= 34 && u64Tsz <= 39)*/
        {
            uLookupLvl = 2;
            if (u64Tsz <= 39)
                *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1;
            else
                *pfInitialLookupMask = RT_BIT_64(39 - 39 + 1) - 1;              /* TxSZ clamped to 39. */
        }

        /* Build the index into the PGM mode callback table for the given config. */
        idxNewGst = PGM_MODE_TYPE_CREATE(a_fTtbr0, uLookupLvl, u64Tg, fTbi, fEpd);
    }
    else
        idxNewGst = PGM_TYPE_NONE;

    return idxNewGst;
}
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette