VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h@ 108926

Last change on this file since 108926 was 108926, checked in by vboxsync, 4 weeks ago

VMM/testcase/tstPGMAllGst-armv8.cpp: Unify the testing config for the GetPage() and QueryPageFast() variants, bugref:10388

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 39.8 KB
Line 
1/* $Id: PGMAllGst-armv8.cpp.h 108926 2025-04-10 09:52:39Z vboxsync $ */
2/** @file
3 * PGM - Page Manager, ARMv8 Guest Paging Template - All context code.
4 */
5
6/*
7 * Copyright (C) 2023-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*
30 *
31 * Mode criteria:
32 * - MMU enabled/disabled.
33 * - TCR_EL1.TG0 (granule size for TTBR0_EL1).
34 * - TCR_EL1.TG1 (granule size for TTBR1_EL1).
35 * - TCR_EL1.T0SZ (address space size for TTBR0_EL1).
36 * - TCR_EL1.T1SZ (address space size for TTBR1_EL1).
37 * - TCR_EL1.IPS (intermediate physical address size).
38 * - TCR_EL1.TBI0 (ignore top address byte for TTBR0_EL1).
39 * - TCR_EL1.TBI1 (ignore top address byte for TTBR1_EL1).
40 * - TCR_EL1.HPD0 (hierarchical permission disables for TTBR0_EL1).
41 * - TCR_EL1.HPD1 (hierarchical permission disables for TTBR1_EL1).
42 * - More ?
43 *
44 * Other relevant modifiers:
45 * - TCR_EL1.HA - hardware access bit.
46 * - TCR_EL1.HD - hardware dirty bit.
47 * - ++
48 *
49 * Each privilege EL (1,2,3) has their own TCR_ELx and TTBR[01]_ELx registers,
50 * so they should all have their own separate modes. To make it simpler,
51 * why not do a separate mode for TTBR0_ELx and one for TTBR1_ELx. Top-level
52 * functions determine which of the roots to use and call template (C++)
53 * functions that takes it from there. Using the preprocessor function template
54 * approach is _not_ desirable here.
55 *
56 */
57
58
59/*
60 * Common helpers.
61 * Common helpers.
62 * Common helpers.
63 */
64
65DECLINLINE(int) pgmGstWalkReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
66{
67 NOREF(pVCpu);
68 pWalk->fNotPresent = true;
69 pWalk->uLevel = uLevel;
70 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT
71 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
72 return VERR_PAGE_TABLE_NOT_PRESENT;
73}
74
/**
 * Records a "bad physical address" failure at the given table level.
 *
 * @returns VERR_PAGE_TABLE_NOT_PRESENT.
 *          NOTE(review): the fast variant (pgmGstWalkFastReturnBadPhysAddr)
 *          returns VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS instead — presumably
 *          intentional legacy behaviour here; confirm with callers.
 * @param   pVCpu   The cross context virtual CPU structure (unused).
 * @param   pWalk   The walk result to update.
 * @param   uLevel  The translation table level the walk stopped at.
 * @param   rc      Status from the table mapping; asserted to be
 *                  VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, otherwise unused.
 */
DECLINLINE(int) pgmGstWalkReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel, int rc)
{
    AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); NOREF(rc); NOREF(pVCpu);
    pWalk->fBadPhysAddr = true;
    pWalk->uLevel       = uLevel;
    pWalk->fFailed      = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS
                        | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
    return VERR_PAGE_TABLE_NOT_PRESENT;
}
84
85
86DECLINLINE(int) pgmGstWalkReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
87{
88 NOREF(pVCpu);
89 pWalk->fRsvdError = true;
90 pWalk->uLevel = uLevel;
91 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS
92 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
93 return VERR_PAGE_TABLE_NOT_PRESENT;
94}
95
96
97DECLINLINE(int) pgmGstWalkFastReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
98{
99 RT_NOREF(pVCpu);
100 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
101 return VERR_PAGE_TABLE_NOT_PRESENT;
102}
103
104
/**
 * Fast-walk variant: flags a "bad physical address" failure at the given level.
 *
 * @returns VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS (unlike the non-fast variant,
 *          which returns VERR_PAGE_TABLE_NOT_PRESENT).
 * @param   pVCpu   The cross context virtual CPU structure (unused).
 * @param   pWalk   The fast walk result to update.
 * @param   uLevel  The translation table level the walk stopped at.
 * @param   rc      Status from the table mapping; asserted to be
 *                  VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, otherwise unused.
 */
DECLINLINE(int) pgmGstWalkFastReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel, int rc)
{
    AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); RT_NOREF(pVCpu, rc);
    pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
    return VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS;
}
111
112
113DECLINLINE(int) pgmGstWalkFastReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
114{
115 RT_NOREF(pVCpu);
116 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
117 return VERR_RESERVED_PAGE_TABLE_BITS;
118}
119
120
121/*
122 * Special no paging variant.
123 * Special no paging variant.
124 * Special no paging variant.
125 */
126
127static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
128{
129 RT_NOREF(pVCpu);
130
131 RT_ZERO(*pWalk);
132 pWalk->fSucceeded = true;
133 pWalk->GCPtr = GCPtr;
134 pWalk->GCPhys = GCPtr;
135 pWalk->fEffective = PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_PGCS_MASK
136 | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK | PGM_PTATTRS_UGCS_MASK;
137 return VINF_SUCCESS;
138}
139
140
/**
 * No-paging QueryPageFast: identity-maps the address and reports full
 * privileged and unprivileged R/W/X (+GCS) permissions.
 *
 * Note: unlike GstNoneGetPage this does not zero *pWalk; all relevant fields
 * are assigned individually below.
 *
 * @returns VINF_SUCCESS.
 * @param   pVCpu   The cross context virtual CPU structure (unused).
 * @param   GCPtr   The guest virtual address to translate.
 * @param   fFlags  PGMQPAGE_F_XXX access flags (unused — everything allowed).
 * @param   pWalk   Where to store the fast walk result.
 */
static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
{
    RT_NOREF(pVCpu, fFlags);

    pWalk->GCPtr        = GCPtr;
    pWalk->GCPhys       = GCPtr;    /* Identity mapping. */
    pWalk->GCPhysNested = 0;
    pWalk->fInfo        = PGM_WALKINFO_SUCCEEDED;
    pWalk->fFailed      = PGM_WALKFAIL_SUCCESS;
    pWalk->fEffective   = PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_PGCS_MASK
                        | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK | PGM_PTATTRS_UGCS_MASK;
    return VINF_SUCCESS;
}
154
155
156static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
157{
158 /* Ignore. */
159 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
160 return VINF_SUCCESS;
161}
162
163
164static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
165{
166 RT_NOREF(pVCpu, GCPtr, pWalk);
167 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
168 return VERR_PGM_NOT_USED_IN_MODE;
169}
170
171
172static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneEnter)(PVMCPUCC pVCpu)
173{
174 /* Nothing to do. */
175 RT_NOREF(pVCpu);
176 return VINF_SUCCESS;
177}
178
179
180static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneExit)(PVMCPUCC pVCpu)
181{
182 /* Nothing to do. */
183 RT_NOREF(pVCpu);
184 return VINF_SUCCESS;
185}
186
187
188/*
189 * Template variants for actual paging modes.
190 * Template variants for actual paging modes.
191 * Template variants for actual paging modes.
192 */
193
/*
 * Descriptor flags to page table attribute flags mapping.
 */
/** Effective page attributes, indexed by (UXN << 3) | (PXN << 2) | AP[2:1]
 *  from the page/block descriptor (see pgmGstWalkWorkerSetEffective and
 *  pgmGstQueryPageCheckPermissions which build that index). */
static const PGMPTATTRS s_aEffective[] =
{
    /* UXN PXN AP[2] AP[1] */
    /*  0   0   0    0    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   0   0    1    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   0   1    0    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   0   1    1    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,

    /*  0   1   0    0    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   1   0    1    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   1   1    0    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   1   1    1    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UX_MASK,

    /*  1   0   0    0    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK,
    /*  1   0   0    1    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_PX_MASK,
    /*  1   0   1    0    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PX_MASK,
    /*  1   0   1    1    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_PX_MASK,

    /*  1   1   0    0    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK,
    /*  1   1   0    1    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK ,
    /*  1   1   1    0    */ PGM_PTATTRS_PR_MASK,
    /*  1   1   1    1    */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK,
};
220
221
/**
 * Derives the effective page attributes from a page/block descriptor and
 * stores them in pWalk->fEffective.
 *
 * @returns VINF_SUCCESS (always; returned so callers can tail-return it).
 * @param   pWalk   The walk result to update.
 * @param   Desc    The last-level page or block descriptor.
 */
DECL_FORCE_INLINE(int) pgmGstWalkWorkerSetEffective(PPGMPTWALK pWalk, ARMV8VMSA64DESC Desc)
{
    /* Index layout: bits 0-1 = AP[2:1], bit 2 = PXN, bit 3 = UXN. */
    uint32_t const idxPerm = RT_BF_GET(Desc, ARMV8_VMSA64_DESC_PG_OR_BLOCK_LATTR_AP)
                           | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN_BIT) << 2
                           | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN_BIT) << 3;

    pWalk->fEffective = s_aEffective[idxPerm];
    return VINF_SUCCESS;
}
231
232
/**
 * Worker for the full guest page table walk (GetPage / Walk variants).
 *
 * Walks the VMSAv8-64 translation tables from the template-selected initial
 * lookup level down to level 3, handling 1G (level 1) and 2M (level 2) block
 * descriptors on the way.
 *
 * NOTE(review): the shift values (39/30/21/12) and address masks below are the
 * 4KiB-granule layout; unlike GstQueryPageFast this worker does not
 * parameterize them on a_GranuleSz — confirm whether other granule sizes are
 * meant to be routed here.
 *
 * @returns VBox status code (VINF_SUCCESS or a walk failure status).
 * @param   pVCpu     The cross context virtual CPU structure.
 * @param   GCPtr     The guest virtual address to translate.
 * @param   pWalk     Where to store the walk result.
 * @param   pGstWalk  Extended walk info; currently unused (see @todo).
 *
 * @tparam  a_fTtbr0            Whether TTBR0 (true) or TTBR1 (false) is used.
 * @tparam  a_InitialLookupLvl  First table level to consult (0..2; 3 is an
 *                              invalid filler instantiation).
 * @tparam  a_GranuleSz         Translation granule size (TCR encoding).
 * @tparam  a_fTbi              Top-byte-ignore enabled.
 * @tparam  a_fEpd              Table walks disabled (EPD bit).
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
DECL_FORCE_INLINE(int) pgmGstWalkWorker(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
{
    RT_NOREF(pGstWalk); /** @todo */

    /*
     * Initial lookup level 3 is not valid and only instantiated because we need two
     * bits for the lookup level when creating the index and have to fill the slots.
     */
    if RT_CONSTEXPR_IF(a_InitialLookupLvl == 3)
    {
        AssertReleaseFailed();
        return VERR_PGM_MODE_IPE;
    }
    else
    {
        uint8_t const bEl = CPUMGetGuestEL(pVCpu);

        /* The initial lookup mask depends on T0SZ/T1SZ and is precomputed per EL. */
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        uint64_t *pu64Pt = NULL;
        uint64_t uPt;
        int rc;
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == 0)
        {
            /* Level 0 lookup. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 0, rc);

            uPt = pu64Pt[(GCPtr >> 39) & fLookupMask];
            if (uPt & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 0);

            /* Level 0 entries must be table descriptors. */
            if (uPt & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 0); /** @todo Only supported if TCR_EL1.DS is set. */

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 1)
        {
            /* Level 1 lookup. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 1, rc);

            uPt = pu64Pt[(GCPtr >> 30) & fLookupMask];
            if (uPt & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 1);

            if (uPt & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor (1G page). */
                pWalk->GCPtr       = GCPtr;
                pWalk->fSucceeded  = true;
                pWalk->GCPhys      = (RTGCPHYS)(uPt & UINT64_C(0xffffc0000000)) | (GCPtr & (RTGCPTR)(_1G - 1));
                pWalk->fGigantPage = true;
                return pgmGstWalkWorkerSetEffective(pWalk, uPt);
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 2)
        {
            /* Level 2 lookup. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 2, rc);

            uPt = pu64Pt[(GCPtr >> 21) & fLookupMask];
            if (uPt & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 2);

            if (uPt & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor (2M page). */
                pWalk->GCPtr      = GCPtr;
                pWalk->fSucceeded = true;
                pWalk->GCPhys     = (RTGCPHYS)(uPt & UINT64_C(0xffffffe00000)) | (GCPtr & (RTGCPTR)(_2M - 1));
                pWalk->fBigPage   = true;
                return pgmGstWalkWorkerSetEffective(pWalk, uPt);
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        AssertCompile(a_InitialLookupLvl <= 3);

        /* Next level. (Level 3, the final page table level.) */
        rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 3, rc);

        uPt = pu64Pt[(GCPtr & UINT64_C(0x1ff000)) >> 12];
        if (uPt & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
        else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 3);

        if (uPt & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
        else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 3); /** No block descriptors. */

        pWalk->GCPtr      = GCPtr;
        pWalk->fSucceeded = true;
        pWalk->GCPhys     = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000)) | (GCPtr & (RTGCPTR)(_4K - 1));
        return pgmGstWalkWorkerSetEffective(pWalk, uPt);
    }
}
351
352
353template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
354static PGM_CTX_DECL(int) PGM_CTX(pgm,GstGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
355{
356 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>(pVCpu, GCPtr, pWalk, NULL /*pGstWalk*/);
357}
358
359
/** Access-check results for privileged reads, indexed by
 *  (UXN << 3) | (PXN << 2) | AP[2:1]; privileged reads always succeed. */
static const PGMWALKFAIL g_aPermPrivRead[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 1 1 */ PGM_WALKFAIL_SUCCESS
};
380
381
/** Access-check results for privileged writes, indexed by
 *  (UXN << 3) | (PXN << 2) | AP[2:1]; AP[2]=1 marks the page read-only. */
static const PGMWALKFAIL g_aPermPrivWrite[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 0 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 0 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 0 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 0 1 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 1 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 0 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 1 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 1 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 1 0 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 1 1 1 1 */ PGM_WALKFAIL_NOT_WRITABLE
};
402
403
/** Access-check results for privileged execution, indexed by
 *  (UXN << 3) | (PXN << 2) | AP[2:1]; PXN=1 forbids privileged execution. */
static const PGMWALKFAIL g_aPermPrivExec[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 0 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 0 1 0 1 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 0 1 1 0 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 0 1 1 1 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 0 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 0 1 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 1 0 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 1 1 */ PGM_WALKFAIL_NOT_EXECUTABLE
};
424
425
/** Access-check results for unprivileged reads, indexed by
 *  (UXN << 3) | (PXN << 2) | AP[2:1]; AP[1]=0 means no EL0 access at all. */
static const PGMWALKFAIL g_aPermUnprivRead[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 0 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 0 1 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 1 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 1 1 1 1 */ PGM_WALKFAIL_SUCCESS
};
446
447
448static const PGMWALKFAIL g_aPermUnprivWrite[] =
449{
450 /* UXN PXN AP[2] AP[1] */
451 /* 0 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
452 /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
453 /* 0 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
454 /* 0 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
455 /* 0 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
456 /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
457 /* 0 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
458 /* 0 1 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
459 /* 1 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
460 /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
461 /* 1 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
462 /* 1 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
463 /* 1 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
464 /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
465 /* 1 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
466 /* 1 1 1 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE
467};
468
469
/** Access-check results for unprivileged execution, indexed by
 *  (UXN << 3) | (PXN << 2) | AP[2:1]; UXN=1 forbids EL0 execution. */
static const PGMWALKFAIL g_aPermUnprivExec[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 0 0 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 0 1 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 0 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 1 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE
};
490
491
/**
 * Checks the requested access (fFlags) against the descriptor's permission
 * bits, filling in pWalk->fEffective and either PGM_WALKINFO_SUCCEEDED or
 * pWalk->fFailed.
 *
 * @returns VINF_SUCCESS if the access is allowed, VERR_ACCESS_DENIED if not,
 *          VERR_PGM_MODE_IPE for invalid flag combinations.
 * @param   pWalk   The fast walk result to update.
 * @param   Desc    The last-level page or block descriptor.
 * @param   fFlags  PGMQPAGE_F_XXX access flags (must be within
 *                  PGMQPAGE_F_VALID_MASK).
 * @param   uLvl    The table level, for the failure code.
 */
DECL_FORCE_INLINE(int) pgmGstQueryPageCheckPermissions(PPGMPTWALKFAST pWalk, ARMV8VMSA64DESC Desc, uint32_t fFlags, uint8_t uLvl)
{
    Assert(!(fFlags & ~PGMQPAGE_F_VALID_MASK));

    /* Selects the permission table by requested access; the fFlags bit layout
       is (user << 3) | (exec << 2) | (write << 1) | read. */
    static const uint32_t *s_apaPerm[] =
    {
        /*      U X W R  */
        /* 0    0 0 0 0 */ &g_aPermPrivRead[0],    /* Don't check or modify anything, this translates to a privileged read */
        /* 1    0 0 0 1 */ &g_aPermPrivRead[0],    /* Privileged read access */
        /* 2    0 0 1 0 */ &g_aPermPrivWrite[0],   /* Privileged write access */
        /* 3    0 0 1 1 */ NULL,                   /* Invalid access flags */
        /* 4    0 1 0 0 */ &g_aPermPrivExec[0],    /* Privileged execute access */
        /* 5    0 1 0 1 */ NULL,                   /* Invalid access flags */
        /* 6    0 1 1 0 */ NULL,                   /* Invalid access flags */
        /* 7    0 1 1 1 */ NULL,                   /* Invalid access flags */

        /* 8    1 0 0 0 */ NULL,                   /* Invalid access flags */
        /* 9    1 0 0 1 */ &g_aPermUnprivRead[0],  /* Unprivileged read access */
        /* 10   1 0 1 0 */ &g_aPermUnprivWrite[0], /* Unprivileged write access */
        /* 11   1 0 1 1 */ NULL,                   /* Invalid access flags */
        /* 12   1 1 0 0 */ &g_aPermUnprivExec[0],  /* Unprivileged execute access */
        /* 13   1 1 0 1 */ NULL,                   /* Invalid access flags */
        /* 14   1 1 1 0 */ NULL,                   /* Invalid access flags */
        /* 15   1 1 1 1 */ NULL,                   /* Invalid access flags */
    };
    Assert(fFlags < RT_ELEMENTS(s_apaPerm));

    const uint32_t *paPerm = s_apaPerm[fFlags];
    AssertReturn(paPerm, VERR_PGM_MODE_IPE);

    /* Index layout: bits 0-1 = AP[2:1], bit 2 = PXN, bit 3 = UXN (same scheme
       as pgmGstWalkWorkerSetEffective / s_aEffective). */
    uint32_t const idxPerm = RT_BF_GET(Desc, ARMV8_VMSA64_DESC_PG_OR_BLOCK_LATTR_AP)
                           | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN_BIT) << 2
                           | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN_BIT) << 3;

    pWalk->fEffective = s_aEffective[idxPerm];

    PGMWALKFAIL const fFailed = paPerm[idxPerm];
    if (fFailed == PGM_WALKFAIL_SUCCESS)
    {
        pWalk->fInfo |= PGM_WALKINFO_SUCCEEDED;
        return VINF_SUCCESS;
    }

    pWalk->fFailed = fFailed | (uLvl << PGM_WALKFAIL_LEVEL_SHIFT);
    return VERR_ACCESS_DENIED;
}
538
539
/**
 * Fast guest page query: walks the VMSAv8-64 translation tables with
 * granule-size-dependent shifts/masks and performs the access permission
 * check requested via fFlags.
 *
 * @returns VBox status code (VINF_SUCCESS, VERR_ACCESS_DENIED or a walk
 *          failure status).
 * @param   pVCpu   The cross context virtual CPU structure.
 * @param   GCPtr   The guest virtual address to translate.
 * @param   fFlags  PGMQPAGE_F_XXX access flags to check.
 * @param   pWalk   Where to store the fast walk result.
 *
 * @tparam  a_fTtbr0            Whether TTBR0 (true) or TTBR1 (false) is used.
 * @tparam  a_InitialLookupLvl  First table level to consult (0..3).
 * @tparam  a_GranuleSz         Translation granule size (TCR TG0/TG1 encoding);
 *                              the INVALID encoding asserts and fails.
 * @tparam  a_fTbi              Top-byte-ignore enabled.
 * @tparam  a_fEpd              Table walks disabled (EPD bit).
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
static PGM_CTX_DECL(int) PGM_CTX(pgm,GstQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
{
    /* This also applies to TG1 granule sizes, as both share the same encoding in TCR. */
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_INVALID == ARMV8_TCR_EL1_AARCH64_TG1_INVALID);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_16KB == ARMV8_TCR_EL1_AARCH64_TG1_16KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_4KB == ARMV8_TCR_EL1_AARCH64_TG1_4KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_64KB == ARMV8_TCR_EL1_AARCH64_TG1_64KB);

    if RT_CONSTEXPR_IF(a_GranuleSz != ARMV8_TCR_EL1_AARCH64_TG0_INVALID)
    {
        /* Granule-size dependent layout parameters, filled in below. */
        uint64_t fLookupMaskFull;       /* Index mask for all levels after the first. */
        RTGCPTR offPageMask;            /* Offset-in-page mask for level 3. */

        RTGCPTR offLvl1BlockMask;       /* Offset mask for a level 1 block; 0 = blocks unsupported. */
        RTGCPTR offLvl2BlockMask;       /* Offset mask for a level 2 block. */

        uint64_t fNextTableOrPageMask;  /* Next-table / page address bits in a descriptor. */
        uint8_t cLvl0Shift;             /* VA shift per level; 0 for cLvl0Shift = no level 0. */
        uint8_t cLvl1Shift;
        uint8_t cLvl2Shift;
        uint8_t cLvl3Shift;

        RTGCPHYS fGCPhysLvl1BlockBase;  /* Physical base bits of a level 1 block descriptor. */
        RTGCPHYS fGCPhysLvl2BlockBase;  /* Physical base bits of a level 2 block descriptor. */

        /** @todo This needs to go into defines in armv8.h if final. */
        if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_4KB)
        {
            fLookupMaskFull      = RT_BIT_64(9) - 1;
            offLvl1BlockMask     = (RTGCPTR)(_1G - 1);
            offLvl2BlockMask     = (RTGCPTR)(_2M - 1);
            offPageMask          = (RTGCPTR)(_4K - 1);
            fNextTableOrPageMask = UINT64_C(0xfffffffff000);
            cLvl0Shift           = 39;
            cLvl1Shift           = 30;
            cLvl2Shift           = 21;
            cLvl3Shift           = 12;
            fGCPhysLvl1BlockBase = UINT64_C(0xffffc0000000);
            fGCPhysLvl2BlockBase = UINT64_C(0xffffffe00000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
        {
            fLookupMaskFull      = RT_BIT_64(11) - 1;
            offLvl1BlockMask     = 0; /** @todo TCR_EL1.DS support. */
            offLvl2BlockMask     = (RTGCPTR)(_32M - 1);
            offPageMask          = (RTGCPTR)(_16K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffffc000);
            cLvl0Shift           = 47;
            cLvl1Shift           = 36;
            cLvl2Shift           = 25;
            cLvl3Shift           = 14;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xfffffe000000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
        {
            Assert(a_InitialLookupLvl > 0);

            fLookupMaskFull      = RT_BIT_64(13) - 1;
            offLvl1BlockMask     = 0; /** @todo FEAT_LPA (RTGCPTR)(4*_1T - 1) */
            offLvl2BlockMask     = (RTGCPTR)(_512M - 1);
            offPageMask          = (RTGCPTR)(_64K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffff0000);
            cLvl0Shift           = 0; /* No Level 0 with 64KiB granules. */
            cLvl1Shift           = 42;
            cLvl2Shift           = 29;
            cLvl3Shift           = 16;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xffffe0000000);
        }

        /* Get the initial lookup mask. */
        uint8_t const bEl = (fFlags & PGMQPAGE_F_USER_MODE) ? 0 : 1; /** @todo EL2 support */
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        PARMV8VMSA64DESC paDesc = NULL;
        ARMV8VMSA64DESC Desc;
        int rc;
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == 0)
        {
            /* Level 0 lookup; table descriptors only. */
            Assert(cLvl0Shift != 0);
            uint8_t const uLvl = 0;

            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl0Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /** @todo Only supported if TCR_EL1.DS is set. */

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 1)
        {
            /* Level 1 lookup; may yield a block descriptor where supported. */
            uint8_t const uLvl = 1;

            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl1Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                if (offLvl1BlockMask != 0)
                {
                    /* Block descriptor. */
                    pWalk->GCPtr  = GCPtr;
                    pWalk->fInfo  = PGM_WALKINFO_GIGANTIC_PAGE;
                    pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl1BlockBase) | (GCPtr & offLvl1BlockMask);
                    return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
                }
                else
                    return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 2)
        {
            /* Level 2 lookup; may yield a block descriptor. */
            uint8_t const uLvl = 2;

            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl2Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor. */
                pWalk->GCPtr  = GCPtr;
                pWalk->fInfo  = PGM_WALKINFO_BIG_PAGE;
                pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl2BlockBase) | (GCPtr & offLvl2BlockMask);
                return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        AssertCompile(a_InitialLookupLvl <= 3);
        uint8_t const uLvl = 3;

        /* Next level. (Level 3, the final page table level.) */
        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl3Shift) & fLookupMask]);
        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
        else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /* No block descriptors. */

        pWalk->GCPtr  = GCPtr;
        pWalk->GCPhys = (RTGCPHYS)(Desc & fNextTableOrPageMask) | (GCPtr & offPageMask);
        return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
    }
    else
        AssertReleaseFailedReturn(VERR_PGM_MODE_IPE);
}
726
727
728template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
729static PGM_CTX_DECL(int) PGM_CTX(pgm,GstModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
730{
731 /** @todo Ignore for now. */
732 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
733 return VINF_SUCCESS;
734}
735
736
737template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
738static PGM_CTX_DECL(int) PGM_CTX(pgm,GstWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
739{
740 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
741 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>(pVCpu, GCPtr, pWalk, pGstWalk);
742}
743
744
745template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
746static PGM_CTX_DECL(int) PGM_CTX(pgm,GstEnter)(PVMCPUCC pVCpu)
747{
748 /* Nothing to do for now. */
749 RT_NOREF(pVCpu);
750 return VINF_SUCCESS;
751}
752
753
754template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
755static PGM_CTX_DECL(int) PGM_CTX(pgm,GstExit)(PVMCPUCC pVCpu)
756{
757 /* Nothing to do for now. */
758 RT_NOREF(pVCpu);
759 return VINF_SUCCESS;
760}
761
762
/**
 * Guest mode data array.
 *
 * Slot 0 is a sentinel, slot 1 the no-paging mode; the remaining 128 slots are
 * generated by the PGM_MODE_CREATE_* macros, one per combination of
 * (TTBR0/1, initial lookup level, granule size, TBI, EPD).
 */
PGMMODEDATAGST const g_aPgmGuestModeData[PGM_GUEST_MODE_DATA_ARRAY_SIZE] =
{
    /* NOTE(review): this initializer lists five NULLs after UINT32_MAX while the
       entry below has six function members; the trailing member is implicitly
       zero-initialized, so this is benign. */
    { UINT32_MAX, NULL, NULL, NULL, NULL, NULL }, /* 0 */
    {
        PGM_TYPE_NONE,
        PGM_CTX(pgm,GstNoneGetPage),
        PGM_CTX(pgm,GstNoneQueryPageFast),
        PGM_CTX(pgm,GstNoneModifyPage),
        PGM_CTX(pgm,GstNoneWalk),
        PGM_CTX(pgm,GstNoneEnter),
        PGM_CTX(pgm,GstNoneExit),
    },

/* Mode index encoding: bit 0 = TTBR0, bits 1-2 = initial lookup level,
   bits 3-4 = granule size, bit 5 = TBI, bit 6 = EPD; +2 skips the two
   fixed entries above. */
#define PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \
        (2 + (  (a_fEpd ? RT_BIT_32(6) : 0) \
              | (a_fTbi ? RT_BIT_32(5) : 0) \
              | (a_GranuleSz << 3) \
              | (a_InitialLookupLvl << 1) \
              | (a_fTtbr0 ? RT_BIT_32(0) : 0) ))

/* One table entry instantiating all guest callbacks for the given mode. */
#define PGM_MODE_CREATE_EX(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \
    { \
        PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd), \
        PGM_CTX(pgm,GstGetPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstQueryPageFast)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstModifyPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstWalk)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstEnter)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>, \
        PGM_CTX(pgm,GstExit)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd> \
    }

#define PGM_MODE_CREATE_TTBR(a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd) \
    PGM_MODE_CREATE_EX(false, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd), \
    PGM_MODE_CREATE_EX(true,  a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd)

#define PGM_MODE_CREATE_LOOKUP_LVL(a_GranuleSz, a_fTbi, a_fEpd) \
    PGM_MODE_CREATE_TTBR(0, a_GranuleSz, a_fTbi, a_fEpd ), \
    PGM_MODE_CREATE_TTBR(1, a_GranuleSz, a_fTbi, a_fEpd ), \
    PGM_MODE_CREATE_TTBR(2, a_GranuleSz, a_fTbi, a_fEpd ), \
    PGM_MODE_CREATE_TTBR(3, a_GranuleSz, a_fTbi, a_fEpd ) /* Invalid */

#define PGM_MODE_CREATE_GRANULE_SZ(a_fTbi, a_fEpd) \
    PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_INVALID, a_fTbi, a_fEpd), \
    PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_16KB,    a_fTbi, a_fEpd), \
    PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_4KB,     a_fTbi, a_fEpd), \
    PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_64KB,    a_fTbi, a_fEpd)

#define PGM_MODE_CREATE_TBI(a_fEpd) \
    PGM_MODE_CREATE_GRANULE_SZ(false, a_fEpd), \
    PGM_MODE_CREATE_GRANULE_SZ(true,  a_fEpd)

    /* Recursive expansion for the win, this will blow up to 128 entries covering all possible modes. */
    PGM_MODE_CREATE_TBI(false),
    PGM_MODE_CREATE_TBI(true)

#undef PGM_MODE_CREATE_TBI
#undef PGM_MODE_CREATE_GRANULE_SZ
#undef PGM_MODE_CREATE_LOOKUP_LVL
#undef PGM_MODE_CREATE_TTBR
#undef PGM_MODE_CREATE_EX
};
827
828
829template<uint8_t a_offTsz, uint8_t a_offTg, uint8_t a_offTbi, uint8_t a_offEpd, bool a_fTtbr0>
830DECLINLINE(uintptr_t) pgmR3DeduceTypeFromTcr(uint64_t u64RegSctlr, uint64_t u64RegTcr, uint64_t *pfInitialLookupMask)
831{
832 uintptr_t idxNewGst = 0;
833
834 /*
835 * MMU enabled at all?
836 * Technically this is incorrect as we use ARMV8_SCTLR_EL1_M regardless of the EL but the bit is the same
837 * for all exception levels.
838 */
839 if (u64RegSctlr & ARMV8_SCTLR_EL1_M)
840 {
841 uint64_t const u64Tsz = (u64RegTcr >> a_offTsz) & 0x1f;
842 uint64_t u64Tg = (u64RegTcr >> a_offTg) & 0x3;
843 bool const fTbi = RT_BOOL(u64RegTcr & RT_BIT_64(a_offTbi));
844 bool const fEpd = RT_BOOL(u64RegTcr & RT_BIT_64(a_offEpd));
845
846 /*
847 * From the ARM reference manual regarding granule size choices:
848 *
849 * If the value is programmed to either a reserved value or a size that has not been implemented, then
850 * the hardware will treat the field as if it has been programmed to an IMPLEMENTATION DEFINED
851 * choice of the sizes that has been implemented for all purposes other than the value read back from
852 * this register.
853 *
854 * We always fall back on the 4KiB granule size in that case.
855 */
856 /** @todo Can this be made table driven? */
857 uint64_t uLookupLvl;
858 if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
859 {
860 if (u64Tsz <= 16)
861 {
862 uLookupLvl = 0;
863 *pfInitialLookupMask = 0x1;
864 }
865 else if (u64Tsz >= 17 && u64Tsz <= 27)
866 {
867 uLookupLvl = 1;
868 *pfInitialLookupMask = RT_BIT_64(28 - u64Tsz + 1) - 1;
869 }
870 else if (u64Tsz >= 28 && u64Tsz <= 38)
871 {
872 uLookupLvl = 2;
873 *pfInitialLookupMask = RT_BIT_64(38 - u64Tsz + 1) - 1;
874 }
875 else /* if (u64Tsz == 39) */
876 {
877 uLookupLvl = 3;
878 *pfInitialLookupMask = 0x1;
879 }
880 }
881 else if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
882 {
883 if (/*u64Tsz >= 16 &&*/ u64Tsz <= 21)
884 {
885 uLookupLvl = 1;
886 *pfInitialLookupMask = RT_BIT_64(21 - u64Tsz + 1) - 1;
887 }
888 else if (u64Tsz >= 22 && u64Tsz <= 34)
889 {
890 uLookupLvl = 2;
891 *pfInitialLookupMask = RT_BIT_64(34 - u64Tsz + 1) - 1;
892 }
893 else /*if (u64Tsz >= 35 && u64Tsz <= 39)*/
894 {
895 uLookupLvl = 3;
896 if (u64Tsz <= 39)
897 *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1;
898 else
899 *pfInitialLookupMask = 0x1;
900 }
901 }
902 else /* if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_4KB) */
903 {
904 /*
905 * From: https://github.com/codingbelief/arm-architecture-reference-manual-for-armv8-a/blob/master/en/chapter_d4/d42_2_controlling_address_translation_stages.md
906 * For all translation stages
907 * The maximum TxSZ value is 39. If TxSZ is programmed to a value larger than 39 then it is IMPLEMENTATION DEFINED whether:
908 * - The implementation behaves as if the field is programmed to 39 for all purposes other than reading back the value of the field.
909 * - Any use of the TxSZ value generates a Level 0 Translation fault for the stage of translation at which TxSZ is used.
910 *
911 * For a stage 1 translation
912 * The minimum TxSZ value is 16. If TxSZ is programmed to a value smaller than 16 then it is IMPLEMENTATION DEFINED whether:
913 * - The implementation behaves as if the field were programmed to 16 for all purposes other than reading back the value of the field.
914 * - Any use of the TxSZ value generates a stage 1 Level 0 Translation fault.
915 *
916 * We currently choose the former for both.
917 */
918 if (/*u64Tsz >= 16 &&*/ u64Tsz <= 24)
919 {
920 uLookupLvl = 0;
921 if (u64Tsz >= 16)
922 *pfInitialLookupMask = RT_BIT_64(24 - u64Tsz + 1) - 1;
923 else
924 *pfInitialLookupMask = RT_BIT_64(9) - 1;
925 }
926 else if (u64Tsz >= 25 && u64Tsz <= 33)
927 {
928 uLookupLvl = 1;
929 *pfInitialLookupMask = RT_BIT_64(33 - u64Tsz + 1) - 1;
930 }
931 else /*if (u64Tsz >= 34 && u64Tsz <= 39)*/
932 {
933 uLookupLvl = 2;
934 if (u64Tsz <= 39)
935 *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1;
936 else
937 *pfInitialLookupMask = 0x1;
938 }
939
940 u64Tg = ARMV8_TCR_EL1_AARCH64_TG0_4KB;
941 }
942
943 /* Build the index into the PGM mode callback table for the given config. */
944 idxNewGst = PGM_MODE_TYPE_CREATE(a_fTtbr0, uLookupLvl, u64Tg, fTbi, fEpd);
945 }
946 else
947 idxNewGst = PGM_TYPE_NONE;
948
949 return idxNewGst;
950}
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette