VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h@ 108846

Last change on this file since 108846 was 108846, checked in by vboxsync, 5 weeks ago

VMM/PGM,NEM: Some early page table management infrastructure for ARMv8, bugref:10388 [doxygen]

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 15.5 KB
Line 
1/* $Id: PGMAllGst-armv8.cpp.h 108846 2025-04-04 09:01:14Z vboxsync $ */
2/** @file
3 * PGM - Page Manager, ARMv8 Guest Paging Template - All context code.
4 */
5
6/*
7 * Copyright (C) 2023-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29
/*
 * Common helpers.
 */
35
36DECLINLINE(int) pgmGstWalkReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
37{
38 NOREF(pVCpu);
39 pWalk->fNotPresent = true;
40 pWalk->uLevel = uLevel;
41 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT
42 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
43 return VERR_PAGE_TABLE_NOT_PRESENT;
44}
45
46DECLINLINE(int) pgmGstWalkReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel, int rc)
47{
48 AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); NOREF(rc); NOREF(pVCpu);
49 pWalk->fBadPhysAddr = true;
50 pWalk->uLevel = uLevel;
51 pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS
52 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
53 return VERR_PAGE_TABLE_NOT_PRESENT;
54}
55
56
57DECLINLINE(int) pgmGstWalkReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
58{
59 NOREF(pVCpu);
60 pWalk->fRsvdError = true;
61 pWalk->uLevel = uLevel;
62 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS
63 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
64 return VERR_PAGE_TABLE_NOT_PRESENT;
65}
66
67
68DECLINLINE(int) pgmGstWalkFastReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
69{
70 RT_NOREF(pVCpu);
71 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
72 return VERR_PAGE_TABLE_NOT_PRESENT;
73}
74
75
76DECLINLINE(int) pgmGstWalkFastReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel, int rc)
77{
78 AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); RT_NOREF(pVCpu, rc);
79 pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
80 return VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS;
81}
82
83
84DECLINLINE(int) pgmGstWalkFastReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
85{
86 RT_NOREF(pVCpu);
87 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
88 return VERR_RESERVED_PAGE_TABLE_BITS;
89}
90
91
/*
 * Special no paging variant.
 */
97
98static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
99{
100 RT_NOREF(pVCpu);
101
102 RT_ZERO(*pWalk);
103 pWalk->fSucceeded = true;
104 pWalk->GCPtr = GCPtr;
105 pWalk->GCPhys = GCPtr & ~(RTGCPHYS)GUEST_PAGE_OFFSET_MASK;
106 pWalk->fEffective = X86_PTE_P | X86_PTE_RW | X86_PTE_US; /** @todo */
107 return VINF_SUCCESS;
108}
109
110
/**
 * Fast page query for the no-paging case: identity mapping with full access
 * (present, read-write, user, accessed, dirty).
 *
 * @returns VINF_SUCCESS.
 * @param   pVCpu   The cross context virtual CPU structure. (unused)
 * @param   GCPtr   The guest virtual address.
 * @param   fFlags  Query flags. (unused)
 * @param   pWalk   Where to store the fast walk result.
 */
static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
{
    RT_NOREF(pVCpu, fFlags);

    pWalk->GCPtr        = GCPtr;
    /* NOTE(review): GCPhys is not masked with GUEST_PAGE_OFFSET_MASK here,
       unlike GstNoneGetPage above — confirm this asymmetry is intentional. */
    pWalk->GCPhys       = GCPtr;
    pWalk->GCPhysNested = 0;
    pWalk->fInfo        = PGM_WALKINFO_SUCCEEDED;
    pWalk->fFailed      = PGM_WALKFAIL_SUCCESS;
    pWalk->fEffective   = X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_A | X86_PTE_D; /** @todo */
    return VINF_SUCCESS;
}
123
124
125static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
126{
127 /* Ignore. */
128 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
129 return VINF_SUCCESS;
130}
131
132
133static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
134{
135 RT_NOREF(pVCpu, GCPtr, pWalk);
136 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
137 return VERR_PGM_NOT_USED_IN_MODE;
138}
139
140
141static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneEnter)(PVMCPUCC pVCpu)
142{
143 /* Nothing to do. */
144 RT_NOREF(pVCpu);
145 return VINF_SUCCESS;
146}
147
148
149static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneExit)(PVMCPUCC pVCpu)
150{
151 /* Nothing to do. */
152 RT_NOREF(pVCpu);
153 return VINF_SUCCESS;
154}
155
156
/*
 * Template variants for actual paging modes.
 */
/**
 * Worker performing a VMSAv8-64 guest page table walk for the given virtual
 * address, filling in @a pWalk with the result.
 *
 * The shift values used (39/30/21/12) correspond to a 4K translation granule;
 * a_GranuleSz, a_fTbi and a_fEpd are not consulted in this body (presumably
 * handled by the caller / instantiation selection — TODO confirm).
 *
 * @returns VBox status code — VINF_SUCCESS, or a failure status from one of
 *          the pgmGstWalkReturnXxx helpers.
 * @tparam  a_fTtbr0            True to use the TTBR0 lookup masks, false for TTBR1.
 * @tparam  a_InitialLookupLvl  Level the walk starts at (0..2; 3 is an invalid filler slot).
 * @tparam  a_GranuleSz         Translation granule selector. (unused here)
 * @tparam  a_fTbi              Top-byte-ignore selector. (unused here)
 * @tparam  a_fEpd              EPD (walk disable) selector. (unused here)
 * @param   pVCpu       The cross context virtual CPU structure.
 * @param   GCPtr       The guest virtual address to translate.
 * @param   pWalk       Where to store the walk result.
 * @param   pGstWalk    Guest-mode specific walk info. (currently unused)
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
DECL_FORCE_INLINE(int) pgmGstWalkWorker(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
{
    RT_NOREF(pGstWalk); /** @todo */
    uint8_t const bEl = CPUMGetGuestEL(pVCpu);

    /*
     * Initial lookup level 3 is not valid and only instantiated because we need two
     * bits for the lookup level when creating the index and have to fill the slots.
     */
    if RT_CONSTEXPR_IF(a_InitialLookupLvl == 3)
    {
        AssertReleaseFailed();
        return VERR_PGM_MODE_IPE;
    }
    else
    {
        /* Mask bounding the index into the initial-level table; per-EL and per-TTBR. */
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        uint64_t *pu64Pt = NULL;
        uint64_t uPt;
        int rc;
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == 0)
        {
            /* Level 0: indexed by VA bits 47:39; descriptor bit 0 = valid, bit 1 = table. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 0, rc);

            uPt = pu64Pt[(GCPtr >> 39) & fLookupMask];
            if (uPt & RT_BIT_64(0)) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 0);

            if (uPt & RT_BIT_64(1)) { /* probable */ }
            else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 0); /** @todo Only supported if TCR_EL1.DS is set. */

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 1)
        {
            /* Level 1: indexed by VA bits 38:30; a cleared bit 1 means a 1GB block descriptor. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 1, rc);

            uPt = pu64Pt[(GCPtr >> 30) & fLookupMask];
            if (uPt & RT_BIT_64(0)) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 1);

            if (uPt & RT_BIT_64(1)) { /* probable */ }
            else
            {
                /* Block descriptor (1G page). */
                pWalk->GCPtr       = GCPtr;
                pWalk->fSucceeded  = true;
                pWalk->GCPhys      = (RTGCPHYS)(uPt & UINT64_C(0xffffc0000000)) | (GCPtr & (RTGCPTR)(_1G - 1));
                pWalk->fGigantPage = true;
                return VINF_SUCCESS;
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 2)
        {
            /* Level 2: indexed by VA bits 29:21; a cleared bit 1 means a 2MB block descriptor. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 2, rc);

            uPt = pu64Pt[(GCPtr >> 21) & fLookupMask];
            if (uPt & RT_BIT_64(0)) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 2);

            if (uPt & RT_BIT_64(1)) { /* probable */ }
            else
            {
                /* Block descriptor (2M page). */
                pWalk->GCPtr      = GCPtr;
                pWalk->fSucceeded = true;
                pWalk->GCPhys     = (RTGCPHYS)(uPt & UINT64_C(0xffffffe00000)) | (GCPtr & (RTGCPTR)(_2M - 1));
                pWalk->fBigPage   = true;
                return VINF_SUCCESS;
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        AssertCompile(a_InitialLookupLvl <= 3);

        /* Level 3 (final): indexed by VA bits 20:12; only page descriptors are valid here. */
        rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, &pu64Pt);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, 3, rc);

        uPt = pu64Pt[(GCPtr & UINT64_C(0x1ff000)) >> 12];
        if (uPt & RT_BIT_64(0)) { /* probable */ }
        else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, 3);

        if (uPt & RT_BIT_64(1)) { /* probable */ }
        else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, 3); /* No block descriptors at level 3. */

        pWalk->GCPtr      = GCPtr;
        pWalk->fSucceeded = true;
        pWalk->GCPhys     = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000)) | (GCPtr & (RTGCPTR)(_4K - 1));
        return VINF_SUCCESS;
    }
}
280
281
282template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
283static PGM_CTX_DECL(int) PGM_CTX(pgm,GstGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
284{
285 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>(pVCpu, GCPtr, pWalk, NULL /*pGstWalk*/);
286}
287
288
/**
 * Fast variant of the VMSAv8-64 guest page table walk, using lockless
 * physical page mapping and the PGMPTWALKFAST result structure.
 *
 * Mirrors pgmGstWalkWorker above but uses pgmPhysGCPhys2CCPtrLockless and the
 * pgmGstWalkFastReturnXxx failure helpers.  Shift values (39/30/21/12)
 * correspond to a 4K granule; a_GranuleSz/a_fTbi/a_fEpd are not consulted in
 * this body.
 *
 * @returns VBox status code — VINF_SUCCESS or a pgmGstWalkFastReturnXxx status.
 * @tparam  a_fTtbr0            True to use the TTBR0 lookup masks, false for TTBR1.
 * @tparam  a_InitialLookupLvl  Level the walk starts at (0..2; 3 is an invalid filler slot).
 * @param   pVCpu   The cross context virtual CPU structure.
 * @param   GCPtr   The guest virtual address to translate.
 * @param   fFlags  Query flags. (currently unused)
 * @param   pWalk   Where to store the fast walk result.
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
static PGM_CTX_DECL(int) PGM_CTX(pgm,GstQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
{
    RT_NOREF(fFlags); /** @todo */
    uint8_t const bEl = CPUMGetGuestEL(pVCpu);

    /*
     * Initial lookup level 3 is not valid and only instantiated because we need two
     * bits for the lookup level when creating the index and have to fill the slots.
     */
    if RT_CONSTEXPR_IF(a_InitialLookupLvl == 3)
    {
        AssertReleaseFailed();
        return VERR_PGM_MODE_IPE;
    }
    else
    {
        /* Mask bounding the index into the initial-level table; per-EL and per-TTBR. */
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        uint64_t *pu64Pt = NULL;
        uint64_t uPt;
        int rc;
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == 0)
        {
            /* Level 0: indexed by VA bits 47:39; descriptor bit 0 = valid, bit 1 = table. */
            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, 0, rc);

            uPt = pu64Pt[(GCPtr >> 39) & fLookupMask];
            if (uPt & RT_BIT_64(0)) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 0);

            if (uPt & RT_BIT_64(1)) { /* probable */ }
            else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, 0); /** @todo Only supported if TCR_EL1.DS is set. */

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 1)
        {
            /* Level 1: indexed by VA bits 38:30; a cleared bit 1 means a 1GB block descriptor. */
            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, 1, rc);

            uPt = pu64Pt[(GCPtr >> 30) & fLookupMask];
            if (uPt & RT_BIT_64(0)) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 1);

            if (uPt & RT_BIT_64(1)) { /* probable */ }
            else
            {
                /* Block descriptor (1G page). */
                pWalk->GCPtr  = GCPtr;
                pWalk->fInfo  = PGM_WALKINFO_SUCCEEDED | PGM_WALKINFO_GIGANTIC_PAGE;
                pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xffffc0000000)) | (GCPtr & (RTGCPTR)(_1G - 1));
                return VINF_SUCCESS;
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= 2)
        {
            /* Level 2: indexed by VA bits 29:21; a cleared bit 1 means a 2MB block descriptor. */
            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pu64Pt);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, 2, rc);

            uPt = pu64Pt[(GCPtr >> 21) & fLookupMask];
            if (uPt & RT_BIT_64(0)) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 2);

            if (uPt & RT_BIT_64(1)) { /* probable */ }
            else
            {
                /* Block descriptor (2M page). */
                pWalk->GCPtr  = GCPtr;
                pWalk->fInfo  = PGM_WALKINFO_SUCCEEDED | PGM_WALKINFO_BIG_PAGE;
                pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xffffffe00000)) | (GCPtr & (RTGCPTR)(_2M - 1));
                return VINF_SUCCESS;
            }

            /* All nine bits from now on. */
            fLookupMask = RT_BIT_64(9) - 1;
            GCPhysPt = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000));
        }

        AssertCompile(a_InitialLookupLvl <= 3);

        /* Level 3 (final): indexed by VA bits 20:12; only page descriptors are valid here. */
        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pu64Pt);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, 3, rc);

        uPt = pu64Pt[(GCPtr & UINT64_C(0x1ff000)) >> 12];
        if (uPt & RT_BIT_64(0)) { /* probable */ }
        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, 3);

        if (uPt & RT_BIT_64(1)) { /* probable */ }
        else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, 3); /* No block descriptors at level 3. */

        pWalk->GCPtr  = GCPtr;
        pWalk->fInfo  = PGM_WALKINFO_SUCCEEDED;
        pWalk->GCPhys = (RTGCPHYS)(uPt & UINT64_C(0xfffffffff000)) | (GCPtr & (RTGCPTR)(_4K - 1));
        return VINF_SUCCESS;
    }
}
404
405
406template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
407static PGM_CTX_DECL(int) PGM_CTX(pgm,GstModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
408{
409 /** @todo Ignore for now. */
410 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
411 return VINF_SUCCESS;
412}
413
414
415template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
416static PGM_CTX_DECL(int) PGM_CTX(pgm,GstWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
417{
418 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
419 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd>(pVCpu, GCPtr, pWalk, pGstWalk);
420}
421
422
423template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
424static PGM_CTX_DECL(int) PGM_CTX(pgm,GstEnter)(PVMCPUCC pVCpu)
425{
426 /* Nothing to do for now. */
427 RT_NOREF(pVCpu);
428 return VINF_SUCCESS;
429}
430
431
432template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd>
433static PGM_CTX_DECL(int) PGM_CTX(pgm,GstExit)(PVMCPUCC pVCpu)
434{
435 /* Nothing to do for now. */
436 RT_NOREF(pVCpu);
437 return VINF_SUCCESS;
438}
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette