VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-pf.c32@106317

Last change on this file since 106317 was 106061, checked in by vboxsync, 2 months ago

Copyright year updates by scm.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 85.4 KB
1/* $Id: bs3-cpu-basic-2-pf.c32 106061 2024-09-16 14:03:52Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, 32-bit C code for testing \#PF.
4 */
5
6/*
7 * Copyright (C) 2007-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#include <bs3kit.h>
42#include <iprt/asm-amd64-x86.h>
43
44
45/*********************************************************************************************************************************
46* Defined Constants And Macros *
47*********************************************************************************************************************************/
48#define CHECK_MEMBER(a_pszMode, a_szName, a_szFmt, a_Actual, a_Expected) \
49 do { \
50 if ((a_Actual) == (a_Expected)) { /* likely */ } \
51 else Bs3TestFailedF("%u - %s: " a_szName "=" a_szFmt " expected " a_szFmt, \
52 g_usBs3TestStep, (a_pszMode), (a_Actual), (a_Expected)); \
53 } while (0)
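/* Typical use (see bs3CpuBasic2Pf_CompareCtx below):
 *     CHECK_MEMBER(pszHint, "bXcpt", "%#04x", pThis->TrapCtx.bXcpt, bXcpt);
 * On mismatch it reports a failure via Bs3TestFailedF, prefixing the message with the current
 * test step (g_usBs3TestStep) and the mode hint string. */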
54
55#define BS3CPUBASIC2PF_HALT(pThis) \
56 do { \
57 Bs3TestPrintf("Halting: pteworker=%s store=%s accessor=%s\n", \
58 pThis->pszPteWorker, pThis->pszStore, pThis->pszAccessor); \
59 ASMHalt(); \
60 } while (0)
61
62
63/** @def BS3CPUBASIC2PF_FASTER
64 * Makes the test run faster, which is useful when executing under IEM. */
65#define BS3CPUBASIC2PF_FASTER
66
67
68/*********************************************************************************************************************************
69* Structures and Typedefs *
70*********************************************************************************************************************************/
71typedef void BS3_CALL FNBS3CPUBASIC2PFSNIPPET(void);
72
73typedef struct FNBS3CPUBASIC2PFTSTCODE
74{
75 FNBS3CPUBASIC2PFSNIPPET *pfn;
76 uint8_t offUd2;
77
78} FNBS3CPUBASIC2PFTSTCODE;
79typedef FNBS3CPUBASIC2PFTSTCODE const *PCFNBS3CPUBASIC2PFTSTCODE;
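/* Each test code snippet performs a single memory access through [xBX] (see g_aCmnModes
 * below) and is immediately followed by an UD2; offUd2 is the byte offset of that UD2 and is
 * used as the RIP adjustment when the access succeeds and the UD2 traps instead. */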
80
81typedef struct BS3CPUBASIC2PFTTSTCMNMODE
82{
83 uint8_t bMode;
84 FNBS3CPUBASIC2PFTSTCODE MovLoad;
85 FNBS3CPUBASIC2PFTSTCODE MovStore;
86 FNBS3CPUBASIC2PFTSTCODE Xchg;
87 FNBS3CPUBASIC2PFTSTCODE CmpXchg;
88 FNBS3CPUBASIC2PFTSTCODE DivMem;
89} BS3CPUBASIC2PFTTSTCMNMODE;
90typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
91
92
93typedef struct BS3CPUBASIC2PFSTATE
94{
95 /** The mode we're currently testing. */
96 uint8_t bMode;
97 /** The size of a natural access. */
98 uint8_t cbAccess;
99 /** The common mode functions. */
100 PCBS3CPUBASIC2PFTTSTCMNMODE pCmnMode;
101 /** Address of the test area (alias). */
102 union
103 {
104 uint64_t u;
105 uint32_t u32;
106 uint16_t u16;
107 } uTestAddr;
109 /** Pointer to the original test area mapping. */
109 uint8_t *pbOrgTest;
110 /** The size of the test area (at least two pages). */
111 uint32_t cbTest;
112 /** cbTest expressed as a page count. */
113 uint16_t cTestPages;
114 /** The number of PTEs in the first page table, i.e. what we can
115 * safely access via PgInfo.u.Pae.pPte/PgInfo.u.Legacy.pPte. */
116 uint16_t cTest1stPtes;
117 /** The number of PDEs for cTestPages. */
118 uint16_t cTestPdes;
119 /** 16-bit data selector for uTestAddr.u32. */
120 uint16_t uSel16TestData;
121 /** 16-bit code selector for uTestAddr.u32. */
122 uint16_t uSel16TestCode;
123 /** The size of the PDE backup. */
124 uint16_t cbPdeBackup;
125 /** The size of the PTE backup. */
126 uint16_t cbPteBackup;
127 /** Test paging information for uTestAddr.u. */
128 BS3PAGINGINFO4ADDR PgInfo;
129
130 /** Set if we can use the INVLPG instruction. */
131 bool fUseInvlPg;
132 /** Physical addressing width. */
133 uint8_t cBitsPhysWidth;
134
135 /** Reflects CR0.WP. */
136 bool fWp;
137 /** Reflects EFER.NXE & CR4.PAE. */
138 bool fNxe;
139
140 const char *pszAccessor;
141 const char *pszPteWorker;
142 const char *pszStore;
143
144 /** Trap context frame. */
145 BS3TRAPFRAME TrapCtx;
146 /** Expected result context. */
147 BS3REGCTX ExpectCtx;
148
149 /** The PML4E backup. */
150 uint64_t u64Pml4eBackup;
151 /** The PDPTE backup. */
152 uint64_t u64PdpteBackup;
153 /** The PDE backup. */
154 uint64_t au64PdeBackup[16];
155 /** The PTE backup. */
156 union
157 {
158 uint32_t Legacy[X86_PG_ENTRIES];
159 uint64_t Pae[X86_PG_PAE_ENTRIES];
160 } PteBackup;
161
162} BS3CPUBASIC2PFSTATE;
163/** Pointer to state for the \#PF test. */
164typedef BS3CPUBASIC2PFSTATE *PBS3CPUBASIC2PFSTATE;
165
166
167/**
168 * Paging modification worker.
169 */
170typedef struct BS3CPUBASIC2PFMODPT
171{
172 const char *pszName;
173 uint32_t fPresent : 1;
174 uint32_t fUser : 1;
175 uint32_t fWriteable : 1;
176 uint32_t fNoExecute : 1;
177 uint32_t fReserved : 1;
178 uint32_t uModifyArg : 24;
179 void (*pfnModify)(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, struct BS3CPUBASIC2PFMODPT const *pEntry,
180 uint32_t fClearMask, uint32_t fSetMask);
181 bool (*pfnApplicable)(PBS3CPUBASIC2PFSTATE pThis, struct BS3CPUBASIC2PFMODPT const *pEntry);
182} BS3CPUBASIC2PFMODPT;
183typedef BS3CPUBASIC2PFMODPT const *PCBS3CPUBASIC2PFMODPT;
184
185/** Page level protection. The alternative is protection at the page directory or a higher level. */
186#define BS3CB2PFACC_F_PAGE_LEVEL RT_BIT(0)
187/** Directly access the booby-trapped page, no edging onto or off it. */
188#define BS3CB2PFACC_F_DIRECT RT_BIT(1)
189
190/**
191 * Memory accessor.
192 */
193typedef struct BS3CPUBASIC2PFACCESSOR
194{
195 /** Accessor name. */
196 const char *pszName;
197 /** The accessor. */
198 void (*pfnAccessor)(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags, uint8_t bXcpt, uint8_t uPfErrCd);
199 /** The X86_TRAP_PF_XXX access flags this access sets. */
200 uint32_t fAccess;
201 /** The exception when things are fine. */
202 uint8_t bOkayXcpt;
203} BS3CPUBASIC2PFACCESSOR;
204typedef const BS3CPUBASIC2PFACCESSOR *PCBS3CPUBASIC2PFACCESSOR;
205
206
207/*********************************************************************************************************************************
208* Internal Functions *
209*********************************************************************************************************************************/
210FNBS3TESTDOMODE bs3CpuBasic2_RaiseXcpt0e_c32;
211
212/* bs3-cpu-basic-2-asm.asm: */
213void BS3_CALL bs3CpuBasic2_Store_mov_c32(void *pvDst, uint32_t uValue, uint32_t uOld);
214void BS3_CALL bs3CpuBasic2_Store_xchg_c32(void *pvDst, uint32_t uValue, uint32_t uOld);
215void BS3_CALL bs3CpuBasic2_Store_cmpxchg_c32(void *pvDst, uint32_t uValue, uint32_t uOld);
216
217
218/* bs3-cpu-basic-2-template.mac: */
219FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
220FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
221FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
222FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
223FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
224
225FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
226FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
227FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
228FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
229FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
230
231FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
232FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
233FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
234FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
235FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
236
237
238/*********************************************************************************************************************************
239* Global Variables *
240*********************************************************************************************************************************/
241/** Page table entry store functions (used when modifying PTEs). */
242static const struct
243{
244 const char *pszName;
245 void (BS3_CALL *pfnStore)(void *pvDst, uint32_t uValue, uint32_t uOld);
246} g_aStoreMethods[] =
247{
248 { "mov", bs3CpuBasic2_Store_mov_c32 },
249 { "xchg", bs3CpuBasic2_Store_xchg_c32 },
250 { "cmpxchg", bs3CpuBasic2_Store_cmpxchg_c32 },
251};
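/* The PTEs are modified using three different store instructions: a plain MOV, an XCHG (which
 * is implicitly locked when it has a memory operand) and a CMPXCHG, presumably so that both
 * ordinary and atomic-style page table updates are exercised.  Typical use further down:
 *     g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Legacy.pPte + 1, uNew, uOld);
 * where uOld is presumably the comparand for the cmpxchg variant. */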
252
253
254static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
255{
256 {
257 BS3_MODE_CODE_16,
258 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, 2 },
259 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, 2 },
260 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, 2 },
261 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, 3 },
262 { bs3CpuBasic2_div_ds_bx__ud2_c16, 2 },
263 },
264 {
265 BS3_MODE_CODE_32,
266 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32, 2 },
267 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32, 2 },
268 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32, 2 },
269 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32, 3 },
270 { bs3CpuBasic2_div_ds_bx__ud2_c32, 2 },
271 },
272 {
273 BS3_MODE_CODE_64,
274 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64, 2 + 1 },
275 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64, 2 + 1 },
276 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64, 2 + 1 },
277 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64, 3 + 1 },
278 { bs3CpuBasic2_div_ds_bx__ud2_c64, 2 + 1 },
279 },
280 {
281 BS3_MODE_CODE_V86,
282 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, 2 },
283 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, 2 },
284 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, 2 },
285 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, 3 },
286 { bs3CpuBasic2_div_ds_bx__ud2_c16, 2 },
287 },
288};
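/* The number paired with each snippet is offUd2, i.e. the length of the memory access
 * instruction and thus the offset of the UD2 that follows it: two bytes for mov/xchg/div,
 * three for cmpxchg (two-byte opcode), and one byte more for the 64-bit variants, presumably
 * on account of the REX prefix. */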
289
290
291/**
292 * Compares the CPU trap context against the expected register context.
293 */
294static void bs3CpuBasic2Pf_CompareCtx(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pExpectCtx, int cbPcAdjust,
295 uint8_t bXcpt, unsigned uErrCd)
296{
297 const char *pszHint = "xxxx";
298 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
299 uint32_t fExtraEfl;
300
301 CHECK_MEMBER(pszHint, "bXcpt", "%#04x", pThis->TrapCtx.bXcpt, bXcpt);
302 CHECK_MEMBER(pszHint, "uErrCd", "%#06RX16", (uint16_t)pThis->TrapCtx.uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
303
304 fExtraEfl = X86_EFL_RF;
305 if (BS3_MODE_IS_16BIT_SYS(g_bBs3CurrentMode))
306 fExtraEfl = 0;
307 else
308 fExtraEfl = X86_EFL_RF;
309 Bs3TestCheckRegCtxEx(&pThis->TrapCtx.Ctx, pExpectCtx, cbPcAdjust, 0 /*cbSpAdjust*/, fExtraEfl, pszHint, g_usBs3TestStep);
310 if (Bs3TestSubErrorCount() != cErrorsBefore)
311 {
312 Bs3TrapPrintFrame(&pThis->TrapCtx);
313#if 1
314 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
315 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
316 BS3CPUBASIC2PF_HALT(pThis);
317#endif
318 }
319}
320
321
322/**
323 * Compares the CPU trap context against a start context, expected exception and CR2.
324 */
325static void bs3CpuBasic2Pf_CompareSimpleCtx(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pStartCtx, int offAddPC,
326 uint8_t bXcpt, unsigned uErrCd, uint64_t uCr2)
327{
328 const char *pszHint = "xxxx";
329 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
330 uint64_t const uSavedCr2 = pStartCtx->cr2.u;
331 uint32_t fExtraEfl;
332
333 CHECK_MEMBER(pszHint, "bXcpt", "%#04x", pThis->TrapCtx.bXcpt, bXcpt);
334 CHECK_MEMBER(pszHint, "uErrCd", "%#06RX16", (uint16_t)pThis->TrapCtx.uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
335
336 fExtraEfl = X86_EFL_RF;
337 if (BS3_MODE_IS_16BIT_SYS(g_bBs3CurrentMode))
338 fExtraEfl = 0;
339 else
340 fExtraEfl = X86_EFL_RF;
341 pStartCtx->cr2.u = uCr2;
342 Bs3TestCheckRegCtxEx(&pThis->TrapCtx.Ctx, pStartCtx, offAddPC, 0 /*cbSpAdjust*/, fExtraEfl, pszHint, g_usBs3TestStep);
343 pStartCtx->cr2.u = uSavedCr2;
344 if (Bs3TestSubErrorCount() != cErrorsBefore)
345 {
346 Bs3TrapPrintFrame(&pThis->TrapCtx);
347#if 1
348 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
349 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
350 BS3CPUBASIC2PF_HALT(pThis);
351#endif
352 }
353}
354
355
356/**
357 * Checks the trap context for a simple \#PF trap.
358 */
359static void bs3CpuBasic2Pf_CompareSimplePf(PBS3CPUBASIC2PFSTATE pThis, PCBS3REGCTX pStartCtx, int offAddPC,
360 unsigned uErrCd, uint64_t uCr2)
361{
362 bs3CpuBasic2Pf_CompareSimpleCtx(pThis, (PBS3REGCTX)pStartCtx, offAddPC, X86_XCPT_PF, uErrCd, uCr2);
363}
364
365/**
366 * Checks the trap context for a simple \#UD trap.
367 */
368static void bs3CpuBasic2Pf_CompareSimpleUd(PBS3CPUBASIC2PFSTATE pThis, PCBS3REGCTX pStartCtx, int offAddPC)
369{
370 bs3CpuBasic2Pf_CompareSimpleCtx(pThis, (PBS3REGCTX)pStartCtx, offAddPC, X86_XCPT_UD, 0, pStartCtx->cr2.u);
371}
372
373
374/**
375 * Flushes all paging related caches (TLB), including entries for global pages.
376 */
377static void bs3CpuBasic2Pf_FlushAll(void)
378{
379 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
380 {
381 uint32_t uCr4 = ASMGetCR4();
382 if (uCr4 & (X86_CR4_PGE | X86_CR4_PCIDE))
383 {
384 ASMSetCR4(uCr4 & ~(X86_CR4_PGE | X86_CR4_PCIDE));
385 ASMSetCR4(uCr4);
386 return;
387 }
388 }
389
390 ASMReloadCR3();
391}
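/* A plain CR3 reload does not evict TLB entries for global pages, so when CR4.PGE (or
 * CR4.PCIDE) is in use the function briefly clears those CR4 bits instead, which invalidates
 * all TLB entries; otherwise, or on pre-486 CPUs without CR4, reloading CR3 is sufficient. */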
392
393
394/**
395 * Restores all the paging entries from backup and flushes everything.
396 *
397 * @param pThis Test state data.
398 */
399static void bs3CpuBasic2Pf_RestoreFromBackups(PBS3CPUBASIC2PFSTATE pThis)
400{
401 Bs3MemCpy(pThis->PgInfo.u.Legacy.pPte, &pThis->PteBackup, pThis->cbPteBackup);
402 Bs3MemCpy(pThis->PgInfo.u.Legacy.pPde, pThis->au64PdeBackup, pThis->cbPdeBackup);
403 if (pThis->PgInfo.cEntries > 2)
404 pThis->PgInfo.u.Pae.pPdpe->u = pThis->u64PdpteBackup;
405 if (pThis->PgInfo.cEntries > 3)
406 pThis->PgInfo.u.Pae.pPml4e->u = pThis->u64Pml4eBackup;
407 bs3CpuBasic2Pf_FlushAll();
408}
409
410
411/** @name BS3CPUBASIC2PFACCESSOR::pfnAccessor Implementations
412 * @{ */
413
414static void bs3CpuBasic2Pf_DoExec(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags, uint8_t bXcpt, uint8_t uPfErrCd)
415{
416 uint8_t *pbOrgTest = pThis->pbOrgTest;
417 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE + 1 : X86_PAGE_SIZE + 2;
418 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? offEnd - 1 : X86_PAGE_SIZE - 5;
419
420 for (; off < offEnd; off++)
421 {
422 /* Emit a little bit of code (using the original allocation mapping) and point pCtx to it. */
423 pbOrgTest[off + 0] = X86_OP_PRF_SIZE_ADDR;
424 pbOrgTest[off + 1] = X86_OP_PRF_SIZE_OP;
425 pbOrgTest[off + 2] = 0x90; /* NOP */
426 pbOrgTest[off + 3] = 0x0f; /* UD2 */
427 pbOrgTest[off + 4] = 0x0b;
428 pbOrgTest[off + 5] = 0xeb; /* JMP $-4 */
429 pbOrgTest[off + 6] = 0xfc;
430 switch (pThis->bMode & BS3_MODE_CODE_MASK)
431 {
432 default:
433 pCtx->rip.u = pThis->uTestAddr.u + off;
434 break;
435 case BS3_MODE_CODE_16:
436 Bs3SelSetup16BitCode(&Bs3GdteSpare01, pThis->uTestAddr.u32, pCtx->bCpl);
437 pCtx->rip.u = off;
438 pCtx->cs = BS3_SEL_SPARE_01 | pCtx->bCpl;
439 break;
440 case BS3_MODE_CODE_V86:
441 /** @todo fix me. */
442 return;
443 }
444 //Bs3TestPrintf("cs:rip=%04x:%010RX64 iRing=%d\n", pCtx->cs, pCtx->rip.u, pCtx->bCpl);
445
446 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
447 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
448 if ( bXcpt != X86_XCPT_PF
449 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off < X86_PAGE_SIZE - 4))
450 bs3CpuBasic2Pf_CompareSimpleUd(pThis, pCtx, 3);
451 else if (!(fFlags & BS3CB2PFACC_F_PAGE_LEVEL) || off >= X86_PAGE_SIZE)
452 bs3CpuBasic2Pf_CompareSimplePf(pThis, pCtx, 0, uPfErrCd, pThis->uTestAddr.u + off);
453 else
454 bs3CpuBasic2Pf_CompareSimplePf(pThis, pCtx,
455 off + 3 == X86_PAGE_SIZE || off + 4 == X86_PAGE_SIZE
456 ? RT_MIN(X86_PAGE_SIZE, off + 3) - off : 0,
457 uPfErrCd, pThis->uTestAddr.u + RT_MIN(X86_PAGE_SIZE, off + 4));
458 }
459}
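/* The bytes emitted above decode as 67h (address size prefix), 66h (operand size prefix),
 * NOP, UD2 (0F 0B) and JMP $-4 (EB FC).  Sliding 'off' across the page boundary makes the
 * instruction fetch edge onto the booby-trapped second page, so this accessor provokes
 * instruction fetch #PFs (the X86_TRAP_PF_ID bit is only expected when NXE is enabled, see
 * the fPfIdMask handling in the worker below); otherwise execution reaches the UD2. */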
460
461
462static void bs3CpuBasic2Pf_SetCsEip(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, PCFNBS3CPUBASIC2PFTSTCODE pCode)
463{
464 switch (pThis->bMode & BS3_MODE_CODE_MASK)
465 {
466 default:
467 pCtx->rip.u = (uintptr_t)pCode->pfn;
468 break;
469
470 case BS3_MODE_CODE_16:
471 {
472 uint32_t uFar16 = Bs3SelFlatCodeToProtFar16((uintptr_t)pCode->pfn);
473 pCtx->rip.u = (uint16_t)uFar16;
474 pCtx->cs = (uint16_t)(uFar16 >> 16) | pCtx->bCpl;
475 pCtx->cs += (uint16_t)pCtx->bCpl << BS3_SEL_RING_SHIFT;
476 break;
477 }
478
479 case BS3_MODE_CODE_V86:
480 {
481 uint32_t uFar16 = Bs3SelFlatCodeToRealMode((uintptr_t)pCode->pfn);
482 pCtx->rip.u = (uint16_t)uFar16;
483 pCtx->cs = (uint16_t)(uFar16 >> 16);
484 break;
485 }
486 }
487}
488
489
490/**
491 * Test a simple load instruction around the edges of page two.
492 *
493 * @param pThis The test state data.
494 * @param pCtx The test context.
495 * @param fFlags BS3CB2PFACC_F_XXX.
496 * @param bXcpt X86_XCPT_PF if this can cause \#PFs, otherwise
497 * X86_XCPT_UD.
498 * @param uPfErrCd The error code for \#PFs.
499 */
500static void bs3CpuBasic2Pf_DoMovLoad(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags, uint8_t bXcpt, uint8_t uPfErrCd)
501{
502 static uint64_t const s_uValue = UINT64_C(0x7c4d0114428d);
503 uint64_t uExpectRax;
504 unsigned i;
505
506 /*
507 * Adjust the incoming context and calculate our expectations.
508 */
509 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->MovLoad);
510 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
511 switch (pThis->bMode & BS3_MODE_CODE_MASK)
512 {
513 case BS3_MODE_CODE_16:
514 case BS3_MODE_CODE_V86:
515 uExpectRax = (uint16_t)s_uValue | (pCtx->rax.u & UINT64_C(0xffffffffffff0000));
516 break;
517 case BS3_MODE_CODE_32:
518 uExpectRax = (uint32_t)s_uValue | (pCtx->rax.u & UINT64_C(0xffffffff00000000));
519 break;
520 case BS3_MODE_CODE_64:
521 uExpectRax = s_uValue;
522 break;
523 }
524 if (uExpectRax == pCtx->rax.u)
525 pCtx->rax.u = ~pCtx->rax.u;
526
527 /*
528 * Make two approaches to the test page (the 2nd one):
529 * - i=0: Start on the 1st page and edge into the 2nd.
530 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
531 */
532 for (i = 0; i < 2; i++)
533 {
534 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
535 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
536
537 for (; off < offEnd; off++)
538 {
539 *(uint64_t *)&pThis->pbOrgTest[off] = s_uValue;
540 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
541 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
542 else
543 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
544
545 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
546 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
547
548 if ( bXcpt != X86_XCPT_PF
549 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
550 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
551 {
552 pThis->ExpectCtx.rax.u = uExpectRax;
553 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->MovLoad.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
554 pThis->ExpectCtx.rax = pCtx->rax;
555 }
556 else
557 {
558 if (off < X86_PAGE_SIZE)
559 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
560 else
561 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
562 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
563 pThis->ExpectCtx.cr2 = pCtx->cr2;
564 }
565 }
566
567 if (fFlags & BS3CB2PFACC_F_DIRECT)
568 break;
569 }
570}
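/* Note on the CR2 expectation above: when an access starts on the first (accessible) page and
 * crosses into the booby-trapped second page, CR2 holds the address of the first byte on the
 * second page; when the access starts on the second page itself, CR2 is the accessed address.
 * The store/xchg/cmpxchg/div accessors below follow the same pattern. */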
571
572
573/**
574 * Test a simple store instruction around the edges of page two.
575 *
576 * @param pThis The test state data.
577 * @param pCtx The test context.
578 * @param fFlags BS3CB2PFACC_F_XXX.
579 * @param bXcpt X86_XCPT_PF if this can cause \#PFs, otherwise
580 * X86_XCPT_UD.
581 * @param uPfErrCd The error code for \#PFs.
582 */
583static void bs3CpuBasic2Pf_DoMovStore(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
584 uint8_t bXcpt, uint8_t uPfErrCd)
585{
586 static uint64_t const s_uValue = UINT64_C(0x3af45ead86a34a26);
587 static uint64_t const s_uValueFlipped = UINT64_C(0xc50ba152795cb5d9);
588 uint64_t const uRaxSaved = pCtx->rax.u;
589 uint64_t uExpectStored;
590 unsigned i;
591
592 /*
593 * Adjust the incoming context and calculate our expectations.
594 */
595 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->MovStore);
596 if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
597 pCtx->rax.u = (uint32_t)s_uValue; /* leave the upper part zero */
598 else
599 pCtx->rax.u = s_uValue;
600
601 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
602 switch (pThis->bMode & BS3_MODE_CODE_MASK)
603 {
604 case BS3_MODE_CODE_16:
605 case BS3_MODE_CODE_V86:
606 uExpectStored = (uint16_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
607 break;
608 case BS3_MODE_CODE_32:
609 uExpectStored = (uint32_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
610 break;
611 case BS3_MODE_CODE_64:
612 uExpectStored = s_uValue;
613 break;
614 }
615
616 /*
617 * Make two approaches to the test page (the 2nd one):
618 * - i=0: Start on the 1st page and edge into the 2nd.
619 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
620 */
621 for (i = 0; i < 2; i++)
622 {
623 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
624 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
625 for (; off < offEnd; off++)
626 {
627 *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
628 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
629 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
630 else
631 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
632
633 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
634 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
635
636 if ( bXcpt != X86_XCPT_PF
637 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
638 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
639 {
640 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->MovStore.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
641 if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
642 Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
643 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
644 }
645 else
646 {
647 if (off < X86_PAGE_SIZE)
648 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
649 else
650 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
651 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
652 pThis->ExpectCtx.cr2 = pCtx->cr2;
653 if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
654 Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
655 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);
656
657 }
658 }
659
660 if (fFlags & BS3CB2PFACC_F_DIRECT)
661 break;
662 }
663
664 pCtx->rax.u = uRaxSaved;
665}
666
667
668/**
669 * Test an xchg instruction around the edges of page two.
670 *
671 * @param pThis The test state data.
672 * @param pCtx The test context.
673 * @param fFlags BS3CB2PFACC_F_XXX.
674 * @param bXcpt X86_XCPT_PF if this can cause \#PFs, otherwise
675 * X86_XCPT_UD.
676 * @param uPfErrCd The error code for \#PFs.
677 */
678static void bs3CpuBasic2Pf_DoXchg(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags, uint8_t bXcpt, uint8_t uPfErrCd)
679{
680 static uint64_t const s_uValue = UINT64_C(0xea58699648e2f32c);
681 static uint64_t const s_uValueFlipped = UINT64_C(0x15a79669b71d0cd3);
682 uint64_t const uRaxSaved = pCtx->rax.u;
683 uint64_t uRaxIn;
684 uint64_t uExpectedRax;
685 uint64_t uExpectStored;
686 unsigned i;
687
688 /*
689 * Adjust the incoming context and calculate our expectations.
690 */
691 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->Xchg);
692 if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
693 uRaxIn = (uint32_t)s_uValue; /* leave the upper part zero */
694 else
695 uRaxIn = s_uValue;
696
697 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
698 switch (pThis->bMode & BS3_MODE_CODE_MASK)
699 {
700 case BS3_MODE_CODE_16:
701 case BS3_MODE_CODE_V86:
702 uExpectedRax = (uint16_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffffffff0000));
703 uExpectStored = (uint16_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
704 break;
705 case BS3_MODE_CODE_32:
706 uExpectedRax = (uint32_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffff00000000));
707 uExpectStored = (uint32_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
708 break;
709 case BS3_MODE_CODE_64:
710 uExpectedRax = s_uValueFlipped;
711 uExpectStored = s_uValue;
712 break;
713 }
714
715 /*
716 * Make two approaches to the test page (the 2nd one):
717 * - i=0: Start on the 1st page and edge into the 2nd.
718 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
719 */
720 for (i = 0; i < 2; i++)
721 {
722 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
723 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
724 for (; off < offEnd; off++)
725 {
726 *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
727 pCtx->rax.u = uRaxIn;
728 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
729 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
730 else
731 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
732
733 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
734 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
735
736 if ( bXcpt != X86_XCPT_PF
737 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
738 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
739 {
740 pThis->ExpectCtx.rax.u = uExpectedRax;
741 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->Xchg.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
742 if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
743 Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
744 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
745 }
746 else
747 {
748 pThis->ExpectCtx.rax.u = uRaxIn;
749 if (off < X86_PAGE_SIZE)
750 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
751 else
752 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
753 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
754 pThis->ExpectCtx.cr2 = pCtx->cr2;
755 if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
756 Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
757 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);
758 }
759 }
760
761 if (fFlags & BS3CB2PFACC_F_DIRECT)
762 break;
763 }
764
765 pCtx->rax.u = uRaxSaved;
766}
767
768
769/**
770 * Test a cmpxchg instruction around the edges of page two.
771 *
772 * @param pThis The test state data.
773 * @param pCtx The test context.
774 * @param fFlags BS3CB2PFACC_F_XXX.
775 * @param bXcpt X86_XCPT_PF if this can cause \#PFs, otherwise
776 * X86_XCPT_UD.
777 * @param uPfErrCd The error code for \#PFs.
778 * @param fMissmatch Whether to fail and not store (@c true), or succeed
779 * and do the store.
780 */
781static void bs3CpuBasic2Pf_DoCmpXchg(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
782 uint8_t bXcpt, uint8_t uPfErrCd, bool fMissmatch)
783{
784 static uint64_t const s_uValue = UINT64_C(0xea58699648e2f32c);
785 static uint64_t const s_uValueFlipped = UINT64_C(0x15a79669b71d0cd3);
786 static uint64_t const s_uValueOther = UINT64_C(0x2171239bcb044c81);
787 uint64_t const uRaxSaved = pCtx->rax.u;
788 uint64_t const uRcxSaved = pCtx->rcx.u;
789 uint64_t uRaxIn;
790 uint64_t uExpectedRax;
791 uint32_t uExpectedFlags;
792 uint64_t uExpectStored;
793 unsigned i;
794
795 /*
796 * Adjust the incoming context and calculate our expectations.
797 * Hint: CMPXCHG [xBX],xCX ; xAX compare and update implicit, ZF set to !fMissmatch.
798 */
799 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->CmpXchg);
800 if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
801 {
802 uRaxIn = (uint32_t)(fMissmatch ? s_uValueOther : s_uValueFlipped); /* leave the upper part zero */
803 pCtx->rcx.u = (uint32_t)s_uValue; /* ditto */
804 }
805 else
806 {
807 uRaxIn = fMissmatch ? s_uValueOther : s_uValueFlipped;
808 pCtx->rcx.u = s_uValue;
809 }
810 if (fMissmatch)
811 pCtx->rflags.u32 |= X86_EFL_ZF;
812 else
813 pCtx->rflags.u32 &= ~X86_EFL_ZF;
814
815 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
816 uExpectedFlags = pCtx->rflags.u32 & ~(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_SF | X86_EFL_OF | X86_EFL_ZF);
817 switch (pThis->bMode & BS3_MODE_CODE_MASK)
818 {
819 case BS3_MODE_CODE_16:
820 case BS3_MODE_CODE_V86:
821 uExpectedRax = (uint16_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffffffff0000));
822 uExpectStored = (uint16_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
823 uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
824 break;
825 case BS3_MODE_CODE_32:
826 uExpectedRax = (uint32_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffff00000000));
827 uExpectStored = (uint32_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
828 uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
829 break;
830 case BS3_MODE_CODE_64:
831 uExpectedRax = s_uValueFlipped;
832 uExpectStored = s_uValue;
833 uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
834 break;
835 }
836 if (fMissmatch)
837 uExpectStored = s_uValueFlipped;
838
839 /*
840 * Make two approaches to the test page (the 2nd one):
841 * - i=0: Start on the 1st page and edge into the 2nd.
842 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
843 */
844 for (i = 0; i < 2; i++)
845 {
846 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
847 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
848 for (; off < offEnd; off++)
849 {
850 *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
851 pCtx->rax.u = uRaxIn;
852 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
853 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
854 else
855 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
856
857 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
858 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
859
860 if ( bXcpt != X86_XCPT_PF
861 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
862 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
863 {
864 pThis->ExpectCtx.rax.u = uExpectedRax;
865 pThis->ExpectCtx.rflags.u32 = uExpectedFlags;
866 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->CmpXchg.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
867 if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
868 Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
869 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
870 }
871 else
872 {
873 pThis->ExpectCtx.rax.u = uRaxIn;
874 pThis->ExpectCtx.rflags = pCtx->rflags;
875 if (off < X86_PAGE_SIZE)
876 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
877 else
878 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
879 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
880 pThis->ExpectCtx.cr2 = pCtx->cr2;
881 if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
882 Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
883 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);
884 }
885 }
886
887 if (fFlags & BS3CB2PFACC_F_DIRECT)
888 break;
889 }
890
891 pCtx->rax.u = uRaxSaved;
892 pCtx->rcx.u = uRcxSaved;
893}
894
895
896static void bs3CpuBasic2Pf_DoCmpXchgMiss(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
897 uint8_t bXcpt, uint8_t uPfErrCd)
898{
899 bs3CpuBasic2Pf_DoCmpXchg(pThis, pCtx, fFlags, bXcpt, uPfErrCd, true /*fMissmatch*/ );
900}
901
902
903static void bs3CpuBasic2Pf_DoCmpXchgMatch(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
904 uint8_t bXcpt, uint8_t uPfErrCd)
905{
906 bs3CpuBasic2Pf_DoCmpXchg(pThis, pCtx, fFlags, bXcpt, uPfErrCd , false /*fMissmatch*/ );
907}
908
909
910/**
911 * @interface_method_impl{BS3CPUBASIC2PFACCESSOR,pfnAccessor,
912 * DIV [MEM=0] for checking the accessed bit}
913 */
914static void bs3CpuBasic2Pf_DoDivByZero(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
915 uint8_t bXcpt, uint8_t uPfErrCd)
916{
917 static uint64_t const s_uFiller = UINT64_C(0x9856703711f4069e);
918 uint64_t uZeroAndFill;
919 unsigned i;
920
921 /*
922 * Adjust the incoming context and calculate our expectations.
923 */
924 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->DivMem);
925
926 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
927 switch (pThis->bMode & BS3_MODE_CODE_MASK)
928 {
929 case BS3_MODE_CODE_16:
930 case BS3_MODE_CODE_V86:
931 uZeroAndFill = s_uFiller & UINT64_C(0xffffffffffff0000);
932 break;
933 case BS3_MODE_CODE_32:
934 uZeroAndFill = s_uFiller & UINT64_C(0xffffffff00000000);
935 break;
936 case BS3_MODE_CODE_64:
937 uZeroAndFill = 0;
938 break;
939 }
940
941 /*
942 * Make two approaches to the test page (the 2nd one):
943 * - i=0: Start on the 1st page and edge into the 2nd.
944 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
945 */
946 for (i = 0; i < 2; i++)
947 {
948 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
949 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
950 for (; off < offEnd; off++)
951 {
952 *(uint64_t *)&pThis->pbOrgTest[off] = uZeroAndFill;
953 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
954 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
955 else
956 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
957
958 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
959 //if (pThis->bMode == BS3_MODE_PP16_32) Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
960
961 if ( bXcpt != X86_XCPT_PF
962 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
963 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
964 {
965 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, X86_XCPT_DE, 0 /*uErrCd*/);
966 if (*(uint64_t *)&pThis->pbOrgTest[off] != uZeroAndFill)
967 Bs3TestFailedF("%u - %s: Modified source op: %#RX64, expected %#RX64",
968 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uZeroAndFill);
969 }
970 else
971 {
972 if (off < X86_PAGE_SIZE)
973 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
974 else
975 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
976 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
977 pThis->ExpectCtx.cr2 = pCtx->cr2;
978 if (*(uint64_t *)&pThis->pbOrgTest[off] != uZeroAndFill)
979 Bs3TestFailedF("%u - %s: Modified source op: %#RX64, expected %#RX64",
980 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uZeroAndFill);
981 }
982 }
983
984 if (fFlags & BS3CB2PFACC_F_DIRECT)
985 break;
986 }
987}
988
989
990static BS3CPUBASIC2PFACCESSOR const g_aAccessors[] =
991{
992 { "DoExec", bs3CpuBasic2Pf_DoExec, X86_TRAP_PF_ID, X86_XCPT_UD },
993 { "DoMovLoad", bs3CpuBasic2Pf_DoMovLoad, 0, X86_XCPT_UD },
994 { "DoMovStore", bs3CpuBasic2Pf_DoMovStore, X86_TRAP_PF_RW, X86_XCPT_UD },
995 { "DoXchg", bs3CpuBasic2Pf_DoXchg, X86_TRAP_PF_RW, X86_XCPT_UD },
996 { "DoCmpXchgMiss", bs3CpuBasic2Pf_DoCmpXchgMiss, X86_TRAP_PF_RW, X86_XCPT_UD },
997 { "DoCmpXhcgMatch", bs3CpuBasic2Pf_DoCmpXchgMatch, X86_TRAP_PF_RW, X86_XCPT_UD },
998 { "DoDivByZero", bs3CpuBasic2Pf_DoDivByZero, 0, X86_XCPT_DE },
999};
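/* For each accessor, fAccess holds the X86_TRAP_PF_XXX bits the access contributes to the #PF
 * error code (ID for execution, RW for writes, 0 for plain reads) and bOkayXcpt the exception
 * expected when the access itself succeeds (the UD2 after the snippet, or #DE for the divide
 * by zero variant).  The worker below invokes them like this:
 *     g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, BS3CB2PFACC_F_PAGE_LEVEL, X86_XCPT_UD, UINT8_MAX);
 */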
1000
1001/** @} */
1002
1003
1004/** @name BS3CPUBASIC2PFMODPT::pfnModify implementations.
1005 * @{ */
1006
1007
1008static void bs3CpuBasic2Pf_ClearMask(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry,
1009 uint32_t fClearMask, uint32_t fSetMask)
1010{
1011 if (pThis->PgInfo.cbEntry == 4)
1012 {
1013 uint32_t const uOrg = pThis->PteBackup.Legacy[1];
1014 uint32_t uNew = ((uOrg & ~fClearMask) | fSetMask) & ~(uint32_t)pEntry->uModifyArg;
1015 uint32_t const uOld = pThis->PgInfo.u.Legacy.pPte[1].u;
1016 g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Legacy.pPte + 1, uNew, uOld);
1017 }
1018 else
1019 {
1020 uint64_t const uOrg = pThis->PteBackup.Pae[1];
1021 uint64_t uNew = ((uOrg & ~(uint64_t)fClearMask) | fSetMask) & ~(uint64_t)pEntry->uModifyArg;
1022 uint64_t const uOld = pThis->PgInfo.u.Pae.pPte[1].u;
1023
1024 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[0], (uint32_t)uNew, (uint32_t)uOld);
1025 if ((uint32_t)(uNew >> 32) != (uint32_t)(uOld >> 32))
1026 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[1],
1027 (uint32_t)(uNew >> 32), (uint32_t)(uOld >> 32));
1028 }
1029}
1030
1031static void bs3CpuBasic2Pf_SetBit(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry,
1032 uint32_t fClearMask, uint32_t fSetMask)
1033{
1034 if (pThis->PgInfo.cbEntry == 4)
1035 {
1036 uint32_t const uOrg = pThis->PteBackup.Legacy[1];
1037 uint32_t uNew = (uOrg & ~fClearMask) | fSetMask | RT_BIT_32(pEntry->uModifyArg);
1038 uint32_t const uOld = pThis->PgInfo.u.Legacy.pPte[1].u;
1039 g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Legacy.pPte + 1, uNew, uOld);
1040 }
1041 else
1042 {
1043 uint64_t const uOrg = pThis->PteBackup.Pae[1];
1044 uint64_t uNew = ((uOrg & ~(uint64_t)fClearMask) | fSetMask) | RT_BIT_64(pEntry->uModifyArg);
1045 uint64_t const uOld = pThis->PgInfo.u.Pae.pPte[1].u;
1046
1047 if (pEntry->uModifyArg < 32 || (uint32_t)uNew != (uint32_t)uOld)
1048 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[0], (uint32_t)uNew, (uint32_t)uOld);
1049 if (pEntry->uModifyArg >= 32 || (uint32_t)(uNew >> 32) != (uint32_t)(uOld >> 32))
1050 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[1],
1051 (uint32_t)(uNew >> 32), (uint32_t)(uOld >> 32));
1052 }
1053}
1054
1055static void bs3CpuBasic2Pf_NoChange(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry,
1056 uint32_t fClearMask, uint32_t fSetMask)
1057{
1058 if (pThis->PgInfo.cbEntry == 4)
1059 {
1060 uint32_t const uOrg = pThis->PteBackup.Legacy[1];
1061 uint32_t uNew = (uOrg & ~fClearMask) | fSetMask;
1062 uint32_t const uOld = pThis->PgInfo.u.Legacy.pPte[1].u;
1063 if (uNew != uOld)
1064 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Legacy.pPte[1], uNew, uOld);
1065 }
1066 else
1067 {
1068 uint64_t const uOrg = pThis->PteBackup.Pae[1];
1069 uint64_t uNew = (uOrg & ~(uint64_t)fClearMask) | fSetMask;
1070 uint64_t const uOld = pThis->PgInfo.u.Pae.pPte[1].u;
1071 if (uNew != uOld)
1072 {
1073 if ((uint32_t)uNew != (uint32_t)uOld)
1074 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[0], (uint32_t)uNew, (uint32_t)uOld);
1075 if ((uint32_t)(uNew >> 32) != (uint32_t)(uOld >> 32))
1076 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[1],
1077 (uint32_t)(uNew >> 32), (uint32_t)(uOld >> 32));
1078 }
1079 }
1080}
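/* All three pfnModify workers operate on the PTE of the second test page (index 1) and write
 * it using one of the g_aStoreMethods.  PAE/long mode entries are 64 bits wide but are
 * updated as one or two 32-bit stores rather than a single 64-bit store, since the store
 * helpers are 32-bit.  uModifyArg is a mask to clear for bs3CpuBasic2Pf_ClearMask and a bit
 * index to set for bs3CpuBasic2Pf_SetBit. */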
1081
1082/** @} */
1083
1084
1085/** @name BS3CPUBASIC2PFMODPT::pfnApplicable implementations.
1086 * @{ */
1087
1088static bool bs3CpuBasic2Pf_IsPteBitReserved(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
1089{
1090 if (pThis->PgInfo.cbEntry == 8)
1091 {
1092 /* Bits 52..63 or 62 (NXE=1). */
1093 if (pThis->PgInfo.cEntries == 3)
1094 {
1095 if ((uint32_t)(pEntry->uModifyArg - 52U) < (uint32_t)(12 - pThis->fNxe))
1096 return true;
1097 }
1098 else if (pEntry->uModifyArg == 63 && !pThis->fNxe)
1099 return true;
1100
1101 /* Reserved physical address bits. */
1102 if (pEntry->uModifyArg < 52)
1103 {
1104 if ((uint32_t)pEntry->uModifyArg >= (uint32_t)pThis->cBitsPhysWidth)
1105 return true;
1106 }
1107 }
1108 return false;
1109}
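/* Summary of the reserved bit logic above: with 64-bit (PAE) entries, 32-bit PAE paging
 * (3 levels, PgInfo.cEntries == 3) reserves bits 52..62 and also bit 63 unless EFER.NXE is
 * set, while 4-level long mode only reserves bit 63 when NXE is clear.  Bits from the CPU's
 * physical address width up to bit 51 are reserved in both cases. */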
1110
1111static bool bs3CpuBasic2Pf_IsPteBitSoftwareUsable(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
1112{
1113 if (pThis->PgInfo.cbEntry == 8)
1114 {
1115 if (pThis->PgInfo.cEntries != 3)
1116 {
1117 if ((uint32_t)(pEntry->uModifyArg - 52U) < (uint32_t)11)
1118 return true;
1119 }
1120 }
1121 return false;
1122}
1123
1124
1125static bool bs3CpuBasic2Pf_IsNxe(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
1126{
1127 return pThis->fNxe && pThis->PgInfo.cbEntry == 8;
1128}
1129
1130/** @} */
1131
1132
1133static const BS3CPUBASIC2PFMODPT g_aPteWorkers[] =
1134{
1135/* { pszName, P U W NX RSV ModifyArg pfnModify, pfnApplicable }, */
1136 { "org", 1, 1, 1, 0, 0, 0, bs3CpuBasic2Pf_NoChange, NULL },
1137 { "!US", 1, 0, 1, 0, 0, X86_PTE_US, bs3CpuBasic2Pf_ClearMask, NULL },
1138 { "!RW", 1, 1, 0, 0, 0, X86_PTE_RW, bs3CpuBasic2Pf_ClearMask, NULL },
1139 { "!RW+!US", 1, 0, 0, 0, 0, X86_PTE_RW | X86_PTE_US, bs3CpuBasic2Pf_ClearMask, NULL },
1140 { "!P", 0, 0, 0, 0, 0, X86_PTE_P, bs3CpuBasic2Pf_ClearMask, NULL },
1141 { "NX", 1, 1, 1, 1, 0, 63, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsNxe },
1142 { "RSVPH[32]", 0, 0, 0, 0, 1, 32, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1143 { "RSVPH[33]", 0, 0, 0, 0, 1, 33, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1144 { "RSVPH[34]", 0, 0, 0, 0, 1, 34, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1145 { "RSVPH[35]", 0, 0, 0, 0, 1, 35, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1146 { "RSVPH[36]", 0, 0, 0, 0, 1, 36, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1147 { "RSVPH[37]", 0, 0, 0, 0, 1, 37, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1148 { "RSVPH[38]", 0, 0, 0, 0, 1, 38, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1149 { "RSVPH[39]", 0, 0, 0, 0, 1, 39, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1150 { "RSVPH[40]", 0, 0, 0, 0, 1, 40, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1151 { "RSVPH[41]", 0, 0, 0, 0, 1, 41, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1152 { "RSVPH[42]", 0, 0, 0, 0, 1, 42, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1153 { "RSVPH[43]", 0, 0, 0, 0, 1, 43, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1154 { "RSVPH[44]", 0, 0, 0, 0, 1, 44, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1155 { "RSVPH[45]", 0, 0, 0, 0, 1, 45, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1156 { "RSVPH[46]", 0, 0, 0, 0, 1, 46, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1157 { "RSVPH[47]", 0, 0, 0, 0, 1, 47, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1158 { "RSVPH[48]", 0, 0, 0, 0, 1, 48, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1159 { "RSVPH[49]", 0, 0, 0, 0, 1, 49, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1160 { "RSVPH[50]", 0, 0, 0, 0, 1, 50, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1161 { "RSVPH[51]", 0, 0, 0, 0, 1, 51, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1162 { "RSV[52]", 0, 0, 0, 0, 1, 52, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1163 { "RSV[53]", 0, 0, 0, 0, 1, 53, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1164 { "RSV[54]", 0, 0, 0, 0, 1, 54, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1165 { "RSV[55]", 0, 0, 0, 0, 1, 55, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1166 { "RSV[56]", 0, 0, 0, 0, 1, 56, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1167 { "RSV[57]", 0, 0, 0, 0, 1, 57, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1168 { "RSV[58]", 0, 0, 0, 0, 1, 58, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1169 { "RSV[59]", 0, 0, 0, 0, 1, 59, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1170 { "RSV[60]", 0, 0, 0, 0, 1, 60, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1171 { "RSV[61]", 0, 0, 0, 0, 1, 61, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1172 { "RSV[62]", 0, 0, 0, 0, 1, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1173 { "RSV[62]", 0, 0, 0, 0, 1, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1174 { "RSV[63]", 0, 0, 0, 0, 1, 63, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1175 { "!RSV[52]", 1, 1, 1, 0, 0, 52, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1176 { "!RSV[53]", 1, 1, 1, 0, 0, 53, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1177 { "!RSV[54]", 1, 1, 1, 0, 0, 54, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1178 { "!RSV[55]", 1, 1, 1, 0, 0, 55, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1179 { "!RSV[56]", 1, 1, 1, 0, 0, 56, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1180 { "!RSV[57]", 1, 1, 1, 0, 0, 57, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1181 { "!RSV[58]", 1, 1, 1, 0, 0, 58, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1182 { "!RSV[59]", 1, 1, 1, 0, 0, 59, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1183 { "!RSV[60]", 1, 1, 1, 0, 0, 60, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1184 { "!RSV[61]", 1, 1, 1, 0, 0, 61, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1185 { "!RSV[62]", 1, 1, 1, 0, 0, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1186
1187};
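/* Each worker describes the resulting effective PTE state (the P/U/W/NX/RSV columns) that the
 * test driver below uses to compute the expected #PF error code.  uModifyArg is either the
 * PTE mask to clear or the bit number to set, and pfnApplicable (when non-NULL) skips entries
 * that do not apply to the current paging mode or CPU features. */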
1188
1189
1190/**
1191 * Worker for bs3CpuBasic2_RaiseXcpt0e_c32 that does the actual testing.
1192 *
1193 * Caller does all the cleaning up.
1194 *
1195 * @returns Error count.
1196 * @param pThis Test state data.
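 * @param fWp Whether CR0.WP (supervisor write protection) is in effect.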
1197 * @param fNxe Whether NX is enabled.
1198 */
1199static uint8_t bs3CpuBasic2_RaiseXcpt0eWorker(PBS3CPUBASIC2PFSTATE register pThis, bool const fWp, bool const fNxe)
1200{
1201 unsigned iLevel;
1202 unsigned iRing;
1203 unsigned iStore;
1204 unsigned iAccessor;
1205 unsigned iOuter;
1206 unsigned cPml4Tests;
1207 unsigned cPdPtrTests;
1208 uint32_t const fPfIdMask = fNxe ? UINT32_MAX : ~X86_TRAP_PF_ID;
1209 BS3REGCTX aCtxts[4];
1210
1211 pThis->fWp = fWp;
1212 pThis->fNxe = fNxe;
1213
1214 /** @todo figure out V8086 testing. */
1215 if ((pThis->bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_V86)
1216 return BS3TESTDOMODE_SKIPPED;
1217
1218
1219 /* paranoia: Touch the various big stack structures to ensure the compiler has allocated stack for them. */
1220 for (iRing = 0; iRing < RT_ELEMENTS(aCtxts); iRing++)
1221 Bs3MemZero(&aCtxts[iRing], sizeof(aCtxts[iRing]));
1222
1223 /*
1224 * Set up a few contexts for testing this stuff.
1225 */
1226 Bs3RegCtxSaveEx(&aCtxts[0], pThis->bMode, 2048);
1227 for (iRing = 1; iRing < 4; iRing++)
1228 {
1229 aCtxts[iRing] = aCtxts[0];
1230 Bs3RegCtxConvertToRingX(&aCtxts[iRing], iRing);
1231 }
1232
1233 if (!BS3_MODE_IS_16BIT_CODE(pThis->bMode))
1234 {
1235 for (iRing = 0; iRing < 4; iRing++)
1236 aCtxts[iRing].rbx.u = pThis->uTestAddr.u;
1237 }
1238 else
1239 {
1240 for (iRing = 0; iRing < 4; iRing++)
1241 {
1242 aCtxts[iRing].ds = pThis->uSel16TestData;
1243 aCtxts[iRing].rbx.u = 0;
1244 }
1245 }
1246
1247 /*
1248 * Check basic operation:
1249 */
1250 for (iRing = 0; iRing < 4; iRing++)
1251 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1252 g_aAccessors[iAccessor].pfnAccessor(pThis, &aCtxts[iRing], BS3CB2PFACC_F_PAGE_LEVEL, X86_XCPT_UD, UINT8_MAX);
1253
1254 /*
1255 * Some PTE checks. We only mess with the 2nd page.
1256 */
1257 for (iOuter = 0; iOuter < 2; iOuter++)
1258 {
1259 uint32_t const fAccessor = (iOuter == 0 ? BS3CB2PFACC_F_DIRECT : 0) | BS3CB2PFACC_F_PAGE_LEVEL;
1260 unsigned iPteWrk;
1261
1262 bs3CpuBasic2Pf_FlushAll();
1263 for (iPteWrk = 0; iPteWrk < RT_ELEMENTS(g_aPteWorkers); iPteWrk++)
1264 {
1265 BS3CPUBASIC2PFMODPT EffWrk;
1266 const BS3CPUBASIC2PFMODPT *pPteWrk = &g_aPteWorkers[iPteWrk];
1267 if (pPteWrk->pfnApplicable && !pPteWrk->pfnApplicable(pThis, pPteWrk))
1268 continue;
1269
1270 pThis->pszPteWorker = pPteWrk->pszName;
1271
1272 EffWrk = *pPteWrk;
1273
1274#if 1
1275 /*
1276 * Do the modification once, then test all different accesses
1277 * without flushing the TLB or anything in-between.
1278 */
1279 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1280 {
1281 pThis->pszStore = g_aStoreMethods[iStore].pszName;
1282 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, 0);
1283
1284 for (iRing = 0; iRing < 4; iRing++)
1285 {
1286 PBS3REGCTX const pCtx = &aCtxts[iRing];
1287 if ( EffWrk.fReserved
1288 || !EffWrk.fPresent
1289 || (!EffWrk.fUser && iRing == 3))
1290 {
1291 uint32_t const fPfBase = ( EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
1292 : EffWrk.fPresent ? X86_TRAP_PF_P : 0)
1293 | (iRing == 3 ? X86_TRAP_PF_US : 0);
1294 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1295 {
1296 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1297 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1298 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1299 }
1300 }
1301 else
1302 {
1303 uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
1304 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1305 {
1306 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1307 if ( ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
1308 && EffWrk.fNoExecute)
1309 || ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1310 && !EffWrk.fWriteable
1311 && (fWp || iRing == 3)) )
1312 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1313 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1314 else
1315 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1316 }
1317 }
1318 }
1319
1320 /* Reset the paging + full flush. */
1321 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1322 }
1323#endif
1324
1325#define CHECK_AD_BITS(a_fExpectedAD) \
1326 do { \
1327 uint32_t fActualAD = ( pThis->PgInfo.cbEntry == 8 \
1328 ? pThis->PgInfo.u.Pae.pPte[1].au32[0] : pThis->PgInfo.u.Legacy.pPte[1].au32[0]) \
1329 & (X86_PTE_A | X86_PTE_D); \
1330 if (fActualAD != (a_fExpectedAD)) \
1331 { \
1332 Bs3TestFailedF("%u - %s/%u: unexpected A/D bits: %#x, expected %#x\n", \
1333 g_usBs3TestStep, "xxxx", __LINE__, fActualAD, a_fExpectedAD); \
1334 BS3CPUBASIC2PF_HALT(pThis); \
1335 } \
1336 } while (0)
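/* CHECK_AD_BITS reads the accessed/dirty bits directly out of the second page's PTE.  The
 * cases below expect a faulting access to leave A and D exactly as the PTE worker wrote
 * them, while a successful read sets A and a successful write sets both A and D. */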
1337
1338 /*
1339 * Again, but redoing everything for each accessor.
1340 */
1341 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1342 {
1343 pThis->pszStore = g_aStoreMethods[iStore].pszName;
1344
1345 for (iRing = 0; iRing < 4; iRing++)
1346 {
1347 PBS3REGCTX const pCtx = &aCtxts[iRing];
1348
1349 if ( EffWrk.fReserved
1350 || !EffWrk.fPresent
1351 || (!EffWrk.fUser && iRing == 3))
1352 {
1353 uint32_t const fPfBase = ( EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
1354 : EffWrk.fPresent ? X86_TRAP_PF_P : 0)
1355 | (iRing == 3 ? X86_TRAP_PF_US : 0);
1356 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1357 {
1358 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1359
1360 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, 0);
1361 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1362 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1363 CHECK_AD_BITS(0);
1364 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1365
1366 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1367 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1368 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1369 CHECK_AD_BITS(0);
1370 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1371 }
1372 }
1373 else
1374 {
1375 uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
1376 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1377 {
1378 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1379 if ( ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
1380 && EffWrk.fNoExecute)
1381 || ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1382 && !EffWrk.fWriteable
1383 && (fWp || iRing == 3)) )
1384 {
1385 uint32_t const fErrCd = fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask);
1386
1387 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1388 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1389 CHECK_AD_BITS(0);
1390 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1391
1392 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1393 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1394 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1395 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1396
1397 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1398 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1399 CHECK_AD_BITS(X86_PTE_D);
1400 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1401
1402 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1403 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1404 CHECK_AD_BITS(X86_PTE_A);
1405 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1406 }
1407 else
1408 {
1409 uint32_t const fExpectedAD = (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1410 ? X86_PTE_A | X86_PTE_D : X86_PTE_A;
1411
1412 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1413 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1414 CHECK_AD_BITS(fExpectedAD);
1415 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1416
1417 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1418 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1419 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1420 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1421
1422 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1423 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1424 CHECK_AD_BITS(fExpectedAD | X86_PTE_D);
1425 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1426
1427 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1428 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1429 CHECK_AD_BITS(fExpectedAD | X86_PTE_A);
1430 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1431 }
1432 }
1433 }
1434 }
1435 }
1436
1437 /*
1438 * Again, but using invalidate page.
1439 */
1440 if (pThis->fUseInvlPg)
1441 {
1442 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1443
1444 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1445 {
1446 pThis->pszStore = g_aStoreMethods[iStore].pszName;
1447
1448 for (iRing = 0; iRing < 4; iRing++)
1449 {
1450 PBS3REGCTX const pCtx = &aCtxts[iRing];
1451
1452 if ( EffWrk.fReserved
1453 || !EffWrk.fPresent
1454 || (!EffWrk.fUser && iRing == 3))
1455 {
1456 uint32_t const fPfBase = ( EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
1457 : EffWrk.fPresent ? X86_TRAP_PF_P : 0)
1458 | (iRing == 3 ? X86_TRAP_PF_US : 0);
1459 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1460 {
1461 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1462
1463 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, 0);
1464 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1465 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1466 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1467 CHECK_AD_BITS(0);
1468
1469 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1470 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1471 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1472 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1473 CHECK_AD_BITS(0);
1474 }
1475 }
1476 else
1477 {
1478 uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
1479 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1480 {
1481 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1482 if ( ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
1483 && EffWrk.fNoExecute)
1484 || ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1485 && !EffWrk.fWriteable
1486 && (fWp || iRing == 3)) )
1487 {
1488 uint32_t const fErrCd = fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask);
1489
1490 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1491 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1492 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1493 CHECK_AD_BITS(0);
1494
1495 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1496 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1497 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1498 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1499
1500 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1501 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1502 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1503 CHECK_AD_BITS(X86_PTE_D);
1504
1505 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1506 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1507 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1508 CHECK_AD_BITS(X86_PTE_A);
1509 }
1510 else
1511 {
1512 uint32_t const fExpectedAD = (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1513 ? X86_PTE_A | X86_PTE_D : X86_PTE_A;
1514
1515 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1516 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1517 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1518 CHECK_AD_BITS(fExpectedAD);
1519
1520 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1521 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1522 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1523 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1524
1525 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1526 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1527 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1528 CHECK_AD_BITS(fExpectedAD | X86_PTE_D);
1529
1530 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1531 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1532 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1533 CHECK_AD_BITS(fExpectedAD | X86_PTE_A);
1534 }
1535 }
1536 }
1537 }
1538 }
1539
1540 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1541 }
1542 }
1543 }
1544
1545
1546 /*
1547 * Do all 4 paging levels. We start out with full access to the page and
1548 * restrict it in various ways.
1549 *
1550 * (On the final level we only mess with the 2nd page for now.)
1551 */
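/* Decide how many entries to vary at the PML4 and PDPT levels: two where the
   paging depth and the alias address give us something to play with at those
   levels, otherwise just one. */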
1552 cPdPtrTests = 1;
1553 cPml4Tests = 1;
1554 if (pThis->uTestAddr.u >= UINT64_C(0x8000000000))
1555 {
1556 cPml4Tests = 2;
1557 cPdPtrTests = 2;
1558 }
1559 else if (pThis->PgInfo.cEntries == 3)
1560 cPdPtrTests = 2;
1561
1562#if 0
1563 /* Loop 1: Accessor flags. */
1564 for (iOuter = 0; iOuter < 2; iOuter++)
1565 {
1566 uint32_t const fAccessor = (iOuter == 0 ? BS3CB2PFACC_F_DIRECT : 0) | BS3CB2PFACC_F_PAGE_LEVEL;
1567
1568 /* Loop 2: Paging store method. */
1569 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1570 {
1571 unsigned iPml4Test;
1572 int8_t cReserved = 0;
1573 int8_t cNotPresent = 0;
1574 int8_t cNotWrite = 0;
1575 int8_t cNotUser = 0;
1576 int8_t cExecute = 0;
1577
1578 /* Loop 3: Page map level 4 */
1579 for (iPml4Test = 0; iPml4Test < cPml4Tests; iPml4Test++)
1580 {
1581 unsigned iPdPtrTest;
1582
1583 /* Loop 4: Page directory pointer table. */
1584 for (iPdPtrTest = 0; iPdPtrTest < cPdPtrTests; iPdPtrTest++)
1585 {
1586 unsigned iPdTest;
1587
1588 /* Loop 5: Page directory. */
1589 for (iPdTest = 0; iPdTest < 2; iPdTest++)
1590 {
1591 unsigned iPtTest;
1592
1593 /* Loop 6: Page table. */
1594 for (iPtTest = 0; iPtTest < 2; iPtTest++)
1595 {
1596 /* Loop 7: Accessor ring. */
1597 for (iRing = 0; iRing < 4; iRing++)
1598 {
1599 PBS3REGCTX const pCtx = &aCtxts[iRing];
1600
1601 if ( EffWrk.fReserved
1602 || !EffWrk.fPresent
1603 || (!EffWrk.fUser && iRing == 3))
1604 {
1605 uint32_t const fPfBase = ( EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
1606 : EffWrk.fPresent ? X86_TRAP_PF_P : 0)
1607 | (iRing == 3 ? X86_TRAP_PF_US : 0);
1608 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1609 {
1610 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1611
1612 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, 0);
1613 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1614 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1615 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1616 CHECK_AD_BITS(0);
1617
1618 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1619 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1620 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1621 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1622 CHECK_AD_BITS(0);
1623 }
1624 }
1625 else
1626 {
1627 uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
1628 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1629 {
1630 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1631 if ( ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
1632 && EffWrk.fNoExecute)
1633 || ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1634 && !EffWrk.fWriteable
1635 && (fWp || iRing == 3)) )
1636 {
1637 uint32_t const fErrCd = fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask);
1638
1639 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1640 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1641 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1642 CHECK_AD_BITS(0);
1643
1644 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1645 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1646 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1647 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1648
1649 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1650 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1651 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1652 CHECK_AD_BITS(X86_PTE_D);
1653
1654 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1655 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1656 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1657 CHECK_AD_BITS(X86_PTE_A);
1658 }
1659 else
1660 {
1661 uint32_t const fExpectedAD = (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1662 ? X86_PTE_A | X86_PTE_D : X86_PTE_A;
1663
1664 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1665 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1666 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1667 CHECK_AD_BITS(fExpectedAD);
1668
1669 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1670 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1671 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1672 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1673
1674 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1675 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1676 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1677 CHECK_AD_BITS(fExpectedAD | X86_PTE_D);
1678
1679 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1680 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1681 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1682 CHECK_AD_BITS(fExpectedAD | X86_PTE_A);
1683 }
1684 }
1685 }
1686 }
1687
1688 }
1689 }
1690 }
1691 }
1692
1693 }
1694 }
1695#endif
1696
1697 /*
1698 * Check reserved bits on each paging level.
1699 */
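/* (The per-level body of these loops is still a stub; only the loop
   structure is in place so far.) */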
1700
1701 /* Loop 1: Accessor flags (only direct for now). */
1702 for (iOuter = 0; iOuter < 1; iOuter++)
1703 {
1704 uint32_t const fAccessor = BS3CB2PFACC_F_DIRECT;
1705
1706 /* Loop 2: Paging store method. */
1707 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1708 {
1709 /* Loop 3: Accessor ring. */
1710 for (iRing = 0; iRing < 4; iRing++)
1711 {
1712 /* Loop 4: Which level we mess up. */
1713 for (iLevel = 0; iLevel < pThis->PgInfo.cEntries; iLevel++)
1714 {
1715#if 0
1716 const BS3CPUBASIC2PFMODPT *pPteWrk = &g_aPteWorkers[iPteWrk];
1717 if (pThis->PgInfo.)
1718 {
1719 }
1720#endif
1721
1722
1723 }
1724 }
1725 }
1726 }
1727
1728
1729
1730 return 0;
1731}
1732
1733
1734BS3_DECL_CALLBACK(uint8_t) bs3CpuBasic2_RaiseXcpt0e_c32(uint8_t bMode)
1735{
1736 void *pvTestUnaligned;
1737 uint32_t cbTestUnaligned = _8M;
1738 uint8_t bRet = 1;
1739 int rc;
1740 BS3CPUBASIC2PFSTATE State;
1741
1742 /*
1743 * Initialize the state data.
1744 */
1745 Bs3MemZero(&State, sizeof(State));
1746 State.bMode = bMode;
1747 switch (bMode & BS3_MODE_CODE_MASK)
1748 {
1749 case BS3_MODE_CODE_16: State.cbAccess = sizeof(uint16_t); break;
1750 case BS3_MODE_CODE_V86: State.cbAccess = sizeof(uint16_t); break;
1751 case BS3_MODE_CODE_32: State.cbAccess = sizeof(uint32_t); break;
1752 case BS3_MODE_CODE_64: State.cbAccess = sizeof(uint64_t); break;
1753 }
1754 State.pCmnMode = &g_aCmnModes[0];
1755 while (State.pCmnMode->bMode != (bMode & BS3_MODE_CODE_MASK))
1756 State.pCmnMode++;
1757 State.fUseInvlPg = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
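/* INVLPG was only introduced with the 80486, so the INVLPG variants are
   skipped on older CPUs. */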
1758
1759 /* Figure physical addressing width. */
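/* Default to 32 bits; CPUID leaf 0x80000008 reports the width in EAX[7:0]
   when available, while PSE-36/PAE support implies at least 36 bits. */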
1760 State.cBitsPhysWidth = 32;
1761 if ( (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1762 && (ASMCpuId_EDX(1) & (X86_CPUID_FEATURE_EDX_PSE36 | X86_CPUID_FEATURE_EDX_PAE)) )
1763 State.cBitsPhysWidth = 36;
1764
1765 if ( (g_uBs3CpuDetected & BS3CPU_F_CPUID_EXT_LEAVES)
1766 && ASMCpuId_EAX(0x80000000) >= 0x80000008)
1767 {
1768 uint8_t cBits = (uint8_t)ASMCpuId_EAX(0x80000008);
1769 if (cBits >= 32 && cBits <= 52)
1770 State.cBitsPhysWidth = cBits;
1771 else
1772 Bs3TestPrintf("CPUID 0x80000008: Physical bitcount out of range: %u\n", cBits);
1773 }
1774 //Bs3TestPrintf("Physical bitcount: %u\n", State.cBitsPhysWidth);
1775
1776 /*
1777 * Allocate some memory we can play around with, then carve a size-aligned
1778 * chunk out of it so we might be able to play with 2/4MB pages too.
1779 */
1780 cbTestUnaligned = _8M * 2;
1781 while ((pvTestUnaligned = Bs3MemAlloc(BS3MEMKIND_FLAT32, cbTestUnaligned)) == NULL)
1782 {
1783 cbTestUnaligned >>= 1;
1784 if (cbTestUnaligned <= _16K)
1785 {
1786 Bs3TestFailed("Failed to allocate memory to play around with\n");
1787 return 1;
1788 }
1789 }
1790
1791 /* align. */
1792 if ((uintptr_t)pvTestUnaligned & (cbTestUnaligned - 1))
1793 {
1794 State.cbTest = cbTestUnaligned >> 1;
1795 State.pbOrgTest = (uint8_t *)(((uintptr_t)pvTestUnaligned + State.cbTest - 1) & ~(State.cbTest - 1));
1796 }
1797 else
1798 {
1799 State.pbOrgTest = pvTestUnaligned;
1800 State.cbTest = cbTestUnaligned;
1801 }
1802 State.cTestPages = State.cbTest >> X86_PAGE_SHIFT;
1803
1804 /*
1805 * Alias this memory far away from where our code and data live.
1806 */
1807 if (bMode & BS3_MODE_CODE_64)
1808 State.uTestAddr.u = UINT64_C(0x0000648680000000);
1809 else
1810 State.uTestAddr.u = UINT32_C(0x80000000);
1811 rc = Bs3PagingAlias(State.uTestAddr.u, (uintptr_t)State.pbOrgTest, State.cbTest, X86_PTE_P | X86_PTE_RW | X86_PTE_US);
1812 if (RT_SUCCESS(rc))
1813 {
1814 rc = Bs3PagingQueryAddressInfo(State.uTestAddr.u, &State.PgInfo);
1815 if (RT_SUCCESS(rc))
1816 {
1817//if (bMode & BS3_MODE_CODE_64) ASMHalt();
1818 /* Set values that derive from the test memory size and paging info. */
1819 if (State.PgInfo.cEntries == 2)
1820 {
1821 State.cTestPdes = (State.cTestPages + X86_PG_ENTRIES - 1) / X86_PG_ENTRIES;
1822 State.cTest1stPtes = RT_MIN(State.cTestPages, X86_PG_ENTRIES);
1823 State.cbPdeBackup = State.cTestPdes * (X86_PAGE_SIZE / X86_PG_ENTRIES);
1824 State.cbPteBackup = State.cTest1stPtes * (X86_PAGE_SIZE / X86_PG_ENTRIES);
1825 }
1826 else
1827 {
1828 State.cTestPdes = (State.cTestPages + X86_PG_PAE_ENTRIES - 1) / X86_PG_PAE_ENTRIES;
1829 State.cTest1stPtes = RT_MIN(State.cTestPages, X86_PG_PAE_ENTRIES);
1830 State.cbPdeBackup = State.cTestPdes * (X86_PAGE_SIZE / X86_PG_PAE_ENTRIES);
1831 State.cbPteBackup = State.cTest1stPtes * (X86_PAGE_SIZE / X86_PG_PAE_ENTRIES);
1832 }
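/* (Legacy paging has 1024 4-byte entries per table, PAE and long mode have
   512 8-byte ones; the backup sizes above follow from that.) */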
1833#ifdef BS3CPUBASIC2PF_FASTER
1834 State.cbPteBackup = State.PgInfo.cbEntry * 4;
1835#endif
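/* With BS3CPUBASIC2PF_FASTER only the first four PTEs are backed up, which
   presumably keeps the frequent restore-from-backup step cheap. */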
1836 if (State.cTestPdes <= RT_ELEMENTS(State.au64PdeBackup))
1837 {
1838 uint32_t cr0 = ASMGetCR0();
1839
1840 /* Back up the structures. */
1841 Bs3MemCpy(&State.PteBackup, State.PgInfo.u.Legacy.pPte, State.cbPteBackup);
1842 Bs3MemCpy(State.au64PdeBackup, State.PgInfo.u.Legacy.pPde, State.cbPdeBackup);
1843 if (State.PgInfo.cEntries > 2)
1844 State.u64PdpteBackup = State.PgInfo.u.Pae.pPdpe->u;
1845 if (State.PgInfo.cEntries > 3)
1846 State.u64Pml4eBackup = State.PgInfo.u.Pae.pPml4e->u;
1847
1848 /*
1849 * Setup a 16-bit selector for accessing the alias.
1850 */
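/* (The RPL of 3 or'ed in below presumably lets the ring-3 contexts load the
   alias selector as well.) */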
1851 Bs3SelSetup16BitData(&Bs3GdteSpare00, State.uTestAddr.u32);
1852 State.uSel16TestData = BS3_SEL_SPARE_00 | 3;
1853
1854 /*
1855 * Do the testing.
1856 */
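/* First with CR0.WP clear, then (on 486+ where WP exists) with WP set, and
   finally both variants again with EFER.NXE enabled when NX is supported and
   the paging mode uses 8-byte entries. */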
1857 ASMSetCR0(ASMGetCR0() & ~X86_CR0_WP);
1858 bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, false /*fWp*/, false /*fNxe*/);
1859 if (bRet == 0 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1860 {
1861 ASMSetCR0(ASMGetCR0() | X86_CR0_WP);
1862 bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, true /*fWp*/, false /*fNxe*/);
1863 }
1864
1865 /* Do again with NX enabled. */
1866 if (bRet == 0 && (g_uBs3CpuDetected & BS3CPU_F_NX))
1867 {
1868 ASMWrMsr(MSR_K6_EFER, ASMRdMsr(MSR_K6_EFER) | MSR_K6_EFER_NXE);
1869 ASMSetCR0(ASMGetCR0() & ~X86_CR0_WP);
1870 bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, false /*fWp*/, State.PgInfo.cbEntry == 8 /*fNxe*/);
1871 ASMSetCR0(ASMGetCR0() | X86_CR0_WP);
1872 bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, true /*fWp*/, State.PgInfo.cbEntry == 8 /*fNxe*/);
1873 ASMWrMsr(MSR_K6_EFER, ASMRdMsr(MSR_K6_EFER) & ~MSR_K6_EFER_NXE);
1874 }
1875 bs3CpuBasic2Pf_RestoreFromBackups(&State);
1876 ASMSetCR0((ASMGetCR0() & ~X86_CR0_WP) | (cr0 & X86_CR0_WP));
1877 }
1878 else
1879 Bs3TestFailedF("cTestPdes=%u!\n", State.cTestPdes);
1880 }
1881 else
1882 Bs3TestFailedF("Bs3PagingQueryAddressInfo failed: %d\n", rc);
1883 Bs3PagingUnalias(State.uTestAddr.u, State.cbTest);
1884 }
1885 else
1886 Bs3TestFailedF("Bs3PagingAlias failed! rc=%d\n", rc);
1887 Bs3MemFree(pvTestUnaligned, cbTestUnaligned);
1888 return bRet;
1889}
1890