VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-pf.c32@ 65366

Last change on this file since 65366 was 65366, checked in by vboxsync, 8 years ago

bs3kit: More #PF testing.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 54.9 KB
/* $Id: bs3-cpu-basic-2-pf.c32 65366 2017-01-18 19:34:34Z vboxsync $ */
/** @file
 * BS3Kit - bs3-cpu-basic-2, 32-bit C code.
 */

/*
 * Copyright (C) 2007-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
 * VirtualBox OSE distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 */


/*********************************************************************************************************************************
*   Header Files                                                                                                                 *
*********************************************************************************************************************************/
#include <bs3kit.h>
#include <iprt/asm-amd64-x86.h>


/*********************************************************************************************************************************
*   Defined Constants And Macros                                                                                                 *
*********************************************************************************************************************************/
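/** Checks that a trap frame member matches the expected value, logging a test
 *  failure (with the current test step and the mode hint) when it does not. */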
#define CHECK_MEMBER(a_pszMode, a_szName, a_szFmt, a_Actual, a_Expected) \
    do { \
        if ((a_Actual) == (a_Expected)) { /* likely */ } \
        else Bs3TestFailedF("%u - %s: " a_szName "=" a_szFmt " expected " a_szFmt, \
                            g_usBs3TestStep, (a_pszMode), (a_Actual), (a_Expected)); \
    } while (0)


/*********************************************************************************************************************************
*   Structures and Typedefs                                                                                                      *
*********************************************************************************************************************************/
typedef void BS3_CALL FNBS3CPUBASIC2PFSNIPPET(void);

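/** Describes one of the assembly test code snippets: the snippet itself, the
 *  offset of the trailing UD2 instruction within it, and the template size. */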
typedef struct FNBS3CPUBASIC2PFTSTCODE
{
    FNBS3CPUBASIC2PFSNIPPET    *pfn;
    uint8_t                     offUd2;
    uint8_t                     cbTmpl;
} FNBS3CPUBASIC2PFTSTCODE;
typedef FNBS3CPUBASIC2PFTSTCODE const *PCFNBS3CPUBASIC2PFTSTCODE;

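/** Test code snippets for one code bitness (16-bit, 32-bit, 64-bit or V86),
 *  covering the MOV load, MOV store, XCHG and CMPXCHG accessors. */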
typedef struct BS3CPUBASIC2PFTTSTCMNMODE
{
    uint8_t                     bMode;
    FNBS3CPUBASIC2PFTSTCODE     MovLoad;
    FNBS3CPUBASIC2PFTSTCODE     MovStore;
    FNBS3CPUBASIC2PFTSTCODE     Xchg;
    FNBS3CPUBASIC2PFTSTCODE     CmpXchg;
} BS3CPUBASIC2PFTTSTCMNMODE;
typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;


typedef struct BS3CPUBASIC2PFSTATE
{
    /** The mode we're currently testing. */
    uint8_t                     bMode;
    /** The size of a natural access. */
    uint8_t                     cbAccess;
    /** The common mode functions. */
    PCBS3CPUBASIC2PFTTSTCMNMODE pCmnMode;
    /** Pointer to the test area (alias). */
    uint8_t                    *pbTest;
    /** Pointer to the original test area mapping. */
    uint8_t                    *pbOrgTest;
    /** The size of the test area (at least two pages). */
    uint32_t                    cbTest;
    /** cbTest expressed as a page count. */
    uint16_t                    cTestPages;
    /** The number of PTEs in the first page table, i.e. what we can
     * safely access via PgInfo.u.Pae.pPte/PgInfo.u.Legacy.pPte. */
    uint16_t                    cTest1stPtes;
    /** The number of PDEs for cTestPages. */
    uint16_t                    cTestPdes;
    /** 16-bit data selector for pbTest. */
    uint16_t                    uSel16TestData;
    /** 16-bit code selector for pbTest. */
    uint16_t                    uSel16TestCode;
    /** The size of the PDE backup. */
    uint16_t                    cbPdeBackup;
    /** The size of the PTE backup. */
    uint16_t                    cbPteBackup;
    /** Test paging information for pbTest. */
    BS3PAGINGINFO4ADDR          PgInfo;

    /** Set if we can use the INVLPG instruction. */
    bool                        fUseInvlPg;

    /** Reflects CR0.WP. */
    bool                        fWp;
    /** Reflects EFER.NXE & CR4.PAE. */
    bool                        fNxe;

    /** Trap context frame. */
    BS3TRAPFRAME                TrapCtx;
    /** Expected result context. */
    BS3REGCTX                   ExpectCtx;

    /** The PML4E backup. */
    uint64_t                    u64Pml4eBackup;
    /** The PDPTE backup. */
    uint64_t                    u64PdpteBackup;
    /** The PDE backup. */
    uint64_t                    au64PdeBackup[16];
    /** The PTE backup. */
    uint64_t                    au64PteBackup[X86_PG_PAE_ENTRIES];

} BS3CPUBASIC2PFSTATE;
/** Pointer to state for the \#PF test. */
typedef BS3CPUBASIC2PFSTATE *PBS3CPUBASIC2PFSTATE;


/**
 * Paging modification worker.
 */
typedef struct BS3CPUBASIC2PFMODPT
{
    const char *pszName;
    uint32_t    fPresent : 1;
    uint32_t    fUser : 1;
    uint32_t    fWriteable : 1;
    uint32_t    fNoExecute : 1;
    uint32_t    fReserved : 1;
    uint32_t    uModifyArg : 24;
    void      (*pfnModify)(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, struct BS3CPUBASIC2PFMODPT const *pEntry);
    bool      (*pfnApplicable)(PBS3CPUBASIC2PFSTATE pThis, struct BS3CPUBASIC2PFMODPT const *pEntry);
} BS3CPUBASIC2PFMODPT;
typedef BS3CPUBASIC2PFMODPT const *PCBS3CPUBASIC2PFMODPT;


/**
 * Memory accessor.
 */
typedef struct BS3CPUBASIC2PFACCESSOR
{
    /** Accessor name. */
    const char *pszName;
    /** The accessor. */
    void      (*pfnAccessor)(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint8_t bXcpt, uint8_t uPfErrCd, bool fPageLevel);
    /** The X86_TRAP_PF_XXX access flags this access sets. */
    uint32_t    fAccess;

} BS3CPUBASIC2PFACCESSOR;
typedef const BS3CPUBASIC2PFACCESSOR *PCBS3CPUBASIC2PFACCESSOR;


/*********************************************************************************************************************************
*   Internal Functions                                                                                                           *
*********************************************************************************************************************************/
FNBS3TESTDOMODE bs3CpuBasic2_RaiseXcpt0e_c32;

/* bs3-cpu-basic-2-asm.asm: */
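/* Each helper writes uValue to pvDst using the named instruction; uOld is the
 * destination's current value (presumably used as the comparand by the
 * cmpxchg variant and ignored by the others). */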
void BS3_CALL bs3CpuBasic2_Store_mov_c32(void *pvDst, uint32_t uValue, uint32_t uOld);
void BS3_CALL bs3CpuBasic2_Store_xchg_c32(void *pvDst, uint32_t uValue, uint32_t uOld);
void BS3_CALL bs3CpuBasic2_Store_cmpxchg_c32(void *pvDst, uint32_t uValue, uint32_t uOld);


/* bs3-cpu-basic-2-template.mac: */
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;

FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;

FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;


/*********************************************************************************************************************************
*   Global Variables                                                                                                             *
*********************************************************************************************************************************/
/** Page table entry store methods. */
static const struct
{
    const char *pszStore;
    void (BS3_CALL *pfnStore)(void *pvDst, uint32_t uValue, uint32_t uOld);
} g_aStoreMethods[] =
{
    { "mov",     bs3CpuBasic2_Store_mov_c32 },
    { "xchg",    bs3CpuBasic2_Store_xchg_c32 },
    { "cmpxchg", bs3CpuBasic2_Store_cmpxchg_c32 },
};

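/** The test code snippets for each code bitness; the second value in each
 *  initializer is the offset of the UD2 that follows the access instruction. */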
static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
{
    {
        BS3_MODE_CODE_16,
        { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16,       2 },
        { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16,       2 },
        { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16,      2 },
        { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16,   3 },
    },
    {
        BS3_MODE_CODE_32,
        { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32,       2 },
        { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32,       2 },
        { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32,      2 },
        { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32,   3 },
    },
    {
        BS3_MODE_CODE_64,
        { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64,       2 + 1 },
        { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64,       2 + 1 },
        { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64,      2 + 1 },
        { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64,   3 + 1 },
    },
    {
        BS3_MODE_CODE_V86,
        { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16,       2 },
        { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16,       2 },
        { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16,      2 },
        { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16,   3 },
    },
};

/**
 * Compares a CPU trap.
 */
static void bs3CpuBasic2Pf_CompareCtx(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pExpectCtx, int cbPcAdjust,
                                      uint8_t bXcpt, unsigned uErrCd)
{
    const char     *pszHint = "xxxx";
    uint16_t const  cErrorsBefore = Bs3TestSubErrorCount();
    uint32_t        fExtraEfl;

    CHECK_MEMBER(pszHint, "bXcpt",  "%#04x",    pThis->TrapCtx.bXcpt, bXcpt);
    CHECK_MEMBER(pszHint, "uErrCd", "%#06RX16", (uint16_t)pThis->TrapCtx.uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */

    fExtraEfl = X86_EFL_RF;
    if (BS3_MODE_IS_16BIT_SYS(g_bBs3CurrentMode))
        fExtraEfl = 0;
    else
        fExtraEfl = X86_EFL_RF;
    Bs3TestCheckRegCtxEx(&pThis->TrapCtx.Ctx, pExpectCtx, cbPcAdjust, 0 /*cbSpAdjust*/, fExtraEfl, pszHint, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(&pThis->TrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
        ASMHalt();
#endif
    }
}


/**
 * Compares a CPU trap.
 */
static void bs3CpuBasic2Pf_CompareSimpleCtx(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pStartCtx, int offAddPC,
                                            uint8_t bXcpt, unsigned uErrCd, uint64_t uCr2)
{
    const char     *pszHint = "xxxx";
    uint16_t const  cErrorsBefore = Bs3TestSubErrorCount();
    uint64_t const  uSavedCr2 = pStartCtx->cr2.u;
    uint32_t        fExtraEfl;

    CHECK_MEMBER(pszHint, "bXcpt",  "%#04x",    pThis->TrapCtx.bXcpt, bXcpt);
    CHECK_MEMBER(pszHint, "uErrCd", "%#06RX16", (uint16_t)pThis->TrapCtx.uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */

    fExtraEfl = X86_EFL_RF;
    if (BS3_MODE_IS_16BIT_SYS(g_bBs3CurrentMode))
        fExtraEfl = 0;
    else
        fExtraEfl = X86_EFL_RF;
    pStartCtx->cr2.u = uCr2;
    Bs3TestCheckRegCtxEx(&pThis->TrapCtx.Ctx, pStartCtx, offAddPC, 0 /*cbSpAdjust*/, fExtraEfl, pszHint, g_usBs3TestStep);
    pStartCtx->cr2.u = uSavedCr2;
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(&pThis->TrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
        ASMHalt();
#endif
    }
}


/**
 * Checks the trap context for a simple \#PF trap.
 */
static void bs3CpuBasic2Pf_CompareSimplePf(PBS3CPUBASIC2PFSTATE pThis, PCBS3REGCTX pStartCtx, int offAddPC,
                                           unsigned uErrCd, uint64_t uCr2)
{
    bs3CpuBasic2Pf_CompareSimpleCtx(pThis, (PBS3REGCTX)pStartCtx, offAddPC, X86_XCPT_PF, uErrCd, uCr2);
}

/**
 * Checks the trap context for a simple \#UD trap.
 */
static void bs3CpuBasic2Pf_CompareSimpleUd(PBS3CPUBASIC2PFSTATE pThis, PCBS3REGCTX pStartCtx, int offAddPC)
{
    bs3CpuBasic2Pf_CompareSimpleCtx(pThis, (PBS3REGCTX)pStartCtx, offAddPC, X86_XCPT_UD, 0, pStartCtx->cr2.u);
}

/**
 * Flushes the TLB, including global pages when possible.
 */
static void bs3CpuBasic2Pf_FlushAll(void)
{
    if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
    {
        uint32_t uCr4 = ASMGetCR4();
        if (uCr4 & (X86_CR4_PGE | X86_CR4_PCIDE))
        {
            ASMSetCR4(uCr4 & ~(X86_CR4_PGE | X86_CR4_PCIDE));
            ASMSetCR4(uCr4);
            return;
        }
    }

    ASMReloadCR3();
}


/**
 * Restores all the paging entries from backup and flushes everything.
 *
 * @param   pThis       Test state data.
 */
static void bs3CpuBasic2Pf_RestoreFromBackups(PBS3CPUBASIC2PFSTATE pThis)
{
    Bs3MemCpy(pThis->PgInfo.u.Legacy.pPte, pThis->au64PteBackup, pThis->cbPteBackup);
    Bs3MemCpy(pThis->PgInfo.u.Legacy.pPde, pThis->au64PdeBackup, pThis->cbPdeBackup);
    if (pThis->PgInfo.cEntries > 2)
        pThis->PgInfo.u.Pae.pPdpe->u = pThis->u64PdpteBackup;
    if (pThis->PgInfo.cEntries > 3)
        pThis->PgInfo.u.Pae.pPml4e->u = pThis->u64Pml4eBackup;
    bs3CpuBasic2Pf_FlushAll();
}


/** @name BS3CPUBASIC2PFACCESSOR::pfnAccessor Implementations
 * @{ */

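/**
 * Executes a tiny code snippet placed so it straddles the boundary between the
 * first and second test pages, expecting \#UD from its UD2 when the second
 * page is accessible and \#PF at or near the page boundary otherwise.
 */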
static void bs3CpuBasic2Pf_DoExec(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint8_t bXcpt, uint8_t uPfErrCd, bool fPageLevel)
{
    uint8_t *pbOrgTest = pThis->pbOrgTest;
    unsigned off;

    for (off = X86_PAGE_SIZE - 5; off < X86_PAGE_SIZE + 2; off++)
    {
        /* Emit a little bit of code (using the original allocation mapping) and point pCtx to it. */
        pbOrgTest[off + 0] = X86_OP_PRF_SIZE_ADDR;
        pbOrgTest[off + 1] = X86_OP_PRF_SIZE_OP;
        pbOrgTest[off + 2] = 0x90; /* NOP */
        pbOrgTest[off + 3] = 0x0f; /* UD2 */
        pbOrgTest[off + 4] = 0x0b;
        pbOrgTest[off + 5] = 0xeb; /* JMP $-4 */
        pbOrgTest[off + 6] = 0xfc;
        switch (pThis->bMode & BS3_MODE_CODE_MASK)
        {
            default:
                pCtx->rip.u = (uintptr_t)&pThis->pbTest[off];
                break;
            case BS3_MODE_CODE_16:
                Bs3SelSetup16BitCode(&Bs3GdteSpare01, (uintptr_t)pThis->pbTest, pCtx->bCpl);
                pCtx->rip.u = off;
                pCtx->cs    = BS3_SEL_SPARE_01 | pCtx->bCpl;
                break;
            case BS3_MODE_CODE_V86:
                /** @todo fix me. */
                return;
        }
        //Bs3TestPrintf("cs:rip=%04x:%010RX64 iRing=%d\n", pCtx->cs, pCtx->rip.u, pCtx->bCpl);

        Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
        //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
        if (   bXcpt != X86_XCPT_PF
            || (fPageLevel && off < X86_PAGE_SIZE - 4))
            bs3CpuBasic2Pf_CompareSimpleUd(pThis, pCtx, 3);
        else if (!fPageLevel || off >= X86_PAGE_SIZE)
            bs3CpuBasic2Pf_CompareSimplePf(pThis, pCtx, 0, uPfErrCd, (uintptr_t)pThis->pbTest + off);
        else
            bs3CpuBasic2Pf_CompareSimplePf(pThis, pCtx,
                                           off + 3 == X86_PAGE_SIZE || off + 4 == X86_PAGE_SIZE
                                           ? RT_MIN(X86_PAGE_SIZE, off + 3) - off : 0,
                                           uPfErrCd, (uintptr_t)pThis->pbTest + RT_MIN(X86_PAGE_SIZE, off + 4));
    }
}

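/**
 * Points pCtx->cs:rip at the given code snippet for the current code bitness.
 */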
static void bs3CpuBasic2Pf_SetCsEip(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, PCFNBS3CPUBASIC2PFTSTCODE pCode)
{
    switch (pThis->bMode & BS3_MODE_CODE_MASK)
    {
        default:
            pCtx->rip.u = (uintptr_t)pCode->pfn;
            break;

        case BS3_MODE_CODE_16:
        {
            uint32_t uFar16 = Bs3SelFlatCodeToProtFar16((uintptr_t)pCode->pfn);
            pCtx->rip.u = (uint16_t)uFar16;
            pCtx->cs    = (uint16_t)(uFar16 >> 16) | pCtx->bCpl;
            pCtx->cs   += (uint16_t)pCtx->bCpl << BS3_SEL_RING_SHIFT;
            break;
        }

        case BS3_MODE_CODE_V86:
        {
            uint32_t uFar16 = Bs3SelFlatCodeToRealMode((uintptr_t)pCode->pfn);
            pCtx->rip.u = (uint16_t)uFar16;
            pCtx->cs    = (uint16_t)(uFar16 >> 16);
            break;
        }
    }
}

/**
 * Tests a simple load instruction around the edges of page two.
 *
 * @param   pThis       The test state data.
 * @param   pCtx        The test context.
 * @param   bXcpt       X86_XCPT_PF if this can cause \#PFs, otherwise
 *                      X86_XCPT_UD.
 * @param   uPfErrCd    The error code for \#PFs.
 * @param   fPageLevel  Set if we're pushing PTE level bits.
 */
static void bs3CpuBasic2Pf_DoMovLoad(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint8_t bXcpt, uint8_t uPfErrCd,
                                     bool fPageLevel)
{
    static uint64_t const s_uValue = UINT64_C(0x7c4d0114428d);
    uint64_t uExpectRax;
    unsigned i;

    /*
     * Adjust the incoming context and calculate our expectations.
     */
    bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->MovLoad);
    Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
    switch (pThis->bMode & BS3_MODE_CODE_MASK)
    {
        case BS3_MODE_CODE_16:
        case BS3_MODE_CODE_V86:
            uExpectRax = (uint16_t)s_uValue | (pCtx->rax.u & UINT64_C(0xffffffffffff0000));
            break;
        case BS3_MODE_CODE_32:
            uExpectRax = (uint32_t)s_uValue | (pCtx->rax.u & UINT64_C(0xffffffff00000000));
            break;
        case BS3_MODE_CODE_64:
            uExpectRax = s_uValue;
            break;
    }
    if (uExpectRax == pCtx->rax.u)
        pCtx->rax.u = ~pCtx->rax.u;

    /*
     * Make two approaches to the test page (the 2nd one):
     *  - i=0: Start on the 1st page and edge into the 2nd.
     *  - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
     */
    for (i = 0; i < 2; i++)
    {
        unsigned off    = X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
        unsigned offEnd = X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);

        for (; off < offEnd; off++)
        {
            *(uint64_t *)&pThis->pbOrgTest[off] = s_uValue;
            if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
                pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
            else
                pThis->ExpectCtx.rbx.u = pCtx->rbx.u = (uintptr_t)pThis->pbTest + off;

            Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
            //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);

            if (   bXcpt != X86_XCPT_PF
                || (fPageLevel && off >= X86_PAGE_SIZE * 2)
                || (fPageLevel && off <= X86_PAGE_SIZE - pThis->cbAccess) )
            {
                pThis->ExpectCtx.rax.u = uExpectRax;
                bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->MovLoad.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
                pThis->ExpectCtx.rax = pCtx->rax;
            }
            else
            {
                if (off < X86_PAGE_SIZE)
                    pThis->ExpectCtx.cr2.u = (uintptr_t)pThis->pbTest + X86_PAGE_SIZE;
                else
                    pThis->ExpectCtx.cr2.u = (uintptr_t)pThis->pbTest + off;
                bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
                pThis->ExpectCtx.cr2 = pCtx->cr2;
            }
        }
    }
}

/**
 * Tests a simple store instruction around the edges of page two.
 *
 * @param   pThis       The test state data.
 * @param   pCtx        The test context.
 * @param   bXcpt       X86_XCPT_PF if this can cause \#PFs, otherwise
 *                      X86_XCPT_UD.
 * @param   uPfErrCd    The error code for \#PFs.
 * @param   fPageLevel  Set if we're pushing PTE level bits.
 */
static void bs3CpuBasic2Pf_DoMovStore(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint8_t bXcpt, uint8_t uPfErrCd,
                                      bool fPageLevel)
{
    static uint64_t const s_uValue        = UINT64_C(0x3af45ead86a34a26);
    static uint64_t const s_uValueFlipped = UINT64_C(0xc50ba152795cb5d9);
    uint64_t const  uRaxSaved = pCtx->rax.u;
    uint64_t        uExpectStored;
    unsigned        i;

    /*
     * Adjust the incoming context and calculate our expectations.
     */
    bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->MovStore);
    if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
        pCtx->rax.u = (uint32_t)s_uValue; /* leave the upper part zero */
    else
        pCtx->rax.u = s_uValue;

    Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
    switch (pThis->bMode & BS3_MODE_CODE_MASK)
    {
        case BS3_MODE_CODE_16:
        case BS3_MODE_CODE_V86:
            uExpectStored = (uint16_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
            break;
        case BS3_MODE_CODE_32:
            uExpectStored = (uint32_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
            break;
        case BS3_MODE_CODE_64:
            uExpectStored = s_uValue;
            break;
    }

    /*
     * Make two approaches to the test page (the 2nd one):
     *  - i=0: Start on the 1st page and edge into the 2nd.
     *  - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
     */
    for (i = 0; i < 2; i++)
    {
        unsigned off    = X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
        unsigned offEnd = X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
        for (; off < offEnd; off++)
        {
            *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
            if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
                pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
            else
                pThis->ExpectCtx.rbx.u = pCtx->rbx.u = (uintptr_t)pThis->pbTest + off;

            Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
            //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);

            if (   bXcpt != X86_XCPT_PF
                || (fPageLevel && off >= X86_PAGE_SIZE * 2)
                || (fPageLevel && off <= X86_PAGE_SIZE - pThis->cbAccess) )
            {
                bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->MovStore.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
                if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
                    Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
                                   g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
            }
            else
            {
                if (off < X86_PAGE_SIZE)
                    pThis->ExpectCtx.cr2.u = (uintptr_t)pThis->pbTest + X86_PAGE_SIZE;
                else
                    pThis->ExpectCtx.cr2.u = (uintptr_t)pThis->pbTest + off;
                bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
                pThis->ExpectCtx.cr2 = pCtx->cr2;
                if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
                    Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
                                   g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);

            }
        }
    }

    pCtx->rax.u = uRaxSaved;
}

/**
 * Tests an XCHG instruction around the edges of page two.
 *
 * @param   pThis       The test state data.
 * @param   pCtx        The test context.
 * @param   bXcpt       X86_XCPT_PF if this can cause \#PFs, otherwise
 *                      X86_XCPT_UD.
 * @param   uPfErrCd    The error code for \#PFs.
 * @param   fPageLevel  Set if we're pushing PTE level bits.
 */
static void bs3CpuBasic2Pf_DoXchg(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint8_t bXcpt, uint8_t uPfErrCd, bool fPageLevel)
{
    static uint64_t const s_uValue        = UINT64_C(0xea58699648e2f32c);
    static uint64_t const s_uValueFlipped = UINT64_C(0x15a79669b71d0cd3);
    uint64_t const  uRaxSaved = pCtx->rax.u;
    uint64_t        uRaxIn;
    uint64_t        uExpectedRax;
    uint64_t        uExpectStored;
    unsigned        i;

    /*
     * Adjust the incoming context and calculate our expectations.
     */
    bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->Xchg);
    if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
        uRaxIn = (uint32_t)s_uValue; /* leave the upper part zero */
    else
        uRaxIn = s_uValue;

    Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
    switch (pThis->bMode & BS3_MODE_CODE_MASK)
    {
        case BS3_MODE_CODE_16:
        case BS3_MODE_CODE_V86:
            uExpectedRax  = (uint16_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffffffff0000));
            uExpectStored = (uint16_t)s_uValue        | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
            break;
        case BS3_MODE_CODE_32:
            uExpectedRax  = (uint32_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffff00000000));
            uExpectStored = (uint32_t)s_uValue        | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
            break;
        case BS3_MODE_CODE_64:
            uExpectedRax  = s_uValueFlipped;
            uExpectStored = s_uValue;
            break;
    }

    /*
     * Make two approaches to the test page (the 2nd one):
     *  - i=0: Start on the 1st page and edge into the 2nd.
     *  - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
     */
    for (i = 0; i < 2; i++)
    {
        unsigned off    = X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
        unsigned offEnd = X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
        for (; off < offEnd; off++)
        {
            *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
            pCtx->rax.u = uRaxIn;
            if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
                pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
            else
                pThis->ExpectCtx.rbx.u = pCtx->rbx.u = (uintptr_t)pThis->pbTest + off;

            Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
            //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);

            if (   bXcpt != X86_XCPT_PF
                || (fPageLevel && off >= X86_PAGE_SIZE * 2)
                || (fPageLevel && off <= X86_PAGE_SIZE - pThis->cbAccess) )
            {
                pThis->ExpectCtx.rax.u = uExpectedRax;
                bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->Xchg.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
                if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
                    Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
                                   g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
            }
            else
            {
                pThis->ExpectCtx.rax.u = uRaxIn;
                if (off < X86_PAGE_SIZE)
                    pThis->ExpectCtx.cr2.u = (uintptr_t)pThis->pbTest + X86_PAGE_SIZE;
                else
                    pThis->ExpectCtx.cr2.u = (uintptr_t)pThis->pbTest + off;
                bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
                pThis->ExpectCtx.cr2 = pCtx->cr2;
                if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
                    Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
                                   g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);
            }
        }
    }

    pCtx->rax.u = uRaxSaved;
}

/**
 * Tests a CMPXCHG instruction around the edges of page two.
 *
 * @param   pThis       The test state data.
 * @param   pCtx        The test context.
 * @param   bXcpt       X86_XCPT_PF if this can cause \#PFs, otherwise
 *                      X86_XCPT_UD.
 * @param   uPfErrCd    The error code for \#PFs.
 * @param   fPageLevel  Set if we're pushing PTE level bits.
 * @param   fMissmatch  Whether to fail and not store (@c true), or succeed
 *                      and do the store.
 */
static void bs3CpuBasic2Pf_DoCmpXchg(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint8_t bXcpt, uint8_t uPfErrCd,
                                     bool fPageLevel, bool fMissmatch)
{
    static uint64_t const s_uValue        = UINT64_C(0xea58699648e2f32c);
    static uint64_t const s_uValueFlipped = UINT64_C(0x15a79669b71d0cd3);
    static uint64_t const s_uValueOther   = UINT64_C(0x2171239bcb044c81);
    uint64_t const  uRaxSaved = pCtx->rax.u;
    uint64_t const  uRcxSaved = pCtx->rcx.u;
    uint64_t        uRaxIn;
    uint64_t        uExpectedRax;
    uint32_t        uExpectedFlags;
    uint64_t        uExpectStored;
    unsigned        i;

    /*
     * Adjust the incoming context and calculate our expectations.
     * Hint: CMPXCHG [xBX],xCX ; xAX compare and update implicit, ZF set to !fMissmatch.
     */
    bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->CmpXchg);
    if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
    {
        uRaxIn      = (uint32_t)(fMissmatch ? s_uValueOther : s_uValueFlipped); /* leave the upper part zero */
        pCtx->rcx.u = (uint32_t)s_uValue; /* ditto */
    }
    else
    {
        uRaxIn      = fMissmatch ? s_uValueOther : s_uValueFlipped;
        pCtx->rcx.u = s_uValue;
    }
    if (fMissmatch)
        pCtx->rflags.u32 |= X86_EFL_ZF;
    else
        pCtx->rflags.u32 &= ~X86_EFL_ZF;

    Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
    uExpectedFlags = pCtx->rflags.u32 & ~(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_SF | X86_EFL_OF | X86_EFL_ZF);
    switch (pThis->bMode & BS3_MODE_CODE_MASK)
    {
        case BS3_MODE_CODE_16:
        case BS3_MODE_CODE_V86:
            uExpectedRax  = (uint16_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffffffff0000));
            uExpectStored = (uint16_t)s_uValue        | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
            uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
            break;
        case BS3_MODE_CODE_32:
            uExpectedRax  = (uint32_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffff00000000));
            uExpectStored = (uint32_t)s_uValue        | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
            uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
            break;
        case BS3_MODE_CODE_64:
            uExpectedRax  = s_uValueFlipped;
            uExpectStored = s_uValue;
            uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
            break;
    }
    if (fMissmatch)
        uExpectStored = s_uValueFlipped;

    /*
     * Make two approaches to the test page (the 2nd one):
     *  - i=0: Start on the 1st page and edge into the 2nd.
     *  - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
     */
    for (i = 0; i < 2; i++)
    {
        unsigned off    = X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
        unsigned offEnd = X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
        for (; off < offEnd; off++)
        {
            *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
            pCtx->rax.u = uRaxIn;
            if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
                pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
            else
                pThis->ExpectCtx.rbx.u = pCtx->rbx.u = (uintptr_t)pThis->pbTest + off;

            Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
            //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);

            if (   bXcpt != X86_XCPT_PF
                || (fPageLevel && off >= X86_PAGE_SIZE * 2)
                || (fPageLevel && off <= X86_PAGE_SIZE - pThis->cbAccess) )
            {
                pThis->ExpectCtx.rax.u      = uExpectedRax;
                pThis->ExpectCtx.rflags.u32 = uExpectedFlags;
                bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->CmpXchg.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
                if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
                    Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
                                   g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
            }
            else
            {
                pThis->ExpectCtx.rax.u  = uRaxIn;
                pThis->ExpectCtx.rflags = pCtx->rflags;
                if (off < X86_PAGE_SIZE)
                    pThis->ExpectCtx.cr2.u = (uintptr_t)pThis->pbTest + X86_PAGE_SIZE;
                else
                    pThis->ExpectCtx.cr2.u = (uintptr_t)pThis->pbTest + off;
                bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
                pThis->ExpectCtx.cr2 = pCtx->cr2;
                if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
                    Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
                                   g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);
            }
        }
    }

    pCtx->rax.u = uRaxSaved;
    pCtx->rcx.u = uRcxSaved;
}


static void bs3CpuBasic2Pf_DoCmpXchgMiss(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint8_t bXcpt, uint8_t uPfErrCd,
                                         bool fPageLevel)
{
    bs3CpuBasic2Pf_DoCmpXchg(pThis, pCtx, bXcpt, uPfErrCd, fPageLevel, true /*fMissmatch*/ );
}


static void bs3CpuBasic2Pf_DoCmpXchgMatch(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint8_t bXcpt, uint8_t uPfErrCd,
                                          bool fPageLevel)
{
    bs3CpuBasic2Pf_DoCmpXchg(pThis, pCtx, bXcpt, uPfErrCd, fPageLevel, false /*fMissmatch*/ );
}

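/** The memory accessors to test, together with the X86_TRAP_PF_XXX flags each
 *  one contributes to the expected \#PF error code. */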
static BS3CPUBASIC2PFACCESSOR const g_aAccessors[] =
{
    { "DoExec",         bs3CpuBasic2Pf_DoExec,          X86_TRAP_PF_ID },
    { "DoMovLoad",      bs3CpuBasic2Pf_DoMovLoad,       0 },
    { "DoMovStore",     bs3CpuBasic2Pf_DoMovStore,      X86_TRAP_PF_RW },
    { "DoXchg",         bs3CpuBasic2Pf_DoXchg,          X86_TRAP_PF_RW },
    { "DoCmpXchgMiss",  bs3CpuBasic2Pf_DoCmpXchgMiss,   X86_TRAP_PF_RW },
    { "DoCmpXchgMatch", bs3CpuBasic2Pf_DoCmpXchgMatch,  X86_TRAP_PF_RW },
};

/** @} */


/** @name BS3CPUBASIC2PFMODPT::pfnModify implementations.
 * @{ */

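/** Clears the PTE bits given by pEntry->uModifyArg (a mask) in the PTE of the
 *  2nd test page. */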
static void bs3CpuBasic2Pf_ClearMask(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry)
{
    if (pThis->PgInfo.cbEntry == 4)
        g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Legacy.pPte + 1,
                                         pThis->PgInfo.u.Legacy.pPte[1].u & ~(uint32_t)pEntry->uModifyArg,
                                         pThis->PgInfo.u.Legacy.pPte[1].u);
    else
        g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Pae.pPte + 1,
                                         pThis->PgInfo.u.Pae.pPte[1].au32[0] & ~(uint32_t)pEntry->uModifyArg,
                                         pThis->PgInfo.u.Pae.pPte[1].au32[0]);
}

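/** Sets PTE bit number pEntry->uModifyArg in the PTE of the 2nd test page. */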
static void bs3CpuBasic2Pf_SetBit(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry)
{
    if (pThis->PgInfo.cbEntry == 4)
        g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Legacy.pPte + 1,
                                         pThis->PgInfo.u.Legacy.pPte[1].u | RT_BIT_32(pEntry->uModifyArg),
                                         pThis->PgInfo.u.Legacy.pPte[1].u);
    else if (pEntry->uModifyArg < 32)
        g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Pae.pPte + 1,
                                         pThis->PgInfo.u.Pae.pPte[1].au32[0] | RT_BIT_32(pEntry->uModifyArg),
                                         pThis->PgInfo.u.Pae.pPte[1].au32[0]);
    else
        g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[1],
                                         pThis->PgInfo.u.Pae.pPte[1].au32[1] | RT_BIT_32(pEntry->uModifyArg - 32),
                                         pThis->PgInfo.u.Pae.pPte[1].au32[1]);
}

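/** Clears PTE bit number pEntry->uModifyArg in the PTE of the 2nd test page. */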
static void bs3CpuBasic2Pf_ClearBit(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry)
{
    if (pThis->PgInfo.cbEntry == 4)
        g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Legacy.pPte + 1,
                                         pThis->PgInfo.u.Legacy.pPte[1].u & ~RT_BIT_32(pEntry->uModifyArg),
                                         pThis->PgInfo.u.Legacy.pPte[1].u);
    else if (pEntry->uModifyArg < 32)
        g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Pae.pPte + 1,
                                         pThis->PgInfo.u.Pae.pPte[1].au32[0] & ~RT_BIT_32(pEntry->uModifyArg),
                                         pThis->PgInfo.u.Pae.pPte[1].au32[0]);
    else
        g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[1],
                                         pThis->PgInfo.u.Pae.pPte[1].au32[1] & ~RT_BIT_32(pEntry->uModifyArg - 32),
                                         pThis->PgInfo.u.Pae.pPte[1].au32[1]);
}


static void bs3CpuBasic2Pf_NoChange(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry)
{
    RT_NOREF3(pThis, iStore, pEntry);
}

/** @} */


/** @name BS3CPUBASIC2PFMODPT::pfnApplicable implementations.
 * @{ */

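/** Checks whether bit pEntry->uModifyArg is a reserved PTE bit in the current
 *  paging mode (PAE vs. long mode, taking EFER.NXE into account). */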
static bool bs3CpuBasic2Pf_IsPteBitReserved(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
{
    if (pThis->PgInfo.cbEntry == 8)
    {
        if (pThis->PgInfo.cEntries == 3)
        {
            if ((uint32_t)(pEntry->uModifyArg - 52U) < (uint32_t)(12 - pThis->fNxe))
                return true;
        }
        else if (pEntry->uModifyArg == 63 && !pThis->fNxe)
            return true;
    }
    return false;
}

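/** Checks whether bit pEntry->uModifyArg is an ignored, software-usable PTE
 *  bit (long mode only, bits 52 thru 62). */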
static bool bs3CpuBasic2Pf_IsPteBitSoftwareUsable(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
{
    if (pThis->PgInfo.cbEntry == 8)
    {
        if (pThis->PgInfo.cEntries != 3)
        {
            if ((uint32_t)(pEntry->uModifyArg - 52U) < (uint32_t)11)
                return true;
        }
    }
    return false;
}


static bool bs3CpuBasic2Pf_IsNxe(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
{
    return pThis->fNxe && pThis->PgInfo.cbEntry == 8;
}

/** @} */

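/** The PTE modifications to test; the flag columns state the effective
 *  P/US/RW/NX/reserved status of the 2nd test page after the modification. */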
static const BS3CPUBASIC2PFMODPT g_aPteWorkers[] =
{
    /* { pszName,   P  U  W  NX RSV ModifyArg,               pfnModify,                pfnApplicable }, */
960 { "org", 1, 1, 1, 0, 0, 0, bs3CpuBasic2Pf_NoChange, NULL },
961 { "!US", 1, 0, 1, 0, 0, X86_PTE_US, bs3CpuBasic2Pf_ClearMask, NULL },
962 { "!RW", 1, 1, 0, 0, 0, X86_PTE_RW, bs3CpuBasic2Pf_ClearMask, NULL },
963 { "!RW+!US", 1, 0, 0, 0, 0, X86_PTE_RW | X86_PTE_US, bs3CpuBasic2Pf_ClearMask, NULL },
964 { "!P", 0, 0, 0, 0, 0, X86_PTE_P, bs3CpuBasic2Pf_ClearMask, NULL },
965 { "NX", 1, 1, 1, 1, 0, 63, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsNxe },
966 { "RSV[52]", 0, 0, 0, 0, 1, 52, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
967 { "RSV[53]", 0, 0, 0, 0, 1, 53, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
968 { "RSV[54]", 0, 0, 0, 0, 1, 54, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
969 { "RSV[55]", 0, 0, 0, 0, 1, 55, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
970 { "RSV[56]", 0, 0, 0, 0, 1, 56, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
971 { "RSV[57]", 0, 0, 0, 0, 1, 57, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
972 { "RSV[58]", 0, 0, 0, 0, 1, 58, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
973 { "RSV[59]", 0, 0, 0, 0, 1, 59, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
974 { "RSV[60]", 0, 0, 0, 0, 1, 60, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
975 { "RSV[61]", 0, 0, 0, 0, 1, 61, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
976 { "RSV[62]", 0, 0, 0, 0, 1, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
977 { "RSV[62]", 0, 0, 0, 0, 1, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
978 { "RSV[63]", 0, 0, 0, 0, 1, 63, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
979 { "!RSV[52]", 1, 1, 1, 0, 0, 52, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
980 { "!RSV[53]", 1, 1, 1, 0, 0, 53, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
981 { "!RSV[54]", 1, 1, 1, 0, 0, 54, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
982 { "!RSV[55]", 1, 1, 1, 0, 0, 55, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
983 { "!RSV[56]", 1, 1, 1, 0, 0, 56, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
984 { "!RSV[57]", 1, 1, 1, 0, 0, 57, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
985 { "!RSV[58]", 1, 1, 1, 0, 0, 58, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
986 { "!RSV[59]", 1, 1, 1, 0, 0, 59, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
987 { "!RSV[60]", 1, 1, 1, 0, 0, 60, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
988 { "!RSV[61]", 1, 1, 1, 0, 0, 61, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
989 { "!RSV[62]", 1, 1, 1, 0, 0, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
990
991};
992
993
/**
 * Worker for bs3CpuBasic2_RaiseXcpt0e_c32 that does the actual testing.
 *
 * Caller does all the cleaning up.
 *
 * @returns Error count.
 * @param   pThis   Test state data.
 * @param   fWp     Whether CR0.WP is set.
 * @param   fNxe    Whether NX is enabled.
 */
static uint8_t bs3CpuBasic2_RaiseXcpt0eWorker(PBS3CPUBASIC2PFSTATE register pThis, bool const fWp, bool const fNxe)
{
    unsigned iRing;
    unsigned iStore;
    unsigned iAccessor;
//    uint32_t const fPfId = fNxe ? X86_TRAP_PF_ID : 0;
    uint32_t const fPfIdMask = fNxe ? UINT32_MAX : ~X86_TRAP_PF_ID;
    BS3REGCTX aCtxts[4];

    pThis->fWp  = fWp;
    pThis->fNxe = fNxe;

    /** @todo figure out V8086 testing. */
    if ((pThis->bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_V86)
        return BS3TESTDOMODE_SKIPPED;


    /* paranoia: Touch the various big stack structures to ensure the compiler has allocated stack for them. */
    for (iRing = 0; iRing < RT_ELEMENTS(aCtxts); iRing++)
        Bs3MemZero(&aCtxts[iRing], sizeof(aCtxts[iRing]));

    /*
     * Set up a few contexts for testing this stuff.
     */
    Bs3RegCtxSaveEx(&aCtxts[0], pThis->bMode, 2048);
    for (iRing = 1; iRing < 4; iRing++)
    {
        aCtxts[iRing] = aCtxts[0];
        Bs3RegCtxConvertToRingX(&aCtxts[iRing], iRing);
    }

    if (!BS3_MODE_IS_16BIT_CODE(pThis->bMode))
    {
        for (iRing = 0; iRing < 4; iRing++)
            aCtxts[iRing].rbx.u = (uintptr_t)pThis->pbTest;
    }
    else
    {
        for (iRing = 0; iRing < 4; iRing++)
        {
            aCtxts[iRing].ds    = pThis->uSel16TestData;
            aCtxts[iRing].rbx.u = 0;
        }
    }

    /*
     * Check basic operation:
     */
    for (iRing = 0; iRing < 4; iRing++)
        for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
            g_aAccessors[iAccessor].pfnAccessor(pThis, &aCtxts[iRing], X86_XCPT_UD, UINT8_MAX, true /*fPageLevel*/);

    /*
     * Check the U bit on PTE level. We only mess with the 2nd page.
     */
    {
        bool const fPgLvl = true;
        bool const fWp    = RT_BOOL(ASMGetCR0() & X86_CR0_WP);
        unsigned   iPteWrk;

        bs3CpuBasic2Pf_FlushAll();
        for (iPteWrk = 0; iPteWrk < RT_ELEMENTS(g_aPteWorkers); iPteWrk++)
        {
            BS3CPUBASIC2PFMODPT         EffWrk;
            const BS3CPUBASIC2PFMODPT  *pPteWrk = &g_aPteWorkers[iPteWrk];
            if (   pPteWrk->pfnApplicable && !pPteWrk->pfnApplicable(pThis, pPteWrk))
                continue;
            //if (pThis->bMode == BS3_MODE_LM16) Bs3TestPrintf("PteWrk: %s\n", pPteWrk->pszName);

            EffWrk = *pPteWrk;

            /*
             * Do the modification once, then test all different accesses
             * without flushing the TLB or anything in-between.
             */
            for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
            {
                pPteWrk->pfnModify(pThis, iStore, pPteWrk);

                for (iRing = 0; iRing < 4; iRing++)
                {
                    PBS3REGCTX const pCtx = &aCtxts[iRing];
                    if (   EffWrk.fReserved
                        || !EffWrk.fPresent
                        || (!EffWrk.fUser && iRing == 3))
                    {
                        uint32_t const fPfBase = (  EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
                                                  : EffWrk.fPresent  ? X86_TRAP_PF_P : 0)
                                               | (iRing == 3 ? X86_TRAP_PF_US : 0);
                        for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
                            g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, X86_XCPT_PF,
                                                                fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask), fPgLvl);
                    }
                    else
                    {
                        uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
                        for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
                            if (   (   (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
                                    && EffWrk.fNoExecute)
                                || (   (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
                                    && !EffWrk.fWriteable
                                    && (fWp || iRing == 3)) )
                                g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, X86_XCPT_PF,
                                                                    fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask),
                                                                    fPgLvl);
                            else
                                g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, X86_XCPT_UD, UINT8_MAX, fPgLvl);

                    }
                }

                /* Reset the paging + full flush. */
                bs3CpuBasic2Pf_RestoreFromBackups(pThis);
            }

            /*
             * Again, but redoing everything for each accessor.
             */
            for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
            {
                for (iRing = 0; iRing < 4; iRing++)
                {
                    PBS3REGCTX const pCtx = &aCtxts[iRing];
                    if (   EffWrk.fReserved
                        || !EffWrk.fPresent
                        || (!EffWrk.fUser && iRing == 3))
                    {
                        uint32_t const fPfBase = (  EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
                                                  : EffWrk.fPresent  ? X86_TRAP_PF_P : 0)
                                               | (iRing == 3 ? X86_TRAP_PF_US : 0);
                        for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
                        {
                            pPteWrk->pfnModify(pThis, iStore, pPteWrk);
                            g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, X86_XCPT_PF,
                                                                fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask), fPgLvl);
                            bs3CpuBasic2Pf_RestoreFromBackups(pThis);
                        }
                    }
                    else
                    {
                        uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
                        for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
                        {
                            pPteWrk->pfnModify(pThis, iStore, pPteWrk);
                            if (   (   (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
                                    && EffWrk.fNoExecute)
                                || (   (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
                                    && !EffWrk.fWriteable
                                    && (fWp || iRing == 3)) )
                                g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, X86_XCPT_PF,
                                                                    fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask),
                                                                    fPgLvl);
                            else
                                g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, X86_XCPT_UD, UINT8_MAX, fPgLvl);
                            bs3CpuBasic2Pf_RestoreFromBackups(pThis);
                        }
                    }
                }
            }

            /*
             * ...
             */

        }
    }

    return 0;
}

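/**
 * Mode test entry point (FNBS3TESTDOMODE): exercises \#PF delivery for the
 * various accessors and PTE modifications in the given mode.
 */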
BS3_DECL_CALLBACK(uint8_t) bs3CpuBasic2_RaiseXcpt0e_c32(uint8_t bMode)
{
    void                   *pvTestUnaligned;
    uint32_t                cbTestUnaligned = _8M;
    uint8_t                 bRet = 1;
    int                     rc;
    BS3CPUBASIC2PFSTATE     State;

    /*
     * Initialize the state data.
     */
    Bs3MemZero(&State, sizeof(State));
    State.bMode = bMode;
    switch (bMode & BS3_MODE_CODE_MASK)
    {
        case BS3_MODE_CODE_16:  State.cbAccess = sizeof(uint16_t); break;
        case BS3_MODE_CODE_V86: State.cbAccess = sizeof(uint16_t); break;
        case BS3_MODE_CODE_32:  State.cbAccess = sizeof(uint32_t); break;
        case BS3_MODE_CODE_64:  State.cbAccess = sizeof(uint64_t); break;
    }
    State.pCmnMode = &g_aCmnModes[0];
    while (State.pCmnMode->bMode != (bMode & BS3_MODE_CODE_MASK))
        State.pCmnMode++;
    State.fUseInvlPg = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;

    /*
     * Allocate some memory we can play around with, then carve a size-aligned
     * chunk out of it so we might also be able to play with 2/4MB pages.
     */
    cbTestUnaligned = _8M * 2;
    while ((pvTestUnaligned = Bs3MemAlloc(BS3MEMKIND_FLAT32, cbTestUnaligned)) == NULL)
    {
        cbTestUnaligned >>= 1;
        if (cbTestUnaligned <= _16K)
        {
            Bs3TestFailed("Failed to allocate memory to play around with\n");
            return 1;
        }
    }

    /* align. */
    if ((uintptr_t)pvTestUnaligned & (cbTestUnaligned - 1))
    {
        State.cbTest    = cbTestUnaligned >> 1;
        State.pbOrgTest = (uint8_t *)(((uintptr_t)pvTestUnaligned + State.cbTest - 1) & ~(State.cbTest - 1));
    }
    else
    {
        State.pbOrgTest = pvTestUnaligned;
        State.cbTest    = cbTestUnaligned;
    }
    State.cTestPages = State.cbTest >> X86_PAGE_SHIFT;

    /*
     * Alias this memory far away from where our code and data lives.
     */
    State.pbTest = (uint8_t *)UINT32_C(0x80000000);
    rc = Bs3PagingAlias((uintptr_t)State.pbTest, (uintptr_t)State.pbOrgTest, State.cbTest, X86_PTE_P | X86_PTE_RW | X86_PTE_US);
    if (RT_SUCCESS(rc))
    {
        rc = Bs3PagingQueryAddressInfo((uintptr_t)State.pbTest, &State.PgInfo);
        if (RT_SUCCESS(rc))
        {
            /* Set values that derive from the test memory size and paging info. */
            if (State.PgInfo.cEntries == 2)
            {
                State.cTestPdes    = (State.cTestPages + X86_PG_ENTRIES - 1) / X86_PG_ENTRIES;
                State.cTest1stPtes = RT_MIN(State.cTestPages, X86_PG_ENTRIES);
                State.cbPdeBackup  = State.cTestPdes * (X86_PAGE_SIZE / X86_PG_ENTRIES);
                State.cbPteBackup  = State.cTest1stPtes * (X86_PAGE_SIZE / X86_PG_ENTRIES);
            }
            else
            {
                State.cTestPdes    = (State.cTestPages + X86_PG_PAE_ENTRIES - 1) / X86_PG_PAE_ENTRIES;
                State.cTest1stPtes = RT_MIN(State.cTestPages, X86_PG_PAE_ENTRIES);
                State.cbPdeBackup  = State.cTestPdes * (X86_PAGE_SIZE / X86_PG_PAE_ENTRIES);
                State.cbPteBackup  = State.cTest1stPtes * (X86_PAGE_SIZE / X86_PG_PAE_ENTRIES);
            }
            if (State.cTestPdes <= RT_ELEMENTS(State.au64PdeBackup))
            {
                uint32_t cr0 = ASMGetCR0();

                /* Back up the structures. */
                Bs3MemCpy(State.au64PteBackup, State.PgInfo.u.Legacy.pPte, State.cbPteBackup);
                Bs3MemCpy(State.au64PdeBackup, State.PgInfo.u.Legacy.pPde, State.cbPdeBackup);
                if (State.PgInfo.cEntries > 2)
                    State.u64PdpteBackup = State.PgInfo.u.Pae.pPdpe->u;
                if (State.PgInfo.cEntries > 3)
                    State.u64Pml4eBackup = State.PgInfo.u.Pae.pPml4e->u;

                /*
                 * Setup a 16-bit selector for accessing the alias.
                 */
                Bs3SelSetup16BitData(&Bs3GdteSpare00, (uintptr_t)State.pbTest);
                State.uSel16TestData = BS3_SEL_SPARE_00 | 3;

                /*
                 * Do the testing.
                 */
                ASMSetCR0(ASMGetCR0() & ~X86_CR0_WP);
                bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, false /*fWp*/, false /*fNxe*/);
                if (bRet == 0 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
                {
                    ASMSetCR0(ASMGetCR0() | X86_CR0_WP);
                    bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, true /*fWp*/, false /*fNxe*/);
                }

                /* Do again with NX enabled. */
                if (bRet == 0 && (g_uBs3CpuDetected & BS3CPU_F_NX))
                {
                    ASMWrMsr(MSR_K6_EFER, ASMRdMsr(MSR_K6_EFER) | MSR_K6_EFER_NXE);
                    ASMSetCR0(ASMGetCR0() & ~X86_CR0_WP);
                    bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, false /*fWp*/, State.PgInfo.cbEntry == 8 /*fNxe*/);
                    ASMSetCR0(ASMGetCR0() | X86_CR0_WP);
                    bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, true /*fWp*/, State.PgInfo.cbEntry == 8 /*fNxe*/);
                    ASMWrMsr(MSR_K6_EFER, ASMRdMsr(MSR_K6_EFER) & ~MSR_K6_EFER_NXE);
                }
                bs3CpuBasic2Pf_RestoreFromBackups(&State);
                ASMSetCR0((ASMGetCR0() & ~X86_CR0_WP) | (cr0 & X86_CR0_WP));
            }
            else
                Bs3TestFailedF("cTestPdes=%u!\n", State.cTestPdes);
        }
        else
            Bs3TestFailedF("Bs3PagingQueryAddressInfo failed: %d\n", rc);
        Bs3PagingUnalias((uintptr_t)State.pbTest, State.cbTest);
    }
    else
        Bs3TestFailedF("Bs3PagingAlias failed! rc=%d\n", rc);
    Bs3MemFree(pvTestUnaligned, cbTestUnaligned);
    return bRet;
}
