VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-template.c @ 60727

Last change on this file since 60727 was 60724, checked in by vboxsync, 9 years ago

bs3kit: updates (started on lidt)

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 147.2 KB
1/* $Id: bs3-cpu-basic-2-template.c 60724 2016-04-27 17:00:29Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C code template.
4 */
5
6/*
7 * Copyright (C) 2007-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#include <iprt/asm.h>
32#include <iprt/asm-amd64-x86.h>
33
34
35/*********************************************************************************************************************************
36* Defined Constants And Macros *
37*********************************************************************************************************************************/
38#undef CHECK_MEMBER
39#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
40 do \
41 { \
42 if ((a_Actual) == (a_Expected)) { /* likely */ } \
43 else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
44 } while (0)
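/* Example usage (as in the compare helpers further down):
       CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
   reports a failure via bs3CpuBasic2_FailedF when the actual value differs from the expected one. */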
45
46
47#ifdef BS3_INSTANTIATING_CMN
48/** Indicating that we've got operand size prefix and that it matters. */
49# define BS3CB2SIDTSGDT_F_OPSIZE UINT8_C(0x01)
50#endif
51
52#ifdef BS3_INSTANTIATING_MODE
53# undef MyBs3Idt
54# undef MY_SYS_SEL_R0_CS
55# undef MY_SYS_SEL_R0_CS_CNF
56# undef MY_SYS_SEL_R0_DS
57# undef MY_SYS_SEL_R0_SS
58# if BS3_MODE_IS_16BIT_SYS(TMPL_MODE)
59# define MyBs3Idt Bs3Idt16
60# define MY_SYS_SEL_R0_CS BS3_SEL_R0_CS16
61# define MY_SYS_SEL_R0_CS_CNF BS3_SEL_R0_CS16_CNF
62# define MY_SYS_SEL_R0_DS BS3_SEL_R0_DS16
63# define MY_SYS_SEL_R0_SS BS3_SEL_R0_SS16
64# elif BS3_MODE_IS_32BIT_SYS(TMPL_MODE)
65# define MyBs3Idt Bs3Idt32
66# define MY_SYS_SEL_R0_CS BS3_SEL_R0_CS32
67# define MY_SYS_SEL_R0_CS_CNF BS3_SEL_R0_CS32_CNF
68# define MY_SYS_SEL_R0_DS BS3_SEL_R0_DS32
69# define MY_SYS_SEL_R0_SS BS3_SEL_R0_SS32
70# elif BS3_MODE_IS_64BIT_SYS(TMPL_MODE)
71# define MyBs3Idt Bs3Idt64
72# define MY_SYS_SEL_R0_CS BS3_SEL_R0_CS64
73# define MY_SYS_SEL_R0_CS_CNF BS3_SEL_R0_CS64_CNF
74# define MY_SYS_SEL_R0_DS BS3_SEL_R0_DS64
75# define MY_SYS_SEL_R0_SS BS3_SEL_R0_DS64
76# else
77# error "TMPL_MODE"
78# endif
79#endif
80
81
82/*********************************************************************************************************************************
83* Structures and Typedefs *
84*********************************************************************************************************************************/
85#ifdef BS3_INSTANTIATING_CMN
86typedef struct BS3CB2INVLDESCTYPE
87{
88 uint8_t u4Type; /**< The descriptor type value (X86_SEL_TYPE_XXX or raw system type). */
89 uint8_t u1DescType; /**< The descriptor-type bit: 1 = code/data, 0 = system. */
90} BS3CB2INVLDESCTYPE;
91
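/** Describes one SIDT/SGDT/LIDT assembly worker; used by the g_aSidtWorkers,
 * g_aSgdtWorkers and g_aLidtWorkers tables below and driven by
 * bs3CpuBasic2_sidt_sgdt_One. */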
92typedef struct BS3CB2SIDTSGDT
93{
94 FPFNBS3FAR fpfnWorker; /**< Far pointer to the assembly worker. */
95 uint8_t cbInstr; /**< Length of the instruction sequence up to (but not including) the trailing UD2. */
96 bool fSs; /**< Whether the worker addresses its buffer via SS instead of DS. */
97 uint8_t bMode; /**< Mask of applicable code modes (BS3_MODE_CODE_XXX). */
98 uint8_t fFlags; /**< BS3CB2SIDTSGDT_F_XXX. */
99} BS3CB2SIDTSGDT;
100#endif
101
102
103/*********************************************************************************************************************************
104* External Symbols *
105*********************************************************************************************************************************/
106#ifdef BS3_INSTANTIATING_CMN
107extern FNBS3FAR bs3CpuBasic2_Int80;
108extern FNBS3FAR bs3CpuBasic2_Int81;
109extern FNBS3FAR bs3CpuBasic2_Int82;
110extern FNBS3FAR bs3CpuBasic2_Int83;
111extern FNBS3FAR bs3CpuBasic2_ud2;
112# define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
113extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
114
115extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
116extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
117extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
118extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
119extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
120extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
121extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
122extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
123extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
124extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
125extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
126extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
127
128extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
129extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
130extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
131extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
132extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
133extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
134extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
135extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
136extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
137extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
138extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
139extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
140
141extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
142extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
143extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
144extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
145extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
146extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
147extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
148extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
149extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
150extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
151extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
152extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
153
154#endif
155
156
157/*********************************************************************************************************************************
158* Global Variables *
159*********************************************************************************************************************************/
160#ifdef BS3_INSTANTIATING_CMN
161# define g_pszTestMode BS3_CMN_NM(g_pszTestMode)
162static const char BS3_FAR *g_pszTestMode = (const char *)1;
163# define g_bTestMode BS3_CMN_NM(g_bTestMode)
164static uint8_t g_bTestMode = 1;
165# define g_f16BitSys BS3_CMN_NM(g_f16BitSys)
166static bool g_f16BitSys = 1;
167
168
169/** SIDT test workers. */
170static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
171{
172 { bs3CpuBasic2_sidt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 },
173 { bs3CpuBasic2_sidt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 },
174 { bs3CpuBasic2_sidt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 },
175 { bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 },
176 { bs3CpuBasic2_sidt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32 },
177 { bs3CpuBasic2_sidt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32 },
178 { bs3CpuBasic2_sidt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32 },
179 { bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32 },
180 { bs3CpuBasic2_sidt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64 },
181 { bs3CpuBasic2_sidt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64 },
182 { bs3CpuBasic2_sidt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64 },
183 { bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64 },
184};
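/* Note: the SIDT entries above and the SGDT entries below give only the first
   four members; C aggregate initialization zero-fills the trailing fFlags
   member. Only the LIDT table sets fFlags explicitly (BS3CB2SIDTSGDT_F_OPSIZE). */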
185
186/** SGDT test workers. */
187static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
188{
189 { bs3CpuBasic2_sgdt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 },
190 { bs3CpuBasic2_sgdt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 },
191 { bs3CpuBasic2_sgdt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 },
192 { bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 },
193 { bs3CpuBasic2_sgdt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32 },
194 { bs3CpuBasic2_sgdt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32 },
195 { bs3CpuBasic2_sgdt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32 },
196 { bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32 },
197 { bs3CpuBasic2_sgdt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64 },
198 { bs3CpuBasic2_sgdt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64 },
199 { bs3CpuBasic2_sgdt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64 },
200 { bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64 },
201};
202
203/** LIDT test workers. */
204static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
205{
206 { bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
207 { bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
208 { bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
209 { bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
210 { bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
211 { bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
212 { bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
213 { bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
214 { bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
215 { bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
216 { bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
217 { bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
218};
219
220
221
222/** Table containing invalid CS selector types. */
223static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
224{
225 { X86_SEL_TYPE_RO, 1 },
226 { X86_SEL_TYPE_RO_ACC, 1 },
227 { X86_SEL_TYPE_RW, 1 },
228 { X86_SEL_TYPE_RW_ACC, 1 },
229 { X86_SEL_TYPE_RO_DOWN, 1 },
230 { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
231 { X86_SEL_TYPE_RW_DOWN, 1 },
232 { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
233 { 0, 0 },
234 { 1, 0 },
235 { 2, 0 },
236 { 3, 0 },
237 { 4, 0 },
238 { 5, 0 },
239 { 6, 0 },
240 { 7, 0 },
241 { 8, 0 },
242 { 9, 0 },
243 { 10, 0 },
244 { 11, 0 },
245 { 12, 0 },
246 { 13, 0 },
247 { 14, 0 },
248 { 15, 0 },
249};
250
251/** Table containing invalid SS selector types. */
252static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
253{
254 { X86_SEL_TYPE_EO, 1 },
255 { X86_SEL_TYPE_EO_ACC, 1 },
256 { X86_SEL_TYPE_ER, 1 },
257 { X86_SEL_TYPE_ER_ACC, 1 },
258 { X86_SEL_TYPE_EO_CONF, 1 },
259 { X86_SEL_TYPE_EO_CONF_ACC, 1 },
260 { X86_SEL_TYPE_ER_CONF, 1 },
261 { X86_SEL_TYPE_ER_CONF_ACC, 1 },
262 { 0, 0 },
263 { 1, 0 },
264 { 2, 0 },
265 { 3, 0 },
266 { 4, 0 },
267 { 5, 0 },
268 { 6, 0 },
269 { 7, 0 },
270 { 8, 0 },
271 { 9, 0 },
272 { 10, 0 },
273 { 11, 0 },
274 { 12, 0 },
275 { 13, 0 },
276 { 14, 0 },
277 { 15, 0 },
278};
279
280#endif /* BS3_INSTANTIATING_CMN - global */
281
282#ifdef BS3_INSTANTIATING_CMN
283
284/**
285 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
286 * and g_pszTestMode.
287 */
288# define bs3CpuBasic2_FailedF BS3_CMN_NM(bs3CpuBasic2_FailedF)
289BS3_DECL_NEAR(void) bs3CpuBasic2_FailedF(const char *pszFormat, ...)
290{
291 va_list va;
292
293 char szTmp[168];
294 va_start(va, pszFormat);
295 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
296 va_end(va);
297
298 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
299}
300
301
302/**
303 * Compares trap stuff.
304 */
305# define bs3CpuBasic2_CompareIntCtx1 BS3_CMN_NM(bs3CpuBasic2_CompareIntCtx1)
306BS3_DECL_NEAR(void) bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
307{
308 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
309 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
310 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
311 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
312 if (Bs3TestSubErrorCount() != cErrorsBefore)
313 {
314 Bs3TrapPrintFrame(pTrapCtx);
315#if 1
316 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
317 Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
318 ASMHalt();
319#endif
320 }
321}
322
323
324/**
325 * Compares trap stuff.
326 */
327# define bs3CpuBasic2_CompareTrapCtx2 BS3_CMN_NM(bs3CpuBasic2_CompareTrapCtx2)
328BS3_DECL_NEAR(void) bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
329 uint8_t bXcpt, uint16_t uHandlerCs)
330{
331 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
332 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
333 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
334 CHECK_MEMBER("uHandlerCs", "%#06x", pTrapCtx->uHandlerCs, uHandlerCs);
335 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
336 if (Bs3TestSubErrorCount() != cErrorsBefore)
337 {
338 Bs3TrapPrintFrame(pTrapCtx);
339#if 1
340 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
341 Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
342 ASMHalt();
343#endif
344 }
345}
346
347/**
348 * Compares a CPU trap.
349 */
350# define bs3CpuBasic2_CompareCpuTrapCtx BS3_CMN_NM(bs3CpuBasic2_CompareCpuTrapCtx)
351BS3_DECL_NEAR(void) bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
352 uint8_t bXcpt, bool f486ResumeFlagHint)
353{
354 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
355 uint32_t fExtraEfl;
356
357 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
358 CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
359
360 fExtraEfl = X86_EFL_RF;
361 if ( g_f16BitSys
362 || ( !f486ResumeFlagHint
363 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
364 fExtraEfl = 0;
365 else
366 fExtraEfl = X86_EFL_RF;
367#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
368 fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
369#endif
370 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 0 /*cbIpAdjust*/, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
371 if (Bs3TestSubErrorCount() != cErrorsBefore)
372 {
373 Bs3TrapPrintFrame(pTrapCtx);
374#if 1
375 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
376 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
377 ASMHalt();
378#endif
379 }
380}
381
382
383/**
384 * Compares \#GP trap.
385 */
386# define bs3CpuBasic2_CompareGpCtx BS3_CMN_NM(bs3CpuBasic2_CompareGpCtx)
387BS3_DECL_NEAR(void) bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
388{
389 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/);
390}
391
392/**
393 * Compares \#NP trap.
394 */
395# define bs3CpuBasic2_CompareNpCtx BS3_CMN_NM(bs3CpuBasic2_CompareNpCtx)
396BS3_DECL_NEAR(void) bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
397{
398 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/);
399}
400
401/**
402 * Compares \#SS trap.
403 */
404# define bs3CpuBasic2_CompareSsCtx BS3_CMN_NM(bs3CpuBasic2_CompareSsCtx)
405BS3_DECL_NEAR(void) bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
406{
407 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint);
408}
409
410/**
411 * Compares \#TS trap.
412 */
413# define bs3CpuBasic2_CompareTsCtx BS3_CMN_NM(bs3CpuBasic2_CompareTsCtx)
414BS3_DECL_NEAR(void) bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
415{
416 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/);
417}
418
419/**
420 * Compares \#PF trap.
421 */
422# define bs3CpuBasic2_ComparePfCtx BS3_CMN_NM(bs3CpuBasic2_ComparePfCtx)
423BS3_DECL_NEAR(void) bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd, uint64_t uCr2Expected)
424{
425 uint64_t const uCr2Saved = pStartCtx->cr2.u;
426 pStartCtx->cr2.u = uCr2Expected;
427 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/);
428 pStartCtx->cr2.u = uCr2Saved;
429}
430
431/**
432 * Compares \#UD trap.
433 */
434# define bs3CpuBasic2_CompareUdCtx BS3_CMN_NM(bs3CpuBasic2_CompareUdCtx)
435BS3_DECL_NEAR(void) bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
436{
437 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD, true /*f486ResumeFlagHint*/);
438}
439
440
441# define bs3CpuBasic2_RaiseXcpt1Common BS3_CMN_NM(bs3CpuBasic2_RaiseXcpt1Common)
442BS3_DECL_NEAR(void) bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
443 PX86DESC const paIdt, unsigned const cIdteShift)
444{
445 BS3TRAPFRAME TrapCtx;
446 BS3REGCTX Ctx80;
447 BS3REGCTX Ctx81;
448 BS3REGCTX Ctx82;
449 BS3REGCTX Ctx83;
450 BS3REGCTX CtxTmp;
451 BS3REGCTX CtxTmp2;
452 PBS3REGCTX apCtx8x[4];
453 unsigned iCtx;
454 unsigned iRing;
455 unsigned iDpl;
456 unsigned iRpl;
457 unsigned i, j, k;
458 uint32_t uExpected;
459 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
460# if TMPL_BITS == 16
461 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
462 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
463# else
464 bool const f286 = false;
465 bool const f386Plus = true;
466 int rc;
467 uint8_t *pbIdtCopyAlloc;
468 PX86DESC pIdtCopy;
469 const unsigned cbIdte = 1 << (3 + cIdteShift);
470 RTCCUINTXREG uCr0Saved = ASMGetCR0();
471 RTGDTR GdtrSaved;
472# endif
473 RTIDTR IdtrSaved;
474 RTIDTR Idtr;
475
476 ASMGetIDTR(&IdtrSaved);
477# if TMPL_BITS != 16
478 ASMGetGDTR(&GdtrSaved);
479# endif
480
481 /* make sure they're allocated */
482 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
483 Bs3MemZero(&Ctx80, sizeof(Ctx80));
484 Bs3MemZero(&Ctx81, sizeof(Ctx81));
485 Bs3MemZero(&Ctx82, sizeof(Ctx82));
486 Bs3MemZero(&Ctx83, sizeof(Ctx83));
487 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
488 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
489
490 /* Context array. */
491 apCtx8x[0] = &Ctx80;
492 apCtx8x[1] = &Ctx81;
493 apCtx8x[2] = &Ctx82;
494 apCtx8x[3] = &Ctx83;
495
496# if TMPL_BITS != 16
497 /* Allocate memory for playing around with the IDT. */
498 pbIdtCopyAlloc = NULL;
499 if (BS3_MODE_IS_PAGED(g_bTestMode))
500 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
501# endif
502
503 /*
504 * IDT entries 80 thru 83 are assigned DPLs according to the number.
505 * (We'll be using more, but this'll do for now.)
506 */
507 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
508 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
509 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
510 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
511
512 Bs3RegCtxSave(&Ctx80);
513 Ctx80.rsp.u -= 0x300;
514 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
515# if TMPL_BITS == 16
516 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
517# elif TMPL_BITS == 32
518 g_uBs3TrapEipHint = Ctx80.rip.u32;
519# endif
520 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
521 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
522 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
523 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
524 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
525 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
526
527 /*
528 * Check that all the above gates work from ring-0.
529 */
530 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
531 {
532 g_usBs3TestStep = iCtx;
533# if TMPL_BITS == 32
534 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
535# endif
536 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
537 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
538 }
539
540 /*
541 * Check that the gate DPL check works.
542 */
543 g_usBs3TestStep = 100;
544 for (iRing = 0; iRing <= 3; iRing++)
545 {
546 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
547 {
548 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
549 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
550# if TMPL_BITS == 32
551 g_uBs3TrapEipHint = CtxTmp.rip.u32;
552# endif
553 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
554 if (iCtx < iRing)
555 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
556 else
557 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
558 g_usBs3TestStep++;
559 }
560 }
561
562 /*
563 * Modify the gate CS value and run the handler at a different CPL.
564 * Throw RPL variations into the mix (completely ignored) together
565 * with gate presence.
566 * 1. CPL <= GATE.DPL
567 * 2. GATE.P
568 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
569 */
570 g_usBs3TestStep = 1000;
571 for (i = 0; i <= 3; i++)
572 {
573 for (iRing = 0; iRing <= 3; iRing++)
574 {
575 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
576 {
577# if TMPL_BITS == 32
578 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
579# endif
580 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
581 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
582
583 for (j = 0; j <= 3; j++)
584 {
585 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
586 for (k = 0; k < 2; k++)
587 {
588 g_usBs3TestStep++;
589 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
590 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
591 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
592 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
593 /*Bs3TrapPrintFrame(&TrapCtx);*/
594 if (iCtx < iRing)
595 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
596 else if (k == 0)
597 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
598 else if (i > iRing)
599 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
600 else
601 {
602 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
603 if (i <= iCtx && i <= iRing)
604 uExpectedCs |= i;
605 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
606 }
607 }
608 }
609
610 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
611 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
612 }
613 }
614 }
615 BS3_ASSERT(g_usBs3TestStep < 1600);
616
617 /*
618 * Various CS and SS related faults
619 *
620 * We temporarily reconfigure gates 80 and 83 with new CS selectors, the
621 * latter having a CS.DPL of 2 for testing ring transitions and SS loading
622 * without making it impossible to handle faults.
623 */
624 g_usBs3TestStep = 1600;
625 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
626 Bs3GdteTestPage00.Gen.u1Present = 0;
627 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
628 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
629
630 /* CS.PRESENT = 0 */
631 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
632 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
633 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
634 bs3CpuBasic2_FailedF("selector was accessed");
635 g_usBs3TestStep++;
636
637 /* Check that GATE.DPL is checked before CS.PRESENT. */
638 for (iRing = 1; iRing < 4; iRing++)
639 {
640 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
641 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
642 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
643 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
644 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
645 bs3CpuBasic2_FailedF("selector was accessed");
646 g_usBs3TestStep++;
647 }
648
649 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
650 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
651 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
652 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
653 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
654 bs3CpuBasic2_FailedF("CS selector was accessed");
655 g_usBs3TestStep++;
656 for (iDpl = 1; iDpl < 4; iDpl++)
657 {
658 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
659 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
660 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
661 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
662 bs3CpuBasic2_FailedF("CS selector was accessed");
663 g_usBs3TestStep++;
664 }
665
666 /* 1608: Check all the invalid CS selector types alone. */
667 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
668 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
669 {
670 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
671 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
672 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
673 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
674 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
675 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
676 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
677 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
678 g_usBs3TestStep++;
679
680 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
681 Bs3GdteTestPage00.Gen.u1Present = 0;
682 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
683 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
684 Bs3GdteTestPage00.Gen.u1Present = 1;
685 g_usBs3TestStep++;
686 }
687
688 /* Fix CS again. */
689 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
690
691 /* 1632: Test SS. */
692 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
693 {
694 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
695 uint16_t const uSavedSs2 = *puTssSs2;
696 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
697
698 /* Make the handler execute in ring-2. */
699 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
700 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
701 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
702
703 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
704 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
705 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
706 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
707 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
708 bs3CpuBasic2_FailedF("CS selector was not accessed");
709 g_usBs3TestStep++;
710
711 /* Create an SS.DPL=2 stack segment and check that SS2.RPL matters and
712 that we get #SS if the selector isn't present. */
713 i = 0; /* used for cycling thru invalid CS types */
714 for (k = 0; k < 10; k++)
715 {
716 /* k=0: present,
717 k=1: not-present,
718 k=2: present but very low limit,
719 k=3: not-present, low limit.
720 k=4: present, read-only.
721 k=5: not-present, read-only.
722 k=6: present, code-selector.
723 k=7: not-present, code-selector.
724 k=8: present, read-write / no access + system (=LDT).
725 k=9: not-present, read-write / no access + system (=LDT).
726 */
727 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
728 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
729 if (k >= 8)
730 {
731 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
732 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
733 }
734 else if (k >= 6)
735 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
736 else if (k >= 4)
737 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
738 else if (k >= 2)
739 {
740 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
741 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
742 Bs3GdteTestPage03.Gen.u1Granularity = 0;
743 }
744
745 for (iDpl = 0; iDpl < 4; iDpl++)
746 {
747 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
748
749 for (iRpl = 0; iRpl < 4; iRpl++)
750 {
751 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
752 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
753 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
754 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
755 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
756 if (iRpl != 2 || iRpl != iDpl || k >= 4)
757 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
758 else if (k != 0)
759 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
760 k == 2 /*f486ResumeFlagHint*/);
761 else
762 {
763 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
764 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
765 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
766 }
767 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
768 bs3CpuBasic2_FailedF("CS selector was not accessed");
769 if ( TrapCtx.bXcpt == 0x83
770 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
771 {
772 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
773 bs3CpuBasic2_FailedF("SS selector was not accessed");
774 }
775 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
776 bs3CpuBasic2_FailedF("SS selector was accessed");
777 g_usBs3TestStep++;
778
779 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
780 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
781 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
782 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
783 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
784 g_usBs3TestStep++;
785
786 /* +2: Check that the CS.DPL check is done before the SS ones. Restoring the
787 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
788 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
789 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
790 g_usBs3TestStep++;
791
792 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
793 Bs3GdteTestPage02.Gen.u1Present = 0;
794 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
795 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
796 Bs3GdteTestPage02.Gen.u1Present = 1;
797 g_usBs3TestStep++;
798
799 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
800 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
801 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
802 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
803 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
804 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
805 Bs3GdteTestPage02.Gen.u1DescType = 1;
806 g_usBs3TestStep++;
807
808 /* +5: Now, make the CS selector limit too small and check that it triggers after the SS trouble.
809 The 286 had a simpler approach to these: GP(0). */
810 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
811 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
812 Bs3GdteTestPage02.Gen.u1Granularity = 0;
813 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
814 if (f286)
815 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
816 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
817 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
818 else if (k != 0)
819 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
820 else
821 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
822 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
823 g_usBs3TestStep++;
824 }
825 }
826 }
827
828 /* Check all the invalid SS selector types alone. */
829 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
830 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
831 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
832 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
833 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
834 g_usBs3TestStep++;
835 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
836 {
837 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
838 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
839 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
840 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
841 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
842 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
843 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
844 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
845 g_usBs3TestStep++;
846 }
847
848 /*
849 * Continue the SS experiments with an expand-down segment. We'll use
850 * the same setup as we already have, with gate 83h being DPL=3 and
851 * having CS.DPL=2.
852 *
853 * Expand down segments are weird. The valid area is practically speaking
854 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
855 * addresses from 0xffff thru 0x6001.
856 *
857 * So, with expand down segments we can more easily cut partially into the
858 * pushing of the iret frame and trigger more interesting behavior than
859 * with regular "expand up" segments where the whole pushing area is either
860 * all fine or not fine.
861 */
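/* General rule: for an expand-down data segment the valid offsets run from
   uLimit + 1 up to 0xFFFF (0xFFFFFFFF when the D/B bit is set); offsets at or
   below the limit fault. */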
862 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
863 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
864 Bs3GdteTestPage03.Gen.u2Dpl = 2;
865 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
866 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
867
868 /* First test, limit = max --> no bytes accessible --> #SS */
869 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
870 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
871
872 /* Second test, limit = 0 --> all but byte zero accessible --> works */
873 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
874 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
875 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
876 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
877
878 /* Modify the gate handler to be a dummy that immediately does UD2
879 and triggers #UD, then advance the limit down till we get the #UD. */
880 Bs3GdteTestPage03.Gen.u1Granularity = 0;
881
882 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
883 if (g_f16BitSys)
884 {
885 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
886 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
887 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
888 }
889 else
890 {
891 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
892 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
893 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
894 }
895 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
896 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
897 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
898 CtxTmp2.bCpl = 2;
899
900 /* test run. */
901 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
902 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
903 g_usBs3TestStep++;
904
905 /* Real run. */
906 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
907 while (i-- > 0)
908 {
909 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
910 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
911 if (i > 0)
912 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
913 else
914 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
915 g_usBs3TestStep++;
916 }
917
918 /* Do a run where we do the same-ring kind of access. */
919 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
920 if (g_f16BitSys)
921 {
922 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
923 i = 2*3 - 1;
924 }
925 else
926 {
927 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
928 i = 4*3 - 1;
929 }
930 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
931 CtxTmp2.ds = CtxTmp.ds;
932 CtxTmp2.es = CtxTmp.es;
933 CtxTmp2.fs = CtxTmp.fs;
934 CtxTmp2.gs = CtxTmp.gs;
935 while (i-- > 0)
936 {
937 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
938 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
939 if (i > 0)
940 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
941 else
942 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
943 g_usBs3TestStep++;
944 }
945
946 *puTssSs2 = uSavedSs2;
947 paIdt[0x83 << cIdteShift] = SavedGate83;
948 }
949 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
950 BS3_ASSERT(g_usBs3TestStep < 3000);
951
952 /*
953 * Modify the gate CS value with a conforming segment.
954 */
955 g_usBs3TestStep = 3000;
956 for (i = 0; i <= 3; i++) /* cs.dpl */
957 {
958 for (iRing = 0; iRing <= 3; iRing++)
959 {
960 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
961 {
962 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
963 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
964# if TMPL_BITS == 32
965 g_uBs3TrapEipHint = CtxTmp.rip.u32;
966# endif
967
968 for (j = 0; j <= 3; j++) /* rpl */
969 {
970 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
971 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
972 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
973 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
974 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
975 /*Bs3TrapPrintFrame(&TrapCtx);*/
976 g_usBs3TestStep++;
977 if (iCtx < iRing)
978 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
979 else if (i > iRing)
980 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
981 else
982 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
983 }
984 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
985 }
986 }
987 }
988 BS3_ASSERT(g_usBs3TestStep < 3500);
989
990 /*
991 * The gates must be 64-bit in long mode.
992 */
993 if (cIdteShift != 0)
994 {
995 g_usBs3TestStep = 3500;
996 for (i = 0; i <= 3; i++)
997 {
998 for (iRing = 0; iRing <= 3; iRing++)
999 {
1000 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1001 {
1002 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1003 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1004
1005 for (j = 0; j < 2; j++)
1006 {
1007 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1008 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1009 g_usBs3TestStep++;
1010 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1011 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1012 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1013 /*Bs3TrapPrintFrame(&TrapCtx);*/
1014 if (iCtx < iRing)
1015 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1016 else
1017 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1018 }
1019 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1020 }
1021 }
1022 }
1023 BS3_ASSERT(g_usBs3TestStep < 4000);
1024 }
1025
1026 /*
1027 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1028 */
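/* Below: i starts at the last byte of gate 80h's descriptor, k is the last
   byte of gate 83h's, and j marks the first IDT limit value that exposes every
   byte of gate 81h's descriptor the CPU actually reads -- on a 286 the final
   two bytes (u16OffsetHigh) are not fetched, so the limit may stop 2 bytes short. */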
1029 g_usBs3TestStep = 5000;
1030 i = (0x80 << (cIdteShift + 3)) - 1;
1031 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1032 k = (0x83 << (cIdteShift + 3)) - 1;
1033 for (; i <= k; i++, g_usBs3TestStep++)
1034 {
1035 Idtr = IdtrSaved;
1036 Idtr.cbIdt = i;
1037 ASMSetIDTR(&Idtr);
1038 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1039 if (i < j)
1040 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1041 else
1042 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1043 }
1044 ASMSetIDTR(&IdtrSaved);
1045 BS3_ASSERT(g_usBs3TestStep < 5100);
1046
1047# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1048
1049 /*
1050 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1051 * first page and 0x81 is on the second page. We then proceed to move
1052 * it down byte by byte to check that any inaccessible byte means #PF.
1053 *
1054 * Note! We must reload the alternative IDTR for each run as any kind of
1055 * printing to the screen (like error reporting) will cause a switch
1056 * to real mode and back, reloading the default IDTR.
1057 */
1058 g_usBs3TestStep = 5200;
1059 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1060 {
1061 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1062 for (j = 0; j < cbIdte; j++)
1063 {
1064 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1065 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1066
1067 Idtr.cbIdt = IdtrSaved.cbIdt;
1068 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1069
1070 ASMSetIDTR(&Idtr);
1071 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1072 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1073 g_usBs3TestStep++;
1074
1075 ASMSetIDTR(&Idtr);
1076 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1077 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1078 g_usBs3TestStep++;
1079
1080 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1081 if (RT_SUCCESS(rc))
1082 {
1083 ASMSetIDTR(&Idtr);
1084 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1085 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1086 g_usBs3TestStep++;
1087
1088 ASMSetIDTR(&Idtr);
1089 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1090 if (f486Plus)
1091 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1092 else
1093 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1094 g_usBs3TestStep++;
1095
1096 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1097
1098 /* Check that the entry type is checked after the whole IDTE has been cleared for #PF. */
1099 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1100 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1101 if (RT_SUCCESS(rc))
1102 {
1103 ASMSetIDTR(&Idtr);
1104 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1105 if (f486Plus)
1106 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1107 else
1108 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1109 g_usBs3TestStep++;
1110
1111 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1112 }
1113 }
1114 else
1115 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", rc);
1116
1117 ASMSetIDTR(&IdtrSaved);
1118 }
1119 }
1120
1121 /*
1122 * The read/write and user/supervisor bits of the IDT PTEs are irrelevant.
1123 */
1124 g_usBs3TestStep = 5300;
1125 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1126 {
1127 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1128 Idtr.cbIdt = IdtrSaved.cbIdt;
1129 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1130
1131 ASMSetIDTR(&Idtr);
1132 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1133 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1134 g_usBs3TestStep++;
1135
1136 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1137 if (RT_SUCCESS(rc))
1138 {
1139 ASMSetIDTR(&Idtr);
1140 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1141 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1142 g_usBs3TestStep++;
1143
1144 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1145 }
1146 ASMSetIDTR(&IdtrSaved);
1147 }
1148
1149 /*
1150 * Check that CS.u1Accessed is set to 1. Use test page selectors #0 and #3 together
1151 * with interrupt gates 80h and 83h, respectively.
1152 */
1153/** @todo Throw in SS.u1Accessed too. */
1154 g_usBs3TestStep = 5400;
1155 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1156 {
1157 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1158 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1159 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1160
1161 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1162 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1163 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1164
1165 /* Check that the CS.A bit is being set on a general basis and that
1166 the special CS values work with our generic handler code. */
1167 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1168 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1169 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1170 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1171 g_usBs3TestStep++;
1172
1173 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1174 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1175 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1176 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1177 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1178 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1179 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1180 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1181 g_usBs3TestStep++;
1182
1183 /*
1184 * Now check that setting CS.u1Accessed to 1 does __NOT__ trigger a page
1185 * fault due to the RW bit being zero.
1186 * (We check both with and without the WP bit if 80486.)
1187 */
1188 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1189 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1190
1191 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1192 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1193 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1194 if (RT_SUCCESS(rc))
1195 {
1196 /* ring-0 handler */
1197 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1198 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1199 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1200 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1201 g_usBs3TestStep++;
1202
1203 /* ring-3 handler */
1204 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1205 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1206 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1207 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1208 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1209 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1210 g_usBs3TestStep++;
1211
1212 /* clear WP and repeat the above. */
1213 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1214 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1215 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1216 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1217
1218 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1219 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1220 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1221 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1222 g_usBs3TestStep++;
1223
1224 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1225 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1226 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1227 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!\n", Bs3GdteTestPage03.Gen.u4Type);
1228 g_usBs3TestStep++;
1229
1230 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1231 }
1232
1233 ASMSetCR0(uCr0Saved);
1234
1235 /*
1236 * While we're here, check that if the CS GDT entry is a non-present
1237 * page we do get a #PF with the right error code and CR2.
1238 */
1239 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1240 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1241 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1242 if (RT_SUCCESS(rc))
1243 {
1244 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1245 if (f486Plus)
1246 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1247 else
1248 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1249 g_usBs3TestStep++;
1250
1251 /* Do it from ring-3 to check ErrCd, which doesn't set X86_TRAP_PF_US it turns out. */
1252 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1253 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1254 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1255
1256 if (f486Plus)
1257 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1258 else
1259 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1260 g_usBs3TestStep++;
1261
1262 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1263 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1264 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1265 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1266 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1267 }
1268
1269 /* restore */
1270 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1271 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1272 }
1273
1274# endif /* 32 || 64*/
1275
1276 /*
1277 * Check broad EFLAGS effects.
1278 */
1279 g_usBs3TestStep = 5600;
1280 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1281 {
1282 for (iRing = 0; iRing < 4; iRing++)
1283 {
1284 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1285 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1286
1287 /* all set */
1288 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1289 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1290 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1291 if (f486Plus)
1292 CtxTmp.rflags.u32 |= X86_EFL_AC;
1293 if (f486Plus && !g_f16BitSys)
1294 CtxTmp.rflags.u32 |= X86_EFL_RF;
1295 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1296 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1297 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1298 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1299
1300 if (iCtx >= iRing)
1301 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1302 else
1303 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1304 uExpected = CtxTmp.rflags.u32
1305 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1306 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1307 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1308 if (TrapCtx.fHandlerRfl != uExpected)
1309 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1310 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1311 g_usBs3TestStep++;
1312
1313 /* all cleared */
1314 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1315 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1316 else
1317 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1318 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1319 if (iCtx >= iRing)
1320 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1321 else
1322 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1323 uExpected = CtxTmp.rflags.u32;
1324 if (TrapCtx.fHandlerRfl != uExpected)
1325 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1326 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1327 g_usBs3TestStep++;
1328 }
1329 }
1330
1331/** @todo CS.LIMIT / canonical(CS) */
1332
1333
1334 /*
1335 * Check invalid gate types.
1336 */
1337 g_usBs3TestStep = 32000;
1338 for (iRing = 0; iRing <= 3; iRing++)
1339 {
1340 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1341 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1342 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1343 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1344 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1345 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1346 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1347 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1348 /*286:*/ 12, 14, 15 };
1349 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1350 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1351 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1352
1353
1354 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1355 {
1356 unsigned iType;
1357
1358 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1359 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1360# if TMPL_BITS == 32
1361 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1362# endif
1363 for (iType = 0; iType < cInvTypes; iType++)
1364 {
1365 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1366 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1367 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1368
1369 for (i = 0; i < 4; i++)
1370 {
1371 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1372 {
1373 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1374 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1375 : s_auCSes[j] | i;
1376 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1377 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1378 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1379 g_usBs3TestStep++;
1380 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1381
1382 /* Mark it not-present to check that invalid type takes precedence. */
1383 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1384 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1385 g_usBs3TestStep++;
1386 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1387 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1388 }
1389 }
1390
1391 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1392 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1393 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1394 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1395 }
1396 }
1397 }
1398 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1399
1400
1401 /** @todo
1402 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1403 * - Quickly generate all faults.
1404 * - All the peculiarities of v8086 mode.
1405 */
1406
1407# if TMPL_BITS != 16
1408 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1409# endif
1410}
1411
1412
1413/**
1414 * Executes one round of SIDT and SGDT tests using one assembly worker.
1415 *
1416 * This is written with driving everything from the 16-bit or 32-bit worker in
1417 * mind, i.e. not assuming the test bit count is the same as the current one.
1418 */
1419# define bs3CpuBasic2_sidt_sgdt_One BS3_CMN_NM(bs3CpuBasic2_sidt_sgdt_One)
1420BS3_DECL_NEAR(void) bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1421 uint8_t const *pbExpected)
1422{
1423 BS3TRAPFRAME TrapCtx;
1424 BS3REGCTX Ctx;
1425 BS3REGCTX CtxUdExpected;
1426 BS3REGCTX TmpCtx;
1427 uint8_t const cbBuf = 8*2; /* test buffer area */
1428 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1429 uint8_t BS3_FAR *pbBuf = abBuf;
1430 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
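 /* Note: the memory image SIDT/SGDT stores is a 16-bit limit followed by the base,
    i.e. 2+4 bytes outside 64-bit code and 2+8 bytes in 64-bit code.  Illustrative
    sketch only (a hypothetical packed struct, not used by the test):
        #pragma pack(1)
        typedef struct MYXDTR { uint16_t cbLimit; uint64_t uBase; } MYXDTR;
    with only the low 32 bits of uBase being stored outside 64-bit code. */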
1431 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1432 uint8_t bFiller;
1433 int off;
1434 int off2;
1435 unsigned cb;
1436 uint8_t BS3_FAR *pbTest;
1437
1438 /* make sure they're allocated */
1439 Bs3MemZero(&Ctx, sizeof(Ctx));
1440 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1441 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1442 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1443 Bs3MemZero(&abBuf, sizeof(abBuf));
1444
1445 /* Create a context, give this routine some more stack space, point the context
1446 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1447 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1448 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1449 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1450 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1451 g_uBs3TrapEipHint = Ctx.rip.u32;
1452 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1453 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1454
1455 /* For successful SIDT attempts, we'll stop at the UD2. */
1456 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1457 CtxUdExpected.rip.u += pWorker->cbInstr;
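 /* I.e. CtxUdExpected is the register state we expect at the #UD: identical to Ctx
    except that xIP has advanced past the SIDT/SGDT instruction and sits at the UD2. */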
1458
1459 /*
1460 * Check that it works at all and that only the bytes we expect get written to.
1461 */
1462 /* First with zero buffer. */
1463 Bs3MemZero(abBuf, sizeof(abBuf));
1464 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1465 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1466 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1467 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1468 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1469 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1470 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1471 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1472 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1473 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1474 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1475 Bs3TestFailedF("Mismatch (#1): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, abBuf);
1476 g_usBs3TestStep++;
1477
1478 /* Again with a buffer filled with a byte not occurring in the previous result. */
1479 bFiller = 0x55;
1480 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1481 bFiller++;
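 /* Using a filler value that cannot occur in the expected result lets the checks
    below tell bytes the instruction wrote apart from bytes it left untouched. */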
1482 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1483 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1484 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1485
1486 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1487 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1488 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1489 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1490 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1491 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1492 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1493 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1494 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1495 Bs3TestFailedF("Mismatch (#2): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, abBuf);
1496 g_usBs3TestStep++;
1497
1498 /*
1499 * Slide the buffer along 8 bytes to cover misalignment.
1500 */
1501 for (off = 0; off < 8; off++)
1502 {
1503 pbBuf = &abBuf[off];
1504 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1505 CtxUdExpected.rbx.u = Ctx.rbx.u;
1506
1507 /* First with zero buffer. */
1508 Bs3MemZero(abBuf, sizeof(abBuf));
1509 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1510 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1511 if (off > 0 && !ASMMemIsZero(abBuf, off))
1512 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1513 cbIdtr, off, off + cbBuf, abBuf);
1514 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1515 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1516 cbIdtr, off, off + cbBuf, abBuf);
1517 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1518 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1519 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1520 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1521 g_usBs3TestStep++;
1522
1523 /* Again with a buffer filled with a byte not occurring in the previous result. */
1524 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1525 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1526 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1527 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1528 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1529 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1530 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1531 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1532 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1533 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1534 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1535 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1536 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1537 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
1538 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1539 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1540 g_usBs3TestStep++;
1541 }
1542 pbBuf = abBuf;
1543 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1544 CtxUdExpected.rbx.u = Ctx.rbx.u;
1545
1546 /*
1547 * Play with the selector limit if the target mode supports limit checking.
1548 * We use BS3_SEL_TEST_PAGE_00 for this.
1549 */
1550 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
1551 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
1552 {
1553 uint16_t cbLimit;
1554 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
1555 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
1556 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
1557 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
1558 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
1559 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
1560
1561 if (pWorker->fSs)
1562 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
1563 else
1564 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
1565
1566 /* Expand up (normal). */
1567 for (off = 0; off < 8; off++)
1568 {
1569 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
1570 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
1571 {
1572 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
1573 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1574 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1575 if (off + cbIdtr <= cbLimit + 1)
1576 {
1577 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1578 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1579 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1580 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1581 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1582 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1583 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1584 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
1585 }
1586 else
1587 {
1588 if (pWorker->fSs)
1589 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
1590 else
1591 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1592 if (off + 2 <= cbLimit + 1)
1593 {
1594 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
1595 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1596 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1597 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
1598 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
1599 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
1600 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1601 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1602 }
1603 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1604 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1605 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1606 }
1607
1608 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1609 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1610 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1611 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
1612 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1613 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1614
1615 g_usBs3TestStep++;
1616 }
1617 }
1618
1619 /* Expand down (weird). The valid area is inverted compared to expand up,
1620 so a limit of zero gives us a valid range of 0001h..0ffffh (instead of
1621 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
1622 means one valid byte at 0ffffh, and a limit of 0ffffh means none
1623 (because in a normal expand-up segment 0ffffh means all 64KB are
1624 accessible). */
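 /* Worked example: with cbLimit=3 the valid offsets are 4..0ffffh, so a store starting
    at off=3 violates the limit while one starting at off=4 does not; that is why the
    success condition below is 'off > cbLimit' rather than 'off + cbIdtr <= cbLimit + 1'. */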
1625 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
1626 for (off = 0; off < 8; off++)
1627 {
1628 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
1629 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
1630 {
1631 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
1632 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1633 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1634
1635 if (off > cbLimit)
1636 {
1637 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1638 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1639 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1640 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1641 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1642 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1643 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1644 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
1645 }
1646 else
1647 {
1648 if (pWorker->fSs)
1649 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
1650 else
1651 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1652 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1653 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1654 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1655 }
1656
1657 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1658 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1659 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1660 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
1661 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1662 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1663
1664 g_usBs3TestStep++;
1665 }
1666 }
1667
1668 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1669 CtxUdExpected.rbx.u = Ctx.rbx.u;
1670 CtxUdExpected.ss = Ctx.ss;
1671 CtxUdExpected.ds = Ctx.ds;
1672 }
1673
1674 /*
1675 * Play with the paging.
1676 */
1677 if ( BS3_MODE_IS_PAGED(bTestMode)
1678 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
1679 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
1680 {
1681 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
1682
1683 /*
1684 * Slide the buffer towards the trailing guard page. We'll observe the
1685 * first word being written entirely separately from the 2nd dword/qword.
1686 */
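 /* I.e. once any part of the cbIdtr-byte image would land in the guard page we expect
    a #PF whose fault address is the first inaccessible byte (uFlatTest + RT_MAX(off,
    X86_PAGE_SIZE)); the 2-byte limit may still have been stored separately before the
    fault, which is what the checks below account for. */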
1687 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
1688 {
1689 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
1690 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
1691 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1692 if (off + cbIdtr <= X86_PAGE_SIZE)
1693 {
1694 CtxUdExpected.rbx = Ctx.rbx;
1695 CtxUdExpected.ss = Ctx.ss;
1696 CtxUdExpected.ds = Ctx.ds;
1697 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1698 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
1699 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
1700 }
1701 else
1702 {
1703 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
1704 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
1705 if ( off <= X86_PAGE_SIZE - 2
1706 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
1707 Bs3TestPrintf("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
1708 pbExpected, &pbTest[off], off);
1709 if ( off < X86_PAGE_SIZE - 2
1710 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
1711 Bs3TestPrintf("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
1712 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
1713 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
1714 Bs3TestPrintf("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
1715 }
1716 g_usBs3TestStep++;
1717 }
1718
1719 /*
1720 * Now, do it the other way around. It should look normal now since writing
1721 * the limit will #PF first and nothing should be written.
1722 */
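 /* Here a negative off makes the expected fault address uFlatTest + off, i.e. the very
    first byte of the limit write; the branches below only print (rather than fail) if
    anything was written anyway, since that would indicate a partial store before the #PF. */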
1723 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
1724 {
1725 Bs3MemSet(pbTest, bFiller, 48);
1726 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
1727 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1728 if (off >= 0)
1729 {
1730 CtxUdExpected.rbx = Ctx.rbx;
1731 CtxUdExpected.ss = Ctx.ss;
1732 CtxUdExpected.ds = Ctx.ds;
1733 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1734 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
1735 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
1736 }
1737 else
1738 {
1739 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0), uFlatTest + off);
1740 if ( -off < cbIdtr
1741 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
1742 Bs3TestPrintf("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
1743 bFiller, cbIdtr + off, pbTest, off);
1744 }
1745 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
1746 Bs3TestPrintf("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
1747 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
1748 g_usBs3TestStep++;
1749 }
1750
1751 /*
1752 * Combine paging and segment limit and check ordering.
1753 * This is kind of interesting here since the instruction seems to
1754 * be doing two separate writes.
1755 */
1756 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
1757 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
1758 {
1759 uint16_t cbLimit;
1760
1761 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
1762 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
1763 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
1764 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
1765 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
1766
1767 if (pWorker->fSs)
1768 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
1769 else
1770 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
1771
1772 /* Expand up (normal), approaching tail guard page. */
1773 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
1774 {
1775 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
1776 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
1777 {
1778 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
1779 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
1780 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1781 if (off + cbIdtr <= cbLimit + 1)
1782 {
1783 /* No #GP, but maybe #PF. */
1784 if (off + cbIdtr <= X86_PAGE_SIZE)
1785 {
1786 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1787 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
1788 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
1789 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
1790 }
1791 else
1792 {
1793 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
1794 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
1795 if ( off <= X86_PAGE_SIZE - 2
1796 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
1797 Bs3TestPrintf("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
1798 pbExpected, &pbTest[off], off);
1799 cb = X86_PAGE_SIZE - off - 2;
1800 if ( off < X86_PAGE_SIZE - 2
1801 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
1802 Bs3TestPrintf("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
1803 bFiller, cb, &pbTest[off + 2], off);
1804 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
1805 Bs3TestPrintf("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
1806 }
1807 }
1808 else if (off + 2 <= cbLimit + 1)
1809 {
1810 /* Writing the [ig]dtr limit word does not cause #GP here (it fits within the segment limit), though it may cause #PF; it is the base write that exceeds the limit and causes the #GP. */
1811 if (off <= X86_PAGE_SIZE - 2)
1812 {
1813 if (pWorker->fSs)
1814 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
1815 else
1816 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1817 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
1818 Bs3TestPrintf("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
1819 pbExpected, &pbTest[off], off);
1820 cb = X86_PAGE_SIZE - off - 2;
1821 if ( off < X86_PAGE_SIZE - 2
1822 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
1823 Bs3TestPrintf("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
1824 bFiller, cb, &pbTest[off + 2], off);
1825 }
1826 else
1827 {
1828 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
1829 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
1830 if ( off < X86_PAGE_SIZE
1831 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
1832 Bs3TestPrintf("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
1833 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
1834 }
1835 }
1836 else
1837 {
1838 /* #GP/#SS on limit. */
1839 if (pWorker->fSs)
1840 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
1841 else
1842 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1843 if ( off < X86_PAGE_SIZE
1844 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
1845 Bs3TestPrintf("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
1846 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
1847 }
1848
1849 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
1850 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
1851 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
1852 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
1853
1854 g_usBs3TestStep++;
1855
1856 /* Set DS to 0 and check that we get #GP(0). */
1857 if (!pWorker->fSs)
1858 {
1859 Ctx.ds = 0;
1860 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1861 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1862 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
1863 g_usBs3TestStep++;
1864 }
1865 }
1866 }
1867
1868 /* Expand down. */
1869 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
1870 uFlatTest -= X86_PAGE_SIZE;
1871
1872 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
1873 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
1874 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
1875 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
1876
1877 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
1878 {
1879 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
1880 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
1881 {
1882 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
1883 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
1884 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1885 if (cbLimit < off && off >= X86_PAGE_SIZE)
1886 {
1887 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1888 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
1889 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
1890 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
1891 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
1892 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
1893 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
1894 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[off + cbIdtr]);
1895 }
1896 else
1897 {
1898 if (cbLimit < off && off < X86_PAGE_SIZE)
1899 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
1900 uFlatTest + off);
1901 else if (pWorker->fSs)
1902 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
1903 else
1904 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1905 cb = cbIdtr*2;
1906 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
1907 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
1908 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE]);
1909 }
1910 g_usBs3TestStep++;
1911 }
1912 }
1913
1914 pbTest += X86_PAGE_SIZE;
1915 uFlatTest += X86_PAGE_SIZE;
1916 }
1917
1918 Bs3MemGuardedTestPageFree(pbTest);
1919 }
1920
1921 /*
1922 * Check non-canonical 64-bit space.
1923 */
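 /* The canonical address space ends at 0x00007fffffffffff and resumes at
    0xffff800000000000; the two loops below slide the target buffer across both
    edges and expect #GP(0) whenever any part of the store would be non-canonical. */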
1924 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
1925 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
1926 {
1927 /* Make our references relative to the gap. */
1928 pbTest += g_cbBs3PagingOneCanonicalTrap;
1929
1930 /* Hit it from below. */
1931 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
1932 {
1933 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
1934 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
1935 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1936 if (off + cbIdtr <= 0)
1937 {
1938 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1939 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
1940 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
1941 }
1942 else
1943 {
1944 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1945 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
1946 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
1947 off2 = off <= -2 ? 2 : 0;
1948 cb = cbIdtr - off2;
1949 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
1950 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
1951 cb, &pbExpected[off], cb, &pbTest[off + off2]);
1952 }
1953 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
1954 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
1955 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
1956 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
1957 }
1958
1959 /* Hit it from above. */
1960 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
1961 {
1962 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
1963 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
1964 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1965 if (off >= 0)
1966 {
1967 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1968 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
1969 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
1970 }
1971 else
1972 {
1973 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1974 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
1975 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
1976 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
1977 }
1978 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
1979 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
1980 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
1981 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
1982 }
1983
1984 }
1985}
1986
1987
1988# define bs3CpuBasic2_sidt_sgdt_Common BS3_CMN_NM(bs3CpuBasic2_sidt_sgdt_Common)
1989BS3_DECL_NEAR(void) bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
1990 uint8_t const *pbExpected)
1991{
1992 unsigned idx;
1993 unsigned bRing;
1994 unsigned iStep = 0;
1995
1996 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
1997 test and don't want to bother with double faults. */
1998 for (bRing = 0; bRing <= 3; bRing++)
1999 {
2000 for (idx = 0; idx < cWorkers; idx++)
2001 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2002 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2003 {
2004 g_usBs3TestStep = iStep;
2005 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2006 iStep += 1000;
2007 }
2008 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2009 break;
2010 }
2011}
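/* Usage sketch (illustrative only): a mode-specific entry point would call the common
   driver above with its worker table and the expected IDTR/GDTR image; the table and
   array names below are made up for the example.

       static uint8_t const s_abExpectedIdtr[2+8] = { 0 }; // filled in by the real test
       bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aMySidtWorkers, RT_ELEMENTS(g_aMySidtWorkers),
                                     s_abExpectedIdtr);
*/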
2012
2013
2014/*
2015 * LIDT & LGDT
2016 */
2017
2018/**
2019 * Executes one round of LIDT and LGDT tests using one assembly worker.
2020 *
2021 * This is written with driving everything from the 16-bit or 32-bit worker in
2022 * mind, i.e. not assuming the test bit count is the same as the current one.
2023 */
2024# define bs3CpuBasic2_lidt_lgdt_One BS3_CMN_NM(bs3CpuBasic2_lidt_lgdt_One)
2025BS3_DECL_NEAR(void) bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2026 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2027{
2028 BS3TRAPFRAME TrapCtx;
2029 BS3REGCTX Ctx;
2030 BS3REGCTX CtxUdExpected;
2031 BS3REGCTX TmpCtx;
2032 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2033 uint8_t abBufSave[32]; /* For saving the result after loading. */
2034 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2035 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2036 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2037 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2038 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
2039 uint8_t const cbBaseLoaded = BS3_MODE_IS_16BIT_CODE(bTestMode) || (pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2040 ? 3 : cbIdtr - 2;
2041 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2042 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
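 /* When only 24 bits of the base are loaded (16-bit code, or a worker with the
    BS3CB2SIDTSGDT_F_OPSIZE flag), the test expects the stored-back top base byte to
    read as 00h on 386+ and as 0ffh on the 286, which is what cbBaseLoaded and
    bTop16BitBase encode for the checks below. */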
2043 uint8_t bFiller1; /* For filling abBufLoad. */
2044 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2045// int off;
2046// int off2;
2047// unsigned cb;
2048// uint8_t BS3_FAR *pbTest;
2049 unsigned i;
2050
2051 /* make sure they're allocated */
2052 Bs3MemZero(&Ctx, sizeof(Ctx));
2053 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2054 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2055 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2056 Bs3MemZero(abBufSave, sizeof(abBufSave));
2057 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2058 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2059
2060 /*
2061 * Create a context, giving this routine some more stack space.
2062 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2063 * - Point DS/SS:xBX at abBufLoad.
2064 * - Point ES:xDI at abBufSave.
2065 * - Point ES:xSI at abBufRestore.
2066 */
2067 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2068 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2069 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2070 g_uBs3TrapEipHint = Ctx.rip.u32;
2071 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2072
2073 pbBufSave = abBufSave;
2074 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2075 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
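 /* Align the save buffer such that the base field, which starts 2 bytes in, lands on
    an 8-byte boundary; the restore buffer gets the same treatment just below. */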
2076 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2077
2078 pbBufRestore = abBufRestore;
2079 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2080 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2081 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2082 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2083
2084 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2085 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2086
2087 /* For successful LIDT attempts, we'll stop at the UD2. */
2088 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2089 CtxUdExpected.rip.u += pWorker->cbInstr;
2090
2091 /*
2092 * Check that it works at all.
2093 */
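 /* LIDT and LGDT are privileged, so in rings 1 thru 3 we expect #GP(0); only ring-0
    runs make it to the UD2 and produce a SIDT result to check. */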
2094 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2095 Bs3MemCpy(abBufLoad, pbBufRestore, cbRestore);
2096 Bs3MemZero(abBufSave, sizeof(abBufSave));
2097 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2098 if (bRing != 0)
2099 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2100 else
2101 {
2102 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2103 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2104 Bs3TestFailedF("Mismatch (#1): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2105 }
2106 g_usBs3TestStep++;
2107
2108 /* Determine two filler bytes that don't appear in the previous result or our expectations. */
2109 bFiller1 = ~0x55;
2110 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2111 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2112 || bFiller1 == 0xff)
2113 bFiller1++;
2114 bFiller2 = 0x33;
2115 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2116 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2117 || bFiller2 == 0xff
2118 || bFiller2 == bFiller1)
2119 bFiller2++;
2120 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2121 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
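 /* abExpectedFilled = the expected SIDT image followed by bFiller2 padding, so a single
    compare below also catches writes beyond the first cbIdtr bytes of the save buffer. */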
2122
2123 /* Again with a buffer filled with a byte not occurring in the previous result. */
2124 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2125 Bs3MemCpy(abBufLoad, pbBufRestore, cbRestore);
2126 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2127 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2128 if (bRing != 0)
2129 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2130 else
2131 {
2132 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2133 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2134 Bs3TestFailedF("Mismatch (#2): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2135 }
2136 g_usBs3TestStep++;
2137
2138 /*
2139 * Try loading a bunch of different limit+base values to check what happens,
2140 * especially what happens wrt the top part of the base in 16-bit mode.
2141 */
2142 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2143 {
2144 static const struct
2145 {
2146 bool fGP;
2147 uint16_t cbLimit;
2148 uint64_t u64Base;
2149 } s_aValues[] =
2150 {
2151 { false, 0x0000, UINT64_C(0x0000000000000000) },
2152 { false, 0x0001, UINT64_C(0x0000000000000001) },
2153 { false, 0x0002, UINT64_C(0x0000000000000010) },
2154 { false, 0x0003, UINT64_C(0x0000000000000123) },
2155 { false, 0x0004, UINT64_C(0x0000000000001234) },
2156 { false, 0x0005, UINT64_C(0x0000000000012345) },
2157 { false, 0x0006, UINT64_C(0x0000000000123456) },
2158 { false, 0x0007, UINT64_C(0x0000000001234567) },
2159 { false, 0x0008, UINT64_C(0x0000000012345678) },
2160 { false, 0x0009, UINT64_C(0x0000000123456789) },
2161 { false, 0x000a, UINT64_C(0x000000123456789a) },
2162 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2163 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2164 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2165 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2166 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2167 { true, 0x0000, UINT64_C(0x0000800000000000) },
2168 { true, 0x0000, UINT64_C(0x0000800000000333) },
2169 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2170 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2171 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2172 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2173 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2174 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2175 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2176 { false, 0x5678, UINT64_C(0xffff800000000000) },
2177 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2178 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2179 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2180 };
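 /* The fGP entries use non-canonical base addresses (bits 63:48 not matching bit 47),
    which LIDT/LGDT in 64-bit mode is expected to reject with #GP(0). */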
2181 for (i = 0; i < RT_ELEMENTS(s_aValues); i++)
2182 {
2183 Bs3MemSet(abBufSave, bFiller1, sizeof(abBufSave));
2184 Bs3MemCpy(&abBufLoad[0], &s_aValues[i].cbLimit, 2);
2185 Bs3MemCpy(&abBufLoad[2], &s_aValues[i].u64Base, 8);
2186 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2187 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2188 if (bRing != 0 || s_aValues[i].fGP)
2189 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2190 else
2191 {
2192 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2193 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues[i].cbLimit, 2) != 0
2194 || Bs3MemCmp(&pbBufSave[2], &s_aValues[i].u64Base, 8) != 0
2195 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2196 Bs3TestFailedF("Mismatch (#2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2197 s_aValues[i].cbLimit, s_aValues[i].u64Base, bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2198 }
2199 g_usBs3TestStep++;
2200 }
2201 }
2202 else
2203 {
2204 static const struct
2205 {
2206 uint16_t cbLimit;
2207 uint32_t u32Base;
2208 } s_aValues[] =
2209 {
2210 { 0x0000, UINT32_C(0x00000000) },
2211 { 0x0001, UINT32_C(0x00000001) },
2212 { 0x0002, UINT32_C(0x00000012) },
2213 { 0x0003, UINT32_C(0x00000123) },
2214 { 0x0004, UINT32_C(0x00001234) },
2215 { 0x0005, UINT32_C(0x00012345) },
2216 { 0x0006, UINT32_C(0x00123456) },
2217 { 0x0007, UINT32_C(0x01234567) },
2218 { 0x0008, UINT32_C(0x12345678) },
2219 { 0x0009, UINT32_C(0x80204060) },
2220 { 0x000a, UINT32_C(0xddeeffaa) },
2221 { 0x000b, UINT32_C(0xfdecdbca) },
2222 { 0x000c, UINT32_C(0x6098456b) },
2223 { 0x000d, UINT32_C(0x98506099) },
2224 { 0x000e, UINT32_C(0x206950bc) },
2225 { 0x000f, UINT32_C(0x9740395d) },
2226 { 0x0334, UINT32_C(0x64a9455e) },
2227 { 0xb423, UINT32_C(0xd20b6eff) },
2228 { 0x4955, UINT32_C(0x85296d46) },
2229 { 0xffff, UINT32_C(0x07000039) },
2230 { 0xefe1, UINT32_C(0x0007fe00) },
2231 };
2232 for (i = 0; i < RT_ELEMENTS(s_aValues); i++)
2233 {
2234 Bs3MemSet(abBufSave, bFiller1, sizeof(abBufSave));
2235 Bs3MemCpy(&abBufLoad[0], &s_aValues[i].cbLimit, 2);
2236 Bs3MemCpy(&abBufLoad[2], &s_aValues[i].u32Base, cbBaseLoaded);
2237 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2238 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2239 if (bRing != 0)
2240 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2241 else
2242 {
2243 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2244 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues[i].cbLimit, 2) != 0
2245 || Bs3MemCmp(&pbBufSave[2], &s_aValues[i].u32Base, cbBaseLoaded) != 0
2246 || ( cbBaseLoaded != 4
2247 && pbBufSave[2+3] != bTop16BitBase)
2248 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2249 Bs3TestFailedF("Mismatch (#3): loaded %04RX16:%08RX32, fillers %#x %#x%s%s (cbIns=%d), got %.*Rhxs\n",
2250 s_aValues[i].cbLimit, s_aValues[i].u32Base, bFiller1, bFiller2, f286 ? ", 286" : "",
2251 pWorker->fFlags ? ", opsize" : "", pWorker->cbInstr, cbIdtr*2, pbBufSave);
2252 }
2253 g_usBs3TestStep++;
2254 }
2255
2256 }
2257
2258
2259
2260#if 0
2261
2262 /*
2263 * Slide the buffer along 8 bytes to cover misalignment.
2264 */
2265 for (off = 0; off < 8; off++)
2266 {
2267 pbBuf = &abBuf[off];
2268 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
2269 CtxUdExpected.rbx.u = Ctx.rbx.u;
2270
2271 /* First with zero buffer. */
2272 Bs3MemZero(abBuf, sizeof(abBuf));
2273 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2274 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2275 if (off > 0 && !ASMMemIsZero(abBuf, off))
2276 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
2277 cbIdtr, off, off + cbBuf, abBuf);
2278 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
2279 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
2280 cbIdtr, off, off + cbBuf, abBuf);
2281 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2282 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
2283 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2284 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2285 g_usBs3TestStep++;
2286
2287 /* Again with a buffer filled with a byte not occurring in the previous result. */
2288 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2289 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2290 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2291 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2292 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
2293 cbIdtr, off, bFiller, off + cbBuf, abBuf);
2294 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
2295 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
2296 cbIdtr, off, bFiller, off + cbBuf, abBuf);
2297 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2298 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
2299 cbIdtr, off, bFiller, off + cbBuf, abBuf);
2300 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2301 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
2302 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2303 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2304 g_usBs3TestStep++;
2305 }
2306 pbBuf = abBuf;
2307 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2308 CtxUdExpected.rbx.u = Ctx.rbx.u;
2309
2310 /*
2311 * Play with the selector limit if the target mode supports limit checking.
2312 * We use BS3_SEL_TEST_PAGE_00 for this.
2313 */
2314 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2315 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2316 {
2317 uint16_t cbLimit;
2318 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
2319 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2320 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2321 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2322 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2323 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2324
2325 if (pWorker->fSs)
2326 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2327 else
2328 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2329
2330 /* Expand up (normal). */
2331 for (off = 0; off < 8; off++)
2332 {
2333 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2334 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2335 {
2336 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2337 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2338 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2339 if (off + cbIdtr <= cbLimit + 1)
2340 {
2341 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2342 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2343 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2344 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2345 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2346 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2347 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2348 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
2349 }
2350 else
2351 {
2352 if (pWorker->fSs)
2353 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2354 else
2355 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2356 if (off + 2 <= cbLimit + 1)
2357 {
2358 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2359 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2360 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2361 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2362 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2363 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2364 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2365 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2366 }
2367 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2368 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2369 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2370 }
2371
2372 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2373 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2374 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2375 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2376 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2377 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2378
2379 g_usBs3TestStep++;
2380 }
2381 }
2382
2383 /* Expand down (weird). The valid area is inverted compared to expand up,
2384 so a limit of zero gives us a valid range of 0001h..0ffffh (instead of
2385 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2386 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2387 (because in a normal expand-up segment 0ffffh means all 64KB are
2388 accessible). */
2389 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2390 for (off = 0; off < 8; off++)
2391 {
2392 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2393 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2394 {
2395 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2396 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2397 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2398
2399 if (off > cbLimit)
2400 {
2401 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2402 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2403 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2404 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2405 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2406 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2407 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2408 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2409 }
2410 else
2411 {
2412 if (pWorker->fSs)
2413 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2414 else
2415 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2416 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2417 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2418 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2419 }
2420
2421 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2422 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2423 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2424 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2425 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2426 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2427
2428 g_usBs3TestStep++;
2429 }
2430 }
2431
2432 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2433 CtxUdExpected.rbx.u = Ctx.rbx.u;
2434 CtxUdExpected.ss = Ctx.ss;
2435 CtxUdExpected.ds = Ctx.ds;
2436 }
2437
2438 /*
2439 * Play with the paging.
2440 */
2441 if ( BS3_MODE_IS_PAGED(bTestMode)
2442 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2443 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2444 {
2445 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2446
2447 /*
2448 * Slide the buffer towards the trailing guard page. We'll observe the
2449 * first word being written entirely separately from the 2nd dword/qword.
2450 */
2451 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2452 {
2453 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2454 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2455 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2456 if (off + cbIdtr <= X86_PAGE_SIZE)
2457 {
2458 CtxUdExpected.rbx = Ctx.rbx;
2459 CtxUdExpected.ss = Ctx.ss;
2460 CtxUdExpected.ds = Ctx.ds;
2461 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2462 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2463 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2464 }
2465 else
2466 {
2467 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2468 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
2469 if ( off <= X86_PAGE_SIZE - 2
2470 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2471 Bs3TestPrintf("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2472 pbExpected, &pbTest[off], off);
2473 if ( off < X86_PAGE_SIZE - 2
2474 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2475 Bs3TestPrintf("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2476 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2477 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2478 Bs3TestPrintf("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2479 }
2480 g_usBs3TestStep++;
2481 }
2482
2483 /*
2484 * Now, do it the other way around. It should look normal now since writing
2485 * the limit will #PF first and nothing should be written.
2486 */
2487 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2488 {
2489 Bs3MemSet(pbTest, bFiller, 48);
2490 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2491 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2492 if (off >= 0)
2493 {
2494 CtxUdExpected.rbx = Ctx.rbx;
2495 CtxUdExpected.ss = Ctx.ss;
2496 CtxUdExpected.ds = Ctx.ds;
2497 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2498 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2499 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2500 }
2501 else
2502 {
2503 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0), uFlatTest + off);
2504 if ( -off < cbIdtr
2505 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2506 Bs3TestPrintf("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2507 bFiller, cbIdtr + off, pbTest, off);
2508 }
2509 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2510 Bs3TestPrintf("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2511 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2512 g_usBs3TestStep++;
2513 }
2514
2515 /*
2516 * Combine paging and segment limit and check ordering.
2517 * This is kind of interesting here since the instruction seems to
2518 * be doing two separate writes.
2519 */
2520 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2521 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2522 {
2523 uint16_t cbLimit;
2524
2525 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2526 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2527 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2528 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2529 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2530
2531 if (pWorker->fSs)
2532 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2533 else
2534 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2535
2536 /* Expand up (normal), approaching tail guard page. */
2537 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2538 {
2539 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2540 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2541 {
2542 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2543 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2544 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2545 if (off + cbIdtr <= cbLimit + 1)
2546 {
2547 /* No #GP, but maybe #PF. */
2548 if (off + cbIdtr <= X86_PAGE_SIZE)
2549 {
2550 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2551 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2552 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2553 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2554 }
2555 else
2556 {
2557 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2558 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
2559 if ( off <= X86_PAGE_SIZE - 2
2560 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2561 Bs3TestPrintf("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2562 pbExpected, &pbTest[off], off);
2563 cb = X86_PAGE_SIZE - off - 2;
2564 if ( off < X86_PAGE_SIZE - 2
2565 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2566 Bs3TestPrintf("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2567 bFiller, cb, &pbTest[off + 2], off);
2568 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2569 Bs3TestPrintf("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2570 }
2571 }
2572 else if (off + 2 <= cbLimit + 1)
2573 {
2574 /* Writing the [ig]dtr limit word does not cause #GP here (it fits within the segment limit), though it may cause #PF; it is the base write that exceeds the limit and causes the #GP. */
2575 if (off <= X86_PAGE_SIZE - 2)
2576 {
2577 if (pWorker->fSs)
2578 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2579 else
2580 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2581 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2582 Bs3TestPrintf("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2583 pbExpected, &pbTest[off], off);
2584 cb = X86_PAGE_SIZE - off - 2;
2585 if ( off < X86_PAGE_SIZE - 2
2586 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2587 Bs3TestPrintf("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2588 bFiller, cb, &pbTest[off + 2], off);
2589 }
2590 else
2591 {
2592 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2593 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
2594 if ( off < X86_PAGE_SIZE
2595 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2596 Bs3TestPrintf("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2597 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2598 }
2599 }
2600 else
2601 {
2602 /* #GP/#SS on limit. */
2603 if (pWorker->fSs)
2604 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2605 else
2606 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2607 if ( off < X86_PAGE_SIZE
2608 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2609 Bs3TestPrintf("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2610 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2611 }
2612
2613 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2614 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2615 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2616 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2617
2618 g_usBs3TestStep++;
2619
2620 /* Set DS to 0 and check that we get #GP(0). */
2621 if (!pWorker->fSs)
2622 {
2623 Ctx.ds = 0;
2624 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2625 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2626 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2627 g_usBs3TestStep++;
2628 }
2629 }
2630 }
2631
2632 /* Expand down. */
2633 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2634 uFlatTest -= X86_PAGE_SIZE;
2635
2636 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2637 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2638 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2639 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2640
2641 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2642 {
2643 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2644 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2645 {
2646 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2647 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2648 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2649 if (cbLimit < off && off >= X86_PAGE_SIZE)
2650 {
2651 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2652 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2653 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2654 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2655 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2656 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2657 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2658 cbIdtr, off, cbLimit, bFiller, cb, pbTest[off + cbIdtr]);
2659 }
2660 else
2661 {
2662 if (cbLimit < off && off < X86_PAGE_SIZE)
2663 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2664 uFlatTest + off);
2665 else if (pWorker->fSs)
2666 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2667 else
2668 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2669 cb = cbIdtr*2;
2670 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2671 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2672 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE]);
2673 }
2674 g_usBs3TestStep++;
2675 }
2676 }
2677
2678 pbTest += X86_PAGE_SIZE;
2679 uFlatTest += X86_PAGE_SIZE;
2680 }
2681
2682 Bs3MemGuardedTestPageFree(pbTest);
2683 }
2684
2685 /*
2686 * Check non-canonical 64-bit space.
2687 */
2688 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2689 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2690 {
2691 /* Make our references relative to the gap. */
2692 pbTest += g_cbBs3PagingOneCanonicalTrap;
2693
2694 /* Hit it from below. */
2695 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2696 {
2697 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2698 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2699 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2700 if (off + cbIdtr <= 0)
2701 {
2702 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2703 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2704 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2705 }
2706 else
2707 {
2708 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2709 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2710 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2711 off2 = off <= -2 ? 2 : 0;
2712 cb = cbIdtr - off2;
2713 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2714 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2715 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2716 }
2717 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2718 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2719 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2720 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2721 }
2722
2723 /* Hit it from above. */
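        /* Stores starting at or above 0xffff800000000000 (off >= 0) must succeed;
           starting inside the hole must get #GP(0) and leave the memory untouched. */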
2724 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2725 {
2726 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2727 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2728 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2729 if (off >= 0)
2730 {
2731 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2732 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2733 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2734 }
2735 else
2736 {
2737 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2738 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2739 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2740 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2741 }
2742 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2743 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2744 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2745 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2746 }
2747
2748 }
2749#endif
2750}
2751
2752
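/**
 * Common driver for the LIDT/LGDT tests.
 *
 * Runs each applicable worker in rings 0 thru 3 (ring 3 only for V86 modes,
 * a single pass for real-mode system setups), skipping the SS-based variants
 * in ring 0 (see note below), and passes along the original table image
 * (pvRestore/cbRestore) and the expected register content (pbExpected).
 */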
2753# define bs3CpuBasic2_lidt_lgdt_Common BS3_CMN_NM(bs3CpuBasic2_lidt_lgdt_Common)
2754BS3_DECL_NEAR(void) bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2755 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
2756{
2757 unsigned idx;
2758 unsigned bRing;
2759 unsigned iStep = 0;
2760
2761 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2762 test and don't want to bother with double faults. */
2763 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
2764 {
2765 for (idx = 0; idx < cWorkers; idx++)
2766 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2767 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2768 {
2769 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
2770 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
2771 g_usBs3TestStep = iStep;
2772 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
2773 iStep += 1000;
2774 }
2775 if (BS3_MODE_IS_RM_SYS(bTestMode))
2776 break;
2777 }
2778}
2779
2780
2781# if ARCH_BITS != 64
2782
2783/**
2784 * Worker for bs3CpuBasic2_TssGateEsp that tests the INT 80 from outer rings.
2785 */
2786# define bs3CpuBasic2_TssGateEsp_AltStackOuterRing BS3_CMN_NM(bs3CpuBasic2_TssGateEsp_AltStackOuterRing)
2787BS3_DECL_NEAR(void) bs3CpuBasic2_TssGateEsp_AltStackOuterRing(PCBS3REGCTX pCtx, uint8_t bRing, uint8_t *pbAltStack,
2788 size_t cbAltStack, bool f16BitStack, bool f16BitTss,
2789 bool f16BitHandler, unsigned uLine)
2790{
2791 uint8_t const cbIretFrame = f16BitHandler ? 5*2 : 5*4;
2792 BS3REGCTX Ctx2;
2793 BS3TRAPFRAME TrapCtx;
2794 uint8_t *pbTmp;
2795 g_usBs3TestStep = uLine;
2796
2797 Bs3MemCpy(&Ctx2, pCtx, sizeof(Ctx2));
2798 Bs3RegCtxConvertToRingX(&Ctx2, bRing);
2799
2800 if (pbAltStack)
2801 {
2802 Ctx2.rsp.u = Bs3SelPtrToFlat(pbAltStack + 0x1980);
2803 Bs3MemZero(pbAltStack, cbAltStack);
2804 }
2805
2806 Bs3TrapSetJmpAndRestore(&Ctx2, &TrapCtx);
2807
2808 if (!f16BitStack && f16BitTss)
2809 Ctx2.rsp.u &= UINT16_MAX;
2810
2811 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx2, 0x80 /*bXcpt*/);
2812 CHECK_MEMBER("bCpl", "%u", TrapCtx.Ctx.bCpl, bRing);
2813 CHECK_MEMBER("cbIretFrame", "%#x", TrapCtx.cbIretFrame, cbIretFrame);
2814
2815 if (pbAltStack)
2816 {
2817 uint64_t uExpectedRsp = (f16BitTss ? Bs3Tss16.sp0 : Bs3Tss32.esp0) - cbIretFrame;
2818 if (f16BitStack)
2819 {
2820 uExpectedRsp &= UINT16_MAX;
2821 uExpectedRsp |= Ctx2.rsp.u & ~(uint64_t)UINT16_MAX;
2822 }
2823 if ( TrapCtx.uHandlerRsp != uExpectedRsp
2824 || TrapCtx.uHandlerSs != (f16BitTss ? Bs3Tss16.ss0 : Bs3Tss32.ss0))
2825 bs3CpuBasic2_FailedF("handler SS:ESP=%04x:%08RX64, expected %04x:%08RX64",
2826 TrapCtx.uHandlerSs, TrapCtx.uHandlerRsp, f16BitTss ? Bs3Tss16.ss0 : Bs3Tss32.ss0, uExpectedRsp);
2827
2828 pbTmp = (uint8_t *)ASMMemFirstNonZero(pbAltStack, cbAltStack);
2829 if ((f16BitStack || TrapCtx.uHandlerRsp <= UINT16_MAX) && pbTmp != NULL)
2830 bs3CpuBasic2_FailedF("someone touched the alt stack (%p) with SS:ESP=%04x:%#RX32: %p=%02x",
2831 pbAltStack, Ctx2.ss, Ctx2.rsp.u32, pbTmp, *pbTmp);
2832 else if (!f16BitStack && TrapCtx.uHandlerRsp > UINT16_MAX && pbTmp == NULL)
2833 bs3CpuBasic2_FailedF("the alt stack (%p) was not used SS:ESP=%04x:%#RX32\n", pbAltStack, Ctx2.ss, Ctx2.rsp.u32);
2834 }
2835}
2836
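/**
 * Common worker for the TSS gate / ESP tests.
 *
 * Sets up IDT entries 0x80 (DPL=3) and 0x81 (DPL=0), fires INT 80h from all
 * rings and checks how SS:[E]SP is picked up from the TSS on ring transitions,
 * including mismatched 16/32-bit stack configurations.
 */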
2837# define bs3CpuBasic2_TssGateEspCommon BS3_CMN_NM(bs3CpuBasic2_TssGateEspCommon)
2838BS3_DECL_NEAR(void) bs3CpuBasic2_TssGateEspCommon(bool const g_f16BitSys, PX86DESC const paIdt, unsigned const cIdteShift)
2839{
2840 BS3TRAPFRAME TrapCtx;
2841 BS3REGCTX Ctx;
2842 BS3REGCTX Ctx2;
2843# if TMPL_BITS == 16
2844 uint8_t *pbTmp;
2845# endif
2846
2847 /* make sure they're allocated */
2848 Bs3MemZero(&Ctx, sizeof(Ctx));
2849 Bs3MemZero(&Ctx2, sizeof(Ctx2));
2850 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2851
2852 Bs3RegCtxSave(&Ctx);
2853 Ctx.rsp.u -= 0x80;
2854 Ctx.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
2855# if TMPL_BITS == 32
2856 g_uBs3TrapEipHint = Ctx.rip.u32;
2857# endif
2858
2859 /*
2860 * We'll be using IDT entry 80 and 81 here. The first one will be
2861 * accessible from all DPLs, the latter not. So, start with setting
2862 * the DPLs.
2863 */
2864 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 3;
2865 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 0;
2866
2867 /*
2868 * Check that the basic stuff works first.
2869 */
2870 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2871 g_usBs3TestStep = __LINE__;
2872 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx, 0x80 /*bXcpt*/);
2873
2874 bs3CpuBasic2_TssGateEsp_AltStackOuterRing(&Ctx, 1, NULL, 0, g_f16BitSys, g_f16BitSys, g_f16BitSys, __LINE__);
2875 bs3CpuBasic2_TssGateEsp_AltStackOuterRing(&Ctx, 2, NULL, 0, g_f16BitSys, g_f16BitSys, g_f16BitSys, __LINE__);
2876 bs3CpuBasic2_TssGateEsp_AltStackOuterRing(&Ctx, 3, NULL, 0, g_f16BitSys, g_f16BitSys, g_f16BitSys, __LINE__);
2877
2878 /*
2879 * Check that the upper part of ESP is preserved when taking the interrupt, both on the same ring and across rings via the TSS.
2880 */
2881 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
2882 {
2883 size_t const cbAltStack = _8K;
2884 uint8_t *pbAltStack = Bs3MemAllocZ(BS3MEMKIND_TILED, cbAltStack);
2885 if (pbAltStack)
2886 {
2887 /* same ring */
2888 g_usBs3TestStep = __LINE__;
2889 Bs3MemCpy(&Ctx2, &Ctx, sizeof(Ctx2));
2890 Ctx2.rsp.u = Bs3SelPtrToFlat(pbAltStack + 0x1980);
2891 if (Bs3TrapSetJmp(&TrapCtx))
2892 Bs3RegCtxRestore(&Ctx2, 0); /* (does not return) */
2893 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx2, 0x80 /*bXcpt*/);
2894# if TMPL_BITS == 16
2895 if ((pbTmp = (uint8_t *)ASMMemFirstNonZero(pbAltStack, cbAltStack)) != NULL)
2896 bs3CpuBasic2_FailedF("someone touched the alt stack (%p) with SS:ESP=%04x:%#RX32: %p=%02x\n",
2897 pbAltStack, Ctx2.ss, Ctx2.rsp.u32, pbTmp, *pbTmp);
2898# else
2899 if (ASMMemIsZero(pbAltStack, cbAltStack))
2900 bs3CpuBasic2_FailedF("alt stack wasn't used despite SS:ESP=%04x:%#RX32\n", Ctx2.ss, Ctx2.rsp.u32);
2901# endif
2902
2903 /* Different rings (load SS0:SP0 from TSS). */
2904 bs3CpuBasic2_TssGateEsp_AltStackOuterRing(&Ctx, 1, pbAltStack, cbAltStack,
2905 g_f16BitSys, g_f16BitSys, g_f16BitSys, __LINE__);
2906 bs3CpuBasic2_TssGateEsp_AltStackOuterRing(&Ctx, 2, pbAltStack, cbAltStack,
2907 g_f16BitSys, g_f16BitSys, g_f16BitSys, __LINE__);
2908 bs3CpuBasic2_TssGateEsp_AltStackOuterRing(&Ctx, 3, pbAltStack, cbAltStack,
2909 g_f16BitSys, g_f16BitSys, g_f16BitSys, __LINE__);
2910
2911 /* Different rings but switch the SS bitness in the TSS. */
2912 if (g_f16BitSys)
2913 {
2914 Bs3Tss16.ss0 = BS3_SEL_R0_SS32;
2915 bs3CpuBasic2_TssGateEsp_AltStackOuterRing(&Ctx, 1, pbAltStack, cbAltStack,
2916 false, g_f16BitSys, g_f16BitSys, __LINE__);
2917 Bs3Tss16.ss0 = BS3_SEL_R0_SS16;
2918 }
2919 else
2920 {
2921 Bs3Tss32.ss0 = BS3_SEL_R0_SS16;
2922 bs3CpuBasic2_TssGateEsp_AltStackOuterRing(&Ctx, 1, pbAltStack, cbAltStack,
2923 true, g_f16BitSys, g_f16BitSys, __LINE__);
2924 Bs3Tss32.ss0 = BS3_SEL_R0_SS32;
2925 }
2926
2927 Bs3MemFree(pbAltStack, cbAltStack);
2928 }
2929 else
2930 Bs3TestPrintf("%s: Skipping ESP check, alloc failed\n", g_pszTestMode);
2931 }
2932 else
2933 Bs3TestPrintf("%s: Skipping ESP check, CPU too old\n", g_pszTestMode);
2934}
2935
2936# endif /* ARCH_BITS != 64 */
2937#endif /* BS3_INSTANTIATING_CMN */
2938
2939
2940/*
2941 * Mode specific code.
2942 * Mode specific code.
2943 * Mode specific code.
2944 */
2945#ifdef BS3_INSTANTIATING_MODE
2946
2947BS3_DECL_FAR(uint8_t) TMPL_NM(bs3CpuBasic2_TssGateEsp)(uint8_t bMode)
2948{
2949 uint8_t bRet = 0;
2950
2951 g_pszTestMode = TMPL_NM(g_szBs3ModeName);
2952 g_bTestMode = bMode;
2953 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(TMPL_MODE);
2954
2955# if TMPL_MODE == BS3_MODE_PE16 \
2956 || TMPL_MODE == BS3_MODE_PE16_32 \
2957 || TMPL_MODE == BS3_MODE_PP16 \
2958 || TMPL_MODE == BS3_MODE_PP16_32 \
2959 || TMPL_MODE == BS3_MODE_PAE16 \
2960 || TMPL_MODE == BS3_MODE_PAE16_32 \
2961 || TMPL_MODE == BS3_MODE_PE32
2962 bs3CpuBasic2_TssGateEspCommon(BS3_MODE_IS_16BIT_SYS(TMPL_MODE),
2963 (PX86DESC)MyBs3Idt,
2964 BS3_MODE_IS_64BIT_SYS(TMPL_MODE) ? 1 : 0);
2965# else
2966 bRet = BS3TESTDOMODE_SKIPPED;
2967# endif
2968
2969 /*
2970 * Re-initialize the IDT.
2971 */
2972 Bs3TrapInit();
2973 return bRet;
2974}
2975
2976
2977BS3_DECL_FAR(uint8_t) TMPL_NM(bs3CpuBasic2_RaiseXcpt1)(uint8_t bMode)
2978{
2979 g_pszTestMode = TMPL_NM(g_szBs3ModeName);
2980 g_bTestMode = bMode;
2981 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(TMPL_MODE);
2982
2983# if !BS3_MODE_IS_RM_OR_V86(TMPL_MODE)
2984
2985 /*
2986 * Pass to common worker which is only compiled once per mode.
2987 */
2988 bs3CpuBasic2_RaiseXcpt1Common(MY_SYS_SEL_R0_CS,
2989 MY_SYS_SEL_R0_CS_CNF,
2990 MY_SYS_SEL_R0_SS,
2991 (PX86DESC)MyBs3Idt,
2992 BS3_MODE_IS_64BIT_SYS(TMPL_MODE) ? 1 : 0);
2993
2994 /*
2995 * Re-initialize the IDT.
2996 */
2997 Bs3TrapInit();
2998 return 0;
2999# elif TMPL_MODE == BS3_MODE_RM
3000
3001 /*
3002 * Check
3003 */
3004 /** @todo check */
3005 return BS3TESTDOMODE_SKIPPED;
3006
3007# else
3008 return BS3TESTDOMODE_SKIPPED;
3009# endif
3010}
3011
3012
3013BS3_DECL_FAR(uint8_t) TMPL_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3014{
3015 g_pszTestMode = TMPL_NM(g_szBs3ModeName);
3016 g_bTestMode = bMode;
3017 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(TMPL_MODE);
3018
3019 Bs3PrintStrN(RT_STR_TUPLE("Hello world!\n"));
3020# if !BS3_MODE_IS_V86(TMPL_MODE)
3021 Bs3TestPrintf(RT_STR_TUPLE("Hi there!\n"));
3022# endif
3023 return BS3TESTDOMODE_SKIPPED;
3024}
3025
3026
3027BS3_DECL_FAR(uint8_t) TMPL_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
3028{
3029 union
3030 {
3031 RTIDTR Idtr;
3032 uint8_t ab[16];
3033 } Expected;
3034
3035 g_pszTestMode = TMPL_NM(g_szBs3ModeName);
3036 g_bTestMode = bMode;
3037 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(TMPL_MODE);
3038
3039 BS3_ASSERT(bMode == TMPL_MODE);
3040
3041 /*
3042 * Pass to common worker which is only compiled once per mode.
3043 */
3044 Bs3MemZero(&Expected, sizeof(Expected));
3045 ASMGetIDTR(&Expected.Idtr);
3046 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
3047
3048 /*
3049 * Re-initialize the IDT.
3050 */
3051 Bs3TrapInit();
3052 return 0;
3053}
3054
3055
3056BS3_DECL_FAR(uint8_t) TMPL_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
3057{
3058 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
3059 uint64_t uNew = 0;
3060 union
3061 {
3062 RTGDTR Gdtr;
3063 uint8_t ab[16];
3064 } Expected;
3065
3066 g_pszTestMode = TMPL_NM(g_szBs3ModeName);
3067 g_bTestMode = bMode;
3068 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(TMPL_MODE);
3069 BS3_ASSERT(bMode == TMPL_MODE);
3070
3071 /*
3072 * If in paged mode, try to push the GDT way up in the address space.
3073 */
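/* Bs3PagingAlias makes the GDT pages appear at the high linear address uNew as
   well, and Bs3UtilSetFullGdtr then points GDTR at that alias so the SGDT
   results exercise a base address with the upper bits set. */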
3074 if (BS3_MODE_IS_PAGED(bMode))
3075 {
3076/** @todo loading non-canonical base addresses. */
3077 int rc;
3078 uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
3079 uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
3080 rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
3081 if (RT_SUCCESS(rc))
3082 {
3083 Bs3Lgdt_Gdt.uAddr = uNew;
3084 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
3085 }
3086 }
3087
3088 /*
3089 * Pass to common worker which is only compiled once per mode.
3090 */
3091 Bs3MemZero(&Expected, sizeof(Expected));
3092 ASMGetGDTR(&Expected.Gdtr);
3093 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);
3094
3095 /*
3096 * Unalias the GDT.
3097 */
3098 if (uNew != 0)
3099 {
3100 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
3101 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
3102 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
3103 }
3104
3105 /*
3106 * Re-initialize the IDT.
3107 */
3108 Bs3TrapInit();
3109 return 0;
3110}
3111
3112
3113BS3_DECL_FAR(uint8_t) TMPL_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3114{
3115 union
3116 {
3117 RTIDTR Idtr;
3118 uint8_t ab[32]; /* At least cbIdtr*2! */
3119 } Expected;
3120
3121 g_pszTestMode = TMPL_NM(g_szBs3ModeName);
3122 g_bTestMode = bMode;
3123 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(TMPL_MODE);
3124
3125 BS3_ASSERT(bMode == TMPL_MODE);
3126
3127 /*
3128 * Pass to common worker which is only compiled once per mode.
3129 */
3130 Bs3MemZero(&Expected, sizeof(Expected));
3131 ASMGetIDTR(&Expected.Idtr);
3132
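    /* The pvRestore/cbRestore arguments describe the original IVT/IDT setup of
       the current system mode so it can be restored after the workers have
       loaded test values into IDTR. */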
3133 if (BS3_MODE_IS_RM_SYS(bMode))
3134 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3135 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3136 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3137 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3138 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3139 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3140 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3141 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3142 else
3143 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3144 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3145
3146 /*
3147 * Re-initialize the IDT.
3148 */
3149 Bs3TrapInit();
3150 return 0;
3151}
3152
3153#if 0
3154BS3_DECL_FAR(uint8_t) TMPL_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3155{
3156 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
3157 uint64_t uNew = 0;
3158 union
3159 {
3160 RTGDTR Gdtr;
3161 uint8_t ab[16];
3162 } Expected;
3163
3164 g_pszTestMode = TMPL_NM(g_szBs3ModeName);
3165 g_bTestMode = bMode;
3166 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(TMPL_MODE);
3167 BS3_ASSERT(bMode == TMPL_MODE);
3168
3169 /*
3170 * Pass to common worker which is only compiled once per mode.
3171 */
3172 Bs3MemZero(&Expected, sizeof(Expected));
3173 ASMGetGDTR(&Expected.Gdtr);
3174 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), &Bs3Lgdt_Gdt, sizeof(Bs3Lgdt_Gdt), Expected.ab); /** @todo dedicated LGDT worker table and proper restore data; the SGDT workers and Bs3Lgdt_Gdt are placeholders. */
3175
3176 /*
3177 * Unalias the GDT.
3178 */
3179 if (uNew != 0)
3180 {
3181 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
3182 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
3183 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
3184 }
3185
3186 /*
3187 * Re-initialize the IDT.
3188 */
3189 Bs3TrapInit();
3190 return 0;
3191}
3192#endif
3193
3194#endif /* BS3_INSTANTIATING_MODE */
3195