VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c@ 94876

Last change on this file since 94876 was 93115, checked in by vboxsync, 3 years ago

scm --update-copyright-year

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 159.4 KB
Line 
1/* $Id: bs3-cpu-basic-2-x0.c 93115 2022-01-01 11:31:46Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2022 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#define BS3_USE_X0_TEXT_SEG
32#include <bs3kit.h>
33#include <iprt/asm.h>
34#include <iprt/asm-amd64-x86.h>
35
36
37/*********************************************************************************************************************************
38* Defined Constants And Macros *
39*********************************************************************************************************************************/
#undef CHECK_MEMBER
/** Compares an actual trap-frame member against the expected value and logs a
 * formatted failure via bs3CpuBasic2_FailedF on mismatch.
 * @param a_szName    Member name for the failure message (string literal).
 * @param a_szFmt     Format specifier for both values (string literal).
 * @param a_Actual    The value found.
 * @param a_Expected  The value expected. */
#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
    do \
    { \
        if ((a_Actual) == (a_Expected)) { /* likely */ } \
        else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
    } while (0)
47
48
/** Indicating that we've got operand size prefix and that it matters. */
#define BS3CB2SIDTSGDT_F_OPSIZE   UINT8_C(0x01)
/** Worker requires 386 or later. */
#define BS3CB2SIDTSGDT_F_386PLUS  UINT8_C(0x02)


/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
 * @{ */
#define MYOP_LD     0x1  /**< Snippet loads from the memory operand. */
#define MYOP_ST     0x2  /**< Snippet stores to the memory operand. */
#define MYOP_LD_ST  0x3  /**< Snippet both loads and stores (MYOP_LD | MYOP_ST). */
#define MYOP_EFL    0x4  /**< Snippet also modifies EFLAGS (OR'ed with LD/ST bits). */
#define MYOP_LD_DIV 0x5  /**< Load consumed by DIV; NOTE(review): overlaps MYOP_LD|0x4 bit-wise - confirm fOp is tested as flags vs values by users. */
/** @} */
63
64
65/*********************************************************************************************************************************
66* Structures and Typedefs *
67*********************************************************************************************************************************/
/** Descriptor type pair used in the invalid CS/SS type tables below. */
typedef struct BS3CB2INVLDESCTYPE
{
    uint8_t u4Type;     /**< The 4-bit descriptor type (X86_SEL_TYPE_XXX or raw system type value). */
    uint8_t u1DescType; /**< The S bit: 1 = code/data descriptor, 0 = system descriptor. */
} BS3CB2INVLDESCTYPE;

/** Describes one SIDT/SGDT/LIDT/LGDT test worker. */
typedef struct BS3CB2SIDTSGDT
{
    const char *pszDesc;    /**< Human readable description of the instruction variant. */
    FPFNBS3FAR  fpfnWorker; /**< The assembly worker to execute. */
    uint8_t     cbInstr;    /**< Instruction (sequence) length in bytes. */
    bool        fSs;        /**< Whether the memory operand uses an SS segment override. */
    uint8_t     bMode;      /**< BS3_MODE_CODE_XXX mask of applicable code modes. */
    uint8_t     fFlags;     /**< BS3CB2SIDTSGDT_F_XXX. */
} BS3CB2SIDTSGDT;


/** Signature of an assembly test code snippet. */
typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);

/** A test code snippet together with its operation kind and access size. */
typedef struct FNBS3CPUBASIC2ACTSTCODE
{
    FNBS3CPUBASIC2ACSNIPPET BS3_FAR *pfn;   /**< The snippet to execute. */
    uint8_t                          fOp;   /**< MYOP_XXX. */
    uint8_t                          cbMem; /**< Size of the memory access in bytes. */
} FNBS3CPUBASIC2ACTSTCODE;
typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;

/** Per-code-mode table of common test snippets.
 * NOTE(review): the struct tag (BS3CPUBASIC2ACTTSTCMNMODE) and the typedef
 * name (BS3CPUBASIC2PFTTSTCMNMODE) don't match; harmless since only the
 * typedef appears to be used here, but consider aligning them. */
typedef struct BS3CPUBASIC2ACTTSTCMNMODE
{
    uint8_t                   bMode;     /**< BS3_MODE_CODE_XXX. */
    uint16_t                  cEntries;  /**< Number of entries in paEntries. */
    PCFNBS3CPUBASIC2ACTSTCODE paEntries; /**< The snippet table for this mode. */
} BS3CPUBASIC2PFTTSTCMNMODE;
typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
102
103
104/*********************************************************************************************************************************
105* External Symbols *
106*********************************************************************************************************************************/
107extern FNBS3FAR bs3CpuBasic2_Int80;
108extern FNBS3FAR bs3CpuBasic2_Int81;
109extern FNBS3FAR bs3CpuBasic2_Int82;
110extern FNBS3FAR bs3CpuBasic2_Int83;
111
112extern FNBS3FAR bs3CpuBasic2_ud2;
113#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
114extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
115
116extern FNBS3FAR bs3CpuBasic2_iret;
117extern FNBS3FAR bs3CpuBasic2_iret_opsize;
118extern FNBS3FAR bs3CpuBasic2_iret_rexw;
119
120extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
121extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
122extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
123extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
124extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
125extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
126extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
127extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
128extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
129extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
130extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
131extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
132
133extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
134extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
135extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
136extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
137extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
138extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
139extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
140extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
141extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
142extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
143extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
144extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
145
146extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
147extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
148extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
149extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
150extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
151extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
152extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
153extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
154extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
155extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
156extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
157extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
158extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
159
160extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
161extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
162extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
163extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
164extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
165extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
166extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
167extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
168extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
169extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
170extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
171extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
172
173
174/* bs3-cpu-basic-2-template.mac: */
175FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
176FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
177FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
178FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
179FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
180
181FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
182FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
183FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
184FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
185FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
186
187FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
188FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
189FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
190FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
191FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
192
193
194/*********************************************************************************************************************************
195* Global Variables *
196*********************************************************************************************************************************/
/** Name of the current test mode (set by bs3CpuBasic2_SetGlobals).
 * NOTE(review): initialized to a non-NULL sentinel, presumably so use before
 * initialization is noticeable - confirm intent. */
static const char BS3_FAR *g_pszTestMode = (const char *)1;
/** The current test mode (BS3_MODE_XXX), set by bs3CpuBasic2_SetGlobals. */
static uint8_t             g_bTestMode = 1;
/** Whether the current test mode has a 16-bit system (kernel/handler) side. */
static bool                g_f16BitSys = 1;
200
201
/** SIDT test workers.
 * Fields: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
{
    { "sidt [bx]",            bs3CpuBasic2_sidt_bx_ud2_c16,             3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sidt [ss:bx]",         bs3CpuBasic2_sidt_ss_bx_ud2_c16,          4, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sidt [bx]",        bs3CpuBasic2_sidt_opsize_bx_ud2_c16,      4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sidt [ss:bx]",     bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16,   5, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sidt [ebx]",           bs3CpuBasic2_sidt_bx_ud2_c32,             3, false, BS3_MODE_CODE_32, 0 },
    { "sidt [ss:ebx]",        bs3CpuBasic2_sidt_ss_bx_ud2_c32,          4, true,  BS3_MODE_CODE_32, 0 },
    { "o16 sidt [ebx]",       bs3CpuBasic2_sidt_opsize_bx_ud2_c32,      4, false, BS3_MODE_CODE_32, 0 },
    { "o16 sidt [ss:ebx]",    bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32,   5, true,  BS3_MODE_CODE_32, 0 },
    { "sidt [rbx]",           bs3CpuBasic2_sidt_bx_ud2_c64,             3, false, BS3_MODE_CODE_64, 0 },
    { "o64 sidt [rbx]",       bs3CpuBasic2_sidt_rexw_bx_ud2_c64,        4, false, BS3_MODE_CODE_64, 0 },
    { "o32 sidt [rbx]",       bs3CpuBasic2_sidt_opsize_bx_ud2_c64,      4, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 sidt [rbx]",   bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
};
218
/** SGDT test workers (mirrors g_aSidtWorkers).
 * Fields: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
{
    { "sgdt [bx]",            bs3CpuBasic2_sgdt_bx_ud2_c16,             3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sgdt [ss:bx]",         bs3CpuBasic2_sgdt_ss_bx_ud2_c16,          4, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sgdt [bx]",        bs3CpuBasic2_sgdt_opsize_bx_ud2_c16,      4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sgdt [ss:bx]",     bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16,   5, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sgdt [ebx]",           bs3CpuBasic2_sgdt_bx_ud2_c32,             3, false, BS3_MODE_CODE_32, 0 },
    { "sgdt [ss:ebx]",        bs3CpuBasic2_sgdt_ss_bx_ud2_c32,          4, true,  BS3_MODE_CODE_32, 0 },
    { "o16 sgdt [ebx]",       bs3CpuBasic2_sgdt_opsize_bx_ud2_c32,      4, false, BS3_MODE_CODE_32, 0 },
    { "o16 sgdt [ss:ebx]",    bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32,   5, true,  BS3_MODE_CODE_32, 0 },
    { "sgdt [rbx]",           bs3CpuBasic2_sgdt_bx_ud2_c64,             3, false, BS3_MODE_CODE_64, 0 },
    { "o64 sgdt [rbx]",       bs3CpuBasic2_sgdt_rexw_bx_ud2_c64,        4, false, BS3_MODE_CODE_64, 0 },
    { "o32 sgdt [rbx]",       bs3CpuBasic2_sgdt_opsize_bx_ud2_c64,      4, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 sgdt [rbx]",   bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
};
235
/** LIDT test workers.  Each worker does lidt, stores the result with sidt,
 * then reloads the saved value; cbInstr covers the whole sequence.
 * Fields: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
{
    { "lidt [bx]",              bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16,             11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lidt [ss:bx]",           bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16,          12, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lidt [bx]",          bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16,      12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [bx]; sidt32",  bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16,    27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [ss:bx]",       bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16,   13, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lidt [ebx]",             bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32,             11, false, BS3_MODE_CODE_32, 0 },
    { "lidt [ss:ebx]",          bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32,          12, true,  BS3_MODE_CODE_32, 0 },
    { "o16 lidt [ebx]",         bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32,      12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lidt [ss:ebx]",      bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32,   13, true,  BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "lidt [rbx]",             bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64,              9, false, BS3_MODE_CODE_64, 0 },
    { "o64 lidt [rbx]",         bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64,        10, false, BS3_MODE_CODE_64, 0 },
    { "o32 lidt [rbx]",         bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64,      10, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 lidt [rbx]",     bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
};
253
/** LGDT test workers (mirrors g_aLidtWorkers).
 * Fields: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
{
    { "lgdt [bx]",            bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16,             11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lgdt [ss:bx]",         bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16,          12, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lgdt [bx]",        bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16,      12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lgdt [ss:bx]",     bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16,   13, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lgdt [ebx]",           bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32,             11, false, BS3_MODE_CODE_32, 0 },
    { "lgdt [ss:ebx]",        bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32,          12, true,  BS3_MODE_CODE_32, 0 },
    { "o16 lgdt [ebx]",       bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32,      12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lgdt [ss:ebx]",    bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32,   13, true,  BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "lgdt [rbx]",           bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64,              9, false, BS3_MODE_CODE_64, 0 },
    { "o64 lgdt [rbx]",       bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64,        10, false, BS3_MODE_CODE_64, 0 },
    { "o32 lgdt [rbx]",       bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64,      10, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 lgdt [rbx]",   bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
};
270
271
272
#if 0
/** Table containing invalid CS selector types.
 * The first eight entries are data descriptor types (S=1, not executable);
 * the raw values 0..15 with S=0 cover the 16 system descriptor types. */
static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
{
    { X86_SEL_TYPE_RO,          1 },
    { X86_SEL_TYPE_RO_ACC,      1 },
    { X86_SEL_TYPE_RW,          1 },
    { X86_SEL_TYPE_RW_ACC,      1 },
    { X86_SEL_TYPE_RO_DOWN,     1 },
    { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
    { X86_SEL_TYPE_RW_DOWN,     1 },
    { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
    { 0, 0 },
    { 1, 0 },
    { 2, 0 },
    { 3, 0 },
    { 4, 0 },
    { 5, 0 },
    { 6, 0 },
    { 7, 0 },
    { 8, 0 },
    { 9, 0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};

/** Table containing invalid SS selector types.
 * The first eight entries are code descriptor types (S=1, not writable data);
 * the raw values 0..15 with S=0 cover the 16 system descriptor types. */
static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
{
    { X86_SEL_TYPE_EO,          1 },
    { X86_SEL_TYPE_EO_ACC,      1 },
    { X86_SEL_TYPE_ER,          1 },
    { X86_SEL_TYPE_ER_ACC,      1 },
    { X86_SEL_TYPE_EO_CONF,     1 },
    { X86_SEL_TYPE_EO_CONF_ACC, 1 },
    { X86_SEL_TYPE_ER_CONF,     1 },
    { X86_SEL_TYPE_ER_CONF_ACC, 1 },
    { 0, 0 },
    { 1, 0 },
    { 2, 0 },
    { 3, 0 },
    { 4, 0 },
    { 5, 0 },
    { 6, 0 },
    { 7, 0 },
    { 8, 0 },
    { 9, 0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};
#endif
332
333
/** Common 16-bit code snippets: load/store/xchg/cmpxchg/div on a word. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
{
    { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16,     MYOP_LD, 2 },
    { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16,     MYOP_ST, 2 },
    { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16,    MYOP_LD_ST, 2 },
    { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, MYOP_LD_ST | MYOP_EFL, 2 },
    { bs3CpuBasic2_div_ds_bx__ud2_c16,        MYOP_LD_DIV, 2 },
};
342
/** Common 32-bit code snippets: same operations as g_aCmn16 on a dword. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
{
    { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32,     MYOP_LD, 4 },
    { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32,     MYOP_ST, 4 },
    { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32,    MYOP_LD_ST, 4 },
    { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32, MYOP_LD_ST | MYOP_EFL, 4 },
    { bs3CpuBasic2_div_ds_bx__ud2_c32,        MYOP_LD_DIV, 4 },
};
351
/** Common 64-bit code snippets: same operations as g_aCmn16 on a qword. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
{
    { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64,     MYOP_LD, 8 },
    { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64,     MYOP_ST, 8 },
    { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64,    MYOP_LD_ST, 8 },
    { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64, MYOP_LD_ST | MYOP_EFL, 8 },
    { bs3CpuBasic2_div_ds_bx__ud2_c64,        MYOP_LD_DIV, 8 },
};
360
/** Maps each code mode to its common snippet table (V86 shares the 16-bit one). */
static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
{
    {   BS3_MODE_CODE_16,  RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    {   BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    {   BS3_MODE_CODE_32,  RT_ELEMENTS(g_aCmn32), g_aCmn32 },
    {   BS3_MODE_CODE_64,  RT_ELEMENTS(g_aCmn64), g_aCmn64 },
};
368
369
370/**
371 * Sets globals according to the mode.
372 *
373 * @param bTestMode The test mode.
374 */
375static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
376{
377 g_bTestMode = bTestMode;
378 g_pszTestMode = Bs3GetModeName(bTestMode);
379 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
380 g_usBs3TestStep = 0;
381}
382
383
/**
 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
 * and g_pszTestMode.
 *
 * @param   pszFormat   printf style format string describing the failure.
 * @param   ...         Format arguments.
 */
static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
{
    va_list va;

    /* Format the details into a local buffer first, then emit one message
       with the step/mode prefix.  NOTE(review): assumes Bs3StrPrintfV
       truncates safely at sizeof(szTmp) - confirm against bs3kit docs. */
    char szTmp[168];
    va_start(va, pszFormat);
    Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
    va_end(va);

    Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
}
399
400
#if 0
/**
 * Compares a trap frame produced by a 2-byte software interrupt (int xx)
 * against the starting context; no error code is expected.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the interrupt was raised from.
 * @param   bXcpt       The expected interrupt/exception number.
 */
static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt",  "%#04x",    pTrapCtx->bXcpt, bXcpt);
    CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        /* Dump the frame and halt so the failure can be inspected. */
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
422
423
#if 0
/**
 * Compares a trap frame against the starting context, additionally checking
 * which CS the handler ran with.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.
 * @param   cbIpAdjust  Bytes to advance the expected IP by (instruction length).
 * @param   bXcpt       The expected interrupt/exception number.
 * @param   uHandlerCs  The CS value the handler is expected to run with.
 */
static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
                                         uint8_t bXcpt, uint16_t uHandlerCs)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt",      "%#04x",    pTrapCtx->bXcpt, bXcpt);
    CHECK_MEMBER("bErrCd",     "%#06RX64", pTrapCtx->uErrCd, 0);
    CHECK_MEMBER("uHandlerCs", "%#06x",    pTrapCtx->uHandlerCs, uHandlerCs);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        /* Dump the frame and halt so the failure can be inspected. */
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
447
448/**
449 * Compares a CPU trap.
450 */
451static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
452 uint8_t bXcpt, bool f486ResumeFlagHint)
453{
454 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
455 uint32_t fExtraEfl;
456
457 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
458 CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
459
460 fExtraEfl = X86_EFL_RF;
461 if ( g_f16BitSys
462 || ( !f486ResumeFlagHint
463 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
464 fExtraEfl = 0;
465 else
466 fExtraEfl = X86_EFL_RF;
467#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
468 fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
469#endif
470 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 0 /*cbIpAdjust*/, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
471 if (Bs3TestSubErrorCount() != cErrorsBefore)
472 {
473 Bs3TrapPrintFrame(pTrapCtx);
474#if 1
475 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
476 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
477 ASMHalt();
478#endif
479 }
480}
481
482
/**
 * Compares \#GP trap.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.
 * @param   uErrCd      The expected \#GP error code.
 */
static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/);
}
490
#if 0
/**
 * Compares \#NP trap.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.
 * @param   uErrCd      The expected \#NP error code.
 */
static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/);
}
#endif
500
/**
 * Compares \#SS trap.
 *
 * @param   pTrapCtx            The captured trap frame.
 * @param   pStartCtx           The context the trap was raised from.
 * @param   uErrCd              The expected \#SS error code.
 * @param   f486ResumeFlagHint  Passed through; \#SS is the one wrapper where
 *                              the caller decides the EFLAGS.RF expectation.
 */
static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint);
}
508
#if 0
/**
 * Compares \#TS trap.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.
 * @param   uErrCd      The expected \#TS error code.
 */
static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/);
}
#endif
518
/**
 * Compares \#PF trap.
 *
 * Temporarily sets pStartCtx->cr2 to the expected fault address so the
 * generic context comparison checks it, restoring the original value after.
 *
 * @param   pTrapCtx        The captured trap frame.
 * @param   pStartCtx       The context the trap was raised from (CR2 field is
 *                          modified and restored, hence non-const).
 * @param   uErrCd          The expected \#PF error code.
 * @param   uCr2Expected    The expected CR2 (faulting address) value.
 */
static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd, uint64_t uCr2Expected)
{
    uint64_t const uCr2Saved = pStartCtx->cr2.u;
    pStartCtx->cr2.u = uCr2Expected;
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/);
    pStartCtx->cr2.u = uCr2Saved;
}
529
/**
 * Compares \#UD trap.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.  \#UD pushes no
 *                      error code, so zero is expected.
 */
static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD, true /*f486ResumeFlagHint*/);
}
537
/**
 * Compares \#AC trap.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.  The \#AC error
 *                      code is always zero.
 */
static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/);
}
545
546
547#if 0 /* convert me */
548static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
549 PX86DESC const paIdt, unsigned const cIdteShift)
550{
551 BS3TRAPFRAME TrapCtx;
552 BS3REGCTX Ctx80;
553 BS3REGCTX Ctx81;
554 BS3REGCTX Ctx82;
555 BS3REGCTX Ctx83;
556 BS3REGCTX CtxTmp;
557 BS3REGCTX CtxTmp2;
558 PBS3REGCTX apCtx8x[4];
559 unsigned iCtx;
560 unsigned iRing;
561 unsigned iDpl;
562 unsigned iRpl;
563 unsigned i, j, k;
564 uint32_t uExpected;
565 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
566# if TMPL_BITS == 16
567 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
568 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
569# else
570 bool const f286 = false;
571 bool const f386Plus = true;
572 int rc;
573 uint8_t *pbIdtCopyAlloc;
574 PX86DESC pIdtCopy;
575 const unsigned cbIdte = 1 << (3 + cIdteShift);
576 RTCCUINTXREG uCr0Saved = ASMGetCR0();
577 RTGDTR GdtrSaved;
578# endif
579 RTIDTR IdtrSaved;
580 RTIDTR Idtr;
581
582 ASMGetIDTR(&IdtrSaved);
583# if TMPL_BITS != 16
584 ASMGetGDTR(&GdtrSaved);
585# endif
586
587 /* make sure they're allocated */
588 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
589 Bs3MemZero(&Ctx80, sizeof(Ctx80));
590 Bs3MemZero(&Ctx81, sizeof(Ctx81));
591 Bs3MemZero(&Ctx82, sizeof(Ctx82));
592 Bs3MemZero(&Ctx83, sizeof(Ctx83));
593 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
594 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
595
596 /* Context array. */
597 apCtx8x[0] = &Ctx80;
598 apCtx8x[1] = &Ctx81;
599 apCtx8x[2] = &Ctx82;
600 apCtx8x[3] = &Ctx83;
601
602# if TMPL_BITS != 16
603 /* Allocate memory for playing around with the IDT. */
604 pbIdtCopyAlloc = NULL;
605 if (BS3_MODE_IS_PAGED(g_bTestMode))
606 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
607# endif
608
609 /*
610 * IDT entry 80 thru 83 are assigned DPLs according to the number.
611 * (We'll be useing more, but this'll do for now.)
612 */
613 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
614 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
615 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
616 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
617
618 Bs3RegCtxSave(&Ctx80);
619 Ctx80.rsp.u -= 0x300;
620 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
621# if TMPL_BITS == 16
622 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
623# elif TMPL_BITS == 32
624 g_uBs3TrapEipHint = Ctx80.rip.u32;
625# endif
626 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
627 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
628 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
629 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
630 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
631 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
632
633 /*
634 * Check that all the above gates work from ring-0.
635 */
636 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
637 {
638 g_usBs3TestStep = iCtx;
639# if TMPL_BITS == 32
640 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
641# endif
642 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
643 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
644 }
645
646 /*
647 * Check that the gate DPL checks works.
648 */
649 g_usBs3TestStep = 100;
650 for (iRing = 0; iRing <= 3; iRing++)
651 {
652 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
653 {
654 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
655 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
656# if TMPL_BITS == 32
657 g_uBs3TrapEipHint = CtxTmp.rip.u32;
658# endif
659 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
660 if (iCtx < iRing)
661 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
662 else
663 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
664 g_usBs3TestStep++;
665 }
666 }
667
668 /*
669 * Modify the gate CS value and run the handler at a different CPL.
670 * Throw RPL variations into the mix (completely ignored) together
671 * with gate presence.
672 * 1. CPL <= GATE.DPL
673 * 2. GATE.P
674 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
675 */
676 g_usBs3TestStep = 1000;
677 for (i = 0; i <= 3; i++)
678 {
679 for (iRing = 0; iRing <= 3; iRing++)
680 {
681 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
682 {
683# if TMPL_BITS == 32
684 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
685# endif
686 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
687 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
688
689 for (j = 0; j <= 3; j++)
690 {
691 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
692 for (k = 0; k < 2; k++)
693 {
694 g_usBs3TestStep++;
695 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
696 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
697 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
698 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
699 /*Bs3TrapPrintFrame(&TrapCtx);*/
700 if (iCtx < iRing)
701 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
702 else if (k == 0)
703 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
704 else if (i > iRing)
705 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
706 else
707 {
708 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
709 if (i <= iCtx && i <= iRing)
710 uExpectedCs |= i;
711 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
712 }
713 }
714 }
715
716 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
717 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
718 }
719 }
720 }
721 BS3_ASSERT(g_usBs3TestStep < 1600);
722
723 /*
724 * Various CS and SS related faults
725 *
726 * We temporarily reconfigure gate 80 and 83 with new CS selectors, the
727 * latter have a CS.DPL of 2 for testing ring transisions and SS loading
728 * without making it impossible to handle faults.
729 */
730 g_usBs3TestStep = 1600;
731 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
732 Bs3GdteTestPage00.Gen.u1Present = 0;
733 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
734 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
735
736 /* CS.PRESENT = 0 */
737 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
738 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
739 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
740 bs3CpuBasic2_FailedF("selector was accessed");
741 g_usBs3TestStep++;
742
743 /* Check that GATE.DPL is checked before CS.PRESENT. */
744 for (iRing = 1; iRing < 4; iRing++)
745 {
746 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
747 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
748 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
749 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
750 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
751 bs3CpuBasic2_FailedF("selector was accessed");
752 g_usBs3TestStep++;
753 }
754
755 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
756 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
757 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
758 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
759 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
760 bs3CpuBasic2_FailedF("CS selector was accessed");
761 g_usBs3TestStep++;
762 for (iDpl = 1; iDpl < 4; iDpl++)
763 {
764 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
765 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
766 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
767 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
768 bs3CpuBasic2_FailedF("CS selector was accessed");
769 g_usBs3TestStep++;
770 }
771
772 /* 1608: Check all the invalid CS selector types alone. */
773 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
774 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
775 {
776 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
777 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
778 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
779 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
780 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
781 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
782 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
783 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
784 g_usBs3TestStep++;
785
786 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
787 Bs3GdteTestPage00.Gen.u1Present = 0;
788 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
789 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
790 Bs3GdteTestPage00.Gen.u1Present = 1;
791 g_usBs3TestStep++;
792 }
793
794 /* Fix CS again. */
795 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
796
797 /* 1632: Test SS. */
798 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
799 {
800 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
801 uint16_t const uSavedSs2 = *puTssSs2;
802 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
803
804 /* Make the handler execute in ring-2. */
805 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
806 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
807 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
808
809 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
810 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
811 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
812 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
813 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
814 bs3CpuBasic2_FailedF("CS selector was not access");
815 g_usBs3TestStep++;
816
817 /* Create a SS.DPL=2 stack segment and check that SS2.RPL matters and
818 that we get #SS if the selector isn't present. */
819 i = 0; /* used for cycling thru invalid CS types */
820 for (k = 0; k < 10; k++)
821 {
822 /* k=0: present,
823 k=1: not-present,
824 k=2: present but very low limit,
825 k=3: not-present, low limit.
826 k=4: present, read-only.
827 k=5: not-present, read-only.
828 k=6: present, code-selector.
829 k=7: not-present, code-selector.
830 k=8: present, read-write / no access + system (=LDT).
831 k=9: not-present, read-write / no access + system (=LDT).
832 */
833 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
834 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
835 if (k >= 8)
836 {
837 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
838 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
839 }
840 else if (k >= 6)
841 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
842 else if (k >= 4)
843 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
844 else if (k >= 2)
845 {
846 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
847 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
848 Bs3GdteTestPage03.Gen.u1Granularity = 0;
849 }
850
851 for (iDpl = 0; iDpl < 4; iDpl++)
852 {
853 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
854
855 for (iRpl = 0; iRpl < 4; iRpl++)
856 {
857 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
858 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
859 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
860 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
861 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
862 if (iRpl != 2 || iRpl != iDpl || k >= 4)
863 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
864 else if (k != 0)
865 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
866 k == 2 /*f486ResumeFlagHint*/);
867 else
868 {
869 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
870 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
871 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
872 }
873 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
874 bs3CpuBasic2_FailedF("CS selector was not access");
875 if ( TrapCtx.bXcpt == 0x83
876 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
877 {
878 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
879 bs3CpuBasic2_FailedF("SS selector was not accessed");
880 }
881 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
882 bs3CpuBasic2_FailedF("SS selector was accessed");
883 g_usBs3TestStep++;
884
885 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
886 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
887 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
888 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
889 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
890 g_usBs3TestStep++;
891
892 /* +2: Check the CS.DPL check is done before the SS ones. Restoring the
893 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
894 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
895 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
896 g_usBs3TestStep++;
897
898 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
899 Bs3GdteTestPage02.Gen.u1Present = 0;
900 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
901 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
902 Bs3GdteTestPage02.Gen.u1Present = 1;
903 g_usBs3TestStep++;
904
905 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
906 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
907 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
908 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
909 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
910 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
911 Bs3GdteTestPage02.Gen.u1DescType = 1;
912 g_usBs3TestStep++;
913
914 /* +5: Now, make the CS selector limit too small and that it triggers after SS trouble.
915 The 286 had a simpler approach to these GP(0). */
916 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
917 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
918 Bs3GdteTestPage02.Gen.u1Granularity = 0;
919 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
920 if (f286)
921 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
922 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
923 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
924 else if (k != 0)
925 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
926 else
927 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
928 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
929 g_usBs3TestStep++;
930 }
931 }
932 }
933
934 /* Check all the invalid SS selector types alone. */
935 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
936 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
937 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
938 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
939 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
940 g_usBs3TestStep++;
941 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
942 {
943 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
944 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
945 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
946 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
947 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
948 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
949 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
950 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
951 g_usBs3TestStep++;
952 }
953
954 /*
955 * Continue the SS experiments with a expand down segment. We'll use
956 * the same setup as we already have with gate 83h being DPL and
957 * having CS.DPL=2.
958 *
959 * Expand down segments are weird. The valid area is practically speaking
960 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
961 * addresses from 0xffff thru 0x6001.
962 *
963 * So, with expand down segments we can more easily cut partially into the
964 * pushing of the iret frame and trigger more interesting behavior than
965 * with regular "expand up" segments where the whole pushing area is either
966 * all fine or not not fine.
967 */
968 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
969 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
970 Bs3GdteTestPage03.Gen.u2Dpl = 2;
971 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
972 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
973
974 /* First test, limit = max --> no bytes accessible --> #GP */
975 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
976 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
977
978 /* Second test, limit = 0 --> all by zero byte accessible --> works */
979 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
980 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
981 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
982 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
983
984 /* Modify the gate handler to be a dummy that immediately does UD2
985 and triggers #UD, then advance the limit down till we get the #UD. */
986 Bs3GdteTestPage03.Gen.u1Granularity = 0;
987
988 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
989 if (g_f16BitSys)
990 {
991 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
992 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
993 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
994 }
995 else
996 {
997 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
998 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
999 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1000 }
1001 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1002 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1003 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1004 CtxTmp2.bCpl = 2;
1005
1006 /* test run. */
1007 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1008 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1009 g_usBs3TestStep++;
1010
1011 /* Real run. */
1012 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1013 while (i-- > 0)
1014 {
1015 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1016 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1017 if (i > 0)
1018 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1019 else
1020 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1021 g_usBs3TestStep++;
1022 }
1023
1024 /* Do a run where we do the same-ring kind of access. */
1025 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1026 if (g_f16BitSys)
1027 {
1028 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1029 i = 2*3 - 1;
1030 }
1031 else
1032 {
1033 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1034 i = 4*3 - 1;
1035 }
1036 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1037 CtxTmp2.ds = CtxTmp.ds;
1038 CtxTmp2.es = CtxTmp.es;
1039 CtxTmp2.fs = CtxTmp.fs;
1040 CtxTmp2.gs = CtxTmp.gs;
1041 while (i-- > 0)
1042 {
1043 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1044 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1045 if (i > 0)
1046 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1047 else
1048 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1049 g_usBs3TestStep++;
1050 }
1051
1052 *puTssSs2 = uSavedSs2;
1053 paIdt[0x83 << cIdteShift] = SavedGate83;
1054 }
1055 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1056 BS3_ASSERT(g_usBs3TestStep < 3000);
1057
1058 /*
1059 * Modify the gate CS value with a conforming segment.
1060 */
1061 g_usBs3TestStep = 3000;
1062 for (i = 0; i <= 3; i++) /* cs.dpl */
1063 {
1064 for (iRing = 0; iRing <= 3; iRing++)
1065 {
1066 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1067 {
1068 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1069 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1070# if TMPL_BITS == 32
1071 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1072# endif
1073
1074 for (j = 0; j <= 3; j++) /* rpl */
1075 {
1076 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1077 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1078 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1079 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1080 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1081 /*Bs3TrapPrintFrame(&TrapCtx);*/
1082 g_usBs3TestStep++;
1083 if (iCtx < iRing)
1084 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1085 else if (i > iRing)
1086 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1087 else
1088 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1089 }
1090 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1091 }
1092 }
1093 }
1094 BS3_ASSERT(g_usBs3TestStep < 3500);
1095
1096 /*
1097 * The gates must be 64-bit in long mode.
1098 */
1099 if (cIdteShift != 0)
1100 {
1101 g_usBs3TestStep = 3500;
1102 for (i = 0; i <= 3; i++)
1103 {
1104 for (iRing = 0; iRing <= 3; iRing++)
1105 {
1106 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1107 {
1108 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1109 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1110
1111 for (j = 0; j < 2; j++)
1112 {
1113 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1114 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1115 g_usBs3TestStep++;
1116 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1117 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1118 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1119 /*Bs3TrapPrintFrame(&TrapCtx);*/
1120 if (iCtx < iRing)
1121 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1122 else
1123 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1124 }
1125 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1126 }
1127 }
1128 }
1129 BS3_ASSERT(g_usBs3TestStep < 4000);
1130 }
1131
1132 /*
1133 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1134 */
1135 g_usBs3TestStep = 5000;
1136 i = (0x80 << (cIdteShift + 3)) - 1;
1137 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1138 k = (0x83 << (cIdteShift + 3)) - 1;
1139 for (; i <= k; i++, g_usBs3TestStep++)
1140 {
1141 Idtr = IdtrSaved;
1142 Idtr.cbIdt = i;
1143 ASMSetIDTR(&Idtr);
1144 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1145 if (i < j)
1146 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1147 else
1148 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1149 }
1150 ASMSetIDTR(&IdtrSaved);
1151 BS3_ASSERT(g_usBs3TestStep < 5100);
1152
1153# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1154
1155 /*
1156 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1157 * first page and 0x81 is on the second page. We need proceed to move
1158 * it down byte by byte to check that any inaccessible byte means #PF.
1159 *
1160 * Note! We must reload the alternative IDTR for each run as any kind of
1161 * printing to the string (like error reporting) will cause a switch
1162 * to real mode and back, reloading the default IDTR.
1163 */
1164 g_usBs3TestStep = 5200;
1165 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1166 {
1167 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1168 for (j = 0; j < cbIdte; j++)
1169 {
1170 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1171 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1172
1173 Idtr.cbIdt = IdtrSaved.cbIdt;
1174 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1175
1176 ASMSetIDTR(&Idtr);
1177 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1178 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1179 g_usBs3TestStep++;
1180
1181 ASMSetIDTR(&Idtr);
1182 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1183 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1184 g_usBs3TestStep++;
1185
1186 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1187 if (RT_SUCCESS(rc))
1188 {
1189 ASMSetIDTR(&Idtr);
1190 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1191 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1192 g_usBs3TestStep++;
1193
1194 ASMSetIDTR(&Idtr);
1195 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1196 if (f486Plus)
1197 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1198 else
1199 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1200 g_usBs3TestStep++;
1201
1202 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1203
1204 /* Check if that the entry type is checked after the whole IDTE has been cleared for #PF. */
1205 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1206 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1207 if (RT_SUCCESS(rc))
1208 {
1209 ASMSetIDTR(&Idtr);
1210 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1211 if (f486Plus)
1212 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1213 else
1214 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1215 g_usBs3TestStep++;
1216
1217 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1218 }
1219 }
1220 else
1221 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1222
1223 ASMSetIDTR(&IdtrSaved);
1224 }
1225 }
1226
1227 /*
1228 * The read/write and user/supervisor bits the IDT PTEs are irrelevant.
1229 */
1230 g_usBs3TestStep = 5300;
1231 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1232 {
1233 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1234 Idtr.cbIdt = IdtrSaved.cbIdt;
1235 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1236
1237 ASMSetIDTR(&Idtr);
1238 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1239 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1240 g_usBs3TestStep++;
1241
1242 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1243 if (RT_SUCCESS(rc))
1244 {
1245 ASMSetIDTR(&Idtr);
1246 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1247 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1248 g_usBs3TestStep++;
1249
1250 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1251 }
1252 ASMSetIDTR(&IdtrSaved);
1253 }
1254
1255 /*
1256 * Check that CS.u1Accessed is set to 1. Use the test page selector #0 and #3 together
1257 * with interrupt gates 80h and 83h, respectively.
1258 */
1259/** @todo Throw in SS.u1Accessed too. */
1260 g_usBs3TestStep = 5400;
1261 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1262 {
1263 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1264 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1265 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1266
1267 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1268 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1269 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1270
1271 /* Check that the CS.A bit is being set on a general basis and that
1272 the special CS values work with out generic handler code. */
1273 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1274 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1275 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1276 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1277 g_usBs3TestStep++;
1278
1279 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1280 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1281 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1282 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1283 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1284 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1285 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1286 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1287 g_usBs3TestStep++;
1288
1289 /*
1290 * Now check that setting CS.u1Access to 1 does __NOT__ trigger a page
1291 * fault due to the RW bit being zero.
1292 * (We check both with with and without the WP bit if 80486.)
1293 */
1294 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1295 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1296
1297 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1298 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1299 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1300 if (RT_SUCCESS(rc))
1301 {
1302 /* ring-0 handler */
1303 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1304 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1305 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1306 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1307 g_usBs3TestStep++;
1308
1309 /* ring-3 handler */
1310 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1311 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1312 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1313 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1314 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1315 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1316 g_usBs3TestStep++;
1317
1318 /* clear WP and repeat the above. */
1319 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1320 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1321 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1322 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1323
1324 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1325 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1326 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1327 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1328 g_usBs3TestStep++;
1329
1330 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1331 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1332 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1333 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!n", Bs3GdteTestPage03.Gen.u4Type);
1334 g_usBs3TestStep++;
1335
1336 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1337 }
1338
1339 ASMSetCR0(uCr0Saved);
1340
1341 /*
1342 * While we're here, check that if the CS GDT entry is a non-present
1343 * page we do get a #PF with the rigth error code and CR2.
1344 */
1345 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1346 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1347 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1348 if (RT_SUCCESS(rc))
1349 {
1350 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1351 if (f486Plus)
1352 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1353 else
1354 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1355 g_usBs3TestStep++;
1356
1357 /* Do it from ring-3 to check ErrCd, which doesn't set X86_TRAP_PF_US it turns out. */
1358 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1359 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1360 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1361
1362 if (f486Plus)
1363 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1364 else
1365 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1366 g_usBs3TestStep++;
1367
1368 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1369 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1370 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1371 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1372 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1373 }
1374
1375 /* restore */
1376 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1377 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1378 }
1379
1380# endif /* 32 || 64*/
1381
1382 /*
1383 * Check broad EFLAGS effects.
1384 */
1385 g_usBs3TestStep = 5600;
1386 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1387 {
1388 for (iRing = 0; iRing < 4; iRing++)
1389 {
1390 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1391 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1392
1393 /* all set */
1394 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1395 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1396 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1397 if (f486Plus)
1398 CtxTmp.rflags.u32 |= X86_EFL_AC;
1399 if (f486Plus && !g_f16BitSys)
1400 CtxTmp.rflags.u32 |= X86_EFL_RF;
1401 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1402 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1403 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1404 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1405
1406 if (iCtx >= iRing)
1407 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1408 else
1409 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1410 uExpected = CtxTmp.rflags.u32
1411 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1412 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1413 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1414 if (TrapCtx.fHandlerRfl != uExpected)
1415 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1416 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1417 g_usBs3TestStep++;
1418
1419 /* all cleared */
1420 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1421 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1422 else
1423 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1424 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1425 if (iCtx >= iRing)
1426 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1427 else
1428 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1429 uExpected = CtxTmp.rflags.u32;
1430 if (TrapCtx.fHandlerRfl != uExpected)
1431 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1432 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1433 g_usBs3TestStep++;
1434 }
1435 }
1436
1437/** @todo CS.LIMIT / canonical(CS) */
1438
1439
1440 /*
1441 * Check invalid gate types.
1442 */
1443 g_usBs3TestStep = 32000;
1444 for (iRing = 0; iRing <= 3; iRing++)
1445 {
1446 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1447 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1448 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1449 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1450 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1451 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1452 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1453 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1454 /*286:*/ 12, 14, 15 };
1455 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1456 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1457 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1458
1459
1460 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1461 {
1462 unsigned iType;
1463
1464 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1465 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1466# if TMPL_BITS == 32
1467 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1468# endif
1469 for (iType = 0; iType < cInvTypes; iType++)
1470 {
1471 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1472 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1473 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1474
1475 for (i = 0; i < 4; i++)
1476 {
1477 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1478 {
1479 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1480 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1481 : s_auCSes[j] | i;
1482 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1483 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1484 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1485 g_usBs3TestStep++;
1486 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1487
1488 /* Mark it not-present to check that invalid type takes precedence. */
1489 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1490 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1491 g_usBs3TestStep++;
1492 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1493 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1494 }
1495 }
1496
1497 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1498 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1499 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1500 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1501 }
1502 }
1503 }
1504 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1505
1506
1507 /** @todo
1508 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1509 * - Quickly generate all faults.
1510 * - All the peculiarities v8086.
1511 */
1512
1513# if TMPL_BITS != 16
1514 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1515# endif
1516}
1517#endif /* convert me */
1518
1519
/**
 * Worker for bs3CpuBasic2_RaiseXcpt11 (\#AC) that runs the common-mode test
 * snippets in every relevant ring, both with and without EFLAGS.AC set.
 *
 * Each snippet accesses memory at ds:xBX and ends with an UD2; we slide the
 * memory operand across a cache line so both plain misalignment and the
 * split-lock case are covered.
 *
 * @param   bMode       The CPU mode being tested (BS3_MODE_XXX); real mode
 *                      limits us to a single "ring".
 * @param   pbBuf       Cache-line aligned scratch buffer the snippets access.
 * @param   cbCacheLine The assumed cache line size, used to size the sliding
 *                      window so a cache line boundary is always crossed.
 * @param   fAm         Whether CR0.AM is currently set, i.e. whether \#AC can
 *                      actually be raised by the CPU.
 * @param   pCmn        Test snippet table matching the current code bitness.
 */
static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm,
                                           BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
{
    BS3TRAPFRAME TrapCtx;
    BS3REGCTX Ctx;
    BS3REGCTX CtxUdExpected;
    uint8_t const cRings = bMode == BS3_MODE_RM ? 1 : 4;  /* real mode has no protection rings */
    uint8_t iRing;
    uint16_t iTest;

    /* make sure they're allocated */
    Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
    Bs3MemZero(&Ctx, sizeof(Ctx));
    Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));

    /*
     * Test all relevant rings.
     *
     * The memory operand is ds:xBX, so point it to pbBuf.
     * The test snippets mostly use xAX as operand, with the div
     * one also using xDX, so make sure they make some sense.
     */
    Bs3RegCtxSaveEx(&Ctx, bMode, 0);

    for (iRing = 0; iRing < cRings; iRing++)
    {
        uint32_t uEbx;
        uint8_t fAc;

        Bs3RegCtxConvertToRingX(&Ctx, iRing);

        Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
        uEbx = Ctx.rbx.u32;  /* remember the base so we can re-derive rbx per misalignment offset below */

        Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
                  ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
        Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */

        Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));

        /*
         * AC flag loop.
         */
        for (fAc = 0; fAc < 2; fAc++)
        {
            if (fAc)
                Ctx.rflags.u32 |= X86_EFL_AC;
            else
                Ctx.rflags.u32 &= ~X86_EFL_AC;

            /*
             * Loop over the test snippets.
             */
            for (iTest = 0; iTest < pCmn->cEntries; iTest++)
            {
                uint8_t const fOp = pCmn->paEntries[iTest].fOp;
                uint8_t const cbMem = pCmn->paEntries[iTest].cbMem;
                uint16_t const cbMax = cbCacheLine + cbMem;
                uint16_t offMem;
                /* The byte immediately preceding the snippet entry point stores the
                   distance to the trailing UD2, which is where a successful run stops. */
                uint8_t BS3_FAR *poffUd = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
                Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
                CtxUdExpected.rip = Ctx.rip;
                CtxUdExpected.rip.u = Ctx.rip.u + poffUd[-1];
                CtxUdExpected.cs = Ctx.cs;
                CtxUdExpected.rflags = Ctx.rflags;
if (bMode == BS3_MODE_RM) CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? */
                CtxUdExpected.rdx = Ctx.rdx;
                CtxUdExpected.rax = Ctx.rax;
                /* Load snippets read 0x01-filled memory, so predict the loaded value. */
                if (fOp & MYOP_LD)
                {
                    switch (cbMem)
                    {
                        case 2:
                            CtxUdExpected.rax.u16 = 0x0101;
                            break;
                        case 4:
                            CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
                            break;
                        case 8:
                            CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
                            break;
                    }
                }

                /*
                 * Buffer misalignment loop.
                 * Note! We must make sure to cross a cache line here to make sure
                 *       to cover the split-lock scenario. (The buffer is cache
                 *       line aligned.)
                 */
                for (offMem = 0; offMem < cbMax; offMem++)
                {
                    unsigned offBuf = cbMax + cbMem * 2;
                    while (offBuf-- > 0)
                        pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */

                    CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB (cur stack, so okay). */
                    if (BS3_MODE_IS_16BIT_SYS(bMode))
                        g_uBs3TrapEipHint = Ctx.rip.u32;

                    //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32\n",
                    //              iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32);

                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);

                    /* #AC only fires at CPL 3 with CR0.AM + EFLAGS.AC set and a
                       misaligned access; in every other combination we expect the
                       snippet to run to completion (i.e. hit the UD2). */
                    if (!fAm || iRing != 3 || !fAc || !(offMem & (cbMem - 1))) /** @todo assumes cbMem is a power of two! */
                    {
                        if (fOp & MYOP_EFL)
                        {
                            CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
                            CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
                        }
                        if (fOp == MYOP_LD_DIV)
                        {
                            CtxUdExpected.rax = TrapCtx.Ctx.rax;
                            CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
                        }
                        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                    }
                    else
                    {
                        bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx);
                    }

                    g_usBs3TestStep++;
                }
            }
        }
    }
}
1650
1651
1652/**
1653 * Entrypoint for \#AC tests.
1654 *
1655 * @returns 0 or BS3TESTDOMODE_SKIPPED.
1656 * @param bMode The CPU mode we're testing.
1657 *
1658 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
1659 * with control registers and such.
1660 */
1661BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
1662{
1663 unsigned cbCacheLine = 128; /** @todo detect */
1664 uint8_t abBuf[4096 /** @todo 512 - but that went crazy in real mode; now it's long mode going wrong. */];
1665 uint8_t BS3_FAR *pbBuf;
1666 unsigned idxCmnModes;
1667 uint32_t fCr0;
1668 Bs3MemZero(&abBuf, sizeof(abBuf));
1669
1670 /*
1671 * Skip if 386 or older.
1672 */
1673 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
1674 {
1675 Bs3TestSkipped("#AC test requires 486 or later");
1676 return BS3TESTDOMODE_SKIPPED;
1677 }
1678
1679 bs3CpuBasic2_SetGlobals(bMode);
1680
1681 /* Get us a 64-byte aligned buffer. */
1682 pbBuf = abBuf;
1683 if (BS3_FP_OFF(pbBuf) & (cbCacheLine - 1))
1684 pbBuf = &abBuf[cbCacheLine - BS3_FP_OFF(pbBuf) & (cbCacheLine - 1)];
1685 //Bs3TestPrintf("pbBuf=%p\n", pbBuf);
1686
1687 /* Find the g_aCmnModes entry. */
1688 idxCmnModes = 0;
1689 while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
1690 idxCmnModes++;
1691 //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);
1692
1693 /* First round is w/o aligment checks enabled. */
1694 fCr0 = Bs3RegGetCr0();
1695 BS3_ASSERT(!(fCr0 & X86_CR0_AM));
1696 Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
1697 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, &g_aCmnModes[idxCmnModes]);
1698
1699#if 1
1700 /* The second round is with aligment checks enabled. */
1701 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1702 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, &g_aCmnModes[idxCmnModes]);
1703#endif
1704
1705 Bs3RegSetCr0(fCr0);
1706 return 0;
1707}
1708
1709
1710/**
1711 * Executes one round of SIDT and SGDT tests using one assembly worker.
1712 *
1713 * This is written with driving everything from the 16-bit or 32-bit worker in
1714 * mind, i.e. not assuming the test bitcount is the same as the current.
1715 */
1716static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1717 uint8_t const *pbExpected)
1718{
1719 BS3TRAPFRAME TrapCtx;
1720 BS3REGCTX Ctx;
1721 BS3REGCTX CtxUdExpected;
1722 BS3REGCTX TmpCtx;
1723 uint8_t const cbBuf = 8*2; /* test buffer area */
1724 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1725 uint8_t BS3_FAR *pbBuf = abBuf;
1726 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1727 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1728 uint8_t bFiller;
1729 int off;
1730 int off2;
1731 unsigned cb;
1732 uint8_t BS3_FAR *pbTest;
1733
1734 /* make sure they're allocated */
1735 Bs3MemZero(&Ctx, sizeof(Ctx));
1736 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1737 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1738 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1739 Bs3MemZero(&abBuf, sizeof(abBuf));
1740
1741 /* Create a context, give this routine some more stack space, point the context
1742 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1743 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1744 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1745 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1746 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1747 g_uBs3TrapEipHint = Ctx.rip.u32;
1748 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1749 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1750
1751 /* For successful SIDT attempts, we'll stop at the UD2. */
1752 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1753 CtxUdExpected.rip.u += pWorker->cbInstr;
1754
1755 /*
1756 * Check that it works at all and that only bytes we expect gets written to.
1757 */
1758 /* First with zero buffer. */
1759 Bs3MemZero(abBuf, sizeof(abBuf));
1760 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1761 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1762 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1763 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1764 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1765 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1766 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1767 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1768 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1769 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1770 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1771 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1772 g_usBs3TestStep++;
1773
1774 /* Again with a buffer filled with a byte not occuring in the previous result. */
1775 bFiller = 0x55;
1776 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1777 bFiller++;
1778 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1779 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1780 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1781
1782 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1783 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1784 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1785 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1786 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1787 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1788 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1789 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1790 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1791 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1792 g_usBs3TestStep++;
1793
1794 /*
1795 * Slide the buffer along 8 bytes to cover misalignment.
1796 */
1797 for (off = 0; off < 8; off++)
1798 {
1799 pbBuf = &abBuf[off];
1800 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1801 CtxUdExpected.rbx.u = Ctx.rbx.u;
1802
1803 /* First with zero buffer. */
1804 Bs3MemZero(abBuf, sizeof(abBuf));
1805 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1806 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1807 if (off > 0 && !ASMMemIsZero(abBuf, off))
1808 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1809 cbIdtr, off, off + cbBuf, abBuf);
1810 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1811 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1812 cbIdtr, off, off + cbBuf, abBuf);
1813 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1814 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1815 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1816 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1817 g_usBs3TestStep++;
1818
1819 /* Again with a buffer filled with a byte not occuring in the previous result. */
1820 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1821 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1822 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1823 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1824 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1825 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1826 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1827 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1828 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1829 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1830 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1831 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1832 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1833 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
1834 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1835 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1836 g_usBs3TestStep++;
1837 }
1838 pbBuf = abBuf;
1839 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1840 CtxUdExpected.rbx.u = Ctx.rbx.u;
1841
1842 /*
1843 * Play with the selector limit if the target mode supports limit checking
1844 * We use BS3_SEL_TEST_PAGE_00 for this
1845 */
1846 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
1847 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
1848 {
1849 uint16_t cbLimit;
1850 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
1851 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
1852 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
1853 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
1854 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
1855 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
1856
1857 if (pWorker->fSs)
1858 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
1859 else
1860 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
1861
1862 /* Expand up (normal). */
1863 for (off = 0; off < 8; off++)
1864 {
1865 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
1866 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
1867 {
1868 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
1869 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1870 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1871 if (off + cbIdtr <= cbLimit + 1)
1872 {
1873 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1874 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1875 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1876 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1877 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1878 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1879 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1880 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
1881 }
1882 else
1883 {
1884 if (pWorker->fSs)
1885 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
1886 else
1887 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1888 if (off + 2 <= cbLimit + 1)
1889 {
1890 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
1891 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1892 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1893 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
1894 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
1895 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
1896 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1897 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1898 }
1899 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1900 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1901 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1902 }
1903
1904 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1905 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1906 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1907 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
1908 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1909 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1910
1911 g_usBs3TestStep++;
1912 }
1913 }
1914
1915 /* Expand down (weird). Inverted valid area compared to expand up,
1916 so a limit of zero give us a valid range for 0001..0ffffh (instead of
1917 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
1918 means one valid byte at 0ffffh, and a limit of 0ffffh means none
1919 (because in a normal expand up the 0ffffh means all 64KB are
1920 accessible). */
1921 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
1922 for (off = 0; off < 8; off++)
1923 {
1924 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
1925 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
1926 {
1927 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
1928 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1929 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1930
1931 if (off > cbLimit)
1932 {
1933 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1934 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1935 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1936 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1937 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1938 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1939 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1940 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
1941 }
1942 else
1943 {
1944 if (pWorker->fSs)
1945 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
1946 else
1947 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1948 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1949 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1950 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1951 }
1952
1953 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1954 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1955 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1956 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
1957 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1958 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1959
1960 g_usBs3TestStep++;
1961 }
1962 }
1963
1964 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1965 CtxUdExpected.rbx.u = Ctx.rbx.u;
1966 CtxUdExpected.ss = Ctx.ss;
1967 CtxUdExpected.ds = Ctx.ds;
1968 }
1969
1970 /*
1971 * Play with the paging.
1972 */
1973 if ( BS3_MODE_IS_PAGED(bTestMode)
1974 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
1975 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
1976 {
1977 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
1978
1979 /*
1980 * Slide the buffer towards the trailing guard page. We'll observe the
1981 * first word being written entirely separately from the 2nd dword/qword.
1982 */
1983 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
1984 {
1985 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
1986 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
1987 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1988 if (off + cbIdtr <= X86_PAGE_SIZE)
1989 {
1990 CtxUdExpected.rbx = Ctx.rbx;
1991 CtxUdExpected.ss = Ctx.ss;
1992 CtxUdExpected.ds = Ctx.ds;
1993 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1994 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
1995 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
1996 }
1997 else
1998 {
1999 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2000 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
2001 if ( off <= X86_PAGE_SIZE - 2
2002 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2003 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2004 pbExpected, &pbTest[off], off);
2005 if ( off < X86_PAGE_SIZE - 2
2006 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2007 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2008 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2009 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2010 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2011 }
2012 g_usBs3TestStep++;
2013 }
2014
2015 /*
2016 * Now, do it the other way around. It should look normal now since writing
2017 * the limit will #PF first and nothing should be written.
2018 */
2019 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2020 {
2021 Bs3MemSet(pbTest, bFiller, 48);
2022 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2023 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2024 if (off >= 0)
2025 {
2026 CtxUdExpected.rbx = Ctx.rbx;
2027 CtxUdExpected.ss = Ctx.ss;
2028 CtxUdExpected.ds = Ctx.ds;
2029 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2030 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2031 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2032 }
2033 else
2034 {
2035 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0), uFlatTest + off);
2036 if ( -off < cbIdtr
2037 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2038 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2039 bFiller, cbIdtr + off, pbTest, off);
2040 }
2041 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2042 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2043 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2044 g_usBs3TestStep++;
2045 }
2046
2047 /*
2048 * Combine paging and segment limit and check ordering.
2049 * This is kind of interesting here since it the instruction seems to
2050 * be doing two separate writes.
2051 */
2052 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2053 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2054 {
2055 uint16_t cbLimit;
2056
2057 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2058 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2059 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2060 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2061 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2062
2063 if (pWorker->fSs)
2064 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2065 else
2066 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2067
2068 /* Expand up (normal), approaching tail guard page. */
2069 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2070 {
2071 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2072 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2073 {
2074 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2075 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2076 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2077 if (off + cbIdtr <= cbLimit + 1)
2078 {
2079 /* No #GP, but maybe #PF. */
2080 if (off + cbIdtr <= X86_PAGE_SIZE)
2081 {
2082 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2083 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2084 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2085 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2086 }
2087 else
2088 {
2089 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2090 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
2091 if ( off <= X86_PAGE_SIZE - 2
2092 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2093 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2094 pbExpected, &pbTest[off], off);
2095 cb = X86_PAGE_SIZE - off - 2;
2096 if ( off < X86_PAGE_SIZE - 2
2097 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2098 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2099 bFiller, cb, &pbTest[off + 2], off);
2100 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2101 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2102 }
2103 }
2104 else if (off + 2 <= cbLimit + 1)
2105 {
2106 /* [ig]tr.limit writing does not cause #GP, but may cause #PG, if not writing the base causes #GP. */
2107 if (off <= X86_PAGE_SIZE - 2)
2108 {
2109 if (pWorker->fSs)
2110 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2111 else
2112 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2113 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2114 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2115 pbExpected, &pbTest[off], off);
2116 cb = X86_PAGE_SIZE - off - 2;
2117 if ( off < X86_PAGE_SIZE - 2
2118 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2119 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2120 bFiller, cb, &pbTest[off + 2], off);
2121 }
2122 else
2123 {
2124 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2125 uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
2126 if ( off < X86_PAGE_SIZE
2127 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2128 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2129 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2130 }
2131 }
2132 else
2133 {
2134 /* #GP/#SS on limit. */
2135 if (pWorker->fSs)
2136 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2137 else
2138 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2139 if ( off < X86_PAGE_SIZE
2140 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2141 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2142 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2143 }
2144
2145 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2146 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2147 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2148 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2149
2150 g_usBs3TestStep++;
2151
2152 /* Set DS to 0 and check that we get #GP(0). */
2153 if (!pWorker->fSs)
2154 {
2155 Ctx.ds = 0;
2156 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2157 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2158 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2159 g_usBs3TestStep++;
2160 }
2161 }
2162 }
2163
2164 /* Expand down. */
2165 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2166 uFlatTest -= X86_PAGE_SIZE;
2167
2168 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2169 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2170 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2171 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2172
2173 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2174 {
2175 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2176 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2177 {
2178 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2179 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2180 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2181 if (cbLimit < off && off >= X86_PAGE_SIZE)
2182 {
2183 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2184 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2185 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2186 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2187 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2188 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2189 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2190 cbIdtr, off, cbLimit, bFiller, cb, pbTest[off + cbIdtr]);
2191 }
2192 else
2193 {
2194 if (cbLimit < off && off < X86_PAGE_SIZE)
2195 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2196 uFlatTest + off);
2197 else if (pWorker->fSs)
2198 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2199 else
2200 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2201 cb = cbIdtr*2;
2202 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2203 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2204 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE]);
2205 }
2206 g_usBs3TestStep++;
2207 }
2208 }
2209
2210 pbTest += X86_PAGE_SIZE;
2211 uFlatTest += X86_PAGE_SIZE;
2212 }
2213
2214 Bs3MemGuardedTestPageFree(pbTest);
2215 }
2216
2217 /*
2218 * Check non-canonical 64-bit space.
2219 */
2220 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2221 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2222 {
2223 /* Make our references relative to the gap. */
2224 pbTest += g_cbBs3PagingOneCanonicalTrap;
2225
2226 /* Hit it from below. */
2227 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2228 {
2229 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2230 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2231 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2232 if (off + cbIdtr <= 0)
2233 {
2234 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2235 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2236 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2237 }
2238 else
2239 {
2240 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2241 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2242 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2243 off2 = off <= -2 ? 2 : 0;
2244 cb = cbIdtr - off2;
2245 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2246 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2247 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2248 }
2249 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2250 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2251 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2252 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2253 }
2254
2255 /* Hit it from above. */
2256 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2257 {
2258 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2259 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2260 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2261 if (off >= 0)
2262 {
2263 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2264 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2265 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2266 }
2267 else
2268 {
2269 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2270 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2271 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2272 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2273 }
2274 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2275 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2276 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2277 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2278 }
2279
2280 }
2281}
2282
2283
2284static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2285 uint8_t const *pbExpected)
2286{
2287 unsigned idx;
2288 unsigned bRing;
2289 unsigned iStep = 0;
2290
2291 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2292 test and don't want to bother with double faults. */
2293 for (bRing = 0; bRing <= 3; bRing++)
2294 {
2295 for (idx = 0; idx < cWorkers; idx++)
2296 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2297 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2298 {
2299 g_usBs3TestStep = iStep;
2300 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2301 iStep += 1000;
2302 }
2303 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2304 break;
2305 }
2306}
2307
2308
2309BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2310{
2311 union
2312 {
2313 RTIDTR Idtr;
2314 uint8_t ab[16];
2315 } Expected;
2316
2317 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2318 bs3CpuBasic2_SetGlobals(bMode);
2319
2320 /*
2321 * Pass to common worker which is only compiled once per mode.
2322 */
2323 Bs3MemZero(&Expected, sizeof(Expected));
2324 ASMGetIDTR(&Expected.Idtr);
2325 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2326
2327 /*
2328 * Re-initialize the IDT.
2329 */
2330 Bs3TrapReInit();
2331 return 0;
2332}
2333
2334
/**
 * Tests SGDT, optionally aliasing the GDT high up in the address space first
 * (paged modes only) so the base-address readback gets exercised.
 *
 * @returns 0.
 * @param   bMode   The test mode (BS3_MODE_XXX).
 */
BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
{
    uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;    /* Original GDT base; restored before returning. */
    uint64_t uNew = 0;                              /* Non-zero only if the alias below succeeded. */
    union
    {
        RTGDTR  Gdtr;
        uint8_t ab[16];
    } Expected;

    //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
    bs3CpuBasic2_SetGlobals(bMode);

    /*
     * If paged mode, try push the GDT way up.
     */
    Bs3MemZero(&Expected, sizeof(Expected));
    ASMGetGDTR(&Expected.Gdtr);
    if (BS3_MODE_IS_PAGED(bMode))
    {
/** @todo loading non-canonical base addresses. */
        int rc;
        uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
        uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;    /* keep the page offset so the alias lines up */
        rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
        if (RT_SUCCESS(rc))
        {
            Bs3Lgdt_Gdt.uAddr = uNew;
            Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
            ASMGetGDTR(&Expected.Gdtr);
            /* On a 64-bit system but with non-64-bit code, ASMGetGDTR doesn't
               capture the high base dword, so patch the expectation by hand. */
            if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
                *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
        }
    }

    /*
     * Pass to common worker which is only compiled once per mode.
     */
    bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);

    /*
     * Unalias the GDT.
     */
    if (uNew != 0)
    {
        Bs3Lgdt_Gdt.uAddr = uOrgAddr;
        Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
        Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
    }

    /*
     * Re-initialize the IDT.
     */
    Bs3TrapReInit();
    return 0;
}
2391
2392
2393
2394/*
2395 * LIDT & LGDT
2396 */
2397
2398/**
2399 * Executes one round of LIDT and LGDT tests using one assembly worker.
2400 *
2401 * This is written with driving everything from the 16-bit or 32-bit worker in
2402 * mind, i.e. not assuming the test bitcount is the same as the current.
2403 */
static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
                                       uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
{
    /* 64-bit limit+base pairs; fGP marks non-canonical bases that must #GP
       when loaded. */
    static const struct
    {
        bool fGP;
        uint16_t cbLimit;
        uint64_t u64Base;
    } s_aValues64[] =
    {
        { false, 0x0000, UINT64_C(0x0000000000000000) },
        { false, 0x0001, UINT64_C(0x0000000000000001) },
        { false, 0x0002, UINT64_C(0x0000000000000010) },
        { false, 0x0003, UINT64_C(0x0000000000000123) },
        { false, 0x0004, UINT64_C(0x0000000000001234) },
        { false, 0x0005, UINT64_C(0x0000000000012345) },
        { false, 0x0006, UINT64_C(0x0000000000123456) },
        { false, 0x0007, UINT64_C(0x0000000001234567) },
        { false, 0x0008, UINT64_C(0x0000000012345678) },
        { false, 0x0009, UINT64_C(0x0000000123456789) },
        { false, 0x000a, UINT64_C(0x000000123456789a) },
        { false, 0x000b, UINT64_C(0x00000123456789ab) },
        { false, 0x000c, UINT64_C(0x0000123456789abc) },
        { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
        { false, 0xffff, UINT64_C(0x00007fffffffffff) },
        {  true, 0xf3f1, UINT64_C(0x0000800000000000) },
        {  true, 0x0000, UINT64_C(0x0000800000000000) },
        {  true, 0x0000, UINT64_C(0x0000800000000333) },
        {  true, 0x00f0, UINT64_C(0x0001000000000000) },
        {  true, 0x0ff0, UINT64_C(0x0012000000000000) },
        {  true, 0x0eff, UINT64_C(0x0123000000000000) },
        {  true, 0xe0fe, UINT64_C(0x1234000000000000) },
        {  true, 0x00ad, UINT64_C(0xffff300000000000) },
        {  true, 0x0000, UINT64_C(0xffff7fffffffffff) },
        {  true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
        { false, 0x5678, UINT64_C(0xffff800000000000) },
        { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
        { false, 0x1221, UINT64_C(0xffffffffffffffff) },
        { false, 0x1221, UINT64_C(0xffffffffffffffff) },
    };
    /* 16/32-bit limit+base pairs; all loadable (no canonical checks here). */
    static const struct
    {
        uint16_t cbLimit;
        uint32_t u32Base;
    } s_aValues32[] =
    {
        { 0xdfdf, UINT32_C(0xefefefef) },
        { 0x0000, UINT32_C(0x00000000) },
        { 0x0001, UINT32_C(0x00000001) },
        { 0x0002, UINT32_C(0x00000012) },
        { 0x0003, UINT32_C(0x00000123) },
        { 0x0004, UINT32_C(0x00001234) },
        { 0x0005, UINT32_C(0x00012345) },
        { 0x0006, UINT32_C(0x00123456) },
        { 0x0007, UINT32_C(0x01234567) },
        { 0x0008, UINT32_C(0x12345678) },
        { 0x0009, UINT32_C(0x80204060) },
        { 0x000a, UINT32_C(0xddeeffaa) },
        { 0x000b, UINT32_C(0xfdecdbca) },
        { 0x000c, UINT32_C(0x6098456b) },
        { 0x000d, UINT32_C(0x98506099) },
        { 0x000e, UINT32_C(0x206950bc) },
        { 0x000f, UINT32_C(0x9740395d) },
        { 0x0334, UINT32_C(0x64a9455e) },
        { 0xb423, UINT32_C(0xd20b6eff) },
        { 0x4955, UINT32_C(0x85296d46) },
        { 0xffff, UINT32_C(0x07000039) },
        { 0xefe1, UINT32_C(0x0007fe00) },
    };

    BS3TRAPFRAME        TrapCtx;
    BS3REGCTX           Ctx;
    BS3REGCTX           CtxUdExpected;
    BS3REGCTX           TmpCtx;
    uint8_t             abBufLoad[40];          /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
    uint8_t             abBufSave[32];          /* For saving the result after loading. */
    uint8_t             abBufRestore[24];       /* For restoring sane value (same seg as abBufSave!). */
    uint8_t             abExpectedFilled[32];   /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
    uint8_t BS3_FAR    *pbBufSave;              /* Correctly aligned pointer into abBufSave. */
    uint8_t BS3_FAR    *pbBufRestore;           /* Correctly aligned pointer into abBufRestore. */
    /* Size of the descriptor-table register image in memory: 2-byte limit +
       8-byte base in 64-bit code, 2+4 otherwise. */
    uint8_t const       cbIdtr        = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
    /* How many base bytes the instruction actually loads: 8 in 64-bit code,
       3 when the effective operand size is 16-bit, otherwise 4. */
    uint8_t const       cbBaseLoaded  = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
                                      : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
                                      ? 3 : 4;
    bool const          f286          = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
    /* The 286 stores 0xff in the 4th base byte on SGDT/SIDT; later CPUs 0x00. */
    uint8_t const       bTop16BitBase = f286 ? 0xff : 0x00;
    uint8_t             bFiller1;               /* For filling abBufLoad. */
    uint8_t             bFiller2;               /* For filling abBufSave and expectations. */
    int                 off;
    uint8_t BS3_FAR    *pbTest;
    unsigned            i;

    /* make sure they're allocated  */
    Bs3MemZero(&Ctx, sizeof(Ctx));
    Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
    Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
    Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
    Bs3MemZero(abBufSave, sizeof(abBufSave));
    Bs3MemZero(abBufLoad, sizeof(abBufLoad));
    Bs3MemZero(abBufRestore, sizeof(abBufRestore));

    /*
     * Create a context, giving this routine some more stack space.
     *  - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
     *  - Point DS/SS:xBX at abBufLoad.
     *  - Point ES:xDI at abBufSave.
     *  - Point ES:xSI at abBufRestore.
     */
    Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
    Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
    if (BS3_MODE_IS_16BIT_SYS(bTestMode))
        g_uBs3TrapEipHint = Ctx.rip.u32;
    Ctx.rflags.u16 &= ~X86_EFL_IF;
    Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);

    /* Align the save buffer so the limit word lands 2 bytes before an
       8-byte boundary (base is then naturally aligned). */
    pbBufSave = abBufSave;
    if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
        pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
    Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);

    pbBufRestore = abBufRestore;
    if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
        pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
    Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
    Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);

    if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
        Bs3RegCtxConvertToRingX(&Ctx, bRing);

    /* For successful SIDT attempts, we'll stop at the UD2. */
    Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
    CtxUdExpected.rip.u += pWorker->cbInstr;

    /*
     * Check that it works at all.
     */
    Bs3MemZero(abBufLoad, sizeof(abBufLoad));
    Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
    Bs3MemZero(abBufSave, sizeof(abBufSave));
    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
    if (bRing != 0)
        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);   /* LIDT/LGDT is privileged. */
    else
    {
        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
        if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
            Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
                           pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
    }
    g_usBs3TestStep++;

    /* Determine two filler bytes that don't appear in the previous result or our expectations. */
    bFiller1 = ~0x55;
    while (   Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
           || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
           || bFiller1 == 0xff)
        bFiller1++;
    bFiller2 = 0x33;
    while (   Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
           || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
           || bFiller2 == 0xff
           || bFiller2 == bFiller1)
        bFiller2++;
    Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
    Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);

    /* Again with a buffer filled with a byte not occurring in the previous result. */
    Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
    Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
    Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
    if (bRing != 0)
        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
    else
    {
        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
        if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
            Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
                           pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
    }
    g_usBs3TestStep++;

    /*
     * Try loading a bunch of different limit+base value to check what happens,
     * especially what happens wrt the top part of the base in 16-bit mode.
     */
    if (BS3_MODE_IS_64BIT_CODE(bTestMode))
    {
        for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
        {
            Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
            Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
            Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
            Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            if (bRing != 0 || s_aValues64[i].fGP)
                bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            else
            {
                bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                if (   Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
                    || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
                    || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
                    Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
                                   pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
                                   bFiller1, bFiller2, cbIdtr*2, pbBufSave);
            }
            g_usBs3TestStep++;
        }
    }
    else
    {
        for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
        {
            Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
            Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
            Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
            Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            if (bRing != 0)
                bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            else
            {
                bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                /* With a 16-bit operand size only 3 base bytes are loaded; the
                   4th must read back as bTop16BitBase (0xff on 286, else 0). */
                if (   Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
                    || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
                    || (   cbBaseLoaded != 4
                        && pbBufSave[2+3] != bTop16BitBase)
                    || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
                    Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
                                   pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
                                   f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
            }
            g_usBs3TestStep++;
        }
    }

    /*
     * Slide the buffer along 8 bytes to cover misalignment.
     */
    for (off = 0; off < 8; off++)
    {
        Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
        CtxUdExpected.rbx.u = Ctx.rbx.u;

        Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
        Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
        Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
        Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
        if (bRing != 0)
            bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
        else
        {
            bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
            if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
                Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
                               pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
        }
        g_usBs3TestStep++;
    }
    Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
    CtxUdExpected.rbx.u = Ctx.rbx.u;

    /*
     * Play with the selector limit if the target mode supports limit checking
     * We use BS3_SEL_TEST_PAGE_00 for this
     */
    if (   !BS3_MODE_IS_RM_OR_V86(bTestMode)
        && !BS3_MODE_IS_64BIT_CODE(bTestMode))
    {
        uint16_t cbLimit;
        uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
        Bs3GdteTestPage00 = Bs3Gdte_DATA16;
        Bs3GdteTestPage00.Gen.u2Dpl       = bRing;
        Bs3GdteTestPage00.Gen.u16BaseLow  = (uint16_t)uFlatBuf;
        Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
        Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);

        if (pWorker->fSs)
            CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
        else
            CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;

        /* Expand up (normal). */
        for (off = 0; off < 8; off++)
        {
            CtxUdExpected.rbx.u = Ctx.rbx.u = off;
            for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
            {
                Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;

                Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
                Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
                Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
                Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                if (bRing != 0)
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                else if (off + cbIdtr <= cbLimit + 1)
                {
                    bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                    if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
                        Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
                                       pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
                }
                else if (pWorker->fSs)
                    bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
                else
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                g_usBs3TestStep++;

                /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
                abBufLoad[off] = abBufLoad[off + 1] = 0;
                abBufLoad[off + 2] |= 1;
                abBufLoad[off + cbIdtr - 2] ^= 0x5a;
                abBufLoad[off + cbIdtr - 1] ^= 0xa5;
                Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                if (bRing != 0)
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                else if (off + cbIdtr <= cbLimit + 1)
                    bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                else if (pWorker->fSs)
                    bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
                else
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            }
        }

        /* Expand down (weird).  Inverted valid area compared to expand up,
           so a limit of zero give us a valid range for 0001..0ffffh (instead of
           a segment with one valid byte at 0000h).  Whereas a limit of 0fffeh
           means one valid byte at 0ffffh, and a limit of 0ffffh means none
           (because in a normal expand up the 0ffffh means all 64KB are
           accessible). */
        Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
        for (off = 0; off < 8; off++)
        {
            CtxUdExpected.rbx.u = Ctx.rbx.u = off;
            for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
            {
                Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;

                Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
                Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
                Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
                Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                if (bRing != 0)
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                else if (off > cbLimit)
                {
                    bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                    if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
                        Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
                                       pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
                }
                else if (pWorker->fSs)
                    bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
                else
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                g_usBs3TestStep++;

                /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
                abBufLoad[off] = abBufLoad[off + 1] = 0;
                abBufLoad[off + 2] |= 3;
                abBufLoad[off + cbIdtr - 2] ^= 0x55;
                abBufLoad[off + cbIdtr - 1] ^= 0xaa;
                Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                if (bRing != 0)
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                else if (off > cbLimit)
                    bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                else if (pWorker->fSs)
                    bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
                else
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            }
        }

        Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
        CtxUdExpected.rbx.u = Ctx.rbx.u;
        CtxUdExpected.ss = Ctx.ss;
        CtxUdExpected.ds = Ctx.ds;
    }

    /*
     * Play with the paging.
     */
    if (   BS3_MODE_IS_PAGED(bTestMode)
        && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here.  */
        && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
    {
        RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);

        /*
         * Slide the load buffer towards the trailing guard page.
         */
        Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
        CtxUdExpected.ss = Ctx.ss;
        CtxUdExpected.ds = Ctx.ds;
        for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
        {
            Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
            if (off < X86_PAGE_SIZE)
                Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
            Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
            Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            if (bRing != 0)
                bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            else if (off + cbIdtr <= X86_PAGE_SIZE)
            {
                CtxUdExpected.rbx = Ctx.rbx;
                bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
                    Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
                                   pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
            }
            else
                bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
            g_usBs3TestStep++;

            /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
               The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
            if (   off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
                && (   off != X86_PAGE_SIZE - 2
                    || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
                )
            {
                pbTest[off] = 0;
                if (off + 1 < X86_PAGE_SIZE)
                    pbTest[off + 1] = 0;
                if (off + 2 < X86_PAGE_SIZE)
                    pbTest[off + 2] |= 7;
                Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                if (bRing != 0)
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                else
                    bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
                g_usBs3TestStep++;
            }
        }

        /*
         * Now, do it the other way around. It should look normal now since writing
         * the limit will #PF first and nothing should be written.
         */
        for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
        {
            Bs3MemSet(pbTest, bFiller1, 48);
            if (off >= 0)
                Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
            else if (off + cbIdtr > 0)
                Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
            Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
            Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            if (bRing != 0)
                bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            else if (off >= 0)
            {
                CtxUdExpected.rbx = Ctx.rbx;
                bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
                    Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
                                   pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
            }
            else
                bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off);
            g_usBs3TestStep++;

            /* Again with messed up base as well (triple fault if buggy). */
            if (off < 0 && off > -cbIdtr)
            {
                if (off + 2 >= 0)
                    pbTest[off + 2] |= 15;
                pbTest[off + cbIdtr - 1] ^= 0xaa;
                Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                if (bRing != 0)
                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                else
                    bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off);
                g_usBs3TestStep++;
            }
        }

        /*
         * Combine paging and segment limit and check ordering.
         * This is kind of interesting here since the instruction seems to
         * actually be doing two separate reads, just like its S[IG]DT counterpart.
         *
         * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
         *       that's what f486Weirdness deals with.
         */
        if (   !BS3_MODE_IS_RM_OR_V86(bTestMode)
            && !BS3_MODE_IS_64BIT_CODE(bTestMode))
        {
            bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
                                    && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
            uint16_t cbLimit;

            Bs3GdteTestPage00 = Bs3Gdte_DATA16;
            Bs3GdteTestPage00.Gen.u2Dpl       = bRing;
            Bs3GdteTestPage00.Gen.u16BaseLow  = (uint16_t)uFlatTest;
            Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
            Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);

            if (pWorker->fSs)
                CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
            else
                CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;

            /* Expand up (normal), approaching tail guard page. */
            for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
            {
                CtxUdExpected.rbx.u = Ctx.rbx.u = off;
                for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
                {
                    Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
                    Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
                    if (off < X86_PAGE_SIZE)
                        Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
                    Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                    if (bRing != 0)
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                    else if (off + cbIdtr <= cbLimit + 1)
                    {
                        /* No #GP, but maybe #PF. */
                        if (off + cbIdtr <= X86_PAGE_SIZE)
                        {
                            bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                            if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
                                Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
                                               pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
                        }
                        else
                            bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
                    }
                    /* No #GP/#SS on limit, but instead #PF? */
                    else if (  !f486Weirdness
                             ? off     < cbLimit && off >= 0xfff
                             : off + 2 < cbLimit && off >= 0xffd)
                        bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
                    /* #GP/#SS on limit or base. */
                    else if (pWorker->fSs)
                        bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
                    else
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);

                    g_usBs3TestStep++;

                    /* Set DS to 0 and check that we get #GP(0). */
                    if (!pWorker->fSs)
                    {
                        Ctx.ds = 0;
                        Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                        Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
                        g_usBs3TestStep++;
                    }
                }
            }

            /* Expand down. */
            pbTest    -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
            uFlatTest -= X86_PAGE_SIZE;

            Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
            Bs3GdteTestPage00.Gen.u16BaseLow  = (uint16_t)uFlatTest;
            Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
            Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);

            for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
            {
                CtxUdExpected.rbx.u = Ctx.rbx.u = off;
                for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
                {
                    Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
                    Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
                    if (off >= X86_PAGE_SIZE)
                        Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
                    else if (off > X86_PAGE_SIZE - cbIdtr)
                        Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
                    Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
                    if (bRing != 0)
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                    else if (cbLimit < off && off >= X86_PAGE_SIZE)
                    {
                        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                        if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
                            Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
                                           pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
                    }
                    else if (cbLimit < off && off < X86_PAGE_SIZE)
                        bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off);
                    else if (pWorker->fSs)
                        bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
                    else
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                    g_usBs3TestStep++;
                }
            }

            pbTest    += X86_PAGE_SIZE;
            uFlatTest += X86_PAGE_SIZE;
        }

        Bs3MemGuardedTestPageFree(pbTest);
    }

    /*
     * Check non-canonical 64-bit space.
     */
    if (   BS3_MODE_IS_64BIT_CODE(bTestMode)
        && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
    {
        /* Make our references relative to the gap. */
        pbTest += g_cbBs3PagingOneCanonicalTrap;

        /* Hit it from below. */
        for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
        {
            Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
            Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
            Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
            Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            if (off + cbIdtr > 0 || bRing != 0)
                bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            else
            {
                bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
                    Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
                                   pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
            }
        }

        /* Hit it from above. */
        for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
        {
            Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
            Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
            Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
            Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            if (off < 0 || bRing != 0)
                bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            else
            {
                bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
                    Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
                                   pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
            }
        }

    }
}
3063
3064
3065static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3066 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3067{
3068 unsigned idx;
3069 unsigned bRing;
3070 unsigned iStep = 0;
3071
3072 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3073 test and don't want to bother with double faults. */
3074 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3075 {
3076 for (idx = 0; idx < cWorkers; idx++)
3077 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3078 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3079 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3080 || ( bTestMode > BS3_MODE_PE16
3081 || ( bTestMode == BS3_MODE_PE16
3082 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3083 {
3084 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3085 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3086 g_usBs3TestStep = iStep;
3087 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3088 iStep += 1000;
3089 }
3090 if (BS3_MODE_IS_RM_SYS(bTestMode))
3091 break;
3092 }
3093}
3094
3095
3096BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3097{
3098 union
3099 {
3100 RTIDTR Idtr;
3101 uint8_t ab[32]; /* At least cbIdtr*2! */
3102 } Expected;
3103
3104 //if (bMode != BS3_MODE_LM64) return 0;
3105 bs3CpuBasic2_SetGlobals(bMode);
3106
3107 /*
3108 * Pass to common worker which is only compiled once per mode.
3109 */
3110 Bs3MemZero(&Expected, sizeof(Expected));
3111 ASMGetIDTR(&Expected.Idtr);
3112
3113 if (BS3_MODE_IS_RM_SYS(bMode))
3114 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3115 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3116 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3117 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3118 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3119 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3120 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3121 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3122 else
3123 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3124 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3125
3126 /*
3127 * Re-initialize the IDT.
3128 */
3129 Bs3TrapReInit();
3130 return 0;
3131}
3132
3133
3134BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3135{
3136 union
3137 {
3138 RTGDTR Gdtr;
3139 uint8_t ab[32]; /* At least cbIdtr*2! */
3140 } Expected;
3141
3142 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3143 bs3CpuBasic2_SetGlobals(bMode);
3144
3145 /*
3146 * Pass to common worker which is only compiled once per mode.
3147 */
3148 if (BS3_MODE_IS_RM_SYS(bMode))
3149 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3150 Bs3MemZero(&Expected, sizeof(Expected));
3151 ASMGetGDTR(&Expected.Gdtr);
3152
3153 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3154 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3155
3156 /*
3157 * Re-initialize the IDT.
3158 */
3159 Bs3TrapReInit();
3160 return 0;
3161}
3162
/** Scratch buffer for constructing IRET stack frames with 16-, 32- or 64-bit
 * wide entries (see iretbuf_SetupFrame).  The "max req" notes give the
 * largest number of entries any frame variation needs at that width; the
 * buffer is oversized to leave slack. */
typedef union IRETBUF
{
    uint64_t au64[6];  /* max req is 5 */
    uint32_t au32[12]; /* max req is 9 */
    uint16_t au16[24]; /* max req is 5 */
    uint8_t  ab[48];
} IRETBUF;
/** Far pointer to an IRET frame buffer. */
typedef IRETBUF BS3_FAR *PIRETBUF;
3171
3172
3173static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3174 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3175{
3176 if (cbPop == 2)
3177 {
3178 pIretBuf->au16[0] = (uint16_t)uPC;
3179 pIretBuf->au16[1] = uCS;
3180 pIretBuf->au16[2] = (uint16_t)fEfl;
3181 pIretBuf->au16[3] = (uint16_t)uSP;
3182 pIretBuf->au16[4] = uSS;
3183 }
3184 else if (cbPop != 8)
3185 {
3186 pIretBuf->au32[0] = (uint32_t)uPC;
3187 pIretBuf->au16[1*2] = uCS;
3188 pIretBuf->au32[2] = (uint32_t)fEfl;
3189 pIretBuf->au32[3] = (uint32_t)uSP;
3190 pIretBuf->au16[4*2] = uSS;
3191 }
3192 else
3193 {
3194 pIretBuf->au64[0] = uPC;
3195 pIretBuf->au16[1*4] = uCS;
3196 pIretBuf->au64[2] = fEfl;
3197 pIretBuf->au64[3] = uSP;
3198 pIretBuf->au16[4*4] = uSS;
3199 }
3200}
3201
/**
 * Returns the current 32-bit ESP value from 16-bit code.
 *
 * Open Watcom auxiliary pragma: ".386" enables 386 instructions; SP supplies
 * the low word in AX and the high word of ESP is shifted down into DX, so the
 * 32-bit result is returned in the DX:AX register pair (value [ax dx]).
 */
uint32_t ASMGetESP(void);
#pragma aux ASMGetESP = \
    ".386" \
    "mov ax, sp" \
    "mov edx, esp" \
    "shr edx, 16" \
    value [ax dx] \
    modify exact [ax dx];
3210
3211
/**
 * Worker for exercising one IRET encoding in one CPU mode.
 *
 * Builds IRET frames in @a pIretBuf (SS:xSP is pointed at it), executes the
 * instruction at @a pfnIret and checks that execution lands on the expected
 * UD2 with the expected register context, or that the expected \#GP/\#PF is
 * raised.  Covers same-ring and inter-ring returns, CS.RPL/SS.RPL/SS.DPL
 * permutations, and 64-bit specials (VM flag ignored, NT flag handling).
 *
 * @param   bTestMode   The BS3 mode being tested.
 * @param   pfnIret     The IRET code snippet under test (must start with the
 *                      IRET instruction; first 4 bytes are copied for the
 *                      low-memory variant).
 * @param   cbPop       Width of one frame entry in bytes: 2, 4 or 8.
 * @param   pIretBuf    Frame buffer the stack pointer is aimed at.
 * @param   pszDesc     Test description (not referenced in the body visible
 *                      here).
 */
static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
                                     PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
{
    BS3TRAPFRAME        TrapCtx;
    BS3REGCTX           Ctx;                /* context executing the iret */
    BS3REGCTX           CtxUdExpected;      /* expected context at the ud2 (same CPL) */
    BS3REGCTX           TmpCtx;
    BS3REGCTX           TmpCtxExpected;
    uint8_t             abLowUd[8];         /* low-memory (first 64KB) copy of ud2 */
    uint8_t             abLowIret[8];       /* low-memory copy of the iret snippet */
    FPFNBS3FAR          pfnUdLow = (FPFNBS3FAR)abLowUd;
    FPFNBS3FAR          pfnIretLow = (FPFNBS3FAR)abLowIret;
    /* 64-bit code always uses a 5 entry frame (incl. SS:RSP), otherwise 3 for same-CPL. */
    unsigned const      cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
    bool const          fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
    int                 iRingDst;
    int                 iRingSrc;
    uint16_t            uDplSs;
    uint16_t            uRplCs;
    uint16_t            uRplSs;
//    int                 i;
    uint8_t BS3_FAR    *pbTest;

    NOREF(abLowUd);
/* NOTE(review): the casts below look like they are missing a pointer star
   (i.e. *(uint16_t BS3_FAR *)&...).  Neither macro is expanded in the body
   visible here, so the issue would only surface on first use -- confirm
   against upstream before using them. */
#define IRETBUF_SET_SEL(a_idx, a_uValue) \
    do { *(uint16_t)&pIretBuf->ab[a_idx * cbPop] = (a_uValue); } while (0)
#define IRETBUF_SET_REG(a_idx, a_uValue) \
    do { uint8_t const BS3_FAR *pbTmp = &pIretBuf->ab[a_idx * cbPop]; \
         if (cbPop == 2)       *(uint16_t)pbTmp = (uint16_t)(a_uValue); \
         else if (cbPop != 8)  *(uint32_t)pbTmp = (uint32_t)(a_uValue); \
         else                  *(uint64_t)pbTmp = (a_uValue); \
    } while (0)

    /* make sure they're allocated  */
    Bs3MemZero(&Ctx, sizeof(Ctx));
    Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
    Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
    Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
    Bs3MemZero(&TrapCtx, sizeof(TrapCtx));

    /*
     * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
     * copies of both iret and ud in the first 64KB of memory.  The stack is
     * below 64KB, so we'll just copy the instructions onto the stack.
     */
    Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
    Bs3MemCpy(abLowIret, pfnIret, 4);

    /*
     * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
     *  - Point the context at our iret instruction.
     *  - Point SS:xSP at pIretBuf.
     */
    Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
    if (!fUseLowCode)
        Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
    else
        Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
    if (BS3_MODE_IS_16BIT_SYS(bTestMode))
        g_uBs3TrapEipHint = Ctx.rip.u32;
    Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);

    /*
     * The first success (UD) context keeps the same code bit-count as the iret.
     */
    Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
    if (!fUseLowCode)
        Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
    else
        Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
    CtxUdExpected.rsp.u += cbSameCplFrame;  /* iret pops the frame off the stack */

    /*
     * Check that it works at all.
     */
    iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
                       CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);

    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
    bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
    g_usBs3TestStep++;

    if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
    {
        /* Selectors are modified when switching rings, so we need to know
           what we're dealing with there. */
        if (   !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
            || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
            Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
        if (Ctx.fs || Ctx.gs)
            Bs3TestFailed("Expected R0 FS and GS to be 0!\n");

        /*
         * Test returning to outer rings if protected mode.
         */
        Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
        Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
        for (iRingDst = 3; iRingDst >= 0; iRingDst--)
        {
            Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
            /* DS is cleared when returning to an outer ring (iRingDst != 0 here). */
            TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
            TmpCtx.es = TmpCtxExpected.es;
            iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
                               TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
            Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
            bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
            g_usBs3TestStep++;
        }

        /*
         * Check CS.RPL and SS.RPL.
         */
        for (iRingDst = 3; iRingDst >= 0; iRingDst--)
        {
            /* Ring-0 variant of the destination SS selector, used as base when
               constructing SS selectors with varying DPL/RPL below. */
            uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
            Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
            Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
            for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
            {
                Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
                Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
                TmpCtx.es = TmpCtxExpected.es;
                TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
                for (uRplCs = 0; uRplCs <= 3; uRplCs++)
                {
                    uint16_t const uSrcEs = TmpCtx.es;
                    uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
                    //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);

                    /* CS.RPL - succeeds only when RPL matches the destination ring
                       and we're returning outwards or to the same ring. */
                    iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
                                       TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
                    Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
                    if (uRplCs == iRingDst && iRingDst >= iRingSrc)
                        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
                    else
                    {
                        if (iRingDst < iRingSrc)
                            TmpCtx.es = 0;
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
                        TmpCtx.es = uSrcEs;
                    }
                    g_usBs3TestStep++;

                    /* SS.RPL */
                    if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
                    {
                        uint16_t uSavedDstSs = TmpCtxExpected.ss;
                        for (uRplSs = 0; uRplSs <= 3; uRplSs++)
                        {
                            /* SS.DPL (iRingDst == CS.DPL) */
                            for (uDplSs = 0; uDplSs <= 3; uDplSs++)
                            {
                                uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
                                //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
                                //              iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
                                //              TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);

                                iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
                                                   TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
                                Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
                                /* A bad CS.RPL faults with the CS selector as error code,
                                   a bad SS.RPL/DPL with the SS selector; otherwise UD. */
                                if (uRplCs != iRingDst || iRingDst < iRingSrc)
                                {
                                    if (iRingDst < iRingSrc)
                                        TmpCtx.es = 0;
                                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
                                }
                                else if (uRplSs != iRingDst || uDplSs != iRingDst)
                                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
                                else
                                    bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
                                TmpCtx.es = uSrcEs;
                                g_usBs3TestStep++;
                            }
                        }

                        TmpCtxExpected.ss = uSavedDstSs;
                    }
                }
            }
        }
    }

    /*
     * Special 64-bit checks.
     */
    if (BS3_MODE_IS_64BIT_CODE(bTestMode))
    {
        /* The VM flag is completely ignored. */
        iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
                           CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
        Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
        g_usBs3TestStep++;

        /* The NT flag can be loaded just fine. */
        CtxUdExpected.rflags.u32 |= X86_EFL_NT;
        iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
                           CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
        Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
        CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
        g_usBs3TestStep++;

        /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
        Ctx.rflags.u32 |= X86_EFL_NT;
        iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
                           CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
        Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
        g_usBs3TestStep++;

        /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
        pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
        if (pbTest != NULL)
        {
            /* Point the stack at the guard page boundary so the frame read would #PF. */
            Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
            iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
                               CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
            g_usBs3TestStep++;

            Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
            Bs3MemGuardedTestPageFree(pbTest);
        }
        Ctx.rflags.u32 &= ~X86_EFL_NT;
    }
}
3440
3441
3442BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3443{
3444 struct
3445 {
3446 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3447 IRETBUF IRetBuf;
3448 uint8_t abGuard[32];
3449 } uBuf;
3450 size_t cbUnused;
3451
3452 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3453 bs3CpuBasic2_SetGlobals(bMode);
3454
3455 /*
3456 * Primary instruction form.
3457 */
3458 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3459 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3460 if (BS3_MODE_IS_16BIT_CODE(bMode))
3461 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3462 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3463 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3464 else
3465 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3466
3467 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3468 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3469 - (uintptr_t)uBuf.abExtraStack;
3470 if (cbUnused < 2048)
3471 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3472
3473 /*
3474 * Secondary variation: opsize prefixed.
3475 */
3476 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3477 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3478 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3479 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3480 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3481 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3482 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3483 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3484 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3485 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3486 - (uintptr_t)uBuf.abExtraStack;
3487 if (cbUnused < 2048)
3488 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3489
3490 /*
3491 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3492 */
3493 if (BS3_MODE_IS_64BIT_CODE(bMode))
3494 {
3495 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3496 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3497 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3498 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3499 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3500 - (uintptr_t)uBuf.abExtraStack;
3501 if (cbUnused < 2048)
3502 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3503 }
3504
3505 return 0;
3506}
3507
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette