VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c@ 95532

Last change on this file since 95532 was 95431, checked in by vboxsync, 2 years ago

ValKit/bs3-cpu-basic-2: Added fld, fbld and fxsave to the #AC tests. Fixed corruption problem caused by calling Bs3RegCtxConvertToRingX in RM and V86 contexts. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 166.7 KB
Line 
1/* $Id: bs3-cpu-basic-2-x0.c 95431 2022-06-29 14:26:40Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2022 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#define BS3_USE_X0_TEXT_SEG
32#include <bs3kit.h>
33#include <iprt/asm.h>
34#include <iprt/asm-amd64-x86.h>
35
36
37/*********************************************************************************************************************************
38* Defined Constants And Macros *
39*********************************************************************************************************************************/
#undef CHECK_MEMBER
/** Checks a trap-frame member against its expected value and reports a
 *  formatted failure (via bs3CpuBasic2_FailedF) on mismatch.
 *  @param a_szName    Member name used in the failure message.
 *  @param a_szFmt     printf-style format for both actual and expected values.
 *  @param a_Actual    The value found in the trap frame.
 *  @param a_Expected  The value we expected. */
#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
    do \
    { \
        if ((a_Actual) == (a_Expected)) { /* likely */ } \
        else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
    } while (0)
47
48
/** Indicating that we've got operand size prefix and that it matters. */
#define BS3CB2SIDTSGDT_F_OPSIZE     UINT8_C(0x01)
/** Worker requires 386 or later. */
#define BS3CB2SIDTSGDT_F_386PLUS    UINT8_C(0x02)


/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
 *
 * These are flags, though we've precombined a few shortening things down.
 *
 * @{ */
#define MYOP_LD     0x1     /**< The instruction loads. */
#define MYOP_ST     0x2     /**< The instruction stores */
#define MYOP_EFL    0x4     /**< The instruction modifies EFLAGS. */
#define MYOP_AC_GP  0x8     /**< The instruction may cause either \#AC or \#GP (FXSAVE). */

#define MYOP_LD_ST  0x3     /**< Convenience: The instruction both loads and stores. */
#define MYOP_LD_DIV 0x5     /**< Convenience: DIV instruction - loading and modifying flags. */
/** @} */
68
69
70/*********************************************************************************************************************************
71* Structures and Typedefs *
72*********************************************************************************************************************************/
/** Descriptor type combo used for testing invalid CS/SS selector types
 *  (see the disabled g_aInvalidCsTypes/g_aInvalidSsTypes tables). */
typedef struct BS3CB2INVLDESCTYPE
{
    uint8_t u4Type;     /**< The 4-bit descriptor type value (X86_SEL_TYPE_XXX or raw system type). */
    uint8_t u1DescType; /**< The S bit: 1 = code/data descriptor, 0 = system descriptor. */
} BS3CB2INVLDESCTYPE;
78
/** Configuration entry for one SIDT/SGDT/LIDT/LGDT test worker
 *  (see g_aSidtWorkers and friends). */
typedef struct BS3CB2SIDTSGDT
{
    const char *pszDesc;    /**< Human readable description of the instruction variant. */
    FPFNBS3FAR  fpfnWorker; /**< Far pointer to the assembly worker snippet. */
    uint8_t     cbInstr;    /**< Length of the instruction under test, in bytes. */
    bool        fSs;        /**< Whether the worker addresses the operand via SS (ss: prefix). */
    uint8_t     bMode;      /**< Code mode mask (BS3_MODE_CODE_XXX) the worker applies to. */
    uint8_t     fFlags;     /**< BS3CB2SIDTSGDT_F_XXX. */
} BS3CB2SIDTSGDT;
88
89
/** Signature of the assembly test snippets (no arguments, no return value). */
typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);

/** Describes one #AC test snippet: what it does and the memory access shape. */
typedef struct FNBS3CPUBASIC2ACTSTCODE
{
    FNBS3CPUBASIC2ACSNIPPET BS3_FAR    *pfn;      /**< The snippet to execute. */
    uint8_t                             fOp;      /**< MYOP_XXX describing the access (load/store/flags/#AC-or-#GP). */
    uint16_t                            cbMem;    /**< Number of bytes the snippet accesses. */
    uint8_t                             cbAlign;  /**< Alignment required to avoid #AC. */
    uint8_t                             offFaultInstr; /**< For skipping fninit with the fld test. */
} FNBS3CPUBASIC2ACTSTCODE;
typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;

/** Per code-mode test configuration: which snippet table to use.
 *  NOTE(review): the struct tag says ...ACTTSTCMNMODE while the typedef says
 *  ...PFTTSTCMNMODE - presumably a copy/paste from the #PF test; harmless,
 *  but worth confirming/unifying in a coordinated rename. */
typedef struct BS3CPUBASIC2ACTTSTCMNMODE
{
    uint8_t                     bMode;    /**< The code mode (BS3_MODE_CODE_XXX). */
    uint16_t                    cEntries; /**< Number of entries in paEntries. */
    PCFNBS3CPUBASIC2ACTSTCODE   paEntries; /**< The snippet table for this mode. */
} BS3CPUBASIC2PFTTSTCMNMODE;
typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
109
110
111/*********************************************************************************************************************************
112* External Symbols *
113*********************************************************************************************************************************/
114extern FNBS3FAR bs3CpuBasic2_Int80;
115extern FNBS3FAR bs3CpuBasic2_Int81;
116extern FNBS3FAR bs3CpuBasic2_Int82;
117extern FNBS3FAR bs3CpuBasic2_Int83;
118
119extern FNBS3FAR bs3CpuBasic2_ud2;
120#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
121extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
122
123extern FNBS3FAR bs3CpuBasic2_iret;
124extern FNBS3FAR bs3CpuBasic2_iret_opsize;
125extern FNBS3FAR bs3CpuBasic2_iret_rexw;
126
127extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
128extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
129extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
130extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
131extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
132extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
133extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
134extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
135extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
136extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
137extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
138extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
139
140extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
141extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
142extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
143extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
144extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
145extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
146extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
147extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
148extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
149extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
150extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
151extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
152
153extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
154extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
155extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
156extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
157extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
158extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
159extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
160extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
161extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
162extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
163extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
164extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
165extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
166
167extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
168extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
169extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
170extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
171extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
172extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
173extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
174extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
175extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
176extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
177extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
178extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
179
180
181/* bs3-cpu-basic-2-template.mac: */
182FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
183FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
184FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
185FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
186FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
187FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16;
188FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16;
189FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16;
190FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c16;
191
192FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
193FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
194FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
195FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
196FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
197FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32;
198FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32;
199FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32;
200FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c32;
201
202FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
203FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
204FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
205FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
206FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
207FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64;
208FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64;
209FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64;
210FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c64;
211
212
213/*********************************************************************************************************************************
214* Global Variables *
215*********************************************************************************************************************************/
/** Name of the current test mode (set by bs3CpuBasic2_SetGlobals).
 *  NOTE(review): the bogus non-zero initializers presumably force these into
 *  the initialized data segment rather than BSS - confirm before changing. */
static const char BS3_FAR  *g_pszTestMode = (const char *)1;
/** The current test mode (BS3_MODE_XXX), set by bs3CpuBasic2_SetGlobals. */
static uint8_t              g_bTestMode = 1;
/** Whether the current system mode is 16-bit (BS3_MODE_IS_16BIT_SYS). */
static bool                 g_f16BitSys = 1;
219
220
221/** SIDT test workers. */
/** SIDT test workers.
 *  Columns: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
{
    { "sidt [bx]",            bs3CpuBasic2_sidt_bx_ud2_c16,             3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sidt [ss:bx]",         bs3CpuBasic2_sidt_ss_bx_ud2_c16,          4, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sidt [bx]",        bs3CpuBasic2_sidt_opsize_bx_ud2_c16,      4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sidt [ss:bx]",     bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16,   5, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sidt [ebx]",           bs3CpuBasic2_sidt_bx_ud2_c32,             3, false, BS3_MODE_CODE_32,                     0 },
    { "sidt [ss:ebx]",        bs3CpuBasic2_sidt_ss_bx_ud2_c32,          4, true,  BS3_MODE_CODE_32,                     0 },
    { "o16 sidt [ebx]",       bs3CpuBasic2_sidt_opsize_bx_ud2_c32,      4, false, BS3_MODE_CODE_32,                     0 },
    { "o16 sidt [ss:ebx]",    bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32,   5, true,  BS3_MODE_CODE_32,                     0 },
    { "sidt [rbx]",           bs3CpuBasic2_sidt_bx_ud2_c64,             3, false, BS3_MODE_CODE_64,                     0 },
    { "o64 sidt [rbx]",       bs3CpuBasic2_sidt_rexw_bx_ud2_c64,        4, false, BS3_MODE_CODE_64,                     0 },
    { "o32 sidt [rbx]",       bs3CpuBasic2_sidt_opsize_bx_ud2_c64,      4, false, BS3_MODE_CODE_64,                     0 },
    { "o32 o64 sidt [rbx]",   bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64,                     0 },
};
237
238/** SGDT test workers. */
/** SGDT test workers.
 *  Columns: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
{
    { "sgdt [bx]",            bs3CpuBasic2_sgdt_bx_ud2_c16,             3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sgdt [ss:bx]",         bs3CpuBasic2_sgdt_ss_bx_ud2_c16,          4, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sgdt [bx]",        bs3CpuBasic2_sgdt_opsize_bx_ud2_c16,      4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sgdt [ss:bx]",     bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16,   5, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sgdt [ebx]",           bs3CpuBasic2_sgdt_bx_ud2_c32,             3, false, BS3_MODE_CODE_32,                     0 },
    { "sgdt [ss:ebx]",        bs3CpuBasic2_sgdt_ss_bx_ud2_c32,          4, true,  BS3_MODE_CODE_32,                     0 },
    { "o16 sgdt [ebx]",       bs3CpuBasic2_sgdt_opsize_bx_ud2_c32,      4, false, BS3_MODE_CODE_32,                     0 },
    { "o16 sgdt [ss:ebx]",    bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32,   5, true,  BS3_MODE_CODE_32,                     0 },
    { "sgdt [rbx]",           bs3CpuBasic2_sgdt_bx_ud2_c64,             3, false, BS3_MODE_CODE_64,                     0 },
    { "o64 sgdt [rbx]",       bs3CpuBasic2_sgdt_rexw_bx_ud2_c64,        4, false, BS3_MODE_CODE_64,                     0 },
    { "o32 sgdt [rbx]",       bs3CpuBasic2_sgdt_opsize_bx_ud2_c64,      4, false, BS3_MODE_CODE_64,                     0 },
    { "o32 o64 sgdt [rbx]",   bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64,                     0 },
};
254
255/** LIDT test workers. */
/** LIDT test workers.
 *  Columns: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags.
 *  Note: cbInstr here covers the whole lidt+sidt+lidt restore sequence up to
 *  the point of interest, not a single instruction. */
static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
{
    { "lidt [bx]",             bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16,             11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lidt [ss:bx]",          bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16,          12, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lidt [bx]",         bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16,      12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [bx]; sidt32", bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16,    27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [ss:bx]",      bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16,   13, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lidt [ebx]",            bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32,             11, false, BS3_MODE_CODE_32,                     0 },
    { "lidt [ss:ebx]",         bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32,          12, true,  BS3_MODE_CODE_32,                     0 },
    { "o16 lidt [ebx]",        bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32,      12, false, BS3_MODE_CODE_32,                     BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lidt [ss:ebx]",     bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32,   13, true,  BS3_MODE_CODE_32,                     BS3CB2SIDTSGDT_F_OPSIZE },
    { "lidt [rbx]",            bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64,              9, false, BS3_MODE_CODE_64,                     0 },
    { "o64 lidt [rbx]",        bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64,        10, false, BS3_MODE_CODE_64,                     0 },
    { "o32 lidt [rbx]",        bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64,      10, false, BS3_MODE_CODE_64,                     0 },
    { "o32 o64 lidt [rbx]",    bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64,                     0 },
};
272
273/** LGDT test workers. */
/** LGDT test workers.
 *  Columns: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
{
    { "lgdt [bx]",           bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16,             11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lgdt [ss:bx]",        bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16,          12, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lgdt [bx]",       bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16,      12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lgdt [ss:bx]",    bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16,   13, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lgdt [ebx]",          bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32,             11, false, BS3_MODE_CODE_32,                     0 },
    { "lgdt [ss:ebx]",       bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32,          12, true,  BS3_MODE_CODE_32,                     0 },
    { "o16 lgdt [ebx]",      bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32,      12, false, BS3_MODE_CODE_32,                     BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lgdt [ss:ebx]",   bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32,   13, true,  BS3_MODE_CODE_32,                     BS3CB2SIDTSGDT_F_OPSIZE },
    { "lgdt [rbx]",          bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64,              9, false, BS3_MODE_CODE_64,                     0 },
    { "o64 lgdt [rbx]",      bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64,        10, false, BS3_MODE_CODE_64,                     0 },
    { "o32 lgdt [rbx]",      bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64,      10, false, BS3_MODE_CODE_64,                     0 },
    { "o32 o64 lgdt [rbx]",  bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64,                     0 },
};
289
290
291
#if 0 /* Currently unused - kept for the disabled RaiseXcpt1 tests below. */
/** Table containing invalid CS selector types.
 *  First 8 rows: data-segment types (S=1) which are invalid for CS;
 *  remaining rows: raw system descriptor types 0..15 (S=0). */
static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
{
    { X86_SEL_TYPE_RO,          1 },
    { X86_SEL_TYPE_RO_ACC,      1 },
    { X86_SEL_TYPE_RW,          1 },
    { X86_SEL_TYPE_RW_ACC,      1 },
    { X86_SEL_TYPE_RO_DOWN,     1 },
    { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
    { X86_SEL_TYPE_RW_DOWN,     1 },
    { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
    { 0, 0 },
    { 1, 0 },
    { 2, 0 },
    { 3, 0 },
    { 4, 0 },
    { 5, 0 },
    { 6, 0 },
    { 7, 0 },
    { 8, 0 },
    { 9, 0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};

/** Table containing invalid SS selector types.
 *  First 8 rows: code-segment types (S=1) which are invalid for SS;
 *  remaining rows: raw system descriptor types 0..15 (S=0). */
static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
{
    { X86_SEL_TYPE_EO,          1 },
    { X86_SEL_TYPE_EO_ACC,      1 },
    { X86_SEL_TYPE_ER,          1 },
    { X86_SEL_TYPE_ER_ACC,      1 },
    { X86_SEL_TYPE_EO_CONF,     1 },
    { X86_SEL_TYPE_EO_CONF_ACC, 1 },
    { X86_SEL_TYPE_ER_CONF,     1 },
    { X86_SEL_TYPE_ER_CONF_ACC, 1 },
    { 0, 0 },
    { 1, 0 },
    { 2, 0 },
    { 3, 0 },
    { 4, 0 },
    { 5, 0 },
    { 6, 0 },
    { 7, 0 },
    { 8, 0 },
    { 9, 0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};
#endif
351
352
/** 16-bit #AC test snippets.
 *  Columns: pfn, fOp, cbMem, cbAlign[, offFaultInstr]. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
{
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c16,             MYOP_LD,                2,  2 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c16,             MYOP_ST,                2,  2 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16,            MYOP_LD_ST,             2,  2 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16,         MYOP_LD_ST | MYOP_EFL,  2,  2 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c16,                MYOP_LD_DIV,            2,  2 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16,         MYOP_LD,               10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16,        MYOP_LD,               10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16,   MYOP_ST,               10,  8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c16,             MYOP_ST | MYOP_AC_GP, 512, 16 },
};
365
/** 32-bit #AC test snippets.
 *  Columns: pfn, fOp, cbMem, cbAlign[, offFaultInstr]. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
{
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c32,             MYOP_LD,                4,  4 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c32,             MYOP_ST,                4,  4 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32,            MYOP_LD_ST,             4,  4 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32,         MYOP_LD_ST | MYOP_EFL,  4,  4 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c32,                MYOP_LD_DIV,            4,  4 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32,         MYOP_LD,               10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32,        MYOP_LD,               10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32,   MYOP_ST,               10,  8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c32,             MYOP_ST | MYOP_AC_GP, 512, 16 },
};
378
/** 64-bit #AC test snippets.
 *  Columns: pfn, fOp, cbMem, cbAlign[, offFaultInstr]. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
{
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c64,             MYOP_LD,                8,  8 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c64,             MYOP_ST,                8,  8 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64,            MYOP_LD_ST,             8,  8 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64,         MYOP_LD_ST | MYOP_EFL,  8,  8 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c64,                MYOP_LD_DIV,            8,  8 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64,         MYOP_LD,               10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64,        MYOP_LD,               10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64,   MYOP_ST,               10,  8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c64,             MYOP_ST | MYOP_AC_GP, 512, 16 },
};
391
/** Maps each code mode to its snippet table (V86 reuses the 16-bit snippets). */
static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
{
    {   BS3_MODE_CODE_16,  RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    {   BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    {   BS3_MODE_CODE_32,  RT_ELEMENTS(g_aCmn32), g_aCmn32 },
    {   BS3_MODE_CODE_64,  RT_ELEMENTS(g_aCmn64), g_aCmn64 },
};
399
400
401/**
402 * Sets globals according to the mode.
403 *
404 * @param bTestMode The test mode.
405 */
406static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
407{
408 g_bTestMode = bTestMode;
409 g_pszTestMode = Bs3GetModeName(bTestMode);
410 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
411 g_usBs3TestStep = 0;
412}
413
414
/**
 * Reads the full 32-bit stack pointer (ESP) from 16-bit code.
 *
 * Open Watcom inline assembly: the ".386" directive enables 386 instructions,
 * the low word is taken from SP and the high word from the upper half of ESP;
 * the 32-bit result is returned in the AX:DX register pair (value [ax dx]).
 */
uint32_t ASMGetESP(void);
#pragma aux ASMGetESP = \
    ".386" \
    "mov ax, sp" \
    "mov edx, esp" \
    "shr edx, 16" \
    value [ax dx] \
    modify exact [ax dx];
423
424
425/**
426 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
427 * and g_pszTestMode.
428 */
429static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
430{
431 va_list va;
432
433 char szTmp[168];
434 va_start(va, pszFormat);
435 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
436 va_end(va);
437
438 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
439}
440
441
#if 0 /* Currently unused - belongs to the disabled RaiseXcpt1 tests. */
/**
 * Compares the trap frame of a software interrupt (int xx) against the
 * starting context: checks the exception number, that the error code is
 * zero, and that the register context only advanced by the 2-byte int
 * instruction.  Halts on mismatch so the state can be inspected.
 */
static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt",   "%#04x",    pTrapCtx->bXcpt,    bXcpt);
    CHECK_MEMBER("bErrCd",  "%#06RX64", pTrapCtx->uErrCd,   0);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
463
464
#if 0 /* Currently unused - belongs to the disabled RaiseXcpt1 tests. */
/**
 * Compares a trap frame against the starting context, additionally checking
 * the handler CS value (for tests that redirect gates to other selectors).
 *
 * @param pTrapCtx    The actual trap frame.
 * @param pStartCtx   The context before the trap.
 * @param cbIpAdjust  Expected instruction-pointer advance.
 * @param bXcpt       Expected exception/interrupt number.
 * @param uHandlerCs  Expected CS in the handler.
 */
static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
                                         uint8_t bXcpt, uint16_t uHandlerCs)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt",       "%#04x",    pTrapCtx->bXcpt,        bXcpt);
    CHECK_MEMBER("bErrCd",      "%#06RX64", pTrapCtx->uErrCd,       0);
    CHECK_MEMBER("uHandlerCs",  "%#06x",    pTrapCtx->uHandlerCs,   uHandlerCs);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
488
489/**
490 * Compares a CPU trap.
491 */
/**
 * Compares a CPU trap frame against expectations: exception number, error
 * code (low 16 bits only - a 486 only writes a word), and the full register
 * context including the resume flag (RF) behavior.
 *
 * @param pTrapCtx            The actual trap frame.
 * @param pStartCtx           The context before the trap.
 * @param uErrCd              Expected error code.
 * @param bXcpt               Expected exception number (X86_XCPT_XXX).
 * @param f486ResumeFlagHint  Hint whether a 486 is expected to set RF for
 *                            this exception; ignored on 16-bit systems.
 * @param cbIpAdjust          Expected instruction-pointer advance.
 */
static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
                                           uint8_t bXcpt, bool f486ResumeFlagHint, uint8_t cbIpAdjust)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    uint32_t fExtraEfl;

    CHECK_MEMBER("bXcpt",   "%#04x",    pTrapCtx->bXcpt,    bXcpt);
    CHECK_MEMBER("bErrCd",  "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */

    /* Decide whether the saved EFLAGS should carry X86_EFL_RF: not on 16-bit
       systems, and not on <=486 when the hint says the CPU won't set it. */
    if (   g_f16BitSys
        || (   !f486ResumeFlagHint
            && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
        fExtraEfl = 0;
    else
        fExtraEfl = X86_EFL_RF;
#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
    fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
#endif
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
        ASMHalt();
#endif
    }
}
521
522
523/**
524 * Compares \#GP trap.
525 */
526static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
527{
528 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
529}
530
#if 0 /* Currently unused - belongs to the disabled RaiseXcpt1 tests. */
/**
 * Compares \#NP trap.
 */
static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
#endif
540
541/**
542 * Compares \#SS trap.
543 */
544static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
545{
546 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint, 0 /*cbIpAdjust*/);
547}
548
#if 0 /* Currently unused - belongs to the disabled RaiseXcpt1 tests. */
/**
 * Compares \#TS trap.
 */
static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
#endif
558
559/**
560 * Compares \#PF trap.
561 */
562static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd,
563 uint64_t uCr2Expected, uint8_t cbIpAdjust)
564{
565 uint64_t const uCr2Saved = pStartCtx->cr2.u;
566 pStartCtx->cr2.u = uCr2Expected;
567 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/, cbIpAdjust);
568 pStartCtx->cr2.u = uCr2Saved;
569}
570
571/**
572 * Compares \#UD trap.
573 */
574static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
575{
576 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD,
577 true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
578}
579
580/**
581 * Compares \#AC trap.
582 */
583static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t cbIpAdjust)
584{
585 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/, cbIpAdjust);
586}
587
588
589#if 0 /* convert me */
590static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
591 PX86DESC const paIdt, unsigned const cIdteShift)
592{
593 BS3TRAPFRAME TrapCtx;
594 BS3REGCTX Ctx80;
595 BS3REGCTX Ctx81;
596 BS3REGCTX Ctx82;
597 BS3REGCTX Ctx83;
598 BS3REGCTX CtxTmp;
599 BS3REGCTX CtxTmp2;
600 PBS3REGCTX apCtx8x[4];
601 unsigned iCtx;
602 unsigned iRing;
603 unsigned iDpl;
604 unsigned iRpl;
605 unsigned i, j, k;
606 uint32_t uExpected;
607 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
608# if TMPL_BITS == 16
609 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
610 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
611# else
612 bool const f286 = false;
613 bool const f386Plus = true;
614 int rc;
615 uint8_t *pbIdtCopyAlloc;
616 PX86DESC pIdtCopy;
617 const unsigned cbIdte = 1 << (3 + cIdteShift);
618 RTCCUINTXREG uCr0Saved = ASMGetCR0();
619 RTGDTR GdtrSaved;
620# endif
621 RTIDTR IdtrSaved;
622 RTIDTR Idtr;
623
624 ASMGetIDTR(&IdtrSaved);
625# if TMPL_BITS != 16
626 ASMGetGDTR(&GdtrSaved);
627# endif
628
629 /* make sure they're allocated */
630 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
631 Bs3MemZero(&Ctx80, sizeof(Ctx80));
632 Bs3MemZero(&Ctx81, sizeof(Ctx81));
633 Bs3MemZero(&Ctx82, sizeof(Ctx82));
634 Bs3MemZero(&Ctx83, sizeof(Ctx83));
635 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
636 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
637
638 /* Context array. */
639 apCtx8x[0] = &Ctx80;
640 apCtx8x[1] = &Ctx81;
641 apCtx8x[2] = &Ctx82;
642 apCtx8x[3] = &Ctx83;
643
644# if TMPL_BITS != 16
645 /* Allocate memory for playing around with the IDT. */
646 pbIdtCopyAlloc = NULL;
647 if (BS3_MODE_IS_PAGED(g_bTestMode))
648 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
649# endif
650
651 /*
652 * IDT entry 80 thru 83 are assigned DPLs according to the number.
653 * (We'll be using more, but this'll do for now.)
654 */
655 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
656 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
657 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
658 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
659
660 Bs3RegCtxSave(&Ctx80);
661 Ctx80.rsp.u -= 0x300;
662 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
663# if TMPL_BITS == 16
664 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
665# elif TMPL_BITS == 32
666 g_uBs3TrapEipHint = Ctx80.rip.u32;
667# endif
668 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
669 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
670 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
671 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
672 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
673 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
674
675 /*
676 * Check that all the above gates work from ring-0.
677 */
678 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
679 {
680 g_usBs3TestStep = iCtx;
681# if TMPL_BITS == 32
682 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
683# endif
684 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
685 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
686 }
687
688 /*
689 * Check that the gate DPL checks works.
690 */
691 g_usBs3TestStep = 100;
692 for (iRing = 0; iRing <= 3; iRing++)
693 {
694 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
695 {
696 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
697 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
698# if TMPL_BITS == 32
699 g_uBs3TrapEipHint = CtxTmp.rip.u32;
700# endif
701 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
702 if (iCtx < iRing)
703 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
704 else
705 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
706 g_usBs3TestStep++;
707 }
708 }
709
710 /*
711 * Modify the gate CS value and run the handler at a different CPL.
712 * Throw RPL variations into the mix (completely ignored) together
713 * with gate presence.
714 * 1. CPL <= GATE.DPL
715 * 2. GATE.P
716 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
717 */
718 g_usBs3TestStep = 1000;
719 for (i = 0; i <= 3; i++)
720 {
721 for (iRing = 0; iRing <= 3; iRing++)
722 {
723 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
724 {
725# if TMPL_BITS == 32
726 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
727# endif
728 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
729 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
730
731 for (j = 0; j <= 3; j++)
732 {
733 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
734 for (k = 0; k < 2; k++)
735 {
736 g_usBs3TestStep++;
737 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
738 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
739 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
740 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
741 /*Bs3TrapPrintFrame(&TrapCtx);*/
742 if (iCtx < iRing)
743 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
744 else if (k == 0)
745 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
746 else if (i > iRing)
747 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
748 else
749 {
750 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
751 if (i <= iCtx && i <= iRing)
752 uExpectedCs |= i;
753 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
754 }
755 }
756 }
757
758 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
759 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
760 }
761 }
762 }
763 BS3_ASSERT(g_usBs3TestStep < 1600);
764
765 /*
766 * Various CS and SS related faults
767 *
768 * We temporarily reconfigure gate 80 and 83 with new CS selectors, the
769 * latter have a CS.DPL of 2 for testing ring transitions and SS loading
770 * without making it impossible to handle faults.
771 */
772 g_usBs3TestStep = 1600;
773 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
774 Bs3GdteTestPage00.Gen.u1Present = 0;
775 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
776 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
777
778 /* CS.PRESENT = 0 */
779 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
780 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
781 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
782 bs3CpuBasic2_FailedF("selector was accessed");
783 g_usBs3TestStep++;
784
785 /* Check that GATE.DPL is checked before CS.PRESENT. */
786 for (iRing = 1; iRing < 4; iRing++)
787 {
788 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
789 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
790 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
791 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
792 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
793 bs3CpuBasic2_FailedF("selector was accessed");
794 g_usBs3TestStep++;
795 }
796
797 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
798 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
799 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
800 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
801 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
802 bs3CpuBasic2_FailedF("CS selector was accessed");
803 g_usBs3TestStep++;
804 for (iDpl = 1; iDpl < 4; iDpl++)
805 {
806 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
807 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
808 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
809 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
810 bs3CpuBasic2_FailedF("CS selector was accessed");
811 g_usBs3TestStep++;
812 }
813
814 /* 1608: Check all the invalid CS selector types alone. */
815 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
816 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
817 {
818 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
819 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
820 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
821 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
822 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
823 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
824 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
825 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
826 g_usBs3TestStep++;
827
828 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
829 Bs3GdteTestPage00.Gen.u1Present = 0;
830 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
831 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
832 Bs3GdteTestPage00.Gen.u1Present = 1;
833 g_usBs3TestStep++;
834 }
835
836 /* Fix CS again. */
837 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
838
839 /* 1632: Test SS. */
840 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
841 {
842 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
843 uint16_t const uSavedSs2 = *puTssSs2;
844 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
845
846 /* Make the handler execute in ring-2. */
847 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
848 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
849 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
850
851 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
852 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
853 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
854 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
855 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
856 bs3CpuBasic2_FailedF("CS selector was not access");
857 g_usBs3TestStep++;
858
859 /* Create a SS.DPL=2 stack segment and check that SS2.RPL matters and
860 that we get #SS if the selector isn't present. */
861 i = 0; /* used for cycling thru invalid CS types */
862 for (k = 0; k < 10; k++)
863 {
864 /* k=0: present,
865 k=1: not-present,
866 k=2: present but very low limit,
867 k=3: not-present, low limit.
868 k=4: present, read-only.
869 k=5: not-present, read-only.
870 k=6: present, code-selector.
871 k=7: not-present, code-selector.
872 k=8: present, read-write / no access + system (=LDT).
873 k=9: not-present, read-write / no access + system (=LDT).
874 */
875 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
876 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
877 if (k >= 8)
878 {
879 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
880 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
881 }
882 else if (k >= 6)
883 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
884 else if (k >= 4)
885 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
886 else if (k >= 2)
887 {
888 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
889 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
890 Bs3GdteTestPage03.Gen.u1Granularity = 0;
891 }
892
893 for (iDpl = 0; iDpl < 4; iDpl++)
894 {
895 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
896
897 for (iRpl = 0; iRpl < 4; iRpl++)
898 {
899 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
900 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
901 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
902 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
903 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
904 if (iRpl != 2 || iRpl != iDpl || k >= 4)
905 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
906 else if (k != 0)
907 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
908 k == 2 /*f486ResumeFlagHint*/);
909 else
910 {
911 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
912 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
913 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
914 }
915 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
916 bs3CpuBasic2_FailedF("CS selector was not access");
917 if ( TrapCtx.bXcpt == 0x83
918 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
919 {
920 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
921 bs3CpuBasic2_FailedF("SS selector was not accessed");
922 }
923 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
924 bs3CpuBasic2_FailedF("SS selector was accessed");
925 g_usBs3TestStep++;
926
927 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
928 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
929 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
930 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
931 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
932 g_usBs3TestStep++;
933
934 /* +2: Check the CS.DPL check is done before the SS ones. Restoring the
935 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
936 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
937 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
938 g_usBs3TestStep++;
939
940 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
941 Bs3GdteTestPage02.Gen.u1Present = 0;
942 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
943 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
944 Bs3GdteTestPage02.Gen.u1Present = 1;
945 g_usBs3TestStep++;
946
947 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
948 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
949 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
950 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
951 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
952 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
953 Bs3GdteTestPage02.Gen.u1DescType = 1;
954 g_usBs3TestStep++;
955
956 /* +5: Now, make the CS selector limit too small and that it triggers after SS trouble.
957 The 286 had a simpler approach to these GP(0). */
958 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
959 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
960 Bs3GdteTestPage02.Gen.u1Granularity = 0;
961 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
962 if (f286)
963 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
964 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
965 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
966 else if (k != 0)
967 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
968 else
969 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
970 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
971 g_usBs3TestStep++;
972 }
973 }
974 }
975
976 /* Check all the invalid SS selector types alone. */
977 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
978 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
979 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
980 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
981 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
982 g_usBs3TestStep++;
983 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
984 {
985 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
986 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
987 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
988 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
989 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
990 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
991 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
992 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
993 g_usBs3TestStep++;
994 }
995
996 /*
997 * Continue the SS experiments with a expand down segment. We'll use
998 * the same setup as we already have with gate 83h being DPL and
999 * having CS.DPL=2.
1000 *
1001 * Expand down segments are weird. The valid area is practically speaking
1002 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
1003 * addresses from 0xffff thru 0x6001.
1004 *
1005 * So, with expand down segments we can more easily cut partially into the
1006 * pushing of the iret frame and trigger more interesting behavior than
1007 * with regular "expand up" segments where the whole pushing area is either
1008 * all fine or not fine.
1009 */
1010 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1011 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1012 Bs3GdteTestPage03.Gen.u2Dpl = 2;
1013 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
1014 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1015
1016 /* First test, limit = max --> no bytes accessible --> #GP */
1017 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1018 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1019
1020 /* Second test, limit = 0 --> all but byte zero accessible --> works */
1021 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
1022 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
1023 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1024 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1025
1026 /* Modify the gate handler to be a dummy that immediately does UD2
1027 and triggers #UD, then advance the limit down till we get the #UD. */
1028 Bs3GdteTestPage03.Gen.u1Granularity = 0;
1029
1030 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
1031 if (g_f16BitSys)
1032 {
1033 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
1034 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
1035 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
1036 }
1037 else
1038 {
1039 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
1040 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
1041 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1042 }
1043 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1044 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1045 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1046 CtxTmp2.bCpl = 2;
1047
1048 /* test run. */
1049 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1050 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1051 g_usBs3TestStep++;
1052
1053 /* Real run. */
1054 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1055 while (i-- > 0)
1056 {
1057 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1058 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1059 if (i > 0)
1060 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1061 else
1062 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1063 g_usBs3TestStep++;
1064 }
1065
1066 /* Do a run where we do the same-ring kind of access. */
1067 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1068 if (g_f16BitSys)
1069 {
1070 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1071 i = 2*3 - 1;
1072 }
1073 else
1074 {
1075 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1076 i = 4*3 - 1;
1077 }
1078 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1079 CtxTmp2.ds = CtxTmp.ds;
1080 CtxTmp2.es = CtxTmp.es;
1081 CtxTmp2.fs = CtxTmp.fs;
1082 CtxTmp2.gs = CtxTmp.gs;
1083 while (i-- > 0)
1084 {
1085 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1086 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1087 if (i > 0)
1088 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1089 else
1090 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1091 g_usBs3TestStep++;
1092 }
1093
1094 *puTssSs2 = uSavedSs2;
1095 paIdt[0x83 << cIdteShift] = SavedGate83;
1096 }
1097 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1098 BS3_ASSERT(g_usBs3TestStep < 3000);
1099
1100 /*
1101 * Modify the gate CS value with a conforming segment.
1102 */
1103 g_usBs3TestStep = 3000;
1104 for (i = 0; i <= 3; i++) /* cs.dpl */
1105 {
1106 for (iRing = 0; iRing <= 3; iRing++)
1107 {
1108 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1109 {
1110 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1111 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1112# if TMPL_BITS == 32
1113 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1114# endif
1115
1116 for (j = 0; j <= 3; j++) /* rpl */
1117 {
1118 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1119 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1120 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1121 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1122 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1123 /*Bs3TrapPrintFrame(&TrapCtx);*/
1124 g_usBs3TestStep++;
1125 if (iCtx < iRing)
1126 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1127 else if (i > iRing)
1128 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1129 else
1130 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1131 }
1132 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1133 }
1134 }
1135 }
1136 BS3_ASSERT(g_usBs3TestStep < 3500);
1137
1138 /*
1139 * The gates must be 64-bit in long mode.
1140 */
1141 if (cIdteShift != 0)
1142 {
1143 g_usBs3TestStep = 3500;
1144 for (i = 0; i <= 3; i++)
1145 {
1146 for (iRing = 0; iRing <= 3; iRing++)
1147 {
1148 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1149 {
1150 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1151 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1152
1153 for (j = 0; j < 2; j++)
1154 {
1155 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1156 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1157 g_usBs3TestStep++;
1158 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1159 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1160 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1161 /*Bs3TrapPrintFrame(&TrapCtx);*/
1162 if (iCtx < iRing)
1163 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1164 else
1165 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1166 }
1167 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1168 }
1169 }
1170 }
1171 BS3_ASSERT(g_usBs3TestStep < 4000);
1172 }
1173
1174 /*
1175 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1176 */
1177 g_usBs3TestStep = 5000;
1178 i = (0x80 << (cIdteShift + 3)) - 1;
1179 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1180 k = (0x83 << (cIdteShift + 3)) - 1;
1181 for (; i <= k; i++, g_usBs3TestStep++)
1182 {
1183 Idtr = IdtrSaved;
1184 Idtr.cbIdt = i;
1185 ASMSetIDTR(&Idtr);
1186 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1187 if (i < j)
1188 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1189 else
1190 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1191 }
1192 ASMSetIDTR(&IdtrSaved);
1193 BS3_ASSERT(g_usBs3TestStep < 5100);
1194
1195# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1196
1197 /*
1198 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1199 * first page and 0x81 is on the second page. We need proceed to move
1200 * it down byte by byte to check that any inaccessible byte means #PF.
1201 *
1202 * Note! We must reload the alternative IDTR for each run as any kind of
1203 * printing to the string (like error reporting) will cause a switch
1204 * to real mode and back, reloading the default IDTR.
1205 */
1206 g_usBs3TestStep = 5200;
1207 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1208 {
1209 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1210 for (j = 0; j < cbIdte; j++)
1211 {
1212 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1213 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1214
1215 Idtr.cbIdt = IdtrSaved.cbIdt;
1216 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1217
1218 ASMSetIDTR(&Idtr);
1219 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1220 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1221 g_usBs3TestStep++;
1222
1223 ASMSetIDTR(&Idtr);
1224 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1225 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1226 g_usBs3TestStep++;
1227
1228 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1229 if (RT_SUCCESS(rc))
1230 {
1231 ASMSetIDTR(&Idtr);
1232 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1233 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1234 g_usBs3TestStep++;
1235
1236 ASMSetIDTR(&Idtr);
1237 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1238 if (f486Plus)
1239 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1240 else
1241 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1242 g_usBs3TestStep++;
1243
1244 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1245
1246 /* Check if that the entry type is checked after the whole IDTE has been cleared for #PF. */
1247 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1248 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1249 if (RT_SUCCESS(rc))
1250 {
1251 ASMSetIDTR(&Idtr);
1252 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1253 if (f486Plus)
1254 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1255 else
1256 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1257 g_usBs3TestStep++;
1258
1259 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1260 }
1261 }
1262 else
1263 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1264
1265 ASMSetIDTR(&IdtrSaved);
1266 }
1267 }
1268
1269 /*
1270 * The read/write and user/supervisor bits the IDT PTEs are irrelevant.
1271 */
1272 g_usBs3TestStep = 5300;
1273 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1274 {
1275 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1276 Idtr.cbIdt = IdtrSaved.cbIdt;
1277 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1278
1279 ASMSetIDTR(&Idtr);
1280 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1281 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1282 g_usBs3TestStep++;
1283
1284 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1285 if (RT_SUCCESS(rc))
1286 {
1287 ASMSetIDTR(&Idtr);
1288 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1289 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1290 g_usBs3TestStep++;
1291
1292 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1293 }
1294 ASMSetIDTR(&IdtrSaved);
1295 }
1296
1297 /*
1298 * Check that CS.u1Accessed is set to 1. Use the test page selector #0 and #3 together
1299 * with interrupt gates 80h and 83h, respectively.
1300 */
1301/** @todo Throw in SS.u1Accessed too. */
1302 g_usBs3TestStep = 5400;
1303 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1304 {
1305 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1306 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1307 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1308
1309 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1310 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1311 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1312
1313 /* Check that the CS.A bit is being set on a general basis and that
1314 the special CS values work with our generic handler code. */
1315 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1316 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1317 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1318 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1319 g_usBs3TestStep++;
1320
1321 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1322 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1323 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1324 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1325 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1326 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1327 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1328 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1329 g_usBs3TestStep++;
1330
1331 /*
1332 * Now check that setting CS.u1Access to 1 does __NOT__ trigger a page
1333 * fault due to the RW bit being zero.
1334 * (We check both with and without the WP bit if 80486.)
1335 */
1336 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1337 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1338
1339 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1340 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1341 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1342 if (RT_SUCCESS(rc))
1343 {
1344 /* ring-0 handler */
1345 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1346 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1347 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1348 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1349 g_usBs3TestStep++;
1350
1351 /* ring-3 handler */
1352 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1353 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1354 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1355 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1356 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1357 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1358 g_usBs3TestStep++;
1359
1360 /* clear WP and repeat the above. */
1361 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1362 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1363 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1364 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1365
1366 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1367 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1368 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1369 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1370 g_usBs3TestStep++;
1371
1372 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1373 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1374 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1375 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!n", Bs3GdteTestPage03.Gen.u4Type);
1376 g_usBs3TestStep++;
1377
1378 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1379 }
1380
1381 ASMSetCR0(uCr0Saved);
1382
1383 /*
1384 * While we're here, check that if the CS GDT entry is a non-present
1385 * page we do get a #PF with the right error code and CR2.
1386 */
1387 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1388 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1389 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1390 if (RT_SUCCESS(rc))
1391 {
1392 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1393 if (f486Plus)
1394 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1395 else
1396 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1397 g_usBs3TestStep++;
1398
1399 /* Do it from ring-3 to check ErrCd, which doesn't set X86_TRAP_PF_US it turns out. */
1400 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1401 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1402 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1403
1404 if (f486Plus)
1405 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1406 else
1407 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1408 g_usBs3TestStep++;
1409
1410 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1411 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1412 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1413 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1414 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1415 }
1416
1417 /* restore */
1418 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1419 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1420 }
1421
1422# endif /* 32 || 64*/
1423
1424 /*
1425 * Check broad EFLAGS effects.
1426 */
1427 g_usBs3TestStep = 5600;
1428 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1429 {
1430 for (iRing = 0; iRing < 4; iRing++)
1431 {
1432 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1433 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1434
1435 /* all set */
1436 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1437 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1438 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1439 if (f486Plus)
1440 CtxTmp.rflags.u32 |= X86_EFL_AC;
1441 if (f486Plus && !g_f16BitSys)
1442 CtxTmp.rflags.u32 |= X86_EFL_RF;
1443 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1444 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1445 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1446 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1447
1448 if (iCtx >= iRing)
1449 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1450 else
1451 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1452 uExpected = CtxTmp.rflags.u32
1453 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1454 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1455 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1456 if (TrapCtx.fHandlerRfl != uExpected)
1457 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1458 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1459 g_usBs3TestStep++;
1460
1461 /* all cleared */
1462 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1463 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1464 else
1465 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1466 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1467 if (iCtx >= iRing)
1468 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1469 else
1470 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1471 uExpected = CtxTmp.rflags.u32;
1472 if (TrapCtx.fHandlerRfl != uExpected)
1473 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1474 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1475 g_usBs3TestStep++;
1476 }
1477 }
1478
1479/** @todo CS.LIMIT / canonical(CS) */
1480
1481
1482 /*
1483 * Check invalid gate types.
1484 */
1485 g_usBs3TestStep = 32000;
1486 for (iRing = 0; iRing <= 3; iRing++)
1487 {
1488 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1489 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1490 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1491 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1492 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1493 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1494 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1495 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1496 /*286:*/ 12, 14, 15 };
1497 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1498 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1499 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1500
1501
1502 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1503 {
1504 unsigned iType;
1505
1506 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1507 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1508# if TMPL_BITS == 32
1509 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1510# endif
1511 for (iType = 0; iType < cInvTypes; iType++)
1512 {
1513 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1514 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1515 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1516
1517 for (i = 0; i < 4; i++)
1518 {
1519 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1520 {
1521 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1522 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1523 : s_auCSes[j] | i;
1524 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1525 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1526 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1527 g_usBs3TestStep++;
1528 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1529
1530 /* Mark it not-present to check that invalid type takes precedence. */
1531 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1532 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1533 g_usBs3TestStep++;
1534 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1535 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1536 }
1537 }
1538
1539 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1540 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1541 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1542 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1543 }
1544 }
1545 }
1546 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1547
1548
1549 /** @todo
1550 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1551 * - Quickly generate all faults.
1552 * - All the peculiarities v8086.
1553 */
1554
1555# if TMPL_BITS != 16
1556 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1557# endif
1558}
1559#endif /* convert me */
1560
1561
/**
 * Worker for bs3CpuBasic2_RaiseXcpt11 that runs the common-mode \#AC test
 * snippets against a buffer under one CR0.AM / paging configuration.
 *
 * Iterates: ring (0..3, ring 3 only for v8086, ring 0 only for RM) -> EFLAGS.AC
 * on/off -> each test snippet in pCmn -> every buffer misalignment offset up to
 * a cache line + operand size (to also cover the split-lock case), classifying
 * the expected outcome as \#GP, \#PF, \#UD (success) or \#AC for each run.
 *
 * @param bMode       The CPU mode being tested (BS3_MODE_XXX).
 * @param pbBuf       The test buffer the ds:xBX memory operand points at.
 * @param cbCacheLine The cache line size; misalignment loop bound uses it so
 *                    accesses cross a cache line boundary.
 * @param fAm         Whether the caller has set CR0.AM for this round.
 * @param fPf         Whether pbBuf is an alias that ring-3 cannot access, so
 *                    ring-3 runs are expected to \#PF (rounds 3+4 in caller).
 * @param uFlatBufPtr Flat address of the buffer, used for the expected \#PF
 *                    fault address (CR2) checks.
 * @param pCmn        Table of test snippets for the current code bitness.
 */
1562static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm, bool fPf,
1563                                           RTCCUINTXREG uFlatBufPtr, BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
1564{
1565    BS3TRAPFRAME TrapCtx;
1566    BS3REGCTX Ctx;
1567    BS3REGCTX CtxUdExpected;
1568    uint8_t const cRings = bMode == BS3_MODE_RM ? 1 : 4;
1569    uint8_t iRing;
1570    uint16_t iTest;
1571
1572    /* make sure they're allocated */
1573    Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1574    Bs3MemZero(&Ctx, sizeof(Ctx));
1575    Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1576
1577    /*
1578     * Test all relevant rings.
1579     *
1580     * The memory operand is ds:xBX, so point it to pbBuf.
1581     * The test snippets mostly use xAX as operand, with the div
1582     * one also using xDX, so make sure they make some sense.
1583     */
1584    Bs3RegCtxSaveEx(&Ctx, bMode, 512); /* 512 = extra stack space for the snippets */
1585
1586    Ctx.cr0.u32 &= ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS); /* so fninit + fld works */
1587
1588    for (iRing = BS3_MODE_IS_V86(bMode) ? 3 : 0; iRing < cRings; iRing++)
1589    {
1590        uint32_t uEbx;
1591        uint8_t fAc;
1592
1593        /* Note: in RM/V86 the context must not be ring-converted (see r95431 fix). */
1594        if (!BS3_MODE_IS_RM_OR_V86(bMode))
1595            Bs3RegCtxConvertToRingX(&Ctx, iRing);
1595
1596        if (!fPf || BS3_MODE_IS_32BIT_CODE(bMode) || BS3_MODE_IS_64BIT_CODE(bMode))
1597            Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
1598        else
1599        {
1600            /* Bs3RegCtxSetGrpDsFromCurPtr barfs when trying to output a sel:off address for the aliased buffer. */
1601            Ctx.ds = BS3_FP_SEG(pbBuf);
1602            Ctx.rbx.u32 = BS3_FP_OFF(pbBuf);
1603        }
1604        uEbx = Ctx.rbx.u32; /* remember the base xBX value; offMem is added per iteration below */
1605
1606        Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
1607                  ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
1608        Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */
1609
1610        Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1611
1612        /*
1613         * AC flag loop.
1614         */
1615        for (fAc = 0; fAc < 2; fAc++)
1616        {
1617            if (fAc)
1618                Ctx.rflags.u32 |= X86_EFL_AC;
1619            else
1620                Ctx.rflags.u32 &= ~X86_EFL_AC;
1621
1622            /*
1623             * Loop over the test snippets.
1624             */
1625            for (iTest = 0; iTest < pCmn->cEntries; iTest++)
1626            {
1627                uint8_t const fOp = pCmn->paEntries[iTest].fOp;
1628                uint16_t const cbMem = pCmn->paEntries[iTest].cbMem;
1629                uint8_t const cbAlign = pCmn->paEntries[iTest].cbAlign;
1630                uint16_t const cbMax = cbCacheLine + cbMem;
1631                uint16_t offMem;
                /* NOTE(review): the byte immediately preceding the snippet entry point
                   appears to hold the instruction length, i.e. the offset to the
                   trailing UD2 used for the expected resume RIP — presumably emitted
                   by the snippet assembly; confirm against the .asm templates. */
1632                uint8_t BS3_FAR *poffUd = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
1633                Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
1634                CtxUdExpected.rip = Ctx.rip;
1635                CtxUdExpected.rip.u = Ctx.rip.u + poffUd[-1];
1636                CtxUdExpected.cs = Ctx.cs;
1637                CtxUdExpected.rflags = Ctx.rflags;
1638                if (bMode == BS3_MODE_RM)
1639                    CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? Observed with bs3-cpu-instr-3 too (10980xe), seems to be the CPU doing it. */
1640                CtxUdExpected.rdx = Ctx.rdx;
1641                CtxUdExpected.rax = Ctx.rax;
1642                if (fOp & MYOP_LD)
1643                {
                    /* Load snippets read from the buffer (filled with 0x01 bytes below),
                       so the expected xAX value is all 0x01 bytes of the operand size. */
1644                    switch (cbMem)
1645                    {
1646                        case 2:
1647                            CtxUdExpected.rax.u16 = 0x0101;
1648                            break;
1649                        case 4:
1650                            CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
1651                            break;
1652                        case 8:
1653                            CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
1654                            break;
1655                    }
1656                }
1657
1658                /*
1659                 * Buffer misalignment loop.
1660                 * Note! We must make sure to cross a cache line here to make sure
1661                 *       to cover the split-lock scenario. (The buffer is cache
1662                 *       line aligned.)
1663                 */
1664                for (offMem = 0; offMem < cbMax; offMem++)
1665                {
1666                    bool const fMisaligned = (offMem & (cbAlign - 1)) != 0;
1667                    unsigned offBuf = cbMax + cbMem * 2;
1668                    while (offBuf-- > 0)
1669                        pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */
1670
1671                    CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB. */
1672                    if (BS3_MODE_IS_16BIT_SYS(bMode))
1673                        g_uBs3TrapEipHint = Ctx.rip.u32;
1674
1675                    //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32 ss:esp=%04RX16:%08RX32 bXcpt=%#x errcd=%#x fAm=%d fAc=%d ESP=%#RX32\n",
1676                    // iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32, Ctx.ss, Ctx.rsp.u32, TrapCtx.bXcpt, (unsigned)TrapCtx.uErrCd, fAm, fAc, ASMGetESP());
1677
1678                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1679
                    /*
                     * Classify the expected outcome:
                     *   - #GP(0): ops flagged MYOP_AC_GP on misaligned access (unless
                     *             #AC takes precedence; the (offMem & 3) term is a
                     *             10980XE quirk observed on real hardware);
                     *   - #PF:    ring-3 access through the supervisor-only alias,
                     *             unless #AC fires first ("#AC beats #PF");
                     *   - #UD:    success - execution stops at the trailing UD2;
                     *   - #AC:    ring-3 + CR0.AM + EFLAGS.AC + misaligned access.
                     */
1680                    if ( (pCmn->paEntries[iTest].fOp & MYOP_AC_GP)
1681                        && fMisaligned
1682                        && (!fAm || iRing != 3 || !fAc || (offMem & 3 /* 10980XE */) == 0) )
1683                    {
1684                        if (fAc && bMode == BS3_MODE_RM)
1685                            TrapCtx.Ctx.rflags.u32 |= X86_EFL_AC; /* see the RM @todo above */
1686                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1687                    }
1688                    else if (fPf && iRing == 3 && (!fAm || !fAc || !fMisaligned)) /* #AC beats #PF */
1689                        bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx,
1690                                                  X86_TRAP_PF_P | X86_TRAP_PF_US
1691                                                  | (pCmn->paEntries[iTest].fOp & MYOP_ST ? X86_TRAP_PF_RW : 0),
1692                                                  uFlatBufPtr + offMem + (cbMem > 64 ? cbMem - 1 /*FXSAVE*/ : 0),
1693                                                  pCmn->paEntries[iTest].offFaultInstr);
1694                    else if (!fAm || iRing != 3 || !fAc || !fMisaligned)
1695                    {
1696                        if (fOp & MYOP_EFL)
1697                        {
                            /* Arithmetic status flags aren't part of the contract here;
                               copy them from the actual result before comparing. */
1698                            CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
1699                            CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
1700                        }
1701                        if (fOp == MYOP_LD_DIV)
1702                        {
                            /* The div snippet clobbers xAX/xDX with the quotient/remainder;
                               take the actual values rather than recomputing them. */
1703                            CtxUdExpected.rax = TrapCtx.Ctx.rax;
1704                            CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
1705                        }
1706                        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1707                    }
1708                    else
1709                        bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx, pCmn->paEntries[iTest].offFaultInstr);
1710
1711                    g_usBs3TestStep++;
1712                }
1713            }
1714        }
1715    }
1716}
1717
1718
1719/**
1720 * Entrypoint for \#AC tests.
1721 *
1722 * @returns 0 or BS3TESTDOMODE_SKIPPED.
1723 * @param bMode The CPU mode we're testing.
1724 *
1725 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
1726 * with control registers and such.
1727 */
1728BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
1729{
1730    unsigned cbCacheLine = 128; /** @todo detect */
1731    uint8_t BS3_FAR *pbBufAlloc;
1732    uint8_t BS3_FAR *pbBuf;     /* page aligned cursor into pbBufAlloc */
1733    unsigned idxCmnModes;
1734    uint32_t fCr0;              /* CR0 on entry, restored before returning */
1735
1736    /*
1737     * Skip if 386 or older.
1738     */
1739    if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
1740    {
1741        Bs3TestSkipped("#AC test requires 486 or later");
1742        return BS3TESTDOMODE_SKIPPED;
1743    }
1744
1745    bs3CpuBasic2_SetGlobals(bMode);
1746
1747    /* Get us a page aligned buffer: allocate two pages and round the pointer
       up to the next page boundary if needed. */
1748    pbBufAlloc = pbBuf = Bs3MemAllocZ(BS3_MODE_IS_RM_OR_V86(bMode) ? BS3MEMKIND_REAL : BS3MEMKIND_TILED, X86_PAGE_SIZE * 2);
1749    if (!pbBufAlloc)
1750        return Bs3TestFailed("Failed to allocate 2 pages of real-mode memory");
1751    if (BS3_FP_OFF(pbBuf) & (X86_PAGE_SIZE - 1))
1752        pbBuf = &pbBufAlloc[X86_PAGE_SIZE - (BS3_FP_OFF(pbBuf) & X86_PAGE_OFFSET_MASK)];
1753    BS3_ASSERT(pbBuf - pbBufAlloc <= X86_PAGE_SIZE);
1754    //Bs3TestPrintf("pbBuf=%p\n", pbBuf);
1755
1756    /* Find the g_aCmnModes entry. */
1757    idxCmnModes = 0;
1758    while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
1759        idxCmnModes++;
1760    //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);
1761
1762    /* First round is w/o alignment checks enabled. */
1763    //Bs3TestPrintf("round 1\n");
1764    fCr0 = Bs3RegGetCr0();
1765    BS3_ASSERT(!(fCr0 & X86_CR0_AM));
1766    Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
1767#if 1
1768    bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1769#endif
1770
1771    /* The second round is with alignment checks enabled. */
1772#if 1
1773    //Bs3TestPrintf("round 2\n");
1774    Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1775    bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1776#endif
1777
1778#if 1
1779    /* The third and fourth round access the buffer via a page alias that's not
1780       accessible from ring-3. The third round has ACs disabled and the fourth
1781       has them enabled. */
1782    if (BS3_MODE_IS_PAGED(bMode) && !BS3_MODE_IS_V86(bMode))
1783    {
1784        /* Alias the buffer as system memory so ring-3 access with AC+AM will cause #PF: */
1785        /** @todo the aliasing is not necessary any more... */
1786        int rc;
1787        RTCCUINTXREG uFlatBufPtr = Bs3SelPtrToFlat(pbBuf);
1788        uint64_t const uAliasPgPtr = bMode & BS3_MODE_CODE_64 ? UINT64_C(0x0000648680000000) : UINT32_C(0x80000000);
        /* Note: no X86_PTE_US in the PTE flags, so the alias is supervisor-only
           and ring-3 accesses through it will #PF. */
1789        rc = Bs3PagingAlias(uAliasPgPtr, uFlatBufPtr & ~(uint64_t)X86_PAGE_OFFSET_MASK, X86_PAGE_SIZE * 2,
1790                            X86_PTE_P | X86_PTE_RW);
1791        if (RT_SUCCESS(rc))
1792        {
1793            /* We 'misalign' the segment base here to make sure it's the final
1794               address that gets alignment checked and not just the operand value. */
            /* (Segment base is uAliasPgPtr - 1 while the far offset is +1, so the
               effective flat address is unchanged but base and offset are both odd.) */
1795            RTCCUINTXREG uAliasBufPtr = (RTCCUINTXREG)uAliasPgPtr + (uFlatBufPtr & X86_PAGE_OFFSET_MASK);
1796            uint8_t BS3_FAR *pbBufAlias = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, (uFlatBufPtr & X86_PAGE_OFFSET_MASK) + 1);
1797            Bs3SelSetup16BitData(&Bs3GdteSpare00, uAliasPgPtr - 1);
1798
1799            //Bs3TestPrintf("round 3 pbBufAlias=%p\n", pbBufAlias);
1800            Bs3RegSetCr0(Bs3RegGetCr0() & ~X86_CR0_AM);
1801            bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, false /*fAm*/,
1802                                           true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1803
1804            //Bs3TestPrintf("round 4\n");
1805            Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1806            bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, true /*fAm*/,
1807                                           true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1808
1809            Bs3PagingUnalias(uAliasPgPtr, X86_PAGE_SIZE * 2);
1810        }
1811        else
1812            Bs3TestFailedF("Bs3PagingAlias failed with %Rrc", rc);
1813    }
1814#endif
1815
1816    Bs3MemFree(pbBufAlloc, X86_PAGE_SIZE * 2);
1817    Bs3RegSetCr0(fCr0);
1818    return 0;
1819}
1820
1821
1822/**
1823 * Executes one round of SIDT and SGDT tests using one assembly worker.
1824 *
1825 * This is written with driving everything from the 16-bit or 32-bit worker in
1826 * mind, i.e. not assuming the test bitcount is the same as the current.
1827 */
1828static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1829 uint8_t const *pbExpected)
1830{
1831 BS3TRAPFRAME TrapCtx;
1832 BS3REGCTX Ctx;
1833 BS3REGCTX CtxUdExpected;
1834 BS3REGCTX TmpCtx;
1835 uint8_t const cbBuf = 8*2; /* test buffer area */
1836 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1837 uint8_t BS3_FAR *pbBuf = abBuf;
1838 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1839 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1840 uint8_t bFiller;
1841 int off;
1842 int off2;
1843 unsigned cb;
1844 uint8_t BS3_FAR *pbTest;
1845
1846 /* make sure they're allocated */
1847 Bs3MemZero(&Ctx, sizeof(Ctx));
1848 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1849 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1850 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1851 Bs3MemZero(&abBuf, sizeof(abBuf));
1852
1853 /* Create a context, give this routine some more stack space, point the context
1854 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1855 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1856 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1857 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1858 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1859 g_uBs3TrapEipHint = Ctx.rip.u32;
1860 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1861 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1862
1863 /* For successful SIDT attempts, we'll stop at the UD2. */
1864 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1865 CtxUdExpected.rip.u += pWorker->cbInstr;
1866
1867 /*
1868 * Check that it works at all and that only bytes we expect gets written to.
1869 */
1870 /* First with zero buffer. */
1871 Bs3MemZero(abBuf, sizeof(abBuf));
1872 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1873 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1874 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1875 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1876 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1877 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1878 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1879 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1880 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1881 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1882 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1883 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1884 g_usBs3TestStep++;
1885
1886 /* Again with a buffer filled with a byte not occuring in the previous result. */
1887 bFiller = 0x55;
1888 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1889 bFiller++;
1890 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1891 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1892 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1893
1894 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1895 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1896 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1897 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1898 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1899 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1900 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1901 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1902 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1903 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1904 g_usBs3TestStep++;
1905
1906 /*
1907 * Slide the buffer along 8 bytes to cover misalignment.
1908 */
1909 for (off = 0; off < 8; off++)
1910 {
1911 pbBuf = &abBuf[off];
1912 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1913 CtxUdExpected.rbx.u = Ctx.rbx.u;
1914
1915 /* First with zero buffer. */
1916 Bs3MemZero(abBuf, sizeof(abBuf));
1917 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1918 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1919 if (off > 0 && !ASMMemIsZero(abBuf, off))
1920 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1921 cbIdtr, off, off + cbBuf, abBuf);
1922 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1923 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1924 cbIdtr, off, off + cbBuf, abBuf);
1925 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1926 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1927 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1928 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1929 g_usBs3TestStep++;
1930
1931 /* Again with a buffer filled with a byte not occuring in the previous result. */
1932 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1933 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1934 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1935 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1936 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1937 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1938 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1939 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1940 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1941 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1942 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1943 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1944 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1945 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
1946 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1947 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1948 g_usBs3TestStep++;
1949 }
1950 pbBuf = abBuf;
1951 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1952 CtxUdExpected.rbx.u = Ctx.rbx.u;
1953
1954 /*
1955 * Play with the selector limit if the target mode supports limit checking
1956 * We use BS3_SEL_TEST_PAGE_00 for this
1957 */
1958 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
1959 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
1960 {
1961 uint16_t cbLimit;
1962 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
1963 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
1964 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
1965 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
1966 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
1967 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
1968
1969 if (pWorker->fSs)
1970 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
1971 else
1972 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
1973
1974 /* Expand up (normal). */
1975 for (off = 0; off < 8; off++)
1976 {
1977 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
1978 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
1979 {
1980 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
1981 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1982 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1983 if (off + cbIdtr <= cbLimit + 1)
1984 {
1985 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1986 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1987 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1988 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1989 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1990 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1991 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1992 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
1993 }
1994 else
1995 {
1996 if (pWorker->fSs)
1997 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
1998 else
1999 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2000 if (off + 2 <= cbLimit + 1)
2001 {
2002 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2003 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2004 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2005 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2006 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2007 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2008 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2009 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2010 }
2011 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2012 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2013 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2014 }
2015
2016 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2017 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2018 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2019 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2020 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2021 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2022
2023 g_usBs3TestStep++;
2024 }
2025 }
2026
2027 /* Expand down (weird). Inverted valid area compared to expand up,
2028 so a limit of zero give us a valid range for 0001..0ffffh (instead of
2029 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2030 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2031 (because in a normal expand up the 0ffffh means all 64KB are
2032 accessible). */
2033 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2034 for (off = 0; off < 8; off++)
2035 {
2036 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2037 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2038 {
2039 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2040 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2041 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2042
2043 if (off > cbLimit)
2044 {
2045 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2046 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2047 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2048 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2049 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2050 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2051 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2052 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2053 }
2054 else
2055 {
2056 if (pWorker->fSs)
2057 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2058 else
2059 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2060 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2061 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2062 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2063 }
2064
2065 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2066 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2067 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2068 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2069 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2070 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2071
2072 g_usBs3TestStep++;
2073 }
2074 }
2075
2076 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2077 CtxUdExpected.rbx.u = Ctx.rbx.u;
2078 CtxUdExpected.ss = Ctx.ss;
2079 CtxUdExpected.ds = Ctx.ds;
2080 }
2081
2082 /*
2083 * Play with the paging.
2084 */
2085 if ( BS3_MODE_IS_PAGED(bTestMode)
2086 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2087 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2088 {
2089 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2090
2091 /*
2092 * Slide the buffer towards the trailing guard page. We'll observe the
2093 * first word being written entirely separately from the 2nd dword/qword.
2094 */
2095 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2096 {
2097 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2098 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2099 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2100 if (off + cbIdtr <= X86_PAGE_SIZE)
2101 {
2102 CtxUdExpected.rbx = Ctx.rbx;
2103 CtxUdExpected.ss = Ctx.ss;
2104 CtxUdExpected.ds = Ctx.ds;
2105 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2106 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2107 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2108 }
2109 else
2110 {
2111 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2112 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2113 if ( off <= X86_PAGE_SIZE - 2
2114 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2115 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2116 pbExpected, &pbTest[off], off);
2117 if ( off < X86_PAGE_SIZE - 2
2118 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2119 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2120 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2121 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2122 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2123 }
2124 g_usBs3TestStep++;
2125 }
2126
2127 /*
2128 * Now, do it the other way around. It should look normal now since writing
2129 * the limit will #PF first and nothing should be written.
2130 */
2131 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2132 {
2133 Bs3MemSet(pbTest, bFiller, 48);
2134 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2135 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2136 if (off >= 0)
2137 {
2138 CtxUdExpected.rbx = Ctx.rbx;
2139 CtxUdExpected.ss = Ctx.ss;
2140 CtxUdExpected.ds = Ctx.ds;
2141 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2142 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2143 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2144 }
2145 else
2146 {
2147 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2148 uFlatTest + off, 0 /*cbIpAdjust*/);
2149 if ( -off < cbIdtr
2150 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2151 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2152 bFiller, cbIdtr + off, pbTest, off);
2153 }
2154 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2155 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2156 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2157 g_usBs3TestStep++;
2158 }
2159
2160 /*
2161 * Combine paging and segment limit and check ordering.
2162 * This is kind of interesting here since it the instruction seems to
2163 * be doing two separate writes.
2164 */
2165 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2166 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2167 {
2168 uint16_t cbLimit;
2169
2170 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2171 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2172 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2173 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2174 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2175
2176 if (pWorker->fSs)
2177 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2178 else
2179 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2180
2181 /* Expand up (normal), approaching tail guard page. */
2182 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2183 {
2184 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2185 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2186 {
2187 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2188 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2189 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2190 if (off + cbIdtr <= cbLimit + 1)
2191 {
2192 /* No #GP, but maybe #PF. */
2193 if (off + cbIdtr <= X86_PAGE_SIZE)
2194 {
2195 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2196 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2197 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2198 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2199 }
2200 else
2201 {
2202 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2203 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2204 if ( off <= X86_PAGE_SIZE - 2
2205 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2206 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2207 pbExpected, &pbTest[off], off);
2208 cb = X86_PAGE_SIZE - off - 2;
2209 if ( off < X86_PAGE_SIZE - 2
2210 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2211 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2212 bFiller, cb, &pbTest[off + 2], off);
2213 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2214 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2215 }
2216 }
2217 else if (off + 2 <= cbLimit + 1)
2218 {
2219 /* [ig]tr.limit writing does not cause #GP, but may cause #PG, if not writing the base causes #GP. */
2220 if (off <= X86_PAGE_SIZE - 2)
2221 {
2222 if (pWorker->fSs)
2223 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2224 else
2225 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2226 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2227 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2228 pbExpected, &pbTest[off], off);
2229 cb = X86_PAGE_SIZE - off - 2;
2230 if ( off < X86_PAGE_SIZE - 2
2231 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2232 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2233 bFiller, cb, &pbTest[off + 2], off);
2234 }
2235 else
2236 {
2237 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2238 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2239 if ( off < X86_PAGE_SIZE
2240 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2241 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2242 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2243 }
2244 }
2245 else
2246 {
2247 /* #GP/#SS on limit. */
2248 if (pWorker->fSs)
2249 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2250 else
2251 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2252 if ( off < X86_PAGE_SIZE
2253 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2254 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2255 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2256 }
2257
2258 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2259 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2260 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2261 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2262
2263 g_usBs3TestStep++;
2264
2265 /* Set DS to 0 and check that we get #GP(0). */
2266 if (!pWorker->fSs)
2267 {
2268 Ctx.ds = 0;
2269 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2270 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2271 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2272 g_usBs3TestStep++;
2273 }
2274 }
2275 }
2276
2277 /* Expand down. */
2278 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2279 uFlatTest -= X86_PAGE_SIZE;
2280
2281 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2282 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2283 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2284 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2285
2286 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2287 {
2288 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2289 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2290 {
2291 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2292 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2293 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2294 if (cbLimit < off && off >= X86_PAGE_SIZE)
2295 {
2296 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2297 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2298 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2299 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2300 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2301 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2302 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2303 cbIdtr, off, cbLimit, bFiller, cb, pbTest[off + cbIdtr]);
2304 }
2305 else
2306 {
2307 if (cbLimit < off && off < X86_PAGE_SIZE)
2308 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2309 uFlatTest + off, 0 /*cbIpAdjust*/);
2310 else if (pWorker->fSs)
2311 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2312 else
2313 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2314 cb = cbIdtr*2;
2315 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2316 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2317 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE]);
2318 }
2319 g_usBs3TestStep++;
2320 }
2321 }
2322
2323 pbTest += X86_PAGE_SIZE;
2324 uFlatTest += X86_PAGE_SIZE;
2325 }
2326
2327 Bs3MemGuardedTestPageFree(pbTest);
2328 }
2329
2330 /*
2331 * Check non-canonical 64-bit space.
2332 */
2333 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2334 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2335 {
2336 /* Make our references relative to the gap. */
2337 pbTest += g_cbBs3PagingOneCanonicalTrap;
2338
2339 /* Hit it from below. */
2340 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2341 {
2342 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2343 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2344 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2345 if (off + cbIdtr <= 0)
2346 {
2347 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2348 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2349 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2350 }
2351 else
2352 {
2353 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2354 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2355 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2356 off2 = off <= -2 ? 2 : 0;
2357 cb = cbIdtr - off2;
2358 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2359 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2360 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2361 }
2362 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2363 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2364 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2365 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2366 }
2367
2368 /* Hit it from above. */
2369 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2370 {
2371 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2372 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2373 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2374 if (off >= 0)
2375 {
2376 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2377 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2378 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2379 }
2380 else
2381 {
2382 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2383 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2384 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2385 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2386 }
2387 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2388 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2389 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2390 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2391 }
2392
2393 }
2394}
2395
2396
2397static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2398 uint8_t const *pbExpected)
2399{
2400 unsigned idx;
2401 unsigned bRing;
2402 unsigned iStep = 0;
2403
2404 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2405 test and don't want to bother with double faults. */
2406 for (bRing = 0; bRing <= 3; bRing++)
2407 {
2408 for (idx = 0; idx < cWorkers; idx++)
2409 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2410 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2411 {
2412 g_usBs3TestStep = iStep;
2413 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2414 iStep += 1000;
2415 }
2416 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2417 break;
2418 }
2419}
2420
2421
2422BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2423{
2424 union
2425 {
2426 RTIDTR Idtr;
2427 uint8_t ab[16];
2428 } Expected;
2429
2430 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2431 bs3CpuBasic2_SetGlobals(bMode);
2432
2433 /*
2434 * Pass to common worker which is only compiled once per mode.
2435 */
2436 Bs3MemZero(&Expected, sizeof(Expected));
2437 ASMGetIDTR(&Expected.Idtr);
2438 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2439
2440 /*
2441 * Re-initialize the IDT.
2442 */
2443 Bs3TrapReInit();
2444 return 0;
2445}
2446
2447
2448BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
2449{
2450 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
2451 uint64_t uNew = 0;
2452 union
2453 {
2454 RTGDTR Gdtr;
2455 uint8_t ab[16];
2456 } Expected;
2457
2458 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2459 bs3CpuBasic2_SetGlobals(bMode);
2460
2461 /*
2462 * If paged mode, try push the GDT way up.
2463 */
2464 Bs3MemZero(&Expected, sizeof(Expected));
2465 ASMGetGDTR(&Expected.Gdtr);
2466 if (BS3_MODE_IS_PAGED(bMode))
2467 {
2468/** @todo loading non-canonical base addresses. */
2469 int rc;
2470 uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
2471 uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
2472 rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
2473 if (RT_SUCCESS(rc))
2474 {
2475 Bs3Lgdt_Gdt.uAddr = uNew;
2476 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
2477 ASMGetGDTR(&Expected.Gdtr);
2478 if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
2479 *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
2480 }
2481 }
2482
2483 /*
2484 * Pass to common worker which is only compiled once per mode.
2485 */
2486 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);
2487
2488 /*
2489 * Unalias the GDT.
2490 */
2491 if (uNew != 0)
2492 {
2493 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
2494 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
2495 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
2496 }
2497
2498 /*
2499 * Re-initialize the IDT.
2500 */
2501 Bs3TrapReInit();
2502 return 0;
2503}
2504
2505
2506
2507/*
2508 * LIDT & LGDT
2509 */
2510
2511/**
2512 * Executes one round of LIDT and LGDT tests using one assembly worker.
2513 *
2514 * This is written with driving everything from the 16-bit or 32-bit worker in
2515 * mind, i.e. not assuming the test bitcount is the same as the current.
2516 */
2517static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2518 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2519{
2520 static const struct
2521 {
2522 bool fGP;
2523 uint16_t cbLimit;
2524 uint64_t u64Base;
2525 } s_aValues64[] =
2526 {
2527 { false, 0x0000, UINT64_C(0x0000000000000000) },
2528 { false, 0x0001, UINT64_C(0x0000000000000001) },
2529 { false, 0x0002, UINT64_C(0x0000000000000010) },
2530 { false, 0x0003, UINT64_C(0x0000000000000123) },
2531 { false, 0x0004, UINT64_C(0x0000000000001234) },
2532 { false, 0x0005, UINT64_C(0x0000000000012345) },
2533 { false, 0x0006, UINT64_C(0x0000000000123456) },
2534 { false, 0x0007, UINT64_C(0x0000000001234567) },
2535 { false, 0x0008, UINT64_C(0x0000000012345678) },
2536 { false, 0x0009, UINT64_C(0x0000000123456789) },
2537 { false, 0x000a, UINT64_C(0x000000123456789a) },
2538 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2539 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2540 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2541 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2542 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2543 { true, 0x0000, UINT64_C(0x0000800000000000) },
2544 { true, 0x0000, UINT64_C(0x0000800000000333) },
2545 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2546 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2547 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2548 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2549 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2550 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2551 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2552 { false, 0x5678, UINT64_C(0xffff800000000000) },
2553 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2554 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2555 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2556 };
2557 static const struct
2558 {
2559 uint16_t cbLimit;
2560 uint32_t u32Base;
2561 } s_aValues32[] =
2562 {
2563 { 0xdfdf, UINT32_C(0xefefefef) },
2564 { 0x0000, UINT32_C(0x00000000) },
2565 { 0x0001, UINT32_C(0x00000001) },
2566 { 0x0002, UINT32_C(0x00000012) },
2567 { 0x0003, UINT32_C(0x00000123) },
2568 { 0x0004, UINT32_C(0x00001234) },
2569 { 0x0005, UINT32_C(0x00012345) },
2570 { 0x0006, UINT32_C(0x00123456) },
2571 { 0x0007, UINT32_C(0x01234567) },
2572 { 0x0008, UINT32_C(0x12345678) },
2573 { 0x0009, UINT32_C(0x80204060) },
2574 { 0x000a, UINT32_C(0xddeeffaa) },
2575 { 0x000b, UINT32_C(0xfdecdbca) },
2576 { 0x000c, UINT32_C(0x6098456b) },
2577 { 0x000d, UINT32_C(0x98506099) },
2578 { 0x000e, UINT32_C(0x206950bc) },
2579 { 0x000f, UINT32_C(0x9740395d) },
2580 { 0x0334, UINT32_C(0x64a9455e) },
2581 { 0xb423, UINT32_C(0xd20b6eff) },
2582 { 0x4955, UINT32_C(0x85296d46) },
2583 { 0xffff, UINT32_C(0x07000039) },
2584 { 0xefe1, UINT32_C(0x0007fe00) },
2585 };
2586
2587 BS3TRAPFRAME TrapCtx;
2588 BS3REGCTX Ctx;
2589 BS3REGCTX CtxUdExpected;
2590 BS3REGCTX TmpCtx;
2591 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2592 uint8_t abBufSave[32]; /* For saving the result after loading. */
2593 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2594 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2595 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2596 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2597 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
2598 uint8_t const cbBaseLoaded = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
2599 : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2600 ? 3 : 4;
2601 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2602 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
2603 uint8_t bFiller1; /* For filling abBufLoad. */
2604 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2605 int off;
2606 uint8_t BS3_FAR *pbTest;
2607 unsigned i;
2608
2609 /* make sure they're allocated */
2610 Bs3MemZero(&Ctx, sizeof(Ctx));
2611 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2612 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2613 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2614 Bs3MemZero(abBufSave, sizeof(abBufSave));
2615 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2616 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2617
2618 /*
2619 * Create a context, giving this routine some more stack space.
2620 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2621 * - Point DS/SS:xBX at abBufLoad.
2622 * - Point ES:xDI at abBufSave.
2623 * - Point ES:xSI at abBufRestore.
2624 */
2625 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2626 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2627 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2628 g_uBs3TrapEipHint = Ctx.rip.u32;
2629 Ctx.rflags.u16 &= ~X86_EFL_IF;
2630 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2631
2632 pbBufSave = abBufSave;
2633 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2634 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
2635 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2636
2637 pbBufRestore = abBufRestore;
2638 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2639 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2640 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2641 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2642
2643 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2644 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2645
2646 /* For successful SIDT attempts, we'll stop at the UD2. */
2647 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2648 CtxUdExpected.rip.u += pWorker->cbInstr;
2649
2650 /*
2651 * Check that it works at all.
2652 */
2653 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2654 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2655 Bs3MemZero(abBufSave, sizeof(abBufSave));
2656 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2657 if (bRing != 0)
2658 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2659 else
2660 {
2661 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2662 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2663 Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
2664 pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2665 }
2666 g_usBs3TestStep++;
2667
2668 /* Determine two filler bytes that doesn't appear in the previous result or our expectations. */
2669 bFiller1 = ~0x55;
2670 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2671 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2672 || bFiller1 == 0xff)
2673 bFiller1++;
2674 bFiller2 = 0x33;
2675 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2676 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2677 || bFiller2 == 0xff
2678 || bFiller2 == bFiller1)
2679 bFiller2++;
2680 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2681 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
2682
2683 /* Again with a buffer filled with a byte not occuring in the previous result. */
2684 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2685 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2686 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2687 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2688 if (bRing != 0)
2689 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2690 else
2691 {
2692 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2693 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2694 Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
2695 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2696 }
2697 g_usBs3TestStep++;
2698
2699 /*
2700 * Try loading a bunch of different limit+base value to check what happens,
2701 * especially what happens wrt the top part of the base in 16-bit mode.
2702 */
2703 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2704 {
2705 for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
2706 {
2707 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2708 Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
2709 Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
2710 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2711 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2712 if (bRing != 0 || s_aValues64[i].fGP)
2713 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2714 else
2715 {
2716 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2717 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
2718 || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
2719 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2720 Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2721 pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
2722 bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2723 }
2724 g_usBs3TestStep++;
2725 }
2726 }
2727 else
2728 {
2729 for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
2730 {
2731 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2732 Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
2733 Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
2734 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2735 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2736 if (bRing != 0)
2737 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2738 else
2739 {
2740 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2741 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
2742 || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
2743 || ( cbBaseLoaded != 4
2744 && pbBufSave[2+3] != bTop16BitBase)
2745 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2746 Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
2747 pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
2748 f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
2749 }
2750 g_usBs3TestStep++;
2751 }
2752 }
2753
2754 /*
2755 * Slide the buffer along 8 bytes to cover misalignment.
2756 */
2757 for (off = 0; off < 8; off++)
2758 {
2759 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
2760 CtxUdExpected.rbx.u = Ctx.rbx.u;
2761
2762 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2763 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2764 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2765 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2766 if (bRing != 0)
2767 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2768 else
2769 {
2770 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2771 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2772 Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
2773 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2774 }
2775 g_usBs3TestStep++;
2776 }
2777 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2778 CtxUdExpected.rbx.u = Ctx.rbx.u;
2779
2780 /*
2781 * Play with the selector limit if the target mode supports limit checking
2782 * We use BS3_SEL_TEST_PAGE_00 for this
2783 */
2784 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2785 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2786 {
2787 uint16_t cbLimit;
2788 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
2789 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2790 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2791 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2792 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2793 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2794
2795 if (pWorker->fSs)
2796 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2797 else
2798 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2799
2800 /* Expand up (normal). */
2801 for (off = 0; off < 8; off++)
2802 {
2803 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2804 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2805 {
2806 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2807
2808 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2809 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2810 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2811 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2812 if (bRing != 0)
2813 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2814 else if (off + cbIdtr <= cbLimit + 1)
2815 {
2816 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2817 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2818 Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
2819 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2820 }
2821 else if (pWorker->fSs)
2822 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2823 else
2824 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2825 g_usBs3TestStep++;
2826
2827 /* Again with zero limit and messed up base (should trigger tripple fault if partially loaded). */
2828 abBufLoad[off] = abBufLoad[off + 1] = 0;
2829 abBufLoad[off + 2] |= 1;
2830 abBufLoad[off + cbIdtr - 2] ^= 0x5a;
2831 abBufLoad[off + cbIdtr - 1] ^= 0xa5;
2832 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2833 if (bRing != 0)
2834 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2835 else if (off + cbIdtr <= cbLimit + 1)
2836 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2837 else if (pWorker->fSs)
2838 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2839 else
2840 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2841 }
2842 }
2843
2844 /* Expand down (weird). Inverted valid area compared to expand up,
2845 so a limit of zero give us a valid range for 0001..0ffffh (instead of
2846 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2847 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2848 (because in a normal expand up the 0ffffh means all 64KB are
2849 accessible). */
2850 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2851 for (off = 0; off < 8; off++)
2852 {
2853 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2854 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2855 {
2856 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2857
2858 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2859 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2860 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2861 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2862 if (bRing != 0)
2863 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2864 else if (off > cbLimit)
2865 {
2866 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2867 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2868 Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
2869 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2870 }
2871 else if (pWorker->fSs)
2872 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2873 else
2874 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2875 g_usBs3TestStep++;
2876
2877 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2878 abBufLoad[off] = abBufLoad[off + 1] = 0;
2879 abBufLoad[off + 2] |= 3;
2880 abBufLoad[off + cbIdtr - 2] ^= 0x55;
2881 abBufLoad[off + cbIdtr - 1] ^= 0xaa;
2882 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2883 if (bRing != 0)
2884 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2885 else if (off > cbLimit)
2886 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2887 else if (pWorker->fSs)
2888 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2889 else
2890 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2891 }
2892 }
2893
2894 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2895 CtxUdExpected.rbx.u = Ctx.rbx.u;
2896 CtxUdExpected.ss = Ctx.ss;
2897 CtxUdExpected.ds = Ctx.ds;
2898 }
2899
2900 /*
2901 * Play with the paging.
2902 */
2903 if ( BS3_MODE_IS_PAGED(bTestMode)
2904 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2905 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2906 {
2907 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2908
2909 /*
2910 * Slide the load buffer towards the trailing guard page.
2911 */
2912 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
2913 CtxUdExpected.ss = Ctx.ss;
2914 CtxUdExpected.ds = Ctx.ds;
2915 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2916 {
2917 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
2918 if (off < X86_PAGE_SIZE)
2919 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
2920 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2921 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2922 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2923 if (bRing != 0)
2924 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2925 else if (off + cbIdtr <= X86_PAGE_SIZE)
2926 {
2927 CtxUdExpected.rbx = Ctx.rbx;
2928 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2929 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2930 Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
2931 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2932 }
2933 else
2934 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2935 g_usBs3TestStep++;
2936
2937 /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
2938 The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
2939 if ( off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
2940 && ( off != X86_PAGE_SIZE - 2
2941 || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
2942 )
2943 {
2944 pbTest[off] = 0;
2945 if (off + 1 < X86_PAGE_SIZE)
2946 pbTest[off + 1] = 0;
2947 if (off + 2 < X86_PAGE_SIZE)
2948 pbTest[off + 2] |= 7;
2949 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2950 if (bRing != 0)
2951 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2952 else
2953 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2954 g_usBs3TestStep++;
2955 }
2956 }
2957
2958 /*
2959 * Now, do it the other way around. It should look normal now since writing
2960 * the limit will #PF first and nothing should be written.
2961 */
2962 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2963 {
2964 Bs3MemSet(pbTest, bFiller1, 48);
2965 if (off >= 0)
2966 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
2967 else if (off + cbIdtr > 0)
2968 Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
2969 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2970 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2971 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2972 if (bRing != 0)
2973 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2974 else if (off >= 0)
2975 {
2976 CtxUdExpected.rbx = Ctx.rbx;
2977 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2978 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2979 Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
2980 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2981 }
2982 else
2983 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
2984 g_usBs3TestStep++;
2985
2986 /* Again with messed up base as well (triple fault if buggy). */
2987 if (off < 0 && off > -cbIdtr)
2988 {
2989 if (off + 2 >= 0)
2990 pbTest[off + 2] |= 15;
2991 pbTest[off + cbIdtr - 1] ^= 0xaa;
2992 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2993 if (bRing != 0)
2994 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2995 else
2996 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
2997 g_usBs3TestStep++;
2998 }
2999 }
3000
3001 /*
3002 * Combine paging and segment limit and check ordering.
3003 * This is kind of interesting here since it the instruction seems to
3004 * actually be doing two separate read, just like it's S[IG]DT counterpart.
3005 *
3006 * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
3007 * that's what f486Weirdness deals with.
3008 */
3009 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
3010 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
3011 {
3012 bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
3013 && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
3014 uint16_t cbLimit;
3015
3016 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
3017 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
3018 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3019 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3020 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3021
3022 if (pWorker->fSs)
3023 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
3024 else
3025 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3026
3027 /* Expand up (normal), approaching tail guard page. */
3028 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3029 {
3030 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3031 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3032 {
3033 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3034 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
3035 if (off < X86_PAGE_SIZE)
3036 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
3037 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3038 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3039 if (bRing != 0)
3040 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3041 else if (off + cbIdtr <= cbLimit + 1)
3042 {
3043 /* No #GP, but maybe #PF. */
3044 if (off + cbIdtr <= X86_PAGE_SIZE)
3045 {
3046 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3047 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3048 Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
3049 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3050 }
3051 else
3052 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3053 }
3054 /* No #GP/#SS on limit, but instead #PF? */
3055 else if ( !f486Weirdness
3056 ? off < cbLimit && off >= 0xfff
3057 : off + 2 < cbLimit && off >= 0xffd)
3058 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3059 /* #GP/#SS on limit or base. */
3060 else if (pWorker->fSs)
3061 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3062 else
3063 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3064
3065 g_usBs3TestStep++;
3066
3067 /* Set DS to 0 and check that we get #GP(0). */
3068 if (!pWorker->fSs)
3069 {
3070 Ctx.ds = 0;
3071 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3072 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3073 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3074 g_usBs3TestStep++;
3075 }
3076 }
3077 }
3078
3079 /* Expand down. */
3080 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
3081 uFlatTest -= X86_PAGE_SIZE;
3082
3083 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
3084 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3085 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3086 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3087
3088 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3089 {
3090 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3091 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3092 {
3093 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3094 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
3095 if (off >= X86_PAGE_SIZE)
3096 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3097 else if (off > X86_PAGE_SIZE - cbIdtr)
3098 Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
3099 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3100 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3101 if (bRing != 0)
3102 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3103 else if (cbLimit < off && off >= X86_PAGE_SIZE)
3104 {
3105 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3106 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3107 Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
3108 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3109 }
3110 else if (cbLimit < off && off < X86_PAGE_SIZE)
3111 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3112 else if (pWorker->fSs)
3113 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3114 else
3115 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3116 g_usBs3TestStep++;
3117 }
3118 }
3119
3120 pbTest += X86_PAGE_SIZE;
3121 uFlatTest += X86_PAGE_SIZE;
3122 }
3123
3124 Bs3MemGuardedTestPageFree(pbTest);
3125 }
3126
3127 /*
3128 * Check non-canonical 64-bit space.
3129 */
3130 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
3131 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
3132 {
3133 /* Make our references relative to the gap. */
3134 pbTest += g_cbBs3PagingOneCanonicalTrap;
3135
3136 /* Hit it from below. */
3137 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3138 {
3139 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
3140 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3141 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3142 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3143 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3144 if (off + cbIdtr > 0 || bRing != 0)
3145 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3146 else
3147 {
3148 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3149 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3150 Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
3151 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3152 }
3153 }
3154
3155 /* Hit it from above. */
3156 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3157 {
3158 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
3159 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3160 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3161 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3162 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3163 if (off < 0 || bRing != 0)
3164 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3165 else
3166 {
3167 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3168 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3169 Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
3170 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3171 }
3172 }
3173
3174 }
3175}
3176
3177
3178static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3179 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3180{
3181 unsigned idx;
3182 unsigned bRing;
3183 unsigned iStep = 0;
3184
3185 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3186 test and don't want to bother with double faults. */
3187 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3188 {
3189 for (idx = 0; idx < cWorkers; idx++)
3190 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3191 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3192 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3193 || ( bTestMode > BS3_MODE_PE16
3194 || ( bTestMode == BS3_MODE_PE16
3195 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3196 {
3197 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3198 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3199 g_usBs3TestStep = iStep;
3200 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3201 iStep += 1000;
3202 }
3203 if (BS3_MODE_IS_RM_SYS(bTestMode))
3204 break;
3205 }
3206}
3207
3208
3209BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3210{
3211 union
3212 {
3213 RTIDTR Idtr;
3214 uint8_t ab[32]; /* At least cbIdtr*2! */
3215 } Expected;
3216
3217 //if (bMode != BS3_MODE_LM64) return 0;
3218 bs3CpuBasic2_SetGlobals(bMode);
3219
3220 /*
3221 * Pass to common worker which is only compiled once per mode.
3222 */
3223 Bs3MemZero(&Expected, sizeof(Expected));
3224 ASMGetIDTR(&Expected.Idtr);
3225
3226 if (BS3_MODE_IS_RM_SYS(bMode))
3227 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3228 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3229 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3230 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3231 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3232 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3233 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3234 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3235 else
3236 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3237 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3238
3239 /*
3240 * Re-initialize the IDT.
3241 */
3242 Bs3TrapReInit();
3243 return 0;
3244}
3245
3246
3247BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3248{
3249 union
3250 {
3251 RTGDTR Gdtr;
3252 uint8_t ab[32]; /* At least cbIdtr*2! */
3253 } Expected;
3254
3255 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3256 bs3CpuBasic2_SetGlobals(bMode);
3257
3258 /*
3259 * Pass to common worker which is only compiled once per mode.
3260 */
3261 if (BS3_MODE_IS_RM_SYS(bMode))
3262 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3263 Bs3MemZero(&Expected, sizeof(Expected));
3264 ASMGetGDTR(&Expected.Gdtr);
3265
3266 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3267 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3268
3269 /*
3270 * Re-initialize the IDT.
3271 */
3272 Bs3TrapReInit();
3273 return 0;
3274}
3275
/** Buffer for hand-built IRET stack frames; the overlapping views let the
 *  frame be filled with word/dword/qword granularity (see iretbuf_SetupFrame). */
typedef union IRETBUF
{
    uint64_t au64[6];  /* max req is 5 */   /* 64-bit frame: RIP, CS, RFLAGS, RSP, SS. */
    uint32_t au32[12]; /* max req is 9 */   /* 32-bit frame; 9 presumably covers IRET to V86 (adds ES/DS/FS/GS) - TODO confirm. */
    uint16_t au16[24]; /* max req is 5 */   /* 16-bit frame: IP, CS, FLAGS, SP, SS. */
    uint8_t  ab[48];                        /* Byte view for arbitrary-offset pokes. */
} IRETBUF;
/** Far pointer to an IRET frame buffer. */
typedef IRETBUF BS3_FAR *PIRETBUF;
3284
3285
3286static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3287 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3288{
3289 if (cbPop == 2)
3290 {
3291 pIretBuf->au16[0] = (uint16_t)uPC;
3292 pIretBuf->au16[1] = uCS;
3293 pIretBuf->au16[2] = (uint16_t)fEfl;
3294 pIretBuf->au16[3] = (uint16_t)uSP;
3295 pIretBuf->au16[4] = uSS;
3296 }
3297 else if (cbPop != 8)
3298 {
3299 pIretBuf->au32[0] = (uint32_t)uPC;
3300 pIretBuf->au16[1*2] = uCS;
3301 pIretBuf->au32[2] = (uint32_t)fEfl;
3302 pIretBuf->au32[3] = (uint32_t)uSP;
3303 pIretBuf->au16[4*2] = uSS;
3304 }
3305 else
3306 {
3307 pIretBuf->au64[0] = uPC;
3308 pIretBuf->au16[1*4] = uCS;
3309 pIretBuf->au64[2] = fEfl;
3310 pIretBuf->au64[3] = uSP;
3311 pIretBuf->au16[4*4] = uSS;
3312 }
3313}
3314
3315
3316static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
3317 PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
3318{
3319 BS3TRAPFRAME TrapCtx;
3320 BS3REGCTX Ctx;
3321 BS3REGCTX CtxUdExpected;
3322 BS3REGCTX TmpCtx;
3323 BS3REGCTX TmpCtxExpected;
3324 uint8_t abLowUd[8];
3325 uint8_t abLowIret[8];
3326 FPFNBS3FAR pfnUdLow = (FPFNBS3FAR)abLowUd;
3327 FPFNBS3FAR pfnIretLow = (FPFNBS3FAR)abLowIret;
3328 unsigned const cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
3329 bool const fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
3330 int iRingDst;
3331 int iRingSrc;
3332 uint16_t uDplSs;
3333 uint16_t uRplCs;
3334 uint16_t uRplSs;
3335// int i;
3336 uint8_t BS3_FAR *pbTest;
3337
3338 NOREF(abLowUd);
3339#define IRETBUF_SET_SEL(a_idx, a_uValue) \
3340 do { *(uint16_t)&pIretBuf->ab[a_idx * cbPop] = (a_uValue); } while (0)
3341#define IRETBUF_SET_REG(a_idx, a_uValue) \
3342 do { uint8_t const BS3_FAR *pbTmp = &pIretBuf->ab[a_idx * cbPop]; \
3343 if (cbPop == 2) *(uint16_t)pbTmp = (uint16_t)(a_uValue); \
3344 else if (cbPop != 8) *(uint32_t)pbTmp = (uint32_t)(a_uValue); \
3345 else *(uint64_t)pbTmp = (a_uValue); \
3346 } while (0)
3347
3348 /* make sure they're allocated */
3349 Bs3MemZero(&Ctx, sizeof(Ctx));
3350 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
3351 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
3352 Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
3353 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3354
3355 /*
3356 * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
3357 * copies of both iret and ud in the first 64KB of memory. The stack is
3358 * below 64KB, so we'll just copy the instructions onto the stack.
3359 */
3360 Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
3361 Bs3MemCpy(abLowIret, pfnIret, 4);
3362
3363 /*
3364 * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
3365 * - Point the context at our iret instruction.
3366 * - Point SS:xSP at pIretBuf.
3367 */
3368 Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
3369 if (!fUseLowCode)
3370 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
3371 else
3372 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
3373 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
3374 g_uBs3TrapEipHint = Ctx.rip.u32;
3375 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3376
3377 /*
3378 * The first success (UD) context keeps the same code bit-count as the iret.
3379 */
3380 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
3381 if (!fUseLowCode)
3382 Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
3383 else
3384 Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
3385 CtxUdExpected.rsp.u += cbSameCplFrame;
3386
3387 /*
3388 * Check that it works at all.
3389 */
3390 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3391 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3392
3393 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3394 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3395 g_usBs3TestStep++;
3396
3397 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
3398 {
3399 /* Selectors are modified when switching rings, so we need to know
3400 what we're dealing with there. */
3401 if ( !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
3402 || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
3403 Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
3404 if (Ctx.fs || Ctx.gs)
3405 Bs3TestFailed("Expected R0 FS and GS to be 0!\n");
3406
3407 /*
3408 * Test returning to outer rings if protected mode.
3409 */
3410 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3411 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3412 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3413 {
3414 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3415 TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
3416 TmpCtx.es = TmpCtxExpected.es;
3417 iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
3418 TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3419 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3420 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3421 g_usBs3TestStep++;
3422 }
3423
3424 /*
3425 * Check CS.RPL and SS.RPL.
3426 */
3427 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3428 {
3429 uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
3430 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3431 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3432 for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
3433 {
3434 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3435 Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
3436 TmpCtx.es = TmpCtxExpected.es;
3437 TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
3438 for (uRplCs = 0; uRplCs <= 3; uRplCs++)
3439 {
3440 uint16_t const uSrcEs = TmpCtx.es;
3441 uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
3442 //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);
3443
3444 /* CS.RPL */
3445 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
3446 TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3447 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3448 if (uRplCs == iRingDst && iRingDst >= iRingSrc)
3449 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3450 else
3451 {
3452 if (iRingDst < iRingSrc)
3453 TmpCtx.es = 0;
3454 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3455 TmpCtx.es = uSrcEs;
3456 }
3457 g_usBs3TestStep++;
3458
3459 /* SS.RPL */
3460 if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
3461 {
3462 uint16_t uSavedDstSs = TmpCtxExpected.ss;
3463 for (uRplSs = 0; uRplSs <= 3; uRplSs++)
3464 {
3465 /* SS.DPL (iRingDst == CS.DPL) */
3466 for (uDplSs = 0; uDplSs <= 3; uDplSs++)
3467 {
3468 uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
3469 //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
3470 // iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
3471 // TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3472
3473 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
3474 TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3475 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3476 if (uRplCs != iRingDst || iRingDst < iRingSrc)
3477 {
3478 if (iRingDst < iRingSrc)
3479 TmpCtx.es = 0;
3480 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3481 }
3482 else if (uRplSs != iRingDst || uDplSs != iRingDst)
3483 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
3484 else
3485 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3486 TmpCtx.es = uSrcEs;
3487 g_usBs3TestStep++;
3488 }
3489 }
3490
3491 TmpCtxExpected.ss = uSavedDstSs;
3492 }
3493 }
3494 }
3495 }
3496 }
3497
3498 /*
3499 * Special 64-bit checks.
3500 */
3501 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
3502 {
3503 /* The VM flag is completely ignored. */
3504 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3505 CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3506 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3507 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3508 g_usBs3TestStep++;
3509
3510 /* The NT flag can be loaded just fine. */
3511 CtxUdExpected.rflags.u32 |= X86_EFL_NT;
3512 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3513 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3514 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3515 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3516 CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
3517 g_usBs3TestStep++;
3518
3519 /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
3520 Ctx.rflags.u32 |= X86_EFL_NT;
3521 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3522 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3523 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3524 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3525 g_usBs3TestStep++;
3526
3527 /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
3528 pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
3529 if (pbTest != NULL)
3530 {
3531 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
3532 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3533 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3534 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3535 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3536 g_usBs3TestStep++;
3537
3538 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3539 Bs3MemGuardedTestPageFree(pbTest);
3540 }
3541 Ctx.rflags.u32 &= ~X86_EFL_NT;
3542 }
3543}
3544
3545
3546BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3547{
3548 struct
3549 {
3550 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3551 IRETBUF IRetBuf;
3552 uint8_t abGuard[32];
3553 } uBuf;
3554 size_t cbUnused;
3555
3556 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3557 bs3CpuBasic2_SetGlobals(bMode);
3558
3559 /*
3560 * Primary instruction form.
3561 */
3562 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3563 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3564 if (BS3_MODE_IS_16BIT_CODE(bMode))
3565 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3566 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3567 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3568 else
3569 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3570
3571 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3572 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3573 - (uintptr_t)uBuf.abExtraStack;
3574 if (cbUnused < 2048)
3575 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3576
3577 /*
3578 * Secondary variation: opsize prefixed.
3579 */
3580 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3581 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3582 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3583 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3584 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3585 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3586 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3587 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3588 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3589 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3590 - (uintptr_t)uBuf.abExtraStack;
3591 if (cbUnused < 2048)
3592 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3593
3594 /*
3595 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3596 */
3597 if (BS3_MODE_IS_64BIT_CODE(bMode))
3598 {
3599 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3600 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3601 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3602 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3603 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3604 - (uintptr_t)uBuf.abExtraStack;
3605 if (cbUnused < 2048)
3606 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3607 }
3608
3609 return 0;
3610}
3611
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette