VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c@ 97613

Last change on this file since 97613 was 97585, checked in by vboxsync, 2 years ago

ValKit/bs3-cpu-basic-2: Test both variations of rex.w and opsize prefixes since AMD cares about these and the order obviously matters. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 236.0 KB
Line 
1/* $Id: bs3-cpu-basic-2-x0.c 97585 2022-11-17 00:12:23Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#define BS3_USE_X0_TEXT_SEG
42#include <bs3kit.h>
43#include <iprt/asm.h>
44#include <iprt/asm-amd64-x86.h>
45
46
47/*********************************************************************************************************************************
48* Defined Constants And Macros *
49*********************************************************************************************************************************/
#undef CHECK_MEMBER
/** Compares an actual trap-frame member value against the expected one and
 *  reports any mismatch via bs3CpuBasic2_FailedF (which prefixes the message
 *  with the current test step and mode name). */
#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
    do \
    { \
        if ((a_Actual) == (a_Expected)) { /* likely */ } \
        else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
    } while (0)


/** Indicating that we've got operand size prefix and that it matters. */
#define BS3CB2SIDTSGDT_F_OPSIZE     UINT8_C(0x01)
/** Worker requires 386 or later. */
#define BS3CB2SIDTSGDT_F_386PLUS    UINT8_C(0x02)


/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
 *
 * These are flags, though we've precombined a few shortening things down.
 *
 * @{ */
#define MYOP_LD             0x1 /**< The instruction loads. */
#define MYOP_ST             0x2 /**< The instruction stores */
#define MYOP_EFL            0x4 /**< The instruction modifies EFLAGS. */
#define MYOP_AC_GP          0x8 /**< The instruction may cause either \#AC or \#GP (FXSAVE). */

#define MYOP_LD_ST          0x3 /**< Convenience: The instruction both loads and stores. */
#define MYOP_LD_DIV         0x5 /**< Convenience: DIV instruction - loading and modifying flags. */
/** @} */
78
79
80/*********************************************************************************************************************************
81* Structures and Typedefs *
82*********************************************************************************************************************************/
/** Descriptor type pair used by the (currently disabled) invalid CS/SS
 *  selector type tables. */
typedef struct BS3CB2INVLDESCTYPE
{
    uint8_t     u4Type;     /**< The 4-bit descriptor type value. */
    uint8_t     u1DescType; /**< The S bit: 1 = code/data descriptor, 0 = system descriptor. */
} BS3CB2INVLDESCTYPE;

/** Describes one SIDT/SGDT/LIDT/LGDT assembly test worker. */
typedef struct BS3CB2SIDTSGDT
{
    const char *pszDesc;    /**< Human readable description of the instruction variant. */
    FPFNBS3FAR  fpfnWorker; /**< Far pointer to the assembly worker code. */
    uint8_t     cbInstr;    /**< The instruction length, prefixes included (for IP adjustments). */
    bool        fSs;        /**< Whether the memory operand has an SS segment override. */
    uint8_t     bMode;      /**< Mask of BS3_MODE_CODE_XXX values the worker is valid for. */
    uint8_t     fFlags;     /**< BS3CB2SIDTSGDT_F_XXX. */
} BS3CB2SIDTSGDT;


/** Assembly code snippet used by the \#AC tests. */
typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);

/** Describes one \#AC test snippet and the memory access it performs. */
typedef struct FNBS3CPUBASIC2ACTSTCODE
{
    FNBS3CPUBASIC2ACSNIPPET BS3_FAR    *pfn;            /**< The code snippet. */
    uint8_t                             fOp;            /**< MYOP_XXX flags describing the access. */
    uint16_t                            cbMem;          /**< Size of the memory access. */
    uint8_t                             cbAlign;        /**< Required/natural alignment of the access. */
    uint8_t                             offFaultInstr;  /**< For skipping fninit with the fld test. */
} FNBS3CPUBASIC2ACTSTCODE;
/** Pointer to a const \#AC test snippet descriptor. */
typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;

/** Per-code-mode table of \#AC test snippets.
 * NOTE(review): the struct tag says 'ACT' while the typedef names say 'PFT'
 * (looks like a copy/paste from the \#PF test) - harmless but inconsistent;
 * renaming would need a whole-file check. */
typedef struct BS3CPUBASIC2ACTTSTCMNMODE
{
    uint8_t                     bMode;      /**< The BS3_MODE_CODE_XXX value this entry covers. */
    uint16_t                    cEntries;   /**< Number of entries in paEntries. */
    PCFNBS3CPUBASIC2ACTSTCODE   paEntries;  /**< The snippet table. */
} BS3CPUBASIC2PFTTSTCMNMODE;
typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
119
120
121/*********************************************************************************************************************************
122* External Symbols *
123*********************************************************************************************************************************/
124extern FNBS3FAR bs3CpuBasic2_Int80;
125extern FNBS3FAR bs3CpuBasic2_Int81;
126extern FNBS3FAR bs3CpuBasic2_Int82;
127extern FNBS3FAR bs3CpuBasic2_Int83;
128
129extern FNBS3FAR bs3CpuBasic2_ud2;
130#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
131extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
132
133extern FNBS3FAR bs3CpuBasic2_iret;
134extern FNBS3FAR bs3CpuBasic2_iret_opsize;
135extern FNBS3FAR bs3CpuBasic2_iret_rexw;
136
137extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
138extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
139extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
140extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
141extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
142extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
143extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
144extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
145extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
146extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
147extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
148extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
149
150extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
151extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
152extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
153extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
154extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
155extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
156extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
157extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
158extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
159extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
160extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
161extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
162
163extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
164extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
165extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
166extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
167extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
168extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
169extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
170extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
171extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
172extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
173extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
174extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
175extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
176
177extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
178extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
179extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
180extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
181extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
182extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
183extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
184extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
185extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
186extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
187extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
188extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
189
190
191/* bs3-cpu-basic-2-template.mac: */
192FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
193FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
194FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
195FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
196FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
197FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16;
198FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16;
199FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16;
200FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c16;
201
202FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
203FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
204FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
205FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
206FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
207FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32;
208FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32;
209FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32;
210FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c32;
211
212FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
213FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
214FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
215FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
216FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
217FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64;
218FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64;
219FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64;
220FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c64;
221
222
223/*********************************************************************************************************************************
224* Global Variables *
225*********************************************************************************************************************************/
/** Name of the current test mode (set by bs3CpuBasic2_SetGlobals). */
static const char BS3_FAR  *g_pszTestMode = (const char *)1;
/** The current test mode (BS3_MODE_XXX, set by bs3CpuBasic2_SetGlobals). */
static uint8_t              g_bTestMode = 1;
/** Whether the system context of the current test mode is 16-bit. */
static bool                 g_f16BitSys = 1;
229
230
/** SIDT test workers. */
static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
{
    /*  pszDesc,                fpfnWorker,                                 cbInstr, fSs,   bMode,                                fFlags */
    { "sidt [bx]",              bs3CpuBasic2_sidt_bx_ud2_c16,               3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sidt [ss:bx]",           bs3CpuBasic2_sidt_ss_bx_ud2_c16,            4, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sidt [bx]",          bs3CpuBasic2_sidt_opsize_bx_ud2_c16,        4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sidt [ss:bx]",       bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16,     5, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sidt [ebx]",             bs3CpuBasic2_sidt_bx_ud2_c32,               3, false, BS3_MODE_CODE_32, 0 },
    { "sidt [ss:ebx]",          bs3CpuBasic2_sidt_ss_bx_ud2_c32,            4, true,  BS3_MODE_CODE_32, 0 },
    { "o16 sidt [ebx]",         bs3CpuBasic2_sidt_opsize_bx_ud2_c32,        4, false, BS3_MODE_CODE_32, 0 },
    { "o16 sidt [ss:ebx]",      bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32,     5, true,  BS3_MODE_CODE_32, 0 },
    { "sidt [rbx]",             bs3CpuBasic2_sidt_bx_ud2_c64,               3, false, BS3_MODE_CODE_64, 0 },
    { "o64 sidt [rbx]",         bs3CpuBasic2_sidt_rexw_bx_ud2_c64,          4, false, BS3_MODE_CODE_64, 0 },
    { "o32 sidt [rbx]",         bs3CpuBasic2_sidt_opsize_bx_ud2_c64,        4, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 sidt [rbx]",     bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64,   5, false, BS3_MODE_CODE_64, 0 },
};
247
/** SGDT test workers (mirrors g_aSidtWorkers). */
static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
{
    /*  pszDesc,                fpfnWorker,                                 cbInstr, fSs,   bMode,                                fFlags */
    { "sgdt [bx]",              bs3CpuBasic2_sgdt_bx_ud2_c16,               3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sgdt [ss:bx]",           bs3CpuBasic2_sgdt_ss_bx_ud2_c16,            4, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sgdt [bx]",          bs3CpuBasic2_sgdt_opsize_bx_ud2_c16,        4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sgdt [ss:bx]",       bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16,     5, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sgdt [ebx]",             bs3CpuBasic2_sgdt_bx_ud2_c32,               3, false, BS3_MODE_CODE_32, 0 },
    { "sgdt [ss:ebx]",          bs3CpuBasic2_sgdt_ss_bx_ud2_c32,            4, true,  BS3_MODE_CODE_32, 0 },
    { "o16 sgdt [ebx]",         bs3CpuBasic2_sgdt_opsize_bx_ud2_c32,        4, false, BS3_MODE_CODE_32, 0 },
    { "o16 sgdt [ss:ebx]",      bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32,     5, true,  BS3_MODE_CODE_32, 0 },
    { "sgdt [rbx]",             bs3CpuBasic2_sgdt_bx_ud2_c64,               3, false, BS3_MODE_CODE_64, 0 },
    { "o64 sgdt [rbx]",         bs3CpuBasic2_sgdt_rexw_bx_ud2_c64,          4, false, BS3_MODE_CODE_64, 0 },
    { "o32 sgdt [rbx]",         bs3CpuBasic2_sgdt_opsize_bx_ud2_c64,        4, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 sgdt [rbx]",     bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64,   5, false, BS3_MODE_CODE_64, 0 },
};
264
/** LIDT test workers.  The snippets also save/restore the IDTR via ES:DI/SI,
 *  hence the longer cbInstr values. */
static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
{
    /*  pszDesc,                    fpfnWorker,                                                         cbInstr, fSs,   bMode,                                fFlags */
    { "lidt [bx]",                  bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16,              11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lidt [ss:bx]",               bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16,           12, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lidt [bx]",              bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16,       12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [bx]; sidt32",      bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16,     27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [ss:bx]",           bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16,    13, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lidt [ebx]",                 bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32,              11, false, BS3_MODE_CODE_32, 0 },
    { "lidt [ss:ebx]",              bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32,           12, true,  BS3_MODE_CODE_32, 0 },
    { "o16 lidt [ebx]",             bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32,       12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lidt [ss:ebx]",          bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32,    13, true,  BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "lidt [rbx]",                 bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64,              9,  false, BS3_MODE_CODE_64, 0 },
    { "o64 lidt [rbx]",             bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64,         10, false, BS3_MODE_CODE_64, 0 },
    { "o32 lidt [rbx]",             bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64,       10, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 lidt [rbx]",         bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64,  11, false, BS3_MODE_CODE_64, 0 },
};
282
/** LGDT test workers (mirrors g_aLidtWorkers). */
static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
{
    /*  pszDesc,                    fpfnWorker,                                                         cbInstr, fSs,   bMode,                                fFlags */
    { "lgdt [bx]",                  bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16,              11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lgdt [ss:bx]",               bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16,           12, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lgdt [bx]",              bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16,       12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lgdt [ss:bx]",           bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16,    13, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lgdt [ebx]",                 bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32,              11, false, BS3_MODE_CODE_32, 0 },
    { "lgdt [ss:ebx]",              bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32,           12, true,  BS3_MODE_CODE_32, 0 },
    { "o16 lgdt [ebx]",             bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32,       12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lgdt [ss:ebx]",          bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32,    13, true,  BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "lgdt [rbx]",                 bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64,              9,  false, BS3_MODE_CODE_64, 0 },
    { "o64 lgdt [rbx]",             bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64,         10, false, BS3_MODE_CODE_64, 0 },
    { "o32 lgdt [rbx]",             bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64,       10, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 lgdt [rbx]",         bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64,  11, false, BS3_MODE_CODE_64, 0 },
};
299
300
301
#if 0 /* Currently unused; kept for future selector type testing. */
/** Table containing invalid CS selector types.
 * The first eight entries are non-executable code/data types (u1DescType=1),
 * the rest are the 16 system descriptor types (u1DescType=0). */
static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
{
    { X86_SEL_TYPE_RO,          1 },
    { X86_SEL_TYPE_RO_ACC,      1 },
    { X86_SEL_TYPE_RW,          1 },
    { X86_SEL_TYPE_RW_ACC,      1 },
    { X86_SEL_TYPE_RO_DOWN,     1 },
    { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
    { X86_SEL_TYPE_RW_DOWN,     1 },
    { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
    { 0, 0 },
    { 1, 0 },
    { 2, 0 },
    { 3, 0 },
    { 4, 0 },
    { 5, 0 },
    { 6, 0 },
    { 7, 0 },
    { 8, 0 },
    { 9, 0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};

/** Table containing invalid SS selector types.
 * The first eight entries are executable types (u1DescType=1), the rest are
 * the 16 system descriptor types (u1DescType=0). */
static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
{
    { X86_SEL_TYPE_EO,          1 },
    { X86_SEL_TYPE_EO_ACC,      1 },
    { X86_SEL_TYPE_ER,          1 },
    { X86_SEL_TYPE_ER_ACC,      1 },
    { X86_SEL_TYPE_EO_CONF,     1 },
    { X86_SEL_TYPE_EO_CONF_ACC, 1 },
    { X86_SEL_TYPE_ER_CONF,     1 },
    { X86_SEL_TYPE_ER_CONF_ACC, 1 },
    { 0, 0 },
    { 1, 0 },
    { 2, 0 },
    { 3, 0 },
    { 4, 0 },
    { 5, 0 },
    { 6, 0 },
    { 7, 0 },
    { 8, 0 },
    { 9, 0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};
#endif
361
362
/** 16-bit \#AC test snippets (word-sized accesses; FPU snippets use tword). */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
{
    /*  pfn,                                            fOp,                    cbMem, cbAlign, offFaultInstr */
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c16,             MYOP_LD,                2,   2 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c16,             MYOP_ST,                2,   2 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16,            MYOP_LD_ST,             2,   2 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16,         MYOP_LD_ST | MYOP_EFL,  2,   2 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c16,                MYOP_LD_DIV,            2,   2 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16,         MYOP_LD,                10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16,        MYOP_LD,                10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16,   MYOP_ST,                10,  8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c16,             MYOP_ST | MYOP_AC_GP,   512, 16 },
};
375
/** 32-bit \#AC test snippets (dword-sized accesses; FPU snippets use tword). */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
{
    /*  pfn,                                            fOp,                    cbMem, cbAlign, offFaultInstr */
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c32,             MYOP_LD,                4,   4 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c32,             MYOP_ST,                4,   4 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32,            MYOP_LD_ST,             4,   4 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32,         MYOP_LD_ST | MYOP_EFL,  4,   4 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c32,                MYOP_LD_DIV,            4,   4 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32,         MYOP_LD,                10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32,        MYOP_LD,                10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32,   MYOP_ST,                10,  8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c32,             MYOP_ST | MYOP_AC_GP,   512, 16 },
};
388
/** 64-bit \#AC test snippets (qword-sized accesses; FPU snippets use tword). */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
{
    /*  pfn,                                            fOp,                    cbMem, cbAlign, offFaultInstr */
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c64,             MYOP_LD,                8,   8 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c64,             MYOP_ST,                8,   8 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64,            MYOP_LD_ST,             8,   8 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64,         MYOP_LD_ST | MYOP_EFL,  8,   8 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c64,                MYOP_LD_DIV,            8,   8 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64,         MYOP_LD,                10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64,        MYOP_LD,                10,  8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64,   MYOP_ST,                10,  8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c64,             MYOP_ST | MYOP_AC_GP,   512, 16 },
};
401
/** Maps each code mode to its \#AC snippet table (V8086 shares the 16-bit one). */
static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
{
    { BS3_MODE_CODE_16,  RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    { BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    { BS3_MODE_CODE_32,  RT_ELEMENTS(g_aCmn32), g_aCmn32 },
    { BS3_MODE_CODE_64,  RT_ELEMENTS(g_aCmn64), g_aCmn64 },
};
409
410
411/**
412 * Sets globals according to the mode.
413 *
414 * @param bTestMode The test mode.
415 */
416static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
417{
418 g_bTestMode = bTestMode;
419 g_pszTestMode = Bs3GetModeName(bTestMode);
420 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
421 g_usBs3TestStep = 0;
422}
423
424
/** Reads the full 32-bit ESP from 16-bit code (Open Watcom inline assembly).
 *  The ".386" directive enables 386 instructions; the result is returned
 *  split over ax (low word, from sp) and dx (high word of esp). */
uint32_t ASMGetESP(void);
#pragma aux ASMGetESP = \
    ".386" \
    "mov ax, sp" \
    "mov edx, esp" \
    "shr edx, 16" \
    value [ax dx] \
    modify exact [ax dx];
433
434
435/**
436 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
437 * and g_pszTestMode.
438 */
439static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
440{
441 va_list va;
442
443 char szTmp[168];
444 va_start(va, pszFormat);
445 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
446 va_end(va);
447
448 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
449}
450
451
#if 0 /* Currently unused. */
/**
 * Compares trap stuff.
 *
 * Checks the frame of a software interrupt: exception number, zero error
 * code, and the register context advanced past the 2-byte int xx.
 */
static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt",  "%#04x",    pTrapCtx->bXcpt,  bXcpt);
    CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1 /* Halt on failure so the state can be inspected. */
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
473
474
#if 0 /* Currently unused. */
/**
 * Compares trap stuff.
 *
 * Like bs3CpuBasic2_CompareIntCtx1, but with a caller supplied IP adjustment
 * and an expected handler CS value.
 */
static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
                                         uint8_t bXcpt, uint16_t uHandlerCs)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt",      "%#04x",    pTrapCtx->bXcpt,      bXcpt);
    CHECK_MEMBER("bErrCd",     "%#06RX64", pTrapCtx->uErrCd,     0);
    CHECK_MEMBER("uHandlerCs", "%#06x",    pTrapCtx->uHandlerCs, uHandlerCs);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1 /* Halt on failure so the state can be inspected. */
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
498
/**
 * Compares a CPU trap.
 *
 * @param   pTrapCtx            The trap frame produced by the test.
 * @param   pStartCtx           The register context the test was started with.
 * @param   uErrCd              The expected error code (compared as a word,
 *                              since the 486 only writes a word).
 * @param   bXcpt               The expected exception number.
 * @param   f486ResumeFlagHint  Hint for whether EFLAGS.RF should be expected
 *                              on 486-class CPUs.
 * @param   cbIpAdjust          The expected IP advance relative to pStartCtx.
 */
static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
                                           uint8_t bXcpt, bool f486ResumeFlagHint, uint8_t cbIpAdjust)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    uint32_t fExtraEfl;

    CHECK_MEMBER("bXcpt",  "%#04x",    pTrapCtx->bXcpt,            bXcpt);
    CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */

    /* Decide whether EFLAGS.RF is expected in the trap frame: not on 16-bit
       systems, not for \#DB, and not on <=486 when the hint says so. */
    if (   g_f16BitSys
        || bXcpt == X86_XCPT_DB /* hack (10980xe)... */
        || (   !f486ResumeFlagHint
            && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
        fExtraEfl = 0;
    else
        fExtraEfl = X86_EFL_RF;
#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
    fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
#endif
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1 /* Halt on failure so the state can be inspected. */
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
        ASMHalt();
#endif
    }
}
532
533
/**
 * Compares \#GP trap.
 *
 * @param   pTrapCtx    The trap frame.
 * @param   pStartCtx   The starting register context.
 * @param   uErrCd      The expected \#GP error code.
 */
static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
541
#if 0 /* Currently unused. */
/**
 * Compares \#NP trap.
 *
 * @param   pTrapCtx    The trap frame.
 * @param   pStartCtx   The starting register context.
 * @param   uErrCd      The expected \#NP error code.
 */
static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
#endif
551
/**
 * Compares \#SS trap.
 *
 * @param   pTrapCtx            The trap frame.
 * @param   pStartCtx           The starting register context.
 * @param   uErrCd              The expected \#SS error code.
 * @param   f486ResumeFlagHint  Passed on to bs3CpuBasic2_CompareCpuTrapCtx.
 */
static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint, 0 /*cbIpAdjust*/);
}
559
#if 0 /* Currently unused. */
/**
 * Compares \#TS trap.
 *
 * @param   pTrapCtx    The trap frame.
 * @param   pStartCtx   The starting register context.
 * @param   uErrCd      The expected \#TS error code.
 */
static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
#endif
569
/**
 * Compares \#PF trap.
 *
 * The expected CR2 value is temporarily patched into the start context for
 * the duration of the compare and restored before returning (which is why
 * pStartCtx is not const here).
 *
 * @param   pTrapCtx        The trap frame.
 * @param   pStartCtx       The starting register context (temporarily modified).
 * @param   uErrCd          The expected \#PF error code.
 * @param   uCr2Expected    The expected CR2 value (the faulting address).
 * @param   cbIpAdjust      The expected IP advance relative to pStartCtx.
 */
static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd,
                                      uint64_t uCr2Expected, uint8_t cbIpAdjust)
{
    uint64_t const uCr2Saved = pStartCtx->cr2.u;
    pStartCtx->cr2.u = uCr2Expected;
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/, cbIpAdjust);
    pStartCtx->cr2.u = uCr2Saved;
}
581
/**
 * Compares \#UD trap.
 *
 * @param   pTrapCtx    The trap frame.
 * @param   pStartCtx   The starting register context (no IP adjustment, i.e.
 *                      the fault is expected on the instruction itself).
 */
static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD,
                                   true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
590
/**
 * Compares \#AC trap.
 *
 * @param   pTrapCtx    The trap frame.
 * @param   pStartCtx   The starting register context.
 * @param   cbIpAdjust  The expected IP advance (used when the fault occurs
 *                      after earlier instructions in the snippet, see
 *                      FNBS3CPUBASIC2ACTSTCODE::offFaultInstr).
 */
static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t cbIpAdjust)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/, cbIpAdjust);
}
598
/**
 * Compares \#DB trap.
 *
 * Also verifies DR6 against the expected value; the always-one bits
 * (X86_DR6_RA1_MASK) are ORed in here so callers need not supply them.
 *
 * @param   pTrapCtx    The trap frame.
 * @param   pStartCtx   The starting register context.
 * @param   fDr6Expect  The expected DR6 value, sans the always-one bits.
 */
static void bs3CpuBasic2_CompareDbCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint32_t fDr6Expect)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    uint32_t const fDr6          = Bs3RegGetDr6();
    fDr6Expect |= X86_DR6_RA1_MASK;
    CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);

    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_DB, false /*f486ResumeFlagHint?*/, 0 /*cbIpAdjust*/);

    if (Bs3TestSubErrorCount() > cErrorsBefore)
    {
#if 0 /* Halting on \#DB failures is currently disabled. */
        Bs3TestPrintf("Halting\n");
        ASMHalt();
#endif
    }
}
619
620
621/**
622 * Checks that DR6 has the initial value, i.e. is unchanged when other exception
623 * was raised before a \#DB could occur.
624 */
625static void bs3CpuBasic2_CheckDr6InitVal(void)
626{
627 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
628 uint32_t const fDr6 = Bs3RegGetDr6();
629 uint32_t const fDr6Expect = X86_DR6_INIT_VAL;
630 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
631 if (Bs3TestSubErrorCount() > cErrorsBefore)
632 {
633 Bs3TestPrintf("Halting\n");
634 ASMHalt();
635 }
636}
637
638#if 0 /* convert me */
639static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
640 PX86DESC const paIdt, unsigned const cIdteShift)
641{
642 BS3TRAPFRAME TrapCtx;
643 BS3REGCTX Ctx80;
644 BS3REGCTX Ctx81;
645 BS3REGCTX Ctx82;
646 BS3REGCTX Ctx83;
647 BS3REGCTX CtxTmp;
648 BS3REGCTX CtxTmp2;
649 PBS3REGCTX apCtx8x[4];
650 unsigned iCtx;
651 unsigned iRing;
652 unsigned iDpl;
653 unsigned iRpl;
654 unsigned i, j, k;
655 uint32_t uExpected;
656 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
657# if TMPL_BITS == 16
658 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
659 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
660# else
661 bool const f286 = false;
662 bool const f386Plus = true;
663 int rc;
664 uint8_t *pbIdtCopyAlloc;
665 PX86DESC pIdtCopy;
666 const unsigned cbIdte = 1 << (3 + cIdteShift);
667 RTCCUINTXREG uCr0Saved = ASMGetCR0();
668 RTGDTR GdtrSaved;
669# endif
670 RTIDTR IdtrSaved;
671 RTIDTR Idtr;
672
673 ASMGetIDTR(&IdtrSaved);
674# if TMPL_BITS != 16
675 ASMGetGDTR(&GdtrSaved);
676# endif
677
678 /* make sure they're allocated */
679 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
680 Bs3MemZero(&Ctx80, sizeof(Ctx80));
681 Bs3MemZero(&Ctx81, sizeof(Ctx81));
682 Bs3MemZero(&Ctx82, sizeof(Ctx82));
683 Bs3MemZero(&Ctx83, sizeof(Ctx83));
684 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
685 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
686
687 /* Context array. */
688 apCtx8x[0] = &Ctx80;
689 apCtx8x[1] = &Ctx81;
690 apCtx8x[2] = &Ctx82;
691 apCtx8x[3] = &Ctx83;
692
693# if TMPL_BITS != 16
694 /* Allocate memory for playing around with the IDT. */
695 pbIdtCopyAlloc = NULL;
696 if (BS3_MODE_IS_PAGED(g_bTestMode))
697 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
698# endif
699
700 /*
701 * IDT entry 80 thru 83 are assigned DPLs according to the number.
702 * (We'll be useing more, but this'll do for now.)
703 */
704 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
705 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
706 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
707 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
708
709 Bs3RegCtxSave(&Ctx80);
710 Ctx80.rsp.u -= 0x300;
711 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
712# if TMPL_BITS == 16
713 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
714# elif TMPL_BITS == 32
715 g_uBs3TrapEipHint = Ctx80.rip.u32;
716# endif
717 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
718 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
719 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
720 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
721 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
722 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
723
724 /*
725 * Check that all the above gates work from ring-0.
726 */
727 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
728 {
729 g_usBs3TestStep = iCtx;
730# if TMPL_BITS == 32
731 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
732# endif
733 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
734 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
735 }
736
737 /*
738 * Check that the gate DPL checks works.
739 */
740 g_usBs3TestStep = 100;
741 for (iRing = 0; iRing <= 3; iRing++)
742 {
743 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
744 {
745 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
746 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
747# if TMPL_BITS == 32
748 g_uBs3TrapEipHint = CtxTmp.rip.u32;
749# endif
750 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
751 if (iCtx < iRing)
752 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
753 else
754 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
755 g_usBs3TestStep++;
756 }
757 }
758
759 /*
760 * Modify the gate CS value and run the handler at a different CPL.
761 * Throw RPL variations into the mix (completely ignored) together
762 * with gate presence.
763 * 1. CPL <= GATE.DPL
764 * 2. GATE.P
765 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
766 */
767 g_usBs3TestStep = 1000;
768 for (i = 0; i <= 3; i++)
769 {
770 for (iRing = 0; iRing <= 3; iRing++)
771 {
772 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
773 {
774# if TMPL_BITS == 32
775 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
776# endif
777 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
778 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
779
780 for (j = 0; j <= 3; j++)
781 {
782 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
783 for (k = 0; k < 2; k++)
784 {
785 g_usBs3TestStep++;
786 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
787 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
788 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
789 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
790 /*Bs3TrapPrintFrame(&TrapCtx);*/
791 if (iCtx < iRing)
792 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
793 else if (k == 0)
794 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
795 else if (i > iRing)
796 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
797 else
798 {
799 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
800 if (i <= iCtx && i <= iRing)
801 uExpectedCs |= i;
802 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
803 }
804 }
805 }
806
807 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
808 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
809 }
810 }
811 }
812 BS3_ASSERT(g_usBs3TestStep < 1600);
813
814 /*
815 * Various CS and SS related faults
816 *
817 * We temporarily reconfigure gate 80 and 83 with new CS selectors, the
818     * latter have a CS.DPL of 2 for testing ring transitions and SS loading
819 * without making it impossible to handle faults.
820 */
821 g_usBs3TestStep = 1600;
822 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
823 Bs3GdteTestPage00.Gen.u1Present = 0;
824 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
825 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
826
827 /* CS.PRESENT = 0 */
828 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
829 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
830 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
831 bs3CpuBasic2_FailedF("selector was accessed");
832 g_usBs3TestStep++;
833
834 /* Check that GATE.DPL is checked before CS.PRESENT. */
835 for (iRing = 1; iRing < 4; iRing++)
836 {
837 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
838 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
839 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
840 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
841 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
842 bs3CpuBasic2_FailedF("selector was accessed");
843 g_usBs3TestStep++;
844 }
845
846 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
847 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
848 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
849 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
850 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
851 bs3CpuBasic2_FailedF("CS selector was accessed");
852 g_usBs3TestStep++;
853 for (iDpl = 1; iDpl < 4; iDpl++)
854 {
855 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
856 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
857 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
858 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
859 bs3CpuBasic2_FailedF("CS selector was accessed");
860 g_usBs3TestStep++;
861 }
862
863 /* 1608: Check all the invalid CS selector types alone. */
864 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
865 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
866 {
867 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
868 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
869 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
870 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
871 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
872 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
873 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
874 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
875 g_usBs3TestStep++;
876
877 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
878 Bs3GdteTestPage00.Gen.u1Present = 0;
879 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
880 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
881 Bs3GdteTestPage00.Gen.u1Present = 1;
882 g_usBs3TestStep++;
883 }
884
885 /* Fix CS again. */
886 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
887
888 /* 1632: Test SS. */
889 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
890 {
891 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
892 uint16_t const uSavedSs2 = *puTssSs2;
893 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
894
895 /* Make the handler execute in ring-2. */
896 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
897 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
898 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
899
900 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
901 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
902 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
903 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
904 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
905 bs3CpuBasic2_FailedF("CS selector was not access");
906 g_usBs3TestStep++;
907
908 /* Create a SS.DPL=2 stack segment and check that SS2.RPL matters and
909 that we get #SS if the selector isn't present. */
910 i = 0; /* used for cycling thru invalid CS types */
911 for (k = 0; k < 10; k++)
912 {
913 /* k=0: present,
914 k=1: not-present,
915 k=2: present but very low limit,
916 k=3: not-present, low limit.
917 k=4: present, read-only.
918 k=5: not-present, read-only.
919 k=6: present, code-selector.
920 k=7: not-present, code-selector.
921 k=8: present, read-write / no access + system (=LDT).
922 k=9: not-present, read-write / no access + system (=LDT).
923 */
924 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
925 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
926 if (k >= 8)
927 {
928 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
929 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
930 }
931 else if (k >= 6)
932 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
933 else if (k >= 4)
934 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
935 else if (k >= 2)
936 {
937 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
938 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
939 Bs3GdteTestPage03.Gen.u1Granularity = 0;
940 }
941
942 for (iDpl = 0; iDpl < 4; iDpl++)
943 {
944 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
945
946 for (iRpl = 0; iRpl < 4; iRpl++)
947 {
948 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
949 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
950 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
951 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
952 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
953 if (iRpl != 2 || iRpl != iDpl || k >= 4)
954 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
955 else if (k != 0)
956 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
957 k == 2 /*f486ResumeFlagHint*/);
958 else
959 {
960 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
961 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
962 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
963 }
964 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
965 bs3CpuBasic2_FailedF("CS selector was not access");
966 if ( TrapCtx.bXcpt == 0x83
967 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
968 {
969 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
970 bs3CpuBasic2_FailedF("SS selector was not accessed");
971 }
972 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
973 bs3CpuBasic2_FailedF("SS selector was accessed");
974 g_usBs3TestStep++;
975
976 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
977 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
978 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
979 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
980 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
981 g_usBs3TestStep++;
982
983 /* +2: Check the CS.DPL check is done before the SS ones. Restoring the
984 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
985 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
986 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
987 g_usBs3TestStep++;
988
989 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
990 Bs3GdteTestPage02.Gen.u1Present = 0;
991 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
992 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
993 Bs3GdteTestPage02.Gen.u1Present = 1;
994 g_usBs3TestStep++;
995
996 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
997 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
998 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
999 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1000 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
1001 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
1002 Bs3GdteTestPage02.Gen.u1DescType = 1;
1003 g_usBs3TestStep++;
1004
1005 /* +5: Now, make the CS selector limit too small and that it triggers after SS trouble.
1006 The 286 had a simpler approach to these GP(0). */
1007 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
1008 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
1009 Bs3GdteTestPage02.Gen.u1Granularity = 0;
1010 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1011 if (f286)
1012 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1013 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
1014 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1015 else if (k != 0)
1016 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
1017 else
1018 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1019 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1020 g_usBs3TestStep++;
1021 }
1022 }
1023 }
1024
1025 /* Check all the invalid SS selector types alone. */
1026 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1027 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1028 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1029 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1030 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1031 g_usBs3TestStep++;
1032 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
1033 {
1034 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
1035 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
1036 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1037 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1038 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
1039 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
1040 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
1041 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
1042 g_usBs3TestStep++;
1043 }
1044
1045 /*
1046         * Continue the SS experiments with an expand down segment. We'll use
1047         * the same setup as we already have with gate 83h being DPL 3 and
1048 * having CS.DPL=2.
1049 *
1050 * Expand down segments are weird. The valid area is practically speaking
1051 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
1052 * addresses from 0xffff thru 0x6001.
1053 *
1054 * So, with expand down segments we can more easily cut partially into the
1055 * pushing of the iret frame and trigger more interesting behavior than
1056 * with regular "expand up" segments where the whole pushing area is either
1057         * all fine or not fine.
1058 */
1059 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1060 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1061 Bs3GdteTestPage03.Gen.u2Dpl = 2;
1062 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
1063 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1064
1065 /* First test, limit = max --> no bytes accessible --> #GP */
1066 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1067 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1068
1069 /* Second test, limit = 0 --> all by zero byte accessible --> works */
1070 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
1071 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
1072 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1073 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1074
1075 /* Modify the gate handler to be a dummy that immediately does UD2
1076 and triggers #UD, then advance the limit down till we get the #UD. */
1077 Bs3GdteTestPage03.Gen.u1Granularity = 0;
1078
1079 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
1080 if (g_f16BitSys)
1081 {
1082 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
1083 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
1084 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
1085 }
1086 else
1087 {
1088 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
1089 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
1090 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1091 }
1092 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1093 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1094 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1095 CtxTmp2.bCpl = 2;
1096
1097 /* test run. */
1098 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1099 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1100 g_usBs3TestStep++;
1101
1102 /* Real run. */
1103 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1104 while (i-- > 0)
1105 {
1106 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1107 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1108 if (i > 0)
1109 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1110 else
1111 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1112 g_usBs3TestStep++;
1113 }
1114
1115 /* Do a run where we do the same-ring kind of access. */
1116 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1117 if (g_f16BitSys)
1118 {
1119 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1120 i = 2*3 - 1;
1121 }
1122 else
1123 {
1124 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1125 i = 4*3 - 1;
1126 }
1127 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1128 CtxTmp2.ds = CtxTmp.ds;
1129 CtxTmp2.es = CtxTmp.es;
1130 CtxTmp2.fs = CtxTmp.fs;
1131 CtxTmp2.gs = CtxTmp.gs;
1132 while (i-- > 0)
1133 {
1134 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1135 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1136 if (i > 0)
1137 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1138 else
1139 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1140 g_usBs3TestStep++;
1141 }
1142
1143 *puTssSs2 = uSavedSs2;
1144 paIdt[0x83 << cIdteShift] = SavedGate83;
1145 }
1146 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1147 BS3_ASSERT(g_usBs3TestStep < 3000);
1148
1149 /*
1150 * Modify the gate CS value with a conforming segment.
1151 */
1152 g_usBs3TestStep = 3000;
1153 for (i = 0; i <= 3; i++) /* cs.dpl */
1154 {
1155 for (iRing = 0; iRing <= 3; iRing++)
1156 {
1157 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1158 {
1159 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1160 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1161# if TMPL_BITS == 32
1162 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1163# endif
1164
1165 for (j = 0; j <= 3; j++) /* rpl */
1166 {
1167 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1168 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1169 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1170 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1171 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1172 /*Bs3TrapPrintFrame(&TrapCtx);*/
1173 g_usBs3TestStep++;
1174 if (iCtx < iRing)
1175 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1176 else if (i > iRing)
1177 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1178 else
1179 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1180 }
1181 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1182 }
1183 }
1184 }
1185 BS3_ASSERT(g_usBs3TestStep < 3500);
1186
1187 /*
1188 * The gates must be 64-bit in long mode.
1189 */
1190 if (cIdteShift != 0)
1191 {
1192 g_usBs3TestStep = 3500;
1193 for (i = 0; i <= 3; i++)
1194 {
1195 for (iRing = 0; iRing <= 3; iRing++)
1196 {
1197 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1198 {
1199 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1200 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1201
1202 for (j = 0; j < 2; j++)
1203 {
1204 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1205 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1206 g_usBs3TestStep++;
1207 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1208 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1209 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1210 /*Bs3TrapPrintFrame(&TrapCtx);*/
1211 if (iCtx < iRing)
1212 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1213 else
1214 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1215 }
1216 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1217 }
1218 }
1219 }
1220 BS3_ASSERT(g_usBs3TestStep < 4000);
1221 }
1222
1223 /*
1224 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1225 */
1226 g_usBs3TestStep = 5000;
1227 i = (0x80 << (cIdteShift + 3)) - 1;
1228 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1229 k = (0x83 << (cIdteShift + 3)) - 1;
1230 for (; i <= k; i++, g_usBs3TestStep++)
1231 {
1232 Idtr = IdtrSaved;
1233 Idtr.cbIdt = i;
1234 ASMSetIDTR(&Idtr);
1235 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1236 if (i < j)
1237 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1238 else
1239 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1240 }
1241 ASMSetIDTR(&IdtrSaved);
1242 BS3_ASSERT(g_usBs3TestStep < 5100);
1243
1244# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1245
1246 /*
1247 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1248 * first page and 0x81 is on the second page. We need proceed to move
1249     * first page and 0x81 is on the second page. We need to proceed to move
1250 *
1251 * Note! We must reload the alternative IDTR for each run as any kind of
1252 * printing to the string (like error reporting) will cause a switch
1253 * to real mode and back, reloading the default IDTR.
1254 */
1255 g_usBs3TestStep = 5200;
1256 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1257 {
1258 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1259 for (j = 0; j < cbIdte; j++)
1260 {
1261 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1262 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1263
1264 Idtr.cbIdt = IdtrSaved.cbIdt;
1265 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1266
1267 ASMSetIDTR(&Idtr);
1268 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1269 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1270 g_usBs3TestStep++;
1271
1272 ASMSetIDTR(&Idtr);
1273 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1274 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1275 g_usBs3TestStep++;
1276
1277 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1278 if (RT_SUCCESS(rc))
1279 {
1280 ASMSetIDTR(&Idtr);
1281 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1282 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1283 g_usBs3TestStep++;
1284
1285 ASMSetIDTR(&Idtr);
1286 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1287 if (f486Plus)
1288 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1289 else
1290 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1291 g_usBs3TestStep++;
1292
1293 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1294
1295 /* Check if that the entry type is checked after the whole IDTE has been cleared for #PF. */
1296 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1297 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1298 if (RT_SUCCESS(rc))
1299 {
1300 ASMSetIDTR(&Idtr);
1301 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1302 if (f486Plus)
1303 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1304 else
1305 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1306 g_usBs3TestStep++;
1307
1308 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1309 }
1310 }
1311 else
1312 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1313
1314 ASMSetIDTR(&IdtrSaved);
1315 }
1316 }
1317
1318 /*
1319 * The read/write and user/supervisor bits the IDT PTEs are irrelevant.
1320 */
1321 g_usBs3TestStep = 5300;
1322 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1323 {
1324 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1325 Idtr.cbIdt = IdtrSaved.cbIdt;
1326 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1327
1328 ASMSetIDTR(&Idtr);
1329 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1330 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1331 g_usBs3TestStep++;
1332
1333 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1334 if (RT_SUCCESS(rc))
1335 {
1336 ASMSetIDTR(&Idtr);
1337 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1338 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1339 g_usBs3TestStep++;
1340
1341 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1342 }
1343 ASMSetIDTR(&IdtrSaved);
1344 }
1345
1346 /*
1347 * Check that CS.u1Accessed is set to 1. Use the test page selector #0 and #3 together
1348 * with interrupt gates 80h and 83h, respectively.
1349 */
1350/** @todo Throw in SS.u1Accessed too. */
1351 g_usBs3TestStep = 5400;
1352 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1353 {
1354 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1355 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1356 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1357
1358 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1359 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1360 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1361
1362 /* Check that the CS.A bit is being set on a general basis and that
1363           the special CS values work with our generic handler code. */
1364 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1365 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1366 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1367 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1368 g_usBs3TestStep++;
1369
1370 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1371 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1372 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1373 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1374 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1375 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1376 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1377 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1378 g_usBs3TestStep++;
1379
1380 /*
1381 * Now check that setting CS.u1Access to 1 does __NOT__ trigger a page
1382 * fault due to the RW bit being zero.
1383         * (We check both with and without the WP bit if 80486.)
1384 */
1385 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1386 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1387
1388 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1389 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1390 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1391 if (RT_SUCCESS(rc))
1392 {
1393 /* ring-0 handler */
1394 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1395 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1396 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1397 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1398 g_usBs3TestStep++;
1399
1400 /* ring-3 handler */
1401 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1402 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1403 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1404 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1405 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1406 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1407 g_usBs3TestStep++;
1408
1409 /* clear WP and repeat the above. */
1410 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1411 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1412 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1413 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1414
1415 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1416 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1417 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1418 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1419 g_usBs3TestStep++;
1420
1421 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1422 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1423 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1424 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!n", Bs3GdteTestPage03.Gen.u4Type);
1425 g_usBs3TestStep++;
1426
1427 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1428 }
1429
1430 ASMSetCR0(uCr0Saved);
1431
1432 /*
1433 * While we're here, check that if the CS GDT entry is a non-present
1434         * page we do get a #PF with the right error code and CR2.
1435 */
1436 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1437 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1438 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1439 if (RT_SUCCESS(rc))
1440 {
1441 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1442 if (f486Plus)
1443 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1444 else
1445 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1446 g_usBs3TestStep++;
1447
1448 /* Do it from ring-3 to check ErrCd, which doesn't set X86_TRAP_PF_US it turns out. */
1449 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1450 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1451 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1452
1453 if (f486Plus)
1454 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1455 else
1456 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1457 g_usBs3TestStep++;
1458
1459 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1460 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1461 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1462 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1463 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1464 }
1465
1466 /* restore */
1467 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1468 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1469 }
1470
1471# endif /* 32 || 64*/
1472
1473 /*
1474 * Check broad EFLAGS effects.
1475 */
1476 g_usBs3TestStep = 5600;
1477 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1478 {
1479 for (iRing = 0; iRing < 4; iRing++)
1480 {
1481 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1482 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1483
1484 /* all set */
1485 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1486 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1487 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1488 if (f486Plus)
1489 CtxTmp.rflags.u32 |= X86_EFL_AC;
1490 if (f486Plus && !g_f16BitSys)
1491 CtxTmp.rflags.u32 |= X86_EFL_RF;
1492 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1493 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1494 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1495 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1496
1497 if (iCtx >= iRing)
1498 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1499 else
1500 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1501 uExpected = CtxTmp.rflags.u32
1502 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1503 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1504 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1505 if (TrapCtx.fHandlerRfl != uExpected)
1506 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1507 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1508 g_usBs3TestStep++;
1509
1510 /* all cleared */
1511 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1512 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1513 else
1514 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1515 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1516 if (iCtx >= iRing)
1517 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1518 else
1519 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1520 uExpected = CtxTmp.rflags.u32;
1521 if (TrapCtx.fHandlerRfl != uExpected)
1522 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1523 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1524 g_usBs3TestStep++;
1525 }
1526 }
1527
1528/** @todo CS.LIMIT / canonical(CS) */
1529
1530
1531 /*
1532 * Check invalid gate types.
1533 */
1534 g_usBs3TestStep = 32000;
1535 for (iRing = 0; iRing <= 3; iRing++)
1536 {
1537 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1538 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1539 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1540 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1541 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1542 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1543 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1544 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1545 /*286:*/ 12, 14, 15 };
1546 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1547 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1548 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1549
1550
1551 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1552 {
1553 unsigned iType;
1554
1555 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1556 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1557# if TMPL_BITS == 32
1558 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1559# endif
1560 for (iType = 0; iType < cInvTypes; iType++)
1561 {
1562 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1563 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1564 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1565
1566 for (i = 0; i < 4; i++)
1567 {
1568 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1569 {
1570 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1571 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1572 : s_auCSes[j] | i;
1573 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1574 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1575 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1576 g_usBs3TestStep++;
1577 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1578
1579 /* Mark it not-present to check that invalid type takes precedence. */
1580 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1581 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1582 g_usBs3TestStep++;
1583 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1584 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1585 }
1586 }
1587
1588 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1589 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1590 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1591 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1592 }
1593 }
1594 }
1595 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1596
1597
1598 /** @todo
1599 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1600 * - Quickly generate all faults.
1601 * - All the peculiarities v8086.
1602 */
1603
1604# if TMPL_BITS != 16
1605 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1606# endif
1607}
1608#endif /* convert me */
1609
1610
/**
 * Worker for bs3CpuBasic2_RaiseXcpt11 that runs all the \#AC test snippets in
 * one particular CR0.AM / buffer-aliasing configuration.
 *
 * Iterates rings x EFLAGS.AC x test snippets x buffer misalignment offsets and
 * checks that the CPU raises the expected event (\#GP, \#PF, \#AC or none,
 * i.e. stopping at the trailing UD2) for each combination.
 *
 * @param bMode         The CPU mode being tested.
 * @param pbBuf         The test buffer the snippets access via ds:xBX.  Cache
 *                      line aligned (see the misalignment loop note below).
 * @param cbCacheLine   The assumed cache line size; the misalignment loop runs
 *                      past it so the split-lock scenario is covered too.
 * @param fAm           Whether the caller has set CR0.AM, i.e. whether \#AC is
 *                      armed for misaligned ring-3 accesses with EFLAGS.AC set.
 * @param fPf           Whether pbBuf is a supervisor-only page alias, so that
 *                      ring-3 accesses are expected to \#PF.
 * @param uFlatBufPtr   The flat address of the buffer, used to check the \#PF
 *                      fault address (only meaningful when fPf is true).
 * @param pCmn          The snippet table matching the current code bitness.
 */
static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm, bool fPf,
                                           RTCCUINTXREG uFlatBufPtr, BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
{
    BS3TRAPFRAME    TrapCtx;
    BS3REGCTX       Ctx;
    BS3REGCTX       CtxUdExpected;
    uint8_t const   cRings = bMode == BS3_MODE_RM ? 1 : 4; /* real mode has no rings, test ring 0 only there */
    uint8_t         iRing;
    uint16_t        iTest;

    /* make sure they're allocated */
    Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
    Bs3MemZero(&Ctx, sizeof(Ctx));
    Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));

    /*
     * Test all relevant rings.
     *
     * The memory operand is ds:xBX, so point it to pbBuf.
     * The test snippets mostly use xAX as operand, with the div
     * one also using xDX, so make sure they make some sense.
     */
    Bs3RegCtxSaveEx(&Ctx, bMode, 512);

    Ctx.cr0.u32 &= ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS); /* so fninit + fld works */

    /* v8086 code always executes at CPL 3; otherwise start at ring 0. */
    for (iRing = BS3_MODE_IS_V86(bMode) ? 3 : 0; iRing < cRings; iRing++)
    {
        uint32_t    uEbx;
        uint8_t     fAc;

        if (!BS3_MODE_IS_RM_OR_V86(bMode))
            Bs3RegCtxConvertToRingX(&Ctx, iRing);

        if (!fPf || BS3_MODE_IS_32BIT_CODE(bMode) || BS3_MODE_IS_64BIT_CODE(bMode))
            Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
        else
        {
            /* Bs3RegCtxSetGrpDsFromCurPtr barfs when trying to output a sel:off address for the aliased buffer. */
            Ctx.ds      = BS3_FP_SEG(pbBuf);
            Ctx.rbx.u32 = BS3_FP_OFF(pbBuf);
        }
        uEbx = Ctx.rbx.u32; /* remember the base offset; the misalignment loop adds offMem to it */

        Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
                  ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
        Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */

        Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));

        /*
         * AC flag loop.
         */
        for (fAc = 0; fAc < 2; fAc++)
        {
            if (fAc)
                Ctx.rflags.u32 |= X86_EFL_AC;
            else
                Ctx.rflags.u32 &= ~X86_EFL_AC;

            /*
             * Loop over the test snippets.
             */
            for (iTest = 0; iTest < pCmn->cEntries; iTest++)
            {
                uint8_t const   fOp     = pCmn->paEntries[iTest].fOp;
                uint16_t const  cbMem   = pCmn->paEntries[iTest].cbMem;
                uint8_t const   cbAlign = pCmn->paEntries[iTest].cbAlign;
                uint16_t const  cbMax   = cbCacheLine + cbMem;
                uint16_t        offMem;
                /* The byte right before the snippet entry point holds the offset of its UD2. */
                uint8_t BS3_FAR *poffUd = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
                Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
                CtxUdExpected.rip    = Ctx.rip;
                CtxUdExpected.rip.u  = Ctx.rip.u + poffUd[-1];
                CtxUdExpected.cs     = Ctx.cs;
                CtxUdExpected.rflags = Ctx.rflags;
                if (bMode == BS3_MODE_RM)
                    CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? Observed with bs3-cpu-instr-3 too (10980xe), seems to be the CPU doing it. */
                CtxUdExpected.rdx    = Ctx.rdx;
                CtxUdExpected.rax    = Ctx.rax;
                if (fOp & MYOP_LD)
                {
                    /* Load snippets read from the buffer (filled with 0x01 bytes below) into xAX. */
                    switch (cbMem)
                    {
                        case 2:
                            CtxUdExpected.rax.u16 = 0x0101;
                            break;
                        case 4:
                            CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
                            break;
                        case 8:
                            CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
                            break;
                    }
                }

                /*
                 * Buffer misalignment loop.
                 * Note! We must make sure to cross a cache line here to make sure
                 *       to cover the split-lock scenario. (The buffer is cache
                 *       line aligned.)
                 */
                for (offMem = 0; offMem < cbMax; offMem++)
                {
                    bool const fMisaligned = (offMem & (cbAlign - 1)) != 0;
                    unsigned offBuf = cbMax + cbMem * 2;
                    while (offBuf-- > 0)
                        pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */

                    CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB. */
                    if (BS3_MODE_IS_16BIT_SYS(bMode))
                        g_uBs3TrapEipHint = Ctx.rip.u32;

                    //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32 ss:esp=%04RX16:%08RX32 bXcpt=%#x errcd=%#x fAm=%d fAc=%d ESP=%#RX32\n",
                    //              iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32, Ctx.ss, Ctx.rsp.u32, TrapCtx.bXcpt, (unsigned)TrapCtx.uErrCd, fAm, fAc, ASMGetESP());

                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);

                    /* Pick the expected outcome for this combination: */
                    if (   (pCmn->paEntries[iTest].fOp & MYOP_AC_GP)
                        && fMisaligned
                        && (!fAm || iRing != 3 || !fAc || (offMem & 3 /* 10980XE */) == 0) )
                    {
                        /* Misaligned op that faults with #GP when the #AC conditions aren't all met. */
                        if (fAc && bMode == BS3_MODE_RM)
                            TrapCtx.Ctx.rflags.u32 |= X86_EFL_AC;
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                    }
                    else if (fPf && iRing == 3 && (!fAm || !fAc || !fMisaligned)) /* #AC beats #PF */
                        bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx,
                                                  X86_TRAP_PF_P | X86_TRAP_PF_US
                                                  | (pCmn->paEntries[iTest].fOp & MYOP_ST ? X86_TRAP_PF_RW : 0),
                                                  uFlatBufPtr + offMem + (cbMem > 64 ? cbMem - 1 /*FXSAVE*/ : 0),
                                                  pCmn->paEntries[iTest].offFaultInstr);
                    else if (!fAm || iRing != 3 || !fAc || !fMisaligned)
                    {
                        /* No fault expected: the snippet runs to its UD2. */
                        if (fOp & MYOP_EFL)
                        {
                            /* Snippet modifies status flags; take them from the actual result. */
                            CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
                            CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
                        }
                        if (fOp == MYOP_LD_DIV)
                        {
                            /* div results aren't predicted here; take quotient/remainder from the actual run. */
                            CtxUdExpected.rax = TrapCtx.Ctx.rax;
                            CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
                        }
                        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                    }
                    else
                        /* CPL 3 + CR0.AM + EFLAGS.AC + misaligned access: #AC expected. */
                        bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx, pCmn->paEntries[iTest].offFaultInstr);

                    g_usBs3TestStep++;
                }
            }
        }
    }
}
1766
1767
1768/**
1769 * Entrypoint for \#AC tests.
1770 *
1771 * @returns 0 or BS3TESTDOMODE_SKIPPED.
1772 * @param bMode The CPU mode we're testing.
1773 *
1774 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
1775 * with control registers and such.
1776 */
BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
{
    unsigned            cbCacheLine = 128; /** @todo detect */
    uint8_t BS3_FAR    *pbBufAlloc;
    uint8_t BS3_FAR    *pbBuf;
    unsigned            idxCmnModes;
    uint32_t            fCr0;

    /*
     * Skip if 386 or older.
     */
    if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
    {
        Bs3TestSkipped("#AC test requires 486 or later");
        return BS3TESTDOMODE_SKIPPED;
    }

    bs3CpuBasic2_SetGlobals(bMode);

    /* Get us a 64-byte aligned buffer.  Over-allocate two pages and round the
       pointer up to the next page boundary if needed, so the worker gets a
       page (and thus cache line) aligned buffer. */
    pbBufAlloc = pbBuf = Bs3MemAllocZ(BS3_MODE_IS_RM_OR_V86(bMode) ? BS3MEMKIND_REAL : BS3MEMKIND_TILED, X86_PAGE_SIZE * 2);
    if (!pbBufAlloc)
        return Bs3TestFailed("Failed to allocate 2 pages of real-mode memory");
    if (BS3_FP_OFF(pbBuf) & (X86_PAGE_SIZE - 1))
        pbBuf = &pbBufAlloc[X86_PAGE_SIZE - (BS3_FP_OFF(pbBuf) & X86_PAGE_OFFSET_MASK)];
    BS3_ASSERT(pbBuf - pbBufAlloc <= X86_PAGE_SIZE);
    //Bs3TestPrintf("pbBuf=%p\n", pbBuf);

    /* Find the g_aCmnModes entry matching the current code bitness. */
    idxCmnModes = 0;
    while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
        idxCmnModes++;
    //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);

    /* First round is w/o alignment checks enabled (CR0.AM clear, so no #AC possible). */
    //Bs3TestPrintf("round 1\n");
    fCr0 = Bs3RegGetCr0();
    BS3_ASSERT(!(fCr0 & X86_CR0_AM)); /* expected clear on entry; cleared again below to be safe */
    Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
#if 1
    bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
#endif

    /* The second round is with alignment checks enabled. */
#if 1
    //Bs3TestPrintf("round 2\n");
    Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
    bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
#endif

#if 1
    /* The third and fourth round access the buffer via a page alias that's not
       accessible from ring-3.  The third round has ACs disabled and the fourth
       has them enabled. */
    if (BS3_MODE_IS_PAGED(bMode) && !BS3_MODE_IS_V86(bMode))
    {
        /* Alias the buffer as system memory so ring-3 access with AC+AM will cause #PF: */
        /** @todo the aliasing is not necessary any more... */
        int             rc;
        RTCCUINTXREG    uFlatBufPtr = Bs3SelPtrToFlat(pbBuf);
        uint64_t const  uAliasPgPtr = bMode & BS3_MODE_CODE_64 ? UINT64_C(0x0000648680000000) : UINT32_C(0x80000000);
        /* Note: no X86_PTE_US in the alias mapping, which is what makes it supervisor-only. */
        rc = Bs3PagingAlias(uAliasPgPtr, uFlatBufPtr & ~(uint64_t)X86_PAGE_OFFSET_MASK, X86_PAGE_SIZE * 2,
                            X86_PTE_P | X86_PTE_RW);
        if (RT_SUCCESS(rc))
        {
            /* We 'misalign' the segment base here to make sure it's the final
               address that gets alignment checked and not just the operand value.
               (Base is biased down by one; the offset compensates with +1.) */
            RTCCUINTXREG     uAliasBufPtr = (RTCCUINTXREG)uAliasPgPtr + (uFlatBufPtr & X86_PAGE_OFFSET_MASK);
            uint8_t BS3_FAR *pbBufAlias   = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, (uFlatBufPtr & X86_PAGE_OFFSET_MASK) + 1);
            Bs3SelSetup16BitData(&Bs3GdteSpare00, uAliasPgPtr - 1);

            //Bs3TestPrintf("round 3 pbBufAlias=%p\n", pbBufAlias);
            Bs3RegSetCr0(Bs3RegGetCr0() & ~X86_CR0_AM);
            bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, false /*fAm*/,
                                           true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);

            //Bs3TestPrintf("round 4\n");
            Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
            bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, true /*fAm*/,
                                           true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);

            Bs3PagingUnalias(uAliasPgPtr, X86_PAGE_SIZE * 2);
        }
        else
            Bs3TestFailedF("Bs3PagingAlias failed with %Rrc", rc);
    }
#endif

    /* Clean up: free the buffer and restore the original CR0 value. */
    Bs3MemFree(pbBufAlloc, X86_PAGE_SIZE * 2);
    Bs3RegSetCr0(fCr0);
    return 0;
}
1869
1870
1871/**
1872 * Executes one round of SIDT and SGDT tests using one assembly worker.
1873 *
1874 * This is written with driving everything from the 16-bit or 32-bit worker in
1875 * mind, i.e. not assuming the test bitcount is the same as the current.
1876 */
1877static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1878 uint8_t const *pbExpected)
1879{
1880 BS3TRAPFRAME TrapCtx;
1881 BS3REGCTX Ctx;
1882 BS3REGCTX CtxUdExpected;
1883 BS3REGCTX TmpCtx;
1884 uint8_t const cbBuf = 8*2; /* test buffer area */
1885 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1886 uint8_t BS3_FAR *pbBuf = abBuf;
1887 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1888 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1889 uint8_t bFiller;
1890 int off;
1891 int off2;
1892 unsigned cb;
1893 uint8_t BS3_FAR *pbTest;
1894
1895 /* make sure they're allocated */
1896 Bs3MemZero(&Ctx, sizeof(Ctx));
1897 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1898 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1899 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1900 Bs3MemZero(&abBuf, sizeof(abBuf));
1901
1902 /* Create a context, give this routine some more stack space, point the context
1903 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1904 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1905 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1906 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1907 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1908 g_uBs3TrapEipHint = Ctx.rip.u32;
1909 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1910 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1911
1912 /* For successful SIDT attempts, we'll stop at the UD2. */
1913 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1914 CtxUdExpected.rip.u += pWorker->cbInstr;
1915
1916 /*
1917 * Check that it works at all and that only bytes we expect gets written to.
1918 */
1919 /* First with zero buffer. */
1920 Bs3MemZero(abBuf, sizeof(abBuf));
1921 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1922 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1923 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1924 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1925 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1926 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1927 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1928 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1929 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1930 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1931 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1932 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1933 g_usBs3TestStep++;
1934
1935 /* Again with a buffer filled with a byte not occuring in the previous result. */
1936 bFiller = 0x55;
1937 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1938 bFiller++;
1939 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1940 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1941 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1942
1943 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1944 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1945 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1946 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1947 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1948 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1949 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1950 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1951 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1952 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1953 g_usBs3TestStep++;
1954
1955 /*
1956 * Slide the buffer along 8 bytes to cover misalignment.
1957 */
1958 for (off = 0; off < 8; off++)
1959 {
1960 pbBuf = &abBuf[off];
1961 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1962 CtxUdExpected.rbx.u = Ctx.rbx.u;
1963
1964 /* First with zero buffer. */
1965 Bs3MemZero(abBuf, sizeof(abBuf));
1966 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1967 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1968 if (off > 0 && !ASMMemIsZero(abBuf, off))
1969 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1970 cbIdtr, off, off + cbBuf, abBuf);
1971 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1972 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1973 cbIdtr, off, off + cbBuf, abBuf);
1974 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1975 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1976 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1977 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1978 g_usBs3TestStep++;
1979
1980 /* Again with a buffer filled with a byte not occuring in the previous result. */
1981 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1982 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1983 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1984 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1985 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1986 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1987 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1988 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1989 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1990 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1991 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1992 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1993 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1994 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
1995 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1996 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1997 g_usBs3TestStep++;
1998 }
1999 pbBuf = abBuf;
2000 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2001 CtxUdExpected.rbx.u = Ctx.rbx.u;
2002
2003 /*
2004 * Play with the selector limit if the target mode supports limit checking
2005 * We use BS3_SEL_TEST_PAGE_00 for this
2006 */
2007 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2008 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2009 {
2010 uint16_t cbLimit;
2011 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
2012 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2013 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2014 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2015 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2016 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2017
2018 if (pWorker->fSs)
2019 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2020 else
2021 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2022
2023 /* Expand up (normal). */
2024 for (off = 0; off < 8; off++)
2025 {
2026 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2027 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2028 {
2029 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2030 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2031 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2032 if (off + cbIdtr <= cbLimit + 1)
2033 {
2034 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2035 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2036 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2037 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2038 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2039 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2040 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2041 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
2042 }
2043 else
2044 {
2045 if (pWorker->fSs)
2046 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2047 else
2048 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2049 if (off + 2 <= cbLimit + 1)
2050 {
2051 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2052 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2053 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2054 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2055 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2056 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2057 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2058 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2059 }
2060 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2061 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2062 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2063 }
2064
2065 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2066 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2067 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2068 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2069 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2070 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2071
2072 g_usBs3TestStep++;
2073 }
2074 }
2075
2076 /* Expand down (weird). Inverted valid area compared to expand up,
2077 so a limit of zero give us a valid range for 0001..0ffffh (instead of
2078 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2079 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2080 (because in a normal expand up the 0ffffh means all 64KB are
2081 accessible). */
2082 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2083 for (off = 0; off < 8; off++)
2084 {
2085 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2086 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2087 {
2088 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2089 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2090 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2091
2092 if (off > cbLimit)
2093 {
2094 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2095 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2096 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2097 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2098 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2099 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2100 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2101 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2102 }
2103 else
2104 {
2105 if (pWorker->fSs)
2106 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2107 else
2108 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2109 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2110 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2111 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2112 }
2113
2114 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2115 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2116 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2117 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2118 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2119 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2120
2121 g_usBs3TestStep++;
2122 }
2123 }
2124
2125 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2126 CtxUdExpected.rbx.u = Ctx.rbx.u;
2127 CtxUdExpected.ss = Ctx.ss;
2128 CtxUdExpected.ds = Ctx.ds;
2129 }
2130
2131 /*
2132 * Play with the paging.
2133 */
2134 if ( BS3_MODE_IS_PAGED(bTestMode)
2135 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2136 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2137 {
2138 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2139
2140 /*
2141 * Slide the buffer towards the trailing guard page. We'll observe the
2142 * first word being written entirely separately from the 2nd dword/qword.
2143 */
2144 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2145 {
2146 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2147 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2148 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2149 if (off + cbIdtr <= X86_PAGE_SIZE)
2150 {
2151 CtxUdExpected.rbx = Ctx.rbx;
2152 CtxUdExpected.ss = Ctx.ss;
2153 CtxUdExpected.ds = Ctx.ds;
2154 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2155 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2156 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2157 }
2158 else
2159 {
2160 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2161 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2162 if ( off <= X86_PAGE_SIZE - 2
2163 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2164 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2165 pbExpected, &pbTest[off], off);
2166 if ( off < X86_PAGE_SIZE - 2
2167 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2168 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2169 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2170 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2171 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2172 }
2173 g_usBs3TestStep++;
2174 }
2175
2176 /*
2177 * Now, do it the other way around. It should look normal now since writing
2178 * the limit will #PF first and nothing should be written.
2179 */
2180 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2181 {
2182 Bs3MemSet(pbTest, bFiller, 48);
2183 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2184 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2185 if (off >= 0)
2186 {
2187 CtxUdExpected.rbx = Ctx.rbx;
2188 CtxUdExpected.ss = Ctx.ss;
2189 CtxUdExpected.ds = Ctx.ds;
2190 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2191 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2192 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2193 }
2194 else
2195 {
2196 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2197 uFlatTest + off, 0 /*cbIpAdjust*/);
2198 if ( -off < cbIdtr
2199 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2200 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2201 bFiller, cbIdtr + off, pbTest, off);
2202 }
2203 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2204 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2205 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2206 g_usBs3TestStep++;
2207 }
2208
2209 /*
2210 * Combine paging and segment limit and check ordering.
2211 * This is kind of interesting here since it the instruction seems to
2212 * be doing two separate writes.
2213 */
2214 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2215 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2216 {
2217 uint16_t cbLimit;
2218
2219 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2220 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2221 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2222 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2223 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2224
2225 if (pWorker->fSs)
2226 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2227 else
2228 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2229
2230 /* Expand up (normal), approaching tail guard page. */
2231 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2232 {
2233 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2234 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2235 {
2236 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2237 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2238 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2239 if (off + cbIdtr <= cbLimit + 1)
2240 {
2241 /* No #GP, but maybe #PF. */
2242 if (off + cbIdtr <= X86_PAGE_SIZE)
2243 {
2244 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2245 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2246 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2247 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2248 }
2249 else
2250 {
2251 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2252 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2253 if ( off <= X86_PAGE_SIZE - 2
2254 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2255 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2256 pbExpected, &pbTest[off], off);
2257 cb = X86_PAGE_SIZE - off - 2;
2258 if ( off < X86_PAGE_SIZE - 2
2259 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2260 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2261 bFiller, cb, &pbTest[off + 2], off);
2262 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2263 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2264 }
2265 }
2266 else if (off + 2 <= cbLimit + 1)
2267 {
2268 /* [ig]tr.limit writing does not cause #GP, but may cause #PG, if not writing the base causes #GP. */
2269 if (off <= X86_PAGE_SIZE - 2)
2270 {
2271 if (pWorker->fSs)
2272 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2273 else
2274 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2275 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2276 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2277 pbExpected, &pbTest[off], off);
2278 cb = X86_PAGE_SIZE - off - 2;
2279 if ( off < X86_PAGE_SIZE - 2
2280 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2281 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2282 bFiller, cb, &pbTest[off + 2], off);
2283 }
2284 else
2285 {
2286 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2287 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2288 if ( off < X86_PAGE_SIZE
2289 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2290 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2291 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2292 }
2293 }
2294 else
2295 {
2296 /* #GP/#SS on limit. */
2297 if (pWorker->fSs)
2298 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2299 else
2300 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2301 if ( off < X86_PAGE_SIZE
2302 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2303 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2304 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2305 }
2306
2307 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2308 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2309 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2310 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2311
2312 g_usBs3TestStep++;
2313
2314 /* Set DS to 0 and check that we get #GP(0). */
2315 if (!pWorker->fSs)
2316 {
2317 Ctx.ds = 0;
2318 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2319 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2320 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2321 g_usBs3TestStep++;
2322 }
2323 }
2324 }
2325
2326 /* Expand down. */
2327 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2328 uFlatTest -= X86_PAGE_SIZE;
2329
2330 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2331 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2332 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2333 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2334
2335 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2336 {
2337 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2338 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2339 {
2340 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2341 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2342 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2343 if (cbLimit < off && off >= X86_PAGE_SIZE)
2344 {
2345 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2346 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2347 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2348 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2349 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2350 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2351 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2352 cbIdtr, off, cbLimit, bFiller, cb, pbTest[off + cbIdtr]);
2353 }
2354 else
2355 {
2356 if (cbLimit < off && off < X86_PAGE_SIZE)
2357 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2358 uFlatTest + off, 0 /*cbIpAdjust*/);
2359 else if (pWorker->fSs)
2360 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2361 else
2362 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2363 cb = cbIdtr*2;
2364 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2365 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2366 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE]);
2367 }
2368 g_usBs3TestStep++;
2369 }
2370 }
2371
2372 pbTest += X86_PAGE_SIZE;
2373 uFlatTest += X86_PAGE_SIZE;
2374 }
2375
2376 Bs3MemGuardedTestPageFree(pbTest);
2377 }
2378
2379 /*
2380 * Check non-canonical 64-bit space.
2381 */
2382 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2383 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2384 {
2385 /* Make our references relative to the gap. */
2386 pbTest += g_cbBs3PagingOneCanonicalTrap;
2387
2388 /* Hit it from below. */
2389 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2390 {
2391 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2392 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2393 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2394 if (off + cbIdtr <= 0)
2395 {
2396 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2397 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2398 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2399 }
2400 else
2401 {
2402 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2403 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2404 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2405 off2 = off <= -2 ? 2 : 0;
2406 cb = cbIdtr - off2;
2407 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2408 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2409 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2410 }
2411 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2412 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2413 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2414 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2415 }
2416
2417 /* Hit it from above. */
2418 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2419 {
2420 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2421 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2422 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2423 if (off >= 0)
2424 {
2425 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2426 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2427 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2428 }
2429 else
2430 {
2431 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2432 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2433 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2434 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2435 }
2436 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2437 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2438 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2439 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2440 }
2441
2442 }
2443}
2444
2445
2446static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2447 uint8_t const *pbExpected)
2448{
2449 unsigned idx;
2450 unsigned bRing;
2451 unsigned iStep = 0;
2452
2453 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2454 test and don't want to bother with double faults. */
2455 for (bRing = 0; bRing <= 3; bRing++)
2456 {
2457 for (idx = 0; idx < cWorkers; idx++)
2458 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2459 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2460 {
2461 g_usBs3TestStep = iStep;
2462 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2463 iStep += 1000;
2464 }
2465 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2466 break;
2467 }
2468}
2469
2470
2471BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2472{
2473 union
2474 {
2475 RTIDTR Idtr;
2476 uint8_t ab[16];
2477 } Expected;
2478
2479 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2480 bs3CpuBasic2_SetGlobals(bMode);
2481
2482 /*
2483 * Pass to common worker which is only compiled once per mode.
2484 */
2485 Bs3MemZero(&Expected, sizeof(Expected));
2486 ASMGetIDTR(&Expected.Idtr);
2487 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2488
2489 /*
2490 * Re-initialize the IDT.
2491 */
2492 Bs3TrapReInit();
2493 return 0;
2494}
2495
2496
/**
 * Mode-specific test entry point for SGDT.
 *
 * In paged modes the GDT is first aliased to a high linear address so the
 * base stored by SGDT differs from the original mapping; the alias is undone
 * again before returning.  The IDT is re-initialized at the end since the
 * worker messes with it.
 *
 * @returns 0.
 * @param   bMode   The test mode (BS3_MODE_XXX).
 */
BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
{
    uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;    /* Original GDT base, for restoring at the end. */
    uint64_t       uNew     = 0;                    /* Non-zero when we've aliased the GDT (cleanup flag). */
    union
    {
        RTGDTR  Gdtr;
        uint8_t ab[16];
    } Expected;

    //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
    bs3CpuBasic2_SetGlobals(bMode);

    /*
     * If paged mode, try push the GDT way up.
     */
    Bs3MemZero(&Expected, sizeof(Expected));
    ASMGetGDTR(&Expected.Gdtr);
    if (BS3_MODE_IS_PAGED(bMode))
    {
/** @todo loading non-canonical base addresses. */
        int rc;
        /* Pick a high page-aligned target and keep the original page offset,
           so only the page frame part of the base address moves. */
        uNew  = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
        uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
        rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
        if (RT_SUCCESS(rc))
        {
            Bs3Lgdt_Gdt.uAddr = uNew;
            Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
            /* Re-read the GDTR so the expectation matches the aliased base. */
            ASMGetGDTR(&Expected.Gdtr);
            if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
                /* When compiled 16/32-bit on a 64-bit system, the read above
                   doesn't capture the high base dword; patch it in by hand. */
                *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
        }
    }

    /*
     * Pass to common worker which is only compiled once per mode.
     */
    bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);

    /*
     * Unalias the GDT.
     */
    if (uNew != 0)
    {
        Bs3Lgdt_Gdt.uAddr = uOrgAddr;
        Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
        Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
    }

    /*
     * Re-initialize the IDT.
     */
    Bs3TrapReInit();
    return 0;
}
2553
2554
2555
2556/*
2557 * LIDT & LGDT
2558 */
2559
2560/**
2561 * Executes one round of LIDT and LGDT tests using one assembly worker.
2562 *
2563 * This is written with driving everything from the 16-bit or 32-bit worker in
2564 * mind, i.e. not assuming the test bitcount is the same as the current.
2565 */
2566static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2567 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2568{
2569 static const struct
2570 {
2571 bool fGP;
2572 uint16_t cbLimit;
2573 uint64_t u64Base;
2574 } s_aValues64[] =
2575 {
2576 { false, 0x0000, UINT64_C(0x0000000000000000) },
2577 { false, 0x0001, UINT64_C(0x0000000000000001) },
2578 { false, 0x0002, UINT64_C(0x0000000000000010) },
2579 { false, 0x0003, UINT64_C(0x0000000000000123) },
2580 { false, 0x0004, UINT64_C(0x0000000000001234) },
2581 { false, 0x0005, UINT64_C(0x0000000000012345) },
2582 { false, 0x0006, UINT64_C(0x0000000000123456) },
2583 { false, 0x0007, UINT64_C(0x0000000001234567) },
2584 { false, 0x0008, UINT64_C(0x0000000012345678) },
2585 { false, 0x0009, UINT64_C(0x0000000123456789) },
2586 { false, 0x000a, UINT64_C(0x000000123456789a) },
2587 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2588 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2589 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2590 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2591 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2592 { true, 0x0000, UINT64_C(0x0000800000000000) },
2593 { true, 0x0000, UINT64_C(0x0000800000000333) },
2594 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2595 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2596 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2597 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2598 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2599 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2600 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2601 { false, 0x5678, UINT64_C(0xffff800000000000) },
2602 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2603 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2604 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2605 };
2606 static const struct
2607 {
2608 uint16_t cbLimit;
2609 uint32_t u32Base;
2610 } s_aValues32[] =
2611 {
2612 { 0xdfdf, UINT32_C(0xefefefef) },
2613 { 0x0000, UINT32_C(0x00000000) },
2614 { 0x0001, UINT32_C(0x00000001) },
2615 { 0x0002, UINT32_C(0x00000012) },
2616 { 0x0003, UINT32_C(0x00000123) },
2617 { 0x0004, UINT32_C(0x00001234) },
2618 { 0x0005, UINT32_C(0x00012345) },
2619 { 0x0006, UINT32_C(0x00123456) },
2620 { 0x0007, UINT32_C(0x01234567) },
2621 { 0x0008, UINT32_C(0x12345678) },
2622 { 0x0009, UINT32_C(0x80204060) },
2623 { 0x000a, UINT32_C(0xddeeffaa) },
2624 { 0x000b, UINT32_C(0xfdecdbca) },
2625 { 0x000c, UINT32_C(0x6098456b) },
2626 { 0x000d, UINT32_C(0x98506099) },
2627 { 0x000e, UINT32_C(0x206950bc) },
2628 { 0x000f, UINT32_C(0x9740395d) },
2629 { 0x0334, UINT32_C(0x64a9455e) },
2630 { 0xb423, UINT32_C(0xd20b6eff) },
2631 { 0x4955, UINT32_C(0x85296d46) },
2632 { 0xffff, UINT32_C(0x07000039) },
2633 { 0xefe1, UINT32_C(0x0007fe00) },
2634 };
2635
2636 BS3TRAPFRAME TrapCtx;
2637 BS3REGCTX Ctx;
2638 BS3REGCTX CtxUdExpected;
2639 BS3REGCTX TmpCtx;
2640 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2641 uint8_t abBufSave[32]; /* For saving the result after loading. */
2642 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2643 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2644 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2645 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2646 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
2647 uint8_t const cbBaseLoaded = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
2648 : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2649 ? 3 : 4;
2650 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2651 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
2652 uint8_t bFiller1; /* For filling abBufLoad. */
2653 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2654 int off;
2655 uint8_t BS3_FAR *pbTest;
2656 unsigned i;
2657
2658 /* make sure they're allocated */
2659 Bs3MemZero(&Ctx, sizeof(Ctx));
2660 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2661 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2662 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2663 Bs3MemZero(abBufSave, sizeof(abBufSave));
2664 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2665 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2666
2667 /*
2668 * Create a context, giving this routine some more stack space.
2669 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2670 * - Point DS/SS:xBX at abBufLoad.
2671 * - Point ES:xDI at abBufSave.
2672 * - Point ES:xSI at abBufRestore.
2673 */
2674 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2675 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2676 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2677 g_uBs3TrapEipHint = Ctx.rip.u32;
2678 Ctx.rflags.u16 &= ~X86_EFL_IF;
2679 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2680
2681 pbBufSave = abBufSave;
2682 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2683 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
2684 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2685
2686 pbBufRestore = abBufRestore;
2687 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2688 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2689 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2690 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2691
2692 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2693 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2694
2695 /* For successful SIDT attempts, we'll stop at the UD2. */
2696 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2697 CtxUdExpected.rip.u += pWorker->cbInstr;
2698
2699 /*
2700 * Check that it works at all.
2701 */
2702 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2703 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2704 Bs3MemZero(abBufSave, sizeof(abBufSave));
2705 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2706 if (bRing != 0)
2707 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2708 else
2709 {
2710 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2711 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2712 Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
2713 pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2714 }
2715 g_usBs3TestStep++;
2716
2717 /* Determine two filler bytes that doesn't appear in the previous result or our expectations. */
2718 bFiller1 = ~0x55;
2719 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2720 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2721 || bFiller1 == 0xff)
2722 bFiller1++;
2723 bFiller2 = 0x33;
2724 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2725 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2726 || bFiller2 == 0xff
2727 || bFiller2 == bFiller1)
2728 bFiller2++;
2729 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2730 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
2731
    /* Again with a buffer filled with a byte not occurring in the previous result. */
2733 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2734 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2735 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2736 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2737 if (bRing != 0)
2738 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2739 else
2740 {
2741 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2742 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2743 Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
2744 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2745 }
2746 g_usBs3TestStep++;
2747
2748 /*
2749 * Try loading a bunch of different limit+base value to check what happens,
2750 * especially what happens wrt the top part of the base in 16-bit mode.
2751 */
2752 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2753 {
2754 for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
2755 {
2756 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2757 Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
2758 Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
2759 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2760 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2761 if (bRing != 0 || s_aValues64[i].fGP)
2762 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2763 else
2764 {
2765 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2766 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
2767 || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
2768 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2769 Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2770 pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
2771 bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2772 }
2773 g_usBs3TestStep++;
2774 }
2775 }
2776 else
2777 {
2778 for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
2779 {
2780 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2781 Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
2782 Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
2783 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2784 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2785 if (bRing != 0)
2786 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2787 else
2788 {
2789 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2790 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
2791 || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
2792 || ( cbBaseLoaded != 4
2793 && pbBufSave[2+3] != bTop16BitBase)
2794 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2795 Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
2796 pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
2797 f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
2798 }
2799 g_usBs3TestStep++;
2800 }
2801 }
2802
2803 /*
2804 * Slide the buffer along 8 bytes to cover misalignment.
2805 */
2806 for (off = 0; off < 8; off++)
2807 {
2808 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
2809 CtxUdExpected.rbx.u = Ctx.rbx.u;
2810
2811 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2812 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2813 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2814 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2815 if (bRing != 0)
2816 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2817 else
2818 {
2819 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2820 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2821 Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
2822 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2823 }
2824 g_usBs3TestStep++;
2825 }
2826 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2827 CtxUdExpected.rbx.u = Ctx.rbx.u;
2828
2829 /*
2830 * Play with the selector limit if the target mode supports limit checking
2831 * We use BS3_SEL_TEST_PAGE_00 for this
2832 */
2833 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2834 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2835 {
2836 uint16_t cbLimit;
2837 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
2838 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2839 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2840 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2841 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2842 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2843
2844 if (pWorker->fSs)
2845 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2846 else
2847 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2848
2849 /* Expand up (normal). */
2850 for (off = 0; off < 8; off++)
2851 {
2852 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2853 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2854 {
2855 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2856
2857 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2858 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2859 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2860 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2861 if (bRing != 0)
2862 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2863 else if (off + cbIdtr <= cbLimit + 1)
2864 {
2865 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2866 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2867 Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
2868 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2869 }
2870 else if (pWorker->fSs)
2871 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2872 else
2873 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2874 g_usBs3TestStep++;
2875
                /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2877 abBufLoad[off] = abBufLoad[off + 1] = 0;
2878 abBufLoad[off + 2] |= 1;
2879 abBufLoad[off + cbIdtr - 2] ^= 0x5a;
2880 abBufLoad[off + cbIdtr - 1] ^= 0xa5;
2881 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2882 if (bRing != 0)
2883 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2884 else if (off + cbIdtr <= cbLimit + 1)
2885 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2886 else if (pWorker->fSs)
2887 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2888 else
2889 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2890 }
2891 }
2892
2893 /* Expand down (weird). Inverted valid area compared to expand up,
2894 so a limit of zero give us a valid range for 0001..0ffffh (instead of
2895 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2896 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2897 (because in a normal expand up the 0ffffh means all 64KB are
2898 accessible). */
2899 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2900 for (off = 0; off < 8; off++)
2901 {
2902 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2903 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2904 {
2905 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2906
2907 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2908 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2909 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2910 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2911 if (bRing != 0)
2912 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2913 else if (off > cbLimit)
2914 {
2915 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2916 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2917 Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
2918 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2919 }
2920 else if (pWorker->fSs)
2921 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2922 else
2923 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2924 g_usBs3TestStep++;
2925
2926 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2927 abBufLoad[off] = abBufLoad[off + 1] = 0;
2928 abBufLoad[off + 2] |= 3;
2929 abBufLoad[off + cbIdtr - 2] ^= 0x55;
2930 abBufLoad[off + cbIdtr - 1] ^= 0xaa;
2931 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2932 if (bRing != 0)
2933 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2934 else if (off > cbLimit)
2935 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2936 else if (pWorker->fSs)
2937 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2938 else
2939 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2940 }
2941 }
2942
2943 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2944 CtxUdExpected.rbx.u = Ctx.rbx.u;
2945 CtxUdExpected.ss = Ctx.ss;
2946 CtxUdExpected.ds = Ctx.ds;
2947 }
2948
2949 /*
2950 * Play with the paging.
2951 */
2952 if ( BS3_MODE_IS_PAGED(bTestMode)
2953 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2954 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2955 {
2956 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2957
2958 /*
2959 * Slide the load buffer towards the trailing guard page.
2960 */
2961 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
2962 CtxUdExpected.ss = Ctx.ss;
2963 CtxUdExpected.ds = Ctx.ds;
2964 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2965 {
2966 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
2967 if (off < X86_PAGE_SIZE)
2968 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
2969 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2970 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2971 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2972 if (bRing != 0)
2973 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2974 else if (off + cbIdtr <= X86_PAGE_SIZE)
2975 {
2976 CtxUdExpected.rbx = Ctx.rbx;
2977 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2978 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2979 Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
2980 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2981 }
2982 else
2983 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2984 g_usBs3TestStep++;
2985
2986 /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
2987 The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
2988 if ( off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
2989 && ( off != X86_PAGE_SIZE - 2
2990 || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
2991 )
2992 {
2993 pbTest[off] = 0;
2994 if (off + 1 < X86_PAGE_SIZE)
2995 pbTest[off + 1] = 0;
2996 if (off + 2 < X86_PAGE_SIZE)
2997 pbTest[off + 2] |= 7;
2998 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2999 if (bRing != 0)
3000 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3001 else
3002 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3003 g_usBs3TestStep++;
3004 }
3005 }
3006
3007 /*
3008 * Now, do it the other way around. It should look normal now since writing
3009 * the limit will #PF first and nothing should be written.
3010 */
3011 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
3012 {
3013 Bs3MemSet(pbTest, bFiller1, 48);
3014 if (off >= 0)
3015 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3016 else if (off + cbIdtr > 0)
3017 Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
3018 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
3019 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3020 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3021 if (bRing != 0)
3022 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3023 else if (off >= 0)
3024 {
3025 CtxUdExpected.rbx = Ctx.rbx;
3026 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3027 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
3028 Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
3029 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3030 }
3031 else
3032 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3033 g_usBs3TestStep++;
3034
3035 /* Again with messed up base as well (triple fault if buggy). */
3036 if (off < 0 && off > -cbIdtr)
3037 {
3038 if (off + 2 >= 0)
3039 pbTest[off + 2] |= 15;
3040 pbTest[off + cbIdtr - 1] ^= 0xaa;
3041 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3042 if (bRing != 0)
3043 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3044 else
3045 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3046 g_usBs3TestStep++;
3047 }
3048 }
3049
3050 /*
3051 * Combine paging and segment limit and check ordering.
         * This is kind of interesting here since the instruction seems to
         * actually be doing two separate reads, just like its S[IG]DT counterpart.
3054 *
3055 * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
3056 * that's what f486Weirdness deals with.
3057 */
3058 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
3059 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
3060 {
3061 bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
3062 && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
3063 uint16_t cbLimit;
3064
3065 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
3066 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
3067 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3068 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3069 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3070
3071 if (pWorker->fSs)
3072 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
3073 else
3074 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3075
3076 /* Expand up (normal), approaching tail guard page. */
3077 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3078 {
3079 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3080 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3081 {
3082 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3083 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
3084 if (off < X86_PAGE_SIZE)
3085 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
3086 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3087 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3088 if (bRing != 0)
3089 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3090 else if (off + cbIdtr <= cbLimit + 1)
3091 {
3092 /* No #GP, but maybe #PF. */
3093 if (off + cbIdtr <= X86_PAGE_SIZE)
3094 {
3095 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3096 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3097 Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
3098 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3099 }
3100 else
3101 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3102 }
3103 /* No #GP/#SS on limit, but instead #PF? */
3104 else if ( !f486Weirdness
3105 ? off < cbLimit && off >= 0xfff
3106 : off + 2 < cbLimit && off >= 0xffd)
3107 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3108 /* #GP/#SS on limit or base. */
3109 else if (pWorker->fSs)
3110 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3111 else
3112 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3113
3114 g_usBs3TestStep++;
3115
3116 /* Set DS to 0 and check that we get #GP(0). */
3117 if (!pWorker->fSs)
3118 {
3119 Ctx.ds = 0;
3120 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3121 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3122 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3123 g_usBs3TestStep++;
3124 }
3125 }
3126 }
3127
3128 /* Expand down. */
3129 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
3130 uFlatTest -= X86_PAGE_SIZE;
3131
3132 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
3133 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3134 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3135 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3136
3137 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3138 {
3139 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3140 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3141 {
3142 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3143 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
3144 if (off >= X86_PAGE_SIZE)
3145 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3146 else if (off > X86_PAGE_SIZE - cbIdtr)
3147 Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
3148 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3149 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3150 if (bRing != 0)
3151 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3152 else if (cbLimit < off && off >= X86_PAGE_SIZE)
3153 {
3154 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3155 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3156 Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
3157 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3158 }
3159 else if (cbLimit < off && off < X86_PAGE_SIZE)
3160 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3161 else if (pWorker->fSs)
3162 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3163 else
3164 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3165 g_usBs3TestStep++;
3166 }
3167 }
3168
3169 pbTest += X86_PAGE_SIZE;
3170 uFlatTest += X86_PAGE_SIZE;
3171 }
3172
3173 Bs3MemGuardedTestPageFree(pbTest);
3174 }
3175
3176 /*
3177 * Check non-canonical 64-bit space.
3178 */
3179 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
3180 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
3181 {
3182 /* Make our references relative to the gap. */
3183 pbTest += g_cbBs3PagingOneCanonicalTrap;
3184
3185 /* Hit it from below. */
3186 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3187 {
3188 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
3189 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3190 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3191 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3192 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3193 if (off + cbIdtr > 0 || bRing != 0)
3194 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3195 else
3196 {
3197 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3198 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3199 Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
3200 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3201 }
3202 }
3203
3204 /* Hit it from above. */
3205 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3206 {
3207 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
3208 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3209 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3210 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3211 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3212 if (off < 0 || bRing != 0)
3213 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3214 else
3215 {
3216 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3217 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3218 Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
3219 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3220 }
3221 }
3222
3223 }
3224}
3225
3226
3227static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3228 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3229{
3230 unsigned idx;
3231 unsigned bRing;
3232 unsigned iStep = 0;
3233
3234 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3235 test and don't want to bother with double faults. */
3236 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3237 {
3238 for (idx = 0; idx < cWorkers; idx++)
3239 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3240 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3241 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3242 || ( bTestMode > BS3_MODE_PE16
3243 || ( bTestMode == BS3_MODE_PE16
3244 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3245 {
3246 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3247 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3248 g_usBs3TestStep = iStep;
3249 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3250 iStep += 1000;
3251 }
3252 if (BS3_MODE_IS_RM_SYS(bTestMode))
3253 break;
3254 }
3255}
3256
3257
3258BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3259{
3260 union
3261 {
3262 RTIDTR Idtr;
3263 uint8_t ab[32]; /* At least cbIdtr*2! */
3264 } Expected;
3265
3266 //if (bMode != BS3_MODE_LM64) return 0;
3267 bs3CpuBasic2_SetGlobals(bMode);
3268
3269 /*
3270 * Pass to common worker which is only compiled once per mode.
3271 */
3272 Bs3MemZero(&Expected, sizeof(Expected));
3273 ASMGetIDTR(&Expected.Idtr);
3274
3275 if (BS3_MODE_IS_RM_SYS(bMode))
3276 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3277 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3278 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3279 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3280 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3281 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3282 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3283 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3284 else
3285 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3286 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3287
3288 /*
3289 * Re-initialize the IDT.
3290 */
3291 Bs3TrapReInit();
3292 return 0;
3293}
3294
3295
3296BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3297{
3298 union
3299 {
3300 RTGDTR Gdtr;
3301 uint8_t ab[32]; /* At least cbIdtr*2! */
3302 } Expected;
3303
3304 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3305 bs3CpuBasic2_SetGlobals(bMode);
3306
3307 /*
3308 * Pass to common worker which is only compiled once per mode.
3309 */
3310 if (BS3_MODE_IS_RM_SYS(bMode))
3311 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3312 Bs3MemZero(&Expected, sizeof(Expected));
3313 ASMGetGDTR(&Expected.Gdtr);
3314
3315 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3316 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3317
3318 /*
3319 * Re-initialize the IDT.
3320 */
3321 Bs3TrapReInit();
3322 return 0;
3323}
3324
/** Buffer for constructing IRET stack frames in; it also serves as the
 *  stack SS:xSP points at while the tested IRET executes (see
 *  bs3CpuBasic2_iret_Worker).  The union views overlap so a frame can be
 *  written via the member matching the operand size under test. */
typedef union IRETBUF
{
    uint64_t au64[6]; /* max req is 5 */
    uint32_t au32[12]; /* max req is 9 */
    uint16_t au16[24]; /* max req is 5 */
    uint8_t ab[48];
} IRETBUF;
/** Far pointer to an IRET frame buffer. */
typedef IRETBUF BS3_FAR *PIRETBUF;
3333
3334
3335static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3336 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3337{
3338 if (cbPop == 2)
3339 {
3340 pIretBuf->au16[0] = (uint16_t)uPC;
3341 pIretBuf->au16[1] = uCS;
3342 pIretBuf->au16[2] = (uint16_t)fEfl;
3343 pIretBuf->au16[3] = (uint16_t)uSP;
3344 pIretBuf->au16[4] = uSS;
3345 }
3346 else if (cbPop != 8)
3347 {
3348 pIretBuf->au32[0] = (uint32_t)uPC;
3349 pIretBuf->au16[1*2] = uCS;
3350 pIretBuf->au32[2] = (uint32_t)fEfl;
3351 pIretBuf->au32[3] = (uint32_t)uSP;
3352 pIretBuf->au16[4*2] = uSS;
3353 }
3354 else
3355 {
3356 pIretBuf->au64[0] = uPC;
3357 pIretBuf->au16[1*4] = uCS;
3358 pIretBuf->au64[2] = fEfl;
3359 pIretBuf->au64[3] = uSP;
3360 pIretBuf->au16[4*4] = uSS;
3361 }
3362}
3363
3364
3365static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
3366 PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
3367{
3368 BS3TRAPFRAME TrapCtx;
3369 BS3REGCTX Ctx;
3370 BS3REGCTX CtxUdExpected;
3371 BS3REGCTX TmpCtx;
3372 BS3REGCTX TmpCtxExpected;
3373 uint8_t abLowUd[8];
3374 uint8_t abLowIret[8];
3375 FPFNBS3FAR pfnUdLow = (FPFNBS3FAR)abLowUd;
3376 FPFNBS3FAR pfnIretLow = (FPFNBS3FAR)abLowIret;
3377 unsigned const cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
3378 bool const fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
3379 int iRingDst;
3380 int iRingSrc;
3381 uint16_t uDplSs;
3382 uint16_t uRplCs;
3383 uint16_t uRplSs;
3384// int i;
3385 uint8_t BS3_FAR *pbTest;
3386
3387 NOREF(abLowUd);
3388#define IRETBUF_SET_SEL(a_idx, a_uValue) \
3389 do { *(uint16_t)&pIretBuf->ab[a_idx * cbPop] = (a_uValue); } while (0)
3390#define IRETBUF_SET_REG(a_idx, a_uValue) \
3391 do { uint8_t const BS3_FAR *pbTmp = &pIretBuf->ab[a_idx * cbPop]; \
3392 if (cbPop == 2) *(uint16_t)pbTmp = (uint16_t)(a_uValue); \
3393 else if (cbPop != 8) *(uint32_t)pbTmp = (uint32_t)(a_uValue); \
3394 else *(uint64_t)pbTmp = (a_uValue); \
3395 } while (0)
3396
3397 /* make sure they're allocated */
3398 Bs3MemZero(&Ctx, sizeof(Ctx));
3399 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
3400 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
3401 Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
3402 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3403
3404 /*
3405 * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
3406 * copies of both iret and ud in the first 64KB of memory. The stack is
3407 * below 64KB, so we'll just copy the instructions onto the stack.
3408 */
3409 Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
3410 Bs3MemCpy(abLowIret, pfnIret, 4);
3411
3412 /*
3413 * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
3414 * - Point the context at our iret instruction.
3415 * - Point SS:xSP at pIretBuf.
3416 */
3417 Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
3418 if (!fUseLowCode)
3419 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
3420 else
3421 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
3422 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
3423 g_uBs3TrapEipHint = Ctx.rip.u32;
3424 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3425
3426 /*
3427 * The first success (UD) context keeps the same code bit-count as the iret.
3428 */
3429 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
3430 if (!fUseLowCode)
3431 Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
3432 else
3433 Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
3434 CtxUdExpected.rsp.u += cbSameCplFrame;
3435
3436 /*
3437 * Check that it works at all.
3438 */
3439 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3440 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3441
3442 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3443 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3444 g_usBs3TestStep++;
3445
3446 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
3447 {
3448 /* Selectors are modified when switching rings, so we need to know
3449 what we're dealing with there. */
3450 if ( !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
3451 || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
3452 Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
3453 if (Ctx.fs || Ctx.gs)
3454 Bs3TestFailed("Expected R0 FS and GS to be 0!\n");
3455
3456 /*
3457 * Test returning to outer rings if protected mode.
3458 */
3459 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3460 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3461 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3462 {
3463 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3464 TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
3465 TmpCtx.es = TmpCtxExpected.es;
3466 iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
3467 TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3468 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3469 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3470 g_usBs3TestStep++;
3471 }
3472
3473 /*
3474 * Check CS.RPL and SS.RPL.
3475 */
3476 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3477 {
3478 uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
3479 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3480 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3481 for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
3482 {
3483 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3484 Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
3485 TmpCtx.es = TmpCtxExpected.es;
3486 TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
3487 for (uRplCs = 0; uRplCs <= 3; uRplCs++)
3488 {
3489 uint16_t const uSrcEs = TmpCtx.es;
3490 uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
3491 //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);
3492
3493 /* CS.RPL */
3494 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
3495 TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3496 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3497 if (uRplCs == iRingDst && iRingDst >= iRingSrc)
3498 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3499 else
3500 {
3501 if (iRingDst < iRingSrc)
3502 TmpCtx.es = 0;
3503 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3504 TmpCtx.es = uSrcEs;
3505 }
3506 g_usBs3TestStep++;
3507
3508 /* SS.RPL */
3509 if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
3510 {
3511 uint16_t uSavedDstSs = TmpCtxExpected.ss;
3512 for (uRplSs = 0; uRplSs <= 3; uRplSs++)
3513 {
3514 /* SS.DPL (iRingDst == CS.DPL) */
3515 for (uDplSs = 0; uDplSs <= 3; uDplSs++)
3516 {
3517 uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
3518 //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
3519 // iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
3520 // TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3521
3522 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
3523 TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3524 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3525 if (uRplCs != iRingDst || iRingDst < iRingSrc)
3526 {
3527 if (iRingDst < iRingSrc)
3528 TmpCtx.es = 0;
3529 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3530 }
3531 else if (uRplSs != iRingDst || uDplSs != iRingDst)
3532 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
3533 else
3534 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3535 TmpCtx.es = uSrcEs;
3536 g_usBs3TestStep++;
3537 }
3538 }
3539
3540 TmpCtxExpected.ss = uSavedDstSs;
3541 }
3542 }
3543 }
3544 }
3545 }
3546
3547 /*
3548 * Special 64-bit checks.
3549 */
3550 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
3551 {
3552 /* The VM flag is completely ignored. */
3553 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3554 CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3555 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3556 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3557 g_usBs3TestStep++;
3558
3559 /* The NT flag can be loaded just fine. */
3560 CtxUdExpected.rflags.u32 |= X86_EFL_NT;
3561 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3562 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3563 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3564 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3565 CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
3566 g_usBs3TestStep++;
3567
3568 /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
3569 Ctx.rflags.u32 |= X86_EFL_NT;
3570 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3571 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3572 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3573 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3574 g_usBs3TestStep++;
3575
3576 /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
3577 pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
3578 if (pbTest != NULL)
3579 {
3580 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
3581 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3582 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3583 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3584 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3585 g_usBs3TestStep++;
3586
3587 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3588 Bs3MemGuardedTestPageFree(pbTest);
3589 }
3590 Ctx.rflags.u32 &= ~X86_EFL_NT;
3591 }
3592}
3593
3594
3595BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3596{
3597 struct
3598 {
3599 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3600 IRETBUF IRetBuf;
3601 uint8_t abGuard[32];
3602 } uBuf;
3603 size_t cbUnused;
3604
3605 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3606 bs3CpuBasic2_SetGlobals(bMode);
3607
3608 /*
3609 * Primary instruction form.
3610 */
3611 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3612 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3613 if (BS3_MODE_IS_16BIT_CODE(bMode))
3614 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3615 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3616 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3617 else
3618 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3619
3620 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3621 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3622 - (uintptr_t)uBuf.abExtraStack;
3623 if (cbUnused < 2048)
3624 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3625
3626 /*
3627 * Secondary variation: opsize prefixed.
3628 */
3629 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3630 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3631 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3632 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3633 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3634 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3635 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3636 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3637 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3638 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3639 - (uintptr_t)uBuf.abExtraStack;
3640 if (cbUnused < 2048)
3641 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3642
3643 /*
3644 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3645 */
3646 if (BS3_MODE_IS_64BIT_CODE(bMode))
3647 {
3648 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3649 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3650 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3651 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3652 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3653 - (uintptr_t)uBuf.abExtraStack;
3654 if (cbUnused < 2048)
3655 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3656 }
3657
3658 return 0;
3659}
3660
3661
3662
3663/*********************************************************************************************************************************
3664* Non-far JMP & CALL Tests *
3665*********************************************************************************************************************************/
/** @name Assembly code snippets for the non-far JMP & CALL tests.
 *
 * PROTO_ALL() declares the 16-bit (_c16), 32-bit (_c32) and 64-bit (_c64)
 * code variants of a test snippet.  Judging by the __ud2 suffixes, each
 * snippet's branch lands on an UD2 instruction that the test driver catches.
 * @{ */
#define PROTO_ALL(a_Template) \
    FNBS3FAR a_Template ## _c16, \
    a_Template ## _c32, \
    a_Template ## _c64
/* Plain short/near jumps and calls: relative (jb/jv), memory indirect and
   register indirect forms. */
PROTO_ALL(bs3CpuBasic2_jmp_jb__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_jb_back__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_jv__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_jv_back__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_ind_mem__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_ind_xDI__ud2);
FNBS3FAR bs3CpuBasic2_jmp_ind_r9__ud2_c64; /* 64-bit only: needs REX for r9. */
PROTO_ALL(bs3CpuBasic2_call_jv__ud2);
PROTO_ALL(bs3CpuBasic2_call_jv_back__ud2);
PROTO_ALL(bs3CpuBasic2_call_ind_mem__ud2);
PROTO_ALL(bs3CpuBasic2_call_ind_xAX__ud2);
PROTO_ALL(bs3CpuBasic2_call_ind_xDI__ud2);
FNBS3FAR bs3CpuBasic2_call_ind_r9__ud2_c64; /* 64-bit only: needs REX for r9. */

/* Operand size prefixed variants, bracketed by begin/end marker symbols. */
PROTO_ALL(bs3CpuBasic2_jmp_opsize_begin);
PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize_back__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize_back__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_ind_mem_opsize__ud2);
FNBS3FAR bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64; /* Intel ignores opsize here - see mode test below. */
PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX_opsize__ud2);
PROTO_ALL(bs3CpuBasic2_call_jv_opsize__ud2);
PROTO_ALL(bs3CpuBasic2_call_jv_opsize_back__ud2);
PROTO_ALL(bs3CpuBasic2_call_ind_mem_opsize__ud2);
FNBS3FAR bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64; /* Intel ignores opsize here - see mode test below. */
PROTO_ALL(bs3CpuBasic2_call_ind_xAX_opsize__ud2);
PROTO_ALL(bs3CpuBasic2_jmp_opsize_end);
#undef PROTO_ALL
/** @} */

/* Start of the 16-bit wraparound test code segment. */
FNBS3FAR bs3CpuBasic2_jmptext16_start;

/* IP wraparound tests: branches that wrap forward across the 64KB boundary. */
FNBS3FAR bs3CpuBasic2_jmp_target_wrap_forward;
FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_forward__ud2;
FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2;
FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_forward__ud2;
FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2;
FNBS3FAR bs3CpuBasic2_call_jv16_wrap_forward__ud2;
FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2;

/* IP wraparound tests: branches that wrap backward across the 64KB boundary. */
FNBS3FAR bs3CpuBasic2_jmp_target_wrap_backward;
FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_backward__ud2;
FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2;
FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_backward__ud2;
FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2;
FNBS3FAR bs3CpuBasic2_call_jv16_wrap_backward__ud2;
FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2;
3718
3719
3720
3721/**
3722 * Entrypoint for non-far JMP & CALL tests.
3723 *
3724 * @returns 0 or BS3TESTDOMODE_SKIPPED.
3725 * @param bMode The CPU mode we're testing.
3726 *
3727 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
3728 * with control registers and such.
3729 */
3730BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_jmp_call)(uint8_t bMode)
3731{
3732 BS3TRAPFRAME TrapCtx;
3733 BS3REGCTX Ctx;
3734 BS3REGCTX CtxExpected;
3735 unsigned iTest;
3736
3737 /* make sure they're allocated */
3738 Bs3MemZero(&Ctx, sizeof(Ctx));
3739 Bs3MemZero(&CtxExpected, sizeof(Ctx));
3740 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3741
3742 bs3CpuBasic2_SetGlobals(bMode);
3743
3744 /*
3745 * Create a context.
3746 */
3747 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
3748 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
3749
3750 /*
3751 * 16-bit tests.
3752 *
3753 * When opsize is 16-bit relative jumps will do 16-bit calculations and
3754 * modify IP. This means that it is not possible to trigger a segment
3755 * limit #GP(0) when the limit is set to 0xffff.
3756 */
3757 if (BS3_MODE_IS_16BIT_CODE(bMode))
3758 {
3759 static struct
3760 {
3761 int8_t iWrap;
3762 bool fOpSizePfx;
3763 int8_t iGprIndirect;
3764 bool fCall;
3765 FPFNBS3FAR pfnTest;
3766 }
3767 const s_aTests[] =
3768 {
3769 { 0, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c16, },
3770 { 0, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c16, },
3771 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c16, },
3772 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c16, },
3773 { 0, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c16, },
3774 { 0, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c16, },
3775 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c16, },
3776 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c16, },
3777 { 0, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c16, },
3778 { 0, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c16, },
3779 { 0, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c16, },
3780 { 0, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c16, },
3781 { 0, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c16, },
3782 { 0, false, -1, true, bs3CpuBasic2_call_jv__ud2_c16, },
3783 { 0, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c16, },
3784 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c16, },
3785 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c16, },
3786 { 0, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c16, },
3787 { 0, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c16, },
3788 { 0, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c16, },
3789 { 0, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c16, },
3790 { 0, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c16, },
3791
3792 { -1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_backward__ud2, },
3793 { +1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_forward__ud2, },
3794 { -1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2, },
3795 { +1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2, },
3796
3797 { -1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_backward__ud2, },
3798 { +1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_forward__ud2, },
3799 { -1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2, },
3800 { +1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2, },
3801 { -1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_backward__ud2, },
3802 { +1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_forward__ud2, },
3803 { -1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2, },
3804 { +1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2, },
3805 };
3806
3807 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3808 Bs3SelSetup16BitCode(&Bs3GdteSpare03, Bs3SelLnkPtrToFlat(bs3CpuBasic2_jmptext16_start), 0);
3809
3810 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3811 {
3812 uint64_t uGprSaved;
3813 if (s_aTests[iTest].iWrap == 0)
3814 {
3815 uint8_t const BS3_FAR *fpbCode;
3816 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
3817 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
3818 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
3819 }
3820 else
3821 {
3822 if (BS3_MODE_IS_RM_OR_V86(bMode))
3823 Ctx.cs = BS3_FP_SEG(s_aTests[iTest].pfnTest);
3824 else
3825 Ctx.cs = BS3_SEL_SPARE_03;
3826 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3827 if (s_aTests[iTest].fOpSizePfx)
3828 CtxExpected.rip.u = Ctx.rip.u;
3829 else if (s_aTests[iTest].iWrap < 0)
3830 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3831 else
3832 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_forward);
3833 }
3834 CtxExpected.cs = Ctx.cs;
3835 if (s_aTests[iTest].iGprIndirect >= 0)
3836 {
3837 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
3838 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
3839 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
3840 }
3841 CtxExpected.rsp.u = Ctx.rsp.u;
3842 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3843 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3844 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u);
3845
3846 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3847 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3848 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
3849 else
3850 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3851 g_usBs3TestStep++;
3852
3853 /* Again single stepping: */
3854 //Bs3TestPrintf("stepping...\n");
3855 Bs3RegSetDr6(0);
3856 Ctx.rflags.u16 |= X86_EFL_TF;
3857 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3858 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3859 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3860 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
3861 else
3862 {
3863 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3864 bs3CpuBasic2_CheckDr6InitVal();
3865 }
3866 Ctx.rflags.u16 &= ~X86_EFL_TF;
3867 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3868 g_usBs3TestStep++;
3869
3870 if (s_aTests[iTest].iGprIndirect >= 0)
3871 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
3872 }
3873
3874 /* Limit the wraparound CS segment to exclude bs3CpuBasic2_jmp_target_wrap_backward
3875 and run the backward wrapping tests. */
3876 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3877 {
3878 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward) - 1;
3879 CtxExpected.cs = Ctx.cs = BS3_SEL_SPARE_03;
3880 CtxExpected.rsp.u = Ctx.rsp.u;
3881 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3882 if (s_aTests[iTest].iWrap < 0)
3883 {
3884 CtxExpected.rip.u = Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3885 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v1\n", Ctx.cs, Ctx.rip.u);
3886 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3887 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3888 g_usBs3TestStep++;
3889 }
3890
3891 /* Do another round where we put the limit in the middle of the UD2
3892 instruction we're jumping to: */
3893 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3894 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3895 if (s_aTests[iTest].iWrap < 0)
3896 {
3897 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3898 if (s_aTests[iTest].fOpSizePfx)
3899 CtxExpected.rip.u = Ctx.rip.u;
3900 else
3901 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3902 CtxExpected.rsp.u = Ctx.rsp.u;
3903 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3904 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3905 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v2\n", Ctx.cs, Ctx.rip.u);
3906 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3907 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3908 g_usBs3TestStep++;
3909 }
3910 }
3911
3912 }
3913 /*
3914 * 32-bit & 64-bit tests.
3915 *
3916 * When the opsize prefix is applied here, IP is updated and bits 63:16
3917 * cleared. However in 64-bit mode, Intel ignores the opsize prefix
3918 * whereas AMD doesn't and it works like you expect.
3919 */
3920 else
3921 {
3922 static struct
3923 {
3924 uint8_t cBits;
3925 bool fOpSizePfx;
3926 bool fIgnPfx;
3927 int8_t iGprIndirect;
3928 bool fCall;
3929 FPFNBS3FAR pfnTest;
3930 }
3931 const s_aTests[] =
3932 {
3933 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3934 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3935 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3936 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3937 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3938 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3939 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3940 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3941 { 32, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c32, },
3942 { 32, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c32, },
3943 { 32, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c32, },
3944 { 32, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c32, },
3945 { 32, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c32, },
3946 { 32, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, },
3947 { 32, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
3948 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
3949 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
3950 { 32, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c32, },
3951 { 32, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c32, },
3952 { 32, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c32, },
3953 { 32, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c32, },
3954 { 32, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c32, },
3955 /* 64bit/Intel: Use the _c64 tests, which are written to ignore the o16 prefix. */
3956 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb__ud2_c64, },
3957 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c64, },
3958 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c64, },
3959 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c64, },
3960 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv__ud2_c64, },
3961 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c64, },
3962 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c64, },
3963 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c64, },
3964 { 64, false, true, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, },
3965 { 64, true, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64, },
3966 { 64, false, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, },
3967 { 64, false, true, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, },
3968 { 64, false, true, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, },
3969 { 64, true, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3970 { 64, false, true, -1, true, bs3CpuBasic2_call_jv__ud2_c64, },
3971 { 64, false, true, -1, true, bs3CpuBasic2_call_jv_back__ud2_c64, },
3972 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c64, },
3973 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c64, },
3974 { 64, false, true, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, },
3975 { 64, true, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64,},
3976 { 64, false, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, },
3977 { 64, false, true, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, },
3978 { 64, false, true, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, },
3979 { 64, true, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3980 /* 64bit/AMD: Use the _c32 tests. */
3981 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3982 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3983 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3984 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3985 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3986 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3987 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3988 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3989 { 64, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, }, /* using c64 here */
3990 { 64, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c64, }, /* ditto */
3991 { 64, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, }, /* ditto */
3992 { 64, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, }, /* ditto */
3993 { 64, false, false, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, }, /* ditto */
3994 { 64, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* ditto */
3995 { 64, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, }, /* using c32 again */
3996 { 64, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
3997 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
3998 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
3999 { 64, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, }, /* using c64 here */
4000 { 64, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c64, }, /* ditto */
4001 { 64, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, }, /* ditto */
4002 { 64, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, }, /* ditto */
4003 { 64, false, false, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, }, /* ditto */
4004 { 64, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* ditto */
4005 };
4006 uint8_t const cBits = BS3_MODE_IS_64BIT_CODE(bMode) ? 64 : 32;
4007 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4008 bool const fIgnPfx = cBits == 64 && enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4009
4010 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
4011 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_begin_c32);
4012 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_end_c64) - offLow;
4013 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4014 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4015 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4016 Bs3TestFailedF("Opsize overriden jumps are out of place: %#x LB %#x\n", offLow, cbLow);
4017 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4018 if (!fIgnPfx)
4019 {
4020 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4021 if (s_aTests[iTest].fOpSizePfx && s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4022 {
4023 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4024 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4025 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4026 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4027 pbCode16[offUd + 1] = 0xf1;
4028 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4029 pbLow[offUd + 1] = 0x0b;
4030 }
4031 }
4032
4033 /* Run the tests. */
4034 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4035 {
4036 if (s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4037 {
4038 uint64_t uGprSaved;
4039 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4040 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4041 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4042 if (s_aTests[iTest].iGprIndirect >= 0)
4043 {
4044 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
4045 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
4046 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
4047 }
4048 if (s_aTests[iTest].fOpSizePfx && !fIgnPfx)
4049 CtxExpected.rip.u &= UINT16_MAX;
4050 CtxExpected.rsp.u = Ctx.rsp.u;
4051 if (s_aTests[iTest].fCall)
4052 CtxExpected.rsp.u -= s_aTests[iTest].cBits == 64 ? 8
4053 : !s_aTests[iTest].fOpSizePfx ? 4 : 2;
4054
4055 //Bs3TestPrintf("cs:rip=%04RX16:%08RX64\n", Ctx.cs, Ctx.rip.u);
4056
4057 if (BS3_MODE_IS_16BIT_SYS(bMode))
4058 g_uBs3TrapEipHint = s_aTests[iTest].fOpSizePfx ? 0 : Ctx.rip.u32;
4059 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4060
4061 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4062 g_usBs3TestStep++;
4063
4064 /* Again single stepping: */
4065 //Bs3TestPrintf("stepping...\n");
4066 Bs3RegSetDr6(0);
4067 Ctx.rflags.u16 |= X86_EFL_TF;
4068 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4069 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4070 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4071 Ctx.rflags.u16 &= ~X86_EFL_TF;
4072 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4073 g_usBs3TestStep++;
4074
4075 if (s_aTests[iTest].iGprIndirect >= 0)
4076 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
4077 }
4078 }
4079
4080 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4081 }
4082
4083 return 0;
4084}
4085
4086
4087/*********************************************************************************************************************************
4088* FAR JMP & FAR CALL Tests *
4089*********************************************************************************************************************************/
/*
 * External assembly test workers used by bs3CpuBasic2_far_jmp_call() below.
 *
 * Each worker performs one far JMP or far CALL variant and (judging by the
 * __ud2 suffix and the code that patches/compares against UD2 opcodes) lands
 * on a UD2 instruction that raises the #UD the driver traps on.  The
 * PROTO_ALL macro declares the 16-, 32- and 64-bit code flavors of a worker;
 * a few variants only exist in a single flavor and are declared directly.
 */
/* Declare the _c16, _c32 and _c64 flavors of an assembly test worker. */
#define PROTO_ALL(a_Template) \
    FNBS3FAR a_Template ## _c16, \
             a_Template ## _c32, \
             a_Template ## _c64
/* Start marker bracketing the opsize-prefixed workers that the driver copies
   into low memory when running on a 64-bit system (see the BS3_FP_OFF()
   range checks in bs3CpuBasic2_far_jmp_call()). */
PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_begin);

/* Far JMP via immediate ptr16:16/ptr16:32 operand ("jmpf ptr"). */
FNBS3FAR  bs3CpuBasic2_jmpf_ptr_rm__ud2_c16;
PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r0__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r1__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r2__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r3__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2);

/* Far CALL via immediate ptr16:16/ptr16:32 operand ("callf ptr"). */
FNBS3FAR  bs3CpuBasic2_callf_ptr_rm__ud2_c16;
PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r0__ud2);
PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r1__ud2);
PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r2__ud2);
PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r3__ud2);
PROTO_ALL(bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2);
PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs64__ud2);
PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs16l__ud2);

/* Far JMP via indirect m16:16/m16:32/m16:64 memory operand ("jmpf mem"). */
FNBS3FAR  bs3CpuBasic2_jmpf_mem_rm__ud2_c16;
PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r0__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r1__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r2__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r3__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs32__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs64__ud2);
PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2);

/* 64-bit-only "jmpf mem" variants written for Intel CPUs, which handle the
   operand size prefix differently from AMD in long mode (see the fFix64OpSize
   handling in the 64-bit test table below). */
FNBS3FAR  bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64;

/* Far CALL via indirect memory operand ("callf mem"). */
FNBS3FAR  bs3CpuBasic2_callf_mem_rm__ud2_c16;
PROTO_ALL(bs3CpuBasic2_callf_mem_same_r0__ud2);
PROTO_ALL(bs3CpuBasic2_callf_mem_same_r1__ud2);
PROTO_ALL(bs3CpuBasic2_callf_mem_same_r2__ud2);
PROTO_ALL(bs3CpuBasic2_callf_mem_same_r3__ud2);
PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16__ud2);
PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs32__ud2);
PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs64__ud2);
PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16l__ud2);

/* 64-bit-only "callf mem" variants for Intel CPUs (see note above). */
FNBS3FAR  bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64;
FNBS3FAR  bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64;

/* End marker for the low-memory copy range (see start marker above). */
PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_end);
#undef PROTO_ALL
4154
4155
4156
4157/**
4158 * Entrypoint for FAR JMP & FAR CALL tests.
4159 *
4160 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4161 * @param bMode The CPU mode we're testing.
4162 *
4163 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
4164 * with control registers and such.
4165 */
4166BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_jmp_call)(uint8_t bMode)
4167{
4168 BS3TRAPFRAME TrapCtx;
4169 BS3REGCTX Ctx;
4170 BS3REGCTX CtxExpected;
4171 unsigned iTest;
4172
4173 /* make sure they're allocated */
4174 Bs3MemZero(&Ctx, sizeof(Ctx));
4175 Bs3MemZero(&CtxExpected, sizeof(Ctx));
4176 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4177
4178 bs3CpuBasic2_SetGlobals(bMode);
4179
4180 /*
4181 * Create a context.
4182 */
4183 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
4184 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4185
4186 if (Ctx.rax.u8 == 0 || Ctx.rax.u8 == 0xff) /* for salc & the 64-bit detection */
4187 CtxExpected.rax.u8 = Ctx.rax.u8 = 0x42;
4188
4189 /*
4190 * Set up spare selectors.
4191 */
4192 Bs3GdteSpare00 = Bs3Gdte_CODE16;
4193 Bs3GdteSpare00.Gen.u1Long = 1;
4194
4195 /*
4196 * 16-bit tests.
4197 */
4198 if (BS3_MODE_IS_16BIT_CODE(bMode))
4199 {
4200 static struct
4201 {
4202 bool fRmOrV86;
4203 bool fCall;
4204 uint16_t uDstSel;
4205 uint8_t uDstBits;
4206 bool fOpSizePfx;
4207 FPFNBS3FAR pfnTest;
4208 }
4209 const s_aTests[] =
4210 {
4211 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_ptr_rm__ud2_c16, },
4212 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c16, },
4213 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c16, },
4214 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c16, },
4215 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c16, },
4216 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c16, },
4217 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4218 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4219
4220 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_ptr_rm__ud2_c16, },
4221 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c16, },
4222 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c16, },
4223 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c16, },
4224 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c16, },
4225 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c16, },
4226 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4227 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4228
4229 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_mem_rm__ud2_c16, },
4230 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c16, },
4231 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c16, },
4232 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c16, },
4233 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c16, },
4234 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c16, },
4235 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c16, },
4236 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4237 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4238
4239 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_mem_rm__ud2_c16, },
4240 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c16, },
4241 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c16, },
4242 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c16, },
4243 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c16, },
4244 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c16, },
4245 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c16, },
4246 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4247 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4248 };
4249 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
4250
4251 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4252 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4253 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4254 if (BS3_MODE_IS_64BIT_SYS(bMode))
4255 {
4256 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c16);
4257 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c16) - offLow;
4258 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4259 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4260 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4261 Bs3TestFailedF("Opsize overriden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4262 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4263 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4264 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4265 {
4266 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4267 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4268 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4269 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4270 pbLow[offUd] = 0x0f;
4271 pbLow[offUd + 1] = 0x0b;
4272 }
4273 }
4274
4275 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4276 if (s_aTests[iTest].fRmOrV86 == fRmOrV86)
4277 {
4278 uint64_t const uSavedRsp = Ctx.rsp.u;
4279 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4280 uint8_t const BS3_FAR *fpbCode;
4281
4282 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4283 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4284 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4285 if ( s_aTests[iTest].uDstBits == 32
4286 || ( s_aTests[iTest].uDstBits == 64
4287 && !BS3_MODE_IS_16BIT_SYS(bMode)
4288 && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00))
4289 CtxExpected.rip.u += BS3_ADDR_BS3TEXT16;
4290 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode))
4291 CtxExpected.rip.u &= UINT16_MAX;
4292 CtxExpected.cs = s_aTests[iTest].uDstSel;
4293 if (fGp)
4294 {
4295 CtxExpected.rip.u = Ctx.rip.u;
4296 CtxExpected.cs = Ctx.cs;
4297 }
4298 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4299 CtxExpected.rsp.u = Ctx.rsp.u;
4300 if (s_aTests[iTest].fCall && !fGp)
4301 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 8 : 4;
4302 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4303 {
4304 if (BS3_MODE_IS_64BIT_SYS(bMode))
4305 CtxExpected.rip.u -= 1;
4306 else
4307 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4308 }
4309 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4310 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4311 if (!fGp)
4312 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4313 else
4314 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4315 Ctx.rsp.u = uSavedRsp;
4316 g_usBs3TestStep++;
4317
4318 /* Again single stepping: */
4319 //Bs3TestPrintf("stepping...\n");
4320 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4321 Ctx.rflags.u16 |= X86_EFL_TF;
4322 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4323 CtxExpected.rax.u = Ctx.rax.u;
4324 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4325 CtxExpected.rip.u -= 1;
4326 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4327 if (!fGp)
4328 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4329 else
4330 {
4331 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4332 bs3CpuBasic2_CheckDr6InitVal();
4333 }
4334 Ctx.rflags.u16 &= ~X86_EFL_TF;
4335 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4336 Ctx.rsp.u = uSavedRsp;
4337 g_usBs3TestStep++;
4338 }
4339 }
4340 /*
4341 * 32-bit tests.
4342 */
4343 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4344 {
4345 static struct
4346 {
4347 bool fCall;
4348 uint16_t uDstSel;
4349 uint8_t uDstBits;
4350 bool fOpSizePfx;
4351 FPFNBS3FAR pfnTest;
4352 }
4353 const s_aTests[] =
4354 {
4355 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4356 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4357 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4358 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4359 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4360 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4361 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4362
4363 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4364 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4365 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4366 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4367 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4368 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4369 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4370
4371 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c32, },
4372 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c32, },
4373 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c32, },
4374 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c32, },
4375 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c32, },
4376 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c32, },
4377 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4378 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4379
4380 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c32, },
4381 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c32, },
4382 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c32, },
4383 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c32, },
4384 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c32, },
4385 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c32, },
4386 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4387 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4388 };
4389
4390 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4391 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4392 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4393 if (BS3_MODE_IS_64BIT_SYS(bMode))
4394 {
4395 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c32);
4396 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c32) - offLow;
4397 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4398 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4399 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4400 Bs3TestFailedF("Opsize overriden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4401 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4402 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4403 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4404 {
4405 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4406 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4407 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4408 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4409 pbLow[offUd] = 0x0f;
4410 pbLow[offUd + 1] = 0x0b;
4411 }
4412 }
4413 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4414 {
4415 uint64_t const uSavedRsp = Ctx.rsp.u;
4416 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4417 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4418
4419 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4420 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4421 if ( s_aTests[iTest].uDstBits == 16
4422 || ( s_aTests[iTest].uDstBits == 64
4423 && ( BS3_MODE_IS_16BIT_SYS(bMode))
4424 || s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00))
4425 CtxExpected.rip.u &= UINT16_MAX;
4426 CtxExpected.cs = s_aTests[iTest].uDstSel;
4427 if (fGp)
4428 {
4429 CtxExpected.rip.u = Ctx.rip.u;
4430 CtxExpected.cs = Ctx.cs;
4431 }
4432 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4433 CtxExpected.rsp.u = Ctx.rsp.u;
4434 if (s_aTests[iTest].fCall && !fGp)
4435 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 8;
4436 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4437 {
4438 if (BS3_MODE_IS_64BIT_SYS(bMode))
4439 CtxExpected.rip.u -= 1;
4440 else
4441 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4442 }
4443 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4444 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4445 if (!fGp)
4446 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4447 else
4448 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4449 Ctx.rsp.u = uSavedRsp;
4450 g_usBs3TestStep++;
4451
4452 /* Again single stepping: */
4453 //Bs3TestPrintf("stepping...\n");
4454 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4455 Ctx.rflags.u16 |= X86_EFL_TF;
4456 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4457 CtxExpected.rax.u = Ctx.rax.u;
4458 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4459 CtxExpected.rip.u -= 1;
4460 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4461 if (!fGp)
4462 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4463 else
4464 {
4465 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4466 bs3CpuBasic2_CheckDr6InitVal();
4467 }
4468 Ctx.rflags.u16 &= ~X86_EFL_TF;
4469 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4470 Ctx.rsp.u = uSavedRsp;
4471 g_usBs3TestStep++;
4472 }
4473 }
4474 /*
4475 * 64-bit tests.
4476 */
4477 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4478 {
4479 static struct
4480 {
4481 bool fInvalid;
4482 bool fCall;
4483 uint16_t uDstSel;
4484 uint8_t uDstBits;
4485 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W, 3: 066h REX.W */
4486 int8_t fFix64OpSize;
4487 FPFNBS3FAR pfnTest;
4488 }
4489 const s_aTests[] =
4490 {
4491 /* invalid opcodes: */
4492 { true, false, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4493 { true, false, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4494 { true, false, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4495 { true, false, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4496 { true, false, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4497 { true, false, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, },
4498 { true, false, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, },
4499
4500 { true, true, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4501 { true, true, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4502 { true, true, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4503 { true, true, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4504 { true, true, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4505 { true, true, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, },
4506 { true, true, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, },
4507
4508 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c64, },
4509 { false, false, BS3_SEL_R1_CS64 | 1, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c64, },
4510 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c64, },
4511 { false, false, BS3_SEL_R3_CS64 | 3, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c64, },
4512 { false, false, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c64, },
4513 { false, false, BS3_SEL_R0_CS32, 32, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c64, },
4514 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4515 { false, false, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4516
4517 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64, },
4518 { false, false, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64, },
4519 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64, },
4520 { false, false, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64, },
4521 { false, false, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64, },
4522 { false, false, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64, },
4523 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4524 { false, false, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4525
4526 { false, true, BS3_SEL_R0_CS64, 64, 2, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c64, },
4527 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c64, },
4528 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c64, },
4529 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c64, },
4530 { false, true, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c64, },
4531 { false, true, BS3_SEL_R0_CS32, 32, 2, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c64, },
4532 { false, true, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4533 { false, true, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4534
4535 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64, },
4536 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64, },
4537 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64, },
4538 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64, },
4539 { false, true, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64, },
4540 { false, true, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64, },
4541 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4542 { false, true, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4543 };
4544 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4545 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4546
4547 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4548 {
4549 uint64_t const uSavedRsp = Ctx.rsp.u;
4550 bool const fUd = s_aTests[iTest].fInvalid;
4551 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4552 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4553
4554 if (s_aTests[iTest].fFix64OpSize != fFix64OpSize && s_aTests[iTest].fFix64OpSize >= 0)
4555 continue;
4556
4557 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4558 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4559 CtxExpected.cs = s_aTests[iTest].uDstSel;
4560 if (s_aTests[iTest].uDstBits == 16)
4561 CtxExpected.rip.u &= UINT16_MAX;
4562 else if (s_aTests[iTest].uDstBits == 64 && fFix64OpSize && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00)
4563 CtxExpected.rip.u |= UINT64_C(0xfffff00000000000);
4564
4565 if (fGp || fUd)
4566 {
4567 CtxExpected.rip.u = Ctx.rip.u;
4568 CtxExpected.cs = Ctx.cs;
4569 }
4570 CtxExpected.rsp.u = Ctx.rsp.u;
4571 if (s_aTests[iTest].fCall && !fGp && !fUd)
4572 {
4573 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx == 0 ? 8
4574 : s_aTests[iTest].fOpSizePfx == 1 ? 4 : 16;
4575 //Bs3TestPrintf("cs:rsp=%04RX16:%04RX64 -> %04RX64 (fOpSizePfx=%d)\n", Ctx.ss, Ctx.rsp.u, CtxExpected.rsp.u, s_aTests[iTest].fOpSizePfx);
4576 }
4577 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4578 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4579 if (!fGp || fUd)
4580 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4581 else
4582 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4583 Ctx.rsp.u = uSavedRsp;
4584 g_usBs3TestStep++;
4585
4586 /* Again single stepping: */
4587 //Bs3TestPrintf("stepping...\n");
4588 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4589 Ctx.rflags.u16 |= X86_EFL_TF;
4590 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4591 CtxExpected.rax.u = Ctx.rax.u;
4592 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4593 if (fUd)
4594 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4595 else if (!fGp)
4596 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4597 else
4598 {
4599 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4600 bs3CpuBasic2_CheckDr6InitVal();
4601 }
4602 Ctx.rflags.u16 &= ~X86_EFL_TF;
4603 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4604 Ctx.rsp.u = uSavedRsp;
4605 g_usBs3TestStep++;
4606 }
4607 }
4608 else
4609 Bs3TestFailed("wtf?");
4610
4611 return 0;
4612}
4613
4614
4615/*********************************************************************************************************************************
4616* Near RET *
4617*********************************************************************************************************************************/
/** Declares the 16-, 32- and 64-bit variants (suffixes _c16/_c32/_c64) of a
 *  near-RET assembly test routine template. */
#define PROTO_ALL(a_Template) \
    FNBS3FAR a_Template ## _c16, \
             a_Template ## _c32, \
             a_Template ## _c64
/* Begin/end markers bracketing the near-RET test code; used below to locate
   and mirror the opsize-prefixed test bodies in low (tiled) memory. */
PROTO_ALL(bs3CpuBasic2_retn_opsize_begin);
/* Plain RETN, with and without a 66h operand-size prefix and/or an immediate
   stack-adjustment operand (24 bytes or explicit zero). */
PROTO_ALL(bs3CpuBasic2_retn__ud2);
PROTO_ALL(bs3CpuBasic2_retn_opsize__ud2);
PROTO_ALL(bs3CpuBasic2_retn_i24__ud2);
PROTO_ALL(bs3CpuBasic2_retn_i24_opsize__ud2);
PROTO_ALL(bs3CpuBasic2_retn_i0__ud2);
PROTO_ALL(bs3CpuBasic2_retn_i0_opsize__ud2);
/* 64-bit only: REX.W and 66h prefix combinations in both orders, since the
   order of the prefixes matters on some CPUs (see the 64-bit test table). */
FNBS3FAR bs3CpuBasic2_retn_rexw__ud2_c64;
FNBS3FAR bs3CpuBasic2_retn_i24_rexw__ud2_c64;
FNBS3FAR bs3CpuBasic2_retn_opsize_rexw__ud2_c64;
FNBS3FAR bs3CpuBasic2_retn_rexw_opsize__ud2_c64;
FNBS3FAR bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64;
FNBS3FAR bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64;
PROTO_ALL(bs3CpuBasic2_retn_opsize_end);
#undef PROTO_ALL
4637
4638
4639static void bs3CpuBasic2_retn_PrepStack(BS3PTRUNION StkPtr, PCBS3REGCTX pCtxExpected, uint8_t cbAddr)
4640{
4641 StkPtr.pu32[3] = UINT32_MAX;
4642 StkPtr.pu32[2] = UINT32_MAX;
4643 StkPtr.pu32[1] = UINT32_MAX;
4644 StkPtr.pu32[0] = UINT32_MAX;
4645 StkPtr.pu32[-1] = UINT32_MAX;
4646 StkPtr.pu32[-2] = UINT32_MAX;
4647 StkPtr.pu32[-3] = UINT32_MAX;
4648 StkPtr.pu32[-4] = UINT32_MAX;
4649 if (cbAddr == 2)
4650 StkPtr.pu16[0] = pCtxExpected->rip.u16;
4651 else if (cbAddr == 4)
4652 StkPtr.pu32[0] = pCtxExpected->rip.u32;
4653 else
4654 StkPtr.pu64[0] = pCtxExpected->rip.u64;
4655}
4656
4657
/**
 * Entrypoint for near RET tests.
 *
 * @returns 0 or BS3TESTDOMODE_SKIPPED.
 * @param   bMode   The CPU mode we're testing.
 *
 * @note    When testing v8086 code, we'll be running in v8086 mode. So, careful
 *          with control registers and such.
 */
BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_near_ret)(uint8_t bMode)
{
    BS3TRAPFRAME    TrapCtx;        /* Actual trap frame from each test run. */
    BS3REGCTX       Ctx;            /* Register context to run the test with. */
    BS3REGCTX       CtxExpected;    /* Context we expect at the trapping insn. */
    unsigned        iTest;
    BS3PTRUNION     StkPtr;         /* Current-mode pointer to the test stack. */

    /* Make sure they're allocated. */
    Bs3MemZero(&Ctx, sizeof(Ctx));
    Bs3MemZero(&CtxExpected, sizeof(Ctx));
    Bs3MemZero(&TrapCtx, sizeof(TrapCtx));

    bs3CpuBasic2_SetGlobals(bMode);

    /*
     * Create a context.
     *
     * ASSUMES we're in ring-0, we'll be using the ring-2 stack for the testing
     * to avoid overwriting it.
     */
    Bs3RegCtxSaveEx(&Ctx, bMode, 768);
    Ctx.rsp.u = BS3_ADDR_STACK - _16K;
    Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));

    StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
    //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);

    /*
     * 16-bit tests.
     */
    if (BS3_MODE_IS_16BIT_CODE(bMode))
    {
        static struct
        {
            bool        fOpSizePfx;     /* Whether the RETN has a 66h operand-size prefix. */
            uint16_t    cbImm;          /* RETN immediate: extra bytes popped off the stack. */
            FPFNBS3FAR  pfnTest;        /* The assembly test routine ending with UD2. */
        }
        const s_aTests[] =
        {
            { false,  0, bs3CpuBasic2_retn__ud2_c16, },
            { true,   0, bs3CpuBasic2_retn_opsize__ud2_c16, },
            { false, 24, bs3CpuBasic2_retn_i24__ud2_c16, },
            { true,  24, bs3CpuBasic2_retn_i24_opsize__ud2_c16, },
            { false,  0, bs3CpuBasic2_retn_i0__ud2_c16, },
            { true,   0, bs3CpuBasic2_retn_i0_opsize__ud2_c16, },
        };

        for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
        {
            uint8_t const BS3_FAR *fpbCode;

            Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
            fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
            /* The byte just before each test routine's entry point holds the
               signed distance from the entry point to the UD2 we trap on. */
            CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
            g_uBs3TrapEipHint = CtxExpected.rip.u32;
            CtxExpected.cs = Ctx.cs;
            /* 16-bit code: RETN pops a 2-byte return address by default and a
               4-byte one with the operand-size prefix, plus cbImm either way. */
            if (!s_aTests[iTest].fOpSizePfx)
                CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
            else
                CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
            //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
            //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
            bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
            g_usBs3TestStep++;

            /* Again single stepping: */
            //Bs3TestPrintf("stepping...\n");
            Bs3RegSetDr6(X86_DR6_INIT_VAL);
            Ctx.rflags.u16 |= X86_EFL_TF;
            CtxExpected.rflags.u16 = Ctx.rflags.u16;
            bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
            Ctx.rflags.u16 &= ~X86_EFL_TF;
            CtxExpected.rflags.u16 = Ctx.rflags.u16;
            g_usBs3TestStep++;
        }
    }
    /*
     * 32-bit tests.
     */
    else if (BS3_MODE_IS_32BIT_CODE(bMode))
    {
        static struct
        {
            uint8_t     cBits;          /* Code bitness (informational). */
            bool        fOpSizePfx;     /* Whether the RETN has a 66h operand-size prefix. */
            uint16_t    cbImm;          /* RETN immediate: extra bytes popped off the stack. */
            FPFNBS3FAR  pfnTest;        /* The assembly test routine ending with UD2. */
        }
        const s_aTests[] =
        {
            { 32, false,  0, bs3CpuBasic2_retn__ud2_c32, },
            { 32, true,   0, bs3CpuBasic2_retn_opsize__ud2_c32, },
            { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c32, },
            { 32, true,  24, bs3CpuBasic2_retn_i24_opsize__ud2_c32, },
            { 32, false,  0, bs3CpuBasic2_retn_i0__ud2_c32, },
            { 32, true,   0, bs3CpuBasic2_retn_i0_opsize__ud2_c32, },
        };

        /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests.
           An opsize-prefixed RETN in 32-bit code pops only a 16-bit EIP, so it
           resumes at (original EIP & 0xffff), i.e. in low/tiled memory.  We plant
           a UD2 at that low address and replace the original UD2 with ICEBP so a
           CPU ignoring the prefix would be detected. */
        uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c32);
        uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c32) - offLow;
        uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
        uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
        if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
            Bs3TestFailedF("Opsize overriden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
        Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
        for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
            if (s_aTests[iTest].fOpSizePfx)
            {
                uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
                uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
                BS3_ASSERT(offUd - offLow + 1 < cbLow);
                pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
                pbCode16[offUd + 1] = 0xf1;
                pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
                pbLow[offUd + 1] = 0x0b;
            }

        for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
        {
            uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);

            Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
            /* Byte before the entry point = signed offset to the UD2. */
            CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
            CtxExpected.cs = Ctx.cs;
            /* Default: pop a 4-byte return address.  With the opsize prefix
               only 2 bytes are popped and EIP is truncated to 16 bits. */
            if (!s_aTests[iTest].fOpSizePfx)
                CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
            else
            {
                CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
                CtxExpected.rip.u &= UINT16_MAX;
            }
            g_uBs3TrapEipHint = CtxExpected.rip.u32;
            //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
            //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
            bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
            g_usBs3TestStep++;

            /* Again single stepping: */
            //Bs3TestPrintf("stepping...\n");
            Bs3RegSetDr6(X86_DR6_INIT_VAL);
            Ctx.rflags.u16 |= X86_EFL_TF;
            CtxExpected.rflags.u16 = Ctx.rflags.u16;
            bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
            Ctx.rflags.u16 &= ~X86_EFL_TF;
            CtxExpected.rflags.u16 = Ctx.rflags.u16;
            g_usBs3TestStep++;
        }
    }
    /*
     * 64-bit tests.
     */
    else if (BS3_MODE_IS_64BIT_CODE(bMode))
    {
        static struct
        {
            uint8_t     cBits;          /* Code bitness (informational). */
            bool        fOpSizePfx;     /* Effective 16-bit operand size (66h not overridden by a later REX.W). */
            uint16_t    cbImm;          /* RETN immediate: extra bytes popped off the stack. */
            FPFNBS3FAR  pfnTest;        /* The assembly test routine ending with UD2. */
        }
        const s_aTests[] =
        {
            /* Note the prefix-order variants: when REX.W comes after 66h it
               restores the 64-bit operand size, hence fOpSizePfx = false for
               the opsize_rexw entries but true for rexw_opsize. */
            { 32, false,  0, bs3CpuBasic2_retn__ud2_c64, },
            { 32, false,  0, bs3CpuBasic2_retn_rexw__ud2_c64, },
            { 32, true,   0, bs3CpuBasic2_retn_opsize__ud2_c64, },
            { 32, false,  0, bs3CpuBasic2_retn_opsize_rexw__ud2_c64, },
            { 32, true,   0, bs3CpuBasic2_retn_rexw_opsize__ud2_c64, },
            { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c64, },
            { 32, false, 24, bs3CpuBasic2_retn_i24_rexw__ud2_c64, },
            { 32, true,  24, bs3CpuBasic2_retn_i24_opsize__ud2_c64, },
            { 32, false, 24, bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64, },
            { 32, true,  24, bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64, },
            { 32, false,  0, bs3CpuBasic2_retn_i0__ud2_c64, },
            { 32, true,   0, bs3CpuBasic2_retn_i0_opsize__ud2_c64, },
        };
        BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
        bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */

        /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed
           tests, unless we're on intel where the opsize prefix is ignored. Here we
           just fill low memory with int3's so we can detect non-intel behaviour. */
        uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c64);
        uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c64) - offLow;
        uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
        uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
        if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
            Bs3TestFailedF("Opsize overriden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
        Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
        if (!fFix64OpSize)
            for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
                if (s_aTests[iTest].fOpSizePfx)
                {
                    uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
                    uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
                    BS3_ASSERT(offUd - offLow + 1 < cbLow);
                    pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
                    pbCode16[offUd + 1] = 0xf1;
                    pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
                    pbLow[offUd + 1] = 0x0b;
                }

        for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
        {
            uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);

            Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
            /* Byte before the entry point = signed offset to the UD2. */
            CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
            CtxExpected.cs = Ctx.cs;
            /* Default (or when the CPU forces 64-bit operand size, i.e. Intel):
               pop an 8-byte return address.  Otherwise an effective 66h prefix
               pops only 2 bytes and truncates RIP to 16 bits. */
            if (!s_aTests[iTest].fOpSizePfx || fFix64OpSize)
                CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 8;
            else
            {
                CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
                CtxExpected.rip.u &= UINT16_MAX;
            }
            g_uBs3TrapEipHint = CtxExpected.rip.u32;
            //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
            //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
            bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
            g_usBs3TestStep++;

            /* Again single stepping: */
            //Bs3TestPrintf("stepping...\n");
            Bs3RegSetDr6(X86_DR6_INIT_VAL);
            Ctx.rflags.u16 |= X86_EFL_TF;
            CtxExpected.rflags.u16 = Ctx.rflags.u16;
            bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
            Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
            bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
            Ctx.rflags.u16 &= ~X86_EFL_TF;
            CtxExpected.rflags.u16 = Ctx.rflags.u16;
            g_usBs3TestStep++;
        }
    }
    else
        Bs3TestFailed("wtf?");

    return 0;
}
4919
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette