/* $Id: tstVMMR0CallHost-1.cpp 91775 2021-10-17 10:53:28Z vboxsync $ */
/** @file
 * Testcase for the VMMR0JMPBUF operations.
 */

/*
 * Copyright (C) 2006-2020 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*********************************************************************************************************************************
*   Header Files                                                                                                                 *
*********************************************************************************************************************************/
#include <iprt/errcore.h>
#include <VBox/param.h>
#include <iprt/alloca.h>
#include <iprt/initterm.h>
#include <iprt/rand.h>
#include <iprt/string.h>
#include <iprt/stream.h>
#include <iprt/test.h>

#define IN_VMM_R0
#define IN_RING0 /* pretend we're in Ring-0 to get the prototypes. */
#include <VBox/vmm/vmm.h>
#include "VMMInternal.h"


/*********************************************************************************************************************************
*   Defined Constants And Macros                                                                                                 *
*********************************************************************************************************************************/
#if !defined(VMM_R0_SWITCH_STACK) && !defined(VMM_R0_NO_SWITCH_STACK)
# error "VMM_R0_SWITCH_STACK or VMM_R0_NO_SWITCH_STACK has to be defined."
#endif


/*********************************************************************************************************************************
*   Global Variables                                                                                                             *
*********************************************************************************************************************************/
/** The jump buffer. */
static VMMR0JMPBUF          g_Jmp;
/** The number of jumps we've done. */
static unsigned volatile    g_cJmps;
/** Number of bytes allocated last time we called foo(). */
static size_t volatile      g_cbFoo;
/** Number of bytes used last time we called foo(). */
static intptr_t volatile    g_cbFooUsed;
/** Set if we're in a long jump. */
static bool                 g_fInLongJmp;


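/**
 * Stack user called from tst2().
 *
 * Allocates and fills a variable sized buffer on the stack with alloca(),
 * checks that the resulting usage stays within the VMM stack, and when
 * (i % 7) <= 1 long jumps back to the setjmp point via vmmR0CallRing3LongJmp.
 *
 * @returns i on a plain call, i + 10000 when resumed after a long jump, and
 *          a negative value if a check or the long jump fails.
 */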
int foo(int i, int iZero, int iMinusOne)
{
    NOREF(iZero);

    /* allocate a buffer which we fill up to the end. */
    size_t cb = (i % 1555) + 32;
    g_cbFoo = cb;
    char *pv = (char *)alloca(cb);
    RTStrPrintf(pv, cb, "i=%d%*s\n", i, cb, "");
#ifdef VMM_R0_SWITCH_STACK
    g_cbFooUsed = VMM_STACK_SIZE - ((uintptr_t)pv - (uintptr_t)g_Jmp.pvSavedStack);
    RTTESTI_CHECK_MSG_RET(g_cbFooUsed < (intptr_t)VMM_STACK_SIZE - 128, ("%#x - (%p - %p) -> %#x; cb=%#x i=%d\n", VMM_STACK_SIZE, pv, g_Jmp.pvSavedStack, g_cbFooUsed, cb, i), -15);
#elif defined(RT_ARCH_AMD64)
    g_cbFooUsed = (uintptr_t)g_Jmp.rsp - (uintptr_t)pv;
    RTTESTI_CHECK_MSG_RET(g_cbFooUsed < VMM_STACK_SIZE - 128, ("%p - %p -> %#x; cb=%#x i=%d\n", g_Jmp.rsp, pv, g_cbFooUsed, cb, i), -15);
#elif defined(RT_ARCH_X86)
    g_cbFooUsed = (uintptr_t)g_Jmp.esp - (uintptr_t)pv;
    RTTESTI_CHECK_MSG_RET(g_cbFooUsed < (intptr_t)VMM_STACK_SIZE - 128, ("%p - %p -> %#x; cb=%#x i=%d\n", g_Jmp.esp, pv, g_cbFooUsed, cb, i), -15);
#endif

    /* Twice in a row, every 7th time. */
    if ((i % 7) <= 1)
    {
        g_cJmps++;
        g_fInLongJmp = true;
        int rc = vmmR0CallRing3LongJmp(&g_Jmp, 42);
        g_fInLongJmp = false;
        if (!rc)
            return i + 10000;
        return -1;
    }
    NOREF(iMinusOne);
    return i;
}


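/**
 * Callback executed via vmmR0CallRing3SetJmp() (see stackRandom()).
 *
 * Validates the arguments, calls foo() and checks that it returns the value
 * expected for this iteration (i + 10000 on long jump rounds, i otherwise).
 *
 * @returns 0 on success, 1 on failure.
 * @param   i   The iteration number, passed in via the PVM argument.
 * @param   i2  Must be zero, passed in via the PVMCPU argument.
 */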
DECLCALLBACK(int) tst2(intptr_t i, intptr_t i2)
{
    RTTESTI_CHECK_MSG_RET(i >= 0 && i <= 8192, ("i=%d is out of range [0..8192]\n", i), 1);
    RTTESTI_CHECK_MSG_RET(i2 == 0, ("i2=%d is out of range [0]\n", i2), 1);
    int iExpect = (i % 7) <= 1 ? i + 10000 : i;
    int rc = foo(i, 0, -1);
    RTTESTI_CHECK_MSG_RET(rc == iExpect, ("i=%d rc=%d expected=%d\n", i, rc, iExpect), 1);
    return 0;
}


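/**
 * Calls vmmR0CallRing3SetJmp() at a randomized stack depth.
 *
 * Allocates between 1 and 96 bytes with alloca() on AMD64 (1 byte elsewhere)
 * and scribbles on the buffer before and after the call, so the jump buffer
 * is established at varying stack offsets.
 *
 * @returns The status code returned by vmmR0CallRing3SetJmp().
 */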
DECLCALLBACK(DECL_NO_INLINE(RT_NOTHING, int)) stackRandom(PVMMR0JMPBUF pJmpBuf, PFNVMMR0SETJMP pfn, PVM pVM, PVMCPU pVCpu)
{
#ifdef RT_ARCH_AMD64
    uint32_t cbRand = RTRandU32Ex(1, 96);
#else
    uint32_t cbRand = 1;
#endif
    uint8_t volatile *pabFuzz = (uint8_t volatile *)alloca(cbRand);
    memset((void *)pabFuzz, 0xfa, cbRand);
    int rc = vmmR0CallRing3SetJmp(pJmpBuf, pfn, pVM, pVCpu);
    memset((void *)pabFuzz, 0xaf, cbRand);
    return rc;
}


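/**
 * Drives a series of setjmp/long jump round trips.
 *
 * Resets the jump buffer and saved stack, then iterates i from iFrom towards
 * iTo in steps of iInc, calling tst2() via stackRandom() for each value.
 * When a call long jumps, the next iteration resumes it by passing the same
 * iOrg value.  With VMM_R0_SWITCH_STACK the stack pointer is also made to
 * slide during the second half of the iterations.
 */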
void tst(int iFrom, int iTo, int iInc)
{
#ifdef VMM_R0_SWITCH_STACK
    int const cIterations = iFrom > iTo ? iFrom - iTo : iTo - iFrom;
    void *pvPrev = alloca(1);
#endif

    RTR0PTR R0PtrSaved = g_Jmp.pvSavedStack;
    RT_ZERO(g_Jmp);
    g_Jmp.pvSavedStack = R0PtrSaved;
    memset((void *)g_Jmp.pvSavedStack, '\0', VMM_STACK_SIZE);
    g_cbFoo = 0;
    g_cJmps = 0;
    g_cbFooUsed = 0;
    g_fInLongJmp = false;

    int iOrg = iFrom;
    for (int i = iFrom, iItr = 0; i != iTo; i += iInc, iItr++)
    {
        if (!g_fInLongJmp)
            iOrg = i;
        int rc = stackRandom(&g_Jmp, (PFNVMMR0SETJMP)(uintptr_t)tst2, (PVM)(uintptr_t)iOrg, 0);
        RTTESTI_CHECK_MSG_RETV(rc == (g_fInLongJmp ? 42 : 0),
                               ("i=%d iOrg=%d rc=%d setjmp; cbFoo=%#x cbFooUsed=%#x fInLongJmp=%d\n",
                                i, iOrg, rc, g_cbFoo, g_cbFooUsed, g_fInLongJmp));

#ifdef VMM_R0_SWITCH_STACK
        /* Make the stack pointer slide for the second half of the calls. */
        if (iItr >= cIterations / 2)
        {
            /* Note! gcc does funny rounding up of alloca(). */
# if !defined(VBOX_WITH_GCC_SANITIZER) && !defined(__MSVC_RUNTIME_CHECKS)
            void *pv2 = alloca((i % 63) | 1);
            size_t cb2 = (uintptr_t)pvPrev - (uintptr_t)pv2;
# else
            size_t cb2 = ((i % 3) + 1) * 16; /* We get what we ask for here, and it's not at RSP/ESP due to guards. */
            void *pv2 = alloca(cb2);
# endif
            RTTESTI_CHECK_MSG(cb2 >= 16 && cb2 <= 128, ("cb2=%zu pv2=%p pvPrev=%p iAlloca=%d\n", cb2, pv2, pvPrev, iItr));
            memset(pv2, 0xff, cb2);
            memset(pvPrev, 0xee, 1);
            pvPrev = pv2;
        }
#endif
    }
    RTTESTI_CHECK_MSG_RETV(g_cJmps, ("No jumps!"));
    if (g_Jmp.cbUsedAvg || g_Jmp.cUsedTotal)
        RTTestIPrintf(RTTESTLVL_ALWAYS, "cbUsedAvg=%#x cbUsedMax=%#x cUsedTotal=%#llx\n",
                      g_Jmp.cbUsedAvg, g_Jmp.cbUsedMax, g_Jmp.cUsedTotal);
}


#if defined(VMM_R0_SWITCH_STACK) && defined(RT_ARCH_AMD64)
/*
 * Stack switch back tests.
 */
RT_C_DECLS_BEGIN
DECLCALLBACK(int) tstWrapped1(        PVMMR0JMPBUF pJmp, uintptr_t u1, uintptr_t u2, uintptr_t u3, uintptr_t u4, uintptr_t u5,
                                      uintptr_t u6, uintptr_t u7, uintptr_t u8, uintptr_t u9);
DECLCALLBACK(int) StkBack_tstWrapped1(PVMMR0JMPBUF pJmp, uintptr_t u1, uintptr_t u2, uintptr_t u3, uintptr_t u4, uintptr_t u5,
                                      uintptr_t u6, uintptr_t u7, uintptr_t u8, uintptr_t u9);
DECLCALLBACK(int) tstWrappedThin(PVMMR0JMPBUF pJmp);
DECLCALLBACK(int) StkBack_tstWrappedThin(PVMMR0JMPBUF pJmp);
RT_C_DECLS_END


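/**
 * Reached from tstSwitchBackInner() through the tstWrapped1() wrapper, which
 * is not defined in this file.
 *
 * Verifies that all nine arguments survived the wrapper and that the local
 * stack frame lies outside the switched stack (g_Jmp.pvSavedStack), i.e. that
 * execution really switched back to the original stack.
 *
 * @returns 42 on success, a negative value on failure.
 */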
DECLCALLBACK(int) StkBack_tstWrapped1(PVMMR0JMPBUF pJmp, uintptr_t u1, uintptr_t u2, uintptr_t u3, uintptr_t u4, uintptr_t u5,
                                      uintptr_t u6, uintptr_t u7, uintptr_t u8, uintptr_t u9)
{
    RTTESTI_CHECK_RET(pJmp == &g_Jmp, -1);
    RTTESTI_CHECK_RET(u1 == ~(uintptr_t)1U, -2);
    RTTESTI_CHECK_RET(u2 == ~(uintptr_t)2U, -3);
    RTTESTI_CHECK_RET(u3 == ~(uintptr_t)3U, -4);
    RTTESTI_CHECK_RET(u4 == ~(uintptr_t)4U, -5);
    RTTESTI_CHECK_RET(u5 == ~(uintptr_t)5U, -6);
    RTTESTI_CHECK_RET(u6 == ~(uintptr_t)6U, -7);
    RTTESTI_CHECK_RET(u7 == ~(uintptr_t)7U, -8);
    RTTESTI_CHECK_RET(u8 == ~(uintptr_t)8U, -9);
    RTTESTI_CHECK_RET(u9 == ~(uintptr_t)9U, -10);

    void *pv = alloca(32);
    memset(pv, 'a', 32);
    RTTESTI_CHECK_RET((uintptr_t)pv - (uintptr_t)g_Jmp.pvSavedStack > VMM_STACK_SIZE, -11);

    return 42;
}


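/**
 * Callback executed via vmmR0CallRing3SetJmp() for the switch back test.
 *
 * Checks that it is running on the switched stack, then calls tstWrapped1()
 * with nine distinctive arguments and verifies the result.
 *
 * @returns 42 on success, a negative value on failure.
 * @param   i1  Must be -42.
 * @param   i2  Must be the address of g_Jmp.
 */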
DECLCALLBACK(int) tstSwitchBackInner(intptr_t i1, intptr_t i2)
{
    RTTESTI_CHECK_RET(i1 == -42, -20);
    RTTESTI_CHECK_RET(i2 == (intptr_t)&g_Jmp, -21);

    void *pv = alloca(32);
    memset(pv, 'b', 32);
    RTTESTI_CHECK_RET((uintptr_t)pv - (uintptr_t)g_Jmp.pvSavedStack < VMM_STACK_SIZE, -22);

    int rc = tstWrapped1(&g_Jmp,
                         ~(uintptr_t)1U,
                         ~(uintptr_t)2U,
                         ~(uintptr_t)3U,
                         ~(uintptr_t)4U,
                         ~(uintptr_t)5U,
                         ~(uintptr_t)6U,
                         ~(uintptr_t)7U,
                         ~(uintptr_t)8U,
                         ~(uintptr_t)9U);
    RTTESTI_CHECK_RET(rc == 42, -23);
    return rc;
}


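/**
 * Thin variant of StkBack_tstWrapped1(): takes only the jump buffer pointer
 * and just checks that the local stack frame is outside the switched stack.
 *
 * @returns 42 on success, a negative value on failure.
 */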
DECLCALLBACK(int) StkBack_tstWrappedThin(PVMMR0JMPBUF pJmp)
{
    RTTESTI_CHECK_RET(pJmp == &g_Jmp, -31);

    void *pv = alloca(32);
    memset(pv, 'c', 32);
    RTTESTI_CHECK_RET((uintptr_t)pv - (uintptr_t)g_Jmp.pvSavedStack > VMM_STACK_SIZE, -32);

    return 42;
}

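/** Callback that simply forwards to the tstWrappedThin() wrapper. */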
DECLCALLBACK(int) tstSwitchBackInnerThin(intptr_t i1, intptr_t i2)
{
    RT_NOREF(i1);
    return tstWrappedThin((PVMMR0JMPBUF)i2);
}


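/**
 * Driver for the stack switch back tests.
 *
 * Resets the jump buffer, then runs tstSwitchBackInner() and
 * tstSwitchBackInnerThin() via stackRandom(), expecting 42 from both.
 */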
void tstSwitchBack(void)
{
    RTR0PTR R0PtrSaved = g_Jmp.pvSavedStack;
    RT_ZERO(g_Jmp);
    g_Jmp.pvSavedStack = R0PtrSaved;
    memset((void *)g_Jmp.pvSavedStack, '\0', VMM_STACK_SIZE);
    g_cbFoo = 0;
    g_cJmps = 0;
    g_cbFooUsed = 0;
    g_fInLongJmp = false;

    //for (int i = iFrom, iItr = 0; i != iTo; i += iInc, iItr++)
    {
        int rc = stackRandom(&g_Jmp, (PFNVMMR0SETJMP)(uintptr_t)tstSwitchBackInner, (PVM)(intptr_t)-42, (PVMCPU)&g_Jmp);
        RTTESTI_CHECK_MSG_RETV(rc == 42,
                               ("i=%d iOrg=%d rc=%d setjmp; cbFoo=%#x cbFooUsed=%#x fInLongJmp=%d\n",
                                0, 0 /*i, iOrg*/, rc, g_cbFoo, g_cbFooUsed, g_fInLongJmp));

        rc = stackRandom(&g_Jmp, (PFNVMMR0SETJMP)(uintptr_t)tstSwitchBackInnerThin, NULL, (PVMCPU)&g_Jmp);
        RTTESTI_CHECK_MSG_RETV(rc == 42,
                               ("i=%d iOrg=%d rc=%d setjmp; cbFoo=%#x cbFooUsed=%#x fInLongJmp=%d\n",
                                0, 0 /*i, iOrg*/, rc, g_cbFoo, g_cbFooUsed, g_fInLongJmp));

    }
    //RTTESTI_CHECK_MSG_RETV(g_cJmps, ("No jumps!"));
}

#endif


int main()
{
    /*
     * Init.
     */
    RTTEST hTest;
#ifdef VMM_R0_NO_SWITCH_STACK
    RTEXITCODE rcExit = RTTestInitAndCreate("tstVMMR0CallHost-1", &hTest);
#else
    RTEXITCODE rcExit = RTTestInitAndCreate("tstVMMR0CallHost-2", &hTest);
#endif
    if (rcExit != RTEXITCODE_SUCCESS)
        return rcExit;
    RTTestBanner(hTest);

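    /* Allocate the stack save area as a tail-guarded block so overruns run into a guard page. */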
    g_Jmp.pvSavedStack = (RTR0PTR)RTTestGuardedAllocTail(hTest, VMM_STACK_SIZE);

    /*
     * Run two tests with about 1000 long jumps each.
     */
    RTTestSub(hTest, "Increasing stack usage");
    tst(0, 7000, 1);
    RTTestSub(hTest, "Decreasing stack usage");
    tst(7599, 0, -1);
#if defined(VMM_R0_SWITCH_STACK) && defined(RT_ARCH_AMD64)
    RTTestSub(hTest, "Switch back");
    tstSwitchBack();
#endif

    return RTTestSummaryAndDestroy(hTest);
}