VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-x86.asm@ 93723

Last change on this file since 93723 was 93115, checked in by vboxsync, 3 years ago

scm --update-copyright-year

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id Revision
File size: 11.5 KB
Line 
1; $Id: VMMR0JmpA-x86.asm 93115 2022-01-01 11:31:46Z vboxsync $
2;; @file
3; VMM - R0 SetJmp / LongJmp routines for X86.
4;
5
6;
7; Copyright (C) 2006-2022 Oracle Corporation
8;
9; This file is part of VirtualBox Open Source Edition (OSE), as
10; available from http://www.virtualbox.org. This file is free software;
11; you can redistribute it and/or modify it under the terms of the GNU
12; General Public License (GPL) as published by the Free Software
13; Foundation, in version 2 as it comes in the "COPYING" file of the
14; VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15; hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16;
17
18;*******************************************************************************
19;* Header Files *
20;*******************************************************************************
21%include "VBox/asmdefs.mac"
22%include "VMMInternal.mac"
23%include "VBox/err.mac"
24%include "VBox/param.mac"
25
26
27;*******************************************************************************
28;* Defined Constants And Macros *
29;*******************************************************************************
30%define RESUME_MAGIC 07eadf00dh
31%define STACK_PADDING 0eeeeeeeeh
32
33
34
35BEGINCODE
36
37
38;;
39; The setjmp variant used for calling Ring-3.
40;
41; This differs from the normal setjmp in that it will resume VMMRZCallRing3 if we're
42; in the middle of a ring-3 call. Another difference is the function pointer and
43; argument. This has to do with resuming code and the stack frame of the caller.
44;
45; @returns VINF_SUCCESS on success or whatever is passed to vmmR0CallRing3LongJmp.
46; @param pJmpBuf msc:rcx gcc:rdi x86:[esp+0x04] Our jmp_buf.
47; @param pfn msc:rdx gcc:rsi x86:[esp+0x08] The function to be called when not resuming.
48; @param pvUser1 msc:r8 gcc:rdx x86:[esp+0x0c] The argument of that function.
49; @param pvUser2 msc:r9 gcc:rcx x86:[esp+0x10] The argument of that function.
50;
51BEGINPROC vmmR0CallRing3SetJmp
52GLOBALNAME vmmR0CallRing3SetJmp2
53GLOBALNAME vmmR0CallRing3SetJmpEx
; SetJmp2 and SetJmpEx are aliases for the exact same code (same address);
; cdecl arguments live at [esp+04h..esp+10h] throughout the entry path below.
 54 ;
 55 ; Save the registers.
 56 ;
 57 mov edx, [esp + 4h] ; pJmpBuf
 58 mov [xDX + VMMR0JMPBUF.ebx], ebx
 59 mov [xDX + VMMR0JMPBUF.esi], esi
 60 mov [xDX + VMMR0JMPBUF.edi], edi
 61 mov [xDX + VMMR0JMPBUF.ebp], ebp
 62 mov xAX, [esp] ; return address of our caller = longjmp target EIP.
 63 mov [xDX + VMMR0JMPBUF.eip], xAX
 64 lea ecx, [esp + 4] ; caller's esp after our 'ret' (used in resume)
 65 mov [xDX + VMMR0JMPBUF.esp], ecx
 66 pushf
 67 pop xAX
 68 mov [xDX + VMMR0JMPBUF.eflags], xAX
 69
 70 ;
 71 ; If we're not in a ring-3 call, call pfn and return.
 72 ;
 73 test byte [xDX + VMMR0JMPBUF.fInRing3Call], 1
 74 jnz .resume
 75
; First-call path: optionally switch to the dedicated saved stack, then
; invoke pfn(pvUser1, pvUser2) and clean up.
 76 mov ebx, edx ; pJmpBuf -> ebx (persistent reg)
 77%ifdef VMM_R0_SWITCH_STACK
 78 mov esi, [ebx + VMMR0JMPBUF.pvSavedStack]
 79 test esi, esi
 80 jz .entry_error
 81 %ifdef VBOX_STRICT
 82 cmp dword [esi], 0h ; first dword doubles as an in-use/overflow marker (reset below).
 83 jne .entry_error
 84 mov edx, esi
 85 mov edi, esi
 86 mov ecx, VMM_STACK_SIZE / 4
 87 mov eax, STACK_PADDING
; Fill the whole stack area with STACK_PADDING so usage can be measured later.
; NOTE(review): 'repne stosd' — REPNE combined with STOS is left undefined by the
; SDM though it behaves like REP on real CPUs; plain 'rep stosd' is the
; conventional spelling. Confirm intent upstream.
 88 repne stosd
 89 %endif
; esi = top-of-stack area minus a 32-byte guard region holding the markers.
 90 lea esi, [esi + VMM_STACK_SIZE - 32]
 91 mov [esi + 1ch], dword 0deadbeefh ; Marker 1.
 92 mov [esi + 18h], ebx ; Save pJmpBuf pointer.
 93 mov [esi + 14h], dword 00c00ffeeh ; Marker 2.
 94 mov [esi + 10h], dword 0f00dbeefh ; Marker 3.
 95 mov edx, [esp + 10h] ; pvArg2
 96 mov ecx, [esp + 0ch] ; pvArg1
 97 mov eax, [esp + 08h] ; pfn
 98 %if 1 ; Use this to eat off some extra stack - handy for finding paths using lots of stack.
 99 %define FRAME_OFFSET 0
 100 %else
 101 %define FRAME_OFFSET 1024
 102 %endif
; Lay out the cdecl frame for pfn on the new stack: [esp]=pvArg1, [esp+4]=pvArg2.
 103 mov [esi - FRAME_OFFSET + 04h], edx
 104 mov [esi - FRAME_OFFSET ], ecx
 105 lea esp, [esi - FRAME_OFFSET] ; Switch stack!
 106 call eax
 107 and dword [esi + 1ch], byte 0 ; reset marker.
 108
 109 %ifdef VBOX_STRICT
 110 ; Calc stack usage and check for overflows.
 111 mov edi, [ebx + VMMR0JMPBUF.pvSavedStack]
 112 cmp dword [edi], STACK_PADDING ; Check for obvious stack overflow.
 113 jne .stack_overflow
 114 mov esi, eax ; save eax
 115 mov eax, STACK_PADDING
 116 mov ecx, VMM_STACK_SIZE / 4
 117 cld
; Scan upward for the first dword that is no longer STACK_PADDING;
; the remaining ecx (in dwords) is the untouched tail of the stack.
 118 repe scasd
 119 shl ecx, 2 ; *4
 120 cmp ecx, VMM_STACK_SIZE - 64 ; Less than 64 bytes left -> overflow as well.
 121 mov eax, esi ; restore eax in case of overflow (esi remains used)
 122 jae .stack_overflow_almost
 123
 124 ; Update stack usage statistics.
 125 cmp ecx, [ebx + VMMR0JMPBUF.cbUsedMax] ; New max usage?
 126 jle .no_used_max
 127 mov [ebx + VMMR0JMPBUF.cbUsedMax], ecx
 128.no_used_max:
 129 ; To simplify the average stuff, just historize before we hit div errors.
; (The mask checks bits 30/31 of cUsedTotal, i.e. triggers around 2^30 samples.)
 130 inc dword [ebx + VMMR0JMPBUF.cUsedTotal]
 131 test [ebx + VMMR0JMPBUF.cUsedTotal], dword 0c0000000h
 132 jz .no_historize
 133 mov dword [ebx + VMMR0JMPBUF.cUsedTotal], 2
 134 mov edi, [ebx + VMMR0JMPBUF.cbUsedAvg]
 135 mov [ebx + VMMR0JMPBUF.cbUsedTotal], edi
 136 mov dword [ebx + VMMR0JMPBUF.cbUsedTotal + 4], 0
 137.no_historize:
 138 add [ebx + VMMR0JMPBUF.cbUsedTotal], ecx
 139 adc dword [ebx + VMMR0JMPBUF.cbUsedTotal + 4], 0
; Average = cbUsedTotal (64-bit in edx:eax) / cUsedTotal (edi).
 140 mov eax, [ebx + VMMR0JMPBUF.cbUsedTotal]
 141 mov edx, [ebx + VMMR0JMPBUF.cbUsedTotal + 4]
 142 mov edi, [ebx + VMMR0JMPBUF.cUsedTotal]
 143 div edi
 144 mov [ebx + VMMR0JMPBUF.cbUsedAvg], eax
 145
 146 mov eax, esi ; restore eax (final, esi released)
 147
 148 mov edi, [ebx + VMMR0JMPBUF.pvSavedStack]
 149 mov dword [edi], 0h ; Reset the overflow marker.
 150 %endif ; VBOX_STRICT
 151
 152%else ; !VMM_R0_SWITCH_STACK
; No stack switching: call pfn(pvArg1, pvArg2) on the current stack.
 153 mov ecx, [esp + 0ch] ; pvArg1
 154 mov edx, [esp + 10h] ; pvArg2
 155 mov eax, [esp + 08h] ; pfn
 156 sub esp, 12 ; align the stack on a 16-byte boundary.
 157 mov [esp ], ecx
 158 mov [esp + 04h], edx
 159 call eax
 160%endif ; !VMM_R0_SWITCH_STACK
 161 mov edx, ebx ; pJmpBuf -> edx (volatile reg)
 162
 163 ;
 164 ; Return like in the long jump but clear eip, no short cuts here.
 165 ;
.proper_return requires: edx/xDX = pJmpBuf, eax = status code to return. ; (comment)
 166.proper_return:
 167 mov ebx, [xDX + VMMR0JMPBUF.ebx]
 168 mov esi, [xDX + VMMR0JMPBUF.esi]
 169 mov edi, [xDX + VMMR0JMPBUF.edi]
 170 mov ebp, [xDX + VMMR0JMPBUF.ebp]
 171 mov xCX, [xDX + VMMR0JMPBUF.eip]
 172 and dword [xDX + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
 173 mov esp, [xDX + VMMR0JMPBUF.esp]
 174 push dword [xDX + VMMR0JMPBUF.eflags]
 175 popf
 176 jmp xCX
 177
 178.entry_error:
 179 mov eax, VERR_VMM_SET_JMP_ERROR
 180 jmp .proper_return
 181
 182.stack_overflow:
 183 mov eax, VERR_VMM_SET_JMP_STACK_OVERFLOW
 184 mov edx, ebx
 185 jmp .proper_return
 186
 187.stack_overflow_almost:
 188 mov eax, VERR_VMM_SET_JMP_STACK_OVERFLOW
 189 mov edx, ebx
 190 jmp .proper_return
 191
 192 ;
 193 ; Aborting resume.
 194 ;
 195.bad:
 196 and dword [xDX + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
 197 mov edi, [xDX + VMMR0JMPBUF.edi]
 198 mov esi, [xDX + VMMR0JMPBUF.esi]
 199 mov ebx, [xDX + VMMR0JMPBUF.ebx]
 200 mov eax, VERR_VMM_SET_JMP_ABORTED_RESUME
 201 ret
 202
 203 ;
 204 ; Resume the interrupted VMMRZCallRing3 call.
 205 ;
 206.resume:
 207 ; Sanity checks.
 208%ifdef VMM_R0_SWITCH_STACK
 209 mov eax, [xDX + VMMR0JMPBUF.pvSavedStack]
 210 %ifdef RT_STRICT
; NOTE(review): this RT_STRICT compare has no conditional branch consuming its
; flags (the LEA below preserves flags, but the next CMP overwrites them) —
; looks like a missing 'jne .bad'; confirm against upstream intent.
 211 cmp dword [eax], STACK_PADDING
 212 %endif
 213 lea eax, [eax + VMM_STACK_SIZE - 32]
 214 cmp dword [eax + 1ch], 0deadbeefh ; Marker 1.
 215 jne .bad
 216 %ifdef RT_STRICT
; NOTE(review): the three checks below read via ESI, but this path computed the
; marker base address in EAX (ESI here still holds the caller's saved value) —
; presumably [eax + ...] was intended; verify against upstream.
 217 cmp [esi + 18h], edx ; The saved pJmpBuf pointer.
 218 jne .bad
 219 cmp dword [esi + 14h], 00c00ffeeh ; Marker 2.
 220 jne .bad
 221 cmp dword [esi + 10h], 0f00dbeefh ; Marker 3.
 222 jne .bad
 223 %endif
 224%else ; !VMM_R0_SWITCH_STACK
; ecx still holds the entry esp+4 captured above; it must equal the SpCheck
; value recorded by vmmR0CallRing3LongJmp.
 225 cmp ecx, [xDX + VMMR0JMPBUF.SpCheck]
 226 jne .bad
 227.espCheck_ok:
 228 mov ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
 229 cmp ecx, VMM_STACK_SIZE ; saved size must be sane...
 230 ja .bad
 231 test ecx, 3 ; ...and dword aligned...
 232 jnz .bad
 233 mov edi, [xDX + VMMR0JMPBUF.esp]
 234 sub edi, [xDX + VMMR0JMPBUF.SpResume]
 235 cmp ecx, edi ; ...and match the esp/SpResume distance.
 236 jne .bad
%endif ; marker kept on its own original line below.
 237%endif
 238
 239%ifdef VMM_R0_SWITCH_STACK
 240 ; Switch stack.
 241 mov esp, [xDX + VMMR0JMPBUF.SpResume]
 242%else
 243 ; Restore the stack.
; Copy cbSavedStack/4 dwords from pvSavedStack back to SpResume (assumes DF=0).
 244 mov ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
 245 shr ecx, 2
 246 mov esi, [xDX + VMMR0JMPBUF.pvSavedStack]
 247 mov edi, [xDX + VMMR0JMPBUF.SpResume]
 248 mov esp, edi
 249 rep movsd
 250%endif ; !VMM_R0_SWITCH_STACK
 251 mov byte [xDX + VMMR0JMPBUF.fInRing3Call], 0
 252
 253 ;
 254 ; Continue where we left off.
 255 ;
 256%ifdef VBOX_STRICT
; The RESUME_MAGIC dword was pushed by vmmR0CallRing3LongJmp (VBOX_STRICT only);
; on mismatch, write to bogus address 123h to provoke an immediate fault.
 257 pop eax ; magic
 258 cmp eax, RESUME_MAGIC
 259 je .magic_ok
 260 mov ecx, 0123h
 261 mov [ecx], edx
 262.magic_ok:
 263%endif
; Pop the register frame pushed by vmmR0CallRing3LongJmp and return success.
 264 popf
 265 pop ebx
 266 pop esi
 267 pop edi
 268 pop ebp
 269 xor eax, eax ; VINF_SUCCESS
 270 ret
 271ENDPROC vmmR0CallRing3SetJmp
272
273
274;;
275; Worker for VMMRZCallRing3.
276; This will save the stack and registers.
277;
278; @param pJmpBuf msc:rcx gcc:rdi x86:[ebp+8] Pointer to the jump buffer.
279; @param rc msc:rdx gcc:rsi x86:[ebp+c] The return code.
280;
281BEGINPROC vmmR0CallRing3LongJmp
; Saves the current ring-0 state into pJmpBuf (snapshotting the stack when not
; stack-switching), marks the buffer as being in a ring-3 call, and jumps back
; to the EIP/ESP recorded by vmmR0CallRing3SetJmp with eax = rc.
 282 ;
 283 ; Save the registers on the stack.
 284 ;
 285 push ebp
 286 mov ebp, esp
 287 push edi
 288 push esi
 289 push ebx
 290 pushf
 291%ifdef VBOX_STRICT
 292 push RESUME_MAGIC ; popped and checked on the resume path in SetJmp.
 293%endif
 294
 295 ;
 296 ; Load parameters.
 297 ;
 298 mov edx, [ebp + 08h] ; pJmpBuf
 299 mov eax, [ebp + 0ch] ; rc
 300
 301 ;
 302 ; Is the jump buffer armed?
 303 ;
 304 cmp dword [xDX + VMMR0JMPBUF.eip], byte 0
 305 je .nok
 306
 307 ;
 308 ; Sanity checks.
 309 ;
 310 mov edi, [xDX + VMMR0JMPBUF.pvSavedStack]
 311 test edi, edi ; darwin may set this to 0.
 312 jz .nok
; esp now (after the pushes above) is where the resume path will continue.
 313 mov [xDX + VMMR0JMPBUF.SpResume], esp
 314%ifndef VMM_R0_SWITCH_STACK
 315 mov esi, esp
 316 mov ecx, [xDX + VMMR0JMPBUF.esp]
 317 sub ecx, esi ; ecx = bytes between the setjmp esp and the current esp.
 318
 319 ; two sanity checks on the size.
 320 cmp ecx, VMM_STACK_SIZE ; check max size.
 321 jnbe .nok
 322
 323 ;
 324 ; Copy the stack.
 325 ;
 326 test ecx, 3 ; check alignment
 327 jnz .nok
 328 mov [xDX + VMMR0JMPBUF.cbSavedStack], ecx
 329 shr ecx, 2
; esi = current esp, edi = pvSavedStack (loaded above): snapshot the live
; stack into the buffer (assumes DF=0).
 330 rep movsd
 331%endif ; !VMM_R0_SWITCH_STACK
 332
 333 ; Save a PC here to assist unwinding.
 334.unwind_point:
 335 mov dword [xDX + VMMR0JMPBUF.SavedEipForUnwind], .unwind_point
 336 mov ecx, [xDX + VMMR0JMPBUF.ebp]
 337 lea ecx, [ecx + 4] ; presumably the caller's return-address slot (saved ebp + 4) — confirm.
 338 mov [xDX + VMMR0JMPBUF.UnwindRetPcLocation], ecx
 339
 340 ; Save ESP & EBP to enable stack dumps
 341 mov ecx, ebp
 342 mov [xDX + VMMR0JMPBUF.SavedEbp], ecx
 343 sub ecx, 4
 344 mov [xDX + VMMR0JMPBUF.SavedEsp], ecx
 345
 346 ; store the last pieces of info.
 347 mov ecx, [xDX + VMMR0JMPBUF.esp]
 348 mov [xDX + VMMR0JMPBUF.SpCheck], ecx ; checked against entry esp on resume.
 349 mov byte [xDX + VMMR0JMPBUF.fInRing3Call], 1 ; makes SetJmp take its .resume path next time.
 350
 351 ;
 352 ; Do the long jump.
 353 ;
 354 mov ebx, [xDX + VMMR0JMPBUF.ebx]
 355 mov esi, [xDX + VMMR0JMPBUF.esi]
 356 mov edi, [xDX + VMMR0JMPBUF.edi]
 357 mov ebp, [xDX + VMMR0JMPBUF.ebp]
 358 mov ecx, [xDX + VMMR0JMPBUF.eip]
 359 mov [xDX + VMMR0JMPBUF.UnwindRetPcValue], ecx
 360 mov esp, [xDX + VMMR0JMPBUF.esp]
 361 push dword [xDX + VMMR0JMPBUF.eflags]
 362 popf
 363 jmp ecx ; resume at the SetJmp caller with eax = rc.
 364
 365 ;
 366 ; Failure: buffer not armed or a sanity check failed.
 367 ;
 368.nok:
 369%ifdef VBOX_STRICT
; Our own RESUME_MAGIC push must still be on top; on mismatch, write to bogus
; address 123h to provoke an immediate fault.
 370 pop eax ; magic
 371 cmp eax, RESUME_MAGIC
 372 je .magic_ok
 373 mov ecx, 0123h
 374 mov [ecx], edx
 375.magic_ok:
 376%endif
; Unwind our own pushes and report the error to the caller.
 377 popf
 378 pop ebx
 379 pop esi
 380 pop edi
 381 mov eax, VERR_VMM_LONG_JMP_ERROR
 382 leave
 383 ret
 384ENDPROC vmmR0CallRing3LongJmp
385
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette