VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-amd64.asm@ 24760

Last change on this file since 24760 was 23487, checked in by vboxsync, 15 years ago

VMM: Save and restore the [R|E]FLAGS register in the vmmR3CallRing3SetJmp/LongJmp code.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 12.6 KB
Line 
1; $Id: VMMR0JmpA-amd64.asm 23487 2009-10-01 14:57:14Z vboxsync $
2;; @file
3; VMM - R0 SetJmp / LongJmp routines for AMD64.
4;
5
6;
7; Copyright (C) 2006-2009 Sun Microsystems, Inc.
8;
9; This file is part of VirtualBox Open Source Edition (OSE), as
10; available from http://www.virtualbox.org. This file is free software;
11; you can redistribute it and/or modify it under the terms of the GNU
12; General Public License (GPL) as published by the Free Software
13; Foundation, in version 2 as it comes in the "COPYING" file of the
14; VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15; hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16;
17; Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
18; Clara, CA 95054 USA or visit http://www.sun.com if you need
19; additional information or have any questions.
20;
21
22;*******************************************************************************
23;* Header Files *
24;*******************************************************************************
25%include "VBox/asmdefs.mac"
26%include "../VMMInternal.mac"
27%include "iprt/err.mac"
28%include "VBox/param.mac"
29
30
31;*******************************************************************************
32;* Defined Constants And Macros *
33;*******************************************************************************
34%define RESUME_MAGIC 07eadf00dh
35%define STACK_PADDING 0eeeeeeeeeeeeeeeeh
36
37
 38; For vmmR0LoggerWrapper. (The other architecture(s) use(s) C99 variadic macros.)
39extern NAME(RTLogLogger)
40
41
42BEGINCODE
43
44
;;
; The setjmp variant used for calling Ring-3.
;
; This differs from the normal setjmp in that it will resume VMMRZCallRing3 if we're
; in the middle of a ring-3 call.  Another difference is the function pointer and
; argument.  This has to do with resuming code and the stack frame of the caller.
;
; @returns  VINF_SUCCESS on success or whatever is passed to vmmR0CallRing3LongJmp.
; @param    pJmpBuf msc:rcx gcc:rdi x86:[esp+0x04]  Our jmp_buf.
; @param    pfn     msc:rdx gcc:rsi x86:[esp+0x08]  The function to be called when not resuming.
; @param    pvUser1 msc:r8  gcc:rdx x86:[esp+0x0c]  The argument of that function.
; @param    pvUser2 msc:r9  gcc:rcx x86:[esp+0x10]  The argument of that function.
;
BEGINPROC vmmR0CallRing3SetJmp
GLOBALNAME vmmR0CallRing3SetJmpEx
        ;
        ; Save the registers.
        ;
        ; Register use from here on: xDX = pJmpBuf, r11 = pfn, r8/r9 = pvUser1/pvUser2.
        ;
        push    rbp
        mov     rbp, rsp
 %ifdef ASM_CALL64_MSC
        sub     rsp, 30h                        ; includes the 20h MSC shadow space for 'call r11' below.
        mov     r11, rdx                        ; pfn
        mov     rdx, rcx                        ; pJmpBuf
 %else
        sub     rsp, 10h
        mov     r8, rdx                         ; pvUser1 (save it like MSC)
        mov     r9, rcx                         ; pvUser2 (save it like MSC)
        mov     r11, rsi                        ; pfn
        mov     rdx, rdi                        ; pJmpBuf
 %endif
        mov     [xDX + VMMR0JMPBUF.rbx], rbx
 %ifdef ASM_CALL64_MSC
        mov     [xDX + VMMR0JMPBUF.rsi], rsi    ; rsi/rdi are callee saved on Windows only.
        mov     [xDX + VMMR0JMPBUF.rdi], rdi
 %endif
        mov     r10, [rbp]                      ; the caller's rbp (we pushed ours on entry).
        mov     [xDX + VMMR0JMPBUF.rbp], r10
        mov     [xDX + VMMR0JMPBUF.r12], r12
        mov     [xDX + VMMR0JMPBUF.r13], r13
        mov     [xDX + VMMR0JMPBUF.r14], r14
        mov     [xDX + VMMR0JMPBUF.r15], r15
        mov     xAX, [rbp + 8]                  ; the return address.
        mov     [xDX + VMMR0JMPBUF.rip], xAX
        lea     r10, [rbp + 10h]                ; the caller's rsp at the call site. (used in resume)
        mov     [xDX + VMMR0JMPBUF.rsp], r10
 %ifdef RT_OS_WINDOWS
        movdqa  [xDX + VMMR0JMPBUF.xmm6], xmm6  ; xmm6 thru xmm15 are callee saved on Windows.
        movdqa  [xDX + VMMR0JMPBUF.xmm7], xmm7
        movdqa  [xDX + VMMR0JMPBUF.xmm8], xmm8
        movdqa  [xDX + VMMR0JMPBUF.xmm9], xmm9
        movdqa  [xDX + VMMR0JMPBUF.xmm10], xmm10
        movdqa  [xDX + VMMR0JMPBUF.xmm11], xmm11
        movdqa  [xDX + VMMR0JMPBUF.xmm12], xmm12
        movdqa  [xDX + VMMR0JMPBUF.xmm13], xmm13
        movdqa  [xDX + VMMR0JMPBUF.xmm14], xmm14
        movdqa  [xDX + VMMR0JMPBUF.xmm15], xmm15
 %endif
        pushf                                   ; save RFLAGS as well.
        pop     xAX
        mov     [xDX + VMMR0JMPBUF.rflags], xAX

        ;
        ; If we're not in a ring-3 call, call pfn and return.
        ;
        test    byte [xDX + VMMR0JMPBUF.fInRing3Call], 1
        jnz     .resume

 %ifdef VMM_R0_SWITCH_STACK
        mov     r15, [xDX + VMMR0JMPBUF.pvSavedStack]
        test    r15, r15
        jz      .entry_error
  %ifdef VBOX_STRICT
        cmp     dword [r15], 0h                 ; first dword doubles as an in-use marker; must be clear here.
        jne     .entry_error
        ; Poison-fill the alternate stack (stack-usage checking).
        mov     rdi, r15
        mov     rcx, VMM_STACK_SIZE / 8
        mov     rax, qword 0eeeeeeeffeeeeeeeh
        rep stosq                               ; Fixed: was 'repne stosq'; REPNE is only defined for CMPS/SCAS.
        mov     [rdi - 10h], rbx
  %endif
        lea     r15, [r15 + VMM_STACK_SIZE - 40h]
        mov     rsp, r15                        ; Switch stack!
 %endif ; VMM_R0_SWITCH_STACK

        mov     r12, rdx                        ; Save pJmpBuf.
 %ifdef ASM_CALL64_MSC
        mov     rcx, r8                         ; pvUser -> arg0
        mov     rdx, r9
 %else
        mov     rdi, r8                         ; pvUser -> arg0
        mov     rsi, r9
 %endif
        call    r11
        mov     rdx, r12                        ; Restore pJmpBuf

 %ifdef VMM_R0_SWITCH_STACK
  %ifdef VBOX_STRICT
        mov     r15, [xDX + VMMR0JMPBUF.pvSavedStack]
        mov     dword [r15], 0h                 ; Reset the marker
  %endif
 %endif

        ;
        ; Return like in the long jump but clear eip, no short cuts here.
        ;
.proper_return:
%ifdef RT_OS_WINDOWS
        movdqa  xmm6, [xDX + VMMR0JMPBUF.xmm6 ]
        movdqa  xmm7, [xDX + VMMR0JMPBUF.xmm7 ]
        movdqa  xmm8, [xDX + VMMR0JMPBUF.xmm8 ]
        movdqa  xmm9, [xDX + VMMR0JMPBUF.xmm9 ]
        movdqa  xmm10, [xDX + VMMR0JMPBUF.xmm10]
        movdqa  xmm11, [xDX + VMMR0JMPBUF.xmm11]
        movdqa  xmm12, [xDX + VMMR0JMPBUF.xmm12]
        movdqa  xmm13, [xDX + VMMR0JMPBUF.xmm13]
        movdqa  xmm14, [xDX + VMMR0JMPBUF.xmm14]
        movdqa  xmm15, [xDX + VMMR0JMPBUF.xmm15]
%endif
        mov     rbx, [xDX + VMMR0JMPBUF.rbx]
%ifdef ASM_CALL64_MSC
        mov     rsi, [xDX + VMMR0JMPBUF.rsi]
        mov     rdi, [xDX + VMMR0JMPBUF.rdi]
%endif
        mov     r12, [xDX + VMMR0JMPBUF.r12]
        mov     r13, [xDX + VMMR0JMPBUF.r13]
        mov     r14, [xDX + VMMR0JMPBUF.r14]
        mov     r15, [xDX + VMMR0JMPBUF.r15]
        mov     rbp, [xDX + VMMR0JMPBUF.rbp]
        mov     xCX, [xDX + VMMR0JMPBUF.rip]
        and     qword [xDX + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
        mov     rsp, [xDX + VMMR0JMPBUF.rsp]
        push    qword [xDX + VMMR0JMPBUF.rflags]
        popf
        jmp     xCX

.entry_error:
        mov     eax, VERR_INTERNAL_ERROR_2
        jmp     .proper_return

.stack_overflow:
        mov     eax, VERR_INTERNAL_ERROR_5
        jmp     .proper_return

        ;
        ; Aborting resume.
        ; Note! No need to restore XMM registers here since we haven't touched them yet.
        ;
.bad:
        and     qword [xDX + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
        mov     rbx, [xDX + VMMR0JMPBUF.rbx]
 %ifdef ASM_CALL64_MSC
        mov     rsi, [xDX + VMMR0JMPBUF.rsi]
        mov     rdi, [xDX + VMMR0JMPBUF.rdi]
 %endif
        mov     r12, [xDX + VMMR0JMPBUF.r12]
        mov     r13, [xDX + VMMR0JMPBUF.r13]
        mov     r14, [xDX + VMMR0JMPBUF.r14]
        mov     r15, [xDX + VMMR0JMPBUF.r15]
        mov     eax, VERR_INTERNAL_ERROR_3      ; todo better return code!
        leave
        ret

        ;
        ; Resume the VMMRZCallRing3 call we previously long-jumped out of.
        ;
.resume:
        ; Sanity checks.
 %ifdef VMM_R0_SWITCH_STACK
        ;; @todo amd64/switch/resume sanity.
 %else ; !VMM_R0_SWITCH_STACK
        cmp     r10, [xDX + VMMR0JMPBUF.SpCheck] ; must be resuming at the same stack position we were armed at.
        jne     .bad

        mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
        cmp     rcx, VMM_STACK_SIZE             ; the saved stack must fit...
        ja      .bad
        test    rcx, 3                          ; ...be dword aligned...
        jnz     .bad
        mov     rdi, [xDX + VMMR0JMPBUF.rsp]
        sub     rdi, [xDX + VMMR0JMPBUF.SpResume]
        cmp     rcx, rdi                        ; ...and exactly cover rsp..SpResume.
        jne     .bad
 %endif

%ifdef VMM_R0_SWITCH_STACK
        ; Switch stack.
        mov     rsp, [xDX + VMMR0JMPBUF.SpResume]
%else
        ; Restore the stack.
        mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
        shr     ecx, 3                          ; copy in qwords.
        mov     rsi, [xDX + VMMR0JMPBUF.pvSavedStack]
        mov     rdi, [xDX + VMMR0JMPBUF.SpResume]
        mov     rsp, rdi
        rep movsq
%endif ; !VMM_R0_SWITCH_STACK
        mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 0

        ;
        ; Continue where we left off.
        ;
%ifdef VBOX_STRICT
        pop     rax                             ; magic (pushed by vmmR0CallRing3LongJmp)
        cmp     rax, RESUME_MAGIC
        je      .magic_ok
        mov     ecx, 0123h                      ; deliberately crash at a recognizable address.
        mov     [rcx], edx                      ; Fixed: was 'mov [ecx], edx'; use rcx as the LongJmp .nok path does.
.magic_ok:
%endif
%ifdef RT_OS_WINDOWS
        movdqa  xmm6, [rsp + 000h]              ; restore the XMM registers saved by vmmR0CallRing3LongJmp.
        movdqa  xmm7, [rsp + 010h]
        movdqa  xmm8, [rsp + 020h]
        movdqa  xmm9, [rsp + 030h]
        movdqa  xmm10, [rsp + 040h]
        movdqa  xmm11, [rsp + 050h]
        movdqa  xmm12, [rsp + 060h]
        movdqa  xmm13, [rsp + 070h]
        movdqa  xmm14, [rsp + 080h]
        movdqa  xmm15, [rsp + 090h]
        add     rsp, 0a0h
%endif
        popf                                    ; pop the registers pushed by vmmR0CallRing3LongJmp...
        pop     rbx
%ifdef ASM_CALL64_MSC
        pop     rsi
        pop     rdi
%endif
        pop     r12
        pop     r13
        pop     r14
        pop     r15
        pop     rbp
        xor     eax, eax                        ; VINF_SUCCESS
        ret
ENDPROC vmmR0CallRing3SetJmp
282
283
;;
; Worker for VMMRZCallRing3.
; This will save the stack and registers.
;
; @param    pJmpBuf msc:rcx gcc:rdi x86:[ebp+8]     Pointer to the jump buffer.
; @param    rc      msc:rdx gcc:rsi x86:[ebp+c]     The return code.
;
BEGINPROC vmmR0CallRing3LongJmp
        ;
        ; Save the registers on the stack.
        ; (The resume path in vmmR0CallRing3SetJmp pops these in reverse order.)
        ;
        push    rbp
        mov     rbp, rsp
        push    r15
        push    r14
        push    r13
        push    r12
%ifdef ASM_CALL64_MSC
        push    rdi                             ; rsi/rdi are callee saved on Windows only.
        push    rsi
%endif
        push    rbx
        pushf                                   ; save RFLAGS as well.
%ifdef RT_OS_WINDOWS
        sub     rsp, 0a0h                       ; xmm6 thru xmm15 are callee saved on Windows.
        movdqa  [rsp + 000h], xmm6
        movdqa  [rsp + 010h], xmm7
        movdqa  [rsp + 020h], xmm8
        movdqa  [rsp + 030h], xmm9
        movdqa  [rsp + 040h], xmm10
        movdqa  [rsp + 050h], xmm11
        movdqa  [rsp + 060h], xmm12
        movdqa  [rsp + 070h], xmm13
        movdqa  [rsp + 080h], xmm14
        movdqa  [rsp + 090h], xmm15
%endif
%ifdef VBOX_STRICT
        push    RESUME_MAGIC                    ; consumed and verified by the resume / .nok code.
%endif

        ;
        ; Normalize the parameters.
        ;
%ifdef ASM_CALL64_MSC
        mov     eax, edx                        ; rc
        mov     rdx, rcx                        ; pJmpBuf
%else
        mov     rdx, rdi                        ; pJmpBuf
        mov     eax, esi                        ; rc
%endif

        ;
        ; Is the jump buffer armed?
        ;
        cmp     qword [xDX + VMMR0JMPBUF.rip], byte 0
        je      .nok

        ;
        ; Sanity checks.
        ;
        mov     rdi, [xDX + VMMR0JMPBUF.pvSavedStack]
        test    rdi, rdi                        ; darwin may set this to 0.
        jz      .nok
        mov     [xDX + VMMR0JMPBUF.SpResume], rsp
 %ifndef VMM_R0_SWITCH_STACK
        mov     rsi, rsp                        ; source for the copy below (current stack top).
        mov     rcx, [xDX + VMMR0JMPBUF.rsp]
        sub     rcx, rsi                        ; rcx = number of bytes between here and the setjmp frame.

        ; two sanity checks on the size.
        cmp     rcx, VMM_STACK_SIZE             ; check max size.
        jnbe    .nok

        ;
        ; Copy the stack
        ;
        test    ecx, 7                          ; check alignment
        jnz     .nok
        mov     [xDX + VMMR0JMPBUF.cbSavedStack], ecx
        shr     ecx, 3                          ; copy in qwords (rsi -> rdi = pvSavedStack).
        rep movsq

 %endif ; !VMM_R0_SWITCH_STACK

        ; Save RSP & RBP to enable stack dumps
        mov     rcx, rbp
        mov     [xDX + VMMR0JMPBUF.SavedEbp], rcx
        sub     rcx, 8                          ; SavedEsp = rbp - 8 (just below the frame pointer slot).
        mov     [xDX + VMMR0JMPBUF.SavedEsp], rcx

        ; store the last pieces of info.
        mov     rcx, [xDX + VMMR0JMPBUF.rsp]
        mov     [xDX + VMMR0JMPBUF.SpCheck], rcx ; checked by the resume code in vmmR0CallRing3SetJmp.
        mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 1

        ;
        ; Do the long jump: restore the state saved by vmmR0CallRing3SetJmp and
        ; jump back to its caller with eax = rc.
        ;
%ifdef RT_OS_WINDOWS
        movdqa  xmm6, [xDX + VMMR0JMPBUF.xmm6 ]
        movdqa  xmm7, [xDX + VMMR0JMPBUF.xmm7 ]
        movdqa  xmm8, [xDX + VMMR0JMPBUF.xmm8 ]
        movdqa  xmm9, [xDX + VMMR0JMPBUF.xmm9 ]
        movdqa  xmm10, [xDX + VMMR0JMPBUF.xmm10]
        movdqa  xmm11, [xDX + VMMR0JMPBUF.xmm11]
        movdqa  xmm12, [xDX + VMMR0JMPBUF.xmm12]
        movdqa  xmm13, [xDX + VMMR0JMPBUF.xmm13]
        movdqa  xmm14, [xDX + VMMR0JMPBUF.xmm14]
        movdqa  xmm15, [xDX + VMMR0JMPBUF.xmm15]
%endif
        mov     rbx, [xDX + VMMR0JMPBUF.rbx]
%ifdef ASM_CALL64_MSC
        mov     rsi, [xDX + VMMR0JMPBUF.rsi]
        mov     rdi, [xDX + VMMR0JMPBUF.rdi]
%endif
        mov     r12, [xDX + VMMR0JMPBUF.r12]
        mov     r13, [xDX + VMMR0JMPBUF.r13]
        mov     r14, [xDX + VMMR0JMPBUF.r14]
        mov     r15, [xDX + VMMR0JMPBUF.r15]
        mov     rbp, [xDX + VMMR0JMPBUF.rbp]
        mov     rcx, [xDX + VMMR0JMPBUF.rip]
        mov     rsp, [xDX + VMMR0JMPBUF.rsp]
        push    qword [xDX + VMMR0JMPBUF.rflags]
        popf
        jmp     rcx

        ;
        ; Failure: unwind the register saves done above and return
        ; VERR_INTERNAL_ERROR_4 to our own caller.
        ;
.nok:
%ifdef VBOX_STRICT
        pop     rax                             ; magic
        cmp     rax, RESUME_MAGIC
        je      .magic_ok
        mov     ecx, 0123h                      ; deliberately crash at a recognizable address.
        mov     [rcx], edx
.magic_ok:
%endif
        mov     eax, VERR_INTERNAL_ERROR_4
%ifdef RT_OS_WINDOWS
        add     rsp, 0a0h                       ; skip XMM registers since they are unmodified.
%endif
        popf
        pop     rbx
%ifdef ASM_CALL64_MSC
        pop     rsi
        pop     rdi
%endif
        pop     r12
        pop     r13
        pop     r14
        pop     r15
        leave
        ret
ENDPROC vmmR0CallRing3LongJmp
439
440
;;
; Internal R0 logger worker: Logger wrapper.
;
; @cproto VMMR0DECL(void) vmmR0LoggerWrapper(const char *pszFormat, ...)
;
EXPORTEDNAME vmmR0LoggerWrapper
        int3                                    ; Trap stub: presumably the RTLogLogger forwarding (see extern
        int3                                    ; above) is only needed on other architectures — confirm.
        int3                                    ; Break into the debugger if this is ever reached on AMD64.
        ret
ENDPROC vmmR0LoggerWrapper
452
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette