VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-amd64.asm@ 20541

Last change on this file since 20541 was 20541, checked in by vboxsync, 16 years ago

VMM: Split up VMMR0A.asm into VMMR0JmpA-x86.asm and VMMR0JmpA-amd64.asm to simplify diff view editing (the code structure is the same; registers and a few other bits differ).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 9.1 KB
Line 
1; $Id: VMMR0JmpA-amd64.asm 20541 2009-06-13 21:31:22Z vboxsync $
2;; @file
3; VMM - R0 SetJmp / LongJmp routines for AMD64.
4;
5
6;
7; Copyright (C) 2006-2009 Sun Microsystems, Inc.
8;
9; This file is part of VirtualBox Open Source Edition (OSE), as
10; available from http://www.virtualbox.org. This file is free software;
11; you can redistribute it and/or modify it under the terms of the GNU
12; General Public License (GPL) as published by the Free Software
13; Foundation, in version 2 as it comes in the "COPYING" file of the
14; VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15; hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16;
17; Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
18; Clara, CA 95054 USA or visit http://www.sun.com if you need
19; additional information or have any questions.
20;
21
22;*******************************************************************************
23;* Header Files *
24;*******************************************************************************
25%include "VBox/asmdefs.mac"
26%include "VMMInternal.mac"
27%include "iprt/err.mac"
28
29%ifdef RT_OS_DARWIN
30 %define VMM_R0_SWITCH_STACK
31%endif
32
33
34BEGINCODE
35
36
;;
; The setjmp variant used for calling Ring-3.
;
; This differs from the normal setjmp in that it will resume VMMR0CallHost if we're
; in the middle of a ring-3 call. Another difference is the function pointer and
; argument. This has to do with resuming code and the stack frame of the caller.
;
; @returns  VINF_SUCCESS on success or whatever is passed to vmmR0CallHostLongJmp.
; @param    pJmpBuf msc:rcx gcc:rdi x86:[esp+0x04] Our jmp_buf.
; @param    pfn     msc:rdx gcc:rsi x86:[esp+0x08] The function to be called when not resuming.
; @param    pvUser1 msc:r8  gcc:rdx x86:[esp+0x0c] The argument of that function.
; @param    pvUser2 msc:r9  gcc:rcx x86:[esp+0x10] The argument of that function.
;
BEGINPROC vmmR0CallHostSetJmp
GLOBALNAME vmmR0CallHostSetJmpEx
    ;
    ; Save the registers.
    ;
    ; Normalize to MSC-like register assignments first: r11 = pfn,
    ; rdx = pJmpBuf, r8 = pvUser1, r9 = pvUser2.
    ;
    push    rbp
    mov     rbp, rsp
 %ifdef ASM_CALL64_MSC
    sub     rsp, 30h                    ; 20h shadow space + locals, keeps rsp 16-aligned.
    mov     r11, rdx                    ; pfn
    mov     rdx, rcx                    ; pJmpBuf
 %else
    sub     rsp, 10h
    mov     r8, rdx                     ; pvUser1 (save it like MSC)
    mov     r9, rcx                     ; pvUser2 (save it like MSC)
    mov     r11, rsi                    ; pfn
    mov     rdx, rdi                    ; pJmpBuf
 %endif
    mov     [rdx + VMMR0JMPBUF.rbx], rbx
 %ifdef ASM_CALL64_MSC
    mov     [rdx + VMMR0JMPBUF.rsi], rsi ; rsi/rdi are callee-saved on Win64 only.
    mov     [rdx + VMMR0JMPBUF.rdi], rdi
 %endif
    mov     r10, [rbp]                  ; Caller's rbp (we pushed ours on entry).
    mov     [rdx + VMMR0JMPBUF.rbp], r10
    mov     [rdx + VMMR0JMPBUF.r12], r12
    mov     [rdx + VMMR0JMPBUF.r13], r13
    mov     [rdx + VMMR0JMPBUF.r14], r14
    mov     [rdx + VMMR0JMPBUF.r15], r15
    mov     rax, [rbp + 8]              ; Return address = resume rip.
    mov     [rdx + VMMR0JMPBUF.rip], rax
    lea     r10, [rbp + 10h]            ; Caller's rsp after our ret (used in resume).
    mov     [rdx + VMMR0JMPBUF.rsp], r10

    ;
    ; If we're not in a ring-3 call, call pfn and return.
    ;
    test    byte [rdx + VMMR0JMPBUF.fInRing3Call], 1
    jnz     .resume

 %ifdef VMM_R0_SWITCH_STACK
    mov     r15, [rdx + VMMR0JMPBUF.pvSavedStack]
    test    r15, r15
    jz      .entry_error
  %ifdef VBOX_STRICT
    cmp     dword [r15], 0h             ; Usage marker must be clear, else re-entry.
    jne     .entry_error
    ; Poison-fill the whole 8 KB alternate stack so stack usage can be measured.
    mov     rdi, r15
    mov     rcx, 1024                   ; 1024 qwords = 8192 bytes.
    mov     rax, 00eeeeeeeffeeeeeeeh
    rep stosq                           ; REP is the defined prefix for STOS (REPNE is undefined here).
    mov     [rdi - 10h], rbx
  %endif
    lea     r15, [r15 + 8192 - 40h]     ; Top of alternate stack, 40h bytes of slack.
    mov     rsp, r15                    ; Switch stack!
 %endif ; VMM_R0_SWITCH_STACK

    mov     r12, rdx                    ; Save pJmpBuf (r12 is callee-saved across pfn).
 %ifdef ASM_CALL64_MSC
    mov     rcx, r8                     ; pvUser1 -> arg0
    mov     rdx, r9                     ; pvUser2 -> arg1
 %else
    mov     rdi, r8                     ; pvUser1 -> arg0
    mov     rsi, r9                     ; pvUser2 -> arg1
 %endif
    call    r11
    mov     rdx, r12                    ; Restore pJmpBuf

 %ifdef VMM_R0_SWITCH_STACK
  %ifdef VBOX_STRICT
    mov     r15, [rdx + VMMR0JMPBUF.pvSavedStack]
    mov     dword [r15], 0h             ; Reset the usage marker.
  %endif
 %endif

    ;
    ; Return like in the long jump but clear rip, no short cuts here.
    ;
.proper_return:
    mov     rbx, [rdx + VMMR0JMPBUF.rbx]
 %ifdef ASM_CALL64_MSC
    mov     rsi, [rdx + VMMR0JMPBUF.rsi]
    mov     rdi, [rdx + VMMR0JMPBUF.rdi]
 %endif
    mov     r12, [rdx + VMMR0JMPBUF.r12]
    mov     r13, [rdx + VMMR0JMPBUF.r13]
    mov     r14, [rdx + VMMR0JMPBUF.r14]
    mov     r15, [rdx + VMMR0JMPBUF.r15]
    mov     rbp, [rdx + VMMR0JMPBUF.rbp]
    mov     rcx, [rdx + VMMR0JMPBUF.rip]
    and     qword [rdx + VMMR0JMPBUF.rip], byte 0 ; Disarm the buffer (used for valid check).
    mov     rsp, [rdx + VMMR0JMPBUF.rsp]
    jmp     rcx

.entry_error:
    mov     eax, VERR_INTERNAL_ERROR_2
    jmp     .proper_return

    ;
    ; Resume the VMMR0CallHost call by restoring the saved state.
    ;
.resume:
 %ifdef VMM_R0_SWITCH_STACK
    ; Switch stack; the saved context lives on the alternate stack, nothing to copy.
    mov     rsp, [rdx + VMMR0JMPBUF.SpResume]
 %else ; !VMM_R0_SWITCH_STACK
    ; Sanity checks.
    cmp     r10, [rdx + VMMR0JMPBUF.SpCheck]
    je      .rspCheck_ok
.bad:
    and     qword [rdx + VMMR0JMPBUF.rip], byte 0 ; Disarm the buffer (used for valid check).
    mov     rbx, [rdx + VMMR0JMPBUF.rbx]
  %ifdef ASM_CALL64_MSC
    mov     rsi, [rdx + VMMR0JMPBUF.rsi]
    mov     rdi, [rdx + VMMR0JMPBUF.rdi]
  %endif
    mov     r12, [rdx + VMMR0JMPBUF.r12]
    mov     r13, [rdx + VMMR0JMPBUF.r13]
    mov     r14, [rdx + VMMR0JMPBUF.r14]
    mov     r15, [rdx + VMMR0JMPBUF.r15]
    mov     eax, VERR_INTERNAL_ERROR_2
    leave
    ret

.rspCheck_ok:
    mov     ecx, [rdx + VMMR0JMPBUF.cbSavedStack]
    cmp     rcx, 8192                   ; Saved stack must fit the 8 KB buffer.
    ja      .bad
    test    rcx, 7                      ; Must be qword aligned (matches the check in
    jnz     .bad                        ; vmmR0CallHostLongJmp; the copy below is in qwords).
    mov     rdi, [rdx + VMMR0JMPBUF.rsp]
    sub     rdi, [rdx + VMMR0JMPBUF.SpResume]
    cmp     rcx, rdi                    ; Size must match rsp - SpResume exactly.
    jne     .bad

    ;
    ; Restore the stack.
    ;
    mov     ecx, [rdx + VMMR0JMPBUF.cbSavedStack]
    shr     ecx, 3                      ; Bytes -> qwords.
    mov     rsi, [rdx + VMMR0JMPBUF.pvSavedStack]
    mov     rdi, [rdx + VMMR0JMPBUF.SpResume]
    mov     rsp, rdi
    rep movsq
 %endif ; !VMM_R0_SWITCH_STACK
    mov     byte [rdx + VMMR0JMPBUF.fInRing3Call], 0

    ;
    ; Continue where we left off (pop order mirrors the pushes in vmmR0CallHostLongJmp).
    ;
    popf
    pop     rbx
 %ifdef ASM_CALL64_MSC
    pop     rsi
    pop     rdi
 %endif
    pop     r12
    pop     r13
    pop     r14
    pop     r15
    pop     rbp
    xor     eax, eax                    ; VINF_SUCCESS
    ret
ENDPROC vmmR0CallHostSetJmp
214
215
;;
; Worker for VMMR0CallHost.
; This will save the stack and registers.
;
; On success it does not return here: it jumps back to the rip captured by
; vmmR0CallHostSetJmp, handing it the status code in eax. The setjmp side
; later resumes us via VMMR0JMPBUF.SpResume / the saved stack copy.
;
; @param    pJmpBuf msc:rcx gcc:rdi x86:[ebp+8] Pointer to the jump buffer.
; @param    rc      msc:rdx gcc:rsi x86:[ebp+c] The return code.
;
BEGINPROC vmmR0CallHostLongJmp
    ;
    ; Save the registers on the stack.
    ; NOTE: The push order here must mirror the pop order in the .resume
    ; path of vmmR0CallHostSetJmp, since that code pops this very frame.
    ;
    push    rbp
    mov     rbp, rsp
    push    r15
    push    r14
    push    r13
    push    r12
 %ifdef ASM_CALL64_MSC
    push    rdi                         ; rdi/rsi are callee-saved on Win64 only.
    push    rsi
 %endif
    push    rbx
    pushf                               ; Flags are restored by the resume path's popf.

    ;
    ; Normalize the parameters: rdx = pJmpBuf, eax = rc.
    ;
 %ifdef ASM_CALL64_MSC
    mov     eax, edx                    ; rc (32-bit VBox status code)
    mov     rdx, rcx                    ; pJmpBuf
 %else
    mov     rdx, rdi                    ; pJmpBuf
    mov     eax, esi                    ; rc (32-bit VBox status code)
 %endif

    ;
    ; Is the jump buffer armed?  (rip is cleared when the buffer is disarmed.)
    ;
    cmp     qword [rdx + VMMR0JMPBUF.rip], byte 0
    je      .nok

    ;
    ; Sanity checks.
    ;
    mov     rdi, [rdx + VMMR0JMPBUF.pvSavedStack]
    test    rdi, rdi                    ; darwin may set this to 0.
    jz      .nok
    mov     [rdx + VMMR0JMPBUF.SpResume], rsp ; Where the resume path switches/copies to.
 %ifndef VMM_R0_SWITCH_STACK
    ; Without a dedicated switch stack we must copy the live stack range
    ; [rsp, VMMR0JMPBUF.rsp) into the save buffer (rdi = pvSavedStack).
    mov     rsi, rsp
    mov     rcx, [rdx + VMMR0JMPBUF.rsp]
    sub     rcx, rsi                    ; rcx = number of bytes to save.

    ; two sanity checks on the size.
    cmp     rcx, 8192                   ; check max size.
    jnbe    .nok

    ;
    ; Copy the stack
    ;
    test    ecx, 7                      ; check alignment (must be a whole number of qwords)
    jnz     .nok
    mov     [rdx + VMMR0JMPBUF.cbSavedStack], ecx
    shr     ecx, 3                      ; Bytes -> qwords for movsq.
    rep movsq

 %endif ; !VMM_R0_SWITCH_STACK

    ; Save RSP & RBP to enable stack dumps
    ; (fields keep their x86 "Ebp"/"Esp" names; presumably SavedEsp is meant to
    ;  approximate rsp just after the push rbp above - confirm against users.)
    mov     rcx, rbp
    mov     [rdx + VMMR0JMPBUF.SavedEbp], rcx
    sub     rcx, 8
    mov     [rdx + VMMR0JMPBUF.SavedEsp], rcx

    ; store the last pieces of info.
    mov     rcx, [rdx + VMMR0JMPBUF.rsp]
    mov     [rdx + VMMR0JMPBUF.SpCheck], rcx ; Verified by setjmp's resume path.
    mov     byte [rdx + VMMR0JMPBUF.fInRing3Call], 1

    ;
    ; Do the long jump: restore the setjmp caller's registers and jump to its rip.
    ;
    mov     rbx, [rdx + VMMR0JMPBUF.rbx]
 %ifdef ASM_CALL64_MSC
    mov     rsi, [rdx + VMMR0JMPBUF.rsi]
    mov     rdi, [rdx + VMMR0JMPBUF.rdi]
 %endif
    mov     r12, [rdx + VMMR0JMPBUF.r12]
    mov     r13, [rdx + VMMR0JMPBUF.r13]
    mov     r14, [rdx + VMMR0JMPBUF.r14]
    mov     r15, [rdx + VMMR0JMPBUF.r15]
    mov     rbp, [rdx + VMMR0JMPBUF.rbp]
    mov     rcx, [rdx + VMMR0JMPBUF.rip]
    mov     rsp, [rdx + VMMR0JMPBUF.rsp]
    jmp     rcx

    ;
    ; Failure: undo the register pushes and return an error to our caller.
    ;
.nok:
    mov     eax, VERR_INTERNAL_ERROR_4
    popf
    pop     rbx
 %ifdef ASM_CALL64_MSC
    pop     rsi
    pop     rdi
 %endif
    pop     r12
    pop     r13
    pop     r14
    pop     r15
    leave
    ret
ENDPROC vmmR0CallHostLongJmp
330
331
;;
; Internal R0 logger worker: Logger wrapper.
;
; Stub implementation for AMD64: traps straight into the debugger instead of
; logging (the int3s are never expected to be reached in normal operation).
;
; @cproto VMMR0DECL(void) vmmR0LoggerWrapper(const char *pszFormat, ...)
;
EXPORTEDNAME vmmR0LoggerWrapper
    int3                                ; Not implemented - break if ever called.
    int3
    int3
    ret
ENDPROC vmmR0LoggerWrapper
343
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette