Changeset 20543 in vbox
- Timestamp: Jun 13, 2009, 10:09:32 PM (16 years ago)
- svn:sync-xref-src-repo-rev: 48546
- Location: trunk/src/VBox/VMM
- Files: 5 edited
trunk/src/VBox/VMM/VMMInternal.mac
--- VMMInternal.mac (r19577)
+++ VMMInternal.mac (r20543)
@@ -5 +5 @@
 ;
-; Copyright (C) 2006-2007 Sun Microsystems, Inc.
+; Copyright (C) 2006-2009 Sun Microsystems, Inc.
 ;
 ; This file is part of VirtualBox Open Source Edition (OSE), as
@@ -21 +21 @@

 %include "VBox/asmdefs.mac"
+
+;
+; Determine the default stack switching unless specified explicitly.
+;
+%ifndef VMM_R0_SWITCH_STACK
+ %ifndef VMM_R0_NO_SWITCH_STACK
+  %ifdef RT_OS_DARWIN
+   %define VMM_R0_SWITCH_STACK
+  %endif
+ %endif
+%endif
+

 struc VMMR0JMPBUF
@@ -45 +57 @@
     ; traditional jmp_buf
     .rbx resq 1
-%ifdef RT_OS_WINDOWS
+ %ifdef RT_OS_WINDOWS
     .rsi resq 1
     .rdi resq 1
-%endif
+ %endif
     .rbp resq 1
     .r12 resq 1
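The block added here gives VMM_R0_SWITCH_STACK a darwin-only default while still letting a build force either mode explicitly. For readers more at home in C, a sketch of the equivalent selection logic (same macro names as the diff; purely illustrative):

    /* C-preprocessor analogue of the new %ifndef block above: default to
     * stack switching on darwin unless the build already picked a mode. */
    #if !defined(VMM_R0_SWITCH_STACK) && !defined(VMM_R0_NO_SWITCH_STACK)
    # ifdef RT_OS_DARWIN
    #  define VMM_R0_SWITCH_STACK
    # endif
    #endif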
trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-amd64.asm
--- VMMR0JmpA-amd64.asm (r20541)
+++ VMMR0JmpA-amd64.asm (r20543)
@@ -24 +24 @@
 ;*******************************************************************************
 %include "VBox/asmdefs.mac"
-%include "VMMInternal.mac"
+%include "../VMMInternal.mac"
 %include "iprt/err.mac"
-
-%ifdef RT_OS_DARWIN
-%define VMM_R0_SWITCH_STACK
-%endif
+%include "VBox/param.mac"
@@ -66 +63 @@
     mov     rdx, rdi                    ; pJmpBuf
 %endif
-    mov     [rdx + VMMR0JMPBUF.rbx], rbx
-%ifdef ASM_CALL64_MSC
-    mov     [rdx + VMMR0JMPBUF.rsi], rsi
-    mov     [rdx + VMMR0JMPBUF.rdi], rdi
+    mov     [xDX + VMMR0JMPBUF.rbx], rbx
+ %ifdef ASM_CALL64_MSC
+    mov     [xDX + VMMR0JMPBUF.rsi], rsi
+    mov     [xDX + VMMR0JMPBUF.rdi], rdi
 %endif
     mov     r10, [rbp]
-    mov     [rdx + VMMR0JMPBUF.rbp], r10
-    mov     [rdx + VMMR0JMPBUF.r12], r12
-    mov     [rdx + VMMR0JMPBUF.r13], r13
-    mov     [rdx + VMMR0JMPBUF.r14], r14
-    mov     [rdx + VMMR0JMPBUF.r15], r15
-    mov     rax, [rbp + 8]
-    mov     [rdx + VMMR0JMPBUF.rip], rax
+    mov     [xDX + VMMR0JMPBUF.rbp], r10
+    mov     [xDX + VMMR0JMPBUF.r12], r12
+    mov     [xDX + VMMR0JMPBUF.r13], r13
+    mov     [xDX + VMMR0JMPBUF.r14], r14
+    mov     [xDX + VMMR0JMPBUF.r15], r15
+    mov     xAX, [rbp + 8]
+    mov     [xDX + VMMR0JMPBUF.rip], xAX
     lea     r10, [rbp + 10h]            ; (used in resume)
-    mov     [rdx + VMMR0JMPBUF.rsp], r10
+    mov     [xDX + VMMR0JMPBUF.rsp], r10

     ;
     ; If we're not in a ring-3 call, call pfn and return.
     ;
-    test    byte [rdx + VMMR0JMPBUF.fInRing3Call], 1
+    test    byte [xDX + VMMR0JMPBUF.fInRing3Call], 1
     jnz     .resume

 %ifdef VMM_R0_SWITCH_STACK
-    mov     r15, [rdx + VMMR0JMPBUF.pvSavedStack]
+    mov     r15, [xDX + VMMR0JMPBUF.pvSavedStack]
     test    r15, r15
     jz      .entry_error
@@ -96 +93 @@
     jne     .entry_error
     mov     rdi, r15
-    mov     rcx, 1024
-    mov     rax, 00eeeeeeeffeeeeeeeh
+    mov     rcx, VMM_STACK_SIZE / 8
+    mov     rax, 00eeeeeeeeffeeeeeeeh
     repne stosq
     mov     [rdi - 10h], rbx
 %endif
-    lea     r15, [r15 + 8192 - 40h]
+    lea     r15, [r15 + VMM_STACK_SIZE - 40h]
     mov     rsp, r15                    ; Switch stack!
 %endif ; VMM_R0_SWITCH_STACK
@@ -118 +115 @@
 %ifdef VMM_R0_SWITCH_STACK
  %ifdef VBOX_STRICT
-    mov     r15, [rdx + VMMR0JMPBUF.pvSavedStack]
+    mov     r15, [xDX + VMMR0JMPBUF.pvSavedStack]
     mov     dword [r15], 0h             ; Reset the marker
@@ -127 +124 @@
     ;
 .proper_return:
-    mov     rbx, [rdx + VMMR0JMPBUF.rbx]
-%ifdef ASM_CALL64_MSC
-    mov     rsi, [rdx + VMMR0JMPBUF.rsi]
-    mov     rdi, [rdx + VMMR0JMPBUF.rdi]
-%endif
-    mov     r12, [rdx + VMMR0JMPBUF.r12]
-    mov     r13, [rdx + VMMR0JMPBUF.r13]
-    mov     r14, [rdx + VMMR0JMPBUF.r14]
-    mov     r15, [rdx + VMMR0JMPBUF.r15]
-    mov     rbp, [rdx + VMMR0JMPBUF.rbp]
-    mov     rcx, [rdx + VMMR0JMPBUF.rip]
-    and     qword [rdx + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
-    mov     rsp, [rdx + VMMR0JMPBUF.rsp]
-    jmp     rcx
+    mov     rbx, [xDX + VMMR0JMPBUF.rbx]
+ %ifdef ASM_CALL64_MSC
+    mov     rsi, [xDX + VMMR0JMPBUF.rsi]
+    mov     rdi, [xDX + VMMR0JMPBUF.rdi]
+ %endif
+    mov     r12, [xDX + VMMR0JMPBUF.r12]
+    mov     r13, [xDX + VMMR0JMPBUF.r13]
+    mov     r14, [xDX + VMMR0JMPBUF.r14]
+    mov     r15, [xDX + VMMR0JMPBUF.r15]
+    mov     rbp, [xDX + VMMR0JMPBUF.rbp]
+    mov     xCX, [xDX + VMMR0JMPBUF.rip]
+    and     qword [xDX + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
+    mov     rsp, [xDX + VMMR0JMPBUF.rsp]
+    jmp     xCX

 .entry_error:
@@ -146 +143 @@
     jmp     .proper_return

-;
-; Resume VMMR0CallHost the call.
-;
-.resume:
-%ifdef VMM_R0_SWITCH_STACK
-    ; Switch stack.
-    mov     rsp, [rdx + VMMR0JMPBUF.SpResume]
-%else ; !VMM_R0_SWITCH_STACK
-    ; Sanity checks.
-    cmp     r10, [rdx + VMMR0JMPBUF.SpCheck]
-    je      .rspCheck_ok
+.stack_overflow:
+    mov     eax, VERR_INTERNAL_ERROR_5
+    jmp     .proper_return
+
+;
+; Aborting resume.
+;
 .bad:
-    and     qword [rdx + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
-    mov     rbx, [rdx + VMMR0JMPBUF.rbx]
-
-    mov     rsi, [rdx + VMMR0JMPBUF.rsi]
-    mov     rdi, [rdx + VMMR0JMPBUF.rdi]
-
-    mov     r12, [rdx + VMMR0JMPBUF.r12]
-    mov     r13, [rdx + VMMR0JMPBUF.r13]
-    mov     r14, [rdx + VMMR0JMPBUF.r14]
-    mov     r15, [rdx + VMMR0JMPBUF.r15]
-    mov     eax, VERR_INTERNAL_ERROR_2
+    and     qword [xDX + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
+    mov     rbx, [xDX + VMMR0JMPBUF.rbx]
+ %ifdef ASM_CALL64_MSC
+    mov     rsi, [xDX + VMMR0JMPBUF.rsi]
+    mov     rdi, [xDX + VMMR0JMPBUF.rdi]
+ %endif
+    mov     r12, [xDX + VMMR0JMPBUF.r12]
+    mov     r13, [xDX + VMMR0JMPBUF.r13]
+    mov     r14, [xDX + VMMR0JMPBUF.r14]
+    mov     r15, [xDX + VMMR0JMPBUF.r15]
+    mov     eax, VERR_INTERNAL_ERROR_3  ; todo better return code!
     leave
     ret

-.rspCheck_ok:
-    mov     ecx, [rdx + VMMR0JMPBUF.cbSavedStack]
-    cmp     rcx, 8192
+;
+; Resume VMMR0CallHost the call.
+;
+.resume:
+    ; Sanity checks.
+%ifdef VMM_R0_SWITCH_STACK
+    ;; @todo amd64/switch/resume sanity.
+%else ; !VMM_R0_SWITCH_STACK
+    cmp     r10, [xDX + VMMR0JMPBUF.SpCheck]
+    jne     .bad
+
+    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
+    cmp     rcx, VMM_STACK_SIZE
     ja      .bad
     test    rcx, 3
     jnz     .bad
-    mov     rdi, [rdx + VMMR0JMPBUF.rsp]
-    sub     rdi, [rdx + VMMR0JMPBUF.SpResume]
+    mov     rdi, [xDX + VMMR0JMPBUF.rsp]
+    sub     rdi, [xDX + VMMR0JMPBUF.SpResume]
     cmp     rcx, rdi
     jne     .bad
-
-    ;
+%endif
+
+%ifdef VMM_R0_SWITCH_STACK
+    ; Switch stack.
+    mov     rsp, [xDX + VMMR0JMPBUF.SpResume]
+%else
     ; Restore the stack.
-    ;
-    mov     ecx, [rdx + VMMR0JMPBUF.cbSavedStack]
+    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
     shr     ecx, 3
-    mov     rsi, [rdx + VMMR0JMPBUF.pvSavedStack]
-    mov     rdi, [rdx + VMMR0JMPBUF.SpResume]
+    mov     rsi, [xDX + VMMR0JMPBUF.pvSavedStack]
+    mov     rdi, [xDX + VMMR0JMPBUF.SpResume]
     mov     rsp, rdi
     rep movsq
-
-    mov     byte [rdx + VMMR0JMPBUF.fInRing3Call], 0
+%endif ; !VMM_R0_SWITCH_STACK
+    mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 0

     ;
     ; Continue where we left off.
     ;
+%ifdef VBOX_STRICT
+    pop     eax                         ; magic
+    cmp     eax, 0f00dbed0h
+    je      .magic_ok
+    mov     ecx, 0123h
+    mov     [ecx], edx
+.magic_ok:
+%endif
     popf
     pop     rbx
@@ -237 +251 @@
     push    rbx
     pushf
+%ifdef VBOX_STRICT
+    push    dword 0f00dbed0h
+%endif
@@ -252 +269 @@
     ; Is the jump buffer armed?
     ;
-    cmp     qword [rdx + VMMR0JMPBUF.rip], byte 0
+    cmp     qword [xDX + VMMR0JMPBUF.rip], byte 0
     je      .nok
@@ -258 +275 @@
     ; Sanity checks.
     ;
-    mov     rdi, [rdx + VMMR0JMPBUF.pvSavedStack]
+    mov     rdi, [xDX + VMMR0JMPBUF.pvSavedStack]
     test    rdi, rdi                    ; darwin may set this to 0.
     jz      .nok
-    mov     [rdx + VMMR0JMPBUF.SpResume], rsp
+    mov     [xDX + VMMR0JMPBUF.SpResume], rsp
 %ifndef VMM_R0_SWITCH_STACK
     mov     rsi, rsp
-    mov     rcx, [rdx + VMMR0JMPBUF.rsp]
+    mov     rcx, [xDX + VMMR0JMPBUF.rsp]
     sub     rcx, rsi

     ; two sanity checks on the size.
-    cmp     rcx, 8192                   ; check max size.
+    cmp     rcx, VMM_STACK_SIZE         ; check max size.
     jnbe    .nok
@@ -276 +293 @@
     test    ecx, 7                      ; check alignment
     jnz     .nok
-    mov     [rdx + VMMR0JMPBUF.cbSavedStack], ecx
+    mov     [xDX + VMMR0JMPBUF.cbSavedStack], ecx
     shr     ecx, 3
     rep movsq
@@ -284 +301 @@
     ; Save RSP & RBP to enable stack dumps
     mov     rcx, rbp
-    mov     [rdx + VMMR0JMPBUF.SavedEbp], rcx
+    mov     [xDX + VMMR0JMPBUF.SavedEbp], rcx
     sub     rcx, 8
-    mov     [rdx + VMMR0JMPBUF.SavedEsp], rcx
+    mov     [xDX + VMMR0JMPBUF.SavedEsp], rcx

     ; store the last pieces of info.
-    mov     rcx, [rdx + VMMR0JMPBUF.rsp]
-    mov     [rdx + VMMR0JMPBUF.SpCheck], rcx
-    mov     byte [rdx + VMMR0JMPBUF.fInRing3Call], 1
+    mov     rcx, [xDX + VMMR0JMPBUF.rsp]
+    mov     [xDX + VMMR0JMPBUF.SpCheck], rcx
+    mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 1

     ;
     ; Do the long jump.
     ;
-    mov     rbx, [rdx + VMMR0JMPBUF.rbx]
-%ifdef ASM_CALL64_MSC
-    mov     rsi, [rdx + VMMR0JMPBUF.rsi]
-    mov     rdi, [rdx + VMMR0JMPBUF.rdi]
-%endif
-    mov     r12, [rdx + VMMR0JMPBUF.r12]
-    mov     r13, [rdx + VMMR0JMPBUF.r13]
-    mov     r14, [rdx + VMMR0JMPBUF.r14]
-    mov     r15, [rdx + VMMR0JMPBUF.r15]
-    mov     rbp, [rdx + VMMR0JMPBUF.rbp]
-    mov     rcx, [rdx + VMMR0JMPBUF.rip]
-    mov     rsp, [rdx + VMMR0JMPBUF.rsp]
+    mov     rbx, [xDX + VMMR0JMPBUF.rbx]
+ %ifdef ASM_CALL64_MSC
+    mov     rsi, [xDX + VMMR0JMPBUF.rsi]
+    mov     rdi, [xDX + VMMR0JMPBUF.rdi]
+ %endif
+    mov     r12, [xDX + VMMR0JMPBUF.r12]
+    mov     r13, [xDX + VMMR0JMPBUF.r13]
+    mov     r14, [xDX + VMMR0JMPBUF.r14]
+    mov     r15, [xDX + VMMR0JMPBUF.r15]
+    mov     rbp, [xDX + VMMR0JMPBUF.rbp]
+    mov     rcx, [xDX + VMMR0JMPBUF.rip]
+    mov     rsp, [xDX + VMMR0JMPBUF.rsp]
     jmp     rcx
@@ -314 +331 @@
     ;
 .nok:
+%ifdef VBOX_STRICT
+    pop     rax                         ; magic
+    cmp     eax, 0f00dbed0h
+    je      .magic_ok
+    mov     ecx, 0123h
+    mov     [rcx], edx
+.magic_ok:
+%endif
     mov     eax, VERR_INTERNAL_ERROR_4
     popf
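Beyond the cosmetic rdx-to-xDX renaming, the substantive amd64 changes are: the stack size now comes from VBox/param.mac as VMM_STACK_SIZE instead of a hardcoded 8192, the .bad path only restores rsi/rdi under ASM_CALL64_MSC (previously it loaded them unconditionally), and strict builds push a magic dword before the long jump and verify it on resume. The no-switch-stack resume checks can be restated in C roughly as follows; the struct and function names are hypothetical stand-ins, and the VMM_STACK_SIZE value is assumed to match the old literal:

    #include <stdint.h>

    #define VMM_STACK_SIZE 8192 /* assumption: the param.mac value replacing the literal */

    typedef struct SKETCHJMPBUF /* hypothetical stand-in for the VMMR0JMPBUF fields used here */
    {
        uintptr_t rsp;          /* caller rsp recorded by SetJmp */
        uintptr_t SpCheck;      /* copy of rsp, cross-checked on resume */
        uintptr_t SpResume;     /* rsp at the time of the long jump */
        uint32_t  cbSavedStack; /* bytes copied out to pvSavedStack */
    } SKETCHJMPBUF;

    /* Mirrors the cmp/test chain ahead of .bad: the saved block must fit,
     * be dword-aligned, and exactly fill the gap [SpResume, rsp). */
    static int sketchResumeOk(const SKETCHJMPBUF *pJmp, uintptr_t rspEntry)
    {
        if (rspEntry != pJmp->SpCheck)
            return 0;
        if (pJmp->cbSavedStack > VMM_STACK_SIZE)
            return 0;
        if (pJmp->cbSavedStack & 3)
            return 0;
        if (pJmp->rsp - pJmp->SpResume != pJmp->cbSavedStack)
            return 0;
        return 1;
    }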
trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-x86.asm
--- VMMR0JmpA-x86.asm (r20541)
+++ VMMR0JmpA-x86.asm (r20543)
@@ -24 +24 @@
 ;*******************************************************************************
 %include "VBox/asmdefs.mac"
-%include "VMMInternal.mac"
+%include "../VMMInternal.mac"
 %include "iprt/err.mac"
+%include "VBox/param.mac"


 ; For vmmR0LoggerWrapper. (The other architecture(s) use(s) C99 variadict macros.)
 extern NAME(RTLogLogger)
-
-%ifdef RT_OS_DARWIN
-%define VMM_R0_SWITCH_STACK
-%endif
@@ -59 +56 @@
     mov     edx, [esp + 4h]             ; pJmpBuf
-    mov     [edx + VMMR0JMPBUF.ebx], ebx
-    mov     [edx + VMMR0JMPBUF.esi], esi
-    mov     [edx + VMMR0JMPBUF.edi], edi
-    mov     [edx + VMMR0JMPBUF.ebp], ebp
-    mov     eax, [esp]
-    mov     [edx + VMMR0JMPBUF.eip], eax
+    mov     [xDX + VMMR0JMPBUF.ebx], ebx
+    mov     [xDX + VMMR0JMPBUF.esi], esi
+    mov     [xDX + VMMR0JMPBUF.edi], edi
+    mov     [xDX + VMMR0JMPBUF.ebp], ebp
+    mov     xAX, [esp]
+    mov     [xDX + VMMR0JMPBUF.eip], xAX
     lea     ecx, [esp + 4]              ; (used in resume)
-    mov     [edx + VMMR0JMPBUF.esp], ecx
+    mov     [xDX + VMMR0JMPBUF.esp], ecx

     ;
     ; If we're not in a ring-3 call, call pfn and return.
     ;
-    test    byte [edx + VMMR0JMPBUF.fInRing3Call], 1
+    test    byte [xDX + VMMR0JMPBUF.fInRing3Call], 1
     jnz     .resume
@@ -83 +80 @@
     mov     edx, esi
     mov     edi, esi
-    mov     ecx, 2048
+    mov     ecx, VMM_STACK_SIZE / 4
     mov     eax, 0eeeeeeeeh
     repne stosd
 %endif
-    lea     esi, [esi + 8192 - 32]
+    lea     esi, [esi + VMM_STACK_SIZE - 32]
     mov     [esi + 1ch], dword 0deadbeefh ; Marker 1.
     mov     [esi + 18h], ebx            ; Save pJmpBuf pointer.
@@ -137 +134 @@
 .proper_return:
-    mov     ebx, [edx + VMMR0JMPBUF.ebx]
-    mov     esi, [edx + VMMR0JMPBUF.esi]
-    mov     edi, [edx + VMMR0JMPBUF.edi]
-    mov     ebp, [edx + VMMR0JMPBUF.ebp]
-    mov     ecx, [edx + VMMR0JMPBUF.eip]
-    and     dword [edx + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
-    mov     esp, [edx + VMMR0JMPBUF.esp]
-    jmp     ecx
+    mov     ebx, [xDX + VMMR0JMPBUF.ebx]
+    mov     esi, [xDX + VMMR0JMPBUF.esi]
+    mov     edi, [xDX + VMMR0JMPBUF.edi]
+    mov     ebp, [xDX + VMMR0JMPBUF.ebp]
+    mov     xCX, [xDX + VMMR0JMPBUF.eip]
+    and     dword [xDX + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
+    mov     esp, [xDX + VMMR0JMPBUF.esp]
+    jmp     xCX

 .entry_error:
@@ -158 +155 @@
 .bad:
-    and     dword [edx + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
-    mov     edi, [edx + VMMR0JMPBUF.edi]
-    mov     esi, [edx + VMMR0JMPBUF.esi]
-    mov     ebx, [edx + VMMR0JMPBUF.ebx]
+    and     dword [xDX + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
+    mov     edi, [xDX + VMMR0JMPBUF.edi]
+    mov     esi, [xDX + VMMR0JMPBUF.esi]
+    mov     ebx, [xDX + VMMR0JMPBUF.ebx]
     mov     eax, VERR_INTERNAL_ERROR_3 ; todo better return code!
     ret
@@ -169 +166 @@
     ; Sanity checks.
 %ifdef VMM_R0_SWITCH_STACK
-    mov     eax, [edx + VMMR0JMPBUF.pvSavedStack]
+    mov     eax, [xDX + VMMR0JMPBUF.pvSavedStack]
 %ifdef RT_STRICT
     cmp     dword [eax], 0eeeeeeeeh
 %endif
-    lea     eax, [eax + 8192 - 32]
+    lea     eax, [eax + VMM_STACK_SIZE - 32]
     cmp     dword [eax + 1ch], 0deadbeefh ; Marker 1.
     jne     .bad
@@ -185 +182 @@
 %endif
 %else ; !VMM_R0_SWITCH_STACK
-    cmp     ecx, [edx + VMMR0JMPBUF.SpCheck]
+    cmp     ecx, [xDX + VMMR0JMPBUF.SpCheck]
     jne     .bad
 .espCheck_ok:
-    mov     ecx, [edx + VMMR0JMPBUF.cbSavedStack]
-    cmp     ecx, 8192
+    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
+    cmp     ecx, VMM_STACK_SIZE
     ja      .bad
     test    ecx, 3
     jnz     .bad
-    mov     edi, [edx + VMMR0JMPBUF.esp]
-    sub     edi, [edx + VMMR0JMPBUF.SpResume]
+    mov     edi, [xDX + VMMR0JMPBUF.esp]
+    sub     edi, [xDX + VMMR0JMPBUF.SpResume]
     cmp     ecx, edi
     jne     .bad
@@ -201 +198 @@
 %ifdef VMM_R0_SWITCH_STACK
     ; Switch stack.
-    mov     esp, [edx + VMMR0JMPBUF.SpResume]
+    mov     esp, [xDX + VMMR0JMPBUF.SpResume]
 %else
     ; Restore the stack.
-    mov     ecx, [edx + VMMR0JMPBUF.cbSavedStack]
+    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
     shr     ecx, 2
-    mov     esi, [edx + VMMR0JMPBUF.pvSavedStack]
-    mov     edi, [edx + VMMR0JMPBUF.SpResume]
+    mov     esi, [xDX + VMMR0JMPBUF.pvSavedStack]
+    mov     edi, [xDX + VMMR0JMPBUF.SpResume]
     mov     esp, edi
     rep movsd
 %endif ; !VMM_R0_SWITCH_STACK
-    mov     byte [edx + VMMR0JMPBUF.fInRing3Call], 0
-
+    mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 0
+
+    ;
     ; Continue where we left off.
+    ;
 %ifdef VBOX_STRICT
     pop     eax                         ; magic
@@ -264 +263 @@
     ; Is the jump buffer armed?
     ;
-    cmp     dword [edx + VMMR0JMPBUF.eip], byte 0
+    cmp     dword [xDX + VMMR0JMPBUF.eip], byte 0
     je      .nok
@@ -270 +269 @@
     ; Sanity checks.
     ;
-    mov     edi, [edx + VMMR0JMPBUF.pvSavedStack]
+    mov     edi, [xDX + VMMR0JMPBUF.pvSavedStack]
     test    edi, edi                    ; darwin may set this to 0.
     jz      .nok
-    mov     [edx + VMMR0JMPBUF.SpResume], esp
+    mov     [xDX + VMMR0JMPBUF.SpResume], esp
 %ifndef VMM_R0_SWITCH_STACK
     mov     esi, esp
-    mov     ecx, [edx + VMMR0JMPBUF.esp]
+    mov     ecx, [xDX + VMMR0JMPBUF.esp]
     sub     ecx, esi

     ; two sanity checks on the size.
-    cmp     ecx, 8192                   ; check max size.
+    cmp     ecx, VMM_STACK_SIZE         ; check max size.
     jnbe    .nok
@@ -286 +285 @@
     test    ecx, 3                      ; check alignment
     jnz     .nok
-    mov     [edx + VMMR0JMPBUF.cbSavedStack], ecx
+    mov     [xDX + VMMR0JMPBUF.cbSavedStack], ecx
     shr     ecx, 2
     rep movsd
@@ -293 +292 @@
     ; Save ESP & EBP to enable stack dumps
     mov     ecx, ebp
-    mov     [edx + VMMR0JMPBUF.SavedEbp], ecx
+    mov     [xDX + VMMR0JMPBUF.SavedEbp], ecx
     sub     ecx, 4
-    mov     [edx + VMMR0JMPBUF.SavedEsp], ecx
+    mov     [xDX + VMMR0JMPBUF.SavedEsp], ecx

     ; store the last pieces of info.
-    mov     ecx, [edx + VMMR0JMPBUF.esp]
-    mov     [edx + VMMR0JMPBUF.SpCheck], ecx
-    mov     byte [edx + VMMR0JMPBUF.fInRing3Call], 1
+    mov     ecx, [xDX + VMMR0JMPBUF.esp]
+    mov     [xDX + VMMR0JMPBUF.SpCheck], ecx
+    mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 1

     ;
     ; Do the long jump.
     ;
-    mov     ebx, [edx + VMMR0JMPBUF.ebx]
-    mov     esi, [edx + VMMR0JMPBUF.esi]
-    mov     edi, [edx + VMMR0JMPBUF.edi]
-    mov     ebp, [edx + VMMR0JMPBUF.ebp]
-    mov     ecx, [edx + VMMR0JMPBUF.eip]
-    mov     esp, [edx + VMMR0JMPBUF.esp]
+    mov     ebx, [xDX + VMMR0JMPBUF.ebx]
+    mov     esi, [xDX + VMMR0JMPBUF.esi]
+    mov     edi, [xDX + VMMR0JMPBUF.edi]
+    mov     ebp, [xDX + VMMR0JMPBUF.ebp]
+    mov     ecx, [xDX + VMMR0JMPBUF.eip]
+    mov     esp, [xDX + VMMR0JMPBUF.esp]
     jmp     ecx
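The x86 file gets the same xDX/VMM_STACK_SIZE treatment; it already carried the VBOX_STRICT stack cookie that the amd64 file gains in this changeset. The cookie works as follows: vmmR0CallHostLongJmp pushes 0f00dbed0h on top of the registers it saves, and when vmmR0CallHostSetJmp later resumes the saved stack it pops the value back and deliberately faults on mismatch (the asm writes through address 0123h) rather than run on a corrupt stack. A C sketch of that check, with illustrative names:

    #include <stdint.h>

    #define SKETCH_STACK_MAGIC 0xf00dbed0U  /* the cookie value from the diff */

    static void sketchPushMagic(uint32_t **ppTos)        /* long-jump side */
    {
        *--(*ppTos) = SKETCH_STACK_MAGIC;
    }

    static void sketchPopAndCheckMagic(uint32_t **ppTos) /* resume side */
    {
        uint32_t uMagic = *(*ppTos)++;
        if (uMagic != SKETCH_STACK_MAGIC)
            /* intentional fault, like the asm's mov [ecx], edx with ecx = 0123h */
            *(volatile uint32_t *)(uintptr_t)0x123 = uMagic;
    }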
trunk/src/VBox/VMM/testcase/Makefile.kmk
--- testcase/Makefile.kmk (r20541)
+++ testcase/Makefile.kmk (r20543)
@@ -31 +31 @@
 endif
 ifdef VBOX_WITH_TESTCASES
-PROGRAMS += tstCFGM tstSSM tstMMHyperHeap tstVMREQ tstMicro tstCompiler tstVMMR0CallHost-1
+PROGRAMS += tstCFGM tstSSM tstMMHyperHeap tstVMREQ tstMicro tstCompiler tstVMMR0CallHost-1 tstVMMR0CallHost-2
 ifneq ($(KBUILD_TARGET),l4)
 PROGRAMS += tstAnimate
@@ -158 +158 @@
 tstVMMFork_LIBS = $(LIB_VMM) $(LIB_REM) $(LIB_RUNTIME)

-tstVMMR0CallHost-1_TEMPLATE = VBOXR3EXE
-tstVMMR0CallHost-1_INCS = $(VBOX_PATH_VMM_SRC)
+tstVMMR0CallHost-1_TEMPLATE = VBOXR3TSTEXE
+tstVMMR0CallHost-1_DEFS = VMM_R0_NO_SWITCH_STACK
+tstVMMR0CallHost-1_INCS = $(VBOX_PATH_VMM_SRC)
 tstVMMR0CallHost-1_SOURCES = \
 	tstVMMR0CallHost-1.cpp
@@ -166 +167 @@
 tstVMMR0CallHost-1_SOURCES.x86 = \
 	$(VBOX_PATH_VMM_SRC)/VMMR0/VMMR0JmpA-x86.asm
-tstVMMR0CallHost-1_LIBS = $(LIB_VMM) $(LIB_REM) $(LIB_RUNTIME)
+
+tstVMMR0CallHost-2_EXTENDS = tstVMMR0CallHost-1
+tstVMMR0CallHost-2_DEFS = VMM_R0_SWITCH_STACK

 tstVMREQ_TEMPLATE = VBOXR3EXE
trunk/src/VBox/VMM/testcase/tstVMMR0CallHost-1.cpp
--- testcase/tstVMMR0CallHost-1.cpp (r18848)
+++ testcase/tstVMMR0CallHost-1.cpp (r20543)
@@ -39 +39 @@
 * Defined Constants And Macros *
 *******************************************************************************/
-#ifdef RT_OS_DARWIN
-# define VMM_R0_SWITCH_STACK
+#if !defined(VMM_R0_SWITCH_STACK) && !defined(VMM_R0_NO_SWITCH_STACK)
+# error "VMM_R0_SWITCH_STACK or VMM_R0_NO_SWITCH_STACK has to be defined."
 #endif
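With the darwin-specific default removed, the testcase refuses to build unless the makefile picks a mode, which is exactly what the two targets above do: tstVMMR0CallHost-1 defines VMM_R0_NO_SWITCH_STACK and tstVMMR0CallHost-2 defines VMM_R0_SWITCH_STACK, so the identical sources exercise both code paths. A minimal sketch of the pattern (the mode-string variable is illustrative, not from the testcase):

    /* One source file, two build configurations. */
    #if defined(VMM_R0_SWITCH_STACK)
    static const char g_szMode[] = "switch-stack";
    #elif defined(VMM_R0_NO_SWITCH_STACK)
    static const char g_szMode[] = "copy-stack";
    #else
    # error "VMM_R0_SWITCH_STACK or VMM_R0_NO_SWITCH_STACK has to be defined."
    #endif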