Changeset 61806 in vbox

- Timestamp: Jun 21, 2016 5:39:13 PM (8 years ago)
- Location: trunk/src/VBox/VMM
- Files: 2 edited
Legend:
- Unmodified
- Added
- Removed
trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-amd64.asm
--- r56287
+++ r61806

@@ old line 31 @@
  %define STACK_PADDING 0eeeeeeeeeeeeeeeeh

+ ;; Workaround for linux 4.6 fast/slow syscall stack depth difference.
+ %ifdef VMM_R0_SWITCH_STACK
+  %define STACK_FUZZ_SIZE 0
+ %else
+  %define STACK_FUZZ_SIZE 128
+ %endif

@@ old line 57 @@
  mov rbp, rsp
  %ifdef ASM_CALL64_MSC
- sub rsp, 30h
+ sub rsp, 30h + STACK_FUZZ_SIZE  ; (10h is used by resume (??), 20h for callee spill area)
  mov r11, rdx                    ; pfn
  mov rdx, rcx                    ; pJmpBuf;
  %else
- sub rsp, 10h
+ sub rsp, 10h + STACK_FUZZ_SIZE  ; (10h is used by resume (??))
  mov r8, rdx                     ; pvUser1 (save it like MSC)
  mov r9, rcx                     ; pvUser2 (save it like MSC)

@@ old line 72 @@
  mov [xDX + VMMR0JMPBUF.rdi], rdi
  %endif
- mov r10, [rbp]
- mov [xDX + VMMR0JMPBUF.rbp], r10
+ mov [xDX + VMMR0JMPBUF.rbp], rbp
  mov [xDX + VMMR0JMPBUF.r12], r12
  mov [xDX + VMMR0JMPBUF.r13], r13
  mov [xDX + VMMR0JMPBUF.r14], r14
  mov [xDX + VMMR0JMPBUF.r15], r15
- mov xAX, [rbp + 8]
+ mov xAX, [rbp + 8]              ; (not really necessary, except for validity check)
  mov [xDX + VMMR0JMPBUF.rip], xAX
- lea r10, [rbp + 10h]            ; (used in resume)
+ %ifdef ASM_CALL64_MSC
+ lea r10, [rsp + 20h]            ; must save the spill area
+ %else
+ lea r10, [rsp]
+ %endif
  mov [xDX + VMMR0JMPBUF.rsp], r10
  %ifdef RT_OS_WINDOWS

@@ old line 140 @@
  ;
- ; Return like in the long jump but clear eip, no short
+ ; Return like in the long jump but clear eip, no shortcuts here.
  ;
  .proper_return:

@@ old line 165 @@
  mov r15, [xDX + VMMR0JMPBUF.r15]
  mov rbp, [xDX + VMMR0JMPBUF.rbp]
- mov xCX, [xDX + VMMR0JMPBUF.rip]
  and qword [xDX + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
  mov rsp, [xDX + VMMR0JMPBUF.rsp]
  push qword [xDX + VMMR0JMPBUF.rflags]
  popf
- jmp xCX
+ leave
+ ret

  .entry_error:

@@ old line 203 @@
  ;
  .resume:
- ; Sanity checks.
- %ifdef VMM_R0_SWITCH_STACK
- ;; @todo amd64/switch/resume sanity.
- %else ; !VMM_R0_SWITCH_STACK
- cmp r10, [xDX + VMMR0JMPBUF.SpCheck]
- jne .bad
-
+ %ifndef VMM_R0_SWITCH_STACK
+ ; Sanity checks incoming stack, applying fuzz if needed.
+ sub r10, [xDX + VMMR0JMPBUF.SpCheck]
+ jz .resume_stack_checked_out
+ add r10, STACK_FUZZ_SIZE         ; plus/minus STACK_FUZZ_SIZE is fine.
+ cmp r10, STACK_FUZZ_SIZE * 2
+ ja .bad
+
+ mov r10, [xDX + VMMR0JMPBUF.SpCheck]
+ mov [xDX + VMMR0JMPBUF.rsp], r10 ; Must be update in case of another long jump (used for save calc).
+
+ .resume_stack_checked_out:
  mov ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
  cmp rcx, VMM_STACK_SIZE
  ja .bad
- test rcx, 3
+ test rcx, 7
  jnz .bad
- mov rdi, [xDX + VMMR0JMPBUF.rsp]
+ mov rdi, [xDX + VMMR0JMPBUF.SpCheck]
  sub rdi, [xDX + VMMR0JMPBUF.SpResume]
  cmp rcx, rdi

@@ old line 395 @@
  mov r15, [xDX + VMMR0JMPBUF.r15]
  mov rbp, [xDX + VMMR0JMPBUF.rbp]
- mov rcx, [xDX + VMMR0JMPBUF.rip]
  mov rsp, [xDX + VMMR0JMPBUF.rsp]
  push qword [xDX + VMMR0JMPBUF.rflags]
  popf
- jmp rcx
+ leave
+ ret

  ;
trunk/src/VBox/VMM/testcase/tstVMMR0CallHost-1.cpp
--- r61793
+++ r61806

@@ old line 103 @@
  DECLCALLBACK(DECL_NO_INLINE(RT_NOTHING, int)) stackRandom(PVMMR0JMPBUF pJmpBuf, PFNVMMR0SETJMP pfn, PVM pVM, PVMCPU pVCpu)
  {
- #if 0
- uint32_t cbRand = RTRandU32Ex(1, 64);
+ #ifdef RT_ARCH_AMD64
+ uint32_t cbRand = RTRandU32Ex(1, 96);
  #else
  uint32_t cbRand = 1;
Note: See TracChangeset for help on using the changeset viewer.