VirtualBox

Changeset 61806 in vbox


Ignore:
Timestamp:
Jun 21, 2016 5:39:13 PM (8 years ago)
Author:
vboxsync
Message:

VMM: bugref:8412, ticketref:15439: workaround for changed stack layout of Linux >= 4.6

Location:
trunk/src/VBox/VMM
Files:
2 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-amd64.asm

    r56287 r61806  
    3131%define STACK_PADDING   0eeeeeeeeeeeeeeeeh
    3232
     33;; Workaround for Linux 4.6 fast/slow syscall stack depth difference.
     34%ifdef VMM_R0_SWITCH_STACK
     35 %define STACK_FUZZ_SIZE 0
     36%else
     37 %define STACK_FUZZ_SIZE 128
     38%endif
    3339
    3440
     
    5763    mov     rbp, rsp
    5864 %ifdef ASM_CALL64_MSC
    59     sub     rsp, 30h
     65    sub     rsp, 30h + STACK_FUZZ_SIZE  ; (10h is used by resume (??), 20h for callee spill area)
    6066    mov     r11, rdx                    ; pfn
    6167    mov     rdx, rcx                    ; pJmpBuf;
    6268 %else
    63     sub     rsp, 10h
     69    sub     rsp, 10h + STACK_FUZZ_SIZE  ; (10h is used by resume (??))
    6470    mov     r8, rdx                     ; pvUser1 (save it like MSC)
    6571    mov     r9, rcx                     ; pvUser2 (save it like MSC)
     
    7278    mov     [xDX + VMMR0JMPBUF.rdi], rdi
    7379 %endif
    74     mov     r10, [rbp]
    75     mov     [xDX + VMMR0JMPBUF.rbp], r10
     80    mov     [xDX + VMMR0JMPBUF.rbp], rbp
    7681    mov     [xDX + VMMR0JMPBUF.r12], r12
    7782    mov     [xDX + VMMR0JMPBUF.r13], r13
    7883    mov     [xDX + VMMR0JMPBUF.r14], r14
    7984    mov     [xDX + VMMR0JMPBUF.r15], r15
    80     mov     xAX, [rbp + 8]
     85    mov     xAX, [rbp + 8]              ; (not really necessary, except for validity check)
    8186    mov     [xDX + VMMR0JMPBUF.rip], xAX
    82     lea     r10, [rbp + 10h]            ; (used in resume)
     87 %ifdef ASM_CALL64_MSC
     88    lea     r10, [rsp + 20h]            ; must save the spill area
     89 %else
     90    lea     r10, [rsp]
     91 %endif
    8392    mov     [xDX + VMMR0JMPBUF.rsp], r10
    8493 %ifdef RT_OS_WINDOWS
     
    140149
    141150    ;
    142     ; Return like in the long jump but clear eip, no short cuts here.
     151    ; Return like in the long jump but clear eip, no shortcuts here.
    143152    ;
    144153.proper_return:
     
    165174    mov     r15, [xDX + VMMR0JMPBUF.r15]
    166175    mov     rbp, [xDX + VMMR0JMPBUF.rbp]
    167     mov     xCX, [xDX + VMMR0JMPBUF.rip]
    168176    and     qword [xDX + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
    169177    mov     rsp, [xDX + VMMR0JMPBUF.rsp]
    170178    push    qword [xDX + VMMR0JMPBUF.rflags]
    171179    popf
    172     jmp     xCX
     180    leave
     181    ret
    173182
    174183.entry_error:
     
    203212    ;
    204213.resume:
    205     ; Sanity checks.
    206  %ifdef VMM_R0_SWITCH_STACK
    207     ;; @todo amd64/switch/resume sanity.
    208  %else  ; !VMM_R0_SWITCH_STACK
    209     cmp     r10, [xDX + VMMR0JMPBUF.SpCheck]
    210     jne     .bad
    211 
     214 %ifndef VMM_R0_SWITCH_STACK
     215    ; Sanity checks incoming stack, applying fuzz if needed.
     216    sub     r10, [xDX + VMMR0JMPBUF.SpCheck]
     217    jz      .resume_stack_checked_out
     218    add     r10, STACK_FUZZ_SIZE        ; plus/minus STACK_FUZZ_SIZE is fine.
     219    cmp     r10, STACK_FUZZ_SIZE * 2
     220    ja      .bad
     221
     222    mov     r10, [xDX + VMMR0JMPBUF.SpCheck]
     223    mov     [xDX + VMMR0JMPBUF.rsp], r10 ; Must be updated in case of another long jump (used for save calc).
     224
     225.resume_stack_checked_out:
    212226    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
    213227    cmp     rcx, VMM_STACK_SIZE
    214228    ja      .bad
    215     test    rcx, 3
     229    test    rcx, 7
    216230    jnz     .bad
    217     mov     rdi, [xDX + VMMR0JMPBUF.rsp]
     231    mov     rdi, [xDX + VMMR0JMPBUF.SpCheck]
    218232    sub     rdi, [xDX + VMMR0JMPBUF.SpResume]
    219233    cmp     rcx, rdi
     
    395409    mov     r15, [xDX + VMMR0JMPBUF.r15]
    396410    mov     rbp, [xDX + VMMR0JMPBUF.rbp]
    397     mov     rcx, [xDX + VMMR0JMPBUF.rip]
    398411    mov     rsp, [xDX + VMMR0JMPBUF.rsp]
    399412    push    qword [xDX + VMMR0JMPBUF.rflags]
    400413    popf
    401     jmp     rcx
     414    leave
     415    ret
    402416
    403417    ;
  • trunk/src/VBox/VMM/testcase/tstVMMR0CallHost-1.cpp

    r61793 r61806  
    103103DECLCALLBACK(DECL_NO_INLINE(RT_NOTHING, int)) stackRandom(PVMMR0JMPBUF pJmpBuf, PFNVMMR0SETJMP pfn, PVM pVM, PVMCPU pVCpu)
    104104{
    105 #if 0
    106     uint32_t            cbRand  = RTRandU32Ex(1, 64);
     105#ifdef RT_ARCH_AMD64
     106    uint32_t            cbRand  = RTRandU32Ex(1, 96);
    107107#else
    108108    uint32_t            cbRand  = 1;
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette