VirtualBox

Changeset 87311 in vbox


Ignore:
Timestamp:
Jan 19, 2021 11:45:43 PM (4 years ago)
Author:
vboxsync
Message:

VMM: Cleaned out non-AMD64 code from HMR0A.asm.

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMR0/HMR0A.asm

    r87310 r87311  
    3939%define XMM_OFF_IN_X86FXSTATE   160
    4040
    41 ;; Spectre filler for 32-bit mode.
    42 ; Some user space address that points to a 4MB page boundary in hope that it
    43 ; will somehow make it less useful.
    44 %define SPECTRE_FILLER32        0x227fffff
    4541;; Spectre filler for 64-bit mode.
    4642; Choosen to be an invalid address (also with 5 level paging).
    47 %define SPECTRE_FILLER64        0x02204204207fffff
    48 ;; Spectre filler for the current CPU mode.
    49 %ifdef RT_ARCH_AMD64
    50  %define SPECTRE_FILLER         SPECTRE_FILLER64
    51 %else
    52  %define SPECTRE_FILLER         SPECTRE_FILLER32
    53 %endif
     43%define SPECTRE_FILLER          0x02204204207fffff
    5444
    5545;;
    5646; Determine skipping restoring of GDTR, IDTR, TR across VMX non-root operation.
    5747;
    58 %ifdef RT_ARCH_AMD64
    59  %define VMX_SKIP_GDTR
    60  %define VMX_SKIP_TR
    61  %define VBOX_SKIP_RESTORE_SEG
    62  %ifdef RT_OS_DARWIN
    63   ; Load the NULL selector into DS, ES, FS and GS on 64-bit darwin so we don't
    64   ; risk loading a stale LDT value or something invalid.
    65   %define HM_64_BIT_USE_NULL_SEL
    66   ; Darwin (Mavericks) uses IDTR limit to store the CPU Id so we need to restore it always.
    67   ; See @bugref{6875}.
    68  %else
    69   %define VMX_SKIP_IDTR
    70  %endif
     48%define VMX_SKIP_GDTR
     49%define VMX_SKIP_TR
     50%define VBOX_SKIP_RESTORE_SEG
     51%ifdef RT_OS_DARWIN
     52 ; Load the NULL selector into DS, ES, FS and GS on 64-bit darwin so we don't
     53 ; risk loading a stale LDT value or something invalid.
     54 %define HM_64_BIT_USE_NULL_SEL
     55 ; Darwin (Mavericks) uses IDTR limit to store the CPU Id so we need to restore it always.
     56 ; See @bugref{6875}.
     57%else
     58 %define VMX_SKIP_IDTR
    7159%endif
    7260
     
    8876
    8977%ifdef ASM_CALL64_GCC
    90  %macro MYPUSHAD64 0
     78 %macro MYPUSHAD 0
    9179   push    r15
    9280   push    r14
     
    9583   push    rbx
    9684 %endmacro
    97  %macro MYPOPAD64 0
     85 %macro MYPOPAD 0
    9886   pop     rbx
    9987   pop     r12
     
    10492
    10593%else ; ASM_CALL64_MSC
    106  %macro MYPUSHAD64 0
     94 %macro MYPUSHAD 0
    10795   push    r15
    10896   push    r14
     
    113101   push    rdi
    114102 %endmacro
    115  %macro MYPOPAD64 0
     103 %macro MYPOPAD 0
    116104   pop     rdi
    117105   pop     rsi
     
    125113
    126114%ifdef VBOX_SKIP_RESTORE_SEG
    127  %macro MYPUSHSEGS64 2
     115 %macro MYPUSHSEGS 2
    128116 %endmacro
    129117
    130  %macro MYPOPSEGS64 2
     118 %macro MYPOPSEGS 2
    131119 %endmacro
    132120%else       ; !VBOX_SKIP_RESTORE_SEG
    133121 ; Trashes, rax, rdx & rcx.
    134  %macro MYPUSHSEGS64 2
     122 %macro MYPUSHSEGS 2
    135123  %ifndef HM_64_BIT_USE_NULL_SEL
    136124   mov     %2, es
     
    162150
    163151 ; trashes, rax, rdx & rcx
    164  %macro MYPOPSEGS64 2
     152 %macro MYPOPSEGS 2
    165153   ; Note: do not step through this code with a debugger!
    166154  %ifndef HM_64_BIT_USE_NULL_SEL
     
    198186%endif ; VBOX_SKIP_RESTORE_SEG
    199187
    200 %macro MYPUSHAD32 0
    201   pushad
    202 %endmacro
    203 %macro MYPOPAD32 0
    204   popad
    205 %endmacro
    206 
    207 %macro MYPUSHSEGS32 2
    208   push    ds
    209   push    es
    210   push    fs
    211   push    gs
    212 %endmacro
    213 %macro MYPOPSEGS32 2
    214   pop     gs
    215   pop     fs
    216   pop     es
    217   pop     ds
    218 %endmacro
    219 
    220 %ifdef RT_ARCH_AMD64
    221  %define MYPUSHAD       MYPUSHAD64
    222  %define MYPOPAD        MYPOPAD64
    223  %define MYPUSHSEGS     MYPUSHSEGS64
    224  %define MYPOPSEGS      MYPOPSEGS64
    225 %else
    226  %define MYPUSHAD       MYPUSHAD32
    227  %define MYPOPAD        MYPOPAD32
    228  %define MYPUSHSEGS     MYPUSHSEGS32
    229  %define MYPOPSEGS      MYPOPSEGS32
    230 %endif
    231188
    232189;;
     
    310267ALIGNCODE(16)
    311268BEGINPROC VMXRestoreHostState
    312 %ifdef RT_ARCH_AMD64
    313  %ifndef ASM_CALL64_GCC
     269%ifndef ASM_CALL64_GCC
    314270    ; Use GCC's input registers since we'll be needing both rcx and rdx further
    315271    ; down with the wrmsr instruction.  Use the R10 and R11 register for saving
     
    319275    mov         rdi, rcx
    320276    mov         rsi, rdx
    321  %endif
     277%endif
    322278
    323279    test        edi, VMX_RESTORE_HOST_GDTR
     
    413369.restore_success:
    414370    mov         eax, VINF_SUCCESS
    415  %ifndef ASM_CALL64_GCC
     371%ifndef ASM_CALL64_GCC
    416372    ; Restore RDI and RSI on MSC.
    417373    mov         rdi, r10
    418374    mov         rsi, r11
    419  %endif
    420 %else  ; RT_ARCH_X86
    421     mov         eax, VERR_NOT_IMPLEMENTED
    422375%endif
    423376    ret
     
    800753
    801754
    802 %ifdef RT_ARCH_AMD64
    803755;; @def RESTORE_STATE_VM64
    804756; Macro restoring essential host state and updating guest state
     
    824776
    825777    mov     qword [xDI + CPUMCTX.eax], rax
    826     mov     rax, SPECTRE_FILLER64
     778    mov     rax, SPECTRE_FILLER
    827779    mov     qword [xDI + CPUMCTX.ebx], rbx
    828780    mov     rbx, rax
     
    10701022    jmp     .vmstart64_end
    10711023ENDPROC VMXR0StartVM64
    1072 %endif ; RT_ARCH_AMD64
    10731024
    10741025
     
    10851036
    10861037
    1087 %ifdef RT_ARCH_AMD64
    10881038;;
    10891039; Prepares for and executes VMRUN (32-bit and 64-bit guests).
     
    12111161
    12121162    mov     qword [rax + CPUMCTX.ebx], rbx
    1213     mov     rbx, SPECTRE_FILLER64
     1163    mov     rbx, SPECTRE_FILLER
    12141164    mov     qword [rax + CPUMCTX.ecx], rcx
    12151165    mov     rcx, rbx
     
    12611211    ret
    12621212ENDPROC SVMR0VMRun
    1263 %endif ; RT_ARCH_AMD64
    1264 
     1213
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette