VirtualBox

Changeset 46099 in vbox for trunk/src/VBox/VMM


Ignore:
Timestamp:
May 15, 2013 2:23:49 PM (12 years ago)
Author:
vboxsync
Message:

VMM/VMMR0: Tidying of the assembly code.

Location:
trunk/src/VBox/VMM
Files:
2 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMR0/HMR0Mixed.mac

    r45875 r46099  
    3535    cli
    3636
    37     ;/* Save all general purpose host registers. */
     37    ; Save all general purpose host registers.
    3838    MYPUSHAD
    3939
    40     ;/* First we have to save some final CPU context registers. */
     40    ; First we have to save some final CPU context registers.
    4141    mov     eax, VMX_VMCS_HOST_RIP
    4242%ifdef RT_ARCH_AMD64
     
    4747    vmwrite eax, ecx
    4848%endif
    49     ;/* Note: assumes success... */
    50 
    51     ;/* Manual save and restore:
    52     ; * - General purpose registers except RIP, RSP
    53     ; *
    54     ; * Trashed:
    55     ; * - CR2 (we don't care)
    56     ; * - LDTR (reset to 0)
    57     ; * - DRx (presumably not changed at all)
    58     ; * - DR7 (reset to 0x400)
    59     ; * - EFLAGS (reset to RT_BIT(1); not relevant)
    60     ; *
    61     ; */
    62 
    63     ;/* Save the Guest CPU context pointer. */
     49    ; Note: assumes success!
     50
     51    ; Manual save and restore:
     52    ;  - General purpose registers except RIP, RSP.
     53    ;
     54    ; Trashed:
     55    ;  - CR2 (we don't care).
     56    ;  - LDTR (reset to 0).
     57    ;  - DRx (presumably not changed at all).
     58    ;  - DR7 (reset to 0x400).
     59    ;  - EFLAGS (reset to RT_BIT(1); not relevant).
     60
     61    ; Save the Guest CPU context pointer.
    6462%ifdef RT_ARCH_AMD64
    6563 %ifdef ASM_CALL64_GCC
     
    7876%endif
    7977
    80     ;/* Save segment registers */
    81     ; Note: MYPUSHSEGS trashes rdx & rcx, so we moved it here (msvc amd64 case)
     78    ; Save segment registers.
     79    ; Note: MYPUSHSEGS trashes rdx & rcx, so we moved it here (msvc amd64 case).
    8280    MYPUSHSEGS xAX, ax
    8381
     
    10199.no_cached_writes:
    102100
    103     ; Save the pCache pointer
     101    ; Save the pCache pointer.
    104102    push    xBX
    105103%endif
    106104
    107     ; Save the pCtx pointer
     105    ; Save the pCtx pointer.
    108106    push    xSI
    109107
    110     ; Save LDTR
     108    ; Save LDTR.
    111109    xor     eax, eax
    112110    sldt    ax
    113111    push    xAX
    114112
    115     ; The TR limit is reset to 0x67; restore it manually
     113    ; The TR limit is reset to 0x67; restore it manually.
    116114    str     eax
    117115    push    xAX
    118116
    119     ; VMX only saves the base of the GDTR & IDTR and resets the limit to 0xffff; we must restore the limit correctly!
     117    ; VT-x only saves the base of the GDTR & IDTR and resets the limit to 0xffff; we must restore the limit correctly!
    120118    sub     xSP, xS*2
    121119    sgdt    [xSP]
     
    125123
    126124%ifdef VBOX_WITH_DR6_EXPERIMENT
    127     ; Restore DR6 - experiment, not safe!
     125    ; Load DR6 - experiment, not safe!
    128126    mov     xBX, [xSI + CPUMCTX.dr6]
    129127    mov     dr6, xBX
    130128%endif
    131129
    132     ; Restore CR2
     130    ; Load CR2 if necessary (may be expensive as writing CR2 is a synchronizing instruction).
    133131    mov     xBX, [xSI + CPUMCTX.cr2]
    134132    mov     xDX, cr2
     
    140138    mov     eax, VMX_VMCS_HOST_RSP
    141139    vmwrite xAX, xSP
    142     ;/* Note: assumes success... */
    143     ;/* Don't mess with ESP anymore!! */
    144 
    145     ;/* Restore Guest's general purpose registers. */
     140    ; Note: assumes success!
     141    ; Don't mess with ESP anymore!!!
     142
     143    ; Load Guest's general purpose registers.
    146144    mov     eax, [xSI + CPUMCTX.eax]
    147145    mov     ebx, [xSI + CPUMCTX.ebx]
     
    150148    mov     ebp, [xSI + CPUMCTX.ebp]
    151149
    152     ; resume or start?
     150    ; Resume or start?
    153151    cmp     xDI, 0                  ; fResume
    154     je      .vmlauch_lauch
    155 
    156     ;/* Restore edi & esi. */
     152    je      .vmlaunch_launch
     153
     154    ; Restore edi & esi.
    157155    mov     edi, [xSI + CPUMCTX.edi]
    158156    mov     esi, [xSI + CPUMCTX.esi]
    159157
    160158    vmresume
    161     jmp     .vmlaunch_done;      ;/* here if vmresume detected a failure. */
    162 
    163 .vmlauch_lauch:
    164     ;/* Restore edi & esi. */
     159    jmp     .vmlaunch_done;      ; Here if vmresume detected a failure.
     160
     161.vmlaunch_launch:
     162    ; Restore edi & esi.
    165163    mov     edi, [xSI + CPUMCTX.edi]
    166164    mov     esi, [xSI + CPUMCTX.esi]
    167165
    168166    vmlaunch
    169     jmp     .vmlaunch_done;      ;/* here if vmlaunch detected a failure. */
     167    jmp     .vmlaunch_done;      ; Here if vmlaunch detected a failure.
    170168
    171169ALIGNCODE(16) ;; @todo YASM BUG - this alignment is wrong on darwin, it's 1 byte off.
     
    174172    jz      near .vmxstart_start_failed
    175173
    176     ; Restore base and limit of the IDTR & GDTR
     174    ; Restore base and limit of the IDTR & GDTR.
    177175    lidt    [xSP]
    178176    add     xSP, xS*2
     
    181179
    182180    push    xDI
    183     mov     xDI, [xSP + xS * 3]         ; pCtx (*3 to skip the saved LDTR + TR)
     181    mov     xDI, [xSP + xS * 3]         ; pCtx (*3 to skip the saved LDTR + TR).
    184182
    185183    mov     [ss:xDI + CPUMCTX.eax], eax
     
    195193
    196194%ifdef RT_ARCH_AMD64
    197     pop     xAX                                 ; the guest edi we pushed above
     195    pop     xAX                                 ; The guest edi we pushed above.
    198196    mov     dword [ss:xDI + CPUMCTX.edi], eax
    199197%else
    200     pop     dword [ss:xDI + CPUMCTX.edi]        ; the guest edi we pushed above
     198    pop     dword [ss:xDI + CPUMCTX.edi]        ; The guest edi we pushed above.
    201199%endif
    202200
    203201%ifdef VBOX_WITH_DR6_EXPERIMENT
    204     ; Save DR6 - experiment, not safe!
     202    ; Restore DR6 - experiment, not safe!
    205203    mov     xAX, dr6
    206204    mov     [ss:xDI + CPUMCTX.dr6], xAX
     
    210208    ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p)
    211209    ; @todo get rid of sgdt
    212     pop     xBX         ; saved TR
     210    pop     xBX         ; Saved TR
    213211    sub     xSP, xS*2
    214212    sgdt    [xSP]
    215213    mov     xAX, xBX
    216     and     al, 0F8h                                ; mask away TI and RPL bits, get descriptor offset.
     214    and     al, 0F8h                                ; Mask away TI and RPL bits, get descriptor offset.
    217215    add     xAX, [xSP + 2]                          ; eax <- GDTR.address + descriptor offset.
    218     and     dword [ss:xAX + 4], ~0200h                 ; clear busy flag (2nd type2 bit)
     216    and     dword [ss:xAX + 4], ~0200h              ; Clear busy flag (2nd type2 bit).
    219217    ltr     bx
    220218    add     xSP, xS*2
    221219
    222     pop     xAX         ; saved LDTR
     220    pop     xAX         ; Saved LDTR
    223221    lldt    ax
    224222
    225     add     xSP, xS      ; pCtx
     223    add     xSP, xS     ; pCtx
    226224
    227225%ifdef VMX_USE_CACHED_VMCS_ACCESSES
    228     pop     xDX         ; saved pCache
     226    pop     xDX         ; Saved pCache
    229227
    230228    mov     ecx, [ss:xDX + VMCSCACHE.Read.cValidEntries]
    231     cmp     ecx, 0  ; can't happen
     229    cmp     ecx, 0      ; Can't happen
    232230    je      .no_cached_reads
    233231    jmp     .cached_read
     
    243241
    244242%ifdef VBOX_WITH_OLD_VTX_CODE
    245     ; Save CR2 for EPT
     243    ; Restore CR2 into VMCS-cache field (for EPT).
    246244    mov     xAX, cr2
    247245    mov     [ss:xDX + VMCSCACHE.cr2], xAX
     
    249247%endif
    250248
    251     ; Restore segment registers
     249    ; Restore segment registers.
    252250    MYPOPSEGS xAX, ax
    253251
    254     ; Restore general purpose registers
     252    ; Restore general purpose registers.
    255253    MYPOPAD
    256254
     
    273271    ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p)
    274272    ; @todo get rid of sgdt
    275     pop     xBX         ; saved TR
     273    pop     xBX         ; Saved TR
    276274    sub     xSP, xS*2
    277275    sgdt    [xSP]
    278276    mov     xAX, xBX
    279     and     al, 0F8h                                ; mask away TI and RPL bits, get descriptor offset.
     277    and     al, 0F8h                                ; Mask away TI and RPL bits, get descriptor offset.
    280278    add     xAX, [xSP + 2]                          ; eax <- GDTR.address + descriptor offset.
    281     and     dword [ss:xAX + 4], ~0200h                 ; clear busy flag (2nd type2 bit)
     279    and     dword [ss:xAX + 4], ~0200h              ; Clear busy flag (2nd type2 bit).
    282280    ltr     bx
    283281    add     xSP, xS*2
    284282
    285     pop     xAX         ; saved LDTR
     283    pop     xAX         ; Saved LDTR
    286284    lldt    ax
    287285
     
    292290%endif
    293291
    294     ; Restore segment registers
     292    ; Restore segment registers.
    295293    MYPOPSEGS xAX, ax
    296294
     
    301299
    302300.vmxstart_start_failed:
    303     ; Restore base and limit of the IDTR & GDTR
     301    ; Restore base and limit of the IDTR & GDTR.
    304302    lidt    [xSP]
    305303    add     xSP, xS*2
     
    310308    ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p)
    311309    ; @todo get rid of sgdt
    312     pop     xBX         ; saved TR
     310    pop     xBX         ; Saved TR
    313311    sub     xSP, xS*2
    314312    sgdt    [xSP]
    315313    mov     xAX, xBX
    316     and     al, 0F8h                                ; mask away TI and RPL bits, get descriptor offset.
     314    and     al, 0F8h                                ; Mask away TI and RPL bits, get descriptor offset.
    317315    add     xAX, [xSP + 2]                          ; eax <- GDTR.address + descriptor offset.
    318     and     dword [ss:xAX + 4], ~0200h                 ; clear busy flag (2nd type2 bit)
     316    and     dword [ss:xAX + 4], ~0200h              ; Clear busy flag (2nd type2 bit).
    319317    ltr     bx
    320318    add     xSP, xS*2
    321319
    322     pop     xAX         ; saved LDTR
     320    pop     xAX         ; Saved LDTR
    323321    lldt    ax
    324322
     
    329327%endif
    330328
    331     ; Restore segment registers
     329    ; Restore segment registers.
    332330    MYPOPSEGS xAX, ax
    333331
     
    356354    cli
    357355
    358     ;/* Save all general purpose host registers. */
     356    ; Save all general purpose host registers.
    359357    MYPUSHAD
    360358
    361     ;/* First we have to save some final CPU context registers. */
     359    ; First we have to save some final CPU context registers.
    362360    lea     r10, [.vmlaunch64_done wrt rip]
    363     mov     rax, VMX_VMCS_HOST_RIP  ;/* return address (too difficult to continue after VMLAUNCH?) */
     361    mov     rax, VMX_VMCS_HOST_RIP      ; Return address (too difficult to continue after VMLAUNCH?).
    364362    vmwrite rax, r10
    365     ;/* Note: assumes success... */
    366 
    367     ;/* Manual save and restore:
    368     ; * - General purpose registers except RIP, RSP
    369     ; *
    370     ; * Trashed:
    371     ; * - CR2 (we don't care)
    372     ; * - LDTR (reset to 0)
    373     ; * - DRx (presumably not changed at all)
    374     ; * - DR7 (reset to 0x400)
    375     ; * - EFLAGS (reset to RT_BIT(1); not relevant)
    376     ; *
    377     ; */
    378 
    379     ;/* Save the Guest CPU context pointer. */
     363    ; Note: assumes success!
     364
     365    ; Manual save and restore:
     366    ;  - General purpose registers except RIP, RSP.
     367    ;
     368    ; Trashed:
     369    ;  - CR2 (we don't care).
     370    ;  - LDTR (reset to 0).
     371    ;  - DRx (presumably not changed at all).
     372    ;  - DR7 (reset to 0x400).
     373    ;  - EFLAGS (reset to RT_BIT(1); not relevant).
     374
     375    ; Save the Guest CPU context pointer.
    380376%ifdef ASM_CALL64_GCC
    381377    ; fResume already in rdi
     
    388384%endif
    389385
    390     ;/* Save segment registers */
    391     ; Note: MYPUSHSEGS trashes rdx & rcx, so we moved it here (msvc amd64 case)
     386    ; Save segment registers.
     387    ; Note: MYPUSHSEGS trashes rdx & rcx, so we moved it here (msvc amd64 case).
    392388    MYPUSHSEGS xAX, ax
    393389
     
    411407.no_cached_writes:
    412408
    413     ; Save the pCache pointer
     409    ; Save the pCache pointer.
    414410    push    xBX
    415411%endif
    416412
    417413%ifndef VBOX_WITH_AUTO_MSR_LOAD_RESTORE
    418     ; Save the host MSRs and load the guest MSRs
     414    ; Save the host MSRs and load the guest MSRs.
    419415    LOADGUESTMSR MSR_K8_LSTAR, CPUMCTX.msrLSTAR
    420416    LOADGUESTMSR MSR_K6_STAR, CPUMCTX.msrSTAR
     
    428424%endif
    429425
    430     ; Save the pCtx pointer
     426    ; Save the pCtx pointer.
    431427    push    xSI
    432428
    433     ; Save LDTR
     429    ; Save LDTR.
    434430    xor     eax, eax
    435431    sldt    ax
    436432    push    xAX
    437433
    438     ; The TR limit is reset to 0x67; restore it manually
     434    ; The TR limit is reset to 0x67; restore it manually.
    439435    str     eax
    440436    push    xAX
    441437
    442     ; VMX only saves the base of the GDTR & IDTR and resets the limit to 0xffff; we must restore the limit correctly!
     438    ; VT-x only saves the base of the GDTR & IDTR and resets the limit to 0xffff; we must restore the limit correctly!
    443439    sub     xSP, xS*2
    444440    sgdt    [xSP]
     
    448444
    449445%ifdef VBOX_WITH_DR6_EXPERIMENT
    450     ; Restore DR6 - experiment, not safe!
     446    ; Load DR6 - experiment, not safe!
    451447    mov     xBX, [xSI + CPUMCTX.dr6]
    452448    mov     dr6, xBX
    453449%endif
    454450
    455     ; Restore CR2
     451    ; Load CR2 if necessary (may be expensive as writing CR2 is a synchronizing instruction).
    456452    mov     rbx, qword [xSI + CPUMCTX.cr2]
    457453    mov     rdx, cr2
     
    463459    mov     eax, VMX_VMCS_HOST_RSP
    464460    vmwrite xAX, xSP
    465     ;/* Note: assumes success... */
    466     ;/* Don't mess with ESP anymore!! */
    467 
    468     ;/* Restore Guest's general purpose registers. */
     461    ; Note: assumes success!
     462    ; Don't mess with ESP anymore!!!
     463
     464    ; Restore Guest's general purpose registers.
    469465    mov     rax, qword [xSI + CPUMCTX.eax]
    470466    mov     rbx, qword [xSI + CPUMCTX.ebx]
     
    481477    mov     r15, qword [xSI + CPUMCTX.r15]
    482478
    483     ; resume or start?
     479    ; Resume or start?
    484480    cmp     xDI, 0                  ; fResume
    485     je      .vmlauch64_lauch
    486 
    487     ;/* Restore edi & esi. */
     481    je      .vmlaunch64_launch
     482
     483    ; Restore edi & esi.
    488484    mov     rdi, qword [xSI + CPUMCTX.edi]
    489485    mov     rsi, qword [xSI + CPUMCTX.esi]
    490486
    491487    vmresume
    492     jmp     .vmlaunch64_done;      ;/* here if vmresume detected a failure. */
    493 
    494 .vmlauch64_lauch:
    495     ;/* Restore rdi & rsi. */
     488    jmp     .vmlaunch64_done;      ; Here if vmresume detected a failure.
     489
     490.vmlaunch64_launch:
     491    ; Restore rdi & rsi.
    496492    mov     rdi, qword [xSI + CPUMCTX.edi]
    497493    mov     rsi, qword [xSI + CPUMCTX.esi]
    498494
    499495    vmlaunch
    500     jmp     .vmlaunch64_done;      ;/* here if vmlaunch detected a failure. */
     496    jmp     .vmlaunch64_done;      ; Here if vmlaunch detected a failure.
    501497
    502498ALIGNCODE(16)
     
    533529%endif
    534530
    535     pop     xAX                                 ; the guest edi we pushed above
     531    pop     xAX                                 ; The guest edi we pushed above
    536532    mov     qword [xDI + CPUMCTX.edi], rax
    537533
    538534%ifdef VBOX_WITH_DR6_EXPERIMENT
    539     ; Save DR6 - experiment, not safe!
     535    ; Restore DR6 - experiment, not safe!
    540536    mov     xAX, dr6
    541537    mov     [xDI + CPUMCTX.dr6], xAX
     
    543539
    544540    ; Restore TSS selector; must mark it as not busy before using ltr (!)
    545     ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p)
     541    ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p).
    546542    ; @todo get rid of sgdt
    547     pop     xBX         ; saved TR
     543    pop     xBX         ; Saved TR
    548544    sub     xSP, xS*2
    549545    sgdt    [xSP]
    550546    mov     xAX, xBX
    551     and     al, 0F8h                                ; mask away TI and RPL bits, get descriptor offset.
     547    and     al, 0F8h                                ; Mask away TI and RPL bits, get descriptor offset.
    552548    add     xAX, [xSP + 2]                          ; eax <- GDTR.address + descriptor offset.
    553     and     dword [xAX + 4], ~0200h                 ; clear busy flag (2nd type2 bit)
     549    and     dword [xAX + 4], ~0200h                 ; Clear busy flag (2nd type2 bit).
    554550    ltr     bx
    555551    add     xSP, xS*2
    556552
    557     pop     xAX         ; saved LDTR
     553    pop     xAX         ; Saved LDTR
    558554    lldt    ax
    559555
     
    561557
    562558%ifndef VBOX_WITH_AUTO_MSR_LOAD_RESTORE
    563     ; Save the guest MSRs and load the host MSRs
     559    ; Save the guest MSRs and load the host MSRs.
    564560    LOADHOSTMSREX MSR_K8_KERNEL_GS_BASE, CPUMCTX.msrKERNELGSBASE
    565561    LOADHOSTMSREX MSR_K8_SF_MASK, CPUMCTX.msrSFMASK
     
    574570
    575571%ifdef VMX_USE_CACHED_VMCS_ACCESSES
    576     pop     xDX         ; saved pCache
     572    pop     xDX         ; Saved pCache
    577573
    578574    mov     ecx, [xDX + VMCSCACHE.Read.cValidEntries]
    579     cmp     ecx, 0  ; can't happen
     575    cmp     ecx, 0      ; Can't happen
    580576    je      .no_cached_reads
    581577    jmp     .cached_read
     
    591587
    592588%ifdef VBOX_WITH_OLD_VTX_CODE
    593     ; Save CR2 for EPT
     589    ; Restore CR2 into VMCS-cache field (for EPT).
    594590    mov     xAX, cr2
    595591    mov     [xDX + VMCSCACHE.cr2], xAX
     
    597593%endif
    598594
    599     ; Restore segment registers
     595    ; Restore segment registers.
    600596    MYPOPSEGS xAX, ax
    601597
    602     ; Restore general purpose registers
     598    ; Restore general purpose registers.
    603599    MYPOPAD
    604600
     
    612608
    613609.vmxstart64_invalid_vmcs_ptr:
    614     ; Restore base and limit of the IDTR & GDTR
     610    ; Restore base and limit of the IDTR & GDTR.
    615611    lidt    [xSP]
    616612    add     xSP, xS*2
     
    619615
    620616    ; Restore TSS selector; must mark it as not busy before using ltr (!)
    621     ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p)
     617    ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p).
    622618    ; @todo get rid of sgdt
    623     pop     xBX         ; saved TR
     619    pop     xBX         ; Saved TR
    624620    sub     xSP, xS*2
    625621    sgdt    [xSP]
    626622    mov     xAX, xBX
    627     and     al, 0F8h                                ; mask away TI and RPL bits, get descriptor offset.
     623    and     al, 0F8h                                ; Mask away TI and RPL bits, get descriptor offset.
    628624    add     xAX, [xSP + 2]                          ; eax <- GDTR.address + descriptor offset.
    629     and     dword [xAX + 4], ~0200h                 ; clear busy flag (2nd type2 bit)
     625    and     dword [xAX + 4], ~0200h                 ; Clear busy flag (2nd type2 bit).
    630626    ltr     bx
    631627    add     xSP, xS*2
    632628
    633     pop     xAX         ; saved LDTR
     629    pop     xAX         ; Saved LDTR
    634630    lldt    ax
    635631
     
    637633
    638634%ifndef VBOX_WITH_AUTO_MSR_LOAD_RESTORE
    639     ; Load the host MSRs
     635    ; Load the host MSRs. Don't bother saving the guest MSRs as vmlaunch/vmresume failed.
    640636    LOADHOSTMSR MSR_K8_KERNEL_GS_BASE
    641637    LOADHOSTMSR MSR_K8_SF_MASK
     
    653649%endif
    654650
    655     ; Restore segment registers
     651    ; Restore segment registers.
    656652    MYPOPSEGS xAX, ax
    657653
     
    662658
    663659.vmxstart64_start_failed:
    664     ; Restore base and limit of the IDTR & GDTR
     660    ; Restore base and limit of the IDTR & GDTR.
    665661    lidt    [xSP]
    666662    add     xSP, xS*2
     
    669665
    670666    ; Restore TSS selector; must mark it as not busy before using ltr (!)
    671     ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p)
     667    ; ASSUME that this is supposed to be 'BUSY'. (saves 20-30 ticks on the T42p).
    672668    ; @todo get rid of sgdt
    673     pop     xBX         ; saved TR
     669    pop     xBX         ; Saved TR
    674670    sub     xSP, xS*2
    675671    sgdt    [xSP]
    676672    mov     xAX, xBX
    677     and     al, 0F8h                                ; mask away TI and RPL bits, get descriptor offset.
     673    and     al, 0F8h                                ; Mask away TI and RPL bits, get descriptor offset.
    678674    add     xAX, [xSP + 2]                          ; eax <- GDTR.address + descriptor offset.
    679     and     dword [xAX + 4], ~0200h                 ; clear busy flag (2nd type2 bit)
     675    and     dword [xAX + 4], ~0200h                 ; Clear busy flag (2nd type2 bit).
    680676    ltr     bx
    681677    add     xSP, xS*2
    682678
    683     pop     xAX         ; saved LDTR
     679    pop     xAX         ; Saved LDTR
    684680    lldt    ax
    685681
    686     pop     xSI         ; pCtx (needed in rsi by the macros below)
     682    pop     xSI         ; pCtx (needed in rsi by the macros below).
    687683
    688684%ifndef VBOX_WITH_AUTO_MSR_LOAD_RESTORE
    689     ; Load the host MSRs
     685    ; Load the host MSRs. Don't bother saving the guest MSRs as vmlaunch/vmresume failed.
    690686    LOADHOSTMSR MSR_K8_KERNEL_GS_BASE
    691687    LOADHOSTMSR MSR_K8_SF_MASK
     
    703699%endif
    704700
    705     ; Restore segment registers
     701    ; Restore segment registers.
    706702    MYPOPSEGS xAX, ax
    707703
     
    740736    pushf
    741737
    742     ;/* Manual save and restore:
    743     ; * - General purpose registers except RIP, RSP, RAX
    744     ; *
    745     ; * Trashed:
    746     ; * - CR2 (we don't care)
    747     ; * - LDTR (reset to 0)
    748     ; * - DRx (presumably not changed at all)
    749     ; * - DR7 (reset to 0x400)
    750     ; */
    751 
    752     ;/* Save all general purpose host registers. */
     738    ; Manual save and restore:
     739    ;  - General purpose registers except RIP, RSP, RAX
     740    ;
     741    ; Trashed:
     742    ;  - CR2 (we don't care)
     743    ;  - LDTR (reset to 0)
     744    ;  - DRx (presumably not changed at all)
     745    ;  - DR7 (reset to 0x400)
     746
     747    ; Save all general purpose host registers.
    753748    MYPUSHAD
    754749
    755     ;/* Save the Guest CPU context pointer. */
     750    ; Save the Guest CPU context pointer.
    756751    mov     xSI, [xBP + xS*2 + RTHCPHYS_CB*2]   ; pCtx
    757     push    xSI                     ; push for saving the state at the end
    758 
    759     ; save host fs, gs, sysenter msr etc
     752    push    xSI                                 ; push for saving the state at the end
     753
     754    ; Save host fs, gs, sysenter msr etc.
    760755    mov     xAX, [xBP + xS*2]       ; pVMCBHostPhys (64 bits physical address; x86: take low dword only)
    761756    push    xAX                     ; save for the vmload after vmrun
    762757    vmsave
    763758
    764     ; setup eax for VMLOAD
     759    ; Setup eax for VMLOAD.
    765760    mov     xAX, [xBP + xS*2 + RTHCPHYS_CB]     ; pVMCBPhys (64 bits physical address; take low dword only)
    766761
    767     ;/* Restore Guest's general purpose registers. */
    768     ;/* EAX is loaded from the VMCB by VMRUN */
     762    ; Restore Guest's general purpose registers.
     763    ; eax is loaded from the VMCB by VMRUN.
    769764    mov     ebx, [xSI + CPUMCTX.ebx]
    770765    mov     ecx, [xSI + CPUMCTX.ecx]
     
    774769    mov     esi, [xSI + CPUMCTX.esi]
    775770
    776     ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch
     771    ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch.
    777772    clgi
    778773    sti
    779774
    780     ; load guest fs, gs, sysenter msr etc
     775    ; Load guest fs, gs, sysenter msr etc.
    781776    vmload
    782     ; run the VM
     777    ; Run the VM.
    783778    vmrun
    784779
    785     ;/* EAX is in the VMCB already; we can use it here. */
    786 
    787     ; save guest fs, gs, sysenter msr etc
     780    ; eax is in the VMCB already; we can use it here.
     781
     782    ; Save guest fs, gs, sysenter msr etc.
    788783    vmsave
    789784
    790     ; load host fs, gs, sysenter msr etc
    791     pop     xAX                     ; pushed above
     785    ; Load host fs, gs, sysenter msr etc.
     786    pop     xAX                     ; Pushed above
    792787    vmload
    793788
     
    805800    mov     [ss:xAX + CPUMCTX.ebp], ebp
    806801
    807     ; Restore general purpose registers
     802    ; Restore general purpose registers.
    808803    MYPOPAD
    809804
     
    829824ALIGNCODE(16)
    830825BEGINPROC MY_NAME(SVMR0VMRun64)
    831     ; fake a cdecl stack frame
     826    ; Fake a cdecl stack frame
    832827 %ifdef ASM_CALL64_GCC
    833828    push    rdx
     
    844839    pushf
    845840
    846     ;/* Manual save and restore:
    847     ; * - General purpose registers except RIP, RSP, RAX
    848     ; *
    849     ; * Trashed:
    850     ; * - CR2 (we don't care)
    851     ; * - LDTR (reset to 0)
    852     ; * - DRx (presumably not changed at all)
    853     ; * - DR7 (reset to 0x400)
    854     ; */
    855 
    856     ;/* Save all general purpose host registers. */
     841    ; Manual save and restore:
     842    ; - General purpose registers except RIP, RSP, RAX
     843    ;
     844    ; Trashed:
     845    ; - CR2 (we don't care)
     846    ; - LDTR (reset to 0)
     847    ; - DRx (presumably not changed at all)
     848    ; - DR7 (reset to 0x400)
     849    ;
     850
     851    ; Save all general purpose host registers.
    857852    MYPUSHAD
    858853
    859     ;/* Save the Guest CPU context pointer. */
     854    ; Save the Guest CPU context pointer.
    860855    mov     rsi, [rbp + xS*2 + RTHCPHYS_CB*2]   ; pCtx
    861856    push    rsi                     ; push for saving the state at the end
    862857
    863     ; save host fs, gs, sysenter msr etc
     858    ; Save host fs, gs, sysenter msr etc.
    864859    mov     rax, [rbp + xS*2]       ; pVMCBHostPhys (64 bits physical address; x86: take low dword only)
    865     push    rax                     ; save for the vmload after vmrun
     860    push    rax                     ; Save for the vmload after vmrun
    866861    vmsave
    867862
    868     ; setup eax for VMLOAD
     863    ; Setup eax for VMLOAD.
    869864    mov     rax, [rbp + xS*2 + RTHCPHYS_CB]     ; pVMCBPhys (64 bits physical address; take low dword only)
    870865
    871     ;/* Restore Guest's general purpose registers. */
    872     ;/* RAX is loaded from the VMCB by VMRUN */
     866    ; Restore Guest's general purpose registers.
     867    ; rax is loaded from the VMCB by VMRUN.
    873868    mov     rbx, qword [xSI + CPUMCTX.ebx]
    874869    mov     rcx, qword [xSI + CPUMCTX.ecx]
     
    886881    mov     rsi, qword [xSI + CPUMCTX.esi]
    887882
    888     ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch
     883    ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch.
    889884    clgi
    890885    sti
    891886
    892     ; load guest fs, gs, sysenter msr etc
     887    ; Load guest fs, gs, sysenter msr etc.
    893888    vmload
    894     ; run the VM
     889    ; Run the VM.
    895890    vmrun
    896891
    897     ;/* RAX is in the VMCB already; we can use it here. */
    898 
    899     ; save guest fs, gs, sysenter msr etc
     892    ; rax is in the VMCB already; we can use it here.
     893
     894    ; Save guest fs, gs, sysenter msr etc.
    900895    vmsave
    901896
    902     ; load host fs, gs, sysenter msr etc
     897    ; Load host fs, gs, sysenter msr etc.
    903898    pop     rax                     ; pushed above
    904899    vmload
     
    925920    mov     qword [rax + CPUMCTX.r15], r15
    926921
    927     ; Restore general purpose registers
     922    ; Restore general purpose registers.
    928923    MYPOPAD
    929924
  • trunk/src/VBox/VMM/VMMSwitcher/LegacyandAMD64.mac

    r45875 r46099  
    588588    mov     rbp, rsp
    589589
    590     ; Make sure VT-x instructions are allowed
     590    ; Make sure VT-x instructions are allowed.
    591591    mov     rax, cr4
    592592    or      rax, X86_CR4_VMXE
    593593    mov     cr4, rax
    594594
    595     ;/* Enter VMX Root Mode */
     595    ; Enter VMX Root Mode.
    596596    vmxon   [rbp + 8 + 8]
    597597    jnc     .vmxon_success
     
    621621    push    qword [rbp + 16 + 8];
    622622
    623     ;/* Save segment registers */
     623    ; Save segment registers.
    624624    MYPUSHSEGS rax
    625625
    626626%ifdef VMX_USE_CACHED_VMCS_ACCESSES
    627     ; Flush the VMCS write cache first (before any other vmreads/vmwrites!)
     627    ; Flush the VMCS write cache first (before any other vmreads/vmwrites!).
    628628    mov     rbx, [rbp + 24 + 8]                             ; pCache
    629629
     
    662662    mov     qword [rbx + VMCSCACHE.uPos], 3
    663663 %endif
    664     ; Save the pCache pointer
     664    ; Save the pCache pointer.
    665665    push    rbx
    666666%endif
    667667
    668     ; Save the host state that's relevant in the temporary 64 bits mode
     668    ; Save the host state that's relevant in the temporary 64-bit mode.
    669669    mov     rdx, cr0
    670670    mov     eax, VMX_VMCS_HOST_CR0
     
    697697%endif
    698698
    699     ; hopefully we can ignore TR (we restore it anyway on the way back to 32 bits mode)
    700 
    701     ;/* First we have to save some final CPU context registers. */
     699    ; Hopefully we can ignore TR (we restore it anyway on the way back to 32-bit mode).
     700
     701    ; First we have to save some final CPU context registers.
    702702    lea     rdx, [.vmlaunch64_done wrt rip]
    703     mov     rax, VMX_VMCS_HOST_RIP  ;/* return address (too difficult to continue after VMLAUNCH?) */
     703    mov     rax, VMX_VMCS_HOST_RIP  ; Return address (too difficult to continue after VMLAUNCH?).
    704704    vmwrite rax, rdx
    705     ;/* Note: assumes success... */
    706 
    707     ;/* Manual save and restore:
    708     ; * - General purpose registers except RIP, RSP
    709     ; *
    710     ; * Trashed:
    711     ; * - CR2 (we don't care)
    712     ; * - LDTR (reset to 0)
    713     ; * - DRx (presumably not changed at all)
    714     ; * - DR7 (reset to 0x400)
    715     ; * - EFLAGS (reset to RT_BIT(1); not relevant)
    716     ; *
    717     ; */
     705    ; Note: assumes success!
     706
     707    ; Manual save and restore:
     708    ;  - General purpose registers except RIP, RSP
     709    ;
     710    ; Trashed:
     711    ;  - CR2 (we don't care)
     712    ;  - LDTR (reset to 0)
     713    ;  - DRx (presumably not changed at all)
     714    ;  - DR7 (reset to 0x400)
     715    ;  - EFLAGS (reset to RT_BIT(1); not relevant)
    718716
    719717%ifndef VBOX_WITH_AUTO_MSR_LOAD_RESTORE
    720     ; Load the guest LSTAR, CSTAR, SFMASK & KERNEL_GSBASE MSRs
     718    ; Load the guest LSTAR, CSTAR, SFMASK & KERNEL_GSBASE MSRs.
    721719    LOADGUESTMSR MSR_K8_LSTAR,          CPUMCTX.msrLSTAR
    722720    LOADGUESTMSR MSR_K6_STAR,           CPUMCTX.msrSTAR
     
    737735    push    rsi
    738736
    739     ; Restore CR2
     737    ; Load CR2 if necessary (may be expensive as writing CR2 is a synchronizing instruction).
    740738    mov     rbx, qword [rsi + CPUMCTX.cr2]
    741739    mov     rdx, cr2
     
    747745    mov     eax, VMX_VMCS_HOST_RSP
    748746    vmwrite rax, rsp
    749     ;/* Note: assumes success... */
    750     ;/* Don't mess with ESP anymore!! */
    751 
    752     ;/* Restore Guest's general purpose registers. */
     747    ; Note: assumes success!
     748    ; Don't mess with ESP anymore!!!
     749
     750    ; Load Guest's general purpose registers.
    753751    mov     rax, qword [rsi + CPUMCTX.eax]
    754752    mov     rbx, qword [rsi + CPUMCTX.ebx]
     
    765763    mov     r15, qword [rsi + CPUMCTX.r15]
    766764
    767     ;/* Restore rdi & rsi. */
     765    ; Load rdi & rsi.
    768766    mov     rdi, qword [rsi + CPUMCTX.edi]
    769767    mov     rsi, qword [rsi + CPUMCTX.esi]
    770768
    771769    vmlaunch
    772     jmp     .vmlaunch64_done;      ;/* here if vmlaunch detected a failure. */
     770    jmp     .vmlaunch64_done;      ; Here if vmlaunch detected a failure.
    773771
    774772ALIGNCODE(16)
     
    799797%endif
    800798
    801     pop     rax                                 ; the guest edi we pushed above
     799    pop     rax         ; The guest edi we pushed above
    802800    mov     qword [rdi + CPUMCTX.edi], rax
    803801
     
    817815
    818816%ifdef VMX_USE_CACHED_VMCS_ACCESSES
    819     pop     rdi         ; saved pCache
     817    pop     rdi         ; Saved pCache
    820818
    821819 %ifdef VBOX_WITH_CRASHDUMP_MAGIC
     
    830828
    831829    mov     ecx, [rdi + VMCSCACHE.Read.cValidEntries]
    832     cmp     ecx, 0  ; can't happen
     830    cmp     ecx, 0  ; Can't happen
    833831    je      .no_cached_reads
    834832    jmp     .cached_read
     
    844842
    845843 %ifdef VBOX_WITH_OLD_VTX_CODE
    846     ; Save CR2 for EPT
     844    ; Save CR2 into VMCS-cache field (for EPT).
    847845    mov     rax, cr2
    848846    mov     [rdi + VMCSCACHE.cr2], rax
     
    853851%endif
    854852
    855     ; Restore segment registers
     853    ; Restore segment registers.
    856854    MYPOPSEGS rax
    857855
     
    870868%endif
    871869
    872     ; Write back the data and disable the VMCS
    873     vmclear qword [rsp]  ;Pushed pVMCS
     870    ; Write back the data and disable the VMCS.
     871    vmclear qword [rsp]  ; Pushed pVMCS
    874872    add     rsp, 8
    875873
    876874.vmstart64_vmxoff_end:
    877     ; Disable VMX root mode
     875    ; Disable VMX root mode.
    878876    vmxoff
    879877.vmstart64_vmxon_failed:
     
    911909%endif
    912910
    913     ; Restore segment registers
     911    ; Restore segment registers.
    914912    MYPOPSEGS rax
    915913
     
    933931%endif
    934932
    935     ; Restore segment registers
     933    ; Restore segment registers.
    936934    MYPOPSEGS rax
    937935
     
    955953    pushf
    956954
    957     ;/* Manual save and restore:
    958     ; * - General purpose registers except RIP, RSP, RAX
    959     ; *
    960     ; * Trashed:
    961     ; * - CR2 (we don't care)
    962     ; * - LDTR (reset to 0)
    963     ; * - DRx (presumably not changed at all)
    964     ; * - DR7 (reset to 0x400)
    965     ; */
    966 
    967     ;/* Save the Guest CPU context pointer. */
    968     push    rsi                     ; push for saving the state at the end
    969 
    970     ; save host fs, gs, sysenter msr etc
     955    ; Manual save and restore:
     956    ;  - General purpose registers except RIP, RSP, RAX
     957    ;
     958    ; Trashed:
     959    ;  - CR2 (we don't care)
     960    ;  - LDTR (reset to 0)
     961    ;  - DRx (presumably not changed at all)
     962    ;  - DR7 (reset to 0x400)
     963
     964    ; Save the Guest CPU context pointer.
     965    push    rsi                             ; Push for saving the state at the end
     966
     967    ; Save host fs, gs, sysenter msr etc
    971968    mov     rax, [rbp + 8 + 8]              ; pVMCBHostPhys (64 bits physical address)
    972     push    rax                             ; save for the vmload after vmrun
     969    push    rax                             ; Save for the vmload after vmrun
    973970    vmsave
    974971
    975     ; setup eax for VMLOAD
     972    ; Setup eax for VMLOAD
    976973    mov     rax, [rbp + 8 + 8 + RTHCPHYS_CB]   ; pVMCBPhys (64 bits physical address)
    977974
    978     ;/* Restore Guest's general purpose registers. */
    979     ;/* RAX is loaded from the VMCB by VMRUN */
     975    ; Restore Guest's general purpose registers.
     976    ; rax is loaded from the VMCB by VMRUN.
    980977    mov     rbx, qword [rsi + CPUMCTX.ebx]
    981978    mov     rcx, qword [rsi + CPUMCTX.ecx]
     
    993990    mov     rsi, qword [rsi + CPUMCTX.esi]
    994991
    995     ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch
     992    ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch.
    996993    clgi
    997994    sti
    998995
    999     ; load guest fs, gs, sysenter msr etc
     996    ; Load guest fs, gs, sysenter msr etc
    1000997    vmload
    1001     ; run the VM
     998    ; Run the VM
    1002999    vmrun
    10031000
    1004     ;/* RAX is in the VMCB already; we can use it here. */
    1005 
    1006     ; save guest fs, gs, sysenter msr etc
     1001    ; rax is in the VMCB already; we can use it here.
     1002
     1003    ; Save guest fs, gs, sysenter msr etc.
    10071004    vmsave
    10081005
    1009     ; load host fs, gs, sysenter msr etc
    1010     pop     rax                     ; pushed above
     1006    ; Load host fs, gs, sysenter msr etc.
     1007    pop     rax                     ; Pushed above
    10111008    vmload
    10121009
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette