VirtualBox

Changeset 87335 in vbox


Timestamp: Jan 21, 2021 12:46:22 AM
Author: vboxsync
Message:

VMM/HMR0A.asm: Adding unwind info to SVMR0VMRun and frame locations for the non-volatile registers.
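In practice the change annotates each assembly prologue with the SEH64_* helper macros so that Windows 64-bit builds of VMMR0 get proper unwind tables for these routines. A condensed sketch of the pattern, using the macro names that appear in the diff below (the function name and the 20h allocation are illustrative only, not taken from the changeset):

BEGINPROC SomeR0Function                ; hypothetical name, for illustration
        push    rbp
        SEH64_PUSH_xBP                  ; record the rbp push for the unwinder
        mov     rbp, rsp
        SEH64_SET_FRAME_xBP 0           ; declare rbp as the frame register
        sub     rsp, 20h
        SEH64_ALLOCATE_STACK 20h        ; record the fixed stack allocation
        SEH64_END_PROLOGUE              ; nothing unwind-relevant follows
        xor     eax, eax                ; (function body would go here)
        leave
        ret
ENDPROC SomeR0Function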

File: 1 edited

  • trunk/src/VBox/VMM/VMMR0/HMR0A.asm

--- trunk/src/VBox/VMM/VMMR0/HMR0A.asm (r87334)
+++ trunk/src/VBox/VMM/VMMR0/HMR0A.asm (r87335)
@@ -19,4 +19,5 @@
 ;*  Header Files                                                                                                                 *
 ;*********************************************************************************************************************************
+%define RT_ASM_WITH_SEH64
 %include "VBox/asmdefs.mac"
 %include "VBox/err.mac"
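Defining RT_ASM_WITH_SEH64 before pulling in VBox/asmdefs.mac is what arms the SEH64_* macros for this file. A minimal sketch of the presumed opt-in shape (the real definitions live in iprt/asmdefs.mac and emit the win64 unwind data; this stub only illustrates the gating):

%ifndef RT_ASM_WITH_SEH64       ; sketch only, not the actual asmdefs.mac code
 %macro SEH64_PUSH_GREG 1
 %endmacro                      ; the annotations cost nothing when disabled
 %macro SEH64_END_PROLOGUE 0
 %endmacro
%endif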
     
@@ -59,9 +60,86 @@
 %endif
 
+
+;; @def CALLEE_PRESERVED_REGISTER_COUNT
+; Number of registers pushed by PUSH_CALLEE_PRESERVED_REGISTERS
+%ifdef ASM_CALL64_GCC
+ %define CALLEE_PRESERVED_REGISTER_COUNT 5
+%else
+ %define CALLEE_PRESERVED_REGISTER_COUNT 7
+%endif
+
 ;; @def PUSH_CALLEE_PRESERVED_REGISTERS
-; Macro generating an equivalent to PUSHAD instruction.
+; Macro for pushing all GPRs we must preserve for the caller.
+%macro PUSH_CALLEE_PRESERVED_REGISTERS 0
+        push    r15
+        SEH64_PUSH_GREG r15
+        %assign cbFrame         cbFrame + 8
+        %assign frm_saved_r15   -cbFrame
+
+        push    r14
+        SEH64_PUSH_GREG r14
+        %assign cbFrame         cbFrame + 8
+        %assign frm_saved_r14   -cbFrame
+
+        push    r13
+        SEH64_PUSH_GREG r13
+        %assign cbFrame         cbFrame + 8
+        %assign frm_saved_r13   -cbFrame
+
+        push    r12
+        SEH64_PUSH_GREG r12
+        %assign cbFrame         cbFrame + 8
+        %assign frm_saved_r12   -cbFrame
+
+        push    rbx
+        SEH64_PUSH_GREG rbx
+        %assign cbFrame         cbFrame + 8
+        %assign frm_saved_rbx   -cbFrame
+
+ %ifdef ASM_CALL64_MSC
+        push    rsi
+        SEH64_PUSH_GREG rsi
+        %assign cbFrame         cbFrame + 8
+        %assign frm_saved_rsi   -cbFrame
+
+        push    rdi
+        SEH64_PUSH_GREG rdi
+        %assign cbFrame         cbFrame + 8
+        %assign frm_saved_rdi   -cbFrame
+ %endif
+%endmacro
 
 ;; @def POP_CALLEE_PRESERVED_REGISTERS
-; Macro generating an equivalent to POPAD instruction.
+; Counterpart to PUSH_CALLEE_PRESERVED_REGISTERS for use in the epilogue.
+%macro POP_CALLEE_PRESERVED_REGISTERS 0
+ %ifdef ASM_CALL64_MSC
+        pop     rdi
+        %assign cbFrame         cbFrame - 8
+        %undef  frm_saved_rdi
+
+        pop     rsi
+        %assign cbFrame         cbFrame - 8
+        %undef  frm_saved_rsi
+ %endif
+        pop     rbx
+        %assign cbFrame         cbFrame - 8
+        %undef  frm_saved_rbx
+
+        pop     r12
+        %assign cbFrame         cbFrame - 8
+        %undef  frm_saved_r12
+
+        pop     r13
+        %assign cbFrame         cbFrame - 8
+        %undef  frm_saved_r13
+
+        pop     r14
+        %assign cbFrame         cbFrame - 8
+        %undef  frm_saved_r14
+
+        pop     r15
+        %assign cbFrame         cbFrame - 8
+        %undef  frm_saved_r15
+%endmacro
 
 ;; @def PUSH_RELEVANT_SEGMENT_REGISTERS
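Besides the SEH64_PUSH_GREG annotations, the rewritten macros keep a running cbFrame byte count and define a frm_saved_<reg> offset for every pushed register. A short sketch of how a function can then address a saved slot symbolically (assuming it did 'push rbp; mov rbp, rsp' and seeded cbFrame with 0; the rsp-relative form mirrors what SVMR0VMRun uses further down while rbp holds a guest value):

%assign cbFrame 0                       ; per-function seed for the byte count
        PUSH_CALLEE_PRESERVED_REGISTERS ; defines frm_saved_rbx = -cbFrame etc.
        mov     rax, [rbp + frm_saved_rbx]           ; frame-pointer relative
        mov     rax, [rsp + cbFrame + frm_saved_rbx] ; rsp-relative equivalent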
     
@@ -74,44 +152,4 @@
 ; @param 1  Full width register name.
 ; @param 2  16-bit register name for \a 1.
-
-%ifdef ASM_CALL64_GCC
- %define CALLEE_PRESERVED_REGISTER_COUNT 5
- %macro PUSH_CALLEE_PRESERVED_REGISTERS 0
-   push    r15
-   push    r14
-   push    r13
-   push    r12
-   push    rbx
- %endmacro
- %macro POP_CALLEE_PRESERVED_REGISTERS 0
-   pop     rbx
-   pop     r12
-   pop     r13
-   pop     r14
-   pop     r15
- %endmacro
-
-%else ; ASM_CALL64_MSC
- %define CALLEE_PRESERVED_REGISTER_COUNT 7
- %macro PUSH_CALLEE_PRESERVED_REGISTERS 0
-   push    r15
-   push    r14
-   push    r13
-   push    r12
-   push    rbx
-   push    rsi
-   push    rdi
- %endmacro
- %macro POP_CALLEE_PRESERVED_REGISTERS 0
-   pop     rdi
-   pop     rsi
-   pop     rbx
-   pop     r12
-   pop     r13
-   pop     r14
-   pop     r15
- %endmacro
-%endif
-
 %ifdef VBOX_SKIP_RESTORE_SEG
  %macro PUSH_RELEVANT_SEGMENT_REGISTERS 2
     
@@ -293,4 +331,5 @@
     mov         rsi, rdx
 %endif
+    SEH64_END_PROLOGUE
 
     test        edi, VMX_RESTORE_HOST_GDTR
     
@@ -401,4 +440,5 @@
 BEGINPROC VMXDispatchHostNmi
     ; NMI is always vector 2. The IDT[2] IRQ handler cannot be anything else. See Intel spec. 6.3.1 "External Interrupts".
+    SEH64_END_PROLOGUE
     int 2
     ret
     
@@ -436,4 +476,5 @@
 ALIGNCODE(16)
 BEGINPROC hmR0VMXStartVMWrapXMM
+        SEH64_END_PROLOGUE
         push    xBP
         mov     xBP, xSP
     
@@ -612,4 +653,5 @@
 ALIGNCODE(64)
 BEGINPROC hmR0SVMRunWrapXMM
+        SEH64_END_PROLOGUE
         push    xBP
         mov     xBP, xSP
     
@@ -879,5 +921,7 @@
 
     ; Save all general purpose host registers.
+%assign cbFrame 0
     PUSH_CALLEE_PRESERVED_REGISTERS
+    SEH64_END_PROLOGUE
 
     ; First we have to save some final CPU context registers.
     
@@ -1036,4 +1080,5 @@
 ALIGNCODE(16)
 BEGINPROC hmR0MdsClear
+        SEH64_END_PROLOGUE
         sub     xSP, xCB
         mov     [xSP], ds
     
@@ -1054,157 +1099,165 @@
 ALIGNCODE(64)
 BEGINPROC SVMR0VMRun
-    push    rbp
-    mov     rbp, rsp
-    pushf
-    sub     rsp, 30h - 8h                   ; The frame is 30h bytes, but the rbp-08h entry is the above pushf.
-                                            ; And we have CALLEE_PRESERVED_REGISTER_COUNT following it.
+        push    rbp
+        SEH64_PUSH_xBP
+        mov     rbp, rsp
+        SEH64_SET_FRAME_xBP 0
+        pushf
+        sub     rsp, 30h - 8h                   ; The frame is 30h bytes, but the rbp-08h entry is the above pushf.
+        SEH64_ALLOCATE_STACK 30h                ; And we have CALLEE_PRESERVED_REGISTER_COUNT following it.
+
+%define frm_fRFlags         -08h
 %define frm_uHostXcr0       -18h            ; 128-bit
 %define frm_fNoRestoreXcr0  -20h            ; Non-zero if we should skip XCR0 restoring.
 %define frm_pVCpu           -28h            ; Where we stash pVCpu for use after the vmrun.
 %define frm_HCPhysVmcbHost  -30h            ; Where we stash HCPhysVmcbHost for the vmload after vmrun.
-%define cbFrame            ( 30h + CALLEE_PRESERVED_REGISTER_COUNT*8 )
-
-    ; Manual save and restore:
-    ;  - General purpose registers except RIP, RSP, RAX
-    ;
-    ; Trashed:
-    ;  - CR2 (we don't care)
-    ;  - LDTR (reset to 0)
-    ;  - DRx (presumably not changed at all)
-    ;  - DR7 (reset to 0x400)
-
-    ; Save all general purpose host registers.
-    PUSH_CALLEE_PRESERVED_REGISTERS
-
-    ; Shuffle parameter registers so that r8=HCPhysVmcb and rsi=pVCpu.  (rdx & rcx will soon be trashed.)
+%assign cbFrame              30h
+
+        ; Manual save and restore:
+        ;  - General purpose registers except RIP, RSP, RAX
+        ;
+        ; Trashed:
+        ;  - CR2 (we don't care)
+        ;  - LDTR (reset to 0)
+        ;  - DRx (presumably not changed at all)
+        ;  - DR7 (reset to 0x400)
+
+        ; Save all general purpose host registers.
+        PUSH_CALLEE_PRESERVED_REGISTERS
+        SEH64_END_PROLOGUE
+%if cbFrame != (30h + 8 * CALLEE_PRESERVED_REGISTER_COUNT)
+ %error Bad cbFrame value
+%endif
+
+        ; Shuffle parameter registers so that r8=HCPhysVmcb and rsi=pVCpu.  (rdx & rcx will soon be trashed.)
 %ifdef ASM_CALL64_GCC
-    mov     r8, rdx                         ; Put HCPhysVmcb in r8 like on MSC as rdx is trashed below.
+        mov     r8, rdx                         ; Put HCPhysVmcb in r8 like on MSC as rdx is trashed below.
 %else
-    mov     rsi, rdx                        ; Put pVCpu in rsi like on GCC as rdx is trashed below.
-    ;mov     rdi, rcx                        ; Put pVM in rdi like on GCC as rcx is trashed below.
-%endif
-
-    ; Save the host XCR0 and load the guest one if necessary.
-    mov     ecx, 3fh                        ; indicate that we need not restore XCR0 (in case we jump)
-    test    byte [rsi + VMCPU.hm + HMCPU.fLoadSaveGuestXcr0], 1
-    jz      .xcr0_before_skip
-
-    xor     ecx, ecx
-    xgetbv                                  ; save the host XCR0 on the stack
-    mov     [rbp + frm_uHostXcr0 + 8], rdx
-    mov     [rbp + frm_uHostXcr0    ], rax
-
-    mov     eax, [rsi + VMCPU.cpum.GstCtx + CPUMCTX.aXcr] ; load the guest XCR0
-    mov     edx, [rsi + VMCPU.cpum.GstCtx + CPUMCTX.aXcr + 4]
-    xor     ecx, ecx                        ; paranoia; Also, indicates that we must restore XCR0 (moved into ecx, thus 0).
-    xsetbv
+        mov     rsi, rdx                        ; Put pVCpu in rsi like on GCC as rdx is trashed below.
+        ;mov     rdi, rcx                        ; Put pVM in rdi like on GCC as rcx is trashed below.
+%endif
+
+        ; Save the host XCR0 and load the guest one if necessary.
+        mov     ecx, 3fh                        ; indicate that we need not restore XCR0 (in case we jump)
+        test    byte [rsi + VMCPU.hm + HMCPU.fLoadSaveGuestXcr0], 1
+        jz      .xcr0_before_skip
+
+        xor     ecx, ecx
+        xgetbv                                  ; save the host XCR0 on the stack
+        mov     [rbp + frm_uHostXcr0 + 8], rdx
+        mov     [rbp + frm_uHostXcr0    ], rax
+
+        mov     eax, [rsi + VMCPU.cpum.GstCtx + CPUMCTX.aXcr] ; load the guest XCR0
+        mov     edx, [rsi + VMCPU.cpum.GstCtx + CPUMCTX.aXcr + 4]
+        xor     ecx, ecx                        ; paranoia; Also, indicates that we must restore XCR0 (moved into ecx, thus 0).
+        xsetbv
 
 .xcr0_before_skip:
-    mov     [rbp + frm_fNoRestoreXcr0], rcx
-
-    ; Save pVCpu pointer for simplifying saving of the GPRs afterwards.
-    mov     qword [rbp + frm_pVCpu], rsi
-
-    ; Save host fs, gs, sysenter msr etc.
-    mov     rax, [rsi + VMCPU.hm + HMCPU.u + HMCPUSVM.HCPhysVmcbHost]
-    mov     qword [rbp + frm_HCPhysVmcbHost], rax          ; save for the vmload after vmrun
-    vmsave
-
-    ; Fight spectre (trashes rax, rdx and rcx).
-    INDIRECT_BRANCH_PREDICTION_BARRIER rsi, CPUMCTX_WSF_IBPB_ENTRY
-
-    ; Setup rax for VMLOAD.
-    mov     rax, r8                         ; HCPhysVmcb (64 bits physical address; take low dword only)
-
-    ; Load guest general purpose registers (rax is loaded from the VMCB by VMRUN).
-    mov     rbx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ebx]
-    mov     rcx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ecx]
-    mov     rdx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.edx]
-    mov     rdi, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.edi]
-    mov     rbp, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ebp]
-    mov     r8,  qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r8]
-    mov     r9,  qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r9]
-    mov     r10, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r10]
-    mov     r11, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r11]
-    mov     r12, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r12]
-    mov     r13, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r13]
-    mov     r14, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r14]
-    mov     r15, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r15]
-    mov     rsi, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.esi]
-
-    ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch.
-    clgi
-    sti
-
-    ; Load guest FS, GS, Sysenter MSRs etc.
-    vmload
-
-    ; Run the VM.
-    vmrun
-
-    ; Save guest fs, gs, sysenter msr etc.
-    vmsave
-
-    ; Load host fs, gs, sysenter msr etc.
-    mov     rax, [rsp + cbFrame + frm_HCPhysVmcbHost] ; load HCPhysVmcbHost (rbp is not operational yet, thus rsp)
-    vmload
-
-    ; Set the global interrupt flag again, but execute cli to make sure IF=0.
-    cli
-    stgi
-
-    ; Pop pVCpu (saved above) and save the guest GPRs (sans RSP and RAX).
-    mov     rax, [rsp + cbFrame + frm_pVCpu] ; (rbp still not operational)
-
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ebx], rbx
-    mov     rbx, SPECTRE_FILLER
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ecx], rcx
-    mov     rcx, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.edx], rdx
-    mov     rdx, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.esi], rsi
-    mov     rsi, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.edi], rdi
-    mov     rdi, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ebp], rbp
-    lea     rbp, [rsp + cbFrame]
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r8],  r8
-    mov     r8, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r9],  r9
-    mov     r9, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r10], r10
-    mov     r10, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r11], r11
-    mov     r11, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r12], r12
-    mov     r12, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r13], r13
-    mov     r13, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r14], r14
-    mov     r14, rbx
-    mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r15], r15
-    mov     r15, rbx
-
-    ; Fight spectre.  Note! Trashes rax, rdx and rcx!
-    INDIRECT_BRANCH_PREDICTION_BARRIER rax, CPUMCTX_WSF_IBPB_EXIT
-
-    ; Restore the host xcr0 if necessary.
-    mov     rcx, [rbp + frm_fNoRestoreXcr0]
-    test    ecx, ecx
-    jnz     .xcr0_after_skip
-    mov     rdx, [rbp + frm_uHostXcr0 + 8]
-    mov     rax, [rbp + frm_uHostXcr0]
-    xsetbv                              ; ecx is already zero
+        mov     [rbp + frm_fNoRestoreXcr0], rcx
+
+        ; Save pVCpu pointer for simplifying saving of the GPRs afterwards.
+        mov     qword [rbp + frm_pVCpu], rsi
+
+        ; Save host fs, gs, sysenter msr etc.
+        mov     rax, [rsi + VMCPU.hm + HMCPU.u + HMCPUSVM.HCPhysVmcbHost]
+        mov     qword [rbp + frm_HCPhysVmcbHost], rax          ; save for the vmload after vmrun
+        vmsave
+
+        ; Fight spectre (trashes rax, rdx and rcx).
+        INDIRECT_BRANCH_PREDICTION_BARRIER rsi, CPUMCTX_WSF_IBPB_ENTRY
+
+        ; Setup rax for VMLOAD.
+        mov     rax, r8                         ; HCPhysVmcb (64 bits physical address; take low dword only)
+
+        ; Load guest general purpose registers (rax is loaded from the VMCB by VMRUN).
+        mov     rbx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ebx]
+        mov     rcx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ecx]
+        mov     rdx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.edx]
+        mov     rdi, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.edi]
+        mov     rbp, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ebp]
+        mov     r8,  qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r8]
+        mov     r9,  qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r9]
+        mov     r10, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r10]
+        mov     r11, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r11]
+        mov     r12, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r12]
+        mov     r13, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r13]
+        mov     r14, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r14]
+        mov     r15, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r15]
+        mov     rsi, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.esi]
+
+        ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch.
+        clgi
+        sti
+
+        ; Load guest FS, GS, Sysenter MSRs etc.
+        vmload
+
+        ; Run the VM.
+        vmrun
+
+        ; Save guest fs, gs, sysenter msr etc.
+        vmsave
+
+        ; Load host fs, gs, sysenter msr etc.
+        mov     rax, [rsp + cbFrame + frm_HCPhysVmcbHost] ; load HCPhysVmcbHost (rbp is not operational yet, thus rsp)
+        vmload
+
+        ; Set the global interrupt flag again, but execute cli to make sure IF=0.
+        cli
+        stgi
+
+        ; Pop pVCpu (saved above) and save the guest GPRs (sans RSP and RAX).
+        mov     rax, [rsp + cbFrame + frm_pVCpu] ; (rbp still not operational)
+
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ebx], rbx
+        mov     rbx, SPECTRE_FILLER
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ecx], rcx
+        mov     rcx, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.edx], rdx
+        mov     rdx, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.esi], rsi
+        mov     rsi, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.edi], rdi
+        mov     rdi, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ebp], rbp
+        lea     rbp, [rsp + cbFrame]
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r8],  r8
+        mov     r8, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r9],  r9
+        mov     r9, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r10], r10
+        mov     r10, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r11], r11
+        mov     r11, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r12], r12
+        mov     r12, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r13], r13
+        mov     r13, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r14], r14
+        mov     r14, rbx
+        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r15], r15
+        mov     r15, rbx
+
+        ; Fight spectre.  Note! Trashes rax, rdx and rcx!
+        INDIRECT_BRANCH_PREDICTION_BARRIER rax, CPUMCTX_WSF_IBPB_EXIT
+
+        ; Restore the host xcr0 if necessary.
+        mov     rcx, [rbp + frm_fNoRestoreXcr0]
+        test    ecx, ecx
+        jnz     .xcr0_after_skip
+        mov     rdx, [rbp + frm_uHostXcr0 + 8]
+        mov     rax, [rbp + frm_uHostXcr0]
+        xsetbv                              ; ecx is already zero
 .xcr0_after_skip:
 
-    ; Restore host general purpose registers.
-    POP_CALLEE_PRESERVED_REGISTERS
-
-    mov     eax, VINF_SUCCESS
-
-    add     rsp, 30h - 8h
-    popf
-    leave
-    ret
+        ; Restore host general purpose registers.
+        POP_CALLEE_PRESERVED_REGISTERS
+
+        mov     eax, VINF_SUCCESS
+
+        add     rsp, 30h - 8h
+        popf
+        leave
+        ret
 %undef frm_uHostXcr0
 %undef frm_fNoRestoreXcr0
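Because cbFrame is now maintained by %assign rather than hardcoded, the frame layout can be verified at assembly time, and stale offsets are caught too: POP_CALLEE_PRESERVED_REGISTERS %undef's every frm_saved_* symbol on the way out. A sketch of both effects (the %if mirrors the %error check in the hunk above):

%if cbFrame != (30h + 8 * CALLEE_PRESERVED_REGISTER_COUNT)
 %error Bad cbFrame value               ; a push/layout mismatch stops the build
%endif
        POP_CALLEE_PRESERVED_REGISTERS  ; pops and %undef's each frm_saved_*
;       mov     rax, [rbp + frm_saved_rbx] ; would no longer assemble here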