Changeset 87337 in vbox
- Timestamp: Jan 21, 2021, 1:32:19 AM
- File: trunk/src/VBox/VMM/VMMR0/HMR0A.asm (1 edited)
Legend: unmodified lines carry no prefix; added lines are prefixed with '+', removed lines with '-'.
trunk/src/VBox/VMM/VMMR0/HMR0A.asm
--- trunk/src/VBox/VMM/VMMR0/HMR0A.asm  (r87336)
+++ trunk/src/VBox/VMM/VMMR0/HMR0A.asm  (r87337)
@@ -232,5 +232,5 @@
 ; @param    1   How to address CPUMCTX.
 ; @param    2   Which flag to test for (CPUMCTX_WSF_IBPB_ENTRY or CPUMCTX_WSF_IBPB_EXIT)
-%macro INDIRECT_BRANCH_PREDICTION_BARRIER_OLD 2
+%macro INDIRECT_BRANCH_PREDICTION_BARRIER_CTX 2
         test    byte [%1 + CPUMCTX.fWorldSwitcher], %2
         jz      %%no_indirect_branch_barrier
@@ -860,5 +860,5 @@
 
         ; Fight spectre.
-        INDIRECT_BRANCH_PREDICTION_BARRIER_OLD xDI, CPUMCTX_WSF_IBPB_EXIT
+        INDIRECT_BRANCH_PREDICTION_BARRIER_CTX xDI, CPUMCTX_WSF_IBPB_EXIT
 
 %ifndef VMX_SKIP_TR
@@ -1110,5 +1110,5 @@
 %define frm_uHostXcr0           -18h    ; 128-bit
 %define frm_fNoRestoreXcr0      -20h    ; Non-zero if we should skip XCR0 restoring.
-%define frm_pVCpu               -28h    ; Where we stash pVCpu for use after the vmrun.
+%define frm_pGstCtx             -28h    ; Where we stash guest CPU context for use after the vmrun.
 %define frm_HCPhysVmcbHost      -30h    ; Where we stash HCPhysVmcbHost for the vmload after vmrun.
 %assign cbFrame                 30h
@@ -1156,14 +1156,13 @@
         mov     [rbp + frm_fNoRestoreXcr0], rcx
 
-        ; Save pVCpu pointer for simplifying saving of the GPRs afterwards.
-        mov     qword [rbp + frm_pVCpu], rsi
-
         ; Save host fs, gs, sysenter msr etc.
         mov     rax, [rsi + VMCPU.hm + HMCPU.u + HMCPUSVM.HCPhysVmcbHost]
         mov     qword [rbp + frm_HCPhysVmcbHost], rax   ; save for the vmload after vmrun
+        lea     rsi, [rsi + VMCPU.cpum.GstCtx]
+        mov     qword [rbp + frm_pGstCtx], rsi
         vmsave
 
         ; Fight spectre (trashes rax, rdx and rcx).
-        INDIRECT_BRANCH_PREDICTION_BARRIER rsi, CPUMCTX_WSF_IBPB_ENTRY
+        INDIRECT_BRANCH_PREDICTION_BARRIER_CTX rsi, CPUMCTX_WSF_IBPB_ENTRY
 
         ; Setup rax for VMLOAD.
@@ -1171,18 +1170,18 @@
 
         ; Load guest general purpose registers (rax is loaded from the VMCB by VMRUN).
-        mov     rbx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ebx]
-        mov     rcx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ecx]
-        mov     rdx, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.edx]
-        mov     rdi, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.edi]
-        mov     rbp, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.ebp]
-        mov     r8,  qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r8]
-        mov     r9,  qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r9]
-        mov     r10, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r10]
-        mov     r11, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r11]
-        mov     r12, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r12]
-        mov     r13, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r13]
-        mov     r14, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r14]
-        mov     r15, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.r15]
-        mov     rsi, qword [rsi + VMCPU.cpum.GstCtx + CPUMCTX.esi]
+        mov     rbx, qword [rsi + CPUMCTX.ebx]
+        mov     rcx, qword [rsi + CPUMCTX.ecx]
+        mov     rdx, qword [rsi + CPUMCTX.edx]
+        mov     rdi, qword [rsi + CPUMCTX.edi]
+        mov     rbp, qword [rsi + CPUMCTX.ebp]
+        mov     r8,  qword [rsi + CPUMCTX.r8]
+        mov     r9,  qword [rsi + CPUMCTX.r9]
+        mov     r10, qword [rsi + CPUMCTX.r10]
+        mov     r11, qword [rsi + CPUMCTX.r11]
+        mov     r12, qword [rsi + CPUMCTX.r12]
+        mov     r13, qword [rsi + CPUMCTX.r13]
+        mov     r14, qword [rsi + CPUMCTX.r14]
+        mov     r15, qword [rsi + CPUMCTX.r15]
+        mov     rsi, qword [rsi + CPUMCTX.esi]
 
         ; Clear the global interrupt flag & execute sti to make sure external interrupts cause a world switch.
@@ -1208,21 +1207,21 @@
 
         ; Pop pVCpu (saved above) and save the guest GPRs (sans RSP and RAX).
-        mov     rax, [rsp + cbFrame + frm_pVCpu]        ; (rbp still not operational)
-
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ebp], rbp
+        mov     rax, [rsp + cbFrame + frm_pGstCtx]      ; (rbp still not operational)
+
+        mov     qword [rax + CPUMCTX.ebp], rbp
         lea     rbp, [rsp + cbFrame]
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ecx], rcx
+        mov     qword [rax + CPUMCTX.ecx], rcx
         mov     rcx, SPECTRE_FILLER
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.edx], rdx
+        mov     qword [rax + CPUMCTX.edx], rdx
         mov     rdx, rcx
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r8], r8
+        mov     qword [rax + CPUMCTX.r8], r8
         mov     r8, rcx
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r9], r9
+        mov     qword [rax + CPUMCTX.r9], r9
         mov     r9, rcx
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r10], r10
+        mov     qword [rax + CPUMCTX.r10], r10
         mov     r10, rcx
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r11], r11
+        mov     qword [rax + CPUMCTX.r11], r11
         mov     r11, rcx
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.edi], rdi
+        mov     qword [rax + CPUMCTX.edi], rdi
 %ifdef ASM_CALL64_MSC
         mov     rdi, [rbp + frm_saved_rdi]
@@ -1230,5 +1229,5 @@
         mov     rdi, rcx
 %endif
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.esi], rsi
+        mov     qword [rax + CPUMCTX.esi], rsi
 %ifdef ASM_CALL64_MSC
         mov     rsi, [rbp + frm_saved_rsi]
@@ -1236,17 +1235,17 @@
         mov     rsi, rcx
 %endif
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.ebx], rbx
+        mov     qword [rax + CPUMCTX.ebx], rbx
         mov     rbx, [rbp + frm_saved_rbx]
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r12], r12
+        mov     qword [rax + CPUMCTX.r12], r12
         mov     r12, [rbp + frm_saved_r12]
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r13], r13
+        mov     qword [rax + CPUMCTX.r13], r13
         mov     r13, [rbp + frm_saved_r13]
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r14], r14
+        mov     qword [rax + CPUMCTX.r14], r14
         mov     r14, [rbp + frm_saved_r14]
-        mov     qword [rax + VMCPU.cpum.GstCtx + CPUMCTX.r15], r15
+        mov     qword [rax + CPUMCTX.r15], r15
         mov     r15, [rbp + frm_saved_r15]
 
         ; Fight spectre. Note! Trashes rax, rdx and rcx!
-        INDIRECT_BRANCH_PREDICTION_BARRIER rax, CPUMCTX_WSF_IBPB_EXIT
+        INDIRECT_BRANCH_PREDICTION_BARRIER_CTX rax, CPUMCTX_WSF_IBPB_EXIT
 
         ; Restore the host xcr0 if necessary.
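The first hunk only shows the head of the renamed macro (the world-switcher flag test and the early-out jump); the rest of its body is not part of this diff. As a rough sketch of what such a barrier macro expands to, assuming the MSR constant names MSR_IA32_PRED_CMD and MSR_IA32_PRED_CMD_F_IBPB used elsewhere in the VirtualBox headers (the constants are not shown in this changeset), the full body plausibly reads along these lines:

;; Sketch only -- not part of r87337.  Issues an IBPB when the given
;; world-switcher flag is set in the CPUMCTX addressed by %1; otherwise no-op.
%macro INDIRECT_BRANCH_PREDICTION_BARRIER_CTX 2
        test    byte [%1 + CPUMCTX.fWorldSwitcher], %2
        jz      %%no_indirect_branch_barrier
        mov     ecx, MSR_IA32_PRED_CMD          ; assumed name for the IA32_PRED_CMD MSR (0x49)
        mov     eax, MSR_IA32_PRED_CMD_F_IBPB   ; assumed name for the IBPB command bit (bit 0)
        xor     edx, edx
        wrmsr                                   ; the barrier itself
%%no_indirect_branch_barrier:
%endmacro

The wrmsr into IA32_PRED_CMD is also why the comments in the hunks above note that the macro trashes rax, rdx and rcx.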