Changeset 87522 in vbox for trunk/src/VBox/VMM/VMMR0
- Timestamp: Feb 1, 2021 10:32:33 PM
- svn:sync-xref-src-repo-rev: 142547
- Location: trunk/src/VBox/VMM/VMMR0
- Files: 4 edited
trunk/src/VBox/VMM/VMMR0/HMR0.cpp
r87521 → r87522: set up the new per-VCpu world-switcher flags (lines 1225–1254 added after line 1224)

      */
     pVM->hmr0.s.fHostKernelFeatures = SUPR0GetKernelFeatures();
+
+    /*
+     * Configure defences against spectre and other CPU bugs.
+     */
+    uint32_t fWorldSwitcher = 0;
+    uint32_t cLastStdLeaf   = ASMCpuId_EAX(0);
+    if (cLastStdLeaf >= 0x00000007 && ASMIsValidStdRange(cLastStdLeaf))
+    {
+        uint32_t uEdx = 0;
+        ASMCpuIdExSlow(0x00000007, 0, 0, 0, NULL, NULL, NULL, &uEdx);
+
+        if ((pVM->hm.s.fIbpbOnVmExit || pVM->hm.s.fIbpbOnVmEntry) && (uEdx & X86_CPUID_STEXT_FEATURE_EDX_IBRS_IBPB))
+        {
+            if (pVM->hm.s.fIbpbOnVmExit)
+                fWorldSwitcher |= HM_WSF_IBPB_EXIT;
+            if (pVM->hm.s.fIbpbOnVmEntry)
+                fWorldSwitcher |= HM_WSF_IBPB_ENTRY;
+        }
+        if (pVM->hm.s.fL1dFlushOnVmEntry && (uEdx & X86_CPUID_STEXT_FEATURE_EDX_FLUSH_CMD))
+            fWorldSwitcher |= HM_WSF_L1D_ENTRY;
+        if (pVM->hm.s.fMdsClearOnVmEntry && (uEdx & X86_CPUID_STEXT_FEATURE_EDX_MD_CLEAR))
+            fWorldSwitcher |= HM_WSF_MDS_ENTRY;
+    }
+    for (VMCPUID idCpu = 0; idCpu < pVM->cCpus; idCpu++)
+    {
+        PVMCPUCC pVCpu = VMCC_GET_CPU(pVM, idCpu);
+        pVCpu->hmr0.s.fWorldSwitcher = fWorldSwitcher;
+    }
+    pVM->hm.s.fWorldSwitcherForLog = fWorldSwitcher;
+
     /*
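For reference, the feature bits tested above all come from CPUID leaf 7, sub-leaf 0, EDX. A minimal user-mode sketch of that probe, using GCC/Clang's <cpuid.h> instead of the IPRT ASMCpuId* helpers, could look like the following; the SKETCH_* masks are assumptions made for this example (bit positions per the Intel SDM: 26 = IBRS/IBPB, 28 = L1D flush command, 10 = MD_CLEAR), not VirtualBox's X86_CPUID_STEXT_FEATURE_EDX_* constants.

    /* Standalone sketch, not VirtualBox code: query CPUID leaf 7 sub-leaf 0 and
       report the three mitigation features the hunk above keys off. */
    #include <cpuid.h>
    #include <stdio.h>

    #define SKETCH_EDX_IBRS_IBPB  (1u << 26)   /* IA32_SPEC_CTRL / IA32_PRED_CMD (IBPB) */
    #define SKETCH_EDX_MD_CLEAR   (1u << 10)   /* VERW clears CPU buffers (MDS)         */
    #define SKETCH_EDX_FLUSH_CMD  (1u << 28)   /* IA32_FLUSH_CMD (L1D flush)            */

    int main(void)
    {
        unsigned eax = 0, ebx = 0, ecx = 0, edx = 0;
        if (!__get_cpuid_count(7, 0, &eax, &ebx, &ecx, &edx))
            return 1;                          /* CPU does not report leaf 7 */
        printf("IBPB supported:      %s\n", (edx & SKETCH_EDX_IBRS_IBPB) ? "yes" : "no");
        printf("L1D flush supported: %s\n", (edx & SKETCH_EDX_FLUSH_CMD) ? "yes" : "no");
        printf("MD_CLEAR supported:  %s\n", (edx & SKETCH_EDX_MD_CLEAR)  ? "yes" : "no");
        return 0;
    }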
trunk/src/VBox/VMM/VMMR0/HMR0A.asm
r87503 → r87522: fWorldSwitcher moves from CPUMCTX to the ring-0 only HMR0PERVCPU structure and the CPUMCTX_WSF_* flags become HM_WSF_*; the old CPUMCTX-based barrier macros are no longer used and are removed.

@@ old lines 232–306 removed (barrier macros) @@
-;;
-; Creates an indirect branch prediction barrier on CPUs that need and supports that.
-; @clobbers eax, edx, ecx
-; @param 1    How to address CPUMCTX.
-; @param 2    Which flag to test for (CPUMCTX_WSF_IBPB_ENTRY or CPUMCTX_WSF_IBPB_EXIT)
-%macro INDIRECT_BRANCH_PREDICTION_BARRIER_CTX 2
-        test    byte [%1 + CPUMCTX.fWorldSwitcher], %2
-        jz      %%no_indirect_branch_barrier
-        mov     ecx, MSR_IA32_PRED_CMD
-        mov     eax, MSR_IA32_PRED_CMD_F_IBPB
-        xor     edx, edx
-        wrmsr
-%%no_indirect_branch_barrier:
-%endmacro
-
-;;
-; Creates an indirect branch prediction barrier on CPUs that need and supports that.
-; @clobbers eax, edx, ecx
-; @param 1    How to address VMCPU.
-; @param 2    Which flag to test for (CPUMCTX_WSF_IBPB_ENTRY or CPUMCTX_WSF_IBPB_EXIT)
-%macro INDIRECT_BRANCH_PREDICTION_BARRIER 2
-        test    byte [%1 + VMCPU.cpum.GstCtx + CPUMCTX.fWorldSwitcher], %2
-        jz      %%no_indirect_branch_barrier
-        mov     ecx, MSR_IA32_PRED_CMD
-        mov     eax, MSR_IA32_PRED_CMD_F_IBPB
-        xor     edx, edx
-        wrmsr
-%%no_indirect_branch_barrier:
-%endmacro
-
-;;
-; Creates an indirect branch prediction and L1D barrier on CPUs that need and supports that.
-; @clobbers eax, edx, ecx
-; @param 1    How to address CPUMCTX.
-; @param 2    Which IBPB flag to test for (CPUMCTX_WSF_IBPB_ENTRY or CPUMCTX_WSF_IBPB_EXIT)
-; @param 3    Which FLUSH flag to test for (CPUMCTX_WSF_L1D_ENTRY)
-; @param 4    Which MDS flag to test for (CPUMCTX_WSF_MDS_ENTRY)
-%macro INDIRECT_BRANCH_PREDICTION_AND_L1_CACHE_BARRIER 4
-        ; Only one test+jmp when disabled CPUs.
-        test    byte [%1 + CPUMCTX.fWorldSwitcher], (%2 | %3 | %4)
-        jz      %%no_barrier_needed
-
-        ; The eax:edx value is the same for both.
-        AssertCompile(MSR_IA32_PRED_CMD_F_IBPB == MSR_IA32_FLUSH_CMD_F_L1D)
-        mov     eax, MSR_IA32_PRED_CMD_F_IBPB
-        xor     edx, edx
-
-        ; Indirect branch barrier.
-        test    byte [%1 + CPUMCTX.fWorldSwitcher], %2
-        jz      %%no_indirect_branch_barrier
-        mov     ecx, MSR_IA32_PRED_CMD
-        wrmsr
-%%no_indirect_branch_barrier:
-
-        ; Level 1 data cache flush.
-        test    byte [%1 + CPUMCTX.fWorldSwitcher], %3
-        jz      %%no_cache_flush_barrier
-        mov     ecx, MSR_IA32_FLUSH_CMD
-        wrmsr
-        jmp     %%no_mds_buffer_flushing    ; MDS flushing is included in L1D_FLUSH
-%%no_cache_flush_barrier:
-
-        ; MDS buffer flushing.
-        test    byte [%1 + CPUMCTX.fWorldSwitcher], %4
-        jz      %%no_mds_buffer_flushing
-        sub     xSP, xCB
-        mov     [xSP], ds
-        verw    [xSP]
-        add     xSP, xCB
-%%no_mds_buffer_flushing:
-
-%%no_barrier_needed:
-%endmacro
-
-
 ;*********************************************************************************
 ;*  External Symbols                                                             *

@@ old 569–573 → new 494–498 (hmR0VmxStartVmTemplate documentation) @@
 ; @param 1    Zero if regular return, non-zero if error return.  Controls label emission.
 ; @param 2    fLoadSaveGuestXcr0 value
-; @param 3    The (CPUMCTX_WSF_IBPB_ENTRY | CPUMCTX_WSF_L1D_ENTRY | CPUMCTX_WSF_MDS_ENTRY) + CPUMCTX_WSF_IBPB_EXIT value.
+; @param 3    The (HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY) + HM_WSF_IBPB_EXIT value.
 ;             The entry values are either all set or not at all, as we're too lazy to flesh out all the variants.
 ; @param 4    The SSE saving/restoring: 0 to do nothing, 1 to do it manually, 2 to use xsave/xrstor.

@@ old 635–639 → new 560–564 @@
 %endif

-%if %3 & CPUMCTX_WSF_IBPB_EXIT
+%if %3 & HM_WSF_IBPB_EXIT
         ; Fight spectre (trashes rax, rdx and rcx).
  %if %1 = 0 ; Skip this in failure branch (=> guru)

@@ old 685–689 → new 610–614 @@
 ; @param 1    The suffix of the variation.
 ; @param 2    fLoadSaveGuestXcr0 value
-; @param 3    The CPUMCTX_WSF_IBPB_ENTRY + CPUMCTX_WSF_IBPB_EXIT value.
+; @param 3    The HM_WSF_IBPB_ENTRY + HM_WSF_IBPB_EXIT value.
 ; @param 4    The SSE saving/restoring: 0 to do nothing, 1 to do it manually, 2 to use xsave/xrstor.
 ;             Drivers shouldn't use AVX registers without saving+loading:

@@ old 792–797 → new 717–722 (VMX pre-condition check) @@
         jne     NAME(RT_CONCAT(hmR0VmxStartVmHostRIP,%1).precond_failure_return)

-        mov     eax, [rsi + VMCPU.cpum.GstCtx + CPUMCTX.fWorldSwitcher]
-        and     eax, CPUMCTX_WSF_IBPB_ENTRY | CPUMCTX_WSF_L1D_ENTRY | CPUMCTX_WSF_MDS_ENTRY | CPUMCTX_WSF_IBPB_EXIT
+        mov     eax, [rsi + GVMCPU.hmr0 + HMR0PERVCPU.fWorldSwitcher]
+        and     eax, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT
         cmp     eax, %3
         mov     eax, VERR_VMX_STARTVM_PRECOND_1

@@ old 915–931 → new 840–856 (entry barriers) @@
         ; Fight spectre and similar.  Trashes rax, rcx, and rdx.
         ;
-%if %3 & (CPUMCTX_WSF_IBPB_ENTRY | CPUMCTX_WSF_L1D_ENTRY) ; The eax:edx value is the same for the first two.
+%if %3 & (HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY)           ; The eax:edx value is the same for the first two.
         AssertCompile(MSR_IA32_PRED_CMD_F_IBPB == MSR_IA32_FLUSH_CMD_F_L1D)
         mov     eax, MSR_IA32_PRED_CMD_F_IBPB
         xor     edx, edx
 %endif
-%if %3 & CPUMCTX_WSF_IBPB_ENTRY        ; Indirect branch barrier.
+%if %3 & HM_WSF_IBPB_ENTRY             ; Indirect branch barrier.
         mov     ecx, MSR_IA32_PRED_CMD
         wrmsr
 %endif
-%if %3 & CPUMCTX_WSF_L1D_ENTRY         ; Level 1 data cache flush.
+%if %3 & HM_WSF_L1D_ENTRY              ; Level 1 data cache flush.
         mov     ecx, MSR_IA32_FLUSH_CMD
         wrmsr
-%elif %3 & CPUMCTX_WSF_MDS_ENTRY       ; MDS flushing is included in L1D_FLUSH
+%elif %3 & HM_WSF_MDS_ENTRY            ; MDS flushing is included in L1D_FLUSH
         mov     word [rbp + frm_MDS_seg], ds
         verw    word [rbp + frm_MDS_seg]

@@ old 1098–1133 → new 1023–1058 (hmR0VmxStartVmSseTemplate; the 32 removed instantiation lines are identical apart from using the CPUMCTX_WSF_* names) @@

 %macro hmR0VmxStartVmSseTemplate 3
+hmR0VmxStartVmTemplate _SansXcr0_SansIbpbEntry_SansL1dEntry_SansMdsEntry_SansIbpbExit %+ %2, 0, 0 | 0 | 0 | 0, %1
+hmR0VmxStartVmTemplate _WithXcr0_SansIbpbEntry_SansL1dEntry_SansMdsEntry_SansIbpbExit %+ %2, 1, 0 | 0 | 0 | 0, %1
+hmR0VmxStartVmTemplate _SansXcr0_WithIbpbEntry_SansL1dEntry_SansMdsEntry_SansIbpbExit %+ %2, 0, HM_WSF_IBPB_ENTRY | 0 | 0 | 0, %1
+hmR0VmxStartVmTemplate _WithXcr0_WithIbpbEntry_SansL1dEntry_SansMdsEntry_SansIbpbExit %+ %2, 1, HM_WSF_IBPB_ENTRY | 0 | 0 | 0, %1
+hmR0VmxStartVmTemplate _SansXcr0_SansIbpbEntry_WithL1dEntry_SansMdsEntry_SansIbpbExit %+ %2, 0, 0 | HM_WSF_L1D_ENTRY | 0 | 0, %1
+hmR0VmxStartVmTemplate _WithXcr0_SansIbpbEntry_WithL1dEntry_SansMdsEntry_SansIbpbExit %+ %2, 1, 0 | HM_WSF_L1D_ENTRY | 0 | 0, %1
+hmR0VmxStartVmTemplate _SansXcr0_WithIbpbEntry_WithL1dEntry_SansMdsEntry_SansIbpbExit %+ %2, 0, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | 0 | 0, %1
+hmR0VmxStartVmTemplate _WithXcr0_WithIbpbEntry_WithL1dEntry_SansMdsEntry_SansIbpbExit %+ %2, 1, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | 0 | 0, %1
+hmR0VmxStartVmTemplate _SansXcr0_SansIbpbEntry_SansL1dEntry_WithMdsEntry_SansIbpbExit %+ %2, 0, 0 | 0 | HM_WSF_MDS_ENTRY | 0, %1
+hmR0VmxStartVmTemplate _WithXcr0_SansIbpbEntry_SansL1dEntry_WithMdsEntry_SansIbpbExit %+ %2, 1, 0 | 0 | HM_WSF_MDS_ENTRY | 0, %1
+hmR0VmxStartVmTemplate _SansXcr0_WithIbpbEntry_SansL1dEntry_WithMdsEntry_SansIbpbExit %+ %2, 0, HM_WSF_IBPB_ENTRY | 0 | HM_WSF_MDS_ENTRY | 0, %1
+hmR0VmxStartVmTemplate _WithXcr0_WithIbpbEntry_SansL1dEntry_WithMdsEntry_SansIbpbExit %+ %2, 1, HM_WSF_IBPB_ENTRY | 0 | HM_WSF_MDS_ENTRY | 0, %1
+hmR0VmxStartVmTemplate _SansXcr0_SansIbpbEntry_WithL1dEntry_WithMdsEntry_SansIbpbExit %+ %2, 0, 0 | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | 0, %1
+hmR0VmxStartVmTemplate _WithXcr0_SansIbpbEntry_WithL1dEntry_WithMdsEntry_SansIbpbExit %+ %2, 1, 0 | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | 0, %1
+hmR0VmxStartVmTemplate _SansXcr0_WithIbpbEntry_WithL1dEntry_WithMdsEntry_SansIbpbExit %+ %2, 0, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | 0, %1
+hmR0VmxStartVmTemplate _WithXcr0_WithIbpbEntry_WithL1dEntry_WithMdsEntry_SansIbpbExit %+ %2, 1, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | 0, %1
+hmR0VmxStartVmTemplate _SansXcr0_SansIbpbEntry_SansL1dEntry_SansMdsEntry_WithIbpbExit %+ %2, 0, 0 | 0 | 0 | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _WithXcr0_SansIbpbEntry_SansL1dEntry_SansMdsEntry_WithIbpbExit %+ %2, 1, 0 | 0 | 0 | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _SansXcr0_WithIbpbEntry_SansL1dEntry_SansMdsEntry_WithIbpbExit %+ %2, 0, HM_WSF_IBPB_ENTRY | 0 | 0 | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _WithXcr0_WithIbpbEntry_SansL1dEntry_SansMdsEntry_WithIbpbExit %+ %2, 1, HM_WSF_IBPB_ENTRY | 0 | 0 | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _SansXcr0_SansIbpbEntry_WithL1dEntry_SansMdsEntry_WithIbpbExit %+ %2, 0, 0 | HM_WSF_L1D_ENTRY | 0 | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _WithXcr0_SansIbpbEntry_WithL1dEntry_SansMdsEntry_WithIbpbExit %+ %2, 1, 0 | HM_WSF_L1D_ENTRY | 0 | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _SansXcr0_WithIbpbEntry_WithL1dEntry_SansMdsEntry_WithIbpbExit %+ %2, 0, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | 0 | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _WithXcr0_WithIbpbEntry_WithL1dEntry_SansMdsEntry_WithIbpbExit %+ %2, 1, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | 0 | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _SansXcr0_SansIbpbEntry_SansL1dEntry_WithMdsEntry_WithIbpbExit %+ %2, 0, 0 | 0 | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _WithXcr0_SansIbpbEntry_SansL1dEntry_WithMdsEntry_WithIbpbExit %+ %2, 1, 0 | 0 | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _SansXcr0_WithIbpbEntry_SansL1dEntry_WithMdsEntry_WithIbpbExit %+ %2, 0, HM_WSF_IBPB_ENTRY | 0 | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _WithXcr0_WithIbpbEntry_SansL1dEntry_WithMdsEntry_WithIbpbExit %+ %2, 1, HM_WSF_IBPB_ENTRY | 0 | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _SansXcr0_SansIbpbEntry_WithL1dEntry_WithMdsEntry_WithIbpbExit %+ %2, 0, 0 | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _WithXcr0_SansIbpbEntry_WithL1dEntry_WithMdsEntry_WithIbpbExit %+ %2, 1, 0 | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _SansXcr0_WithIbpbEntry_WithL1dEntry_WithMdsEntry_WithIbpbExit %+ %2, 0, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT, %1
+hmR0VmxStartVmTemplate _WithXcr0_WithIbpbEntry_WithL1dEntry_WithMdsEntry_WithIbpbExit %+ %2, 1, HM_WSF_IBPB_ENTRY | HM_WSF_L1D_ENTRY | HM_WSF_MDS_ENTRY | HM_WSF_IBPB_EXIT, %1
 %endmacro

@@ old 1157–1161 → new 1082–1086 (hmR0SvmVmRunTemplate documentation) @@
 ; @param 1    The suffix of the variation.
 ; @param 2    fLoadSaveGuestXcr0 value
-; @param 3    The CPUMCTX_WSF_IBPB_ENTRY + CPUMCTX_WSF_IBPB_EXIT value.
+; @param 3    The HM_WSF_IBPB_ENTRY + HM_WSF_IBPB_EXIT value.
 ; @param 4    The SSE saving/restoring: 0 to do nothing, 1 to do it manually, 2 to use xsave/xrstor.
 ;             Drivers shouldn't use AVX registers without saving+loading:

@@ old 1265–1270 → new 1190–1195 (SVM pre-condition check) @@
         jne     .failure_return

-        mov     eax, [rsi + VMCPU.cpum.GstCtx + CPUMCTX.fWorldSwitcher]
-        and     eax, CPUMCTX_WSF_IBPB_ENTRY | CPUMCTX_WSF_IBPB_EXIT
+        mov     eax, [rsi + GVMCPU.hmr0 + HMR0PERVCPU.fWorldSwitcher]
+        and     eax, HM_WSF_IBPB_ENTRY | HM_WSF_IBPB_EXIT
         cmp     eax, %3
         mov     eax, VERR_SVM_VMRUN_PRECOND_1

@@ old 1356–1360 → new 1281–1285 @@
         vmsave

-%if %3 & CPUMCTX_WSF_IBPB_ENTRY
+%if %3 & HM_WSF_IBPB_ENTRY
         ; Fight spectre (trashes rax, rdx and rcx).
         mov     ecx, MSR_IA32_PRED_CMD

@@ old 1449–1453 → new 1374–1378 @@
 %endif

-%if %3 & CPUMCTX_WSF_IBPB_EXIT
+%if %3 & HM_WSF_IBPB_EXIT
         ; Fight spectre (trashes rax, rdx and rcx).
         mov     ecx, MSR_IA32_PRED_CMD

@@ old 1541–1570 → new 1466–1495 (hmR0SvmVmRun instantiations; the removed lines differ only in using the CPUMCTX_WSF_* names and in column alignment) @@
 ; Instantiate the hmR0SvmVmRun various variations.
 ;
+hmR0SvmVmRunTemplate _SansXcr0_SansIbpbEntry_SansIbpbExit, 0, 0, 0
+hmR0SvmVmRunTemplate _WithXcr0_SansIbpbEntry_SansIbpbExit, 1, 0, 0
+hmR0SvmVmRunTemplate _SansXcr0_WithIbpbEntry_SansIbpbExit, 0, HM_WSF_IBPB_ENTRY, 0
+hmR0SvmVmRunTemplate _WithXcr0_WithIbpbEntry_SansIbpbExit, 1, HM_WSF_IBPB_ENTRY, 0
+hmR0SvmVmRunTemplate _SansXcr0_SansIbpbEntry_WithIbpbExit, 0, HM_WSF_IBPB_EXIT, 0
+hmR0SvmVmRunTemplate _WithXcr0_SansIbpbEntry_WithIbpbExit, 1, HM_WSF_IBPB_EXIT, 0
+hmR0SvmVmRunTemplate _SansXcr0_WithIbpbEntry_WithIbpbExit, 0, HM_WSF_IBPB_ENTRY | HM_WSF_IBPB_EXIT, 0
+hmR0SvmVmRunTemplate _WithXcr0_WithIbpbEntry_WithIbpbExit, 1, HM_WSF_IBPB_ENTRY | HM_WSF_IBPB_EXIT, 0
 %ifdef VBOX_WITH_KERNEL_USING_XMM
+hmR0SvmVmRunTemplate _SansXcr0_SansIbpbEntry_SansIbpbExit_SseManual, 0, 0, 1
+hmR0SvmVmRunTemplate _WithXcr0_SansIbpbEntry_SansIbpbExit_SseManual, 1, 0, 1
+hmR0SvmVmRunTemplate _SansXcr0_WithIbpbEntry_SansIbpbExit_SseManual, 0, HM_WSF_IBPB_ENTRY, 1
+hmR0SvmVmRunTemplate _WithXcr0_WithIbpbEntry_SansIbpbExit_SseManual, 1, HM_WSF_IBPB_ENTRY, 1
+hmR0SvmVmRunTemplate _SansXcr0_SansIbpbEntry_WithIbpbExit_SseManual, 0, HM_WSF_IBPB_EXIT, 1
+hmR0SvmVmRunTemplate _WithXcr0_SansIbpbEntry_WithIbpbExit_SseManual, 1, HM_WSF_IBPB_EXIT, 1
+hmR0SvmVmRunTemplate _SansXcr0_WithIbpbEntry_WithIbpbExit_SseManual, 0, HM_WSF_IBPB_ENTRY | HM_WSF_IBPB_EXIT, 1
+hmR0SvmVmRunTemplate _WithXcr0_WithIbpbEntry_WithIbpbExit_SseManual, 1, HM_WSF_IBPB_ENTRY | HM_WSF_IBPB_EXIT, 1
+
+hmR0SvmVmRunTemplate _SansXcr0_SansIbpbEntry_SansIbpbExit_SseXSave, 0, 0, 2
+hmR0SvmVmRunTemplate _WithXcr0_SansIbpbEntry_SansIbpbExit_SseXSave, 1, 0, 2
+hmR0SvmVmRunTemplate _SansXcr0_WithIbpbEntry_SansIbpbExit_SseXSave, 0, HM_WSF_IBPB_ENTRY, 2
+hmR0SvmVmRunTemplate _WithXcr0_WithIbpbEntry_SansIbpbExit_SseXSave, 1, HM_WSF_IBPB_ENTRY, 2
+hmR0SvmVmRunTemplate _SansXcr0_SansIbpbEntry_WithIbpbExit_SseXSave, 0, HM_WSF_IBPB_EXIT, 2
+hmR0SvmVmRunTemplate _WithXcr0_SansIbpbEntry_WithIbpbExit_SseXSave, 1, HM_WSF_IBPB_EXIT, 2
+hmR0SvmVmRunTemplate _SansXcr0_WithIbpbEntry_WithIbpbExit_SseXSave, 0, HM_WSF_IBPB_ENTRY | HM_WSF_IBPB_EXIT, 2
+hmR0SvmVmRunTemplate _WithXcr0_WithIbpbEntry_WithIbpbExit_SseXSave, 1, HM_WSF_IBPB_ENTRY | HM_WSF_IBPB_EXIT, 2
 %endif
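The removed INDIRECT_BRANCH_PREDICTION* macros, and the per-variant code that now replaces them inside the templates, come down to three primitives: an IBPB write to IA32_PRED_CMD, an L1D flush via IA32_FLUSH_CMD, and a VERW-based MDS buffer clear. The following C sketch is illustrative only: the MSR numbers and bit-0 semantics are the architectural ones from the Intel SDM, the wrmsr calls require ring 0 and the corresponding CPUID features, and the function names and flag bits are invented rather than taken from VirtualBox.

    #include <stdint.h>

    #define MSR_IA32_PRED_CMD   0x00000049u    /* bit 0 (IBPB): indirect branch prediction barrier */
    #define MSR_IA32_FLUSH_CMD  0x0000010bu    /* bit 0 (L1D_FLUSH): flush the L1 data cache       */

    /* Ring-0 only; GCC/Clang inline assembly. */
    static inline void wrmsr_sketch(uint32_t msr, uint64_t val)
    {
        __asm__ __volatile__("wrmsr" : : "c"(msr), "a"((uint32_t)val), "d"((uint32_t)(val >> 32)));
    }

    /* Hypothetical flag bits for the sketch (not the HM_WSF_* values). */
    enum { SKETCH_IBPB_ENTRY = 1, SKETCH_L1D_ENTRY = 2, SKETCH_MDS_ENTRY = 4 };

    static void world_switch_entry_barriers(unsigned fFlags)
    {
        if (fFlags & SKETCH_IBPB_ENTRY)
            wrmsr_sketch(MSR_IA32_PRED_CMD, 1);     /* IBPB */
        if (fFlags & SKETCH_L1D_ENTRY)
            wrmsr_sketch(MSR_IA32_FLUSH_CMD, 1);    /* L1D flush; also clears the MDS buffers */
        else if (fFlags & SKETCH_MDS_ENTRY)
        {
            uint16_t uSel;                          /* VERW on a valid, writable selector      */
            __asm__ __volatile__("mov %%ds, %0" : "=r"(uSel));   /* overwrites the CPU buffers */
            __asm__ __volatile__("verw %0" : : "m"(uSel));       /* when MD_CLEAR is present.  */
        }
    }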
trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp
r87521 → r87522 (lines 731–737): the run-function index now reads the flags from the ring-0 per-VCpu data

         { hmR0SvmVmRun_WithXcr0_WithIbpbEntry_WithIbpbExit },
     };
-    uintptr_t const idx = (pVCpu->hmr0.s.fLoadSaveGuestXcr0
-                        | (pVCpu->cpum.GstCtx.fWorldSwitcher & CPUMCTX_WSF_IBPB_ENTRY ? 2 : 0)
-                        | (pVCpu->cpum.GstCtx.fWorldSwitcher & CPUMCTX_WSF_IBPB_EXIT  ? 4 : 0);
+    uintptr_t const idx = (pVCpu->hmr0.s.fLoadSaveGuestXcr0 ? 1 : 0)
+                        | (pVCpu->hmr0.s.fWorldSwitcher & HM_WSF_IBPB_ENTRY ? 2 : 0)
+                        | (pVCpu->hmr0.s.fWorldSwitcher & HM_WSF_IBPB_EXIT  ? 4 : 0);
     PFNHMSVMVMRUN const pfnVMRun = s_aHmR0SvmVmRunFunctions[idx].pfn;
     if (pVCpu->hmr0.s.svm.pfnVMRun != pfnVMRun)
trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp
r87521 → r87522: hmR0VmxUpdateStartVmFunction documentation (lines 4160–4170) and index computation (lines 4205–4213)

  * variant selection:
  *    - pVCpu->hm.s.fLoadSaveGuestXcr0
- *    - CPUMCTX_WSF_IBPB_ENTRY in pVCpu->cpum.GstCtx.fWorldSwitcher
- *    - CPUMCTX_WSF_IBPB_EXIT  in pVCpu->cpum.GstCtx.fWorldSwitcher
+ *    - HM_WSF_IBPB_ENTRY in pVCpu->hmr0.s.fWorldSwitcher
+ *    - HM_WSF_IBPB_EXIT  in pVCpu->hmr0.s.fWorldSwitcher
  *    - Perhaps: CPUMIsGuestFPUStateActive() (windows only)
  *    - Perhaps: CPUMCTX.fXStateMask (windows only)
  *
- * We currently ASSUME that neither CPUMCTX_WSF_IBPB_ENTRY nor
- * CPUMCTX_WSF_IBPB_EXIT cannot be changed at runtime.
+ * We currently ASSUME that neither HM_WSF_IBPB_ENTRY nor HM_WSF_IBPB_EXIT
+ * can be changed at runtime.
  */
 static void hmR0VmxUpdateStartVmFunction(PVMCPUCC pVCpu)

@@ lines 4205–4213 @@
         { hmR0VmxStartVm_WithXcr0_WithIbpbEntry_WithL1dEntry_WithMdsEntry_WithIbpbExit },
     };
-    uintptr_t const idx = (pVCpu->hmr0.s.fLoadSaveGuestXcr0
-                        | (pVCpu->cpum.GstCtx.fWorldSwitcher & CPUMCTX_WSF_IBPB_ENTRY ?  2 : 0)
-                        | (pVCpu->cpum.GstCtx.fWorldSwitcher & CPUMCTX_WSF_L1D_ENTRY  ?  4 : 0)
-                        | (pVCpu->cpum.GstCtx.fWorldSwitcher & CPUMCTX_WSF_MDS_ENTRY  ?  8 : 0)
-                        | (pVCpu->cpum.GstCtx.fWorldSwitcher & CPUMCTX_WSF_IBPB_EXIT  ? 16 : 0);
+    uintptr_t const idx = (pVCpu->hmr0.s.fLoadSaveGuestXcr0 ? 1 : 0)
+                        | (pVCpu->hmr0.s.fWorldSwitcher & HM_WSF_IBPB_ENTRY ?  2 : 0)
+                        | (pVCpu->hmr0.s.fWorldSwitcher & HM_WSF_L1D_ENTRY  ?  4 : 0)
+                        | (pVCpu->hmr0.s.fWorldSwitcher & HM_WSF_MDS_ENTRY  ?  8 : 0)
+                        | (pVCpu->hmr0.s.fWorldSwitcher & HM_WSF_IBPB_EXIT  ? 16 : 0);
     PFNHMVMXSTARTVM const pfnStartVm = s_aHmR0VmxStartVmFunctions[idx].pfn;
     if (pVCpu->hmr0.s.vmx.pfnStartVm != pfnStartVm)
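Both HMSVMR0.cpp (3 selection bits, 8 run-function variants) and HMVMXR0.cpp (5 bits, 32 variants) use the same dispatch pattern: each boolean mitigation option contributes one bit of an index into a table of pre-instantiated assembly variants, chosen once when the configuration changes so the hot path is a single indirect call. Below is a reduced, self-contained sketch of that pattern with only two option bits; all names are invented for the example and are not VirtualBox APIs.

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-ins for two HM_WSF_*-style flags. */
    #define SKETCH_WSF_IBPB_ENTRY  0x1u
    #define SKETCH_WSF_IBPB_EXIT   0x2u

    typedef int (*run_fn_t)(void);

    static int run_SansIbpbEntry_SansIbpbExit(void) { return puts("no barriers"); }
    static int run_WithIbpbEntry_SansIbpbExit(void) { return puts("IBPB on entry"); }
    static int run_SansIbpbEntry_WithIbpbExit(void) { return puts("IBPB on exit"); }
    static int run_WithIbpbEntry_WithIbpbExit(void) { return puts("IBPB on entry and exit"); }

    /* Index bit 0 = IBPB on entry, bit 1 = IBPB on exit; 1 << 2 = 4 variants. */
    static run_fn_t const g_apfnRun[1u << 2] =
    {
        run_SansIbpbEntry_SansIbpbExit,
        run_WithIbpbEntry_SansIbpbExit,
        run_SansIbpbEntry_WithIbpbExit,
        run_WithIbpbEntry_WithIbpbExit,
    };

    int main(void)
    {
        uint32_t  const fWorldSwitcher = SKETCH_WSF_IBPB_ENTRY;      /* decided once at init time   */
        uintptr_t const idx = (fWorldSwitcher & SKETCH_WSF_IBPB_ENTRY ? 1 : 0)
                            | (fWorldSwitcher & SKETCH_WSF_IBPB_EXIT  ? 2 : 0);
        return g_apfnRun[idx]() < 0 ? 1 : 0;                         /* hot path: one indirect call */
    }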