Changeset 90611 in vbox for trunk/src/VBox/VMM
- Timestamp: Aug 10, 2021 10:08:53 PM
- svn:sync-xref-src-repo-rev: 146222
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp
--- trunk/src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp  (r90610)
+++ trunk/src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp  (r90611)
@@ -763,4 +763,35 @@
 }
 
+
+/**
+ * Worker for pdmCritSectRwEnterExcl that handles the red tape after we've
+ * gotten exclusive ownership of the critical section.
+ */
+DECLINLINE(int) pdmCritSectRwEnterExclFirst(PPDMCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fNoVal, RTTHREAD hThreadSelf)
+{
+    RT_NOREF(hThreadSelf, fNoVal, pSrcPos);
+    Assert((ASMAtomicReadU64(&pThis->s.Core.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
+
+#if 1 /** @todo consider generating less noise... */
+    ASMAtomicWriteU32(&pThis->s.Core.cWriteRecursions, 1);
+#else
+    pThis->s.Core.cWriteRecursions = 1;
+#endif
+    Assert(pThis->s.Core.cWriterReads == 0);
+
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
+    if (!fNoVal)
+    {
+        if (hThreadSelf == NIL_RTTHREAD)
+            hThreadSelf = RTThreadSelfAutoAdopt();
+        RTLockValidatorRecExclSetOwner(pThis->s.Core.pValidatorWrite, hThreadSelf, pSrcPos, true);
+    }
+#endif
+    STAM_REL_COUNTER_INC(&pThis->s.CTX_MID_Z(Stat,EnterExcl));
+    STAM_PROFILE_ADV_START(&pThis->s.StatWriteLocked, swl);
+    return VINF_SUCCESS;
+}
+
+
 #if defined(IN_RING3) || defined(IN_RING0)
 /**
@@ -769,7 +800,7 @@
  */
 static int pdmR3R0CritSectRwEnterExclContended(PVMCC pVM, PVMCPUCC pVCpu, PPDMCRITSECTRW pThis, RTNATIVETHREAD hNativeSelf,
-                                               PCRTLOCKVALSRCPOS pSrcPos, int rcBusy, RTTHREAD hThreadSelf)
-{
-    RT_NOREF(hThreadSelf, rcBusy, pSrcPos, pVCpu);
+                                               PCRTLOCKVALSRCPOS pSrcPos, bool fNoVal, int rcBusy, RTTHREAD hThreadSelf)
+{
+    RT_NOREF(hThreadSelf, rcBusy, pSrcPos, fNoVal, pVCpu);
 
     for (uint32_t iLoop = 0; ; iLoop++)
@@ -822,5 +853,5 @@
             ASMAtomicCmpXchgHandle(&pThis->s.Core.hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
             if (fDone)
-                return VINF_SUCCESS;
+                return pdmCritSectRwEnterExclFirst(pThis, pSrcPos, fNoVal, hThreadSelf);
         }
         AssertMsg(iLoop < 1000, ("%u\n", iLoop)); /* may loop a few times here... */
@@ -859,6 +890,6 @@
 #endif
 
+    RTTHREAD hThreadSelf = NIL_RTTHREAD;
 #if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
-    RTTHREAD hThreadSelf = NIL_RTTHREAD;
     if (!fTryOnly)
     {
@@ -944,5 +975,9 @@
         }
 
-        if (pThis->s.Core.u32Magic != RTCRITSECTRW_MAGIC)
+        ASMNopPause();
+
+        if (pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC)
+        { /* likely */ }
+        else
             return VERR_SEM_DESTROYED;
 
@@ -963,72 +998,54 @@
               ;
     if (fDone)
+    {
         ASMAtomicCmpXchgHandle(&pThis->s.Core.hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
-    if (!fDone)
+        if (fDone)
+            return pdmCritSectRwEnterExclFirst(pThis, pSrcPos, fNoVal, hThreadSelf);
+    }
+
+    /*
+     * Okay, we have contention and will have to wait unless we're just trying.
+     */
+    STAM_REL_COUNTER_INC(&pThis->s.CTX_MID_Z(StatContention,EnterExcl));
+
+#if defined(IN_RING3) || defined(IN_RING0)
+    if (   !fTryOnly
+# ifdef IN_RING0
+        && RTThreadPreemptIsEnabled(NIL_RTTHREAD)
+        && ASMIntAreEnabled()
+# endif
+       )
     {
-        STAM_REL_COUNTER_INC(&pThis->s.CTX_MID_Z(StatContention,EnterExcl));
-
-#if defined(IN_RING3) || defined(IN_RING0)
-        if (   !fTryOnly
-# ifdef IN_RING0
-            && RTThreadPreemptIsEnabled(NIL_RTTHREAD)
-            && ASMIntAreEnabled()
+# if defined(IN_RING3) && defined(PDMCRITSECTRW_STRICT)
+        if (hThreadSelf == NIL_RTTHREAD)
+            hThreadSelf = RTThreadSelfAutoAdopt();
+        return pdmR3R0CritSectRwEnterExclContended(pVM, NULL, pThis, hNativeSelf, pSrcPos, fNoVal, rcBusy, hThreadSelf);
+# elif defined(IN_RING3)
+        return pdmR3R0CritSectRwEnterExclContended(pVM, NULL, pThis, hNativeSelf, pSrcPos, fNoVal, rcBusy, RTThreadSelf());
+# else
+        return pdmR3R0CritSectRwEnterExclContended(pVM, NULL, pThis, hNativeSelf, pSrcPos, fNoVal, rcBusy, NIL_RTTHREAD);
 # endif
-           )
-        {
-# if defined(IN_RING3) && defined(PDMCRITSECTRW_STRICT)
-            if (hThreadSelf == NIL_RTTHREAD)
-                hThreadSelf = RTThreadSelfAutoAdopt();
-            int rc = pdmR3R0CritSectRwEnterExclContended(pVM, NULL, pThis, hNativeSelf, pSrcPos, rcBusy, hThreadSelf);
-# elif defined(IN_RING3)
-            int rc = pdmR3R0CritSectRwEnterExclContended(pVM, NULL, pThis, hNativeSelf, pSrcPos, rcBusy, RTThreadSelf());
-# else
-            int rc = pdmR3R0CritSectRwEnterExclContended(pVM, NULL, pThis, hNativeSelf, pSrcPos, rcBusy, NIL_RTTHREAD);
-# endif
-            if (RT_SUCCESS(rc))
-            { /*likely*/ }
-            else
-                return rc;
-        }
-        else
+    }
 #endif /* IN_RING3 || IN_RING0 */
-        {
+
 #ifdef IN_RING3
-            /* TryEnter call - decrement the number of (waiting) writers. */
-            return pdmCritSectRwEnterExclBailOut(pThis, VERR_SEM_BUSY);
+    /* TryEnter call - decrement the number of (waiting) writers. */
+    return pdmCritSectRwEnterExclBailOut(pThis, VERR_SEM_BUSY);
+
 #else
-            /* We cannot call SUPSemEventWaitNoResume in this context.  Go back to
-               ring-3 and do it there or return rcBusy. */
-            rcBusy = pdmCritSectRwEnterExclBailOut(pThis, rcBusy);
-            if (rcBusy == VINF_SUCCESS)
-            {
-                Assert(!fTryOnly);
-                PVMCPUCC pVCpu = VMMGetCpu(pVM); AssertPtr(pVCpu);
-                /** @todo Should actually do this in via VMMR0.cpp instead of going all the way
-                 *        back to ring-3.  Goes for both kind of crit sects. */
-                return VMMRZCallRing3(pVM, pVCpu, VMMCALLRING3_PDM_CRIT_SECT_RW_ENTER_EXCL, MMHyperCCToR3(pVM, pThis));
-            }
-            return rcBusy;
-#endif
-        }
+
+    /* We cannot call SUPSemEventWaitNoResume in this context.  Go back to
+       ring-3 and do it there or return rcBusy. */
+    rcBusy = pdmCritSectRwEnterExclBailOut(pThis, rcBusy);
+    if (rcBusy == VINF_SUCCESS)
+    {
+        Assert(!fTryOnly);
+        PVMCPUCC pVCpu = VMMGetCpu(pVM); AssertPtr(pVCpu);
+        /** @todo Should actually do this in via VMMR0.cpp instead of going all the way
+         *        back to ring-3.  Goes for both kind of crit sects. */
+        return VMMRZCallRing3(pVM, pVCpu, VMMCALLRING3_PDM_CRIT_SECT_RW_ENTER_EXCL, MMHyperCCToR3(pVM, pThis));
    }
-
-    /*
-     * Got it!
-     */
-    Assert((ASMAtomicReadU64(&pThis->s.Core.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
-#if 1 /** @todo consider generating less noise... */
-    ASMAtomicWriteU32(&pThis->s.Core.cWriteRecursions, 1);
-#else
-    pThis->s.Core.cWriteRecursions = 1;
-#endif
-    Assert(pThis->s.Core.cWriterReads == 0);
-#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
-    if (!fNoVal)
-        RTLockValidatorRecExclSetOwner(pThis->s.Core.pValidatorWrite, hThreadSelf, pSrcPos, true);
-#endif
-    STAM_REL_COUNTER_INC(&pThis->s.CTX_MID_Z(Stat,EnterExcl));
-    STAM_PROFILE_ADV_START(&pThis->s.StatWriteLocked, swl);
-
-    return VINF_SUCCESS;
+    return rcBusy;
+#endif
 }
 
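In short: the post-acquisition bookkeeping (write-recursion count, lock-validator ownership, statistics) that previously sat only at the tail of pdmCritSectRwEnterExcl is factored into the new helper pdmCritSectRwEnterExclFirst, and both the uncontended fast path and the contended wait loop in pdmR3R0CritSectRwEnterExclContended now return through it (the contended worker gains a bool fNoVal parameter for that purpose). The snippet below is a minimal standalone sketch of the same pattern in plain C11; the demolock type and the enter_excl_first/try_enter_excl/enter_excl_contended names are hypothetical illustrations, not the VirtualBox PDM API.

    /* Simplified illustration of the r90611 refactoring pattern: the
     * bookkeeping that must run exactly once a writer owns the lock is
     * hoisted into a shared helper, enter_excl_first(), which both the
     * uncontended fast path and the contended wait loop call. */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct demolock
    {
        atomic_uintptr_t owner;        /* 0 = free, otherwise the writer's id */
        atomic_uint      recursions;   /* write recursion depth (bookkeeping) */
        atomic_uint      enters;       /* statistics: successful entries */
        atomic_uint      contentions;  /* statistics: contended attempts */
    } demolock;

    /* Common "red tape" once exclusive ownership has been gained. */
    static int enter_excl_first(demolock *lock)
    {
        atomic_store(&lock->recursions, 1);
        atomic_fetch_add(&lock->enters, 1);
        return 0;                      /* success */
    }

    /* Fast path: one attempt, report -1 ("busy") on contention. */
    static int try_enter_excl(demolock *lock, uintptr_t self)
    {
        uintptr_t expected = 0;
        if (atomic_compare_exchange_strong(&lock->owner, &expected, self))
            return enter_excl_first(lock);
        atomic_fetch_add(&lock->contentions, 1);
        return -1;
    }

    /* Contended path: retry until the owner slot frees up, then run the same
     * bookkeeping through the shared helper instead of duplicating it. */
    static int enter_excl_contended(demolock *lock, uintptr_t self)
    {
        for (;;)
        {
            uintptr_t expected = 0;
            if (atomic_compare_exchange_weak(&lock->owner, &expected, self))
                return enter_excl_first(lock);
            /* A real implementation would block on an event semaphore here. */
        }
    }

    static void leave_excl(demolock *lock)
    {
        atomic_store(&lock->recursions, 0);
        atomic_store(&lock->owner, 0);
    }

    int main(void)
    {
        demolock lock = { 0, 0, 0, 0 };
        if (try_enter_excl(&lock, 1) == 0)
            leave_excl(&lock);
        enter_excl_contended(&lock, 2);
        leave_excl(&lock);
        printf("enters=%u contentions=%u\n",
               atomic_load(&lock.enters), atomic_load(&lock.contentions));
        return 0;
    }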