Changeset 31392 in vbox for trunk/src/VBox/VMM
- Timestamp:
  Aug 5, 2010 11:41:45 AM (14 years ago)
- Files:
  1 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMAll/PDMAllCritSect.cpp
--- trunk/src/VBox/VMM/VMMAll/PDMAllCritSect.cpp (r30328)
+++ trunk/src/VBox/VMM/VMMAll/PDMAllCritSect.cpp (r31392)
@@ -103 +103 @@
 
 
-#ifdef IN_RING3
+#if defined(IN_RING3) || defined(IN_RING0)
 /**
- * Deals with the contended case in ring-3.
+ * Deals with the contended case in ring-3 and ring-0.
  *
  * @returns VINF_SUCCESS or VERR_SEM_DESTROYED.
 [...]
  * @param   hNativeSelf     The native thread handle.
  */
-static int pdmR3CritSectEnterContended(PPDMCRITSECT pCritSect, RTNATIVETHREAD hNativeSelf, PCRTLOCKVALSRCPOS pSrcPos)
+static int pdmR3R0CritSectEnterContended(PPDMCRITSECT pCritSect, RTNATIVETHREAD hNativeSelf, PCRTLOCKVALSRCPOS pSrcPos)
 {
     /*
 [...]
     if (ASMAtomicIncS32(&pCritSect->s.Core.cLockers) == 0)
         return pdmCritSectEnterFirst(pCritSect, hNativeSelf, pSrcPos);
+# ifdef IN_RING3
     STAM_COUNTER_INC(&pCritSect->s.StatContentionR3);
+# else
+    STAM_COUNTER_INC(&pCritSect->s.StatContentionRZLock);
+# endif
 
     /*
 [...]
     PSUPDRVSESSION  pSession = pCritSect->s.CTX_SUFF(pVM)->pSession;
     SUPSEMEVENT     hEvent   = (SUPSEMEVENT)pCritSect->s.Core.EventSem;
-# ifdef PDMCRITSECT_STRICT
+# ifdef IN_RING3
+#  ifdef PDMCRITSECT_STRICT
     RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
     int rc2 = RTLockValidatorRecExclCheckOrder(pCritSect->s.Core.pValidatorRec, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
     if (RT_FAILURE(rc2))
         return rc2;
-# else
+#  else
     RTTHREAD hThreadSelf = RTThreadSelf();
+#  endif
 # endif
     for (;;)
     {
 [...]
         if (RT_FAILURE(rc9))
             return rc9;
-# else
+# elif defined(IN_RING3)
         RTThreadBlocking(hThreadSelf, RTTHREADSTATE_CRITSECT, true);
 # endif
         int rc = SUPSemEventWaitNoResume(pSession, hEvent, RT_INDEFINITE_WAIT);
+# ifdef IN_RING3
         RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_CRITSECT);
+# endif
 
         if (RT_UNLIKELY(pCritSect->s.Core.u32Magic != RTCRITSECT_MAGIC))
 [...]
     /* won't get here */
 }
-#endif /* IN_RING3 */
+#endif /* IN_RING3 || IN_RING0 */
 
 
@@ -221 +229 @@
     /*
      * Take the slow path.
      */
-    return pdmR3CritSectEnterContended(pCritSect, hNativeSelf, pSrcPos);
-#else
-    /*
-     * Return busy.
-     */
+    return pdmR3R0CritSectEnterContended(pCritSect, hNativeSelf, pSrcPos);
+
+#elif defined(IN_RING0)
+    /** @todo If preemption is disabled it means we're in VT-x/AMD-V context
+     *        and would be better off switching out of that while waiting for
+     *        the lock.  Several of the locks jumps back to ring-3 just to
+     *        get the lock, the ring-3 code will then call the kernel to do
+     *        the lock wait and when the call return it will call ring-0
+     *        again and resume via in setjmp style.  Not very efficient. */
+# if 0
+    if (ASMIntAreEnabled()) /** @todo this can be handled as well by changing
+                             *       callers not prepared for longjmp/blocking to
+                             *       use PDMCritSectTryEnter. */
+    {
+        /*
+         * Leave HWACCM context while waiting if necessary.
+         */
+        int rc;
+        if (RTThreadPreemptIsEnabled(NIL_RTTHREAD))
+        {
+            STAM_REL_COUNTER_ADD(&pCritSect->s.StatContentionRZLock, 1000000);
+            rc = pdmR3R0CritSectEnterContended(pCritSect, hNativeSelf, pSrcPos);
+        }
+        else
+        {
+            STAM_REL_COUNTER_ADD(&pCritSect->s.StatContentionRZLock, 1000000000);
+            PVM     pVM   = pCritSect->s.CTX_SUFF(pVM);
+            PVMCPU  pVCpu = VMMGetCpu(pVM);
+            HWACCMR0Leave(pVM, pVCpu);
+            RTThreadPreemptRestore(NIL_RTTHREAD, ????);
+
+            rc = pdmR3R0CritSectEnterContended(pCritSect, hNativeSelf, pSrcPos);
+
+            RTThreadPreemptDisable(NIL_RTTHREAD, ????);
+            HWACCMR0Enter(pVM, pVCpu);
+        }
+        return rc;
+    }
+# else
+    /*
+     * We preemption hasn't been disabled, we can block here in ring-0.
+     */
+    if (   RTThreadPreemptIsEnabled(NIL_RTTHREAD)
+        && ASMIntAreEnabled())
+        return pdmR3R0CritSectEnterContended(pCritSect, hNativeSelf, pSrcPos);
+# endif
+
     STAM_REL_COUNTER_INC(&pCritSect->s.StatContentionRZLock);
     LogFlow(("PDMCritSectEnter: locked => R3 (%Rrc)\n", rcBusy));
     return rcBusy;
-#endif
+
+#else  /* IN_RC */
+    /*
+     * Return busy.
+     */
+    STAM_REL_COUNTER_INC(&pCritSect->s.StatContentionRZLock);
+    LogFlow(("PDMCritSectEnter: locked => R3 (%Rrc)\n", rcBusy));
+    return rcBusy;
+#endif /* IN_RC */
 }
Note: See TracChangeset for help on using the changeset viewer.