Timestamp:
    Mar 20, 2013 6:17:29 PM (12 years ago)
svn:sync-xref-src-repo-rev:
    84403
Location:
    trunk/src/VBox/Runtime
Files:
    5 edited
    1 copied
Legend (for the diffs below):
    ' '  Unmodified
    '+'  Added
    '-'  Removed
trunk/src/VBox/Runtime/Makefile.kmk (r44625 -> r45110)

        generic/createtemp-generic.cpp \
        generic/critsect-generic.cpp \
+       generic/critsectrw-generic.cpp \
        generic/env-generic.cpp \
        generic/RTDirCreateUniqueNumbered-generic.cpp \
trunk/src/VBox/Runtime/common/misc/lockvalidator.cpp (r44528 -> r45110)

+RTDECL(int) RTLockValidatorRecSharedCreateV(PRTLOCKVALRECSHRD *ppRec, RTLOCKVALCLASS hClass,
+                                            uint32_t uSubClass, void *pvLock, bool fSignaller, bool fEnabled,
+                                            const char *pszNameFmt, va_list va)
+{
+    PRTLOCKVALRECSHRD pRec;
+    *ppRec = pRec = (PRTLOCKVALRECSHRD)RTMemAlloc(sizeof(*pRec));
+    if (!pRec)
+        return VERR_NO_MEMORY;
+    RTLockValidatorRecSharedInitV(pRec, hClass, uSubClass, pvLock, fSignaller, fEnabled, pszNameFmt, va);
+    return VINF_SUCCESS;
+}
+
+
+RTDECL(int) RTLockValidatorRecSharedCreate(PRTLOCKVALRECSHRD *ppRec, RTLOCKVALCLASS hClass,
+                                           uint32_t uSubClass, void *pvLock, bool fSignaller, bool fEnabled,
+                                           const char *pszNameFmt, ...)
+{
+    va_list va;
+    va_start(va, pszNameFmt);
+    int rc = RTLockValidatorRecSharedCreateV(ppRec, hClass, uSubClass, pvLock, fSignaller, fEnabled, pszNameFmt, va);
+    va_end(va);
+    return rc;
+}
+
+
 RTDECL(void) RTLockValidatorRecSharedDelete(PRTLOCKVALRECSHRD pRec)
 {
…
     if (hClass != NIL_RTLOCKVALCLASS)
         RTLockValidatorClassRelease(hClass);
 }
+
+
+RTDECL(void) RTLockValidatorRecSharedDestroy(PRTLOCKVALRECSHRD *ppRec)
+{
+    PRTLOCKVALRECSHRD pRec = *ppRec;
+    *ppRec = NULL;
+    if (pRec)
+    {
+        RTLockValidatorRecSharedDelete(pRec);
+        RTMemFree(pRec);
+    }
+}
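The new Create/Destroy pair lets a lock implementation keep a pointer to a heap-allocated shared-owner record instead of embedding one, which is how the new critsectrw-generic.cpp below uses it. A minimal sketch of that pattern, with a hypothetical MYLOCK structure and error handling trimmed:

    #include <iprt/lockvalidator.h>

    /* Hypothetical lock structure, for illustration only. */
    typedef struct MYLOCK
    {
        /** Heap-allocated shared-owner record, NULL once torn down. */
        PRTLOCKVALRECSHRD   pValidatorRead;
    } MYLOCK;

    static int myLockInit(MYLOCK *pThis, RTLOCKVALCLASS hClass)
    {
        /* RTLockValidatorRecSharedCreate allocates the record and forwards
           to RTLockValidatorRecSharedInitV (see the added code above). */
        return RTLockValidatorRecSharedCreate(&pThis->pValidatorRead, hClass, RTLOCKVAL_SUB_CLASS_NONE,
                                              pThis /*pvLock*/, false /*fSignaller*/, true /*fEnabled*/,
                                              "MyLock-%p", pThis);
    }

    static void myLockTerm(MYLOCK *pThis)
    {
        /* RTLockValidatorRecSharedDestroy deletes the record, frees it and
           NULLs the caller's pointer. */
        RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
    }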
trunk/src/VBox/Runtime/generic/cdrom-generic.cpp (r39730 -> r45110)

 /*******************************************************************************
 *   Header Files                                                              *
 *******************************************************************************/
-#define RTCRITSECT_WITHOUT_REMAPPING
 #include <iprt/cdrom.h>
 #include "internal/iprt.h"
trunk/src/VBox/Runtime/generic/critsectrw-generic.cpp (copied from generic/semrw-lockless-generic.cpp; diff against r45103, new revision r45110)

 /* $Id$ */
 /** @file
- * IPRT - Read-Write Semaphore, Generic, lockless variant.
+ * IPRT - Read/Write Critical Section, Generic.
  */
…
 /*******************************************************************************
 *   Header Files                                                              *
 *******************************************************************************/
-#define RTSEMRW_WITHOUT_REMAPPING
+#define RTCRITSECTRW_WITHOUT_REMAPPING
 #define RTASSERT_QUIET
-#include <iprt/semaphore.h>
+#include <iprt/critsect.h>
 #include "internal/iprt.h"
…
 #include <iprt/lockvalidator.h>
 #include <iprt/mem.h>
+#include <iprt/semaphore.h>
 #include <iprt/thread.h>

 #include "internal/magics.h"
 #include "internal/strict.h"

The private RTSEMRWINTERNAL structure (u32Magic, the u64State word, hNativeWriter, cWriterReads, cWriteRecursions, the two event semaphores, fNeedReset and the embedded RTSEMRW_STRICT validator records) is removed; the code now operates on the public RTCRITSECTRW structure from <iprt/critsect.h>, which carries pointers to heap-allocated validator records (pValidatorWrite, pValidatorRead) rather than embedded ones.

The state-word constants are renamed from RTSEMRW_* to RTCSRW_* ("Note! Using RTCSRW instead of RTCRITSECTRW to save space."), keeping the same bit layout:

 #define RTCSRW_CNT_BITS            15
 #define RTCSRW_CNT_MASK            UINT64_C(0x00007fff)

 #define RTCSRW_CNT_RD_SHIFT        0
 #define RTCSRW_CNT_RD_MASK         (RTCSRW_CNT_MASK << RTCSRW_CNT_RD_SHIFT)
 #define RTCSRW_CNT_WR_SHIFT        16
 #define RTCSRW_CNT_WR_MASK         (RTCSRW_CNT_MASK << RTCSRW_CNT_WR_SHIFT)
 #define RTCSRW_DIR_SHIFT           31
 #define RTCSRW_DIR_MASK            RT_BIT_64(RTCSRW_DIR_SHIFT)
 #define RTCSRW_DIR_READ            UINT64_C(0)
 #define RTCSRW_DIR_WRITE           UINT64_C(1)

 #define RTCSRW_WAIT_CNT_RD_SHIFT   32
 #define RTCSRW_WAIT_CNT_RD_MASK    (RTCSRW_CNT_MASK << RTCSRW_WAIT_CNT_RD_SHIFT)
 //#define RTCSRW_WAIT_CNT_WR_SHIFT 48
 //#define RTCSRW_WAIT_CNT_WR_MASK  (RTCSRW_CNT_MASK << RTCSRW_WAIT_CNT_WR_SHIFT)

The handle-based RTSemRW API becomes a caller-allocated critical-section API:

 RTSemRWCreate / RTSemRWCreateEx            -> RTCritSectRwInit / RTCritSectRwInitEx
 RTSemRWDestroy                             -> RTCritSectRwDelete
 RTSemRWSetSubClass                         -> RTCritSectRwSetSubClass
 RTSemRWRequestRead (+NoResume/Debug)       -> RTCritSectRwEnterShared (+Debug), RTCritSectRwTryEnterShared (+Debug)
 RTSemRWReleaseRead                         -> RTCritSectRwLeaveShared
 RTSemRWRequestWrite (+NoResume/Debug)      -> RTCritSectRwEnterExcl (+Debug), RTCritSectRwTryEnterExcl (+Debug)
 RTSemRWReleaseWrite                        -> RTCritSectRwLeaveExcl
 RTSemRWIsWriteOwner / RTSemRWIsReadOwner   -> RTCritSectRwIsWriteOwner / RTCritSectRwIsReadOwner
 RTSemRWGetWriteRecursion / RTSemRWGetWriterReadRecursion
                                            -> RTCritSectRwGetWriteRecursion / RTCritSectRwGetWriterReadRecursion

RTCritSectRwInit simply forwards to the Ex variant:

 RTDECL(int) RTCritSectRwInit(PRTCRITSECTRW pThis)
 {
     return RTCritSectRwInitEx(pThis, 0, NIL_RTLOCKVALCLASS, RTLOCKVAL_SUB_CLASS_NONE, "RTCritSectRw");
 }
 RT_EXPORT_SYMBOL(RTCritSectRwInit);

RTCritSectRwInitEx accepts the RTCRITSECT_FLAGS_NO_NESTING, RTCRITSECT_FLAGS_NO_LOCK_VAL, RTCRITSECT_FLAGS_BOOTSTRAP_HACK and RTCRITSECT_FLAGS_NOP flags, initializes the structure members directly (u32Magic starts out as RTCRITSECTRW_MAGIC_DEAD, handles and validator pointers are NIL/NULL, HCPtrPadding is cleared on 32-bit hosts), creates the lock validator records with the new RTLockValidatorRecExclCreate(V)/RTLockValidatorRecSharedCreate(V) calls under RTCRITSECTRW_STRICT, then creates the read (multi) and write event semaphores and sets u32Magic to RTCRITSECTRW_MAGIC on success; on failure it destroys the validator records again with RTLockValidatorRecSharedDestroy/RTLockValidatorRecExclDestroy.

The internal workers rtSemRWRequestRead/rtSemRWRequestWrite become rtCritSectRwEnterShared/rtCritSectRwEnterExcl. The RTMSINTERVAL cMillies and fInterruptible parameters are replaced by a single bool fTryOnly: all blocking waits use RT_INDEFINITE_WAIT (plain RTSemEventMultiWait/RTSemEventWait, no NoResume variants), the try-only path returns VERR_SEM_BUSY where the old code returned VERR_TIMEOUT, input validation is now AssertPtr plus a magic check returning VERR_SEM_DESTROYED instead of VERR_INVALID_HANDLE (the NIL-handle early return is gone), and the strictness code is guarded by RTCRITSECTRW_STRICT and works on pThis->pValidatorRead/pValidatorWrite. RTSemRWGetReadCount is not renamed in this revision, but it now takes a PRTCRITSECTRW and uses the RTCSRW_* masks.

RTCritSectRwDelete replaces RTSemRWDestroy:

 RTDECL(int) RTCritSectRwDelete(PRTCRITSECTRW pThis)
 {
     /*
      * Assert free waiters and so on.
      */
     AssertPtr(pThis);
     Assert(pThis->u32Magic == RTCRITSECTRW_MAGIC);
     //Assert(pThis->cNestings == 0);
     //Assert(pThis->cLockers == -1);
     Assert(pThis->hNativeWriter == NIL_RTNATIVETHREAD);

     /*
      * Invalidate the structure and free the semaphores.
      */
     if (!ASMAtomicCmpXchgU32(&pThis->u32Magic, RTCRITSECTRW_MAGIC_DEAD, RTCRITSECTRW_MAGIC))
         return VERR_INVALID_PARAMETER;

     pThis->fFlags   = 0;
     pThis->u64State = 0;

     RTSEMEVENT      hEvtWrite = pThis->hEvtWrite;
     pThis->hEvtWrite = NIL_RTSEMEVENT;
     RTSEMEVENTMULTI hEvtRead  = pThis->hEvtRead;
     pThis->hEvtRead  = NIL_RTSEMEVENTMULTI;

     int rc1 = RTSemEventDestroy(hEvtWrite);     AssertRC(rc1);
     int rc2 = RTSemEventMultiDestroy(hEvtRead); AssertRC(rc2);

     RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
     RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);

     return RT_SUCCESS(rc1) ? rc2 : rc1;
 }
 RT_EXPORT_SYMBOL(RTCritSectRwDelete);
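Taken together, the renamed API reads like the classic RTCritSect one rather than a semaphore-handle API. A minimal usage sketch of the renamed entry points (the globals and helper names here are made up for illustration; return codes are left unchecked):

    #include <iprt/critsect.h>
    #include <iprt/assert.h>

    static RTCRITSECTRW g_CritSect;   /* caller-allocated, unlike the old RTSEMRW handle */
    static unsigned     g_uValue;

    int exampleInit(void)
    {
        return RTCritSectRwInit(&g_CritSect);
    }

    unsigned exampleRead(void)
    {
        RTCritSectRwEnterShared(&g_CritSect);   /* several readers may be inside at once */
        unsigned u = g_uValue;
        RTCritSectRwLeaveShared(&g_CritSect);
        return u;
    }

    void exampleWrite(unsigned uNew)
    {
        RTCritSectRwEnterExcl(&g_CritSect);     /* waits indefinitely; TryEnterExcl returns VERR_SEM_BUSY instead */
        Assert(RTCritSectRwIsWriteOwner(&g_CritSect));
        g_uValue = uNew;
        RTCritSectRwLeaveExcl(&g_CritSect);
    }

    void exampleTerm(void)
    {
        RTCritSectRwDelete(&g_CritSect);
    }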
trunk/src/VBox/Runtime/generic/semrw-lockless-generic.cpp (r45019 -> r45110)

     /** Magic value (RTSEMRW_MAGIC). */
     uint32_t volatile       u32Magic;
-    uint32_t                u32Padding; /**< alignment padding.*/
-    /* The state variable.
+    /** Indicates whether hEvtRead needs resetting. */
+    bool volatile           fNeedReset;
+
+    /** The state variable.
      * All accesses are atomic and it bits are defined like this:
      *      Bits 0..14  - cReads.
…
      * finish. */
     RTSEMEVENTMULTI         hEvtRead;
-    /** Indicates whether hEvtRead needs resetting. */
-    bool volatile           fNeedReset;

 #ifdef RTSEMRW_STRICT
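Moving fNeedReset up into the slot previously taken by u32Padding keeps the 64-bit state word naturally aligned without growing the structure. A stand-alone illustration of the layout idea (a simplified stand-in type, not the real RTSEMRWINTERNAL):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef struct EXAMPLERWSTATE
    {
        uint32_t volatile u32Magic;     /* offset 0 */
        bool     volatile fNeedReset;   /* typically offset 4: where the old u32Padding member sat */
        uint64_t volatile u64State;     /* typically offset 8: padded up so atomic access stays aligned */
    } EXAMPLERWSTATE;

    /* The compiler inserts padding between fNeedReset and u64State, so the
       64-bit state word keeps its natural alignment. */
    _Static_assert(offsetof(EXAMPLERWSTATE, u64State) % 8 == 0, "u64State must be 8-byte aligned");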
trunk/src/VBox/Runtime/include/internal/strict.h (r44529 -> r45110)

 /*
- * Copyright (C) 2007-2010 Oracle Corporation
+ * Copyright (C) 2007-2013 Oracle Corporation
  *
  * This file is part of VirtualBox Open Source Edition (OSE), as
…
 #if (!defined(RTCRITSECT_STRICT) && defined(IN_RING3) && defined(RT_LOCK_STRICT)) || defined(DOXYGEN_RUNNING)
 # define RTCRITSECT_STRICT
 #endif
+
+/** @def RTCRITSECTRW_STRICT
+ * Enables strictness checks and lock accounting of the RTCritSectRw API.
+ */
+#if (!defined(RTCRITSECTRW_STRICT) && defined(IN_RING3) && defined(RT_LOCK_STRICT)) || defined(DOXYGEN_RUNNING)
+# define RTCRITSECTRW_STRICT
+#endif
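The new define follows the same pattern as RTCRITSECT_STRICT: it is only set for ring-3 builds with RT_LOCK_STRICT (or when running Doxygen), and the implementation guards its lock-validator calls with it, as seen throughout the critsectrw-generic.cpp diff above. A small sketch of that consumption pattern (hypothetical helper, heavily simplified):

    #include <iprt/critsect.h>
    #include <iprt/lockvalidator.h>
    #include <iprt/thread.h>

    #include "internal/strict.h"    /* may or may not define RTCRITSECTRW_STRICT */

    static void exampleRecordSharedOwner(PRTCRITSECTRW pThis)
    {
    #ifdef RTCRITSECTRW_STRICT
        /* Strict builds carry validator records and account for each shared owner. */
        RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, RTThreadSelf(), NULL /*pSrcPos*/);
    #else
        /* Non-strict builds compile the accounting out entirely. */
        NOREF(pThis);
    #endif
    }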