VirtualBox

source: vbox/trunk/src/VBox/Runtime/generic/critsectrw-generic.cpp@ 45269

Last change on this file since 45269 was 45151, checked in by vboxsync, 12 years ago

iprt/critsect.h: Cleanups (R/W mostly).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 30.6 KB
Line 
1/* $Id: critsectrw-generic.cpp 45151 2013-03-23 20:35:23Z vboxsync $ */
2/** @file
3 * IPRT - Read/Write Critical Section, Generic.
4 */
5
6/*
7 * Copyright (C) 2009-2013 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*******************************************************************************
29* Header Files *
30*******************************************************************************/
31#define RTCRITSECTRW_WITHOUT_REMAPPING
32#define RTASSERT_QUIET
33#include <iprt/critsect.h>
34#include "internal/iprt.h"
35
36#include <iprt/asm.h>
37#include <iprt/assert.h>
38#include <iprt/err.h>
39#include <iprt/lockvalidator.h>
40#include <iprt/mem.h>
41#include <iprt/semaphore.h>
42#include <iprt/thread.h>
43
44#include "internal/magics.h"
45#include "internal/strict.h"
46
47
48
49RTDECL(int) RTCritSectRwInit(PRTCRITSECTRW pThis)
50{
51 return RTCritSectRwInitEx(pThis, 0, NIL_RTLOCKVALCLASS, RTLOCKVAL_SUB_CLASS_NONE, "RTCritSectRw");
52}
53RT_EXPORT_SYMBOL(RTCritSectRwInit);
54
55
56RTDECL(int) RTCritSectRwInitEx(PRTCRITSECTRW pThis, uint32_t fFlags,
57 RTLOCKVALCLASS hClass, uint32_t uSubClass, const char *pszNameFmt, ...)
58{
59 int rc;
60 AssertReturn(!(fFlags & ~( RTCRITSECT_FLAGS_NO_NESTING | RTCRITSECT_FLAGS_NO_LOCK_VAL | RTCRITSECT_FLAGS_BOOTSTRAP_HACK
61 | RTCRITSECT_FLAGS_NOP )),
62 VERR_INVALID_PARAMETER);
63
64 /*
65 * Initialize the structure, allocate the lock validator stuff and sems.
66 */
67 pThis->u32Magic = RTCRITSECTRW_MAGIC_DEAD;
68 pThis->fNeedReset = false;
69 pThis->u64State = 0;
70 pThis->hNativeWriter = NIL_RTNATIVETHREAD;
71 pThis->cWriterReads = 0;
72 pThis->cWriteRecursions = 0;
73 pThis->hEvtWrite = NIL_RTSEMEVENT;
74 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
75 pThis->pValidatorWrite = NULL;
76 pThis->pValidatorRead = NULL;
77#if HC_ARCH_BITS == 32
78 pThis->HCPtrPadding = NIL_RTHCPTR;
79#endif
80
81#ifdef RTCRITSECTRW_STRICT
82 bool const fLVEnabled = !(fFlags & RTSEMRW_FLAGS_NO_LOCK_VAL);
83 if (!pszNameFmt)
84 {
85 static uint32_t volatile s_iAnon = 0;
86 uint32_t i = ASMAtomicIncU32(&s_iAnon) - 1;
87 rc = RTLockValidatorRecExclCreate(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
88 fLVEnabled, "RTCritSectRw-%u", i);
89 if (RT_SUCCESS(rc))
90 rc = RTLockValidatorRecSharedCreate(&pThis->pValidatorRead, hClass, uSubClass, pThis,
91 false /*fSignaller*/, fLVEnabled, "RTCritSectRw-%u", i);
92 }
93 else
94 {
95 va_list va;
96 va_start(va, pszNameFmt);
97 rc = RTLockValidatorRecExclCreateV(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
98 fLVEnabled, pszNameFmt, va);
99 va_end(va);
100 if (RT_SUCCESS(rc))
101 {
102 va_start(va, pszNameFmt);
103 RTLockValidatorRecSharedCreateV(&pThis->pValidatorRead, hClass, uSubClass, pThis,
104 false /*fSignaller*/, fLVEnabled, pszNameFmt, va);
105 va_end(va);
106 }
107 }
108 if (RT_SUCCESS(rc))
109 rc = RTLockValidatorRecMakeSiblings(&pThis->pValidatorWrite->Core, &pThis->pValidatorRead->Core);
110
111 if (RT_SUCCESS(rc))
112#endif
113 {
114 rc = RTSemEventMultiCreate(&pThis->hEvtRead);
115 if (RT_SUCCESS(rc))
116 {
117 rc = RTSemEventCreate(&pThis->hEvtWrite);
118 if (RT_SUCCESS(rc))
119 {
120 pThis->u32Magic = RTCRITSECTRW_MAGIC;
121 return VINF_SUCCESS;
122 }
123 RTSemEventMultiDestroy(pThis->hEvtRead);
124 }
125 }
126
127#ifdef RTCRITSECTRW_STRICT
128 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
129 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
130#endif
131 return rc;
132}
133RT_EXPORT_SYMBOL(RTCritSectRwInitEx);
134
135
/**
 * Changes the lock validator sub-class of the critical section.
 *
 * Only has an effect in strict (lock validator) builds; otherwise it is
 * a no-op that reports failure.
 *
 * @returns The value returned by RTLockValidatorRecExclSetSubClass
 *          (presumably the previous sub-class -- see the validator API),
 *          or RTLOCKVAL_SUB_CLASS_INVALID on bad handle, NOP section,
 *          or non-strict build.
 * @param   pThis       The R/W critical section.
 * @param   uSubClass   The new sub-class value.
 */
RTDECL(uint32_t) RTCritSectRwSetSubClass(PRTCRITSECTRW pThis, uint32_t uSubClass)
{
    AssertPtrReturn(pThis, RTLOCKVAL_SUB_CLASS_INVALID);
    AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, RTLOCKVAL_SUB_CLASS_INVALID);
#ifdef RTCRITSECTRW_STRICT
    AssertReturn(!(pThis->fFlags & RTCRITSECT_FLAGS_NOP), RTLOCKVAL_SUB_CLASS_INVALID);

    /* Update both validator records; the exclusive record supplies the return value. */
    RTLockValidatorRecSharedSetSubClass(pThis->pValidatorRead, uSubClass);
    return RTLockValidatorRecExclSetSubClass(pThis->pValidatorWrite, uSubClass);
#else
    NOREF(uSubClass);
    return RTLOCKVAL_SUB_CLASS_INVALID;
#endif
}
RT_EXPORT_SYMBOL(RTCritSectRwSetSubClass);
151
152
/**
 * Worker for acquiring the critical section in shared (read) mode.
 *
 * The 64-bit u64State word packs the current direction (read/write), the
 * reader count, the waiting-reader count and the writer count; every state
 * transition below is made via compare-and-swap retry loops.
 *
 * @returns VINF_SUCCESS, VERR_SEM_BUSY (fTryOnly and held for writing),
 *          VERR_SEM_DESTROYED (deleted under us), or a lock validator
 *          status code in strict builds.
 * @param   pThis       The R/W critical section.
 * @param   pSrcPos     Source position for the lock validator (strict
 *                      builds; may be NULL otherwise).
 * @param   fTryOnly    Whether to fail with VERR_SEM_BUSY instead of
 *                      blocking when the section flows the wrong way.
 */
static int rtCritSectRwEnterShared(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
{
    /*
     * Validate input.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);

#ifdef RTCRITSECTRW_STRICT
    RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
    if (!fTryOnly)
    {
        int rc9;
        RTNATIVETHREAD hNativeWriter;
        ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
        /* NOTE(review): a native thread handle is compared against NIL_RTTHREAD
           rather than NIL_RTNATIVETHREAD; presumably the NIL values coincide --
           confirm against the handle typedefs. */
        if (hNativeWriter != NIL_RTTHREAD && hNativeWriter == RTThreadNativeSelf())
            /* Write owner doing a read recursion: check order on the exclusive record. */
            rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
        else
            rc9 = RTLockValidatorRecSharedCheckOrder(pThis->pValidatorRead, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
        if (RT_FAILURE(rc9))
            return rc9;
    }
#endif

    /*
     * Get cracking...
     */
    uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
    uint64_t u64OldState = u64State;

    for (;;)
    {
        if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
        {
            /* It flows in the right direction, try follow it before it changes:
               just bump the reader count. */
            uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);
            u64State &= ~RTCSRW_CNT_RD_MASK;
            u64State |= c << RTCSRW_CNT_RD_SHIFT;
            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
            {
#ifdef RTCRITSECTRW_STRICT
                RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
#endif
                break;
            }
        }
        else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
        {
            /* Wrong direction, but we're alone here and can simply try switch the direction. */
            u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
            u64State |= (UINT64_C(1) << RTCSRW_CNT_RD_SHIFT) | (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT);
            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
            {
                Assert(!pThis->fNeedReset);
#ifdef RTCRITSECTRW_STRICT
                RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
#endif
                break;
            }
        }
        else
        {
            /* Is the writer perhaps doing a read recursion?  That is tracked
               separately in cWriterReads and never touches u64State. */
            RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
            RTNATIVETHREAD hNativeWriter;
            ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
            if (hNativeSelf == hNativeWriter)
            {
#ifdef RTCRITSECTRW_STRICT
                int rc9 = RTLockValidatorRecExclRecursionMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core, pSrcPos);
                if (RT_FAILURE(rc9))
                    return rc9;
#endif
                Assert(pThis->cWriterReads < UINT32_MAX / 2);
                ASMAtomicIncU32(&pThis->cWriterReads);
                return VINF_SUCCESS; /* don't break! */
            }

            /* If we're only trying, return already. */
            if (fTryOnly)
                return VERR_SEM_BUSY;

            /* Add ourselves to the queue and wait for the direction to change:
               bump both the reader count and the waiting-reader count. */
            uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);

            uint64_t cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
            cWait++;
            Assert(cWait <= c);
            Assert(cWait < RTCSRW_CNT_MASK / 2);

            u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
            u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);

            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
            {
                /* Successfully queued; block on the read event until the
                   direction flips back to read. */
                for (uint32_t iLoop = 0; ; iLoop++)
                {
                    int rc;
#ifdef RTCRITSECTRW_STRICT
                    rc = RTLockValidatorRecSharedCheckBlocking(pThis->pValidatorRead, hThreadSelf, pSrcPos, true,
                                                               RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_READ, false);
                    if (RT_SUCCESS(rc))
#else
                    RTTHREAD hThreadSelf = RTThreadSelf();
                    RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_READ, false);
#endif
                    {
                        rc = RTSemEventMultiWait(pThis->hEvtRead, RT_INDEFINITE_WAIT);
                        RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_READ);
                        if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
                            return VERR_SEM_DESTROYED;
                    }
                    if (RT_FAILURE(rc))
                    {
                        /* Decrement the counts and return the error. */
                        for (;;)
                        {
                            u64OldState = u64State = ASMAtomicReadU64(&pThis->u64State);
                            c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT; Assert(c > 0);
                            c--;
                            cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT; Assert(cWait > 0);
                            cWait--;
                            u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
                            u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
                            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
                                break;
                        }
                        return rc;
                    }

                    /* The writer leaving last sets fNeedReset when flipping the
                       direction; re-check the direction in case of a spurious wakeup. */
                    Assert(pThis->fNeedReset);
                    u64State = ASMAtomicReadU64(&pThis->u64State);
                    if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
                        break;
                    AssertMsg(iLoop < 1, ("%u\n", iLoop));
                }

                /* Decrement the wait count and maybe reset the semaphore (if we're last). */
                for (;;)
                {
                    u64OldState = u64State;

                    cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
                    Assert(cWait > 0);
                    cWait--;
                    u64State &= ~RTCSRW_WAIT_CNT_RD_MASK;
                    u64State |= cWait << RTCSRW_WAIT_CNT_RD_SHIFT;

                    if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
                    {
                        if (cWait == 0)
                        {
                            /* Last waiter resets the multi-event so the next
                               write->read flip blocks readers again. */
                            if (ASMAtomicXchgBool(&pThis->fNeedReset, false))
                            {
                                int rc = RTSemEventMultiReset(pThis->hEvtRead);
                                AssertRCReturn(rc, rc);
                            }
                        }
                        break;
                    }
                    u64State = ASMAtomicReadU64(&pThis->u64State);
                }

#ifdef RTCRITSECTRW_STRICT
                RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
#endif
                break;
            }
        }

        if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
            return VERR_SEM_DESTROYED;

        /* CAS lost a race; reload the state and retry. */
        ASMNopPause();
        u64State = ASMAtomicReadU64(&pThis->u64State);
        u64OldState = u64State;
    }

    /* got it! */
    Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT));
    return VINF_SUCCESS;

}
340
341
342RTDECL(int) RTCritSectRwEnterShared(PRTCRITSECTRW pThis)
343{
344#ifndef RTCRITSECTRW_STRICT
345 return rtCritSectRwEnterShared(pThis, NULL, false /*fTryOnly*/);
346#else
347 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
348 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
349#endif
350}
351RT_EXPORT_SYMBOL(RTCritSectRwEnterShared);
352
353
/**
 * RTCritSectRwEnterShared variant taking the caller's source position for
 * the lock validator (debug API).
 *
 * @returns IPRT status code from the shared-enter worker.
 * @param   pThis   The R/W critical section.
 * @param   uId     Unique caller identifier for the validator.
 */
RTDECL(int) RTCritSectRwEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
{
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
    return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
}
RT_EXPORT_SYMBOL(RTCritSectRwEnterSharedDebug);
360
361
362RTDECL(int) RTCritSectRwTryEnterShared(PRTCRITSECTRW pThis)
363{
364#ifndef RTCRITSECTRW_STRICT
365 return rtCritSectRwEnterShared(pThis, NULL, true /*fTryOnly*/);
366#else
367 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
368 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
369#endif
370}
371RT_EXPORT_SYMBOL(RTCritSectRwEnterShared);
372
373
374RTDECL(int) RTCritSectRwTryEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
375{
376 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
377 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
378}
379RT_EXPORT_SYMBOL(RTCritSectRwEnterSharedDebug);
380
381
382
383RTDECL(int) RTCritSectRwLeaveShared(PRTCRITSECTRW pThis)
384{
385 /*
386 * Validate handle.
387 */
388 AssertPtr(pThis);
389 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
390
391 /*
392 * Check the direction and take action accordingly.
393 */
394 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
395 uint64_t u64OldState = u64State;
396 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
397 {
398#ifdef RTCRITSECTRW_STRICT
399 int rc9 = RTLockValidatorRecSharedCheckAndRelease(pThis->pValidatorRead, NIL_RTTHREAD);
400 if (RT_FAILURE(rc9))
401 return rc9;
402#endif
403 for (;;)
404 {
405 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
406 AssertReturn(c > 0, VERR_NOT_OWNER);
407 c--;
408
409 if ( c > 0
410 || (u64State & RTCSRW_CNT_RD_MASK) == 0)
411 {
412 /* Don't change the direction. */
413 u64State &= ~RTCSRW_CNT_RD_MASK;
414 u64State |= c << RTCSRW_CNT_RD_SHIFT;
415 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
416 break;
417 }
418 else
419 {
420 /* Reverse the direction and signal the reader threads. */
421 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_DIR_MASK);
422 u64State |= RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT;
423 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
424 {
425 int rc = RTSemEventSignal(pThis->hEvtWrite);
426 AssertRC(rc);
427 break;
428 }
429 }
430
431 ASMNopPause();
432 u64State = ASMAtomicReadU64(&pThis->u64State);
433 u64OldState = u64State;
434 }
435 }
436 else
437 {
438 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
439 RTNATIVETHREAD hNativeWriter;
440 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
441 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
442 AssertReturn(pThis->cWriterReads > 0, VERR_NOT_OWNER);
443#ifdef RTCRITSECTRW_STRICT
444 int rc = RTLockValidatorRecExclUnwindMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core);
445 if (RT_FAILURE(rc))
446 return rc;
447#endif
448 ASMAtomicDecU32(&pThis->cWriterReads);
449 }
450
451 return VINF_SUCCESS;
452}
453RT_EXPORT_SYMBOL(RTCritSectRwLeaveShared);
454
455
/**
 * Worker for acquiring the critical section in exclusive (write) mode.
 *
 * Recursion by the current writer is handled via cWriteRecursions; all
 * other acquisition goes through compare-and-swap loops on u64State,
 * followed by a race for the hNativeWriter ownership slot.
 *
 * @returns VINF_SUCCESS, VERR_SEM_BUSY (fTryOnly), VERR_SEM_DESTROYED,
 *          or a lock validator status code in strict builds.
 * @param   pThis       The R/W critical section.
 * @param   pSrcPos     Source position for the lock validator (strict
 *                      builds; may be NULL otherwise).
 * @param   fTryOnly    Whether this is a try-enter call.
 */
static int rtCritSectRwEnterExcl(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
{
    /*
     * Validate input.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);

#ifdef RTCRITSECTRW_STRICT
    RTTHREAD hThreadSelf = NIL_RTTHREAD;
    if (!fTryOnly)
    {
        hThreadSelf = RTThreadSelfAutoAdopt();
        int rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
        if (RT_FAILURE(rc9))
            return rc9;
    }
#endif

    /*
     * Check if we're already the owner and just recursing.
     */
    RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
    RTNATIVETHREAD hNativeWriter;
    ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
    if (hNativeSelf == hNativeWriter)
    {
        Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
#ifdef RTCRITSECTRW_STRICT
        int rc9 = RTLockValidatorRecExclRecursion(pThis->pValidatorWrite, pSrcPos);
        if (RT_FAILURE(rc9))
            return rc9;
#endif
        Assert(pThis->cWriteRecursions < UINT32_MAX / 2);
        ASMAtomicIncU32(&pThis->cWriteRecursions);
        return VINF_SUCCESS;
    }

    /*
     * Get cracking.
     */
    uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
    uint64_t u64OldState = u64State;

    for (;;)
    {
        /* NOTE(review): this first branch also covers "read direction with
           readers/writers present", so the fTryOnly branch below looks
           unreachable and a try-enter can queue itself as a waiting writer --
           presumably inherited upstream behavior; confirm before relying on
           strict non-blocking try semantics. */
        if (   (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
            || (u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) != 0)
        {
            /* It flows in the right direction, try follow it before it changes:
               bump the writer count. */
            uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);
            u64State &= ~RTCSRW_CNT_WR_MASK;
            u64State |= c << RTCSRW_CNT_WR_SHIFT;
            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
                break;
        }
        else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
        {
            /* Wrong direction, but we're alone here and can simply try switch the direction. */
            u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
            u64State |= (UINT64_C(1) << RTCSRW_CNT_WR_SHIFT) | (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT);
            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
                break;
        }
        else if (fTryOnly)
            /* Wrong direction and we're not supposed to wait, just return. */
            return VERR_SEM_BUSY;
        else
        {
            /* Add ourselves to the write count and break out to do the wait. */
            uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);
            u64State &= ~RTCSRW_CNT_WR_MASK;
            u64State |= c << RTCSRW_CNT_WR_SHIFT;
            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
                break;
        }

        if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
            return VERR_SEM_DESTROYED;

        /* CAS lost a race; reload the state and retry. */
        ASMNopPause();
        u64State = ASMAtomicReadU64(&pThis->u64State);
        u64OldState = u64State;
    }

    /*
     * If we're in write mode now try grab the ownership. Play fair if there
     * are threads already waiting.
     */
    bool fDone = (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
              && (   ((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT) == 1
                  || fTryOnly);
    if (fDone)
        ASMAtomicCmpXchgHandle(&pThis->hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
    if (!fDone)
    {
        /*
         * Wait for our turn.
         */
        for (uint32_t iLoop = 0; ; iLoop++)
        {
            int rc;
#ifdef RTCRITSECTRW_STRICT
            if (!fTryOnly)
            {
                if (hThreadSelf == NIL_RTTHREAD)
                    hThreadSelf = RTThreadSelfAutoAdopt();
                rc = RTLockValidatorRecExclCheckBlocking(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true,
                                                         RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_WRITE, false);
            }
            else
                rc = VINF_SUCCESS;
            if (RT_SUCCESS(rc))
#else
            RTTHREAD hThreadSelf = RTThreadSelf();
            RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_WRITE, false);
#endif
            {
                rc = RTSemEventWait(pThis->hEvtWrite, RT_INDEFINITE_WAIT);
                RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_WRITE);
                if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
                    return VERR_SEM_DESTROYED;
            }
            if (RT_FAILURE(rc))
            {
                /* Decrement the counts and return the error. */
                for (;;)
                {
                    u64OldState = u64State = ASMAtomicReadU64(&pThis->u64State);
                    uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
                    c--;
                    u64State &= ~RTCSRW_CNT_WR_MASK;
                    u64State |= c << RTCSRW_CNT_WR_SHIFT;
                    if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
                        break;
                }
                return rc;
            }

            /* Woken up: if the section is in write mode, race for the owner slot. */
            u64State = ASMAtomicReadU64(&pThis->u64State);
            if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
            {
                ASMAtomicCmpXchgHandle(&pThis->hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
                if (fDone)
                    break;
            }
            AssertMsg(iLoop < 1000, ("%u\n", iLoop)); /* may loop a few times here... */
        }
    }

    /*
     * Got it!
     */
    Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
    ASMAtomicWriteU32(&pThis->cWriteRecursions, 1);
    Assert(pThis->cWriterReads == 0);
#ifdef RTCRITSECTRW_STRICT
    RTLockValidatorRecExclSetOwner(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true);
#endif

    return VINF_SUCCESS;
}
622
623
624RTDECL(int) RTCritSectRwEnterExcl(PRTCRITSECTRW pThis)
625{
626#ifndef RTCRITSECTRW_STRICT
627 return rtCritSectRwEnterExcl(pThis, NULL, false /*fTryAgain*/);
628#else
629 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
630 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryAgain*/);
631#endif
632}
633RT_EXPORT_SYMBOL(RTCritSectRwEnterExcl);
634
635
/**
 * RTCritSectRwEnterExcl variant taking the caller's source position for
 * the lock validator (debug API).
 *
 * @returns IPRT status code from the exclusive-enter worker.
 * @param   pThis   The R/W critical section.
 * @param   uId     Unique caller identifier for the validator.
 */
RTDECL(int) RTCritSectRwEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
{
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
    return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryAgain*/);
}
RT_EXPORT_SYMBOL(RTCritSectRwEnterExclDebug);
642
643
644RTDECL(int) RTCritSectRwTryEnterExcl(PRTCRITSECTRW pThis)
645{
646#ifndef RTCRITSECTRW_STRICT
647 return rtCritSectRwEnterExcl(pThis, NULL, true /*fTryAgain*/);
648#else
649 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
650 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryAgain*/);
651#endif
652}
653RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExcl);
654
655
/**
 * RTCritSectRwTryEnterExcl variant taking the caller's source position for
 * the lock validator (debug API).
 *
 * @returns VINF_SUCCESS, VERR_SEM_BUSY or VERR_SEM_DESTROYED.
 * @param   pThis   The R/W critical section.
 * @param   uId     Unique caller identifier for the validator.
 */
RTDECL(int) RTCritSectRwTryEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
{
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
    return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryAgain*/);
}
RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExclDebug);
662
663
/**
 * Leaves the critical section held in exclusive (write) mode.
 *
 * Only the final (non-recursive) leave releases ownership; it then either
 * hands the section to the next waiting writer or flips the direction to
 * read and wakes all waiting readers.
 *
 * @returns VINF_SUCCESS, VERR_NOT_OWNER, VERR_WRONG_ORDER (read recursions
 *          still held), VERR_SEM_DESTROYED, or a lock validator status code
 *          in strict builds.
 * @param   pThis   The R/W critical section.
 */
RTDECL(int) RTCritSectRwLeaveExcl(PRTCRITSECTRW pThis)
{
    /*
     * Validate handle.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);

    /* Only the current write owner may leave. */
    RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
    RTNATIVETHREAD hNativeWriter;
    ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
    AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);

    /*
     * Unwind a recursion.
     */
    if (pThis->cWriteRecursions == 1)
    {
        AssertReturn(pThis->cWriterReads == 0, VERR_WRONG_ORDER); /* (must release all read recursions before the final write.) */
#ifdef RTCRITSECTRW_STRICT
        int rc9 = RTLockValidatorRecExclReleaseOwner(pThis->pValidatorWrite, true);
        if (RT_FAILURE(rc9))
            return rc9;
#endif
        /*
         * Update the state.
         */
        ASMAtomicWriteU32(&pThis->cWriteRecursions, 0);
        ASMAtomicWriteHandle(&pThis->hNativeWriter, NIL_RTNATIVETHREAD);

        for (;;)
        {
            uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
            uint64_t u64OldState = u64State;

            uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
            Assert(c > 0);
            c--;

            if (   c > 0
                || (u64State & RTCSRW_CNT_RD_MASK) == 0)
            {
                /* Don't change the direction, wake up the next writer if any. */
                u64State &= ~RTCSRW_CNT_WR_MASK;
                u64State |= c << RTCSRW_CNT_WR_SHIFT;
                if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
                {
                    if (c > 0)
                    {
                        int rc = RTSemEventSignal(pThis->hEvtWrite);
                        AssertRC(rc);
                    }
                    break;
                }
            }
            else
            {
                /* Reverse the direction and signal the reader threads.
                   fNeedReset tells the last waking reader to reset the
                   multi-event semaphore again. */
                u64State &= ~(RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
                u64State |= RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT;
                if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
                {
                    Assert(!pThis->fNeedReset);
                    ASMAtomicWriteBool(&pThis->fNeedReset, true);
                    int rc = RTSemEventMultiSignal(pThis->hEvtRead);
                    AssertRC(rc);
                    break;
                }
            }

            /* CAS lost a race; retry unless the section got destroyed. */
            ASMNopPause();
            if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
                return VERR_SEM_DESTROYED;
        }
    }
    else
    {
        /* Plain recursion unwind; ownership is kept. */
        Assert(pThis->cWriteRecursions != 0);
#ifdef RTCRITSECTRW_STRICT
        int rc9 = RTLockValidatorRecExclUnwind(pThis->pValidatorWrite);
        if (RT_FAILURE(rc9))
            return rc9;
#endif
        ASMAtomicDecU32(&pThis->cWriteRecursions);
    }

    return VINF_SUCCESS;
}
RT_EXPORT_SYMBOL(RTCritSectRwLeaveExcl);
753
754
755RTDECL(bool) RTCritSectRwIsWriteOwner(PRTCRITSECTRW pThis)
756{
757 /*
758 * Validate handle.
759 */
760 AssertPtr(pThis);
761 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
762
763 /*
764 * Check ownership.
765 */
766 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
767 RTNATIVETHREAD hNativeWriter;
768 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
769 return hNativeWriter == hNativeSelf;
770}
771RT_EXPORT_SYMBOL(RTCritSectRwIsWriteOwner);
772
773
774RTDECL(bool) RTCritSectRwIsReadOwner(PRTCRITSECTRW pThis, bool fWannaHear)
775{
776 /*
777 * Validate handle.
778 */
779 AssertPtr(pThis);
780 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
781
782 /*
783 * Inspect the state.
784 */
785 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
786 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
787 {
788 /*
789 * It's in write mode, so we can only be a reader if we're also the
790 * current writer.
791 */
792 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
793 RTNATIVETHREAD hWriter;
794 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hWriter);
795 return hWriter == hNativeSelf;
796 }
797
798 /*
799 * Read mode. If there are no current readers, then we cannot be a reader.
800 */
801 if (!(u64State & RTCSRW_CNT_RD_MASK))
802 return false;
803
804#ifdef RTCRITSECTRW_STRICT
805 /*
806 * Ask the lock validator.
807 */
808 return RTLockValidatorRecSharedIsOwner(pThis->pValidatorRead, NIL_RTTHREAD);
809#else
810 /*
811 * Ok, we don't know, just tell the caller what he want to hear.
812 */
813 return fWannaHear;
814#endif
815}
816RT_EXPORT_SYMBOL(RTCritSectRwIsReadOwner);
817
818
819RTDECL(uint32_t) RTCritSectRwGetWriteRecursion(PRTCRITSECTRW pThis)
820{
821 /*
822 * Validate handle.
823 */
824 AssertPtr(pThis);
825 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
826
827 /*
828 * Return the requested data.
829 */
830 return pThis->cWriteRecursions;
831}
832RT_EXPORT_SYMBOL(RTCritSectRwGetWriteRecursion);
833
834
835RTDECL(uint32_t) RTCritSectRwGetWriterReadRecursion(PRTCRITSECTRW pThis)
836{
837 /*
838 * Validate handle.
839 */
840 AssertPtr(pThis);
841 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
842
843 /*
844 * Return the requested data.
845 */
846 return pThis->cWriterReads;
847}
848RT_EXPORT_SYMBOL(RTCritSectRwGetWriterReadRecursion);
849
850
851RTDECL(uint32_t) RTCritSectRwGetReadCount(PRTCRITSECTRW pThis)
852{
853 /*
854 * Validate input.
855 */
856 AssertPtr(pThis);
857 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
858
859 /*
860 * Return the requested data.
861 */
862 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
863 if ((u64State & RTCSRW_DIR_MASK) != (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
864 return 0;
865 return (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
866}
867RT_EXPORT_SYMBOL(RTCritSectRwGetReadCount);
868
869
870RTDECL(int) RTCritSectRwDelete(PRTCRITSECTRW pThis)
871{
872 /*
873 * Assert free waiters and so on.
874 */
875 AssertPtr(pThis);
876 Assert(pThis->u32Magic == RTCRITSECTRW_MAGIC);
877 //Assert(pThis->cNestings == 0);
878 //Assert(pThis->cLockers == -1);
879 Assert(pThis->hNativeWriter == NIL_RTNATIVETHREAD);
880
881 /*
882 * Invalidate the structure and free the semaphores.
883 */
884 if (!ASMAtomicCmpXchgU32(&pThis->u32Magic, RTCRITSECTRW_MAGIC_DEAD, RTCRITSECTRW_MAGIC))
885 return VERR_INVALID_PARAMETER;
886
887 pThis->fFlags = 0;
888 pThis->u64State = 0;
889
890 RTSEMEVENT hEvtWrite = pThis->hEvtWrite;
891 pThis->hEvtWrite = NIL_RTSEMEVENT;
892 RTSEMEVENTMULTI hEvtRead = pThis->hEvtRead;
893 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
894
895 int rc1 = RTSemEventDestroy(hEvtWrite); AssertRC(rc1);
896 int rc2 = RTSemEventMultiDestroy(hEvtRead); AssertRC(rc2);
897
898 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
899 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
900
901 return RT_SUCCESS(rc1) ? rc2 : rc1;
902}
903RT_EXPORT_SYMBOL(RTCritSectRwDelete);
904
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette