VirtualBox

source: vbox/trunk/src/VBox/Runtime/generic/critsectrw-generic.cpp@59036

Last change on this file since 59036 was 59036, checked in by vboxsync, 9 years ago

IPRT,VMMR0: Added trace points to both the IPRT critical section types.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 40.2 KB
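
Before the listing itself, here is a minimal usage sketch (not part of the file) that exercises the public entry points implemented below: RTCritSectRwInit, RTCritSectRwEnterShared/RTCritSectRwLeaveShared, RTCritSectRwEnterExcl/RTCritSectRwLeaveExcl and RTCritSectRwDelete. The protected counter and the example* function names are invented for illustration, and most status codes are ignored for brevity.

/* Illustrative sketch only, not part of critsectrw-generic.cpp. */
#include <iprt/critsect.h>
#include <iprt/err.h>

static RTCRITSECTRW g_CritSect;   /* the read/write critical section  */
static uint64_t     g_cHits;      /* hypothetical state it protects   */

int exampleInit(void)
{
    /* Plain init; RTCritSectRwInitEx additionally takes flags, a lock
       validator class/sub-class and a name format string. */
    return RTCritSectRwInit(&g_CritSect);
}

uint64_t exampleRead(void)
{
    /* Any number of threads may hold the section in shared (read) mode. */
    RTCritSectRwEnterShared(&g_CritSect);
    uint64_t cHits = g_cHits;
    RTCritSectRwLeaveShared(&g_CritSect);
    return cHits;
}

void exampleWrite(void)
{
    /* Exclusive (write) mode; the owner may recurse and may also enter
       the section in shared mode while holding it exclusively. */
    RTCritSectRwEnterExcl(&g_CritSect);
    g_cHits++;
    RTCritSectRwLeaveExcl(&g_CritSect);
}

void exampleTerm(void)
{
    RTCritSectRwDelete(&g_CritSect);
}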
1/* $Id: critsectrw-generic.cpp 59036 2015-12-07 17:49:30Z vboxsync $ */
2/** @file
3 * IPRT - Read/Write Critical Section, Generic.
4 */
5
6/*
7 * Copyright (C) 2009-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#define RTCRITSECTRW_WITHOUT_REMAPPING
32#define RTASSERT_QUIET
33#include <iprt/critsect.h>
34#include "internal/iprt.h"
35
36#include <iprt/asm.h>
37#include <iprt/assert.h>
38#include <iprt/err.h>
39#include <iprt/lockvalidator.h>
40#include <iprt/mem.h>
41#include <iprt/semaphore.h>
42#include <iprt/thread.h>
43
44#include "internal/magics.h"
45#include "internal/strict.h"
46
47/* Two issues here, (1) the tracepoint generator uses IPRT, and (2) only one .d
48 file per module. */
49#ifdef IPRT_WITH_DTRACE
50# include IPRT_DTRACE_INCLUDE
51# define IPRT_CRITSECTRW_EXCL_ENTERED RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_EXCL_ENTERED)
52# define IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED)
53# define IPRT_CRITSECTRW_EXCL_LEAVING RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_EXCL_LEAVING)
54# define IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED)
55# define IPRT_CRITSECTRW_EXCL_BUSY RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_EXCL_BUSY)
56# define IPRT_CRITSECTRW_EXCL_WAITING RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_EXCL_WAITING)
57# define IPRT_CRITSECTRW_EXCL_ENTERED_SHARED RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_EXCL_ENTERED_SHARED)
58# define IPRT_CRITSECTRW_EXCL_LEAVING_SHARED RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_EXCL_LEAVING_SHARED)
59# define IPRT_CRITSECTRW_SHARED_ENTERED RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_SHARED_ENTERED)
60# define IPRT_CRITSECTRW_SHARED_LEAVING RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_SHARED_LEAVING)
61# define IPRT_CRITSECTRW_SHARED_BUSY RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_SHARED_BUSY)
62# define IPRT_CRITSECTRW_SHARED_WAITING RT_CONCAT(IPRT_WITH_DTRACE,IPRT_CRITSECTRW_SHARED_WAITING)
63#else
64# define IPRT_CRITSECTRW_EXCL_ENTERED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
65# define IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED() (false)
66# define IPRT_CRITSECTRW_EXCL_LEAVING(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
67# define IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED() (false)
68# define IPRT_CRITSECTRW_EXCL_BUSY( a_pvCritSect, a_pszName, a_fWriteMode, a_cWaitingReaders, a_cReaders, cWriters, a_pvNativeOwnerThread) do {} while (0)
69# define IPRT_CRITSECTRW_EXCL_WAITING(a_pvCritSect, a_pszName, a_fWriteMode, a_cWaitingReaders, a_cReaders, cWriters, a_pvNativeOwnerThread) do {} while (0)
70# define IPRT_CRITSECTRW_EXCL_ENTERED_SHARED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
71# define IPRT_CRITSECTRW_EXCL_LEAVING_SHARED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
72# define IPRT_CRITSECTRW_SHARED_ENTERED(a_pvCritSect, a_pszName, a_cReaders, a_cWaitingWriters) do {} while (0)
73# define IPRT_CRITSECTRW_SHARED_LEAVING(a_pvCritSect, a_pszName, a_cReaders, a_cWaitingWriters) do {} while (0)
74# define IPRT_CRITSECTRW_SHARED_BUSY( a_pvCritSect, a_pszName, a_pvNativeOwnerThread, a_cWaitingReaders, a_cWriters) do {} while (0)
75# define IPRT_CRITSECTRW_SHARED_WAITING(a_pvCritSect, a_pszName, a_pvNativeOwnerThread, a_cWaitingReaders, a_cWriters) do {} while (0)
76#endif
77
78
79
80RTDECL(int) RTCritSectRwInit(PRTCRITSECTRW pThis)
81{
82 return RTCritSectRwInitEx(pThis, 0, NIL_RTLOCKVALCLASS, RTLOCKVAL_SUB_CLASS_NONE, "RTCritSectRw");
83}
84RT_EXPORT_SYMBOL(RTCritSectRwInit);
85
86
87RTDECL(int) RTCritSectRwInitEx(PRTCRITSECTRW pThis, uint32_t fFlags,
88 RTLOCKVALCLASS hClass, uint32_t uSubClass, const char *pszNameFmt, ...)
89{
90 int rc;
91 AssertReturn(!(fFlags & ~( RTCRITSECT_FLAGS_NO_NESTING | RTCRITSECT_FLAGS_NO_LOCK_VAL | RTCRITSECT_FLAGS_BOOTSTRAP_HACK
92 | RTCRITSECT_FLAGS_NOP )),
93 VERR_INVALID_PARAMETER);
94
95 /*
96 * Initialize the structure, allocate the lock validator stuff and sems.
97 */
98 pThis->u32Magic = RTCRITSECTRW_MAGIC_DEAD;
99 pThis->fNeedReset = false;
100#ifdef IN_RING0
101 pThis->fFlags = (uint16_t)(fFlags | RTCRITSECT_FLAGS_RING0);
102#else
103 pThis->fFlags = (uint16_t)(fFlags & ~RTCRITSECT_FLAGS_RING0);
104#endif
105 pThis->u64State = 0;
106 pThis->hNativeWriter = NIL_RTNATIVETHREAD;
107 pThis->cWriterReads = 0;
108 pThis->cWriteRecursions = 0;
109 pThis->hEvtWrite = NIL_RTSEMEVENT;
110 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
111 pThis->pValidatorWrite = NULL;
112 pThis->pValidatorRead = NULL;
113#if HC_ARCH_BITS == 32
114 pThis->HCPtrPadding = NIL_RTHCPTR;
115#endif
116
117#ifdef RTCRITSECTRW_STRICT
118 bool const fLVEnabled = !(fFlags & RTCRITSECT_FLAGS_NO_LOCK_VAL);
119 if (!pszNameFmt)
120 {
121 static uint32_t volatile s_iAnon = 0;
122 uint32_t i = ASMAtomicIncU32(&s_iAnon) - 1;
123 rc = RTLockValidatorRecExclCreate(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
124 fLVEnabled, "RTCritSectRw-%u", i);
125 if (RT_SUCCESS(rc))
126 rc = RTLockValidatorRecSharedCreate(&pThis->pValidatorRead, hClass, uSubClass, pThis,
127 false /*fSignaller*/, fLVEnabled, "RTCritSectRw-%u", i);
128 }
129 else
130 {
131 va_list va;
132 va_start(va, pszNameFmt);
133 rc = RTLockValidatorRecExclCreateV(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
134 fLVEnabled, pszNameFmt, va);
135 va_end(va);
136 if (RT_SUCCESS(rc))
137 {
138 va_start(va, pszNameFmt);
139 rc = RTLockValidatorRecSharedCreateV(&pThis->pValidatorRead, hClass, uSubClass, pThis,
140 false /*fSignaller*/, fLVEnabled, pszNameFmt, va);
141 va_end(va);
142 }
143 }
144 if (RT_SUCCESS(rc))
145 rc = RTLockValidatorRecMakeSiblings(&pThis->pValidatorWrite->Core, &pThis->pValidatorRead->Core);
146
147 if (RT_SUCCESS(rc))
148#endif
149 {
150 rc = RTSemEventMultiCreate(&pThis->hEvtRead);
151 if (RT_SUCCESS(rc))
152 {
153 rc = RTSemEventCreate(&pThis->hEvtWrite);
154 if (RT_SUCCESS(rc))
155 {
156 pThis->u32Magic = RTCRITSECTRW_MAGIC;
157 return VINF_SUCCESS;
158 }
159 RTSemEventMultiDestroy(pThis->hEvtRead);
160 }
161 }
162
163#ifdef RTCRITSECTRW_STRICT
164 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
165 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
166#endif
167 return rc;
168}
169RT_EXPORT_SYMBOL(RTCritSectRwInitEx);
170
171
172RTDECL(uint32_t) RTCritSectRwSetSubClass(PRTCRITSECTRW pThis, uint32_t uSubClass)
173{
174 AssertPtrReturn(pThis, RTLOCKVAL_SUB_CLASS_INVALID);
175 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, RTLOCKVAL_SUB_CLASS_INVALID);
176#ifdef IN_RING0
177 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
178#else
179 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
180#endif
181#ifdef RTCRITSECTRW_STRICT
182 AssertReturn(!(pThis->fFlags & RTCRITSECT_FLAGS_NOP), RTLOCKVAL_SUB_CLASS_INVALID);
183
184 RTLockValidatorRecSharedSetSubClass(pThis->pValidatorRead, uSubClass);
185 return RTLockValidatorRecExclSetSubClass(pThis->pValidatorWrite, uSubClass);
186#else
187 NOREF(uSubClass);
188 return RTLOCKVAL_SUB_CLASS_INVALID;
189#endif
190}
191RT_EXPORT_SYMBOL(RTCritSectRwSetSubClass);
192
193
194static int rtCritSectRwEnterShared(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
195{
196 /*
197 * Validate input.
198 */
199 AssertPtr(pThis);
200 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
201#ifdef IN_RING0
202 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
203#else
204 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
205#endif
206
207#ifdef RTCRITSECTRW_STRICT
208 RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
209 if (!fTryOnly)
210 {
211 int rc9;
212 RTNATIVETHREAD hNativeWriter;
213 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
214 if (hNativeWriter != NIL_RTTHREAD && hNativeWriter == RTThreadNativeSelf())
215 rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
216 else
217 rc9 = RTLockValidatorRecSharedCheckOrder(pThis->pValidatorRead, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
218 if (RT_FAILURE(rc9))
219 return rc9;
220 }
221#endif
222
223 /*
224 * Get cracking...
225 */
226 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
227 uint64_t u64OldState = u64State;
228
229 for (;;)
230 {
231 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
232 {
233 /* It flows in the right direction, try follow it before it changes. */
234 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
235 c++;
236 Assert(c < RTCSRW_CNT_MASK / 2);
237 u64State &= ~RTCSRW_CNT_RD_MASK;
238 u64State |= c << RTCSRW_CNT_RD_SHIFT;
239 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
240 {
241#ifdef RTCRITSECTRW_STRICT
242 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
243#endif
244 break;
245 }
246 }
247 else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
248 {
249 /* Wrong direction, but we're alone here and can simply try switch the direction. */
250 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
251 u64State |= (UINT64_C(1) << RTCSRW_CNT_RD_SHIFT) | (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT);
252 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
253 {
254 Assert(!pThis->fNeedReset);
255#ifdef RTCRITSECTRW_STRICT
256 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
257#endif
258 break;
259 }
260 }
261 else
262 {
263 /* Is the writer perhaps doing a read recursion? */
264 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
265 RTNATIVETHREAD hNativeWriter;
266 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
267 if (hNativeSelf == hNativeWriter)
268 {
269#ifdef RTCRITSECTRW_STRICT
270 int rc9 = RTLockValidatorRecExclRecursionMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core, pSrcPos);
271 if (RT_FAILURE(rc9))
272 return rc9;
273#endif
274 Assert(pThis->cWriterReads < UINT32_MAX / 2);
275 uint32_t const cReads = ASMAtomicIncU32(&pThis->cWriterReads); NOREF(cReads);
276 IPRT_CRITSECTRW_EXCL_ENTERED_SHARED(pThis, NULL,
277 cReads + pThis->cWriteRecursions,
278 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
279 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
280
281 return VINF_SUCCESS; /* don't break! */
282 }
283
284 /* If we're only trying, return already. */
285 if (fTryOnly)
286 {
287 IPRT_CRITSECTRW_SHARED_BUSY(pThis, NULL,
288 (void *)pThis->hNativeWriter,
289 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
290 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
291 return VERR_SEM_BUSY;
292 }
293
294 /* Add ourselves to the queue and wait for the direction to change. */
295 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
296 c++;
297 Assert(c < RTCSRW_CNT_MASK / 2);
298
299 uint64_t cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
300 cWait++;
301 Assert(cWait <= c);
302 Assert(cWait < RTCSRW_CNT_MASK / 2);
303
304 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
305 u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
306
307 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
308 {
309 IPRT_CRITSECTRW_SHARED_WAITING(pThis, NULL,
310 (void *)pThis->hNativeWriter,
311 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
312 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
313 for (uint32_t iLoop = 0; ; iLoop++)
314 {
315 int rc;
316#ifdef RTCRITSECTRW_STRICT
317 rc = RTLockValidatorRecSharedCheckBlocking(pThis->pValidatorRead, hThreadSelf, pSrcPos, true,
318 RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_READ, false);
319 if (RT_SUCCESS(rc))
320#elif defined(IN_RING3)
321 RTTHREAD hThreadSelf = RTThreadSelf();
322 RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_READ, false);
323#endif
324 {
325 rc = RTSemEventMultiWait(pThis->hEvtRead, RT_INDEFINITE_WAIT);
326#ifdef IN_RING3
327 RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_READ);
328#endif
329 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
330 return VERR_SEM_DESTROYED;
331 }
332 if (RT_FAILURE(rc))
333 {
334 /* Decrement the counts and return the error. */
335 for (;;)
336 {
337 u64OldState = u64State = ASMAtomicReadU64(&pThis->u64State);
338 c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT; Assert(c > 0);
339 c--;
340 cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT; Assert(cWait > 0);
341 cWait--;
342 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
343 u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
344 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
345 break;
346 }
347 return rc;
348 }
349
350 Assert(pThis->fNeedReset);
351 u64State = ASMAtomicReadU64(&pThis->u64State);
352 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
353 break;
354 AssertMsg(iLoop < 1, ("%u\n", iLoop));
355 }
356
357 /* Decrement the wait count and maybe reset the semaphore (if we're last). */
358 for (;;)
359 {
360 u64OldState = u64State;
361
362 cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
363 Assert(cWait > 0);
364 cWait--;
365 u64State &= ~RTCSRW_WAIT_CNT_RD_MASK;
366 u64State |= cWait << RTCSRW_WAIT_CNT_RD_SHIFT;
367
368 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
369 {
370 if (cWait == 0)
371 {
372 if (ASMAtomicXchgBool(&pThis->fNeedReset, false))
373 {
374 int rc = RTSemEventMultiReset(pThis->hEvtRead);
375 AssertRCReturn(rc, rc);
376 }
377 }
378 break;
379 }
380 u64State = ASMAtomicReadU64(&pThis->u64State);
381 }
382
383#ifdef RTCRITSECTRW_STRICT
384 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
385#endif
386 break;
387 }
388 }
389
390 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
391 return VERR_SEM_DESTROYED;
392
393 ASMNopPause();
394 u64State = ASMAtomicReadU64(&pThis->u64State);
395 u64OldState = u64State;
396 }
397
398 /* got it! */
399 Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT));
400 IPRT_CRITSECTRW_SHARED_ENTERED(pThis, NULL,
401 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
402 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
403 return VINF_SUCCESS;
404}
405
406
407RTDECL(int) RTCritSectRwEnterShared(PRTCRITSECTRW pThis)
408{
409#ifndef RTCRITSECTRW_STRICT
410 return rtCritSectRwEnterShared(pThis, NULL, false /*fTryOnly*/);
411#else
412 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
413 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
414#endif
415}
416RT_EXPORT_SYMBOL(RTCritSectRwEnterShared);
417
418
419RTDECL(int) RTCritSectRwEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
420{
421 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
422 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
423}
424RT_EXPORT_SYMBOL(RTCritSectRwEnterSharedDebug);
425
426
427RTDECL(int) RTCritSectRwTryEnterShared(PRTCRITSECTRW pThis)
428{
429#ifndef RTCRITSECTRW_STRICT
430 return rtCritSectRwEnterShared(pThis, NULL, true /*fTryOnly*/);
431#else
432 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
433 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
434#endif
435}
436RT_EXPORT_SYMBOL(RTCritSectRwTryEnterShared);
437
438
439RTDECL(int) RTCritSectRwTryEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
440{
441 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
442 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
443}
444RT_EXPORT_SYMBOL(RTCritSectRwTryEnterSharedDebug);
445
446
447
448RTDECL(int) RTCritSectRwLeaveShared(PRTCRITSECTRW pThis)
449{
450 /*
451 * Validate handle.
452 */
453 AssertPtr(pThis);
454 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
455#ifdef IN_RING0
456 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
457#else
458 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
459#endif
460
461 /*
462 * Check the direction and take action accordingly.
463 */
464 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
465 uint64_t u64OldState = u64State;
466 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
467 {
468#ifdef RTCRITSECTRW_STRICT
469 int rc9 = RTLockValidatorRecSharedCheckAndRelease(pThis->pValidatorRead, NIL_RTTHREAD);
470 if (RT_FAILURE(rc9))
471 return rc9;
472#endif
473 IPRT_CRITSECTRW_SHARED_LEAVING(pThis, NULL,
474 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT) - 1,
475 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
476
477 for (;;)
478 {
479 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
480 AssertReturn(c > 0, VERR_NOT_OWNER);
481 c--;
482
483 if ( c > 0
484 || (u64State & RTCSRW_CNT_WR_MASK) == 0)
485 {
486 /* Don't change the direction. */
487 u64State &= ~RTCSRW_CNT_RD_MASK;
488 u64State |= c << RTCSRW_CNT_RD_SHIFT;
489 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
490 break;
491 }
492 else
493 {
494 /* Reverse the direction and signal the reader threads. */
495 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_DIR_MASK);
496 u64State |= RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT;
497 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
498 {
499 int rc = RTSemEventSignal(pThis->hEvtWrite);
500 AssertRC(rc);
501 break;
502 }
503 }
504
505 ASMNopPause();
506 u64State = ASMAtomicReadU64(&pThis->u64State);
507 u64OldState = u64State;
508 }
509 }
510 else
511 {
512 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
513 RTNATIVETHREAD hNativeWriter;
514 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
515 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
516 AssertReturn(pThis->cWriterReads > 0, VERR_NOT_OWNER);
517#ifdef RTCRITSECTRW_STRICT
518 int rc = RTLockValidatorRecExclUnwindMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core);
519 if (RT_FAILURE(rc))
520 return rc;
521#endif
522 uint32_t cReads = ASMAtomicDecU32(&pThis->cWriterReads); NOREF(cReads);
523 IPRT_CRITSECTRW_EXCL_LEAVING_SHARED(pThis, NULL,
524 cReads + pThis->cWriteRecursions,
525 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
526 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
527 }
528
529 return VINF_SUCCESS;
530}
531RT_EXPORT_SYMBOL(RTCritSectRwLeaveShared);
532
533
534static int rtCritSectRwEnterExcl(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
535{
536 /*
537 * Validate input.
538 */
539 AssertPtr(pThis);
540 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
541#ifdef IN_RING0
542 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
543#else
544 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
545#endif
546
547#ifdef RTCRITSECTRW_STRICT
548 RTTHREAD hThreadSelf = NIL_RTTHREAD;
549 if (!fTryOnly)
550 {
551 hThreadSelf = RTThreadSelfAutoAdopt();
552 int rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
553 if (RT_FAILURE(rc9))
554 return rc9;
555 }
556#endif
557
558 /*
559 * Check if we're already the owner and just recursing.
560 */
561 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
562 RTNATIVETHREAD hNativeWriter;
563 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
564 if (hNativeSelf == hNativeWriter)
565 {
566 Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
567#ifdef RTCRITSECTRW_STRICT
568 int rc9 = RTLockValidatorRecExclRecursion(pThis->pValidatorWrite, pSrcPos);
569 if (RT_FAILURE(rc9))
570 return rc9;
571#endif
572 Assert(pThis->cWriteRecursions < UINT32_MAX / 2);
573 uint32_t cNestings = ASMAtomicIncU32(&pThis->cWriteRecursions); NOREF(cNestings);
574
575 if (IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED())
576 {
577 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
578 IPRT_CRITSECTRW_EXCL_ENTERED(pThis, NULL, cNestings + pThis->cWriterReads,
579 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
580 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
581 }
582 return VINF_SUCCESS;
583 }
584
585 /*
586 * Get cracking.
587 */
588 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
589 uint64_t u64OldState = u64State;
590
591 for (;;)
592 {
593 if ( (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
594 || (u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) != 0)
595 {
596 /* It flows in the right direction, try follow it before it changes. */
597 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
598 c++;
599 Assert(c < RTCSRW_CNT_MASK / 2);
600 u64State &= ~RTCSRW_CNT_WR_MASK;
601 u64State |= c << RTCSRW_CNT_WR_SHIFT;
602 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
603 break;
604 }
605 else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
606 {
607 /* Wrong direction, but we're alone here and can simply try switch the direction. */
608 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
609 u64State |= (UINT64_C(1) << RTCSRW_CNT_WR_SHIFT) | (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT);
610 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
611 break;
612 }
613 else if (fTryOnly)
614 /* Wrong direction and we're not supposed to wait, just return. */
615 return VERR_SEM_BUSY;
616 else
617 {
618 /* Add ourselves to the write count and break out to do the wait. */
619 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
620 c++;
621 Assert(c < RTCSRW_CNT_MASK / 2);
622 u64State &= ~RTCSRW_CNT_WR_MASK;
623 u64State |= c << RTCSRW_CNT_WR_SHIFT;
624 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
625 break;
626 }
627
628 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
629 return VERR_SEM_DESTROYED;
630
631 ASMNopPause();
632 u64State = ASMAtomicReadU64(&pThis->u64State);
633 u64OldState = u64State;
634 }
635
636 /*
637 * If we're in write mode now try grab the ownership. Play fair if there
638 * are threads already waiting.
639 */
640 bool fDone = (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
641 && ( ((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT) == 1
642 || fTryOnly);
643 if (fDone)
644 ASMAtomicCmpXchgHandle(&pThis->hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
645 if (!fDone)
646 {
647 /*
648 * If only trying, undo the above writer incrementation and return.
649 */
650 if (fTryOnly)
651 {
652 for (;;)
653 {
654 u64OldState = u64State = ASMAtomicReadU64(&pThis->u64State);
655 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
656 c--;
657 u64State &= ~RTCSRW_CNT_WR_MASK;
658 u64State |= c << RTCSRW_CNT_WR_SHIFT;
659 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
660 break;
661 }
662 IPRT_CRITSECTRW_EXCL_BUSY(pThis, NULL,
663 (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT) /*fWrite*/,
664 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
665 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
666 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT),
667 (void *)pThis->hNativeWriter);
668 return VERR_SEM_BUSY;
669 }
670
671 /*
672 * Wait for our turn.
673 */
674 IPRT_CRITSECTRW_EXCL_WAITING(pThis, NULL,
675 (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT) /*fWrite*/,
676 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
677 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
678 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT),
679 (void *)pThis->hNativeWriter);
680 for (uint32_t iLoop = 0; ; iLoop++)
681 {
682 int rc;
683#ifdef RTCRITSECTRW_STRICT
684 if (hThreadSelf == NIL_RTTHREAD)
685 hThreadSelf = RTThreadSelfAutoAdopt();
686 rc = RTLockValidatorRecExclCheckBlocking(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true,
687 RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_WRITE, false);
688 if (RT_SUCCESS(rc))
689#elif defined(IN_RING3)
690 RTTHREAD hThreadSelf = RTThreadSelf();
691 RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_WRITE, false);
692#endif
693 {
694 rc = RTSemEventWait(pThis->hEvtWrite, RT_INDEFINITE_WAIT);
695#ifdef IN_RING3
696 RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_WRITE);
697#endif
698 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
699 return VERR_SEM_DESTROYED;
700 }
701 if (RT_FAILURE(rc))
702 {
703 /* Decrement the counts and return the error. */
704 for (;;)
705 {
706 u64OldState = u64State = ASMAtomicReadU64(&pThis->u64State);
707 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
708 c--;
709 u64State &= ~RTCSRW_CNT_WR_MASK;
710 u64State |= c << RTCSRW_CNT_WR_SHIFT;
711 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
712 break;
713 }
714 return rc;
715 }
716
717 u64State = ASMAtomicReadU64(&pThis->u64State);
718 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
719 {
720 ASMAtomicCmpXchgHandle(&pThis->hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
721 if (fDone)
722 break;
723 }
724 AssertMsg(iLoop < 1000, ("%u\n", iLoop)); /* may loop a few times here... */
725 }
726 }
727
728 /*
729 * Got it!
730 */
731 Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
732 ASMAtomicWriteU32(&pThis->cWriteRecursions, 1);
733 Assert(pThis->cWriterReads == 0);
734#ifdef RTCRITSECTRW_STRICT
735 RTLockValidatorRecExclSetOwner(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true);
736#endif
737 IPRT_CRITSECTRW_EXCL_ENTERED(pThis, NULL, 1,
738 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
739 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
740
741 return VINF_SUCCESS;
742}
743
744
745RTDECL(int) RTCritSectRwEnterExcl(PRTCRITSECTRW pThis)
746{
747#ifndef RTCRITSECTRW_STRICT
748 return rtCritSectRwEnterExcl(pThis, NULL, false /*fTryOnly*/);
749#else
750 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
751 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryOnly*/);
752#endif
753}
754RT_EXPORT_SYMBOL(RTCritSectRwEnterExcl);
755
756
757RTDECL(int) RTCritSectRwEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
758{
759 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
760 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryOnly*/);
761}
762RT_EXPORT_SYMBOL(RTCritSectRwEnterExclDebug);
763
764
765RTDECL(int) RTCritSectRwTryEnterExcl(PRTCRITSECTRW pThis)
766{
767#ifndef RTCRITSECTRW_STRICT
768 return rtCritSectRwEnterExcl(pThis, NULL, true /*fTryOnly*/);
769#else
770 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
771 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryOnly*/);
772#endif
773}
774RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExcl);
775
776
777RTDECL(int) RTCritSectRwTryEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
778{
779 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
780 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryOnly*/);
781}
782RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExclDebug);
783
784
785RTDECL(int) RTCritSectRwLeaveExcl(PRTCRITSECTRW pThis)
786{
787 /*
788 * Validate handle.
789 */
790 AssertPtr(pThis);
791 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
792#ifdef IN_RING0
793 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
794#else
795 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
796#endif
797
798 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
799 RTNATIVETHREAD hNativeWriter;
800 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
801 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
802
803 /*
804 * Unwind a recursion.
805 */
806 if (pThis->cWriteRecursions == 1)
807 {
808 AssertReturn(pThis->cWriterReads == 0, VERR_WRONG_ORDER); /* (must release all read recursions before the final write.) */
809#ifdef RTCRITSECTRW_STRICT
810 int rc9 = RTLockValidatorRecExclReleaseOwner(pThis->pValidatorWrite, true);
811 if (RT_FAILURE(rc9))
812 return rc9;
813#endif
814 /*
815 * Update the state.
816 */
817 ASMAtomicWriteU32(&pThis->cWriteRecursions, 0);
818 ASMAtomicWriteHandle(&pThis->hNativeWriter, NIL_RTNATIVETHREAD);
819
820 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
821 IPRT_CRITSECTRW_EXCL_LEAVING(pThis, NULL, 0,
822 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
823 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
824
825 for (;;)
826 {
827 uint64_t u64OldState = u64State;
828
829 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
830 Assert(c > 0);
831 c--;
832
833 if ( c > 0
834 || (u64State & RTCSRW_CNT_RD_MASK) == 0)
835 {
836 /* Don't change the direction, wake up the next writer if any. */
837 u64State &= ~RTCSRW_CNT_WR_MASK;
838 u64State |= c << RTCSRW_CNT_WR_SHIFT;
839 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
840 {
841 if (c > 0)
842 {
843 int rc = RTSemEventSignal(pThis->hEvtWrite);
844 AssertRC(rc);
845 }
846 break;
847 }
848 }
849 else
850 {
851 /* Reverse the direction and signal the reader threads. */
852 u64State &= ~(RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
853 u64State |= RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT;
854 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
855 {
856 Assert(!pThis->fNeedReset);
857 ASMAtomicWriteBool(&pThis->fNeedReset, true);
858 int rc = RTSemEventMultiSignal(pThis->hEvtRead);
859 AssertRC(rc);
860 break;
861 }
862 }
863
864 ASMNopPause();
865 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
866 return VERR_SEM_DESTROYED;
867 u64State = ASMAtomicReadU64(&pThis->u64State);
868 }
869 }
870 else
871 {
872 Assert(pThis->cWriteRecursions != 0);
873#ifdef RTCRITSECTRW_STRICT
874 int rc9 = RTLockValidatorRecExclUnwind(pThis->pValidatorWrite);
875 if (RT_FAILURE(rc9))
876 return rc9;
877#endif
878 uint32_t cNestings = ASMAtomicDecU32(&pThis->cWriteRecursions); NOREF(cNestings);
879 if (IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED())
880 {
881 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
882 IPRT_CRITSECTRW_EXCL_LEAVING(pThis, NULL, cNestings + pThis->cWriterReads,
883 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
884 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
885 }
886 }
887
888 return VINF_SUCCESS;
889}
890RT_EXPORT_SYMBOL(RTCritSectRwLeaveExcl);
891
892
893RTDECL(bool) RTCritSectRwIsWriteOwner(PRTCRITSECTRW pThis)
894{
895 /*
896 * Validate handle.
897 */
898 AssertPtr(pThis);
899 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
900#ifdef IN_RING0
901 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
902#else
903 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
904#endif
905
906 /*
907 * Check ownership.
908 */
909 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
910 RTNATIVETHREAD hNativeWriter;
911 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
912 return hNativeWriter == hNativeSelf;
913}
914RT_EXPORT_SYMBOL(RTCritSectRwIsWriteOwner);
915
916
917RTDECL(bool) RTCritSectRwIsReadOwner(PRTCRITSECTRW pThis, bool fWannaHear)
918{
919 /*
920 * Validate handle.
921 */
922 AssertPtr(pThis);
923 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
924#ifdef IN_RING0
925 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
926#else
927 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
928#endif
929
930 /*
931 * Inspect the state.
932 */
933 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
934 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
935 {
936 /*
937 * It's in write mode, so we can only be a reader if we're also the
938 * current writer.
939 */
940 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
941 RTNATIVETHREAD hWriter;
942 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hWriter);
943 return hWriter == hNativeSelf;
944 }
945
946 /*
947 * Read mode. If there are no current readers, then we cannot be a reader.
948 */
949 if (!(u64State & RTCSRW_CNT_RD_MASK))
950 return false;
951
952#ifdef RTCRITSECTRW_STRICT
953 /*
954 * Ask the lock validator.
955 */
956 return RTLockValidatorRecSharedIsOwner(pThis->pValidatorRead, NIL_RTTHREAD);
957#else
958 /*
959 * Ok, we don't know, just tell the caller what he want to hear.
960 */
961 return fWannaHear;
962#endif
963}
964RT_EXPORT_SYMBOL(RTCritSectRwIsReadOwner);
965
966
967RTDECL(uint32_t) RTCritSectRwGetWriteRecursion(PRTCRITSECTRW pThis)
968{
969 /*
970 * Validate handle.
971 */
972 AssertPtr(pThis);
973 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
974
975 /*
976 * Return the requested data.
977 */
978 return pThis->cWriteRecursions;
979}
980RT_EXPORT_SYMBOL(RTCritSectRwGetWriteRecursion);
981
982
983RTDECL(uint32_t) RTCritSectRwGetWriterReadRecursion(PRTCRITSECTRW pThis)
984{
985 /*
986 * Validate handle.
987 */
988 AssertPtr(pThis);
989 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
990
991 /*
992 * Return the requested data.
993 */
994 return pThis->cWriterReads;
995}
996RT_EXPORT_SYMBOL(RTCritSectRwGetWriterReadRecursion);
997
998
999RTDECL(uint32_t) RTCritSectRwGetReadCount(PRTCRITSECTRW pThis)
1000{
1001 /*
1002 * Validate input.
1003 */
1004 AssertPtr(pThis);
1005 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
1006
1007 /*
1008 * Return the requested data.
1009 */
1010 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
1011 if ((u64State & RTCSRW_DIR_MASK) != (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
1012 return 0;
1013 return (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
1014}
1015RT_EXPORT_SYMBOL(RTCritSectRwGetReadCount);
1016
1017
1018RTDECL(int) RTCritSectRwDelete(PRTCRITSECTRW pThis)
1019{
1020 /*
1021 * Assert free waiters and so on.
1022 */
1023 AssertPtr(pThis);
1024 Assert(pThis->u32Magic == RTCRITSECTRW_MAGIC);
1025 //Assert(pThis->cNestings == 0);
1026 //Assert(pThis->cLockers == -1);
1027 Assert(pThis->hNativeWriter == NIL_RTNATIVETHREAD);
1028#ifdef IN_RING0
1029 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
1030#else
1031 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
1032#endif
1033
1034 /*
1035 * Invalidate the structure and free the semaphores.
1036 */
1037 if (!ASMAtomicCmpXchgU32(&pThis->u32Magic, RTCRITSECTRW_MAGIC_DEAD, RTCRITSECTRW_MAGIC))
1038 return VERR_INVALID_PARAMETER;
1039
1040 pThis->fFlags = 0;
1041 pThis->u64State = 0;
1042
1043 RTSEMEVENT hEvtWrite = pThis->hEvtWrite;
1044 pThis->hEvtWrite = NIL_RTSEMEVENT;
1045 RTSEMEVENTMULTI hEvtRead = pThis->hEvtRead;
1046 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
1047
1048 int rc1 = RTSemEventDestroy(hEvtWrite); AssertRC(rc1);
1049 int rc2 = RTSemEventMultiDestroy(hEvtRead); AssertRC(rc2);
1050
1051#ifndef IN_RING0
1052 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
1053 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
1054#endif
1055
1056 return RT_SUCCESS(rc1) ? rc2 : rc1;
1057}
1058RT_EXPORT_SYMBOL(RTCritSectRwDelete);
1059
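
As a closing note, the enter/leave paths above all share one optimistic update pattern: read u64State, compute a new value, publish it with ASMAtomicCmpXchgU64, and retry after ASMNopPause if another thread raced in. The sketch below (illustrative only) reduces that pattern to bumping a single hypothetical reader counter; MY_CNT_RD_MASK and MY_CNT_RD_SHIFT are stand-in constants, since the real RTCSRW_* bit layout is defined in the IPRT headers rather than in this file.

/* Illustrative sketch of the compare-exchange retry loop used above. */
#include <iprt/asm.h>

#define MY_CNT_RD_MASK  UINT64_C(0x0000000000007fff)  /* stand-in, not RTCSRW_CNT_RD_MASK  */
#define MY_CNT_RD_SHIFT 0                             /* stand-in, not RTCSRW_CNT_RD_SHIFT */

static void exampleAddReader(volatile uint64_t *pu64State)
{
    for (;;)
    {
        uint64_t u64OldState = ASMAtomicReadU64(pu64State);
        uint64_t u64State    = u64OldState;

        /* Pull the count out of the state word, bump it, and put it back. */
        uint64_t c = (u64State & MY_CNT_RD_MASK) >> MY_CNT_RD_SHIFT;
        c++;
        u64State &= ~MY_CNT_RD_MASK;
        u64State |= c << MY_CNT_RD_SHIFT;

        /* Publish only if nobody changed the state in the meantime;
           otherwise back off briefly and recompute from scratch. */
        if (ASMAtomicCmpXchgU64(pu64State, u64State, u64OldState))
            return;
        ASMNopPause();
    }
}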