VirtualBox

source: vbox/trunk/src/VBox/Runtime/generic/critsectrw-generic.cpp@72140

Last change on this file since 72140 was 69111, checked in by vboxsync, 7 years ago: "(C) year"

1/* $Id: critsectrw-generic.cpp 69111 2017-10-17 14:26:02Z vboxsync $ */
2/** @file
3 * IPRT - Read/Write Critical Section, Generic.
4 */
5
6/*
7 * Copyright (C) 2009-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#define RTCRITSECTRW_WITHOUT_REMAPPING
32#define RTASSERT_QUIET
33#include <iprt/critsect.h>
34#include "internal/iprt.h"
35
36#include <iprt/asm.h>
37#include <iprt/assert.h>
38#include <iprt/err.h>
39#include <iprt/lockvalidator.h>
40#include <iprt/mem.h>
41#include <iprt/semaphore.h>
42#include <iprt/thread.h>
43
44#include "internal/magics.h"
45#include "internal/strict.h"
46
47/* Two issues here: (1) the tracepoint generator uses IPRT, and (2) there is only one .d
48 file per module. */
49#ifdef IPRT_WITH_DTRACE
50# include IPRT_DTRACE_INCLUDE
51# ifdef IPRT_DTRACE_PREFIX
52# define IPRT_CRITSECTRW_EXCL_ENTERED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED)
53# define IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED)
54# define IPRT_CRITSECTRW_EXCL_LEAVING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING)
55# define IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED)
56# define IPRT_CRITSECTRW_EXCL_BUSY RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_BUSY)
57# define IPRT_CRITSECTRW_EXCL_WAITING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_WAITING)
58# define IPRT_CRITSECTRW_EXCL_ENTERED_SHARED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED_SHARED)
59# define IPRT_CRITSECTRW_EXCL_LEAVING_SHARED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING_SHARED)
60# define IPRT_CRITSECTRW_SHARED_ENTERED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_ENTERED)
61# define IPRT_CRITSECTRW_SHARED_LEAVING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_LEAVING)
62# define IPRT_CRITSECTRW_SHARED_BUSY RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_BUSY)
63# define IPRT_CRITSECTRW_SHARED_WAITING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_WAITING)
64# endif
65#else
66# define IPRT_CRITSECTRW_EXCL_ENTERED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
67# define IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED() (false)
68# define IPRT_CRITSECTRW_EXCL_LEAVING(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
69# define IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED() (false)
70# define IPRT_CRITSECTRW_EXCL_BUSY( a_pvCritSect, a_pszName, a_fWriteMode, a_cWaitingReaders, a_cReaders, cWriters, a_pvNativeOwnerThread) do {} while (0)
71# define IPRT_CRITSECTRW_EXCL_WAITING(a_pvCritSect, a_pszName, a_fWriteMode, a_cWaitingReaders, a_cReaders, cWriters, a_pvNativeOwnerThread) do {} while (0)
72# define IPRT_CRITSECTRW_EXCL_ENTERED_SHARED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
73# define IPRT_CRITSECTRW_EXCL_LEAVING_SHARED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
74# define IPRT_CRITSECTRW_SHARED_ENTERED(a_pvCritSect, a_pszName, a_cReaders, a_cWaitingWriters) do {} while (0)
75# define IPRT_CRITSECTRW_SHARED_LEAVING(a_pvCritSect, a_pszName, a_cReaders, a_cWaitingWriters) do {} while (0)
76# define IPRT_CRITSECTRW_SHARED_BUSY( a_pvCritSect, a_pszName, a_pvNativeOwnerThread, a_cWaitingReaders, a_cWriters) do {} while (0)
77# define IPRT_CRITSECTRW_SHARED_WAITING(a_pvCritSect, a_pszName, a_pvNativeOwnerThread, a_cWaitingReaders, a_cWriters) do {} while (0)
78#endif
79
80
81
82RTDECL(int) RTCritSectRwInit(PRTCRITSECTRW pThis)
83{
84 return RTCritSectRwInitEx(pThis, 0, NIL_RTLOCKVALCLASS, RTLOCKVAL_SUB_CLASS_NONE, "RTCritSectRw");
85}
86RT_EXPORT_SYMBOL(RTCritSectRwInit);
87
88
89RTDECL(int) RTCritSectRwInitEx(PRTCRITSECTRW pThis, uint32_t fFlags,
90 RTLOCKVALCLASS hClass, uint32_t uSubClass, const char *pszNameFmt, ...)
91{
92 int rc;
93 AssertReturn(!(fFlags & ~( RTCRITSECT_FLAGS_NO_NESTING | RTCRITSECT_FLAGS_NO_LOCK_VAL | RTCRITSECT_FLAGS_BOOTSTRAP_HACK
94 | RTCRITSECT_FLAGS_NOP )),
95 VERR_INVALID_PARAMETER);
96 RT_NOREF_PV(hClass); RT_NOREF_PV(uSubClass); RT_NOREF_PV(pszNameFmt);
97
98
99 /*
100 * Initialize the structure, allocate the lock validator records and the semaphores.
101 */
102 pThis->u32Magic = RTCRITSECTRW_MAGIC_DEAD;
103 pThis->fNeedReset = false;
104#ifdef IN_RING0
105 pThis->fFlags = (uint16_t)(fFlags | RTCRITSECT_FLAGS_RING0);
106#else
107 pThis->fFlags = (uint16_t)(fFlags & ~RTCRITSECT_FLAGS_RING0);
108#endif
109 pThis->u64State = 0;
110 pThis->hNativeWriter = NIL_RTNATIVETHREAD;
111 pThis->cWriterReads = 0;
112 pThis->cWriteRecursions = 0;
113 pThis->hEvtWrite = NIL_RTSEMEVENT;
114 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
115 pThis->pValidatorWrite = NULL;
116 pThis->pValidatorRead = NULL;
117#if HC_ARCH_BITS == 32
118 pThis->HCPtrPadding = NIL_RTHCPTR;
119#endif
120
121#ifdef RTCRITSECTRW_STRICT
122 bool const fLVEnabled = !(fFlags & RTCRITSECT_FLAGS_NO_LOCK_VAL);
123 if (!pszNameFmt)
124 {
125 static uint32_t volatile s_iAnon = 0;
126 uint32_t i = ASMAtomicIncU32(&s_iAnon) - 1;
127 rc = RTLockValidatorRecExclCreate(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
128 fLVEnabled, "RTCritSectRw-%u", i);
129 if (RT_SUCCESS(rc))
130 rc = RTLockValidatorRecSharedCreate(&pThis->pValidatorRead, hClass, uSubClass, pThis,
131 false /*fSignaller*/, fLVEnabled, "RTCritSectRw-%u", i);
132 }
133 else
134 {
135 va_list va;
136 va_start(va, pszNameFmt);
137 rc = RTLockValidatorRecExclCreateV(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
138 fLVEnabled, pszNameFmt, va);
139 va_end(va);
140 if (RT_SUCCESS(rc))
141 {
142 va_start(va, pszNameFmt);
143 rc = RTLockValidatorRecSharedCreateV(&pThis->pValidatorRead, hClass, uSubClass, pThis,
144 false /*fSignaller*/, fLVEnabled, pszNameFmt, va);
145 va_end(va);
146 }
147 }
148 if (RT_SUCCESS(rc))
149 rc = RTLockValidatorRecMakeSiblings(&pThis->pValidatorWrite->Core, &pThis->pValidatorRead->Core);
150
151 if (RT_SUCCESS(rc))
152#endif
153 {
154 rc = RTSemEventMultiCreate(&pThis->hEvtRead);
155 if (RT_SUCCESS(rc))
156 {
157 rc = RTSemEventCreate(&pThis->hEvtWrite);
158 if (RT_SUCCESS(rc))
159 {
160 pThis->u32Magic = RTCRITSECTRW_MAGIC;
161 return VINF_SUCCESS;
162 }
163 RTSemEventMultiDestroy(pThis->hEvtRead);
164 }
165 }
166
167#ifdef RTCRITSECTRW_STRICT
168 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
169 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
170#endif
171 return rc;
172}
173RT_EXPORT_SYMBOL(RTCritSectRwInitEx);
174
175
176RTDECL(uint32_t) RTCritSectRwSetSubClass(PRTCRITSECTRW pThis, uint32_t uSubClass)
177{
178 AssertPtrReturn(pThis, RTLOCKVAL_SUB_CLASS_INVALID);
179 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, RTLOCKVAL_SUB_CLASS_INVALID);
180#ifdef IN_RING0
181 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
182#else
183 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
184#endif
185#ifdef RTCRITSECTRW_STRICT
186 AssertReturn(!(pThis->fFlags & RTCRITSECT_FLAGS_NOP), RTLOCKVAL_SUB_CLASS_INVALID);
187
188 RTLockValidatorRecSharedSetSubClass(pThis->pValidatorRead, uSubClass);
189 return RTLockValidatorRecExclSetSubClass(pThis->pValidatorWrite, uSubClass);
190#else
191 NOREF(uSubClass);
192 return RTLOCKVAL_SUB_CLASS_INVALID;
193#endif
194}
195RT_EXPORT_SYMBOL(RTCritSectRwSetSubClass);
196
197
198static int rtCritSectRwEnterShared(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
199{
200 /*
201 * Validate input.
202 */
203 AssertPtr(pThis);
204 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
205#ifdef IN_RING0
206 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
207#else
208 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
209#endif
210 RT_NOREF_PV(pSrcPos);
211
212#ifdef RTCRITSECTRW_STRICT
213 RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
214 if (!fTryOnly)
215 {
216 int rc9;
217 RTNATIVETHREAD hNativeWriter;
218 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
219 if (hNativeWriter != NIL_RTNATIVETHREAD && hNativeWriter == RTThreadNativeSelf())
220 rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
221 else
222 rc9 = RTLockValidatorRecSharedCheckOrder(pThis->pValidatorRead, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
223 if (RT_FAILURE(rc9))
224 return rc9;
225 }
226#endif
227
228 /*
229 * Get cracking...
230 */
231 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
232 uint64_t u64OldState = u64State;
233
234 for (;;)
235 {
236 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
237 {
238 /* It flows in the right direction, try to follow it before it changes. */
239 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
240 c++;
241 Assert(c < RTCSRW_CNT_MASK / 2);
242 u64State &= ~RTCSRW_CNT_RD_MASK;
243 u64State |= c << RTCSRW_CNT_RD_SHIFT;
244 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
245 {
246#ifdef RTCRITSECTRW_STRICT
247 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
248#endif
249 break;
250 }
251 }
252 else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
253 {
254 /* Wrong direction, but we're alone here and can simply try to switch the direction. */
255 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
256 u64State |= (UINT64_C(1) << RTCSRW_CNT_RD_SHIFT) | (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT);
257 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
258 {
259 Assert(!pThis->fNeedReset);
260#ifdef RTCRITSECTRW_STRICT
261 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
262#endif
263 break;
264 }
265 }
266 else
267 {
268 /* Is the writer perhaps doing a read recursion? */
269 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
270 RTNATIVETHREAD hNativeWriter;
271 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
272 if (hNativeSelf == hNativeWriter)
273 {
274#ifdef RTCRITSECTRW_STRICT
275 int rc9 = RTLockValidatorRecExclRecursionMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core, pSrcPos);
276 if (RT_FAILURE(rc9))
277 return rc9;
278#endif
279 Assert(pThis->cWriterReads < UINT32_MAX / 2);
280 uint32_t const cReads = ASMAtomicIncU32(&pThis->cWriterReads); NOREF(cReads);
281 IPRT_CRITSECTRW_EXCL_ENTERED_SHARED(pThis, NULL,
282 cReads + pThis->cWriteRecursions,
283 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
284 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
285
286 return VINF_SUCCESS; /* don't break! */
287 }
288
289 /* If we're only trying, return straight away. */
290 if (fTryOnly)
291 {
292 IPRT_CRITSECTRW_SHARED_BUSY(pThis, NULL,
293 (void *)pThis->hNativeWriter,
294 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
295 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
296 return VERR_SEM_BUSY;
297 }
298
299 /* Add ourselves to the queue and wait for the direction to change. */
300 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
301 c++;
302 Assert(c < RTCSRW_CNT_MASK / 2);
303
304 uint64_t cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
305 cWait++;
306 Assert(cWait <= c);
307 Assert(cWait < RTCSRW_CNT_MASK / 2);
308
309 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
310 u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
311
312 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
313 {
314 IPRT_CRITSECTRW_SHARED_WAITING(pThis, NULL,
315 (void *)pThis->hNativeWriter,
316 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
317 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
318 for (uint32_t iLoop = 0; ; iLoop++)
319 {
320 int rc;
321#ifdef RTCRITSECTRW_STRICT
322 rc = RTLockValidatorRecSharedCheckBlocking(pThis->pValidatorRead, hThreadSelf, pSrcPos, true,
323 RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_READ, false);
324 if (RT_SUCCESS(rc))
325#elif defined(IN_RING3)
326 RTTHREAD hThreadSelf = RTThreadSelf();
327 RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_READ, false);
328#endif
329 {
330 rc = RTSemEventMultiWait(pThis->hEvtRead, RT_INDEFINITE_WAIT);
331#ifdef IN_RING3
332 RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_READ);
333#endif
334 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
335 return VERR_SEM_DESTROYED;
336 }
337 if (RT_FAILURE(rc))
338 {
339 /* Decrement the counts and return the error. */
340 for (;;)
341 {
342 u64OldState = u64State = ASMAtomicReadU64(&pThis->u64State);
343 c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT; Assert(c > 0);
344 c--;
345 cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT; Assert(cWait > 0);
346 cWait--;
347 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
348 u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
349 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
350 break;
351 }
352 return rc;
353 }
354
355 Assert(pThis->fNeedReset);
356 u64State = ASMAtomicReadU64(&pThis->u64State);
357 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
358 break;
359 AssertMsg(iLoop < 1, ("%u\n", iLoop));
360 }
361
362 /* Decrement the wait count and maybe reset the semaphore (if we're last). */
363 for (;;)
364 {
365 u64OldState = u64State;
366
367 cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
368 Assert(cWait > 0);
369 cWait--;
370 u64State &= ~RTCSRW_WAIT_CNT_RD_MASK;
371 u64State |= cWait << RTCSRW_WAIT_CNT_RD_SHIFT;
372
373 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
374 {
375 if (cWait == 0)
376 {
377 if (ASMAtomicXchgBool(&pThis->fNeedReset, false))
378 {
379 int rc = RTSemEventMultiReset(pThis->hEvtRead);
380 AssertRCReturn(rc, rc);
381 }
382 }
383 break;
384 }
385 u64State = ASMAtomicReadU64(&pThis->u64State);
386 }
387
388#ifdef RTCRITSECTRW_STRICT
389 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
390#endif
391 break;
392 }
393 }
394
395 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
396 return VERR_SEM_DESTROYED;
397
398 ASMNopPause();
399 u64State = ASMAtomicReadU64(&pThis->u64State);
400 u64OldState = u64State;
401 }
402
403 /* got it! */
404 Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT));
405 IPRT_CRITSECTRW_SHARED_ENTERED(pThis, NULL,
406 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
407 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
408 return VINF_SUCCESS;
409}
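
Editor's note (not part of the file): the enter/leave paths above pack the entire lock state into the single 64-bit u64State member and advance it with compare-and-exchange retry loops. The RTCSRW_* masks and shifts they use are defined in iprt/critsect.h rather than in this file, so the small decoder below uses illustrative field positions of its own: the SKETCH_* values are assumptions, not the real IPRT constants, and the sketch only shows the kind of packing the code above relies on.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical packing of a reader/writer state word: a 15-bit reader count,
   a 15-bit writer count, a 15-bit waiting-reader count and one direction bit.
   The real RTCSRW_* layout in iprt/critsect.h may differ. */
#define SKETCH_CNT_MASK           UINT64_C(0x7fff)
#define SKETCH_CNT_RD_SHIFT       0
#define SKETCH_CNT_WR_SHIFT       16
#define SKETCH_WAIT_CNT_RD_SHIFT  32
#define SKETCH_DIR_SHIFT          63
#define SKETCH_DIR_READ           UINT64_C(1)

static void sketchDumpState(uint64_t u64State)
{
    uint32_t    cReaders        = (uint32_t)((u64State >> SKETCH_CNT_RD_SHIFT)      & SKETCH_CNT_MASK);
    uint32_t    cWriters        = (uint32_t)((u64State >> SKETCH_CNT_WR_SHIFT)      & SKETCH_CNT_MASK);
    uint32_t    cWaitingReaders = (uint32_t)((u64State >> SKETCH_WAIT_CNT_RD_SHIFT) & SKETCH_CNT_MASK);
    const char *pszDir          = ((u64State >> SKETCH_DIR_SHIFT) & 1) == SKETCH_DIR_READ ? "READ" : "WRITE";
    printf("readers=%u writers=%u waiting-readers=%u direction=%s\n",
           cReaders, cWriters, cWaitingReaders, pszDir);
}

The CAS loops above recompute such a word from the value they last read and retry with ASMNopPause() when the exchange fails.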
410
411
412RTDECL(int) RTCritSectRwEnterShared(PRTCRITSECTRW pThis)
413{
414#ifndef RTCRITSECTRW_STRICT
415 return rtCritSectRwEnterShared(pThis, NULL, false /*fTryOnly*/);
416#else
417 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
418 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
419#endif
420}
421RT_EXPORT_SYMBOL(RTCritSectRwEnterShared);
422
423
424RTDECL(int) RTCritSectRwEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
425{
426 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
427 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
428}
429RT_EXPORT_SYMBOL(RTCritSectRwEnterSharedDebug);
430
431
432RTDECL(int) RTCritSectRwTryEnterShared(PRTCRITSECTRW pThis)
433{
434#ifndef RTCRITSECTRW_STRICT
435 return rtCritSectRwEnterShared(pThis, NULL, true /*fTryOnly*/);
436#else
437 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
438 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
439#endif
440}
441RT_EXPORT_SYMBOL(RTCritSectRwTryEnterShared);
442
443
444RTDECL(int) RTCritSectRwTryEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
445{
446 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
447 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
448}
449RT_EXPORT_SYMBOL(RTCritSectRwTryEnterSharedDebug);
450
451
452
453RTDECL(int) RTCritSectRwLeaveShared(PRTCRITSECTRW pThis)
454{
455 /*
456 * Validate handle.
457 */
458 AssertPtr(pThis);
459 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
460#ifdef IN_RING0
461 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
462#else
463 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
464#endif
465
466 /*
467 * Check the direction and take action accordingly.
468 */
469 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
470 uint64_t u64OldState = u64State;
471 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
472 {
473#ifdef RTCRITSECTRW_STRICT
474 int rc9 = RTLockValidatorRecSharedCheckAndRelease(pThis->pValidatorRead, NIL_RTTHREAD);
475 if (RT_FAILURE(rc9))
476 return rc9;
477#endif
478 IPRT_CRITSECTRW_SHARED_LEAVING(pThis, NULL,
479 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT) - 1,
480 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
481
482 for (;;)
483 {
484 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
485 AssertReturn(c > 0, VERR_NOT_OWNER);
486 c--;
487
488 if ( c > 0
489 || (u64State & RTCSRW_CNT_WR_MASK) == 0)
490 {
491 /* Don't change the direction. */
492 u64State &= ~RTCSRW_CNT_RD_MASK;
493 u64State |= c << RTCSRW_CNT_RD_SHIFT;
494 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
495 break;
496 }
497 else
498 {
499 /* Reverse the direction and signal a waiting writer. */
500 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_DIR_MASK);
501 u64State |= RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT;
502 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
503 {
504 int rc = RTSemEventSignal(pThis->hEvtWrite);
505 AssertRC(rc);
506 break;
507 }
508 }
509
510 ASMNopPause();
511 u64State = ASMAtomicReadU64(&pThis->u64State);
512 u64OldState = u64State;
513 }
514 }
515 else
516 {
517 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
518 RTNATIVETHREAD hNativeWriter;
519 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
520 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
521 AssertReturn(pThis->cWriterReads > 0, VERR_NOT_OWNER);
522#ifdef RTCRITSECTRW_STRICT
523 int rc = RTLockValidatorRecExclUnwindMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core);
524 if (RT_FAILURE(rc))
525 return rc;
526#endif
527 uint32_t cReads = ASMAtomicDecU32(&pThis->cWriterReads); NOREF(cReads);
528 IPRT_CRITSECTRW_EXCL_LEAVING_SHARED(pThis, NULL,
529 cReads + pThis->cWriteRecursions,
530 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
531 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
532 }
533
534 return VINF_SUCCESS;
535}
536RT_EXPORT_SYMBOL(RTCritSectRwLeaveShared);
537
538
539static int rtCritSectRwEnterExcl(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
540{
541 /*
542 * Validate input.
543 */
544 AssertPtr(pThis);
545 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
546#ifdef IN_RING0
547 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
548#else
549 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
550#endif
551 RT_NOREF_PV(pSrcPos);
552
553#ifdef RTCRITSECTRW_STRICT
554 RTTHREAD hThreadSelf = NIL_RTTHREAD;
555 if (!fTryOnly)
556 {
557 hThreadSelf = RTThreadSelfAutoAdopt();
558 int rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
559 if (RT_FAILURE(rc9))
560 return rc9;
561 }
562#endif
563
564 /*
565 * Check if we're already the owner and just recursing.
566 */
567 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
568 RTNATIVETHREAD hNativeWriter;
569 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
570 if (hNativeSelf == hNativeWriter)
571 {
572 Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
573#ifdef RTCRITSECTRW_STRICT
574 int rc9 = RTLockValidatorRecExclRecursion(pThis->pValidatorWrite, pSrcPos);
575 if (RT_FAILURE(rc9))
576 return rc9;
577#endif
578 Assert(pThis->cWriteRecursions < UINT32_MAX / 2);
579 uint32_t cNestings = ASMAtomicIncU32(&pThis->cWriteRecursions); NOREF(cNestings);
580
581#ifdef IPRT_WITH_DTRACE
582 if (IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED())
583 {
584 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
585 IPRT_CRITSECTRW_EXCL_ENTERED(pThis, NULL, cNestings + pThis->cWriterReads,
586 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
587 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
588 }
589#endif
590 return VINF_SUCCESS;
591 }
592
593 /*
594 * Get cracking.
595 */
596 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
597 uint64_t u64OldState = u64State;
598
599 for (;;)
600 {
601 if ( (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
602 || (u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) != 0)
603 {
604 /* It flows in the right direction, try to follow it before it changes. */
605 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
606 c++;
607 Assert(c < RTCSRW_CNT_MASK / 2);
608 u64State &= ~RTCSRW_CNT_WR_MASK;
609 u64State |= c << RTCSRW_CNT_WR_SHIFT;
610 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
611 break;
612 }
613 else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
614 {
615 /* Wrong direction, but we're alone here and can simply try to switch the direction. */
616 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
617 u64State |= (UINT64_C(1) << RTCSRW_CNT_WR_SHIFT) | (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT);
618 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
619 break;
620 }
621 else if (fTryOnly)
622 /* Wrong direction and we're not supposed to wait, just return. */
623 return VERR_SEM_BUSY;
624 else
625 {
626 /* Add ourselves to the write count and break out to do the wait. */
627 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
628 c++;
629 Assert(c < RTCSRW_CNT_MASK / 2);
630 u64State &= ~RTCSRW_CNT_WR_MASK;
631 u64State |= c << RTCSRW_CNT_WR_SHIFT;
632 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
633 break;
634 }
635
636 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
637 return VERR_SEM_DESTROYED;
638
639 ASMNopPause();
640 u64State = ASMAtomicReadU64(&pThis->u64State);
641 u64OldState = u64State;
642 }
643
644 /*
645 * If we're in write mode now, try to grab the ownership. Play fair if there
646 * are threads already waiting.
647 */
648 bool fDone = (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
649 && ( ((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT) == 1
650 || fTryOnly);
651 if (fDone)
652 ASMAtomicCmpXchgHandle(&pThis->hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
653 if (!fDone)
654 {
655 /*
656 * If we're only trying, undo the writer increment above and return.
657 */
658 if (fTryOnly)
659 {
660 for (;;)
661 {
662 u64OldState = u64State = ASMAtomicReadU64(&pThis->u64State);
663 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
664 c--;
665 u64State &= ~RTCSRW_CNT_WR_MASK;
666 u64State |= c << RTCSRW_CNT_WR_SHIFT;
667 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
668 break;
669 }
670 IPRT_CRITSECTRW_EXCL_BUSY(pThis, NULL,
671 (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT) /*fWrite*/,
672 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
673 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
674 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT),
675 (void *)pThis->hNativeWriter);
676 return VERR_SEM_BUSY;
677 }
678
679 /*
680 * Wait for our turn.
681 */
682 IPRT_CRITSECTRW_EXCL_WAITING(pThis, NULL,
683 (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT) /*fWrite*/,
684 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
685 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
686 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT),
687 (void *)pThis->hNativeWriter);
688 for (uint32_t iLoop = 0; ; iLoop++)
689 {
690 int rc;
691#ifdef RTCRITSECTRW_STRICT
692 if (hThreadSelf == NIL_RTTHREAD)
693 hThreadSelf = RTThreadSelfAutoAdopt();
694 rc = RTLockValidatorRecExclCheckBlocking(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true,
695 RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_WRITE, false);
696 if (RT_SUCCESS(rc))
697#elif defined(IN_RING3)
698 RTTHREAD hThreadSelf = RTThreadSelf();
699 RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_WRITE, false);
700#endif
701 {
702 rc = RTSemEventWait(pThis->hEvtWrite, RT_INDEFINITE_WAIT);
703#ifdef IN_RING3
704 RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_WRITE);
705#endif
706 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
707 return VERR_SEM_DESTROYED;
708 }
709 if (RT_FAILURE(rc))
710 {
711 /* Decrement the counts and return the error. */
712 for (;;)
713 {
714 u64OldState = u64State = ASMAtomicReadU64(&pThis->u64State);
715 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
716 c--;
717 u64State &= ~RTCSRW_CNT_WR_MASK;
718 u64State |= c << RTCSRW_CNT_WR_SHIFT;
719 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
720 break;
721 }
722 return rc;
723 }
724
725 u64State = ASMAtomicReadU64(&pThis->u64State);
726 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
727 {
728 ASMAtomicCmpXchgHandle(&pThis->hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
729 if (fDone)
730 break;
731 }
732 AssertMsg(iLoop < 1000, ("%u\n", iLoop)); /* may loop a few times here... */
733 }
734 }
735
736 /*
737 * Got it!
738 */
739 Assert((ASMAtomicReadU64(&pThis->u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
740 ASMAtomicWriteU32(&pThis->cWriteRecursions, 1);
741 Assert(pThis->cWriterReads == 0);
742#ifdef RTCRITSECTRW_STRICT
743 RTLockValidatorRecExclSetOwner(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true);
744#endif
745 IPRT_CRITSECTRW_EXCL_ENTERED(pThis, NULL, 1,
746 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
747 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
748
749 return VINF_SUCCESS;
750}
751
752
753RTDECL(int) RTCritSectRwEnterExcl(PRTCRITSECTRW pThis)
754{
755#ifndef RTCRITSECTRW_STRICT
756 return rtCritSectRwEnterExcl(pThis, NULL, false /*fTryOnly*/);
757#else
758 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
759 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryOnly*/);
760#endif
761}
762RT_EXPORT_SYMBOL(RTCritSectRwEnterExcl);
763
764
765RTDECL(int) RTCritSectRwEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
766{
767 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
768 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryOnly*/);
769}
770RT_EXPORT_SYMBOL(RTCritSectRwEnterExclDebug);
771
772
773RTDECL(int) RTCritSectRwTryEnterExcl(PRTCRITSECTRW pThis)
774{
775#ifndef RTCRITSECTRW_STRICT
776 return rtCritSectRwEnterExcl(pThis, NULL, true /*fTryOnly*/);
777#else
778 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
779 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryOnly*/);
780#endif
781}
782RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExcl);
783
784
785RTDECL(int) RTCritSectRwTryEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
786{
787 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
788 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryOnly*/);
789}
790RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExclDebug);
791
792
793RTDECL(int) RTCritSectRwLeaveExcl(PRTCRITSECTRW pThis)
794{
795 /*
796 * Validate handle.
797 */
798 AssertPtr(pThis);
799 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
800#ifdef IN_RING0
801 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
802#else
803 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
804#endif
805
806 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
807 RTNATIVETHREAD hNativeWriter;
808 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
809 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
810
811 /*
812 * Unwind a recursion.
813 */
814 if (pThis->cWriteRecursions == 1)
815 {
816 AssertReturn(pThis->cWriterReads == 0, VERR_WRONG_ORDER); /* (must release all read recursions before the final write.) */
817#ifdef RTCRITSECTRW_STRICT
818 int rc9 = RTLockValidatorRecExclReleaseOwner(pThis->pValidatorWrite, true);
819 if (RT_FAILURE(rc9))
820 return rc9;
821#endif
822 /*
823 * Update the state.
824 */
825 ASMAtomicWriteU32(&pThis->cWriteRecursions, 0);
826 ASMAtomicWriteHandle(&pThis->hNativeWriter, NIL_RTNATIVETHREAD);
827
828 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
829 IPRT_CRITSECTRW_EXCL_LEAVING(pThis, NULL, 0,
830 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
831 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
832
833 for (;;)
834 {
835 uint64_t u64OldState = u64State;
836
837 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
838 Assert(c > 0);
839 c--;
840
841 if ( c > 0
842 || (u64State & RTCSRW_CNT_RD_MASK) == 0)
843 {
844 /* Don't change the direction, wake up the next writer if any. */
845 u64State &= ~RTCSRW_CNT_WR_MASK;
846 u64State |= c << RTCSRW_CNT_WR_SHIFT;
847 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
848 {
849 if (c > 0)
850 {
851 int rc = RTSemEventSignal(pThis->hEvtWrite);
852 AssertRC(rc);
853 }
854 break;
855 }
856 }
857 else
858 {
859 /* Reverse the direction and signal the reader threads. */
860 u64State &= ~(RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
861 u64State |= RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT;
862 if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
863 {
864 Assert(!pThis->fNeedReset);
865 ASMAtomicWriteBool(&pThis->fNeedReset, true);
866 int rc = RTSemEventMultiSignal(pThis->hEvtRead);
867 AssertRC(rc);
868 break;
869 }
870 }
871
872 ASMNopPause();
873 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
874 return VERR_SEM_DESTROYED;
875 u64State = ASMAtomicReadU64(&pThis->u64State);
876 }
877 }
878 else
879 {
880 Assert(pThis->cWriteRecursions != 0);
881#ifdef RTCRITSECTRW_STRICT
882 int rc9 = RTLockValidatorRecExclUnwind(pThis->pValidatorWrite);
883 if (RT_FAILURE(rc9))
884 return rc9;
885#endif
886 uint32_t cNestings = ASMAtomicDecU32(&pThis->cWriteRecursions); NOREF(cNestings);
887#ifdef IPRT_WITH_DTRACE
888 if (IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED())
889 {
890 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
891 IPRT_CRITSECTRW_EXCL_LEAVING(pThis, NULL, cNestings + pThis->cWriterReads,
892 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
893 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
894 }
895#endif
896 }
897
898 return VINF_SUCCESS;
899}
900RT_EXPORT_SYMBOL(RTCritSectRwLeaveExcl);
901
902
903RTDECL(bool) RTCritSectRwIsWriteOwner(PRTCRITSECTRW pThis)
904{
905 /*
906 * Validate handle.
907 */
908 AssertPtr(pThis);
909 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
910#ifdef IN_RING0
911 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
912#else
913 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
914#endif
915
916 /*
917 * Check ownership.
918 */
919 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
920 RTNATIVETHREAD hNativeWriter;
921 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
922 return hNativeWriter == hNativeSelf;
923}
924RT_EXPORT_SYMBOL(RTCritSectRwIsWriteOwner);
925
926
927RTDECL(bool) RTCritSectRwIsReadOwner(PRTCRITSECTRW pThis, bool fWannaHear)
928{
929 RT_NOREF_PV(fWannaHear);
930
931 /*
932 * Validate handle.
933 */
934 AssertPtr(pThis);
935 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
936#ifdef IN_RING0
937 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
938#else
939 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
940#endif
941
942 /*
943 * Inspect the state.
944 */
945 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
946 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
947 {
948 /*
949 * It's in write mode, so we can only be a reader if we're also the
950 * current writer.
951 */
952 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
953 RTNATIVETHREAD hWriter;
954 ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hWriter);
955 return hWriter == hNativeSelf;
956 }
957
958 /*
959 * Read mode. If there are no current readers, then we cannot be a reader.
960 */
961 if (!(u64State & RTCSRW_CNT_RD_MASK))
962 return false;
963
964#ifdef RTCRITSECTRW_STRICT
965 /*
966 * Ask the lock validator.
967 */
968 return RTLockValidatorRecSharedIsOwner(pThis->pValidatorRead, NIL_RTTHREAD);
969#else
970 /*
971 * Ok, we don't know, so just tell the caller what they want to hear.
972 */
973 return fWannaHear;
974#endif
975}
976RT_EXPORT_SYMBOL(RTCritSectRwIsReadOwner);
977
978
979RTDECL(uint32_t) RTCritSectRwGetWriteRecursion(PRTCRITSECTRW pThis)
980{
981 /*
982 * Validate handle.
983 */
984 AssertPtr(pThis);
985 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
986
987 /*
988 * Return the requested data.
989 */
990 return pThis->cWriteRecursions;
991}
992RT_EXPORT_SYMBOL(RTCritSectRwGetWriteRecursion);
993
994
995RTDECL(uint32_t) RTCritSectRwGetWriterReadRecursion(PRTCRITSECTRW pThis)
996{
997 /*
998 * Validate handle.
999 */
1000 AssertPtr(pThis);
1001 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
1002
1003 /*
1004 * Return the requested data.
1005 */
1006 return pThis->cWriterReads;
1007}
1008RT_EXPORT_SYMBOL(RTCritSectRwGetWriterReadRecursion);
1009
1010
1011RTDECL(uint32_t) RTCritSectRwGetReadCount(PRTCRITSECTRW pThis)
1012{
1013 /*
1014 * Validate input.
1015 */
1016 AssertPtr(pThis);
1017 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
1018
1019 /*
1020 * Return the requested data.
1021 */
1022 uint64_t u64State = ASMAtomicReadU64(&pThis->u64State);
1023 if ((u64State & RTCSRW_DIR_MASK) != (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
1024 return 0;
1025 return (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
1026}
1027RT_EXPORT_SYMBOL(RTCritSectRwGetReadCount);
1028
1029
1030RTDECL(int) RTCritSectRwDelete(PRTCRITSECTRW pThis)
1031{
1032 /*
1033 * Assert free waiters and so on.
1034 */
1035 AssertPtr(pThis);
1036 Assert(pThis->u32Magic == RTCRITSECTRW_MAGIC);
1037 //Assert(pThis->cNestings == 0);
1038 //Assert(pThis->cLockers == -1);
1039 Assert(pThis->hNativeWriter == NIL_RTNATIVETHREAD);
1040#ifdef IN_RING0
1041 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
1042#else
1043 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
1044#endif
1045
1046 /*
1047 * Invalidate the structure and free the semaphores.
1048 */
1049 if (!ASMAtomicCmpXchgU32(&pThis->u32Magic, RTCRITSECTRW_MAGIC_DEAD, RTCRITSECTRW_MAGIC))
1050 return VERR_INVALID_PARAMETER;
1051
1052 pThis->fFlags = 0;
1053 pThis->u64State = 0;
1054
1055 RTSEMEVENT hEvtWrite = pThis->hEvtWrite;
1056 pThis->hEvtWrite = NIL_RTSEMEVENT;
1057 RTSEMEVENTMULTI hEvtRead = pThis->hEvtRead;
1058 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
1059
1060 int rc1 = RTSemEventDestroy(hEvtWrite); AssertRC(rc1);
1061 int rc2 = RTSemEventMultiDestroy(hEvtRead); AssertRC(rc2);
1062
1063#ifndef IN_RING0
1064 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
1065 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
1066#endif
1067
1068 return RT_SUCCESS(rc1) ? rc2 : rc1;
1069}
1070RT_EXPORT_SYMBOL(RTCritSectRwDelete);
1071
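
Editor's note (not part of the file): the listing defines the public read/write critical section API but carries no usage notes. The fragment below is a minimal, hedged sketch of how the routines implemented above pair up; it uses only functions defined in this file, trims most error handling for brevity, and is illustrative rather than VirtualBox code.

#include <iprt/critsect.h>
#include <iprt/err.h>

/* Minimal usage sketch for the read/write critical section API above. */
static int sketchUseRwCritSect(void)
{
    RTCRITSECTRW CritSect;
    int rc = RTCritSectRwInit(&CritSect);
    if (RT_FAILURE(rc))
        return rc;

    /* Shared (read) mode: many threads may hold the section concurrently. */
    rc = RTCritSectRwEnterShared(&CritSect);
    if (RT_SUCCESS(rc))
    {
        /* ... read the protected data ... */
        RTCritSectRwLeaveShared(&CritSect);
    }

    /* Exclusive (write) mode: single owner.  The owner may additionally enter
       the section in shared mode recursively (tracked by cWriterReads above). */
    rc = RTCritSectRwEnterExcl(&CritSect);
    if (RT_SUCCESS(rc))
    {
        /* ... modify the protected data ... */
        rc = RTCritSectRwEnterShared(&CritSect);    /* writer read recursion */
        if (RT_SUCCESS(rc))
            RTCritSectRwLeaveShared(&CritSect);
        RTCritSectRwLeaveExcl(&CritSect);
    }

    return RTCritSectRwDelete(&CritSect);
}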