VirtualBox

source: vbox/trunk/src/VBox/Runtime/generic/critsectrw-generic.cpp @ 91586

Last change on this file since 91586 was 90637, checked in by vboxsync, 3 years ago

IPRT/RTCritSectRw,VMM/PDMCritSectRw: Rearranged the core members a little so we can use 128-bit cmpxchg-like hardware primitives to update both u64State and hNativeWriter at the same time. This may allow for some optimizations for the PDM version of the code. bugref:6695

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 40.7 KB
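
The change described above packs u64State and hNativeWriter next to each other so both can be updated with a single 128-bit compare-exchange. Below is a minimal, hypothetical sketch of that idea using the GCC/Clang __int128 atomics; it is not IPRT code, and the names (RWCORE128, tryClaimWriter) are invented for illustration. On x86-64 this typically needs cmpxchg16b support (-mcx16) or falls back to libatomic.

/* Illustrative sketch only -- not part of IPRT. */
#include <stdbool.h>
#include <stdint.h>

typedef union RWCORE128
{
    struct
    {
        uint64_t u64State;       /* reader/writer counts + direction bits */
        uint64_t hNativeWriter;  /* native handle of the exclusive owner, 0 = none here */
    } s;
    unsigned __int128 u128;      /* the whole core as one 16-byte quantity */
} __attribute__((aligned(16))) RWCORE128;

/* Atomically claim write ownership and publish the new state in one step. */
static bool tryClaimWriter(RWCORE128 *pCore, uint64_t u64OldState,
                           uint64_t u64NewState, uint64_t hNativeSelf)
{
    RWCORE128 Old, New;
    Old.s.u64State      = u64OldState;
    Old.s.hNativeWriter = 0;             /* expect no current writer */
    New.s.u64State      = u64NewState;
    New.s.hNativeWriter = hNativeSelf;   /* become the writer */
    return __atomic_compare_exchange_n(&pCore->u128, &Old.u128, New.u128,
                                       false /*weak*/,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
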
1/* $Id: critsectrw-generic.cpp 90637 2021-08-11 21:15:42Z vboxsync $ */
2/** @file
3 * IPRT - Read/Write Critical Section, Generic.
4 */
5
6/*
7 * Copyright (C) 2009-2020 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#define RTCRITSECTRW_WITHOUT_REMAPPING
32#define RTASSERT_QUIET
33#include <iprt/critsect.h>
34#include "internal/iprt.h"
35
36#include <iprt/asm.h>
37#include <iprt/assert.h>
38#include <iprt/err.h>
39#include <iprt/lockvalidator.h>
40#include <iprt/mem.h>
41#include <iprt/semaphore.h>
42#include <iprt/thread.h>
43
44#include "internal/magics.h"
45#include "internal/strict.h"
46
47/* Two issues here, (1) the tracepoint generator uses IPRT, and (2) only one .d
48 file per module. */
49#ifdef IPRT_WITH_DTRACE
50# include IPRT_DTRACE_INCLUDE
51# ifdef IPRT_DTRACE_PREFIX
52# define IPRT_CRITSECTRW_EXCL_ENTERED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED)
53# define IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED)
54# define IPRT_CRITSECTRW_EXCL_LEAVING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING)
55# define IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED)
56# define IPRT_CRITSECTRW_EXCL_BUSY RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_BUSY)
57# define IPRT_CRITSECTRW_EXCL_WAITING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_WAITING)
58# define IPRT_CRITSECTRW_EXCL_ENTERED_SHARED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED_SHARED)
59# define IPRT_CRITSECTRW_EXCL_LEAVING_SHARED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING_SHARED)
60# define IPRT_CRITSECTRW_SHARED_ENTERED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_ENTERED)
61# define IPRT_CRITSECTRW_SHARED_LEAVING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_LEAVING)
62# define IPRT_CRITSECTRW_SHARED_BUSY RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_BUSY)
63# define IPRT_CRITSECTRW_SHARED_WAITING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_WAITING)
64# endif
65#else
66# define IPRT_CRITSECTRW_EXCL_ENTERED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
67# define IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED() (false)
68# define IPRT_CRITSECTRW_EXCL_LEAVING(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
69# define IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED() (false)
70# define IPRT_CRITSECTRW_EXCL_BUSY( a_pvCritSect, a_pszName, a_fWriteMode, a_cWaitingReaders, a_cReaders, cWriters, a_pvNativeOwnerThread) do {} while (0)
71# define IPRT_CRITSECTRW_EXCL_WAITING(a_pvCritSect, a_pszName, a_fWriteMode, a_cWaitingReaders, a_cReaders, cWriters, a_pvNativeOwnerThread) do {} while (0)
72# define IPRT_CRITSECTRW_EXCL_ENTERED_SHARED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
73# define IPRT_CRITSECTRW_EXCL_LEAVING_SHARED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
74# define IPRT_CRITSECTRW_SHARED_ENTERED(a_pvCritSect, a_pszName, a_cReaders, a_cWaitingWriters) do {} while (0)
75# define IPRT_CRITSECTRW_SHARED_LEAVING(a_pvCritSect, a_pszName, a_cReaders, a_cWaitingWriters) do {} while (0)
76# define IPRT_CRITSECTRW_SHARED_BUSY( a_pvCritSect, a_pszName, a_pvNativeOwnerThread, a_cWaitingReaders, a_cWriters) do {} while (0)
77# define IPRT_CRITSECTRW_SHARED_WAITING(a_pvCritSect, a_pszName, a_pvNativeOwnerThread, a_cWaitingReaders, a_cWriters) do {} while (0)
78#endif
79
80
81
82RTDECL(int) RTCritSectRwInit(PRTCRITSECTRW pThis)
83{
84 return RTCritSectRwInitEx(pThis, 0, NIL_RTLOCKVALCLASS, RTLOCKVAL_SUB_CLASS_NONE, "RTCritSectRw");
85}
86RT_EXPORT_SYMBOL(RTCritSectRwInit);
87
88
89RTDECL(int) RTCritSectRwInitEx(PRTCRITSECTRW pThis, uint32_t fFlags,
90 RTLOCKVALCLASS hClass, uint32_t uSubClass, const char *pszNameFmt, ...)
91{
92 int rc;
93 AssertReturn(!(fFlags & ~( RTCRITSECT_FLAGS_NO_NESTING | RTCRITSECT_FLAGS_NO_LOCK_VAL | RTCRITSECT_FLAGS_BOOTSTRAP_HACK
94 | RTCRITSECT_FLAGS_NOP )),
95 VERR_INVALID_PARAMETER);
96 RT_NOREF_PV(hClass); RT_NOREF_PV(uSubClass); RT_NOREF_PV(pszNameFmt);
97
98
99 /*
100 * Initialize the structure, allocate the lock validator stuff and sems.
101 */
102 pThis->u32Magic = RTCRITSECTRW_MAGIC_DEAD;
103 pThis->fNeedReset = false;
104#ifdef IN_RING0
105 pThis->fFlags = (uint16_t)(fFlags | RTCRITSECT_FLAGS_RING0);
106#else
107 pThis->fFlags = (uint16_t)(fFlags & ~RTCRITSECT_FLAGS_RING0);
108#endif
109 pThis->u.u128.s.Hi = 0;
110 pThis->u.u128.s.Lo = 0;
111 pThis->u.s.hNativeWriter= NIL_RTNATIVETHREAD;
112 AssertCompile(sizeof(pThis->u.u128) >= sizeof(pThis->u.s));
113 pThis->cWriterReads = 0;
114 pThis->cWriteRecursions = 0;
115 pThis->hEvtWrite = NIL_RTSEMEVENT;
116 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
117 pThis->pValidatorWrite = NULL;
118 pThis->pValidatorRead = NULL;
119
120#ifdef RTCRITSECTRW_STRICT
121 bool const fLVEnabled = !(fFlags & RTCRITSECT_FLAGS_NO_LOCK_VAL);
122 if (!pszNameFmt)
123 {
124 static uint32_t volatile s_iAnon = 0;
125 uint32_t i = ASMAtomicIncU32(&s_iAnon) - 1;
126 rc = RTLockValidatorRecExclCreate(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
127 fLVEnabled, "RTCritSectRw-%u", i);
128 if (RT_SUCCESS(rc))
129 rc = RTLockValidatorRecSharedCreate(&pThis->pValidatorRead, hClass, uSubClass, pThis,
130 false /*fSignaller*/, fLVEnabled, "RTCritSectRw-%u", i);
131 }
132 else
133 {
134 va_list va;
135 va_start(va, pszNameFmt);
136 rc = RTLockValidatorRecExclCreateV(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
137 fLVEnabled, pszNameFmt, va);
138 va_end(va);
139 if (RT_SUCCESS(rc))
140 {
141 va_start(va, pszNameFmt);
142 rc = RTLockValidatorRecSharedCreateV(&pThis->pValidatorRead, hClass, uSubClass, pThis,
143 false /*fSignaller*/, fLVEnabled, pszNameFmt, va);
144 va_end(va);
145 }
146 }
147 if (RT_SUCCESS(rc))
148 rc = RTLockValidatorRecMakeSiblings(&pThis->pValidatorWrite->Core, &pThis->pValidatorRead->Core);
149
150 if (RT_SUCCESS(rc))
151#endif
152 {
153 rc = RTSemEventMultiCreate(&pThis->hEvtRead);
154 if (RT_SUCCESS(rc))
155 {
156 rc = RTSemEventCreate(&pThis->hEvtWrite);
157 if (RT_SUCCESS(rc))
158 {
159 pThis->u32Magic = RTCRITSECTRW_MAGIC;
160 return VINF_SUCCESS;
161 }
162 RTSemEventMultiDestroy(pThis->hEvtRead);
163 }
164 }
165
166#ifdef RTCRITSECTRW_STRICT
167 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
168 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
169#endif
170 return rc;
171}
172RT_EXPORT_SYMBOL(RTCritSectRwInitEx);
173
174
175RTDECL(uint32_t) RTCritSectRwSetSubClass(PRTCRITSECTRW pThis, uint32_t uSubClass)
176{
177 AssertPtrReturn(pThis, RTLOCKVAL_SUB_CLASS_INVALID);
178 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, RTLOCKVAL_SUB_CLASS_INVALID);
179#ifdef IN_RING0
180 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
181#else
182 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
183#endif
184#ifdef RTCRITSECTRW_STRICT
185 AssertReturn(!(pThis->fFlags & RTCRITSECT_FLAGS_NOP), RTLOCKVAL_SUB_CLASS_INVALID);
186
187 RTLockValidatorRecSharedSetSubClass(pThis->pValidatorRead, uSubClass);
188 return RTLockValidatorRecExclSetSubClass(pThis->pValidatorWrite, uSubClass);
189#else
190 NOREF(uSubClass);
191 return RTLOCKVAL_SUB_CLASS_INVALID;
192#endif
193}
194RT_EXPORT_SYMBOL(RTCritSectRwSetSubClass);
195
196
197static int rtCritSectRwEnterShared(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
198{
199 /*
200 * Validate input.
201 */
202 AssertPtr(pThis);
203 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
204#ifdef IN_RING0
205 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
206#else
207 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
208#endif
209 RT_NOREF_PV(pSrcPos);
210
211#ifdef RTCRITSECTRW_STRICT
212 RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
213 if (!fTryOnly)
214 {
215 int rc9;
216 RTNATIVETHREAD hNativeWriter;
217 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
218 if (hNativeWriter != NIL_RTNATIVETHREAD && hNativeWriter == RTThreadNativeSelf())
219 rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
220 else
221 rc9 = RTLockValidatorRecSharedCheckOrder(pThis->pValidatorRead, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
222 if (RT_FAILURE(rc9))
223 return rc9;
224 }
225#endif
226
227 /*
228 * Get cracking...
229 */
230 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
231 uint64_t u64OldState = u64State;
232
233 for (;;)
234 {
235 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
236 {
237 /* It flows in the right direction, try to follow it before it changes. */
238 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
239 c++;
240 Assert(c < RTCSRW_CNT_MASK / 2);
241 u64State &= ~RTCSRW_CNT_RD_MASK;
242 u64State |= c << RTCSRW_CNT_RD_SHIFT;
243 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
244 {
245#ifdef RTCRITSECTRW_STRICT
246 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
247#endif
248 break;
249 }
250 }
251 else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
252 {
253 /* Wrong direction, but we're alone here and can simply try to switch the direction. */
254 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
255 u64State |= (UINT64_C(1) << RTCSRW_CNT_RD_SHIFT) | (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT);
256 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
257 {
258 Assert(!pThis->fNeedReset);
259#ifdef RTCRITSECTRW_STRICT
260 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
261#endif
262 break;
263 }
264 }
265 else
266 {
267 /* Is the writer perhaps doing a read recursion? */
268 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
269 RTNATIVETHREAD hNativeWriter;
270 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
271 if (hNativeSelf == hNativeWriter)
272 {
273#ifdef RTCRITSECTRW_STRICT
274 int rc9 = RTLockValidatorRecExclRecursionMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core, pSrcPos);
275 if (RT_FAILURE(rc9))
276 return rc9;
277#endif
278 Assert(pThis->cWriterReads < UINT32_MAX / 2);
279 uint32_t const cReads = ASMAtomicIncU32(&pThis->cWriterReads); NOREF(cReads);
280 IPRT_CRITSECTRW_EXCL_ENTERED_SHARED(pThis, NULL,
281 cReads + pThis->cWriteRecursions,
282 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
283 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
284
285 return VINF_SUCCESS; /* don't break! */
286 }
287
288 /* If we're only trying, return already. */
289 if (fTryOnly)
290 {
291 IPRT_CRITSECTRW_SHARED_BUSY(pThis, NULL,
292 (void *)pThis->u.s.hNativeWriter,
293 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
294 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
295 return VERR_SEM_BUSY;
296 }
297
298 /* Add ourselves to the queue and wait for the direction to change. */
299 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
300 c++;
301 Assert(c < RTCSRW_CNT_MASK / 2);
302
303 uint64_t cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
304 cWait++;
305 Assert(cWait <= c);
306 Assert(cWait < RTCSRW_CNT_MASK / 2);
307
308 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
309 u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
310
311 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
312 {
313 IPRT_CRITSECTRW_SHARED_WAITING(pThis, NULL,
314 (void *)pThis->u.s.hNativeWriter,
315 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
316 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
317 for (uint32_t iLoop = 0; ; iLoop++)
318 {
319 int rc;
320#ifdef RTCRITSECTRW_STRICT
321 rc = RTLockValidatorRecSharedCheckBlocking(pThis->pValidatorRead, hThreadSelf, pSrcPos, true,
322 RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_READ, false);
323 if (RT_SUCCESS(rc))
324#elif defined(IN_RING3)
325 RTTHREAD hThreadSelf = RTThreadSelf();
326 RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_READ, false);
327#endif
328 {
329 rc = RTSemEventMultiWait(pThis->hEvtRead, RT_INDEFINITE_WAIT);
330#ifdef IN_RING3
331 RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_READ);
332#endif
333 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
334 return VERR_SEM_DESTROYED;
335 }
336 if (RT_FAILURE(rc))
337 {
338 /* Decrement the counts and return the error. */
339 for (;;)
340 {
341 u64OldState = u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
342 c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT; Assert(c > 0);
343 c--;
344 cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT; Assert(cWait > 0);
345 cWait--;
346 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
347 u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
348 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
349 break;
350 }
351 return rc;
352 }
353
354 Assert(pThis->fNeedReset);
355 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
356 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
357 break;
358 AssertMsg(iLoop < 1, ("%u\n", iLoop));
359 }
360
361 /* Decrement the wait count and maybe reset the semaphore (if we're last). */
362 for (;;)
363 {
364 u64OldState = u64State;
365
366 cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
367 Assert(cWait > 0);
368 cWait--;
369 u64State &= ~RTCSRW_WAIT_CNT_RD_MASK;
370 u64State |= cWait << RTCSRW_WAIT_CNT_RD_SHIFT;
371
372 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
373 {
374 if (cWait == 0)
375 {
376 if (ASMAtomicXchgBool(&pThis->fNeedReset, false))
377 {
378 int rc = RTSemEventMultiReset(pThis->hEvtRead);
379 AssertRCReturn(rc, rc);
380 }
381 }
382 break;
383 }
384 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
385 }
386
387#ifdef RTCRITSECTRW_STRICT
388 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
389#endif
390 break;
391 }
392 }
393
394 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
395 return VERR_SEM_DESTROYED;
396
397 ASMNopPause();
398 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
399 u64OldState = u64State;
400 }
401
402 /* got it! */
403 Assert((ASMAtomicReadU64(&pThis->u.s.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT));
404 IPRT_CRITSECTRW_SHARED_ENTERED(pThis, NULL,
405 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
406 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
407 return VINF_SUCCESS;
408}
409
410
411RTDECL(int) RTCritSectRwEnterShared(PRTCRITSECTRW pThis)
412{
413#ifndef RTCRITSECTRW_STRICT
414 return rtCritSectRwEnterShared(pThis, NULL, false /*fTryOnly*/);
415#else
416 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
417 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
418#endif
419}
420RT_EXPORT_SYMBOL(RTCritSectRwEnterShared);
421
422
423RTDECL(int) RTCritSectRwEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
424{
425 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
426 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
427}
428RT_EXPORT_SYMBOL(RTCritSectRwEnterSharedDebug);
429
430
431RTDECL(int) RTCritSectRwTryEnterShared(PRTCRITSECTRW pThis)
432{
433#ifndef RTCRITSECTRW_STRICT
434 return rtCritSectRwEnterShared(pThis, NULL, true /*fTryOnly*/);
435#else
436 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
437 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
438#endif
439}
440RT_EXPORT_SYMBOL(RTCritSectRwTryEnterShared);
441
442
443RTDECL(int) RTCritSectRwTryEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
444{
445 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
446 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
447}
448RT_EXPORT_SYMBOL(RTCritSectRwTryEnterSharedDebug);
449
450
451
452RTDECL(int) RTCritSectRwLeaveShared(PRTCRITSECTRW pThis)
453{
454 /*
455 * Validate handle.
456 */
457 AssertPtr(pThis);
458 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
459#ifdef IN_RING0
460 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
461#else
462 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
463#endif
464
465 /*
466 * Check the direction and take action accordingly.
467 */
468 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
469 uint64_t u64OldState = u64State;
470 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
471 {
472#ifdef RTCRITSECTRW_STRICT
473 int rc9 = RTLockValidatorRecSharedCheckAndRelease(pThis->pValidatorRead, NIL_RTTHREAD);
474 if (RT_FAILURE(rc9))
475 return rc9;
476#endif
477 IPRT_CRITSECTRW_SHARED_LEAVING(pThis, NULL,
478 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT) - 1,
479 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
480
481 for (;;)
482 {
483 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
484 AssertReturn(c > 0, VERR_NOT_OWNER);
485 c--;
486
487 if ( c > 0
488 || (u64State & RTCSRW_CNT_WR_MASK) == 0)
489 {
490 /* Don't change the direction. */
491 u64State &= ~RTCSRW_CNT_RD_MASK;
492 u64State |= c << RTCSRW_CNT_RD_SHIFT;
493 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
494 break;
495 }
496 else
497 {
498 /* Reverse the direction and signal the reader threads. */
499 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_DIR_MASK);
500 u64State |= RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT;
501 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
502 {
503 int rc = RTSemEventSignal(pThis->hEvtWrite);
504 AssertRC(rc);
505 break;
506 }
507 }
508
509 ASMNopPause();
510 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
511 u64OldState = u64State;
512 }
513 }
514 else
515 {
516 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
517 RTNATIVETHREAD hNativeWriter;
518 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
519 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
520 AssertReturn(pThis->cWriterReads > 0, VERR_NOT_OWNER);
521#ifdef RTCRITSECTRW_STRICT
522 int rc = RTLockValidatorRecExclUnwindMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core);
523 if (RT_FAILURE(rc))
524 return rc;
525#endif
526 uint32_t cReads = ASMAtomicDecU32(&pThis->cWriterReads); NOREF(cReads);
527 IPRT_CRITSECTRW_EXCL_LEAVING_SHARED(pThis, NULL,
528 cReads + pThis->cWriteRecursions,
529 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
530 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
531 }
532
533 return VINF_SUCCESS;
534}
535RT_EXPORT_SYMBOL(RTCritSectRwLeaveShared);
536
537
538static int rtCritSectRwEnterExcl(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
539{
540 /*
541 * Validate input.
542 */
543 AssertPtr(pThis);
544 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
545#ifdef IN_RING0
546 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
547#else
548 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
549#endif
550 RT_NOREF_PV(pSrcPos);
551
552#ifdef RTCRITSECTRW_STRICT
553 RTTHREAD hThreadSelf = NIL_RTTHREAD;
554 if (!fTryOnly)
555 {
556 hThreadSelf = RTThreadSelfAutoAdopt();
557 int rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
558 if (RT_FAILURE(rc9))
559 return rc9;
560 }
561#endif
562
563 /*
564 * Check if we're already the owner and just recursing.
565 */
566 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
567 RTNATIVETHREAD hNativeWriter;
568 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
569 if (hNativeSelf == hNativeWriter)
570 {
571 Assert((ASMAtomicReadU64(&pThis->u.s.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
572#ifdef RTCRITSECTRW_STRICT
573 int rc9 = RTLockValidatorRecExclRecursion(pThis->pValidatorWrite, pSrcPos);
574 if (RT_FAILURE(rc9))
575 return rc9;
576#endif
577 Assert(pThis->cWriteRecursions < UINT32_MAX / 2);
578 uint32_t cNestings = ASMAtomicIncU32(&pThis->cWriteRecursions); NOREF(cNestings);
579
580#ifdef IPRT_WITH_DTRACE
581 if (IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED())
582 {
583 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
584 IPRT_CRITSECTRW_EXCL_ENTERED(pThis, NULL, cNestings + pThis->cWriterReads,
585 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
586 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
587 }
588#endif
589 return VINF_SUCCESS;
590 }
591
592 /*
593 * Get cracking.
594 */
595 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
596 uint64_t u64OldState = u64State;
597
598 for (;;)
599 {
600 if ( (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
601 || (u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) != 0)
602 {
603 /* It flows in the right direction, try to follow it before it changes. */
604 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
605 c++;
606 Assert(c < RTCSRW_CNT_MASK / 2);
607 u64State &= ~RTCSRW_CNT_WR_MASK;
608 u64State |= c << RTCSRW_CNT_WR_SHIFT;
609 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
610 break;
611 }
612 else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
613 {
614 /* Wrong direction, but we're alone here and can simply try to switch the direction. */
615 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
616 u64State |= (UINT64_C(1) << RTCSRW_CNT_WR_SHIFT) | (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT);
617 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
618 break;
619 }
620 else if (fTryOnly)
621 /* Wrong direction and we're not supposed to wait, just return. */
622 return VERR_SEM_BUSY;
623 else
624 {
625 /* Add ourselves to the write count and break out to do the wait. */
626 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
627 c++;
628 Assert(c < RTCSRW_CNT_MASK / 2);
629 u64State &= ~RTCSRW_CNT_WR_MASK;
630 u64State |= c << RTCSRW_CNT_WR_SHIFT;
631 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
632 break;
633 }
634
635 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
636 return VERR_SEM_DESTROYED;
637
638 ASMNopPause();
639 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
640 u64OldState = u64State;
641 }
642
643 /*
644 * If we're in write mode now, try to grab the ownership. Play fair if there
645 * are threads already waiting.
646 */
647 bool fDone = (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
648 && ( ((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT) == 1
649 || fTryOnly);
650 if (fDone)
651 ASMAtomicCmpXchgHandle(&pThis->u.s.hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
652 if (!fDone)
653 {
654 /*
655 * If only trying, undo the above writer incrementation and return.
656 */
657 if (fTryOnly)
658 {
659 for (;;)
660 {
661 u64OldState = u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
662 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
663 c--;
664 u64State &= ~RTCSRW_CNT_WR_MASK;
665 u64State |= c << RTCSRW_CNT_WR_SHIFT;
666 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
667 break;
668 }
669 IPRT_CRITSECTRW_EXCL_BUSY(pThis, NULL,
670 (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT) /*fWrite*/,
671 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
672 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
673 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT),
674 (void *)pThis->u.s.hNativeWriter);
675 return VERR_SEM_BUSY;
676 }
677
678 /*
679 * Wait for our turn.
680 */
681 IPRT_CRITSECTRW_EXCL_WAITING(pThis, NULL,
682 (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT) /*fWrite*/,
683 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
684 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
685 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT),
686 (void *)pThis->u.s.hNativeWriter);
687 for (uint32_t iLoop = 0; ; iLoop++)
688 {
689 int rc;
690#ifdef RTCRITSECTRW_STRICT
691 if (hThreadSelf == NIL_RTTHREAD)
692 hThreadSelf = RTThreadSelfAutoAdopt();
693 rc = RTLockValidatorRecExclCheckBlocking(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true,
694 RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_WRITE, false);
695 if (RT_SUCCESS(rc))
696#elif defined(IN_RING3)
697 RTTHREAD hThreadSelf = RTThreadSelf();
698 RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_WRITE, false);
699#endif
700 {
701 rc = RTSemEventWait(pThis->hEvtWrite, RT_INDEFINITE_WAIT);
702#ifdef IN_RING3
703 RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_WRITE);
704#endif
705 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
706 return VERR_SEM_DESTROYED;
707 }
708 if (RT_FAILURE(rc))
709 {
710 /* Decrement the counts and return the error. */
711 for (;;)
712 {
713 u64OldState = u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
714 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
715 c--;
716 u64State &= ~RTCSRW_CNT_WR_MASK;
717 u64State |= c << RTCSRW_CNT_WR_SHIFT;
718 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
719 break;
720 }
721 return rc;
722 }
723
724 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
725 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
726 {
727 ASMAtomicCmpXchgHandle(&pThis->u.s.hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
728 if (fDone)
729 break;
730 }
731 AssertMsg(iLoop < 1000, ("%u\n", iLoop)); /* may loop a few times here... */
732 }
733 }
734
735 /*
736 * Got it!
737 */
738 Assert((ASMAtomicReadU64(&pThis->u.s.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
739 ASMAtomicWriteU32(&pThis->cWriteRecursions, 1);
740 Assert(pThis->cWriterReads == 0);
741#ifdef RTCRITSECTRW_STRICT
742 RTLockValidatorRecExclSetOwner(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true);
743#endif
744 IPRT_CRITSECTRW_EXCL_ENTERED(pThis, NULL, 1,
745 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
746 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
747
748 return VINF_SUCCESS;
749}
750
751
752RTDECL(int) RTCritSectRwEnterExcl(PRTCRITSECTRW pThis)
753{
754#ifndef RTCRITSECTRW_STRICT
755 return rtCritSectRwEnterExcl(pThis, NULL, false /*fTryOnly*/);
756#else
757 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
758 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryOnly*/);
759#endif
760}
761RT_EXPORT_SYMBOL(RTCritSectRwEnterExcl);
762
763
764RTDECL(int) RTCritSectRwEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
765{
766 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
767 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryOnly*/);
768}
769RT_EXPORT_SYMBOL(RTCritSectRwEnterExclDebug);
770
771
772RTDECL(int) RTCritSectRwTryEnterExcl(PRTCRITSECTRW pThis)
773{
774#ifndef RTCRITSECTRW_STRICT
775 return rtCritSectRwEnterExcl(pThis, NULL, true /*fTryOnly*/);
776#else
777 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
778 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryOnly*/);
779#endif
780}
781RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExcl);
782
783
784RTDECL(int) RTCritSectRwTryEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
785{
786 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
787 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryOnly*/);
788}
789RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExclDebug);
790
791
792RTDECL(int) RTCritSectRwLeaveExcl(PRTCRITSECTRW pThis)
793{
794 /*
795 * Validate handle.
796 */
797 AssertPtr(pThis);
798 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
799#ifdef IN_RING0
800 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
801#else
802 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
803#endif
804
805 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
806 RTNATIVETHREAD hNativeWriter;
807 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
808 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
809
810 /*
811 * Unwind a recursion.
812 */
813 if (pThis->cWriteRecursions == 1)
814 {
815 AssertReturn(pThis->cWriterReads == 0, VERR_WRONG_ORDER); /* (must release all read recursions before the final write.) */
816#ifdef RTCRITSECTRW_STRICT
817 int rc9 = RTLockValidatorRecExclReleaseOwner(pThis->pValidatorWrite, true);
818 if (RT_FAILURE(rc9))
819 return rc9;
820#endif
821 /*
822 * Update the state.
823 */
824 ASMAtomicWriteU32(&pThis->cWriteRecursions, 0);
825 ASMAtomicWriteHandle(&pThis->u.s.hNativeWriter, NIL_RTNATIVETHREAD);
826
827 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
828 IPRT_CRITSECTRW_EXCL_LEAVING(pThis, NULL, 0,
829 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
830 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
831
832 for (;;)
833 {
834 uint64_t u64OldState = u64State;
835
836 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
837 Assert(c > 0);
838 c--;
839
840 if ( c > 0
841 || (u64State & RTCSRW_CNT_RD_MASK) == 0)
842 {
843 /* Don't change the direction, wake up the next writer if any. */
844 u64State &= ~RTCSRW_CNT_WR_MASK;
845 u64State |= c << RTCSRW_CNT_WR_SHIFT;
846 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
847 {
848 if (c > 0)
849 {
850 int rc = RTSemEventSignal(pThis->hEvtWrite);
851 AssertRC(rc);
852 }
853 break;
854 }
855 }
856 else
857 {
858 /* Reverse the direction and signal the reader threads. */
859 u64State &= ~(RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
860 u64State |= RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT;
861 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
862 {
863 Assert(!pThis->fNeedReset);
864 ASMAtomicWriteBool(&pThis->fNeedReset, true);
865 int rc = RTSemEventMultiSignal(pThis->hEvtRead);
866 AssertRC(rc);
867 break;
868 }
869 }
870
871 ASMNopPause();
872 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
873 return VERR_SEM_DESTROYED;
874 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
875 }
876 }
877 else
878 {
879 Assert(pThis->cWriteRecursions != 0);
880#ifdef RTCRITSECTRW_STRICT
881 int rc9 = RTLockValidatorRecExclUnwind(pThis->pValidatorWrite);
882 if (RT_FAILURE(rc9))
883 return rc9;
884#endif
885 uint32_t cNestings = ASMAtomicDecU32(&pThis->cWriteRecursions); NOREF(cNestings);
886#ifdef IPRT_WITH_DTRACE
887 if (IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED())
888 {
889 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
890 IPRT_CRITSECTRW_EXCL_LEAVING(pThis, NULL, cNestings + pThis->cWriterReads,
891 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
892 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
893 }
894#endif
895 }
896
897 return VINF_SUCCESS;
898}
899RT_EXPORT_SYMBOL(RTCritSectRwLeaveExcl);
900
901
902RTDECL(bool) RTCritSectRwIsWriteOwner(PRTCRITSECTRW pThis)
903{
904 /*
905 * Validate handle.
906 */
907 AssertPtr(pThis);
908 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
909#ifdef IN_RING0
910 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
911#else
912 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
913#endif
914
915 /*
916 * Check ownership.
917 */
918 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
919 RTNATIVETHREAD hNativeWriter;
920 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
921 return hNativeWriter == hNativeSelf;
922}
923RT_EXPORT_SYMBOL(RTCritSectRwIsWriteOwner);
924
925
926RTDECL(bool) RTCritSectRwIsReadOwner(PRTCRITSECTRW pThis, bool fWannaHear)
927{
928 RT_NOREF_PV(fWannaHear);
929
930 /*
931 * Validate handle.
932 */
933 AssertPtr(pThis);
934 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
935#ifdef IN_RING0
936 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
937#else
938 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
939#endif
940
941 /*
942 * Inspect the state.
943 */
944 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
945 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
946 {
947 /*
948 * It's in write mode, so we can only be a reader if we're also the
949 * current writer.
950 */
951 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
952 RTNATIVETHREAD hWriter;
953 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hWriter);
954 return hWriter == hNativeSelf;
955 }
956
957 /*
958 * Read mode. If there are no current readers, then we cannot be a reader.
959 */
960 if (!(u64State & RTCSRW_CNT_RD_MASK))
961 return false;
962
963#ifdef RTCRITSECTRW_STRICT
964 /*
965 * Ask the lock validator.
966 */
967 return RTLockValidatorRecSharedIsOwner(pThis->pValidatorRead, NIL_RTTHREAD);
968#else
969 /*
970 * Ok, we don't know, just tell the caller what he wants to hear.
971 */
972 return fWannaHear;
973#endif
974}
975RT_EXPORT_SYMBOL(RTCritSectRwIsReadOwner);
976
977
978RTDECL(uint32_t) RTCritSectRwGetWriteRecursion(PRTCRITSECTRW pThis)
979{
980 /*
981 * Validate handle.
982 */
983 AssertPtr(pThis);
984 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
985
986 /*
987 * Return the requested data.
988 */
989 return pThis->cWriteRecursions;
990}
991RT_EXPORT_SYMBOL(RTCritSectRwGetWriteRecursion);
992
993
994RTDECL(uint32_t) RTCritSectRwGetWriterReadRecursion(PRTCRITSECTRW pThis)
995{
996 /*
997 * Validate handle.
998 */
999 AssertPtr(pThis);
1000 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
1001
1002 /*
1003 * Return the requested data.
1004 */
1005 return pThis->cWriterReads;
1006}
1007RT_EXPORT_SYMBOL(RTCritSectRwGetWriterReadRecursion);
1008
1009
1010RTDECL(uint32_t) RTCritSectRwGetReadCount(PRTCRITSECTRW pThis)
1011{
1012 /*
1013 * Validate input.
1014 */
1015 AssertPtr(pThis);
1016 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
1017
1018 /*
1019 * Return the requested data.
1020 */
1021 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
1022 if ((u64State & RTCSRW_DIR_MASK) != (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
1023 return 0;
1024 return (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
1025}
1026RT_EXPORT_SYMBOL(RTCritSectRwGetReadCount);
1027
1028
1029RTDECL(int) RTCritSectRwDelete(PRTCRITSECTRW pThis)
1030{
1031 /*
1032 * Assert free waiters and so on.
1033 */
1034 AssertPtr(pThis);
1035 Assert(pThis->u32Magic == RTCRITSECTRW_MAGIC);
1036 //Assert(pThis->cNestings == 0);
1037 //Assert(pThis->cLockers == -1);
1038 Assert(pThis->u.s.hNativeWriter == NIL_RTNATIVETHREAD);
1039#ifdef IN_RING0
1040 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
1041#else
1042 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
1043#endif
1044
1045 /*
1046 * Invalidate the structure and free the semaphores.
1047 */
1048 if (!ASMAtomicCmpXchgU32(&pThis->u32Magic, RTCRITSECTRW_MAGIC_DEAD, RTCRITSECTRW_MAGIC))
1049 return VERR_INVALID_PARAMETER;
1050
1051 pThis->fFlags = 0;
1052 pThis->u.s.u64State = 0;
1053
1054 RTSEMEVENT hEvtWrite = pThis->hEvtWrite;
1055 pThis->hEvtWrite = NIL_RTSEMEVENT;
1056 RTSEMEVENTMULTI hEvtRead = pThis->hEvtRead;
1057 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
1058
1059 int rc1 = RTSemEventDestroy(hEvtWrite); AssertRC(rc1);
1060 int rc2 = RTSemEventMultiDestroy(hEvtRead); AssertRC(rc2);
1061
1062#ifndef IN_RING0
1063 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
1064 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
1065#endif
1066
1067 return RT_SUCCESS(rc1) ? rc2 : rc1;
1068}
1069RT_EXPORT_SYMBOL(RTCritSectRwDelete);
1070