summaryrefslogtreecommitdiff
path: root/src/VBox
diff options
context:
space:
mode:
Diffstat (limited to 'src/VBox')
-rw-r--r--  src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp | 45
-rw-r--r--  src/VBox/VMM/VMMR3/CFGM.cpp              |  6
-rw-r--r--  src/VBox/VMM/VMMR3/STAM.cpp              | 11
-rw-r--r--  src/VBox/VMM/VMMR3/VM.cpp                | 16
-rw-r--r--  src/VBox/VMM/include/STAMInternal.h      | 51
5 files changed, 90 insertions, 39 deletions
diff --git a/src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp b/src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp
index c8dbd34c427..1149f403127 100644
--- a/src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp
+++ b/src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp
@@ -109,7 +109,7 @@ VMMDECL(uint32_t) PDMR3CritSectRwSetSubClass(PPDMCRITSECTRW pThis, uint32_t uSub
{
AssertPtrReturn(pThis, RTLOCKVAL_SUB_CLASS_INVALID);
AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, RTLOCKVAL_SUB_CLASS_INVALID);
-# ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
AssertReturn(!(pThis->s.Core.fFlags & RTCRITSECT_FLAGS_NOP), RTLOCKVAL_SUB_CLASS_INVALID);
RTLockValidatorRecSharedSetSubClass(pThis->s.Core.pValidatorRead, uSubClass);
@@ -130,7 +130,7 @@ static int pdmCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVA
AssertPtr(pThis);
AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
if (!fTryOnly)
{
@@ -164,7 +164,7 @@ static int pdmCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVA
u64State |= c << RTCSRW_CNT_RD_SHIFT;
if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
{
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
RTLockValidatorRecSharedAddOwner(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos);
#endif
break;
@@ -178,7 +178,7 @@ static int pdmCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVA
if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
{
Assert(!pThis->s.Core.fNeedReset);
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
RTLockValidatorRecSharedAddOwner(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos);
#endif
break;
@@ -192,7 +192,7 @@ static int pdmCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVA
ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hNativeWriter);
if (hNativeSelf == hNativeWriter)
{
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
int rc9 = RTLockValidatorRecExclRecursionMixed(pThis->s.Core.pValidatorWrite, &pThis->s.Core.pValidatorRead->Core, pSrcPos);
if (RT_FAILURE(rc9))
return rc9;
@@ -224,7 +224,7 @@ static int pdmCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVA
for (uint32_t iLoop = 0; ; iLoop++)
{
int rc;
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
rc = RTLockValidatorRecSharedCheckBlocking(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos, true,
RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_READ, false);
if (RT_SUCCESS(rc))
@@ -294,7 +294,7 @@ static int pdmCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVA
u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
}
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
RTLockValidatorRecSharedAddOwner(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos);
#endif
break;
@@ -341,7 +341,7 @@ static int pdmCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVA
*/
VMMDECL(int) PDMCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy)
{
-#ifndef PDMCRITSECTRW_STRICT
+#if !defined(PDMCRITSECTRW_STRICT) || !defined(IN_RING3)
return pdmCritSectRwEnterShared(pThis, rcBusy, NULL, false /*fTryOnly*/);
#else
RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
@@ -400,7 +400,7 @@ VMMDECL(int) PDMCritSectRwEnterSharedDebug(PPDMCRITSECTRW pThis, int rcBusy, RTH
*/
VMMDECL(int) PDMCritSectRwTryEnterShared(PPDMCRITSECTRW pThis)
{
-#ifndef PDMCRITSECTRW_STRICT
+#if !defined(PDMCRITSECTRW_STRICT) || !defined(IN_RING3)
return pdmCritSectRwEnterShared(pThis, VERR_SEM_BUSY, NULL, true /*fTryOnly*/);
#else
RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
@@ -461,7 +461,7 @@ VMMDECL(int) PDMCritSectRwLeaveShared(PPDMCRITSECTRW pThis)
uint64_t u64OldState = u64State;
if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
{
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
int rc9 = RTLockValidatorRecSharedCheckAndRelease(pThis->s.Core.pValidatorRead, NIL_RTTHREAD);
if (RT_FAILURE(rc9))
return rc9;
@@ -506,7 +506,7 @@ VMMDECL(int) PDMCritSectRwLeaveShared(PPDMCRITSECTRW pThis)
ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hNativeWriter);
AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
AssertReturn(pThis->s.Core.cWriterReads > 0, VERR_NOT_OWNER);
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
int rc = RTLockValidatorRecExclUnwindMixed(pThis->s.Core.pValidatorWrite, &pThis->s.Core.pValidatorRead->Core);
if (RT_FAILURE(rc))
return rc;
@@ -526,7 +526,7 @@ static int pdmCritSectRwEnterExcl(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVALS
AssertPtr(pThis);
AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
RTTHREAD hThreadSelf = NIL_RTTHREAD;
if (!fTryOnly)
{
@@ -546,7 +546,7 @@ static int pdmCritSectRwEnterExcl(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVALS
if (hNativeSelf == hNativeWriter)
{
Assert((ASMAtomicReadU64(&pThis->s.Core.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
int rc9 = RTLockValidatorRecExclRecursion(pThis->s.Core.pValidatorWrite, pSrcPos);
if (RT_FAILURE(rc9))
return rc9;
@@ -625,7 +625,7 @@ static int pdmCritSectRwEnterExcl(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVALS
for (uint32_t iLoop = 0; ; iLoop++)
{
int rc;
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
if (!fTryOnly)
{
if (hThreadSelf == NIL_RTTHREAD)
@@ -683,7 +683,7 @@ static int pdmCritSectRwEnterExcl(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVALS
Assert((ASMAtomicReadU64(&pThis->s.Core.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
ASMAtomicWriteU32(&pThis->s.Core.cWriteRecursions, 1);
Assert(pThis->s.Core.cWriterReads == 0);
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
RTLockValidatorRecExclSetOwner(pThis->s.Core.pValidatorWrite, hThreadSelf, pSrcPos, true);
#endif
STAM_REL_COUNTER_INC(&pThis->s.CTX_MID_Z(Stat,EnterExcl));
@@ -714,7 +714,7 @@ static int pdmCritSectRwEnterExcl(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVALS
*/
VMMDECL(int) PDMCritSectRwEnterExcl(PPDMCRITSECTRW pThis, int rcBusy)
{
-#ifndef PDMCRITSECTRW_STRICT
+#if !defined(PDMCRITSECTRW_STRICT) || !defined(IN_RING3)
return pdmCritSectRwEnterExcl(pThis, rcBusy, NULL, false /*fTryAgain*/);
#else
RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
@@ -770,7 +770,7 @@ VMMDECL(int) PDMCritSectRwEnterExclDebug(PPDMCRITSECTRW pThis, int rcBusy, RTHCU
*/
VMMDECL(int) PDMCritSectRwTryEnterExcl(PPDMCRITSECTRW pThis)
{
-#ifndef PDMCRITSECTRW_STRICT
+#if !defined(PDMCRITSECTRW_STRICT) || !defined(IN_RING3)
return pdmCritSectRwEnterExcl(pThis, VERR_SEM_BUSY, NULL, true /*fTryAgain*/);
#else
RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
@@ -828,12 +828,12 @@ VMMDECL(int) PDMCritSectRwLeaveExcl(PPDMCRITSECTRW pThis)
AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
/*
- * Unwind a recursion.
+ * Unwind one recursion. Is it the final one?
*/
if (pThis->s.Core.cWriteRecursions == 1)
{
AssertReturn(pThis->s.Core.cWriterReads == 0, VERR_WRONG_ORDER); /* (must release all read recursions before the final write.) */
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
int rc9 = RTLockValidatorRecExclReleaseOwner(pThis->s.Core.pValidatorWrite, true);
if (RT_FAILURE(rc9))
return rc9;
@@ -857,7 +857,7 @@ VMMDECL(int) PDMCritSectRwLeaveExcl(PPDMCRITSECTRW pThis)
if ( c > 0
|| (u64State & RTCSRW_CNT_RD_MASK) == 0)
{
- /* Don't change the direction, wait up the next writer if any. */
+ /* Don't change the direction, wake up the next writer if any. */
u64State &= ~RTCSRW_CNT_WR_MASK;
u64State |= c << RTCSRW_CNT_WR_SHIFT;
if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
@@ -892,8 +892,11 @@ VMMDECL(int) PDMCritSectRwLeaveExcl(PPDMCRITSECTRW pThis)
}
else
{
+ /*
+ * Not the final recursion.
+ */
Assert(pThis->s.Core.cWriteRecursions != 0);
-#ifdef PDMCRITSECTRW_STRICT
+#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
int rc9 = RTLockValidatorRecExclUnwind(pThis->s.Core.pValidatorWrite);
if (RT_FAILURE(rc9))
return rc9;
diff --git a/src/VBox/VMM/VMMR3/CFGM.cpp b/src/VBox/VMM/VMMR3/CFGM.cpp
index 33cf58fc807..502d930202b 100644
--- a/src/VBox/VMM/VMMR3/CFGM.cpp
+++ b/src/VBox/VMM/VMMR3/CFGM.cpp
@@ -33,9 +33,9 @@
* where they are protected from accessing information of any parents. This is
* is implemented via the CFGMR3SetRestrictedRoot() API.
*
- * Data validation out over the basic primitives is left to the caller. The
- * caller is in a better position to know the proper validation rules of the
- * individual properties.
+ * Data validation beyond the basic primitives is left to the caller. The caller
+ * is in a better position to know the proper validation rules of the individual
+ * properties.
*
* @see grp_cfgm
*
diff --git a/src/VBox/VMM/VMMR3/STAM.cpp b/src/VBox/VMM/VMMR3/STAM.cpp
index 53b0eaac36a..f29f037fc4b 100644
--- a/src/VBox/VMM/VMMR3/STAM.cpp
+++ b/src/VBox/VMM/VMMR3/STAM.cpp
@@ -47,6 +47,7 @@
/*******************************************************************************
* Header Files *
*******************************************************************************/
+/*#define USE_PDMCRITSECTRW - testing, not for production. */
#define LOG_GROUP LOG_GROUP_STAM
#include <VBox/vmm/stam.h>
#include "STAMInternal.h"
@@ -262,6 +263,7 @@ static const STAMR0SAMPLE g_aGMMStats[] =
*/
VMMR3DECL(int) STAMR3InitUVM(PUVM pUVM)
{
+ int rc;
LogFlow(("STAMR3Init\n"));
/*
@@ -271,10 +273,12 @@ VMMR3DECL(int) STAMR3InitUVM(PUVM pUVM)
AssertRelease(sizeof(pUVM->stam.s) <= sizeof(pUVM->stam.padding));
/*
- * Setup any fixed pointers and offsets.
+ * Initialize the read/write lock.
*/
- int rc = RTSemRWCreate(&pUVM->stam.s.RWSem);
+#ifndef USE_PDMCRITSECTRW
+ rc = RTSemRWCreate(&pUVM->stam.s.RWSem);
AssertRCReturn(rc, rc);
+#endif
/*
* Register the ring-0 statistics (GVMM/GMM).
@@ -317,9 +321,11 @@ VMMR3DECL(void) STAMR3TermUVM(PUVM pUVM)
}
pUVM->stam.s.pHead = NULL;
+#ifndef USE_PDMCRITSECTRW
Assert(pUVM->stam.s.RWSem != NIL_RTSEMRW);
RTSemRWDestroy(pUVM->stam.s.RWSem);
pUVM->stam.s.RWSem = NIL_RTSEMRW;
+#endif
}
@@ -597,6 +603,7 @@ static int stamR3SlashCompare(const char *psz1, const char *psz2)
static int stamR3RegisterU(PUVM pUVM, void *pvSample, PFNSTAMR3CALLBACKRESET pfnReset, PFNSTAMR3CALLBACKPRINT pfnPrint,
STAMTYPE enmType, STAMVISIBILITY enmVisibility, const char *pszName, STAMUNIT enmUnit, const char *pszDesc)
{
+ STAM_LAZY_INIT(pUVM);
STAM_LOCK_WR(pUVM);
/*
diff --git a/src/VBox/VMM/VMMR3/VM.cpp b/src/VBox/VMM/VMMR3/VM.cpp
index ad3f1a30ca8..7c16d3caf12 100644
--- a/src/VBox/VMM/VMMR3/VM.cpp
+++ b/src/VBox/VMM/VMMR3/VM.cpp
@@ -513,13 +513,13 @@ static int vmR3CreateUVM(uint32_t cCpus, PCVMM2USERMETHODS pVmm2UserMethods, PUV
/*
* Init fundamental (sub-)components - STAM, MMR3Heap and PDMLdr.
*/
- rc = STAMR3InitUVM(pUVM);
+ rc = PDMR3InitUVM(pUVM);
if (RT_SUCCESS(rc))
{
- rc = MMR3InitUVM(pUVM);
+ rc = STAMR3InitUVM(pUVM);
if (RT_SUCCESS(rc))
{
- rc = PDMR3InitUVM(pUVM);
+ rc = MMR3InitUVM(pUVM);
if (RT_SUCCESS(rc))
{
/*
@@ -527,8 +527,8 @@ static int vmR3CreateUVM(uint32_t cCpus, PCVMM2USERMETHODS pVmm2UserMethods, PUV
*/
for (i = 0; i < cCpus; i++)
{
- rc = RTThreadCreateF(&pUVM->aCpus[i].vm.s.ThreadEMT, vmR3EmulationThread, &pUVM->aCpus[i], _1M,
- RTTHREADTYPE_EMULATION, RTTHREADFLAGS_WAITABLE,
+ rc = RTThreadCreateF(&pUVM->aCpus[i].vm.s.ThreadEMT, vmR3EmulationThread, &pUVM->aCpus[i],
+ _1M, RTTHREADTYPE_EMULATION, RTTHREADFLAGS_WAITABLE,
cCpus > 1 ? "EMT-%u" : "EMT", i);
if (RT_FAILURE(rc))
break;
@@ -547,11 +547,11 @@ static int vmR3CreateUVM(uint32_t cCpus, PCVMM2USERMETHODS pVmm2UserMethods, PUV
{
/** @todo rainy day: terminate the EMTs. */
}
- PDMR3TermUVM(pUVM);
+ MMR3TermUVM(pUVM);
}
- MMR3TermUVM(pUVM);
+ STAMR3TermUVM(pUVM);
}
- STAMR3TermUVM(pUVM);
+ PDMR3TermUVM(pUVM);
}
RTCritSectDelete(&pUVM->vm.s.AtErrorCritSect);
}
diff --git a/src/VBox/VMM/include/STAMInternal.h b/src/VBox/VMM/include/STAMInternal.h
index ca1c4e1e9bc..fb35e7b1e1e 100644
--- a/src/VBox/VMM/include/STAMInternal.h
+++ b/src/VBox/VMM/include/STAMInternal.h
@@ -23,7 +23,11 @@
#include <VBox/vmm/stam.h>
#include <VBox/vmm/gvmm.h>
#include <VBox/vmm/gmm.h>
-#include <iprt/semaphore.h>
+#ifndef USE_PDMCRITSECTRW
+# include <iprt/semaphore.h>
+#else
+# include <VBox/vmm/pdmcritsectrw.h>
+#endif
@@ -101,7 +105,11 @@ typedef struct STAMUSERPERVM
/** Pointer to the first sample. */
R3PTRTYPE(PSTAMDESC) pHead;
/** RW Lock for the list. */
+#ifndef USE_PDMCRITSECTRW
RTSEMRW RWSem;
+#else
+ PDMCRITSECTRW CritSectRw;
+#endif
/** The copy of the GVMM statistics. */
GVMMSTATS GVMMStats;
@@ -119,14 +127,47 @@ AssertCompileMemberAlignment(STAMUSERPERVM, GMMStats, 8);
typedef STAMUSERPERVM *PSTAMUSERPERVM;
+#ifndef USE_PDMCRITSECTRW
/** Locks the sample descriptors for reading. */
-#define STAM_LOCK_RD(pUVM) do { int rcSem = RTSemRWRequestRead(pUVM->stam.s.RWSem, RT_INDEFINITE_WAIT); AssertRC(rcSem); } while (0)
+# define STAM_LOCK_RD(pUVM) do { int rcSem = RTSemRWRequestRead(pUVM->stam.s.RWSem, RT_INDEFINITE_WAIT); AssertRC(rcSem); } while (0)
/** Locks the sample descriptors for writing. */
-#define STAM_LOCK_WR(pUVM) do { int rcSem = RTSemRWRequestWrite(pUVM->stam.s.RWSem, RT_INDEFINITE_WAIT); AssertRC(rcSem); } while (0)
+# define STAM_LOCK_WR(pUVM) do { int rcSem = RTSemRWRequestWrite(pUVM->stam.s.RWSem, RT_INDEFINITE_WAIT); AssertRC(rcSem); } while (0)
/** UnLocks the sample descriptors after reading. */
-#define STAM_UNLOCK_RD(pUVM) do { int rcSem = RTSemRWReleaseRead(pUVM->stam.s.RWSem); AssertRC(rcSem); } while (0)
+# define STAM_UNLOCK_RD(pUVM) do { int rcSem = RTSemRWReleaseRead(pUVM->stam.s.RWSem); AssertRC(rcSem); } while (0)
/** UnLocks the sample descriptors after writing. */
-#define STAM_UNLOCK_WR(pUVM) do { int rcSem = RTSemRWReleaseWrite(pUVM->stam.s.RWSem); AssertRC(rcSem); } while (0)
+# define STAM_UNLOCK_WR(pUVM) do { int rcSem = RTSemRWReleaseWrite(pUVM->stam.s.RWSem); AssertRC(rcSem); } while (0)
+/** Lazy initialization */
+# define STAM_LAZY_INIT(pUVM) do { } while (0)
+#else
+/** Locks the sample descriptors for reading. */
+# define STAM_LOCK_RD(pUVM) \
+ if (PDMCritSectRwIsInitialized(&pUVM->stam.s.CritSectRw)) \
+ { int rcSem = PDMCritSectRwEnterShared(&pUVM->stam.s.CritSectRw, VINF_SUCCESS); AssertRC(rcSem); } else do { } while (0)
+/** Locks the sample descriptors for writing. */
+# define STAM_LOCK_WR(pUVM) \
+ if (PDMCritSectRwIsInitialized(&pUVM->stam.s.CritSectRw)) \
+ { int rcSem = PDMCritSectRwEnterExcl(&pUVM->stam.s.CritSectRw, VINF_SUCCESS); AssertRC(rcSem); } else do { } while (0)
+/** UnLocks the sample descriptors after reading. */
+# define STAM_UNLOCK_RD(pUVM) \
+ if (PDMCritSectRwIsInitialized(&pUVM->stam.s.CritSectRw)) \
+ { int rcSem = PDMCritSectRwLeaveShared(&pUVM->stam.s.CritSectRw); AssertRC(rcSem); } else do { } while (0)
+/** UnLocks the sample descriptors after writing. */
+# define STAM_UNLOCK_WR(pUVM) \
+ if (PDMCritSectRwIsInitialized(&pUVM->stam.s.CritSectRw)) \
+ { int rcSem = PDMCritSectRwLeaveExcl(&pUVM->stam.s.CritSectRw); AssertRC(rcSem); } else do { } while (0)
+/** Lazy initialization. */
+# define STAM_LAZY_INIT(pUVM) \
+ if (!PDMCritSectRwIsInitialized(&pUVM->stam.s.CritSectRw) && (pUVM)->pVM) \
+ { \
+ static bool volatile s_fInProgress = false; \
+ if (!s_fInProgress) \
+ { \
+ s_fInProgress = true; \
+ int rcSem = PDMR3CritSectRwInit(pUVM->pVM, &pUVM->stam.s.CritSectRw, RT_SRC_POS, "stam-rw"); \
+ AssertRC(rcSem); Assert(PDMCritSectRwIsInitialized(&pUVM->stam.s.CritSectRw) || RT_FAILURE(rcSem)); \
+ } \
+ } else do { } while (0)
+#endif
/** @} */