path: root/src/sync.h
author    Pieter Wuille <pieter.wuille@gmail.com>  2016-04-08 22:14:19 +0200
committer Pieter Wuille <pieter.wuille@gmail.com>  2016-04-10 14:27:10 +0200
commit    5eeb913d6cff9cfe9a6769d7efe4a7b9f23de0f4 (patch)
tree      7acd0460c216efe835e8454f0665286c5442c05a /src/sync.h
parent    0afac87e8173dd71616e211aa08dcd59cb5cf90e (diff)
Clean up lockorder data of destroyed mutexes
The lockorder potential-deadlock detection works by remembering, for each lock A that is acquired while holding another lock B, the pair (A,B), and triggering a warning when (B,A) already exists in the table. A and B above are represented by pointers to the CCriticalSection objects being acquired. This means, however, that we need to clean up the table entries that refer to any critical section which is destroyed, as its memory address can potentially be reused for another, unrelated lock in the future. Implement this cleanup by remembering the pairs not only in the forward direction but also in the backward direction. This allows fast iteration over all pairs that use a deleted CCriticalSection in either the first or the second position.
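The commit message describes the forward/backward bookkeeping only in prose. Below is a minimal sketch of how such a cleanup could look, assuming a forward map named lockorders and a backward index named invlockorders (these names and the exact structure are assumptions; the real implementation lives in src/sync.cpp and may differ):

    // Sketch only: names and structure are assumptions, not the actual sync.cpp code.
    #include <map>
    #include <set>
    #include <utility>

    typedef std::pair<void*, void*> LockPair;

    std::map<LockPair, int> lockorders;  // forward table: (A,B) = A locked while holding B
    std::set<LockPair> invlockorders;    // backward index: (B,A) for each forward entry

    void RememberLockOrder(void* a, void* b)
    {
        lockorders[std::make_pair(a, b)]++;          // consulted by the deadlock detector
        invlockorders.insert(std::make_pair(b, a));  // only used to speed up DeleteLock
    }

    void DeleteLockSketch(void* cs)
    {
        LockPair start = std::make_pair(cs, (void*)0);

        // Erase every (cs, x) pair and its mirror image in the backward index.
        auto it = lockorders.lower_bound(start);
        while (it != lockorders.end() && it->first.first == cs) {
            invlockorders.erase(std::make_pair(it->first.second, it->first.first));
            it = lockorders.erase(it);
        }

        // Use the backward index to find every (x, cs) pair without a full scan.
        auto invit = invlockorders.lower_bound(start);
        while (invit != invlockorders.end() && invit->first == cs) {
            lockorders.erase(std::make_pair(invit->second, invit->first));
            invit = invlockorders.erase(invit);
        }
    }

Without the backward index, removing the (x, cs) pairs would require walking the entire forward table on every mutex destruction; with it, both directions are handled by ordered range lookups.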
Diffstat (limited to 'src/sync.h')
-rw-r--r--  src/sync.h  33
1 file changed, 21 insertions(+), 12 deletions(-)
diff --git a/src/sync.h b/src/sync.h
index 34dd8c228e..0c58fb6b4e 100644
--- a/src/sync.h
+++ b/src/sync.h
@@ -71,30 +71,39 @@ public:
}
};
-/**
- * Wrapped boost mutex: supports recursive locking, but no waiting
- * TODO: We should move away from using the recursive lock by default.
- */
-typedef AnnotatedMixin<boost::recursive_mutex> CCriticalSection;
-
-/** Wrapped boost mutex: supports waiting but not recursive locking */
-typedef AnnotatedMixin<boost::mutex> CWaitableCriticalSection;
-
-/** Just a typedef for boost::condition_variable, can be wrapped later if desired */
-typedef boost::condition_variable CConditionVariable;
-
#ifdef DEBUG_LOCKORDER
void EnterCritical(const char* pszName, const char* pszFile, int nLine, void* cs, bool fTry = false);
void LeaveCritical();
std::string LocksHeld();
void AssertLockHeldInternal(const char* pszName, const char* pszFile, int nLine, void* cs);
+void DeleteLock(void* cs);
#else
void static inline EnterCritical(const char* pszName, const char* pszFile, int nLine, void* cs, bool fTry = false) {}
void static inline LeaveCritical() {}
void static inline AssertLockHeldInternal(const char* pszName, const char* pszFile, int nLine, void* cs) {}
+void static inline DeleteLock(void* cs) {}
#endif
#define AssertLockHeld(cs) AssertLockHeldInternal(#cs, __FILE__, __LINE__, &cs)
+/**
+ * Wrapped boost mutex: supports recursive locking, but no waiting
+ * TODO: We should move away from using the recursive lock by default.
+ */
+class CCriticalSection : public AnnotatedMixin<boost::recursive_mutex>
+{
+public:
+ ~CCriticalSection() {
+ DeleteLock((void*)this);
+ }
+};
+
+typedef CCriticalSection CDynamicCriticalSection;
+/** Wrapped boost mutex: supports waiting but not recursive locking */
+typedef AnnotatedMixin<boost::mutex> CWaitableCriticalSection;
+
+/** Just a typedef for boost::condition_variable, can be wrapped later if desired */
+typedef boost::condition_variable CConditionVariable;
+
#ifdef DEBUG_LOCKCONTENTION
void PrintLockContention(const char* pszName, const char* pszFile, int nLine);
#endif
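With this change, any CCriticalSection, including dynamically allocated ones (CDynamicCriticalSection is now just an alias), purges its lockorder entries when it is destroyed. A hypothetical usage in a DEBUG_LOCKORDER build:

    // Hypothetical example; assumes DEBUG_LOCKORDER is defined.
    CCriticalSection* cs = new CCriticalSection();
    {
        LOCK(*cs);   // EnterCritical records ordering pairs involving cs
    }
    delete cs;       // ~CCriticalSection calls DeleteLock(cs), so a future lock
                     // allocated at the same address cannot trigger a spurious
                     // potential-deadlock warning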