Kernel: Reduce LOCK_DEBUG ifdefs by utilizing Kernel::LockLocation
The LOCK_DEBUG conditional code is pretty ugly for a feature that we only use rarely. We can remove a significant amount of this code by utilizing a zero-sized fake type when not building in LOCK_DEBUG mode. This lets us keep the same API, but lets the compiler optimize it away when we don't actually care about the location the caller came from.
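As a rough illustration of the technique described above, the header this commit adds to the includes (Kernel/Locking/LockLocation.h, visible in the diff below) could plausibly look like the following sketch. This is an assumption drawn from the commit message, not a quote of the actual file: under LOCK_DEBUG, LockLocation aliases AK::SourceLocation; otherwise it is an empty placeholder type with the same current() factory, so signatures stay identical in both build modes.

#pragma once

#if LOCK_DEBUG
#    include <AK/SourceLocation.h>

namespace Kernel {
// Debug builds: LockLocation carries real file/line/function information.
using LockLocation = AK::SourceLocation;
}
#else
namespace Kernel {
// Non-debug builds: an empty stand-in with the same interface. Passing it
// by value or const reference compiles down to nothing after optimization.
struct LockLocation {
    static constexpr LockLocation current() { return {}; }

private:
    constexpr LockLocation() = default;
};
}
#endif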
parent 6c18b4e558
commit bea74f4b77

4 changed files with 13 additions and 53 deletions
@@ -12,9 +12,6 @@
 #include <AK/IntrusiveList.h>
 #include <AK/Optional.h>
 #include <AK/OwnPtr.h>
-#ifdef LOCK_DEBUG
-#    include <AK/SourceLocation.h>
-#endif
 #include <AK/String.h>
 #include <AK/Time.h>
 #include <AK/Vector.h>
@@ -27,6 +24,7 @@
 #include <Kernel/Forward.h>
 #include <Kernel/KResult.h>
 #include <Kernel/KString.h>
+#include <Kernel/Locking/LockLocation.h>
 #include <Kernel/Locking/LockMode.h>
 #include <Kernel/Memory/VirtualRange.h>
 #include <Kernel/Scheduler.h>
@@ -1152,7 +1150,7 @@ public:
     RecursiveSpinLock& get_lock() const { return m_lock; }

 #if LOCK_DEBUG
-    void holding_lock(Mutex& lock, int refs_delta, const SourceLocation& location)
+    void holding_lock(Mutex& lock, int refs_delta, const LockLocation& location)
     {
         VERIFY(refs_delta != 0);
         m_holding_locks.fetch_add(refs_delta, AK::MemoryOrder::memory_order_relaxed);
@@ -1317,7 +1315,7 @@ private:
 #if LOCK_DEBUG
     struct HoldingLockInfo {
         Mutex* lock;
-        SourceLocation source_location;
+        LockLocation lock_location;
         unsigned count;
     };
     Atomic<u32> m_holding_locks { 0 };
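For a sense of how the unified API reads at call sites, a locking primitive can accept the location as a defaulted parameter. The declaration below is an illustrative sketch built on the LockLocation sketch above, not a quote of the real Mutex header: callers just write lock(), LOCK_DEBUG builds capture the caller's source location through the default argument, and other builds pass the empty placeholder, which costs nothing.

#include <Kernel/Locking/LockLocation.h>

namespace Kernel {

class Mutex {
public:
    // One declaration for both build modes: no #if LOCK_DEBUG at the
    // interface and none at any call site.
    void lock(const LockLocation& location = LockLocation::current());
    // ...
};

}

A caller then simply writes mutex.lock(); only the LOCK_DEBUG-guarded bookkeeping (such as holding_lock() in the hunk above) ever inspects the location.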