
Kernel: Make self-contained locking smart pointers their own classes

Until now, our kernel has reimplemented a number of AK classes to
provide automatic internal locking:

- RefPtr
- NonnullRefPtr
- WeakPtr
- Weakable

This patch renames the Kernel classes so that they can coexist with
the original AK classes:

- RefPtr => LockRefPtr
- NonnullRefPtr => NonnullLockRefPtr
- WeakPtr => LockWeakPtr
- Weakable => LockWeakable

The goal here is to eventually get rid of the Lock* classes in favor of
using external locking.
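
To illustrate the distinction (a minimal userspace sketch, not code from this commit; Thing and ExternallyLockedThing are invented names): with internal locking, every individual pointer operation takes a lock inside the smart pointer itself; with external locking, the pointer is a plain unsynchronized handle and the caller holds one explicit lock around the whole critical section.

// Userspace analogy using std::mutex; the kernel would use its own
// lock types. Invented example, not part of this commit.
#include <memory>
#include <mutex>

struct Thing {
    int value { 0 };
};

class ExternallyLockedThing {
public:
    // The caller-visible API takes the lock once, then runs a whole
    // transaction against a plain (unsynchronized) smart pointer.
    template<typename Callback>
    void with_thing(Callback callback)
    {
        std::lock_guard guard(m_lock);
        callback(*m_thing);
    }

private:
    std::mutex m_lock;
    std::shared_ptr<Thing> m_thing { std::make_shared<Thing>() };
};

int main()
{
    ExternallyLockedThing holder;
    holder.with_thing([](Thing& thing) { thing.value = 42; });
}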
Andreas Kling, 2022-08-19 20:53:40 +02:00
commit 11eee67b85 (parent e475263113)
360 changed files with 1703 additions and 1672 deletions

@@ -10,16 +10,16 @@
 #include <AK/Atomic.h>
 #include <AK/Error.h>
 #include <AK/Format.h>
-#include <AK/NonnullRefPtr.h>
 #include <AK/StdLibExtras.h>
 #include <AK/Traits.h>
 #include <AK/Types.h>
+#include <Kernel/Library/NonnullLockRefPtr.h>
 #ifdef KERNEL
 # include <Kernel/Arch/Processor.h>
 # include <Kernel/Arch/ScopedCritical.h>
 #endif
-#define THREADSAFEREFPTR_SCRUB_BYTE 0xa0
+#define LOCKREFPTR_SCRUB_BYTE 0xa0
 namespace AK {
@@ -27,7 +27,7 @@ template<typename T>
 class OwnPtr;
 template<typename T>
-struct RefPtrTraits {
+struct LockRefPtrTraits {
 ALWAYS_INLINE static T* as_ptr(FlatPtr bits)
 {
 return (T*)(bits & ~(FlatPtr)1);
@@ -88,7 +88,7 @@ struct RefPtrTraits {
 ALWAYS_INLINE static FlatPtr lock(Atomic<FlatPtr>& atomic_var)
 {
 // This sets the lock bit atomically, preventing further modifications.
-// This is important when e.g. copying a RefPtr where the source
+// This is important when e.g. copying a LockRefPtr where the source
 // might be released and freed too quickly. This allows us
 // to temporarily lock the pointer so we can add a reference, then
 // unlock it
@@ -117,24 +117,24 @@
 };
 template<typename T, typename PtrTraits>
-class [[nodiscard]] RefPtr {
+class [[nodiscard]] LockRefPtr {
 template<typename U, typename P>
-friend class RefPtr;
+friend class LockRefPtr;
 template<typename U>
-friend class WeakPtr;
+friend class LockWeakPtr;
 public:
 enum AdoptTag {
 Adopt
 };
-RefPtr() = default;
-RefPtr(const T* ptr)
+LockRefPtr() = default;
+LockRefPtr(const T* ptr)
 : m_bits(PtrTraits::as_bits(const_cast<T*>(ptr)))
 {
 ref_if_not_null(const_cast<T*>(ptr));
 }
-RefPtr(const T& object)
+LockRefPtr(const T& object)
 : m_bits(PtrTraits::as_bits(const_cast<T*>(&object)))
 {
 T* ptr = const_cast<T*>(&object);
@@ -142,58 +142,58 @@ public:
 VERIFY(!is_null());
 ptr->ref();
 }
-RefPtr(AdoptTag, T& object)
+LockRefPtr(AdoptTag, T& object)
 : m_bits(PtrTraits::as_bits(&object))
 {
 VERIFY(!is_null());
 }
-RefPtr(RefPtr&& other)
+LockRefPtr(LockRefPtr&& other)
 : m_bits(other.leak_ref_raw())
 {
 }
-ALWAYS_INLINE RefPtr(NonnullRefPtr<T> const& other)
+ALWAYS_INLINE LockRefPtr(NonnullLockRefPtr<T> const& other)
 : m_bits(PtrTraits::as_bits(const_cast<T*>(other.add_ref())))
 {
 }
 template<typename U>
-ALWAYS_INLINE RefPtr(NonnullRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ALWAYS_INLINE LockRefPtr(NonnullLockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
 : m_bits(PtrTraits::as_bits(const_cast<U*>(other.add_ref())))
 {
 }
 template<typename U>
-ALWAYS_INLINE RefPtr(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ALWAYS_INLINE LockRefPtr(NonnullLockRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
 : m_bits(PtrTraits::as_bits(&other.leak_ref()))
 {
 VERIFY(!is_null());
 }
-template<typename U, typename P = RefPtrTraits<U>>
-RefPtr(RefPtr<U, P>&& other) requires(IsConvertible<U*, T*>)
+template<typename U, typename P = LockRefPtrTraits<U>>
+LockRefPtr(LockRefPtr<U, P>&& other) requires(IsConvertible<U*, T*>)
 : m_bits(PtrTraits::template convert_from<U, P>(other.leak_ref_raw()))
 {
 }
-RefPtr(RefPtr const& other)
+LockRefPtr(LockRefPtr const& other)
 : m_bits(other.add_ref_raw())
 {
 }
-template<typename U, typename P = RefPtrTraits<U>>
-RefPtr(RefPtr<U, P> const& other) requires(IsConvertible<U*, T*>)
+template<typename U, typename P = LockRefPtrTraits<U>>
+LockRefPtr(LockRefPtr<U, P> const& other) requires(IsConvertible<U*, T*>)
 : m_bits(other.add_ref_raw())
 {
 }
-ALWAYS_INLINE ~RefPtr()
+ALWAYS_INLINE ~LockRefPtr()
 {
 clear();
 #ifdef SANITIZE_PTRS
-m_bits.store(explode_byte(THREADSAFEREFPTR_SCRUB_BYTE), AK::MemoryOrder::memory_order_relaxed);
+m_bits.store(explode_byte(LOCKREFPTR_SCRUB_BYTE), AK::MemoryOrder::memory_order_relaxed);
 #endif
 }
 template<typename U>
-RefPtr(OwnPtr<U> const&) = delete;
+LockRefPtr(OwnPtr<U> const&) = delete;
 template<typename U>
-RefPtr& operator=(OwnPtr<U> const&) = delete;
+LockRefPtr& operator=(OwnPtr<U> const&) = delete;
-void swap(RefPtr& other)
+void swap(LockRefPtr& other)
 {
 if (this == &other)
 return;
@@ -204,8 +204,8 @@ public:
 PtrTraits::exchange(other.m_bits, bits);
 }
-template<typename U, typename P = RefPtrTraits<U>>
-void swap(RefPtr<U, P>& other) requires(IsConvertible<U*, T*>)
+template<typename U, typename P = LockRefPtrTraits<U>>
+void swap(LockRefPtr<U, P>& other) requires(IsConvertible<U*, T*>)
 {
 // NOTE: swap is not atomic!
 FlatPtr other_bits = P::exchange(other.m_bits, P::default_null_value);
@@ -213,41 +213,41 @@ public:
 P::exchange(other.m_bits, P::template convert_from<U, P>(bits));
 }
-ALWAYS_INLINE RefPtr& operator=(RefPtr&& other)
+ALWAYS_INLINE LockRefPtr& operator=(LockRefPtr&& other)
 {
 if (this != &other)
 assign_raw(other.leak_ref_raw());
 return *this;
 }
-template<typename U, typename P = RefPtrTraits<U>>
-ALWAYS_INLINE RefPtr& operator=(RefPtr<U, P>&& other) requires(IsConvertible<U*, T*>)
+template<typename U, typename P = LockRefPtrTraits<U>>
+ALWAYS_INLINE LockRefPtr& operator=(LockRefPtr<U, P>&& other) requires(IsConvertible<U*, T*>)
 {
 assign_raw(PtrTraits::template convert_from<U, P>(other.leak_ref_raw()));
 return *this;
 }
 template<typename U>
-ALWAYS_INLINE RefPtr& operator=(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ALWAYS_INLINE LockRefPtr& operator=(NonnullLockRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
 {
 assign_raw(PtrTraits::as_bits(&other.leak_ref()));
 return *this;
 }
-ALWAYS_INLINE RefPtr& operator=(NonnullRefPtr<T> const& other)
+ALWAYS_INLINE LockRefPtr& operator=(NonnullLockRefPtr<T> const& other)
 {
 assign_raw(PtrTraits::as_bits(other.add_ref()));
 return *this;
 }
 template<typename U>
-ALWAYS_INLINE RefPtr& operator=(NonnullRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ALWAYS_INLINE LockRefPtr& operator=(NonnullLockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
 {
 assign_raw(PtrTraits::as_bits(other.add_ref()));
 return *this;
 }
-ALWAYS_INLINE RefPtr& operator=(RefPtr const& other)
+ALWAYS_INLINE LockRefPtr& operator=(LockRefPtr const& other)
 {
 if (this != &other)
 assign_raw(other.add_ref_raw());
@@ -255,41 +255,41 @@ public:
 }
 template<typename U>
-ALWAYS_INLINE RefPtr& operator=(RefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ALWAYS_INLINE LockRefPtr& operator=(LockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
 {
 assign_raw(other.add_ref_raw());
 return *this;
 }
-ALWAYS_INLINE RefPtr& operator=(const T* ptr)
+ALWAYS_INLINE LockRefPtr& operator=(const T* ptr)
 {
 ref_if_not_null(const_cast<T*>(ptr));
 assign_raw(PtrTraits::as_bits(const_cast<T*>(ptr)));
 return *this;
 }
-ALWAYS_INLINE RefPtr& operator=(const T& object)
+ALWAYS_INLINE LockRefPtr& operator=(const T& object)
 {
 const_cast<T&>(object).ref();
 assign_raw(PtrTraits::as_bits(const_cast<T*>(&object)));
 return *this;
 }
-RefPtr& operator=(std::nullptr_t)
+LockRefPtr& operator=(std::nullptr_t)
 {
 clear();
 return *this;
 }
-ALWAYS_INLINE bool assign_if_null(RefPtr&& other)
+ALWAYS_INLINE bool assign_if_null(LockRefPtr&& other)
 {
 if (this == &other)
 return is_null();
 return PtrTraits::exchange_if_null(m_bits, other.leak_ref_raw());
 }
-template<typename U, typename P = RefPtrTraits<U>>
-ALWAYS_INLINE bool assign_if_null(RefPtr<U, P>&& other)
+template<typename U, typename P = LockRefPtrTraits<U>>
+ALWAYS_INLINE bool assign_if_null(LockRefPtr<U, P>&& other)
 {
 if (this == &other)
 return is_null();
@@ -309,11 +309,11 @@ public:
 return PtrTraits::as_ptr(bits);
 }
-NonnullRefPtr<T> release_nonnull()
+NonnullLockRefPtr<T> release_nonnull()
 {
 FlatPtr bits = PtrTraits::exchange(m_bits, PtrTraits::default_null_value);
 VERIFY(!PtrTraits::is_null(bits));
-return NonnullRefPtr<T>(NonnullRefPtr<T>::Adopt, *PtrTraits::as_ptr(bits));
+return NonnullLockRefPtr<T>(NonnullLockRefPtr<T>::Adopt, *PtrTraits::as_ptr(bits));
 }
 ALWAYS_INLINE T* ptr() { return as_ptr(); }
@@ -347,11 +347,11 @@ public:
 bool operator==(std::nullptr_t) const { return is_null(); }
 bool operator!=(std::nullptr_t) const { return !is_null(); }
-bool operator==(RefPtr const& other) const { return as_ptr() == other.as_ptr(); }
-bool operator!=(RefPtr const& other) const { return as_ptr() != other.as_ptr(); }
+bool operator==(LockRefPtr const& other) const { return as_ptr() == other.as_ptr(); }
+bool operator!=(LockRefPtr const& other) const { return as_ptr() != other.as_ptr(); }
-bool operator==(RefPtr& other) { return as_ptr() == other.as_ptr(); }
-bool operator!=(RefPtr& other) { return as_ptr() != other.as_ptr(); }
+bool operator==(LockRefPtr& other) { return as_ptr() == other.as_ptr(); }
+bool operator!=(LockRefPtr& other) { return as_ptr() != other.as_ptr(); }
 bool operator==(const T* other) const { return as_ptr() == other; }
 bool operator!=(const T* other) const { return as_ptr() != other; }
@@ -405,7 +405,7 @@ private:
 Kernel::ScopedCritical critical;
 #endif
 // This prevents a race condition between thread A and B:
-// 1. Thread A copies RefPtr, e.g. through assignment or copy constructor,
+// 1. Thread A copies LockRefPtr, e.g. through assignment or copy constructor,
 // gets the pointer from source, but is pre-empted before adding
 // another reference
 // 2. Thread B calls clear, leak_ref, or release_nonnull on source, and
@@ -445,64 +445,64 @@ private:
 };
 template<typename T>
-struct Formatter<RefPtr<T>> : Formatter<const T*> {
-ErrorOr<void> format(FormatBuilder& builder, RefPtr<T> const& value)
+struct Formatter<LockRefPtr<T>> : Formatter<const T*> {
+ErrorOr<void> format(FormatBuilder& builder, LockRefPtr<T> const& value)
 {
 return Formatter<const T*>::format(builder, value.ptr());
 }
 };
 template<typename T>
-struct Traits<RefPtr<T>> : public GenericTraits<RefPtr<T>> {
+struct Traits<LockRefPtr<T>> : public GenericTraits<LockRefPtr<T>> {
 using PeekType = T*;
 using ConstPeekType = const T*;
-static unsigned hash(RefPtr<T> const& p) { return ptr_hash(p.ptr()); }
-static bool equals(RefPtr<T> const& a, RefPtr<T> const& b) { return a.ptr() == b.ptr(); }
+static unsigned hash(LockRefPtr<T> const& p) { return ptr_hash(p.ptr()); }
+static bool equals(LockRefPtr<T> const& a, LockRefPtr<T> const& b) { return a.ptr() == b.ptr(); }
 };
 template<typename T, typename U>
-inline NonnullRefPtr<T> static_ptr_cast(NonnullRefPtr<U> const& ptr)
+inline NonnullLockRefPtr<T> static_ptr_cast(NonnullLockRefPtr<U> const& ptr)
 {
-return NonnullRefPtr<T>(static_cast<const T&>(*ptr));
+return NonnullLockRefPtr<T>(static_cast<const T&>(*ptr));
 }
-template<typename T, typename U, typename PtrTraits = RefPtrTraits<T>>
-inline RefPtr<T> static_ptr_cast(RefPtr<U> const& ptr)
+template<typename T, typename U, typename PtrTraits = LockRefPtrTraits<T>>
+inline LockRefPtr<T> static_ptr_cast(LockRefPtr<U> const& ptr)
 {
-return RefPtr<T, PtrTraits>(static_cast<const T*>(ptr.ptr()));
+return LockRefPtr<T, PtrTraits>(static_cast<const T*>(ptr.ptr()));
 }
 template<typename T, typename PtrTraitsT, typename U, typename PtrTraitsU>
-inline void swap(RefPtr<T, PtrTraitsT>& a, RefPtr<U, PtrTraitsU>& b) requires(IsConvertible<U*, T*>)
+inline void swap(LockRefPtr<T, PtrTraitsT>& a, LockRefPtr<U, PtrTraitsU>& b) requires(IsConvertible<U*, T*>)
 {
 a.swap(b);
 }
 template<typename T>
-inline RefPtr<T> adopt_ref_if_nonnull(T* object)
+inline LockRefPtr<T> adopt_lock_ref_if_nonnull(T* object)
 {
 if (object)
-return RefPtr<T>(RefPtr<T>::Adopt, *object);
+return LockRefPtr<T>(LockRefPtr<T>::Adopt, *object);
 return {};
 }
 template<typename T, class... Args>
-requires(IsConstructible<T, Args...>) inline ErrorOr<NonnullRefPtr<T>> try_make_ref_counted(Args&&... args)
+requires(IsConstructible<T, Args...>) inline ErrorOr<NonnullLockRefPtr<T>> try_make_lock_ref_counted(Args&&... args)
 {
-return adopt_nonnull_ref_or_enomem(new (nothrow) T(forward<Args>(args)...));
+return adopt_nonnull_lock_ref_or_enomem(new (nothrow) T(forward<Args>(args)...));
 }
 // FIXME: Remove once P0960R3 is available in Clang.
 template<typename T, class... Args>
-inline ErrorOr<NonnullRefPtr<T>> try_make_ref_counted(Args&&... args)
+inline ErrorOr<NonnullLockRefPtr<T>> try_make_lock_ref_counted(Args&&... args)
 {
-return adopt_nonnull_ref_or_enomem(new (nothrow) T { forward<Args>(args)... });
+return adopt_nonnull_lock_ref_or_enomem(new (nothrow) T { forward<Args>(args)... });
 }
 template<typename T>
-inline ErrorOr<NonnullRefPtr<T>> adopt_nonnull_ref_or_enomem(T* object)
+inline ErrorOr<NonnullLockRefPtr<T>> adopt_nonnull_lock_ref_or_enomem(T* object)
 {
-auto result = adopt_ref_if_nonnull(object);
+auto result = adopt_lock_ref_if_nonnull(object);
 if (!result)
 return Error::from_errno(ENOMEM);
 return result.release_nonnull();
@@ -510,11 +510,11 @@ inline ErrorOr<NonnullRefPtr<T>> adopt_nonnull_ref_or_enomem(T* object)
 }
-using AK::adopt_ref_if_nonnull;
-using AK::RefPtr;
+using AK::adopt_lock_ref_if_nonnull;
+using AK::LockRefPtr;
 using AK::static_ptr_cast;
-using AK::try_make_ref_counted;
+using AK::try_make_lock_ref_counted;
 #ifdef KERNEL
-using AK::adopt_nonnull_ref_or_enomem;
+using AK::adopt_nonnull_lock_ref_or_enomem;
 #endif

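The LockRefPtr diff above hinges on one trick: the pointee is at least 2-byte aligned, so bit 0 of the stored pointer word is free to serve as a tiny spinlock, which is what the lock() helper in LockRefPtrTraits uses to pin the source while a copier adds its reference. A stripped-down sketch of that bit-stealing scheme (illustrative only; names and structure are simplified from the code above):

#include <atomic>
#include <cstdint>

// Bit 0 of the stored word is a lock flag; the remaining bits are the
// pointer itself, which works because allocations are aligned.
std::atomic<uintptr_t> g_bits { 0 };

uintptr_t lock_bits()
{
    // Atomically set bit 0; the returned old value tells us who got there first.
    for (;;) {
        uintptr_t old_bits = g_bits.fetch_or(1, std::memory_order_acq_rel);
        if (!(old_bits & 1))
            return old_bits; // lock acquired; old_bits is the raw pointer
        // Another thread holds the lock bit; spin and retry.
    }
}

void unlock_bits(uintptr_t new_bits)
{
    // Publish the (possibly updated) pointer with the lock bit cleared.
    g_bits.store(new_bits & ~static_cast<uintptr_t>(1), std::memory_order_release);
}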
@@ -6,66 +6,66 @@
 #pragma once
-#include <AK/Weakable.h>
+#include <Kernel/Library/LockWeakable.h>
 namespace AK {
 template<typename T>
-class [[nodiscard]] WeakPtr {
+class [[nodiscard]] LockWeakPtr {
 template<typename U>
-friend class Weakable;
+friend class LockWeakable;
 public:
-WeakPtr() = default;
+LockWeakPtr() = default;
 template<typename U>
-WeakPtr(WeakPtr<U> const& other) requires(IsBaseOf<T, U>)
+LockWeakPtr(WeakPtr<U> const& other) requires(IsBaseOf<T, U>)
 : m_link(other.m_link)
 {
 }
 template<typename U>
-WeakPtr(WeakPtr<U>&& other) requires(IsBaseOf<T, U>)
+LockWeakPtr(WeakPtr<U>&& other) requires(IsBaseOf<T, U>)
 : m_link(other.take_link())
 {
 }
 template<typename U>
-WeakPtr& operator=(WeakPtr<U>&& other) requires(IsBaseOf<T, U>)
+LockWeakPtr& operator=(WeakPtr<U>&& other) requires(IsBaseOf<T, U>)
 {
 m_link = other.take_link();
 return *this;
 }
 template<typename U>
-WeakPtr& operator=(WeakPtr<U> const& other) requires(IsBaseOf<T, U>)
+LockWeakPtr& operator=(WeakPtr<U> const& other) requires(IsBaseOf<T, U>)
 {
 if ((void const*)this != (void const*)&other)
 m_link = other.m_link;
 return *this;
 }
-WeakPtr& operator=(std::nullptr_t)
+LockWeakPtr& operator=(std::nullptr_t)
 {
 clear();
 return *this;
 }
 template<typename U>
-WeakPtr(const U& object) requires(IsBaseOf<T, U>)
+LockWeakPtr(const U& object) requires(IsBaseOf<T, U>)
 : m_link(object.template try_make_weak_ptr<U>().release_value_but_fixme_should_propagate_errors().take_link())
 {
 }
 template<typename U>
-WeakPtr(const U* object) requires(IsBaseOf<T, U>)
+LockWeakPtr(const U* object) requires(IsBaseOf<T, U>)
 {
 if (object)
 m_link = object->template try_make_weak_ptr<U>().release_value_but_fixme_should_propagate_errors().take_link();
 }
 template<typename U>
-WeakPtr(RefPtr<U> const& object) requires(IsBaseOf<T, U>)
+LockWeakPtr(LockRefPtr<U> const& object) requires(IsBaseOf<T, U>)
 {
 object.do_while_locked([&](U* obj) {
 if (obj)
@@ -74,7 +74,7 @@ public:
 }
 template<typename U>
-WeakPtr(NonnullRefPtr<U> const& object) requires(IsBaseOf<T, U>)
+LockWeakPtr(NonnullLockRefPtr<U> const& object) requires(IsBaseOf<T, U>)
 {
 object.do_while_locked([&](U* obj) {
 if (obj)
@@ -83,14 +83,14 @@
 }
 template<typename U>
-WeakPtr& operator=(const U& object) requires(IsBaseOf<T, U>)
+LockWeakPtr& operator=(const U& object) requires(IsBaseOf<T, U>)
 {
 m_link = object.template try_make_weak_ptr<U>().release_value_but_fixme_should_propagate_errors().take_link();
 return *this;
 }
 template<typename U>
-WeakPtr& operator=(const U* object) requires(IsBaseOf<T, U>)
+LockWeakPtr& operator=(const U* object) requires(IsBaseOf<T, U>)
 {
 if (object)
 m_link = object->template try_make_weak_ptr<U>().release_value_but_fixme_should_propagate_errors().take_link();
@@ -100,7 +100,7 @@ public:
 }
 template<typename U>
-WeakPtr& operator=(RefPtr<U> const& object) requires(IsBaseOf<T, U>)
+LockWeakPtr& operator=(LockRefPtr<U> const& object) requires(IsBaseOf<T, U>)
 {
 object.do_while_locked([&](U* obj) {
 if (obj)
@@ -112,7 +112,7 @@ public:
 }
 template<typename U>
-WeakPtr& operator=(NonnullRefPtr<U> const& object) requires(IsBaseOf<T, U>)
+LockWeakPtr& operator=(NonnullLockRefPtr<U> const& object) requires(IsBaseOf<T, U>)
 {
 object.do_while_locked([&](U* obj) {
 if (obj)
@@ -123,13 +123,13 @@ public:
 return *this;
 }
-[[nodiscard]] RefPtr<T> strong_ref() const
+[[nodiscard]] LockRefPtr<T> strong_ref() const
 {
 // This only works with RefCounted objects, but it is the only
-// safe way to get a strong reference from a WeakPtr. Any code
+// safe way to get a strong reference from a LockWeakPtr. Any code
 // that uses objects not derived from RefCounted will have to
 // use unsafe_ptr(), but as the name suggests, it is not safe...
-RefPtr<T> ref;
+LockRefPtr<T> ref;
 // Using do_while_locked protects against a race with clear()!
 m_link.do_while_locked([&](WeakLink* link) {
 if (link)
@@ -153,20 +153,20 @@ public:
 [[nodiscard]] bool is_null() const { return !m_link || m_link->is_null(); }
 void clear() { m_link = nullptr; }
-[[nodiscard]] RefPtr<WeakLink> take_link() { return move(m_link); }
+[[nodiscard]] LockRefPtr<WeakLink> take_link() { return move(m_link); }
 private:
-WeakPtr(RefPtr<WeakLink> const& link)
+LockWeakPtr(LockRefPtr<WeakLink> const& link)
 : m_link(link)
 {
 }
-RefPtr<WeakLink> m_link;
+LockRefPtr<WeakLink> m_link;
 };
 template<typename T>
 template<typename U>
-inline ErrorOr<WeakPtr<U>> Weakable<T>::try_make_weak_ptr() const
+inline ErrorOr<LockWeakPtr<U>> LockWeakable<T>::try_make_weak_ptr() const
 {
 if constexpr (IsBaseOf<AtomicRefCountedBase, T>) {
 // Checking m_being_destroyed isn't sufficient when dealing with
@@ -176,21 +176,21 @@ inline ErrorOr<WeakPtr<U>> Weakable<T>::try_make_weak_ptr() const
 // that we prevent the destructor and revoke_weak_ptrs from being
 // triggered until we're done.
 if (!static_cast<const T*>(this)->try_ref())
-return WeakPtr<U> {};
+return LockWeakPtr<U> {};
 } else {
 // For non-RefCounted types this means a weak reference can be
-// obtained until the ~Weakable destructor is invoked!
+// obtained until the ~LockWeakable destructor is invoked!
 if (m_being_destroyed.load(AK::MemoryOrder::memory_order_acquire))
-return WeakPtr<U> {};
+return LockWeakPtr<U> {};
 }
 if (!m_link) {
 // There is a small chance that we create a new WeakLink and throw
 // it away because another thread beat us to it. But the window is
 // pretty small and the overhead isn't terrible.
-m_link.assign_if_null(TRY(adopt_nonnull_ref_or_enomem(new (nothrow) WeakLink(const_cast<T&>(static_cast<const T&>(*this))))));
+m_link.assign_if_null(TRY(adopt_nonnull_lock_ref_or_enomem(new (nothrow) WeakLink(const_cast<T&>(static_cast<const T&>(*this))))));
 }
-WeakPtr<U> weak_ptr(m_link);
+LockWeakPtr<U> weak_ptr(m_link);
 if constexpr (IsBaseOf<AtomicRefCountedBase, T>) {
 // Now drop the reference we temporarily added
@@ -198,15 +198,15 @@ inline ErrorOr<WeakPtr<U>> Weakable<T>::try_make_weak_ptr() const
 // We just dropped the last reference, which should have called
 // revoke_weak_ptrs, which should have invalidated our weak_ptr
 VERIFY(!weak_ptr.strong_ref());
-return WeakPtr<U> {};
+return LockWeakPtr<U> {};
 }
 }
 return weak_ptr;
 }
 template<typename T>
-struct Formatter<WeakPtr<T>> : Formatter<const T*> {
-ErrorOr<void> format(FormatBuilder& builder, WeakPtr<T> const& value)
+struct Formatter<LockWeakPtr<T>> : Formatter<const T*> {
+ErrorOr<void> format(FormatBuilder& builder, LockWeakPtr<T> const& value)
 {
 auto ref = value.strong_ref();
 return Formatter<const T*>::format(builder, ref.ptr());
@@ -214,14 +214,14 @@ struct Formatter<WeakPtr<T>> : Formatter<const T*> {
 };
 template<typename T>
-ErrorOr<WeakPtr<T>> try_make_weak_ptr_if_nonnull(T const* ptr)
+ErrorOr<LockWeakPtr<T>> try_make_weak_ptr_if_nonnull(T const* ptr)
 {
 if (ptr) {
 return ptr->template try_make_weak_ptr<T>();
 }
-return WeakPtr<T> {};
+return LockWeakPtr<T> {};
 }
 }
-using AK::WeakPtr;
+using AK::LockWeakPtr;

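The safe consumption pattern for the LockWeakPtr above is to upgrade before touching the object: strong_ref() returns a LockRefPtr that is either null or guaranteed to keep the pointee alive. A hypothetical usage sketch (Session and notify() are invented for illustration):

// Assumes Session derives from AtomicRefCounted<Session> and
// LockWeakable<Session>; both names here are invented examples.
void notify_if_alive(LockWeakPtr<Session> const& weak_session)
{
    // Upgrade first: another thread may destroy the Session at any time,
    // so a raw pointer taken from the weak pointer would be unsafe.
    if (auto session = weak_session.strong_ref())
        session->notify(); // safe: we now hold our own reference
    // If the object is already gone, strong_ref() returned null and
    // there is nothing to do.
}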
@@ -9,30 +9,30 @@
 #include <AK/Assertions.h>
 #include <AK/Atomic.h>
 #include <AK/AtomicRefCounted.h>
-#include <AK/RefPtr.h>
 #include <AK/StdLibExtras.h>
 #include <Kernel/Arch/Processor.h>
 #include <Kernel/Arch/ScopedCritical.h>
+#include <Kernel/Library/LockRefPtr.h>
 namespace AK {
 template<typename T>
-class Weakable;
+class LockWeakable;
 template<typename T>
-class WeakPtr;
+class LockWeakPtr;
 class WeakLink final : public AtomicRefCounted<WeakLink> {
 template<typename T>
-friend class Weakable;
+friend class LockWeakable;
 template<typename T>
-friend class WeakPtr;
+friend class LockWeakPtr;
 public:
-template<typename T, typename PtrTraits = RefPtrTraits<T>>
-RefPtr<T, PtrTraits> strong_ref() const
+template<typename T, typename PtrTraits = LockRefPtrTraits<T>>
+LockRefPtr<T, PtrTraits> strong_ref() const
 requires(IsBaseOf<AtomicRefCountedBase, T>)
 {
-RefPtr<T, PtrTraits> ref;
+LockRefPtr<T, PtrTraits> ref;
 {
 // We don't want to be preempted while we are trying to obtain
@@ -41,7 +41,7 @@ public:
 if (!(m_consumers.fetch_add(1u << 1, AK::MemoryOrder::memory_order_acquire) & 1u)) {
 T* ptr = (T*)m_ptr.load(AK::MemoryOrder::memory_order_acquire);
 if (ptr && ptr->try_ref())
-ref = adopt_ref(*ptr);
+ref = adopt_lock_ref(*ptr);
 }
 m_consumers.fetch_sub(1u << 1, AK::MemoryOrder::memory_order_release);
 }
@@ -91,18 +91,18 @@ private:
 };
 template<typename T>
-class Weakable {
+class LockWeakable {
 private:
 class Link;
 public:
 template<typename U = T>
-ErrorOr<WeakPtr<U>> try_make_weak_ptr() const;
+ErrorOr<LockWeakPtr<U>> try_make_weak_ptr() const;
 protected:
-Weakable() = default;
+LockWeakable() = default;
-~Weakable()
+~LockWeakable()
 {
 m_being_destroyed.store(true, AK::MemoryOrder::memory_order_release);
 revoke_weak_ptrs();
@@ -115,10 +115,10 @@ protected:
 }
 private:
-mutable RefPtr<WeakLink> m_link;
+mutable LockRefPtr<WeakLink> m_link;
 Atomic<bool> m_being_destroyed { false };
 };
 }
-using AK::Weakable;
+using AK::LockWeakable;

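WeakLink's m_consumers field above packs two facts into one atomic word: bit 0 is a "revoked" flag and the bits above it count in-flight readers, which is why the code adds and subtracts 1u << 1. A reduced sketch of that scheme (illustrative; the kernel's version lives inside WeakLink and is shaped by its surrounding critical section):

#include <atomic>

// Bit 0: revoked flag. Bits 1 and up: count of active readers.
std::atomic<unsigned> g_consumers { 0 };

bool try_begin_read()
{
    // One RMW both registers us as a reader (+1 in the upper bits) and
    // samples the revoked flag in bit 0 of the old value.
    if (g_consumers.fetch_add(1u << 1, std::memory_order_acquire) & 1u) {
        // Already revoked: deregister and report failure.
        g_consumers.fetch_sub(1u << 1, std::memory_order_release);
        return false;
    }
    return true; // caller must pair this with end_read()
}

void end_read()
{
    g_consumers.fetch_sub(1u << 1, std::memory_order_release);
}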
@@ -9,6 +9,7 @@
 #include <AK/Assertions.h>
 #include <AK/Atomic.h>
 #include <AK/Format.h>
+#include <AK/NonnullRefPtr.h>
 #include <AK/Traits.h>
 #include <AK/Types.h>
 #ifdef KERNEL
@@ -16,104 +17,90 @@
 # include <Kernel/Arch/ScopedCritical.h>
 #endif
-#define THREADSAFENONNULLREFPTR_SCRUB_BYTE 0xa1
+#define NONNULLLOCKREFPTR_SCRUB_BYTE 0xa1
 namespace AK {
 template<typename T>
 class OwnPtr;
 template<typename T, typename PtrTraits>
-class RefPtr;
+class LockRefPtr;
-template<typename T>
-ALWAYS_INLINE void ref_if_not_null(T* ptr)
-{
-if (ptr)
-ptr->ref();
-}
-template<typename T>
-ALWAYS_INLINE void unref_if_not_null(T* ptr)
-{
-if (ptr)
-ptr->unref();
-}
 template<typename T>
-class [[nodiscard]] NonnullRefPtr {
+class [[nodiscard]] NonnullLockRefPtr {
 template<typename U, typename P>
-friend class RefPtr;
+friend class LockRefPtr;
 template<typename U>
-friend class NonnullRefPtr;
+friend class NonnullLockRefPtr;
 template<typename U>
-friend class WeakPtr;
+friend class LockWeakPtr;
 public:
 using ElementType = T;
 enum AdoptTag { Adopt };
-ALWAYS_INLINE NonnullRefPtr(const T& object)
+ALWAYS_INLINE NonnullLockRefPtr(T const& object)
 : m_bits((FlatPtr)&object)
 {
 VERIFY(!(m_bits & 1));
 const_cast<T&>(object).ref();
 }
 template<typename U>
-ALWAYS_INLINE NonnullRefPtr(const U& object) requires(IsConvertible<U*, T*>)
-: m_bits((FlatPtr) static_cast<const T*>(&object))
+ALWAYS_INLINE NonnullLockRefPtr(U const& object) requires(IsConvertible<U*, T*>)
+: m_bits((FlatPtr) static_cast<T const*>(&object))
 {
 VERIFY(!(m_bits & 1));
-const_cast<T&>(static_cast<const T&>(object)).ref();
+const_cast<T&>(static_cast<T const&>(object)).ref();
 }
-ALWAYS_INLINE NonnullRefPtr(AdoptTag, T& object)
+ALWAYS_INLINE NonnullLockRefPtr(AdoptTag, T& object)
 : m_bits((FlatPtr)&object)
 {
 VERIFY(!(m_bits & 1));
 }
-ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr&& other)
+ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr&& other)
 : m_bits((FlatPtr)&other.leak_ref())
 {
 VERIFY(!(m_bits & 1));
 }
 template<typename U>
-ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
 : m_bits((FlatPtr)&other.leak_ref())
 {
 VERIFY(!(m_bits & 1));
 }
-ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr const& other)
+ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr const& other)
 : m_bits((FlatPtr)other.add_ref())
 {
 VERIFY(!(m_bits & 1));
 }
 template<typename U>
-ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
 : m_bits((FlatPtr)other.add_ref())
 {
 VERIFY(!(m_bits & 1));
 }
-ALWAYS_INLINE ~NonnullRefPtr()
+ALWAYS_INLINE ~NonnullLockRefPtr()
 {
 assign(nullptr);
 #ifdef SANITIZE_PTRS
-m_bits.store(explode_byte(THREADSAFENONNULLREFPTR_SCRUB_BYTE), AK::MemoryOrder::memory_order_relaxed);
+m_bits.store(explode_byte(NONNULLLOCKREFPTR_SCRUB_BYTE), AK::MemoryOrder::memory_order_relaxed);
 #endif
 }
 template<typename U>
-NonnullRefPtr(OwnPtr<U> const&) = delete;
+NonnullLockRefPtr(OwnPtr<U> const&) = delete;
 template<typename U>
-NonnullRefPtr& operator=(OwnPtr<U> const&) = delete;
+NonnullLockRefPtr& operator=(OwnPtr<U> const&) = delete;
 template<typename U>
-NonnullRefPtr(RefPtr<U> const&) = delete;
+NonnullLockRefPtr(LockRefPtr<U> const&) = delete;
 template<typename U>
-NonnullRefPtr& operator=(RefPtr<U> const&) = delete;
-NonnullRefPtr(RefPtr<T> const&) = delete;
-NonnullRefPtr& operator=(RefPtr<T> const&) = delete;
+NonnullLockRefPtr& operator=(LockRefPtr<U> const&) = delete;
+NonnullLockRefPtr(LockRefPtr<T> const&) = delete;
+NonnullLockRefPtr& operator=(LockRefPtr<T> const&) = delete;
-NonnullRefPtr& operator=(NonnullRefPtr const& other)
+NonnullLockRefPtr& operator=(NonnullLockRefPtr const& other)
 {
 if (this != &other)
 assign(other.add_ref());
@@ -121,13 +108,13 @@ public:
 }
 template<typename U>
-NonnullRefPtr& operator=(NonnullRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+NonnullLockRefPtr& operator=(NonnullLockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
 {
 assign(other.add_ref());
 return *this;
 }
-ALWAYS_INLINE NonnullRefPtr& operator=(NonnullRefPtr&& other)
+ALWAYS_INLINE NonnullLockRefPtr& operator=(NonnullLockRefPtr&& other)
 {
 if (this != &other)
 assign(&other.leak_ref());
@@ -135,13 +122,13 @@ public:
 }
 template<typename U>
-NonnullRefPtr& operator=(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+NonnullLockRefPtr& operator=(NonnullLockRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
 {
 assign(&other.leak_ref());
 return *this;
 }
-NonnullRefPtr& operator=(const T& object)
+NonnullLockRefPtr& operator=(T const& object)
 {
 const_cast<T&>(object).ref();
 assign(const_cast<T*>(&object));
@@ -159,7 +146,7 @@ public:
 {
 return as_nonnull_ptr();
 }
-ALWAYS_INLINE RETURNS_NONNULL const T* ptr() const
+ALWAYS_INLINE RETURNS_NONNULL T const* ptr() const
 {
 return as_nonnull_ptr();
 }
@@ -168,7 +155,7 @@ public:
 {
 return as_nonnull_ptr();
 }
-ALWAYS_INLINE RETURNS_NONNULL const T* operator->() const
+ALWAYS_INLINE RETURNS_NONNULL T const* operator->() const
 {
 return as_nonnull_ptr();
 }
@@ -177,7 +164,7 @@ public:
 {
 return *as_nonnull_ptr();
 }
-ALWAYS_INLINE const T& operator*() const
+ALWAYS_INLINE T const& operator*() const
 {
 return *as_nonnull_ptr();
 }
@@ -186,7 +173,7 @@ public:
 {
 return as_nonnull_ptr();
 }
-ALWAYS_INLINE RETURNS_NONNULL operator const T*() const
+ALWAYS_INLINE RETURNS_NONNULL operator T const*() const
 {
 return as_nonnull_ptr();
 }
@@ -195,7 +182,7 @@ public:
 {
 return *as_nonnull_ptr();
 }
-ALWAYS_INLINE operator const T&() const
+ALWAYS_INLINE operator T const&() const
 {
 return *as_nonnull_ptr();
 }
@@ -203,7 +190,7 @@ public:
 operator bool() const = delete;
 bool operator!() const = delete;
-void swap(NonnullRefPtr& other)
+void swap(NonnullLockRefPtr& other)
 {
 if (this == &other)
 return;
@@ -215,7 +202,7 @@ public:
 }
 template<typename U>
-void swap(NonnullRefPtr<U>& other) requires(IsConvertible<U*, T*>)
+void swap(NonnullLockRefPtr<U>& other) requires(IsConvertible<U*, T*>)
 {
 // NOTE: swap is not atomic!
 U* other_ptr = other.exchange(nullptr);
@@ -225,7 +212,7 @@ public:
 // clang-format off
 private:
-NonnullRefPtr() = delete;
+NonnullLockRefPtr() = delete;
 // clang-format on
 ALWAYS_INLINE T* as_ptr() const
@@ -317,21 +304,21 @@ private:
 };
 template<typename T>
-inline NonnullRefPtr<T> adopt_ref(T& object)
+inline NonnullLockRefPtr<T> adopt_lock_ref(T& object)
 {
-return NonnullRefPtr<T>(NonnullRefPtr<T>::Adopt, object);
+return NonnullLockRefPtr<T>(NonnullLockRefPtr<T>::Adopt, object);
 }
 template<typename T>
-struct Formatter<NonnullRefPtr<T>> : Formatter<const T*> {
-ErrorOr<void> format(FormatBuilder& builder, NonnullRefPtr<T> const& value)
+struct Formatter<NonnullLockRefPtr<T>> : Formatter<T const*> {
+ErrorOr<void> format(FormatBuilder& builder, NonnullLockRefPtr<T> const& value)
 {
-return Formatter<const T*>::format(builder, value.ptr());
+return Formatter<T const*>::format(builder, value.ptr());
 }
 };
 template<typename T, typename U>
-inline void swap(NonnullRefPtr<T>& a, NonnullRefPtr<U>& b) requires(IsConvertible<U*, T*>)
+inline void swap(NonnullLockRefPtr<T>& a, NonnullLockRefPtr<U>& b) requires(IsConvertible<U*, T*>)
 {
 a.swap(b);
 }
@@ -339,12 +326,12 @@ inline void swap(NonnullRefPtr<T>& a, NonnullRefPtr<U>& b) requires(IsConvertibl
 }
 template<typename T>
-struct Traits<NonnullRefPtr<T>> : public GenericTraits<NonnullRefPtr<T>> {
+struct Traits<NonnullLockRefPtr<T>> : public GenericTraits<NonnullLockRefPtr<T>> {
 using PeekType = T*;
-using ConstPeekType = const T*;
-static unsigned hash(NonnullRefPtr<T> const& p) { return ptr_hash(p.ptr()); }
-static bool equals(NonnullRefPtr<T> const& a, NonnullRefPtr<T> const& b) { return a.ptr() == b.ptr(); }
+using ConstPeekType = T const*;
+static unsigned hash(NonnullLockRefPtr<T> const& p) { return ptr_hash(p.ptr()); }
+static bool equals(NonnullLockRefPtr<T> const& a, NonnullLockRefPtr<T> const& b) { return a.ptr() == b.ptr(); }
 };
-using AK::adopt_ref;
-using AK::NonnullRefPtr;
+using AK::adopt_lock_ref;
+using AK::NonnullLockRefPtr;

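The renamed helpers at the bottom of this file keep their old semantics: adopt_lock_ref() takes ownership of a freshly allocated object's initial reference without incrementing it, while copying a NonnullLockRefPtr calls ref(). A short sketch of the common creation paths (Widget is an invented type assumed to be ref-counted):

// Assumes Widget derives from AtomicRefCounted<Widget>; invented example.
ErrorOr<void> create_widgets()
{
    // Adoption: a new object starts with refcount 1, and adopt_lock_ref()
    // takes over that reference instead of adding another.
    NonnullLockRefPtr<Widget> widget = adopt_lock_ref(*new Widget);

    // Fallible flavor: returns ENOMEM instead of crashing when the
    // allocation fails.
    NonnullLockRefPtr<Widget> other = TRY(try_make_lock_ref_counted<Widget>());

    // Copying an existing handle calls ref(); both handles now keep the
    // Widget alive.
    NonnullLockRefPtr<Widget> second_handle = widget;
    return {};
}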
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2018-2022, Andreas Kling <kling@serenityos.org>
+ *
+ * SPDX-License-Identifier: BSD-2-Clause
+ */
+#pragma once
+#include <AK/NonnullPtrVector.h>
+#include <Kernel/Library/NonnullLockRefPtr.h>
+namespace AK {
+template<typename T, size_t inline_capacity>
+class NonnullLockRefPtrVector : public NonnullPtrVector<NonnullLockRefPtr<T>, inline_capacity> {
+using NonnullPtrVector<NonnullLockRefPtr<T>, inline_capacity>::NonnullPtrVector;
+};
+}
+using AK::NonnullLockRefPtrVector;
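
NonnullLockRefPtrVector is just NonnullPtrVector instantiated for NonnullLockRefPtr, so iterating yields the pointee by reference rather than the smart pointer itself. A hypothetical usage sketch (Region and do_something() are invented, and try_append() is assumed from AK::Vector):

// Inside some fallible kernel function; Region is an invented
// lock-ref-counted type.
NonnullLockRefPtrVector<Region> regions;
TRY(regions.try_append(TRY(try_make_lock_ref_counted<Region>())));
for (auto& region : regions) {
    // NonnullPtrVector dereferences on iteration: region is a Region&,
    // not a NonnullLockRefPtr<Region>.
    region.do_something();
}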