Mirror of https://github.com/RGBCube/serenity, synced 2025-05-28 19:35:09 +00:00
AK: Fix a race condition with WeakPtr<T>::strong_ref and destruction
Since RefPtr<T> decrements the ref counter to 0 and only then starts destructing the object, there is a window where the ref count is 0 but the weak references have not yet been revoked.

Also change WeakLink so that a strong reference can be obtained concurrently, and block revocation instead, which should happen far less often.

Fixes a problem observed in #4621
This commit is contained in:
parent 3e00e3da72
commit 54eeb8ee9a

4 changed files with 88 additions and 24 deletions
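The window described in the commit message can be sketched outside of AK. The standalone example below uses std::atomic in place of AK::Atomic, and the Counted type and its members are illustrative names, not part of this patch. It contrasts a blind increment (which can "resurrect" an object whose count already reached zero) with a compare-and-swap loop that refuses to move the count off zero, which is the idea behind the try_ref() added in this commit.

#include <atomic>
#include <cstdio>

// Illustrative stand-in for a ref-counted object; not AK code.
struct Counted {
    std::atomic<unsigned> ref_count { 1 };

    // Racy pattern: between an earlier check and this increment, another
    // thread may have dropped the count to 0 and started destruction.
    void ref_unsafely() { ref_count.fetch_add(1, std::memory_order_relaxed); }

    // Safer pattern: a CAS loop that refuses to revive an object whose
    // count is already 0, mirroring the try_ref() idea in this commit.
    bool try_ref()
    {
        unsigned expected = ref_count.load(std::memory_order_relaxed);
        for (;;) {
            if (expected == 0)
                return false; // object is already being destroyed
            if (ref_count.compare_exchange_weak(expected, expected + 1, std::memory_order_acquire))
                return true;
        }
    }
};

int main()
{
    Counted c;
    c.ref_count.store(0); // pretend the last strong reference was just dropped
    std::printf("try_ref on a dying object: %s\n", c.try_ref() ? "succeeded" : "refused");
}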
AK/RefCounted.h

@@ -74,6 +74,18 @@ public:
         ASSERT(!Checked<RefCountType>::addition_would_overflow(old_ref_count, 1));
     }
 
+    ALWAYS_INLINE bool try_ref() const
+    {
+        RefCountType expected = m_ref_count.load(AK::MemoryOrder::memory_order_relaxed);
+        for (;;) {
+            if (expected == 0)
+                return false;
+            ASSERT(!Checked<RefCountType>::addition_would_overflow(expected, 1));
+            if (m_ref_count.compare_exchange_strong(expected, expected + 1, AK::MemoryOrder::memory_order_acquire))
+                return true;
+        }
+    }
+
     ALWAYS_INLINE RefCountType ref_count() const
     {
         return m_ref_count.load(AK::MemoryOrder::memory_order_relaxed);

@@ -99,15 +111,17 @@ protected:
 template<typename T>
 class RefCounted : public RefCountedBase {
 public:
-    void unref() const
+    bool unref() const
     {
         auto new_ref_count = deref_base();
         if (new_ref_count == 0) {
             call_will_be_destroyed_if_present(static_cast<const T*>(this));
             delete static_cast<const T*>(this);
+            return true;
         } else if (new_ref_count == 1) {
             call_one_ref_left_if_present(static_cast<const T*>(this));
         }
+        return false;
     }
 };
 
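One consequence of the second hunk is that unref() now reports whether the call released the last reference. A minimal standalone sketch of that contract, again using std::atomic and illustrative names rather than the AK types:

#include <atomic>
#include <cstdio>

// Not AK code: only models the "unref() returns true when it dropped the
// last reference" contract introduced above.
struct Counted {
    std::atomic<unsigned> ref_count { 1 };

    bool unref()
    {
        unsigned previous = ref_count.fetch_sub(1, std::memory_order_acq_rel);
        if (previous == 1) {
            // Last reference released; a real implementation would run
            // will-be-destroyed hooks and delete the object here.
            return true;
        }
        return false;
    }
};

int main()
{
    Counted c;
    c.ref_count.store(2);
    std::printf("first unref released the last reference: %d\n", c.unref());
    std::printf("second unref released the last reference: %d\n", c.unref());
}

make_weak_ptr() in the next file relies on exactly this return value to detect that its temporary reference was the last one.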
AK/WeakPtr.h (32 changed lines)

@@ -201,16 +201,40 @@ template<typename T>
 template<typename U>
 inline WeakPtr<U> Weakable<T>::make_weak_ptr() const
 {
-#ifdef DEBUG
-    ASSERT(!m_being_destroyed);
-#endif
+    if constexpr (IsBaseOf<RefCountedBase, T>::value) {
+        // Checking m_being_destroyed isn't sufficient when dealing with
+        // a RefCounted type. The reference count will drop to 0 before the
+        // destructor is invoked and revoke_weak_ptrs is called. So, try
+        // to add a ref (which should fail if the ref count is at 0) so
+        // that we prevent the destructor and revoke_weak_ptrs from being
+        // triggered until we're done.
+        if (!static_cast<const T*>(this)->try_ref())
+            return {};
+    } else {
+        // For non-RefCounted types this means a weak reference can be
+        // obtained until the ~Weakable destructor is invoked!
+        if (m_being_destroyed.load(AK::MemoryOrder::memory_order_acquire))
+            return {};
+    }
     if (!m_link) {
         // There is a small chance that we create a new WeakLink and throw
         // it away because another thread beat us to it. But the window is
         // pretty small and the overhead isn't terrible.
         m_link.assign_if_null(adopt(*new WeakLink(const_cast<T&>(static_cast<const T&>(*this)))));
     }
-    return WeakPtr<U>(m_link);
+
+    WeakPtr<U> weak_ptr(m_link);
+
+    if constexpr (IsBaseOf<RefCountedBase, T>::value) {
+        // Now drop the reference we temporarily added
+        if (static_cast<const T*>(this)->unref()) {
+            // We just dropped the last reference, which should have called
+            // revoke_weak_ptrs, which should have invalidated our weak_ptr
+            ASSERT(!weak_ptr.strong_ref());
+            return {};
+        }
+    }
+    return weak_ptr;
 }
 
 template<typename T>
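The control flow above can be condensed into a small standalone model. Everything here is a simplified stand-in (std::atomic members, a raw pointer instead of WeakLink/WeakPtr, hypothetical names such as make_weak); only the ordering of the steps mirrors the patch: pin the object with try_ref, publish the link, drop the temporary reference, and hand out nothing if that drop turned out to be the last one.

#include <atomic>
#include <cstdio>

// Simplified model, not AK code.
struct Object {
    std::atomic<unsigned> ref_count { 1 };
    std::atomic<Object*> weak_link { nullptr }; // stands in for WeakLink

    bool try_ref()
    {
        unsigned expected = ref_count.load(std::memory_order_relaxed);
        while (expected != 0) {
            if (ref_count.compare_exchange_weak(expected, expected + 1, std::memory_order_acquire))
                return true;
        }
        return false;
    }

    // Returns true if this call dropped the last reference.
    bool unref()
    {
        if (ref_count.fetch_sub(1, std::memory_order_acq_rel) == 1) {
            weak_link.store(nullptr, std::memory_order_release); // "revoke"
            return true;
        }
        return false;
    }

    Object* make_weak()
    {
        if (!try_ref())                              // pin the object so destruction cannot start
            return nullptr;
        weak_link.store(this, std::memory_order_release);
        Object* weak = weak_link.load(std::memory_order_acquire);
        if (unref()) {
            // We held the last reference; revocation already ran, so the
            // weak pointer we just built must not be handed out.
            return nullptr;
        }
        return weak;
    }
};

int main()
{
    Object o;
    std::printf("weak pointer while alive: %p\n", static_cast<void*>(o.make_weak()));
}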
AK/Weakable.h

@@ -30,6 +30,9 @@
 #include "Atomic.h"
 #include "RefCounted.h"
 #include "RefPtr.h"
+#ifdef KERNEL
+#    include <Kernel/Arch/i386/CPU.h>
+#endif
 
 #ifndef WEAKABLE_DEBUG
 #    define WEAKABLE_DEBUG

@@ -56,14 +59,16 @@ public:
 
         {
 #ifdef KERNEL
-            // We don't want to be pre-empted while we have the lock bit set
+            // We don't want to be pre-empted while we are trying to obtain
+            // a strong reference
             Kernel::ScopedCritical critical;
 #endif
-            FlatPtr bits = RefPtrTraits<void>::lock(m_bits);
-            T* ptr = static_cast<T*>(RefPtrTraits<void>::as_ptr(bits));
-            if (ptr)
-                ref = *ptr;
-            RefPtrTraits<void>::unlock(m_bits, bits);
+            if (!(m_consumers.fetch_add(1u << 1, AK::MemoryOrder::memory_order_acquire) & 1u)) {
+                T* ptr = (T*)m_ptr.load(AK::MemoryOrder::memory_order_acquire);
+                if (ptr && ptr->try_ref())
+                    ref = adopt(*ptr);
+            }
+            m_consumers.fetch_sub(1u << 1, AK::MemoryOrder::memory_order_release);
         }
 
         return ref;

@@ -72,26 +77,46 @@ public:
 template<typename T>
     T* unsafe_ptr() const
     {
-        return static_cast<T*>(RefPtrTraits<void>::as_ptr(m_bits.load(AK::MemoryOrder::memory_order_acquire)));
+        if (m_consumers.load(AK::MemoryOrder::memory_order_relaxed) & 1u)
+            return nullptr;
+        // NOTE: This may return a non-null pointer even if revocation
+        // has been triggered as there is a possible race! But it's "unsafe"
+        // anyway because we return a raw pointer without ensuring a
+        // reference...
+        return (T*)m_ptr.load(AK::MemoryOrder::memory_order_acquire);
     }
 
     bool is_null() const
     {
-        return RefPtrTraits<void>::is_null(m_bits.load(AK::MemoryOrder::memory_order_relaxed));
+        return !unsafe_ptr<void>();
     }
 
     void revoke()
     {
-        RefPtrTraits<void>::exchange(m_bits, RefPtrTraits<void>::default_null_value);
+        auto current_consumers = m_consumers.fetch_or(1u, AK::MemoryOrder::memory_order_relaxed);
+        ASSERT(!(current_consumers & 1u));
+        // We flagged revokation, now wait until everyone trying to obtain
+        // a strong reference is done
+        while (current_consumers > 0) {
+#ifdef KERNEL
+            Kernel::Processor::wait_check();
+#else
+            // TODO: yield?
+#endif
+            current_consumers = m_consumers.load(AK::MemoryOrder::memory_order_acquire) & ~1u;
+        }
+        // No one is trying to use it (anymore)
+        m_ptr.store(nullptr, AK::MemoryOrder::memory_order_release);
     }
 
 private:
     template<typename T>
     explicit WeakLink(T& weakable)
-        : m_bits(RefPtrTraits<void>::as_bits(&weakable))
+        : m_ptr(&weakable)
     {
     }
-    mutable Atomic<FlatPtr> m_bits;
+    mutable Atomic<void*> m_ptr;
+    mutable Atomic<unsigned> m_consumers; // LSB indicates revokation in progress
 };
 
 template<typename T>

@@ -108,23 +133,19 @@ protected:
 
     ~Weakable()
     {
-#ifdef WEAKABLE_DEBUG
-        m_being_destroyed = true;
-#endif
+        m_being_destroyed.store(true, AK::MemoryOrder::memory_order_release);
         revoke_weak_ptrs();
     }
 
     void revoke_weak_ptrs()
     {
-        if (m_link)
-            m_link->revoke();
+        if (auto link = move(m_link))
+            link->revoke();
     }
 
 private:
     mutable RefPtr<WeakLink> m_link;
-#ifdef WEAKABLE_DEBUG
-    bool m_being_destroyed { false };
-#endif
+    Atomic<bool> m_being_destroyed { false };
 };
 
 }
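The reader/revoker protocol introduced for WeakLink can be modeled on its own: bit 0 of the consumer counter flags a revocation in progress, the remaining bits count threads currently trying to obtain a strong reference, and revoke() spins until the in-flight readers drain before clearing the pointer. The sketch below uses std::atomic and std::this_thread::yield() in place of AK::Atomic and Kernel::Processor::wait_check(); Link, strong_peek and the member names are illustrative only.

#include <atomic>
#include <cassert>
#include <thread>

// Illustrative model of the WeakLink consumer protocol; not AK code.
struct Link {
    std::atomic<void*> ptr { nullptr };
    std::atomic<unsigned> consumers { 0 }; // LSB: revocation in progress

    void* strong_peek()
    {
        void* result = nullptr;
        // Announce ourselves as a reader; bail out if revocation already started.
        if (!(consumers.fetch_add(1u << 1, std::memory_order_acquire) & 1u))
            result = ptr.load(std::memory_order_acquire); // a real WeakLink would try_ref() here
        consumers.fetch_sub(1u << 1, std::memory_order_release);
        return result;
    }

    void revoke()
    {
        unsigned current = consumers.fetch_or(1u, std::memory_order_relaxed);
        assert(!(current & 1u)); // revoke must only run once
        // Wait for in-flight readers to finish before tearing down the pointer.
        while (current > 0) {
            std::this_thread::yield();
            current = consumers.load(std::memory_order_acquire) & ~1u;
        }
        ptr.store(nullptr, std::memory_order_release);
    }
};

int main()
{
    int value = 42;
    Link link;
    link.ptr.store(&value);
    assert(link.strong_peek() == &value);
    link.revoke();
    assert(link.strong_peek() == nullptr);
}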
Libraries/LibGUI/Application.cpp

@@ -72,6 +72,11 @@ static NeverDestroyed<WeakPtr<Application>> s_the;
 
 Application* Application::the()
 {
+    // NOTE: If we don't explicitly call revoke_weak_ptrs() in the
+    // ~Application destructor, we would have to change this to
+    // return s_the->strong_ref().ptr();
+    // This is because this is using the unsafe operator*/operator->
+    // that do not have the ability to check the ref count!
     return *s_the;
 }
 
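The comment added above distinguishes unchecked access (operator*/operator->) from checked access via strong_ref(). As a loose standard-library analogy (not AK code), std::weak_ptr::lock() plays the role of strong_ref() here: it yields a null pointer once the object is gone instead of handing out an unchecked reference.

#include <cstdio>
#include <memory>

// Analogy only: std::weak_ptr::lock() stands in for WeakPtr::strong_ref().
struct Application { int id = 1; };

int main()
{
    std::weak_ptr<Application> the;
    {
        auto app = std::make_shared<Application>();
        the = app;
        if (auto strong = the.lock())                 // checked access
            std::printf("alive: %d\n", strong->id);
    }
    std::printf("after destruction, lock() is null: %d\n", the.lock() == nullptr);
}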