
Kernel: Support Mutex Protected lists in ListedRefCounted

This will allow us to support MutexProtected lists, like the custodies
list, as well.
Authored by Idan Horowitz on 2021-12-29 00:22:14 +02:00; committed by Andreas Kling
parent 7204b292c5
commit be91b4fe3e
4 changed files with 19 additions and 8 deletions
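
With the new LockType template parameter, a class whose global instance list is guarded by a Mutex rather than a Spinlock can also plug into ListedRefCounted. A rough sketch of such a user follows; the Custody declaration and its list type here are illustrative, not part of this commit:

    class Custody : public ListedRefCounted<Custody, LockType::Mutex> {
    public:
        // ListedRefCounted expects the instance list to be reachable via
        // all_instances(); with LockType::Mutex, unref() locks it through
        // with_exclusive() instead of with().
        static MutexProtected<Custody::AllCustodiesList>& all_instances();
        // ...
    };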


@@ -23,7 +23,7 @@
 namespace Kernel {

-class Inode : public ListedRefCounted<Inode>
+class Inode : public ListedRefCounted<Inode, LockType::Spinlock>
     , public Weakable<Inode> {
     friend class VirtualFileSystem;
     friend class FileSystem;


@@ -11,21 +11,32 @@
 namespace Kernel {

 // ListedRefCounted<T> is a slot-in replacement for RefCounted<T> to use in classes
-// that add themselves to a SpinlockProtected<IntrusiveList> when constructed.
-// The custom unref() implementation here ensures that the the list is locked during
+// that add themselves to a {Spinlock, Mutex}Protected<IntrusiveList> when constructed.
+// The custom unref() implementation here ensures that the list is locked during
 // unref(), and that the T is removed from the list before ~T() is invoked.
-template<typename T>
+enum class LockType {
+    Spinlock,
+    Mutex,
+};
+
+template<typename T, LockType Lock>
 class ListedRefCounted : public RefCountedBase {
 public:
     bool unref() const
     {
-        auto new_ref_count = T::all_instances().with([&](auto& list) {
+        auto callback = [&](auto& list) {
             auto new_ref_count = deref_base();
             if (new_ref_count == 0)
                 list.remove(const_cast<T&>(static_cast<T const&>(*this)));
             return new_ref_count;
-        });
+        };
+
+        RefCountType new_ref_count;
+        if constexpr (Lock == LockType::Spinlock)
+            new_ref_count = T::all_instances().with(callback);
+        else if constexpr (Lock == LockType::Mutex)
+            new_ref_count = T::all_instances().with_exclusive(callback);
+
         if (new_ref_count == 0) {
             call_will_be_destroyed_if_present(static_cast<const T*>(this));
             delete const_cast<T*>(static_cast<T const*>(this));
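
The if constexpr dispatch above selects with() or with_exclusive() at template instantiation time, so the untaken branch is discarded and the callback runs under whichever lock guards T's list. The same pattern can be exercised outside the kernel; here is a minimal self-contained analog in standard C++ (std::mutex stands in for both kernel lock types, and every name is illustrative rather than SerenityOS API):

    #include <cstdio>
    #include <mutex>

    enum class LockType { Spinlock, Mutex };

    // Stand-ins for SpinlockProtected<T> / MutexProtected<T>: both serialize
    // access to a wrapped value, but expose it through differently named methods.
    template<typename T>
    struct FakeSpinlockProtected {
        template<typename F>
        auto with(F f) { std::lock_guard guard(m_lock); return f(m_value); }
        std::mutex m_lock;
        T m_value {};
    };

    template<typename T>
    struct FakeMutexProtected {
        template<typename F>
        auto with_exclusive(F f) { std::lock_guard guard(m_lock); return f(m_value); }
        std::mutex m_lock;
        T m_value {};
    };

    // Same shape as the patched unref(): pick the locking method at compile time.
    template<LockType Lock, typename Protected, typename F>
    auto with_locked(Protected& protected_value, F f)
    {
        if constexpr (Lock == LockType::Spinlock)
            return protected_value.with(f);
        else
            return protected_value.with_exclusive(f);
    }

    int main()
    {
        FakeSpinlockProtected<int> spinlocked;
        FakeMutexProtected<int> mutexed;
        int a = with_locked<LockType::Spinlock>(spinlocked, [](int& v) { return ++v; });
        int b = with_locked<LockType::Mutex>(mutexed, [](int& v) { return ++v; });
        std::printf("%d %d\n", a, b); // prints "1 1"
        return 0;
    }

Because the branch not taken is never instantiated, FakeSpinlockProtected needs no with_exclusive() and FakeMutexProtected needs no with(), mirroring how the kernel's two Protected wrappers expose different locking interfaces.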


@@ -18,7 +18,7 @@
 namespace Kernel::Memory {

 class VMObject
-    : public ListedRefCounted<VMObject>
+    : public ListedRefCounted<VMObject, LockType::Spinlock>
     , public Weakable<VMObject> {
     friend class MemoryManager;
     friend class Region;


@@ -146,7 +146,7 @@ struct ThreadRegisters {
 };

 class Thread
-    : public ListedRefCounted<Thread>
+    : public ListedRefCounted<Thread, LockType::Spinlock>
     , public Weakable<Thread> {
     AK_MAKE_NONCOPYABLE(Thread);
     AK_MAKE_NONMOVABLE(Thread);
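
All three existing users keep LockType::Spinlock, so their call sites are untouched: unref() still reaches the list through T::all_instances().with(...). The contract those classes fulfill looks roughly like the sketch below, inferred from the call in unref() rather than quoted from this commit:

    class Thread
        : public ListedRefCounted<Thread, LockType::Spinlock>
        , public Weakable<Thread> {
    public:
        // Spinlock-guarded intrusive list of all live Threads; unref() removes
        // the Thread from it before ~Thread() runs.
        static SpinlockProtected<Thread::GlobalList>& all_instances();
        // ...
    };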