Kernel: Port VMObject to ListedRefCounted
The VMObject class now manages its own instance list (it was previously a member of MemoryManager). Removal from the list is done safely on the last unref(), closing a race window in the previous implementation.

Note that VMObject::all_instances() now has its own lock instead of using the global MM lock.
Parent: 3a2d888913
Commit: 7979b5a8bb
4 changed files with 26 additions and 28 deletions
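For readers who have not seen the pattern, here is a minimal userspace sketch of what "removal on the last unref()" buys: objects append themselves to a shared, lock-protected list when constructed, and the final unref() takes the list lock, removes the object, and only then frees it, so a concurrent iteration either sees a live object or does not see it at all. Every name below (Listed, with_all_instances, and so on) is an illustrative stand-in, not the kernel's actual ListedRefCounted API.

// Userspace sketch of the "listed ref-counted" idea described in the commit
// message. Illustrative only; std::mutex and std::list stand in for the
// kernel's spinlock and intrusive list.
#include <cassert>
#include <list>
#include <mutex>

template<typename T>
class Listed {
public:
    // Run a callback with the instance list while holding its lock,
    // mirroring the all_instances().with(...) usage in the diff below.
    template<typename F>
    static void with_all_instances(F callback)
    {
        std::lock_guard guard(s_lock);
        callback(s_instances);
    }

    void ref()
    {
        std::lock_guard guard(s_lock);
        ++m_ref_count;
    }

    void unref()
    {
        bool dead = false;
        {
            // Taking the list lock before dropping the last reference is what
            // closes the race: an iterator either sees the object while it is
            // still alive or does not see it at all.
            std::lock_guard guard(s_lock);
            if (--m_ref_count == 0) {
                s_instances.remove(static_cast<T*>(this));
                dead = true;
            }
        }
        if (dead)
            delete static_cast<T*>(this);
    }

protected:
    Listed()
    {
        // Self-register on construction, like the VMObject constructors below.
        std::lock_guard guard(s_lock);
        s_instances.push_back(static_cast<T*>(this));
    }
    virtual ~Listed() = default;

private:
    unsigned m_ref_count { 1 };
    static inline std::mutex s_lock;
    static inline std::list<T*> s_instances;
};

struct Object : Listed<Object> { };

int main()
{
    auto* o = new Object;
    Listed<Object>::with_all_instances([](auto& list) { assert(list.size() == 1); });
    o->unref(); // last unref: removed from the list under the lock, then freed
    Listed<Object>::with_all_instances([](auto& list) { assert(list.empty()); });
}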
Kernel/Memory/MemoryManager.cpp
@@ -1053,18 +1053,6 @@ bool MemoryManager::validate_user_stack(AddressSpace& space, VirtualAddress vaddr)
     return validate_user_stack_no_lock(space, vaddr);
 }
 
-void MemoryManager::register_vmobject(VMObject& vmobject)
-{
-    ScopedSpinLock lock(s_mm_lock);
-    m_vmobjects.append(vmobject);
-}
-
-void MemoryManager::unregister_vmobject(VMObject& vmobject)
-{
-    ScopedSpinLock lock(s_mm_lock);
-    m_vmobjects.remove(vmobject);
-}
-
 void MemoryManager::register_region(Region& region)
 {
     ScopedSpinLock lock(s_mm_lock);

Kernel/Memory/MemoryManager.h
@@ -204,18 +204,22 @@ public:
     template<IteratorFunction<VMObject&> Callback>
     static void for_each_vmobject(Callback callback)
     {
-        ScopedSpinLock locker(s_mm_lock);
-        for (auto& vmobject : MM.m_vmobjects) {
-            if (callback(vmobject) == IterationDecision::Break)
-                break;
-        }
+        VMObject::all_instances().with([&](auto& list) {
+            for (auto& vmobject : list) {
+                if (callback(vmobject) == IterationDecision::Break)
+                    break;
+            }
+        });
     }
 
     template<VoidFunction<VMObject&> Callback>
     static void for_each_vmobject(Callback callback)
     {
-        for (auto& vmobject : MM.m_vmobjects)
-            callback(vmobject);
+        VMObject::all_instances().with([&](auto& list) {
+            for (auto& vmobject : list) {
+                callback(vmobject);
+            }
+        });
     }
 
     static Region* find_user_region_from_vaddr(AddressSpace&, VirtualAddress);
@@ -242,8 +246,6 @@ private:
     void initialize_physical_pages();
     void register_reserved_ranges();
 
-    void register_vmobject(VMObject&);
-    void unregister_vmobject(VMObject&);
     void register_region(Region&);
     void unregister_region(Region&);
 
@@ -289,8 +291,6 @@ private:
     Vector<UsedMemoryRange> m_used_memory_ranges;
     Vector<PhysicalMemoryRange> m_physical_memory_ranges;
     Vector<ContiguousReservedMemoryRange> m_reserved_memory_ranges;
-
-    VMObject::List m_vmobjects;
 };
 
 inline bool is_user_address(VirtualAddress vaddr)
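Both for_each_vmobject overloads above now run their loop bodies inside all_instances().with(...), so iteration happens under VMObject's own list lock rather than the global s_mm_lock. A standalone approximation of that shape, with std::mutex and std::list<int> standing in for the kernel types and IterationDecision mirroring AK's enum, might look like the following; all names here are made up for the example.

// Sketch of the two iteration helpers: one that can stop early via an
// IterationDecision, one that always visits every element. Both hold the
// list lock for the whole traversal.
#include <cstdio>
#include <list>
#include <mutex>

enum class IterationDecision { Continue, Break };

static std::mutex s_lock;
static std::list<int> s_instances { 1, 2, 3, 4 };

template<typename Callback>
void for_each_instance_with_decision(Callback callback)
{
    std::lock_guard guard(s_lock);
    for (auto& instance : s_instances) {
        if (callback(instance) == IterationDecision::Break)
            break;
    }
}

template<typename Callback>
void for_each_instance(Callback callback)
{
    std::lock_guard guard(s_lock);
    for (auto& instance : s_instances)
        callback(instance);
}

int main()
{
    for_each_instance_with_decision([](int value) {
        std::printf("visited %d\n", value);
        return value < 2 ? IterationDecision::Continue : IterationDecision::Break;
    });
    for_each_instance([](int value) { std::printf("all: %d\n", value); });
}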

Kernel/Memory/VMObject.cpp
@@ -4,21 +4,29 @@
  * SPDX-License-Identifier: BSD-2-Clause
  */
 
+#include <AK/Singleton.h>
 #include <Kernel/Memory/MemoryManager.h>
 #include <Kernel/Memory/VMObject.h>
 
 namespace Kernel::Memory {
 
+static Singleton<SpinLockProtectedValue<VMObject::AllInstancesList>> s_all_instances;
+
+SpinLockProtectedValue<VMObject::AllInstancesList>& VMObject::all_instances()
+{
+    return s_all_instances;
+}
+
 VMObject::VMObject(VMObject const& other)
     : m_physical_pages(other.m_physical_pages)
 {
-    MM.register_vmobject(*this);
+    all_instances().with([&](auto& list) { list.append(*this); });
 }
 
 VMObject::VMObject(size_t size)
     : m_physical_pages(ceil_div(size, static_cast<size_t>(PAGE_SIZE)))
 {
-    MM.register_vmobject(*this);
+    all_instances().with([&](auto& list) { list.append(*this); });
 }
 
 VMObject::~VMObject()
@@ -30,7 +38,6 @@ VMObject::~VMObject()
         m_on_deleted.clear();
     }
 
-    MM.unregister_vmobject(*this);
     VERIFY(m_regions.is_empty());
 }
 
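The all_instances().with([&](auto& list) { list.append(*this); }) calls above use the locked-value idiom: the list can only be reached through a callback that runs while the protecting lock is held, so no append, remove, or traversal can forget the lock. A rough userspace analogue of such a wrapper is sketched below; the kernel's SpinLockProtectedValue uses a spinlock rather than a mutex, and the name LockProtected is invented for this example.

// Sketch of a locked-value wrapper: the wrapped value is private and only
// handed to callbacks inside with(), with the lock held for their duration.
#include <cstdio>
#include <list>
#include <mutex>
#include <utility>

template<typename T>
class LockProtected {
public:
    template<typename... Args>
    explicit LockProtected(Args&&... args)
        : m_value(std::forward<Args>(args)...)
    {
    }

    template<typename Callback>
    decltype(auto) with(Callback callback)
    {
        std::lock_guard guard(m_lock);
        return callback(m_value);
    }

private:
    std::mutex m_lock;
    T m_value;
};

int main()
{
    LockProtected<std::list<int>> instances;
    instances.with([](auto& list) { list.push_back(42); });                  // append under the lock
    instances.with([](auto& list) { std::printf("size=%zu\n", list.size()); }); // inspect under the lock
}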

Kernel/Memory/VMObject.h
@@ -14,6 +14,7 @@
 #include <AK/Vector.h>
 #include <AK/Weakable.h>
 #include <Kernel/Forward.h>
+#include <Kernel/Library/ListedRefCounted.h>
 #include <Kernel/Locking/Mutex.h>
 #include <Kernel/Memory/Region.h>
 
@@ -25,7 +26,8 @@ public:
     virtual void vmobject_deleted(VMObject&) = 0;
 };
 
-class VMObject : public RefCounted<VMObject>
+class VMObject
+    : public ListedRefCounted<VMObject>
     , public Weakable<VMObject> {
     friend class MemoryManager;
     friend class Region;
@@ -95,7 +97,8 @@ private:
     Region::ListInVMObject m_regions;
 
 public:
-    using List = IntrusiveList<VMObject, RawPtr<VMObject>, &VMObject::m_list_node>;
+    using AllInstancesList = IntrusiveList<VMObject, RawPtr<VMObject>, &VMObject::m_list_node>;
+    static SpinLockProtectedValue<VMObject::AllInstancesList>& all_instances();
 };
 
 template<typename Callback>