
Kernel: Avoid O(n) loop over zones when allocating from PhysicalRegion

We now keep all the PhysicalZones on one of two intrusive lists within
the PhysicalRegion.

The "usable" list contains all zones that can be allocated from,
and the "full" list contains all zones with no free pages.
Andreas Kling, 2021-07-13 19:52:42 +02:00
commit 379bcd26e4, parent 9ae067aa7f
3 changed files with 38 additions and 10 deletions
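
A rough, self-contained sketch of the same bookkeeping outside the kernel may help: two lists per region, with zones migrating between them as they drain and refill. It uses std::list and splice() as a stand-in for AK::IntrusiveList, and the Zone/Region types, page counts, and the integer "page" return value are simplified placeholders rather than SerenityOS code.

// Minimal sketch of the usable/full zone bookkeeping (not SerenityOS code).
// std::list + splice() stands in for AK::IntrusiveList; Zone and Region are
// simplified placeholders.
#include <cstddef>
#include <cstdio>
#include <list>
#include <optional>
#include <vector>

struct Zone {
    int id { 0 };
    std::size_t free_pages { 0 };
    bool is_empty() const { return free_pages == 0; }
    std::optional<int> allocate_page()
    {
        if (is_empty())
            return {};
        --free_pages;
        return 0; // A real zone would return a physical address here.
    }
};

struct Region {
    std::vector<Zone> zones;
    std::list<Zone*> usable_zones; // zones with at least one free page
    std::list<Zone*> full_zones;   // zones with no free pages

    // O(1): always allocate from the first usable zone, and park it on the
    // full list once it runs dry so it is never scanned again.
    std::optional<int> take_free_page()
    {
        if (usable_zones.empty())
            return {};
        Zone& zone = *usable_zones.front();
        auto page = zone.allocate_page();
        if (zone.is_empty())
            full_zones.splice(full_zones.end(), usable_zones, usable_zones.begin());
        return page;
    }

    // When a page comes back to a full zone, the zone becomes usable again.
    void return_page(Zone& zone)
    {
        ++zone.free_pages;
        for (auto it = full_zones.begin(); it != full_zones.end(); ++it) {
            if (*it == &zone) {
                usable_zones.splice(usable_zones.end(), full_zones, it);
                break;
            }
        }
    }
};

int main()
{
    Region region;
    region.zones = { { 0, 2 }, { 1, 2 } };
    for (auto& zone : region.zones)
        region.usable_zones.push_back(&zone);

    while (region.take_free_page().has_value())
        ;
    std::printf("usable=%zu full=%zu\n", region.usable_zones.size(), region.full_zones.size()); // usable=0 full=2

    region.return_page(region.zones[0]);
    std::printf("usable=%zu full=%zu\n", region.usable_zones.size(), region.full_zones.size()); // usable=1 full=1
    return 0;
}

Because an exhausted zone is parked on the full list, allocation only ever touches the front of the usable list, which is what removes the O(n) walk over zones.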

Kernel/VM/PhysicalRegion.cpp

@@ -47,6 +47,7 @@ void PhysicalRegion::initialize_zones()
             m_zones.append(make<PhysicalZone>(base_address, zone_size));
             dmesgln(" * Zone {:016x}-{:016x} ({} bytes)", base_address.get(), base_address.get() + zone_size * PAGE_SIZE - 1, zone_size * PAGE_SIZE);
             base_address = base_address.offset(zone_size * PAGE_SIZE);
+            m_usable_zones.append(m_zones.last());
             remaining_pages -= zone_size;
         }
     };
@@ -75,9 +76,13 @@ NonnullRefPtrVector<PhysicalPage> PhysicalRegion::take_contiguous_free_pages(siz
     Optional<PhysicalAddress> page_base;
     for (auto& zone : m_zones) {
         page_base = zone.allocate_block(order);
-        if (page_base.has_value())
+        if (page_base.has_value()) {
+            if (zone.is_empty()) {
+                // We've exhausted this zone, move it to the full zones list.
+                m_full_zones.append(zone);
+            }
             break;
+        }
     }
 
     if (!page_base.has_value())
@@ -93,21 +98,34 @@ NonnullRefPtrVector<PhysicalPage> PhysicalRegion::take_contiguous_free_pages(siz
 
 RefPtr<PhysicalPage> PhysicalRegion::take_free_page()
 {
-    for (auto& zone : m_zones) {
-        auto page = zone.allocate_block(0);
-        if (page.has_value())
-            return PhysicalPage::create(page.value());
+    if (m_usable_zones.is_empty()) {
+        dbgln("PhysicalRegion::take_free_page: No free physical pages");
+        return nullptr;
     }
 
-    dbgln("PhysicalRegion::take_free_page: No free physical pages");
-    return nullptr;
+    auto& zone = *m_usable_zones.first();
+    auto page = zone.allocate_block(0);
+    VERIFY(page.has_value());
+
+    if (zone.is_empty()) {
+        // We've exhausted this zone, move it to the full zones list.
+        m_full_zones.append(zone);
+    }
+
+    return PhysicalPage::create(page.value());
 }
 
 void PhysicalRegion::return_page(PhysicalAddress paddr)
 {
+    // FIXME: Find a way to avoid looping over the zones here.
+    //        (Do some math on the address to find the right zone index.)
+    //        The main thing that gets in the way of this is non-uniform zone sizes.
+    //        Perhaps it would be better if all zones had the same size.
     for (auto& zone : m_zones) {
         if (zone.contains(paddr)) {
             zone.deallocate_block(paddr, 0);
+            if (m_full_zones.contains(zone))
+                m_usable_zones.append(zone);
             return;
         }
     }
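
The FIXME in return_page() sketches a direction this commit does not take: with uniformly sized zones, the owning zone could be found by address arithmetic instead of a scan. A hypothetical illustration of that idea, where UniformZoneTable, pages_per_zone, and the example addresses are all made up for the sketch:

// Hypothetical sketch of the FIXME's suggestion (not part of this commit):
// if every zone spanned the same number of pages, a single division would
// map a physical address to its zone index.
#include <cstddef>
#include <cstdint>

using PhysicalAddress = std::uintptr_t;
static constexpr std::size_t PAGE_SIZE = 4096;

struct UniformZoneTable {
    PhysicalAddress base { 0 };        // start of the region
    std::size_t pages_per_zone { 0 };  // identical for every zone (the assumption)
    std::size_t zone_count { 0 };

    // O(1) lookup: offset into the region divided by the zone span.
    // (A real implementation would also check the index against zone_count.)
    std::size_t zone_index_for(PhysicalAddress paddr) const
    {
        std::size_t offset = paddr - base;
        return offset / (pages_per_zone * PAGE_SIZE);
    }
};

int main()
{
    UniformZoneTable table { 0x100000, 256, 16 };
    // A page three zone-spans into the region lands in zone index 3.
    PhysicalAddress paddr = table.base + 3 * 256 * PAGE_SIZE + 5 * PAGE_SIZE;
    return table.zone_index_for(paddr) == 3 ? 0 : 1;
}

The division only works because every zone spans the same number of pages, which is exactly the non-uniformity the FIXME calls out.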

Kernel/VM/PhysicalRegion.h

@@ -11,11 +11,10 @@
 #include <AK/Optional.h>
 #include <AK/OwnPtr.h>
 #include <Kernel/VM/PhysicalPage.h>
+#include <Kernel/VM/PhysicalZone.h>
 
 namespace Kernel {
 
-class PhysicalZone;
-
 class PhysicalRegion {
     AK_MAKE_ETERNAL
 public:
@@ -44,6 +43,9 @@ private:
     NonnullOwnPtrVector<PhysicalZone> m_zones;
 
+    PhysicalZone::List m_usable_zones;
+    PhysicalZone::List m_full_zones;
+
     PhysicalAddress m_lower;
     PhysicalAddress m_upper;
     unsigned m_pages { 0 };

Kernel/VM/PhysicalZone.h

@@ -8,6 +8,7 @@
 #include <AK/Bitmap.h>
 #include <AK/Forward.h>
+#include <AK/IntrusiveList.h>
 #include <AK/Types.h>
 #include <Kernel/Forward.h>
@@ -32,6 +33,8 @@ public:
     void dump() const;
 
     size_t available() const { return m_page_count - (m_used_chunks / 2); }
+    bool is_empty() const { return !available(); }
+
     PhysicalAddress base() const { return m_base_address; }
     bool contains(PhysicalAddress paddr) const
     {
@@ -82,6 +85,11 @@ private:
     PhysicalAddress m_base_address { 0 };
     size_t m_page_count { 0 };
     size_t m_used_chunks { 0 };
+
+    IntrusiveListNode<PhysicalZone> m_list_node;
+
+public:
+    using List = IntrusiveList<PhysicalZone, RawPtr<PhysicalZone>, &PhysicalZone::m_list_node>;
 };
 
 }