Kernel: Remove old "region lookup cache" optimization
This optimization was added when region lookup was O(n), before we had the O(log n) RedBlackTree. Let's remove it to simplify the code, as we have no evidence that it remains valuable.
parent: c55dfabdd5
commit: 4fa3c1bf2d

2 changed files with 0 additions and 15 deletions
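For context on the commit message's reasoning, here is a minimal, self-contained sketch (not SerenityOS code) of why a one-entry lookup cache stops paying off once regions live in a balanced tree keyed by base address: the tree lookup is already O(log n). The AddressSpaceSketch and Region types, find_region_from_base, and the use of std::map / std::unique_ptr as stand-ins for AK::RedBlackTree / NonnullOwnPtr are illustrative assumptions, not the kernel's actual API.

// Illustrative sketch only; std::map stands in for AK::RedBlackTree.
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>
#include <memory>

using FlatPtr = std::uintptr_t;

struct Region {
    FlatPtr base { 0 };
    std::size_t size { 0 };
};

class AddressSpaceSketch {
public:
    void add_region(std::unique_ptr<Region> region)
    {
        FlatPtr base = region->base; // read the key before moving the owner
        m_regions.emplace(base, std::move(region));
    }

    // O(log n) exact-base lookup straight from the ordered tree; no cache needed.
    Region* find_region_from_base(FlatPtr base)
    {
        auto it = m_regions.find(base);
        return it != m_regions.end() ? it->second.get() : nullptr;
    }

private:
    // Mirrors the shape of RedBlackTree<FlatPtr, NonnullOwnPtr<Region>> m_regions;
    std::map<FlatPtr, std::unique_ptr<Region>> m_regions;
};

int main()
{
    AddressSpaceSketch space;
    space.add_region(std::make_unique<Region>(Region { 0x10000, 0x4000 }));
    space.add_region(std::make_unique<Region>(Region { 0x20000, 0x8000 }));

    if (auto* region = space.find_region_from_base(0x20000))
        std::cout << "found region of size 0x" << std::hex << region->size << '\n';
}

A single-entry cache like the removed RegionLookupCache would have helped when m_regions required an O(n) scan; against a balanced tree it mostly adds invalidation bookkeeping, as the take_region hunk below shows.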
AddressSpace.cpp

@@ -203,10 +203,6 @@ void AddressSpace::deallocate_region(Region& region)
 NonnullOwnPtr<Region> AddressSpace::take_region(Region& region)
 {
     SpinlockLocker lock(m_lock);
-
-    if (m_region_lookup_cache.region.unsafe_ptr() == &region)
-        m_region_lookup_cache.region = nullptr;
-
     auto found_region = m_regions.unsafe_remove(region.vaddr().get());
     VERIFY(found_region.ptr() == &region);
     return found_region;

@@ -215,9 +211,6 @@ NonnullOwnPtr<Region> AddressSpace::take_region(Region& region)
 Region* AddressSpace::find_region_from_range(VirtualRange const& range)
 {
     SpinlockLocker lock(m_lock);
-    if (m_region_lookup_cache.range.has_value() && m_region_lookup_cache.range.value() == range && m_region_lookup_cache.region)
-        return m_region_lookup_cache.region.unsafe_ptr();
-
     auto* found_region = m_regions.find(range.base().get());
     if (!found_region)
         return nullptr;

@@ -225,8 +218,6 @@ Region* AddressSpace::find_region_from_range(VirtualRange const& range)
     auto rounded_range_size = page_round_up(range.size());
     if (rounded_range_size.is_error() || region->size() != rounded_range_size.value())
         return nullptr;
-    m_region_lookup_cache.range = range;
-    m_region_lookup_cache.region = *region;
     return region;
 }

AddressSpace.h

@@ -74,12 +74,6 @@ private:
 
     RedBlackTree<FlatPtr, NonnullOwnPtr<Region>> m_regions;
 
-    struct RegionLookupCache {
-        Optional<VirtualRange> range;
-        WeakPtr<Region> region;
-    };
-    RegionLookupCache m_region_lookup_cache;
-
     bool m_enforces_syscall_regions { false };
 };