AK: Give BumpAllocator a single-block cache
This avoids excessive mmap/munmap traffic in normal operation.
parent e4b1c0b8b1
commit a72eea6408
1 changed file with 26 additions and 12 deletions
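
The mechanism is a single-slot cache: instead of returning every chunk to the system, deallocate_all() parks the last chunk of the list in a static pointer, and the next chunk allocation reuses it before falling back to mmap. A minimal standalone sketch of the same pattern, assuming POSIX mmap and a fixed chunk size (the names and free functions here are illustrative, not the commit's code):

#include <cstddef>
#include <sys/mman.h>
#include <utility>

// Hypothetical standalone single-slot chunk cache.
static void* s_cached_chunk = nullptr;
static constexpr size_t chunk_size = 4 * 1024 * 1024;

void* acquire_chunk()
{
    // Prefer the cached chunk; std::exchange clears the slot as it reads it.
    if (s_cached_chunk)
        return std::exchange(s_cached_chunk, nullptr);
    void* chunk = mmap(nullptr, chunk_size, PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
    return chunk == MAP_FAILED ? nullptr : chunk;
}

void release_chunk(void* chunk)
{
    // Keep at most one chunk for later reuse; unmap any further ones.
    if (!s_cached_chunk)
        s_cached_chunk = chunk;
    else
        munmap(chunk, chunk_size);
}

A caller that repeatedly builds up and tears down allocations then touches the kernel only on the first acquire, which is exactly the mmap/munmap traffic the commit message refers to.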
AK/BumpAllocator.h:

@@ -62,6 +62,13 @@ public:
         if (!m_head_chunk)
             return;
         for_each_chunk([this](auto chunk) {
+            if (!s_unused_allocation_cache) {
+                auto next_chunk = ((ChunkHeader const*)chunk)->next_chunk;
+                if (!next_chunk) {
+                    s_unused_allocation_cache = chunk;
+                    return;
+                }
+            }
             if constexpr (use_mmap) {
                 munmap((void*)chunk, m_chunk_size);
             } else {
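In deallocate_all(), the chunk walk now stashes the final chunk (the one whose header has no next_chunk) in s_unused_allocation_cache instead of releasing it, provided the cache slot is still empty; every other chunk is unmapped or freed as before.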
@@ -91,6 +98,9 @@ protected:
         // dbgln("Allocated {} entries in previous chunk and have {} unusable bytes", m_allocations_in_previous_chunk, m_chunk_size - m_byte_offset_into_current_chunk);
         // m_allocations_in_previous_chunk = 0;
         void* new_chunk;
+        if (s_unused_allocation_cache) {
+            new_chunk = (void*)exchange(s_unused_allocation_cache, 0);
+        } else {
         if constexpr (use_mmap) {
 #ifdef __serenity__
             new_chunk = serenity_mmap(nullptr, m_chunk_size, PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_RANDOMIZED | MAP_PRIVATE, 0, 0, m_chunk_size, "BumpAllocator Chunk");
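On the allocation side the cache is consumed first: exchange() hands back the cached pointer and zeroes the slot in one step, so the same chunk cannot be handed out twice. Only when the slot is empty does the allocator fall through to the pre-existing mmap/kmalloc path, now wrapped in an else branch. Note that the slot is a plain FlatPtr with no synchronization, so this presumably leans on BumpAllocator's existing single-threaded usage.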
@@ -104,6 +114,7 @@ protected:
             if (!new_chunk)
                 return false;
         }
+        }
 
         auto& new_header = *(ChunkHeader*)new_chunk;
         new_header.magic = chunk_magic;
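The lone added brace closes the else branch opened in the previous hunk; the original allocation code between the two hunks keeps its old indentation.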
@@ -134,7 +145,7 @@ protected:
     FlatPtr m_current_chunk { 0 };
     size_t m_byte_offset_into_current_chunk { 0 };
     size_t m_chunk_size { 0 };
-    // size_t m_allocations_in_previous_chunk { 0 };
+    static FlatPtr s_unused_allocation_cache;
 };
 
 template<typename T, bool use_mmap = false, size_t chunk_size = use_mmap ? 4 * MiB : 4 * KiB>
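In the member list, the commented-out m_allocations_in_previous_chunk counter gives way to the cache slot itself. Because s_unused_allocation_cache is static, all allocators of a given instantiation share one slot, and each distinct <use_mmap, chunk_size> combination gets its own.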
@@ -172,6 +183,9 @@ public:
     }
 };
 
+template<bool use_mmap, size_t size>
+inline FlatPtr BumpAllocator<use_mmap, size>::s_unused_allocation_cache { 0 };
+
 }
 
 using AK::BumpAllocator;
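
A static data member of a class template must still be defined at namespace scope; the definition can live in the header because definitions of templated entities are merged across translation units, and the inline keyword (C++17 inline variables) makes that explicit. The same idiom in miniature (names here are illustrative, not from the commit):

template<typename T>
struct Counter {
    static int s_count; // declaration inside the class template
};

template<typename T>
inline int Counter<T>::s_count { 0 }; // one definition, safe to include in many translation units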