diff --git a/Userland/Libraries/LibC/malloc.cpp b/Userland/Libraries/LibC/malloc.cpp
index 6b3601ca64..416a4b659b 100644
--- a/Userland/Libraries/LibC/malloc.cpp
+++ b/Userland/Libraries/LibC/malloc.cpp
@@ -244,8 +244,13 @@ static void* malloc_impl(size_t size, CallerWillInitializeMemory caller_will_ini
 
     --block->m_free_chunks;
     void* ptr = block->m_freelist;
+    if (ptr) {
+        block->m_freelist = block->m_freelist->next;
+    } else {
+        ptr = block->m_slot + block->m_next_lazy_freelist_index * block->m_size;
+        block->m_next_lazy_freelist_index++;
+    }
     VERIFY(ptr);
-    block->m_freelist = block->m_freelist->next;
     if (block->is_full()) {
         g_malloc_stats.number_of_blocks_full++;
         dbgln_if(MALLOC_DEBUG, "Block {:p} is now full in size class {}", block, good_size);
diff --git a/Userland/Libraries/LibC/mallocdefs.h b/Userland/Libraries/LibC/mallocdefs.h
index 71223b2822..ebf90eeaef 100644
--- a/Userland/Libraries/LibC/mallocdefs.h
+++ b/Userland/Libraries/LibC/mallocdefs.h
@@ -57,18 +57,11 @@ struct ChunkedBlock
         m_magic = MAGIC_PAGE_HEADER;
         m_size = bytes_per_chunk;
         m_free_chunks = chunk_capacity();
-        m_freelist = (FreelistEntry*)chunk(0);
-        for (size_t i = 0; i < chunk_capacity(); ++i) {
-            auto* entry = (FreelistEntry*)chunk(i);
-            if (i != chunk_capacity() - 1)
-                entry->next = (FreelistEntry*)chunk(i + 1);
-            else
-                entry->next = nullptr;
-        }
     }
 
     ChunkedBlock* m_prev { nullptr };
     ChunkedBlock* m_next { nullptr };
+    size_t m_next_lazy_freelist_index { 0 };
     FreelistEntry* m_freelist { nullptr };
     size_t m_free_chunks { 0 };
     [[gnu::aligned(8)]] unsigned char m_slot[0];
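
The patch above replaces the eager freelist construction in the ChunkedBlock constructor with lazy chunk hand-out: a freshly created block no longer writes a next pointer into every chunk up front, so its pages are not touched until chunks are actually allocated. Never-used chunks are handed out by bumping m_next_lazy_freelist_index, and m_freelist only ever holds chunks that have been freed and can be reused. The following is a minimal, self-contained sketch of the same technique, not the real LibC code: the names (LazyChunkBlock, m_next_lazy_index, the fixed chunk_size/chunk_count) are simplified stand-ins, and allocate/deallocate illustrate only the freelist logic, not the full malloc path.

#include <cassert>
#include <cstddef>

// Simplified stand-in for LibC's FreelistEntry: a freed chunk is reinterpreted
// as one of these so it can be threaded onto the freelist.
struct FreelistEntry {
    FreelistEntry* next;
};

// Hypothetical, simplified stand-in for ChunkedBlock demonstrating the lazy
// freelist idea.
struct LazyChunkBlock {
    static constexpr size_t chunk_size = 32;
    static constexpr size_t chunk_count = 8;

    // Only chunks that have been freed at least once appear on m_freelist;
    // chunks that were never handed out are tracked purely by m_next_lazy_index.
    FreelistEntry* m_freelist { nullptr };
    size_t m_next_lazy_index { 0 };
    size_t m_free_chunks { chunk_count };
    alignas(8) unsigned char m_slots[chunk_size * chunk_count];

    void* allocate()
    {
        if (m_free_chunks == 0)
            return nullptr;
        --m_free_chunks;
        void* ptr = m_freelist;
        if (ptr) {
            // Reuse a previously freed chunk.
            m_freelist = m_freelist->next;
        } else {
            // No freed chunk available: hand out the next never-used chunk.
            // This is the lazy part: untouched chunks are never written to
            // just to build a freelist.
            ptr = m_slots + m_next_lazy_index * chunk_size;
            ++m_next_lazy_index;
        }
        return ptr;
    }

    void deallocate(void* ptr)
    {
        // A chunk only needs a valid next pointer once it is freed, and at
        // that point it gets written anyway, so laziness costs nothing.
        auto* entry = static_cast<FreelistEntry*>(ptr);
        entry->next = m_freelist;
        m_freelist = entry;
        ++m_free_chunks;
    }
};

int main()
{
    LazyChunkBlock block;
    void* a = block.allocate(); // served via the lazy index
    void* b = block.allocate(); // also served via the lazy index
    block.deallocate(a);        // a is now on the freelist
    void* c = block.allocate(); // reuses a from the freelist
    assert(c == a);
    (void)b;
    return 0;
}

The sketch also shows why the VERIFY(ptr) in the patched malloc_impl still holds: if the freelist is empty but m_free_chunks was nonzero, there must be a never-used chunk left, so the lazy branch always produces a non-null pointer.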