From d422c46ebb1601eab33b1eabc2006f66d8d1d725 Mon Sep 17 00:00:00 2001
From: Andreas Kling
Date: Mon, 2 Dec 2019 19:59:53 +0100
Subject: [PATCH] LibC: Also mark empty-but-kept-around BigAllocationBlocks as
 PROT_NONE

This extends the opportunistic protection of empty-but-kept-around
blocks to also cover BigAllocationBlocks. Since we only cache 4KB BAB's
at the moment, this sees limited use, but it does work.
---
 Libraries/LibC/malloc.cpp | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/Libraries/LibC/malloc.cpp b/Libraries/LibC/malloc.cpp
index a78cf41d34..da95b57fce 100644
--- a/Libraries/LibC/malloc.cpp
+++ b/Libraries/LibC/malloc.cpp
@@ -161,6 +161,11 @@ void* malloc(size_t size)
     if (auto* allocator = big_allocator_for_size(real_size)) {
         if (!allocator->blocks.is_empty()) {
             auto* block = allocator->blocks.take_last();
+            if (mprotect(block, real_size, PROT_READ | PROT_WRITE) < 0) {
+                perror("mprotect");
+                ASSERT_NOT_REACHED();
+            }
+            set_mmap_name(block, PAGE_SIZE, "malloc: BigAllocationBlock (reused)");
             return &block->m_slot[0];
         }
     }
@@ -236,6 +241,11 @@ void free(void* ptr)
     if (auto* allocator = big_allocator_for_size(block->m_size)) {
         if (allocator->blocks.size() < number_of_big_blocks_to_keep_around_per_size_class) {
             allocator->blocks.append(block);
+            set_mmap_name(block, PAGE_SIZE, "malloc: BigAllocationBlock (free)");
+            if (mprotect(block, PAGE_SIZE, PROT_NONE) < 0) {
+                perror("mprotect");
+                ASSERT_NOT_REACHED();
+            }
             return;
         }
     }