diff --git a/Libraries/LibJS/CMakeLists.txt b/Libraries/LibJS/CMakeLists.txt
index c51da6bf75..200202e214 100644
--- a/Libraries/LibJS/CMakeLists.txt
+++ b/Libraries/LibJS/CMakeLists.txt
@@ -1,6 +1,7 @@
 set(SOURCES
     AST.cpp
     Console.cpp
+    Heap/Allocator.cpp
     Heap/Handle.cpp
     Heap/HeapBlock.cpp
     Heap/Heap.cpp
diff --git a/Libraries/LibJS/Forward.h b/Libraries/LibJS/Forward.h
index 8d02d77d6b..3e85cd6b01 100644
--- a/Libraries/LibJS/Forward.h
+++ b/Libraries/LibJS/Forward.h
@@ -95,6 +95,7 @@ namespace JS {
 class ASTNode;
+class Allocator;
 class BigInt;
 class BoundFunction;
 class Cell;
diff --git a/Libraries/LibJS/Heap/Allocator.cpp b/Libraries/LibJS/Heap/Allocator.cpp
new file mode 100644
index 0000000000..36dec6132c
--- /dev/null
+++ b/Libraries/LibJS/Heap/Allocator.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this
+ *    list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ *    this list of conditions and the following disclaimer in the documentation
+ *    and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <LibJS/Heap/Allocator.h>
+#include <LibJS/Heap/Heap.h>
+#include <LibJS/Heap/HeapBlock.h>
+
+namespace JS {
+
+Allocator::Allocator(size_t cell_size)
+    : m_cell_size(cell_size)
+{
+}
+
+Allocator::~Allocator()
+{
+}
+
+Cell* Allocator::allocate_cell(Heap& heap)
+{
+    if (m_usable_blocks.is_empty()) {
+        m_usable_blocks.append(HeapBlock::create_with_cell_size(heap, m_cell_size));
+    }
+
+    auto& block = *m_usable_blocks.last();
+    auto* cell = block.allocate();
+    ASSERT(cell);
+    if (block.is_full()) {
+        m_full_blocks.append(m_usable_blocks.take_last());
+    }
+    return cell;
+}
+
+void Allocator::block_did_become_empty(Badge<Heap>, HeapBlock& block)
+{
+    bool removed_something = false;
+    removed_something |= m_full_blocks.remove_first_matching([&block](auto& entry) { return entry == &block; });
+    removed_something |= m_usable_blocks.remove_first_matching([&block](auto& entry) { return entry == &block; });
+    ASSERT(removed_something);
+}
+
+void Allocator::block_did_become_usable(Badge<Heap>, HeapBlock& block)
+{
+    ASSERT(!block.is_full());
+    auto it = m_full_blocks.find([&](auto& entry) {
+        return entry == &block;
+    });
+    ASSERT(it != m_full_blocks.end());
+    auto owned_block = m_full_blocks.take(it.index());
+    m_usable_blocks.append(move(owned_block));
+}
+
+}
diff --git a/Libraries/LibJS/Heap/Allocator.h b/Libraries/LibJS/Heap/Allocator.h
new file mode 100644
index 0000000000..ca2b84cbe8
--- /dev/null
+++ b/Libraries/LibJS/Heap/Allocator.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this
+ *    list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ *    this list of conditions and the following disclaimer in the documentation
+ *    and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include <AK/NonnullOwnPtr.h>
+#include <AK/Vector.h>
+#include <LibJS/Forward.h>
+
+namespace JS {
+
+class Allocator {
+public:
+    Allocator(size_t cell_size);
+    ~Allocator();
+
+    size_t cell_size() const { return m_cell_size; }
+
+    Cell* allocate_cell(Heap&);
+
+    template<typename Callback>
+    IterationDecision for_each_block(Callback callback)
+    {
+        for (auto& block : m_full_blocks) {
+            if (callback(*block) == IterationDecision::Break)
+                return IterationDecision::Break;
+        }
+        for (auto& block : m_usable_blocks) {
+            if (callback(*block) == IterationDecision::Break)
+                return IterationDecision::Break;
+        }
+        return IterationDecision::Continue;
+    }
+
+    void block_did_become_empty(Badge<Heap>, HeapBlock&);
+    void block_did_become_usable(Badge<Heap>, HeapBlock&);
+
+private:
+    const size_t m_cell_size;
+
+    Vector<NonnullOwnPtr<HeapBlock>> m_full_blocks;
+    Vector<NonnullOwnPtr<HeapBlock>> m_usable_blocks;
+};
+
+}
diff --git a/Libraries/LibJS/Heap/Heap.cpp b/Libraries/LibJS/Heap/Heap.cpp
index 7cde425888..4f6b7436aa 100644
--- a/Libraries/LibJS/Heap/Heap.cpp
+++ b/Libraries/LibJS/Heap/Heap.cpp
@@ -27,6 +27,7 @@
 #include <AK/Badge.h>
 #include <AK/HashTable.h>
 #include <LibCore/ElapsedTimer.h>
+#include <LibJS/Heap/Allocator.h>
 #include <LibJS/Heap/Handle.h>
 #include <LibJS/Heap/Heap.h>
 #include <LibJS/Heap/HeapBlock.h>
@@ -50,6 +51,14 @@ namespace JS {
 Heap::Heap(VM& vm)
     : m_vm(vm)
 {
+    m_allocators.append(make<Allocator>(16));
+    m_allocators.append(make<Allocator>(32));
+    m_allocators.append(make<Allocator>(64));
+    m_allocators.append(make<Allocator>(128));
+    m_allocators.append(make<Allocator>(256));
+    m_allocators.append(make<Allocator>(512));
+    m_allocators.append(make<Allocator>(1024));
+    m_allocators.append(make<Allocator>(3172));
 }
 
 Heap::~Heap()
@@ -57,6 +66,15 @@ Heap::~Heap()
     collect_garbage(CollectionType::CollectEverything);
 }
 
+ALWAYS_INLINE Allocator& Heap::allocator_for_size(size_t cell_size)
+{
+    for (auto& allocator : m_allocators) {
+        if (allocator->cell_size() >= cell_size)
+            return *allocator;
+    }
+    ASSERT_NOT_REACHED();
+}
+
 Cell* Heap::allocate_cell(size_t size)
 {
     if (should_collect_on_every_allocation()) {
@@ -68,18 +86,8 @@ Cell* Heap::allocate_cell(size_t size)
         ++m_allocations_since_last_gc;
     }
 
-    for (auto& block : m_blocks) {
-        if (size > block->cell_size())
-            continue;
-        if (auto* cell = block->allocate())
-            return cell;
-    }
-
-    size_t cell_size = round_up_to_power_of_two(size, 16);
-    auto block = HeapBlock::create_with_cell_size(*this, cell_size);
-    auto* cell = block->allocate();
-    m_blocks.append(move(block));
-    return cell;
+    auto& allocator = allocator_for_size(size);
+    return allocator.allocate_cell(*this);
 }
 
 void Heap::collect_garbage(CollectionType collection_type, bool print_report)
@@ -203,7 +211,15 @@ void Heap::gather_conservative_roots(HashTable<Cell*>& roots)
 Cell* Heap::cell_from_possible_pointer(FlatPtr pointer)
 {
     auto* possible_heap_block = HeapBlock::from_cell(reinterpret_cast<const Cell*>(pointer));
-    if (m_blocks.find([possible_heap_block](auto& block) { return block.ptr() == possible_heap_block; }) == m_blocks.end())
+    bool found = false;
+    for_each_block([&](auto& block) {
+        if (&block == possible_heap_block) {
+            found = true;
+            return IterationDecision::Break;
+        }
+        return IterationDecision::Continue;
+    });
+    if (!found)
         return nullptr;
     return possible_heap_block->cell_from_possible_pointer(pointer);
 }
@@ -240,57 +256,77 @@ void Heap::sweep_dead_cells(bool print_report, const Core::ElapsedTimer& measurement_timer)
     dbg() << "sweep_dead_cells:";
 #endif
     Vector<HeapBlock*, 32> empty_blocks;
+    Vector<HeapBlock*, 32> full_blocks_that_became_usable;
 
     size_t collected_cells = 0;
     size_t live_cells = 0;
     size_t collected_cell_bytes = 0;
     size_t live_cell_bytes = 0;
 
-    for (auto& block : m_blocks) {
+    for_each_block([&](auto& block) {
         bool block_has_live_cells = false;
-        block->for_each_cell([&](Cell* cell) {
+        bool block_was_full = block.is_full();
+        block.for_each_cell([&](Cell* cell) {
             if (cell->is_live()) {
                 if (!cell->is_marked()) {
 #ifdef HEAP_DEBUG
                     dbg() << "  ~ " << cell;
 #endif
-                    block->deallocate(cell);
+                    block.deallocate(cell);
                     ++collected_cells;
-                    collected_cell_bytes += block->cell_size();
+                    collected_cell_bytes += block.cell_size();
                 } else {
                     cell->set_marked(false);
                     block_has_live_cells = true;
                     ++live_cells;
-                    live_cell_bytes += block->cell_size();
+                    live_cell_bytes += block.cell_size();
                 }
             }
         });
         if (!block_has_live_cells)
-            empty_blocks.append(block);
-    }
+            empty_blocks.append(&block);
+        else if (block_was_full != block.is_full())
+            full_blocks_that_became_usable.append(&block);
+        return IterationDecision::Continue;
+    });
 
     for (auto* block : empty_blocks) {
 #ifdef HEAP_DEBUG
-        dbg() << " - Reclaim HeapBlock @ " << block << ": cell_size=" << block->cell_size();
+        dbg() << " - HeapBlock empty @ " << block << ": cell_size=" << block->cell_size();
 #endif
-        m_blocks.remove_first_matching([block](auto& entry) { return entry == block; });
+        allocator_for_size(block->cell_size()).block_did_become_empty({}, *block);
+    }
+
+    for (auto* block : full_blocks_that_became_usable) {
+#ifdef HEAP_DEBUG
+        dbg() << " - HeapBlock usable again @ " << block << ": cell_size=" << block->cell_size();
+#endif
+        allocator_for_size(block->cell_size()).block_did_become_usable({}, *block);
     }
 
 #ifdef HEAP_DEBUG
-    for (auto& block : m_blocks) {
-        dbg() << " > Live HeapBlock @ " << block << ": cell_size=" << block->cell_size();
-    }
+    for_each_block([&](auto& block) {
+        dbg() << " > Live HeapBlock @ " << &block << ": cell_size=" << block.cell_size();
+        return IterationDecision::Continue;
+    });
 #endif
 
     int time_spent = measurement_timer.elapsed();
 
     if (print_report) {
+
+        size_t live_block_count = 0;
+        for_each_block([&](auto&) {
+            ++live_block_count;
+            return IterationDecision::Continue;
+        });
+
         dbgln("Garbage collection report");
         dbgln("=============================================");
         dbgln("     Time spent: {} ms", time_spent);
         dbgln("     Live cells: {} ({} bytes)", live_cells, live_cell_bytes);
         dbgln("Collected cells: {} ({} bytes)", collected_cells, collected_cell_bytes);
-        dbgln("    Live blocks: {} ({} bytes)", m_blocks.size(), m_blocks.size() * HeapBlock::block_size);
+        dbgln("    Live blocks: {} ({} bytes)", live_block_count, live_block_count * HeapBlock::block_size);
         dbgln("   Freed blocks: {} ({} bytes)", empty_blocks.size(), empty_blocks.size() * HeapBlock::block_size);
         dbgln("=============================================");
     }
 }
diff --git a/Libraries/LibJS/Heap/Heap.h b/Libraries/LibJS/Heap/Heap.h
index ddf6d32c17..1658c8f155 100644
--- a/Libraries/LibJS/Heap/Heap.h
+++ b/Libraries/LibJS/Heap/Heap.h
@@ -33,6 +33,7 @@
 #include <AK/HashTable.h>
 #include <AK/Noncopyable.h>
 #include <AK/Vector.h>
+#include <LibJS/Heap/Allocator.h>
 #include <LibJS/Heap/Cell.h>
 #include <LibJS/Heap/Handle.h>
 #include <LibJS/Runtime/MarkedValueList.h>
@@ -101,13 +102,25 @@ private:
     Cell* cell_from_possible_pointer(FlatPtr);
 
+    Allocator& allocator_for_size(size_t);
+
+    template<typename Callback>
+    void for_each_block(Callback callback)
+    {
+        for (auto& allocator : m_allocators) {
+            if (allocator->for_each_block(callback) == IterationDecision::Break)
+                return;
+        }
+    }
+
     size_t m_max_allocations_between_gc { 10000 };
     size_t m_allocations_since_last_gc { false };
 
     bool m_should_collect_on_every_allocation { false };
 
     VM& m_vm;
-    Vector<NonnullOwnPtr<HeapBlock>> m_blocks;
+
+    Vector<NonnullOwnPtr<Allocator>> m_allocators;
 
     HashTable<HandleImpl*> m_handles;
 
     HashTable<MarkedValueList*> m_marked_value_lists;
diff --git a/Libraries/LibJS/Heap/HeapBlock.h b/Libraries/LibJS/Heap/HeapBlock.h
index 5b1a4c361b..48d7ebd554 100644
--- a/Libraries/LibJS/Heap/HeapBlock.h
+++ b/Libraries/LibJS/Heap/HeapBlock.h
@@ -33,6 +33,9 @@
 namespace JS {
 
 class HeapBlock {
+    AK_MAKE_NONCOPYABLE(HeapBlock);
+    AK_MAKE_NONMOVABLE(HeapBlock);
+
 public:
     static constexpr size_t block_size = 16 * KiB;
     static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, size_t);
@@ -41,6 +44,7 @@ public:
 
     size_t cell_size() const { return m_cell_size; }
     size_t cell_count() const { return (block_size - sizeof(HeapBlock)) / m_cell_size; }
+    bool is_full() const { return !m_freelist; }
 
     ALWAYS_INLINE Cell* allocate()
     {