
LibJS: Segregate GC-allocated objects by type

This patch adds two macros to declare per-type allocators:

- JS_DECLARE_ALLOCATOR(TypeName)
- JS_DEFINE_ALLOCATOR(TypeName)

When used, they add a type-specific CellAllocator that the Heap will
delegate allocation requests to.
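
A minimal sketch of how the macros are intended to be used (MyObject is a hypothetical example type; the actual call sites are not shown in this excerpt):

    // MyObject.h -- declare the per-type allocator inside the class:
    class MyObject final : public Object {
        JS_OBJECT(MyObject, Object);
        JS_DECLARE_ALLOCATOR(MyObject);
    };

    // MyObject.cpp -- define the allocator the Heap will delegate to:
    JS_DEFINE_ALLOCATOR(MyObject);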

The result of this is that GC objects of the same type always end up
within the same HeapBlock, drastically reducing the ability to perform
type confusion attacks.

It also improves HeapBlock utilization, since each block now has cells
sized exactly to the type used within that block. (Previously we only
had a handful of block sizes available, and most GC allocations ended
up with a large amount of slack in their tails.)
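
For a rough sense of the saving (the old size classes are not listed in this patch, so these numbers are only illustrative): with power-of-two-style size classes, a 72-byte cell would have to live in a 128-byte slot, wasting 56 bytes per cell, whereas a per-type allocator can carve its blocks into exactly 72-byte cells.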

There is a small performance hit from this, but I'm sure we can make
up for it elsewhere.

Note that the old size-based allocators still exist, and we fall back
to them for any type that doesn't have its own CellAllocator.
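
Call sites do not need to change either way; the choice of allocator is made inside the Heap. A hedged sketch using a hypothetical type:

    // MyThing is a hypothetical Cell subclass; allocation goes through the same API
    // whether or not it declares JS_DECLARE_ALLOCATOR:
    auto thing = heap.allocate_without_realm<MyThing>(42);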
Author: Andreas Kling
Date:   2023-11-19 09:45:05 +01:00
Parent: 84a8ee01e1
Commit: 3c74dc9f4d

428 changed files with 723 additions and 22 deletions


@@ -38,7 +38,7 @@ public:
     template<typename T, typename... Args>
     NonnullGCPtr<T> allocate_without_realm(Args&&... args)
     {
-        auto* memory = allocate_cell(sizeof(T));
+        auto* memory = allocate_cell<T>();
         defer_gc();
         new (memory) T(forward<Args>(args)...);
         undefer_gc();
@@ -48,7 +48,7 @@ public:
     template<typename T, typename... Args>
     NonnullGCPtr<T> allocate(Realm& realm, Args&&... args)
     {
-        auto* memory = allocate_cell(sizeof(T));
+        auto* memory = allocate_cell<T>();
         defer_gc();
         new (memory) T(forward<Args>(args)...);
         undefer_gc();
@@ -91,7 +91,19 @@ private:
     static bool cell_must_survive_garbage_collection(Cell const&);
-    Cell* allocate_cell(size_t);
+    template<typename T>
+    Cell* allocate_cell()
+    {
+        will_allocate(sizeof(T));
+        if constexpr (requires { T::cell_allocator.allocate_cell(*this); }) {
+            if constexpr (IsSame<T, typename decltype(T::cell_allocator)::CellType>) {
+                return T::cell_allocator.allocate_cell(*this);
+            }
+        }
+        return allocator_for_size(sizeof(T)).allocate_cell(*this);
+    }
+
+    void will_allocate(size_t);
     void find_min_and_max_block_addresses(FlatPtr& min_address, FlatPtr& max_address);
     void gather_roots(HashMap<Cell*, HeapRoot>&);
@@ -101,7 +113,16 @@ private:
     void finalize_unmarked_cells();
     void sweep_dead_cells(bool print_report, Core::ElapsedTimer const&);
-    CellAllocator& allocator_for_size(size_t);
+    ALWAYS_INLINE CellAllocator& allocator_for_size(size_t cell_size)
+    {
+        // FIXME: Use binary search?
+        for (auto& allocator : m_allocators) {
+            if (allocator->cell_size() >= cell_size)
+                return *allocator;
+        }
+        dbgln("Cannot get CellAllocator for cell size {}, largest available is {}!", cell_size, m_allocators.last()->cell_size());
+        VERIFY_NOT_REACHED();
+    }
     template<typename Callback>
     void for_each_block(Callback callback)
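
As an aside on the FIXME above: the linear scan already relies on m_allocators being ordered by ascending cell size, so a lower-bound binary search would be a drop-in replacement. A rough sketch, assuming m_allocators supports size() and operator[] (not part of this patch):

    ALWAYS_INLINE CellAllocator& allocator_for_size(size_t cell_size)
    {
        // Binary search for the first allocator whose cell_size() >= cell_size.
        size_t low = 0;
        size_t high = m_allocators.size();
        while (low < high) {
            size_t middle = low + (high - low) / 2;
            if (m_allocators[middle]->cell_size() >= cell_size)
                high = middle;
            else
                low = middle + 1;
        }
        if (low < m_allocators.size())
            return *m_allocators[low];
        dbgln("Cannot get CellAllocator for cell size {}, largest available is {}!", cell_size, m_allocators.last()->cell_size());
        VERIFY_NOT_REACHED();
    }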