
LibWasm: Implement memory.init and passive mode data

Ali Mohammad Pur authored on 2021-12-04 17:56:58 +03:30, committed by Ali Mohammad Pur
parent aff2c59f37
commit b5ca290605
5 changed files with 88 additions and 11 deletions
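For context: memory.init copies bytes out of a data segment into a linear memory and traps on any out-of-bounds access, while passive data segments are kept around at instantiation (rather than being copied into memory right away) so that memory.init and data.drop can refer to them later. A minimal sketch of the copy semantics, assuming plain standalone containers rather than LibWasm's Store/DataInstance/MemoryInstance classes:

// Minimal sketch, not LibWasm code: memory.init semantics over plain vectors.
#include <cstdint>
#include <cstring>
#include <stdexcept>
#include <vector>

void memory_init(std::vector<uint8_t>& memory, std::vector<uint8_t> const& segment,
    uint32_t destination_offset, uint32_t source_offset, uint32_t count)
{
    // Widen to 64 bits so offset + count cannot wrap around before the comparison.
    if (static_cast<uint64_t>(source_offset) + count > segment.size()
        || static_cast<uint64_t>(destination_offset) + count > memory.size())
        throw std::runtime_error("memory.init out of bounds"); // stands in for a wasm trap
    std::memcpy(memory.data() + destination_offset, segment.data() + source_offset, count);
}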

View file

@@ -54,6 +54,13 @@ Optional<GlobalAddress> Store::allocate(GlobalType const& type, Value value)
return address;
}
Optional<DataAddress> Store::allocate_data(Vector<u8> initializer)
{
DataAddress address { m_datas.size() };
m_datas.append(DataInstance { move(initializer) });
return address;
}
Optional<ElementAddress> Store::allocate(ValueType const& type, Vector<Reference> references)
{
ElementAddress address { m_elements.size() };
@@ -101,6 +108,14 @@ ElementInstance* Store::get(ElementAddress address)
return &m_elements[value];
}
DataInstance* Store::get(DataAddress address)
{
auto value = address.value();
if (m_datas.size() <= value)
return nullptr;
return &m_datas[value];
}
ErrorOr<void, ValidationError> AbstractMachine::validate(Module& module)
{
if (module.validation_status() != Module::ValidationStatus::Unchecked) {
@@ -304,7 +319,7 @@ InstantiationResult AbstractMachine::instantiate(Module const& module, Vector<Ex
size_t offset = 0;
result.values().first().value().visit(
[&](auto const& value) { offset = value; },
[&](Reference const&) { instantiation_result = InstantiationError { "Data segment offset returned a reference" }; });
[&](Reference const&) { instantiation_result = InstantiationError { "Data segment offset returned a reference"sv }; });
if (instantiation_result.has_value() && instantiation_result->is_error())
return;
if (main_module_instance.memories().size() <= data.index.value()) {
@@ -314,6 +329,13 @@ InstantiationResult AbstractMachine::instantiate(Module const& module, Vector<Ex
};
return;
}
auto maybe_data_address = m_store.allocate_data(data.init);
if (!maybe_data_address.has_value()) {
instantiation_result = InstantiationError { "Failed to allocate a data instance for an active data segment"sv };
return;
}
main_module_instance.datas().append(*maybe_data_address);
if (data.init.is_empty())
return;
auto address = main_module_instance.memories()[data.index.value()];
@@ -332,8 +354,13 @@ InstantiationResult AbstractMachine::instantiate(Module const& module, Vector<Ex
instance->data().overwrite(offset, data.init.data(), data.init.size());
}
},
[&](DataSection::Data::Passive const&) {
// FIXME: What do we do here?
[&](DataSection::Data::Passive const& passive) {
auto maybe_data_address = m_store.allocate_data(passive.init);
if (!maybe_data_address.has_value()) {
instantiation_result = InstantiationError { "Failed to allocate a data instance for a passive data segment"sv };
return;
}
main_module_instance.datas().append(*maybe_data_address);
});
}
});

View file

@@ -34,6 +34,7 @@ TYPEDEF_DISTINCT_NUMERIC_GENERAL(u64, true, true, false, false, false, true, Ext
TYPEDEF_DISTINCT_NUMERIC_GENERAL(u64, true, true, false, false, false, true, TableAddress);
TYPEDEF_DISTINCT_NUMERIC_GENERAL(u64, true, true, false, false, false, true, GlobalAddress);
TYPEDEF_DISTINCT_NUMERIC_GENERAL(u64, true, true, false, false, false, true, ElementAddress);
TYPEDEF_DISTINCT_NUMERIC_GENERAL(u64, true, true, false, false, false, true, DataAddress);
TYPEDEF_DISTINCT_NUMERIC_GENERAL(u64, true, true, false, false, false, true, MemoryAddress);
// FIXME: These should probably be made generic/virtual if/when we decide to do something more
@@ -214,12 +215,14 @@ class ModuleInstance {
public:
explicit ModuleInstance(
Vector<FunctionType> types, Vector<FunctionAddress> function_addresses, Vector<TableAddress> table_addresses,
Vector<MemoryAddress> memory_addresses, Vector<GlobalAddress> global_addresses, Vector<ExportInstance> exports)
Vector<MemoryAddress> memory_addresses, Vector<GlobalAddress> global_addresses, Vector<DataAddress> data_addresses,
Vector<ExportInstance> exports)
: m_types(move(types))
, m_functions(move(function_addresses))
, m_tables(move(table_addresses))
, m_memories(move(memory_addresses))
, m_globals(move(global_addresses))
, m_datas(move(data_addresses))
, m_exports(move(exports))
{
}
@@ -232,6 +235,7 @@ public:
auto& memories() const { return m_memories; }
auto& globals() const { return m_globals; }
auto& elements() const { return m_elements; }
auto& datas() const { return m_datas; }
auto& exports() const { return m_exports; }
auto& types() { return m_types; }
@@ -240,6 +244,7 @@ public:
auto& memories() { return m_memories; }
auto& globals() { return m_globals; }
auto& elements() { return m_elements; }
auto& datas() { return m_datas; }
auto& exports() { return m_exports; }
private:
@@ -249,6 +254,7 @@ private:
Vector<MemoryAddress> m_memories;
Vector<GlobalAddress> m_globals;
Vector<ElementAddress> m_elements;
Vector<DataAddress> m_datas;
Vector<ExportInstance> m_exports;
};
@@ -385,6 +391,22 @@ private:
Value m_value;
};
class DataInstance {
public:
explicit DataInstance(Vector<u8> data)
: m_data(move(data))
{
}
size_t size() const { return m_data.size(); }
Vector<u8>& data() { return m_data; }
Vector<u8> const& data() const { return m_data; }
private:
Vector<u8> m_data;
};
class ElementInstance {
public:
explicit ElementInstance(ValueType type, Vector<Reference> references)
@@ -409,6 +431,7 @@ public:
Optional<FunctionAddress> allocate(HostFunction&&);
Optional<TableAddress> allocate(TableType const&);
Optional<MemoryAddress> allocate(MemoryType const&);
Optional<DataAddress> allocate_data(Vector<u8>);
Optional<GlobalAddress> allocate(GlobalType const&, Value);
Optional<ElementAddress> allocate(ValueType const&, Vector<Reference>);
@@ -416,6 +439,7 @@ public:
TableInstance* get(TableAddress);
MemoryInstance* get(MemoryAddress);
GlobalInstance* get(GlobalAddress);
DataInstance* get(DataAddress);
ElementInstance* get(ElementAddress);
private:
@@ -424,6 +448,7 @@ private:
Vector<MemoryInstance> m_memories;
Vector<GlobalInstance> m_globals;
Vector<ElementInstance> m_elements;
Vector<DataInstance> m_datas;
};
class Label {

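Putting the new Store and ModuleInstance pieces together, a hedged usage sketch (the setup of store and module_instance and the literal bytes are assumed; the method names are the ones added above):

// One DataInstance is allocated per segment; the module instance remembers its
// address, and the interpreter later resolves it back through the store.
auto maybe_address = store.allocate_data({ 0x01, 0x02, 0x03 });
if (maybe_address.has_value()) {
    module_instance.datas().append(*maybe_address);
    if (auto* data_instance = store.get(*maybe_address))
        dbgln("data segment holds {} bytes", data_instance->size());
}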
View file

@@ -227,17 +227,17 @@ void BytecodeInterpreter::pop_and_store(Configuration& configuration, Instructio
auto entry = configuration.stack().pop();
auto value = ConvertToRaw<StoreT> {}(*entry.get<Value>().to<PopT>());
dbgln_if(WASM_TRACE_DEBUG, "stack({}) -> temporary({}b)", value, sizeof(StoreT));
store_to_memory(configuration, instruction, { &value, sizeof(StoreT) });
auto base_entry = configuration.stack().pop();
auto base = base_entry.get<Value>().to<i32>();
store_to_memory(configuration, instruction, { &value, sizeof(StoreT) }, *base);
}
void BytecodeInterpreter::store_to_memory(Configuration& configuration, Instruction const& instruction, ReadonlyBytes data)
void BytecodeInterpreter::store_to_memory(Configuration& configuration, Instruction const& instruction, ReadonlyBytes data, i32 base)
{
auto& address = configuration.frame().module().memories().first();
auto memory = configuration.store().get(address);
auto& arg = instruction.arguments().get<Instruction::MemoryArgument>();
auto entry = configuration.stack().pop();
auto base = entry.get<Value>().to<i32>();
u64 instance_address = static_cast<u64>(bit_cast<u32>(base.value())) + arg.offset;
u64 instance_address = static_cast<u64>(bit_cast<u32>(base)) + arg.offset;
Checked addition { instance_address };
addition += data.size();
if (addition.has_overflow() || addition.value() > memory->size()) {
@@ -888,7 +888,29 @@ void BytecodeInterpreter::interpret(Configuration& configuration, InstructionPoi
return unary_operation<double, i64, Operators::SaturatingTruncate<i64>>(configuration);
case Instructions::i64_trunc_sat_f64_u.value():
return unary_operation<double, i64, Operators::SaturatingTruncate<u64>>(configuration);
case Instructions::memory_init.value():
case Instructions::memory_init.value(): {
auto data_index = instruction.arguments().get<DataIndex>();
auto& data_address = configuration.frame().module().datas()[data_index.value()];
auto& data = *configuration.store().get(data_address);
auto count = *configuration.stack().pop().get<Value>().to<i32>();
auto source_offset = *configuration.stack().pop().get<Value>().to<i32>();
auto destination_offset = *configuration.stack().pop().get<Value>().to<i32>();
TRAP_IF_NOT(count > 0);
TRAP_IF_NOT(source_offset + count > 0);
TRAP_IF_NOT(static_cast<size_t>(source_offset + count) <= data.size());
Instruction synthetic_store_instruction {
Instructions::i32_store8,
Instruction::MemoryArgument { 0, 0 }
};
for (size_t i = 0; i < (size_t)count; ++i) {
auto value = data.data()[source_offset + i];
store_to_memory(configuration, synthetic_store_instruction, { &value, sizeof(value) }, destination_offset + i);
}
return;
}
case Instructions::data_drop.value():
case Instructions::memory_copy.value():
case Instructions::memory_fill.value():

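Two details worth noting in the interpreter change above: store_to_memory now takes the base address as an explicit parameter instead of popping it itself, which is what lets the memory.init case drive it with computed per-byte addresses, and the operands arrive on the stack as destination, source, count, so they pop in reverse. A small standalone sketch of that pop order (hypothetical stack type, not LibWasm's Configuration stack):

// Hypothetical value stack standing in for Configuration's stack; memory.init's
// operands are pushed as (destination, source, count) and therefore pop in reverse.
#include <cstdint>
#include <stack>

struct MemoryInitOperands {
    uint32_t destination_offset { 0 };
    uint32_t source_offset { 0 };
    uint32_t count { 0 };
};

MemoryInitOperands pop_memory_init_operands(std::stack<uint32_t>& stack)
{
    MemoryInitOperands operands;
    operands.count = stack.top(); stack.pop();
    operands.source_offset = stack.top(); stack.pop();
    operands.destination_offset = stack.top(); stack.pop();
    return operands;
}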
View file

@@ -39,7 +39,7 @@ protected:
void load_and_push(Configuration&, Instruction const&);
template<typename PopT, typename StoreT>
void pop_and_store(Configuration&, Instruction const&);
void store_to_memory(Configuration&, Instruction const&, ReadonlyBytes data);
void store_to_memory(Configuration&, Instruction const&, ReadonlyBytes data, i32 base);
void call_address(Configuration&, FunctionAddress);
template<typename PopType, typename PushType, typename Operator>

View file

@@ -103,6 +103,9 @@ ErrorOr<void, ValidationError> Validator::validate(Module& module)
for (auto& segment : section.segments())
m_context.elements.unchecked_append(segment.type);
});
module.for_each_section_of_type<DataSection>([this](DataSection const& section) {
m_context.datas.resize(section.data().size());
});
// FIXME: C.refs is the set funcidx(module with funcs=ϵ with start=ϵ),
// i.e., the set of function indices occurring in the module, except in its functions or start function.