diff --git a/Userland/Libraries/LibJIT/Assembler.h b/Userland/Libraries/LibJIT/Assembler.h
index fd3915cbe0..c8cb12083e 100644
--- a/Userland/Libraries/LibJIT/Assembler.h
+++ b/Userland/Libraries/LibJIT/Assembler.h
@@ -398,6 +398,15 @@ struct Assembler {
         label.add_jump(*this, m_output.size());
     }
 
+    void jump_if_overflow(Label& label)
+    {
+        // jo label (RIP-relative 32-bit offset)
+        emit8(0x0f);
+        emit8(0x80);
+        emit32(0xdeadbeef);
+        label.add_jump(*this, m_output.size());
+    }
+
     void sign_extend_32_to_64_bits(Reg reg)
     {
         // movsxd (reg as 64-bit), (reg as 32-bit)
@@ -551,6 +560,24 @@ struct Assembler {
         }
     }
 
+    void add32(Operand dst, Operand src)
+    {
+        if (dst.type == Operand::Type::Reg && to_underlying(dst.reg) < 8 && src.type == Operand::Type::Reg && to_underlying(src.reg) < 8) {
+            emit8(0x01);
+            emit8(0xc0 | (encode_reg(src.reg) << 3) | encode_reg(dst.reg));
+        } else if (dst.type == Operand::Type::Reg && to_underlying(dst.reg) < 8 && src.type == Operand::Type::Imm && src.fits_in_i8()) {
+            emit8(0x83);
+            emit8(0xc0 | encode_reg(dst.reg));
+            emit8(src.offset_or_immediate);
+        } else if (dst.type == Operand::Type::Reg && to_underlying(dst.reg) < 8 && src.type == Operand::Type::Imm && src.fits_in_i32()) {
+            emit8(0x81);
+            emit8(0xc0 | encode_reg(dst.reg));
+            emit32(src.offset_or_immediate);
+        } else {
+            VERIFY_NOT_REACHED();
+        }
+    }
+
     void sub(Operand dst, Operand src)
     {
         if (dst.type == Operand::Type::Reg && src.type == Operand::Type::Reg) {
diff --git a/Userland/Libraries/LibJS/JIT/Compiler.cpp b/Userland/Libraries/LibJS/JIT/Compiler.cpp
index 97bfd7999a..e804f8efe1 100644
--- a/Userland/Libraries/LibJS/JIT/Compiler.cpp
+++ b/Userland/Libraries/LibJS/JIT/Compiler.cpp
@@ -503,6 +503,49 @@ static ThrowCompletionOr<Value> typed_equals(VM&, Value src1, Value src2)
 JS_ENUMERATE_COMMON_BINARY_OPS_WITHOUT_FAST_PATH(DO_COMPILE_COMMON_BINARY_OP)
 #    undef DO_COMPILE_COMMON_BINARY_OP
 
+static Value cxx_add(VM& vm, Value lhs, Value rhs)
+{
+    return TRY_OR_SET_EXCEPTION(add(vm, lhs, rhs));
+}
+
+void Compiler::compile_add(Bytecode::Op::Add const& op)
+{
+    load_vm_register(ARG1, op.lhs());
+    load_vm_register(ARG2, Bytecode::Register::accumulator());
+
+    Assembler::Label end {};
+    Assembler::Label slow_case {};
+
+    branch_if_both_int32(ARG1, ARG2, [&] {
+        // GPR0 = ARG1 + ARG2 (32-bit)
+        m_assembler.mov(
+            Assembler::Operand::Register(GPR0),
+            Assembler::Operand::Register(ARG1));
+        m_assembler.add32(
+            Assembler::Operand::Register(GPR0),
+            Assembler::Operand::Register(ARG2));
+
+        // if (overflow) goto slow_case;
+        m_assembler.jump_if_overflow(slow_case);
+
+        // accumulator = GPR0 | SHIFTED_INT32_TAG;
+        m_assembler.mov(
+            Assembler::Operand::Register(GPR1),
+            Assembler::Operand::Imm(SHIFTED_INT32_TAG));
+        m_assembler.bitwise_or(
+            Assembler::Operand::Register(GPR0),
+            Assembler::Operand::Register(GPR1));
+        store_vm_register(Bytecode::Register::accumulator(), GPR0);
+        m_assembler.jump(end);
+    });
+
+    slow_case.link(m_assembler);
+    native_call((void*)cxx_add);
+    store_vm_register(Bytecode::Register::accumulator(), RET);
+    check_exception();
+    end.link(m_assembler);
+}
+
 static Value cxx_less_than(VM& vm, Value lhs, Value rhs)
 {
     return TRY_OR_SET_EXCEPTION(less_than(vm, lhs, rhs));
@@ -1257,6 +1300,9 @@ OwnPtr<NativeExecutable> Compiler::compile(Bytecode::Executable& bytecode_execut
     case Bytecode::Instruction::Type::SetVariable:
         compiler.compile_set_variable(static_cast<Bytecode::Op::SetVariable const&>(op));
         break;
+    case Bytecode::Instruction::Type::Add:
+        compiler.compile_add(static_cast<Bytecode::Op::Add const&>(op));
+        break;
     case Bytecode::Instruction::Type::LessThan:
         compiler.compile_less_than(static_cast<Bytecode::Op::LessThan const&>(op));
         break;
diff --git a/Userland/Libraries/LibJS/JIT/Compiler.h b/Userland/Libraries/LibJS/JIT/Compiler.h
index 8c4b08b458..ec1fda6f24 100644
--- a/Userland/Libraries/LibJS/JIT/Compiler.h
+++ b/Userland/Libraries/LibJS/JIT/Compiler.h
@@ -57,7 +57,6 @@ private:
     void compile_resolve_this_binding(Bytecode::Op::ResolveThisBinding const&);
 
 #    define JS_ENUMERATE_COMMON_BINARY_OPS_WITHOUT_FAST_PATH(O) \
-        O(Add, add)                                             \
         O(Sub, sub)                                             \
         O(Mul, mul)                                             \
         O(Div, div)                                             \
@@ -91,6 +90,7 @@ private:
     JS_ENUMERATE_COMMON_UNARY_OPS(DO_COMPILE_COMMON_UNARY_OP)
 #    undef DO_COMPILE_COMMON_UNARY_OP
 
+    void compile_add(Bytecode::Op::Add const&);
    void compile_less_than(Bytecode::Op::LessThan const&);
    void compile_return(Bytecode::Op::Return const&);
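
Reviewer note (not part of the patch): two x86-64 details make the fast path work. add32 emits no REX.W prefix, so the addition happens at 32-bit width: the overflow flag tested by the jo from jump_if_overflow reflects signed 32-bit overflow, and writing a 32-bit result zero-extends into the full 64-bit register, clearing the incoming tag bits before SHIFTED_INT32_TAG is OR'd back in. The 0xdeadbeef bytes are only a placeholder; the real RIP-relative offset is patched in when the label is linked (slow_case.link(m_assembler)). As a minimal sketch, the generated fast path behaves like the C++ below; fast_path_add is a hypothetical name, __builtin_add_overflow stands in for add32 + jo, and the tag constant here is a stand-in for the real SHIFTED_INT32_TAG defined in LibJS's Value.h.

    #include <cstdint>
    #include <cstdio>

    // Stand-in for LibJS's NaN-boxing Int32 tag (see Value.h for the real value).
    static constexpr uint64_t SHIFTED_INT32_TAG = 0x7ffe000000000000ULL;

    // C++ equivalent of the emitted fast path: a 32-bit add with an overflow
    // check, then OR-ing the Int32 tag over the zero-extended 32-bit sum.
    static bool fast_path_add(int32_t lhs, int32_t rhs, uint64_t& boxed)
    {
        int32_t sum = 0;
        if (__builtin_add_overflow(lhs, rhs, &sum))
            return false; // overflow: the JIT jumps to slow_case (cxx_add)
        boxed = SHIFTED_INT32_TAG | static_cast<uint32_t>(sum);
        return true;
    }

    int main()
    {
        uint64_t boxed = 0;
        if (fast_path_add(1, 2, boxed))
            printf("boxed Value bits: %#llx\n", static_cast<unsigned long long>(boxed));
        if (!fast_path_add(INT32_MAX, 1, boxed))
            printf("overflow -> slow path\n");
        return 0;
    }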