
LibX86+UserspaceEmulator: Introduce AddressSize and OperandSize enums

These replace the bools a32 and o32, which will make implementing
64-bit sizes possible. :^)
Simon Wanner 2022-03-24 23:01:11 +01:00 committed by Andreas Kling
parent 7cd43deb28
commit a7268c3c74
6 changed files with 276 additions and 152 deletions
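For orientation, here is a minimal sketch of what the two new enums presumably look like. The defining LibX86 header is among the six changed files but its hunks are not reproduced below, so the underlying type and any enumerators beyond the Size16/Size32 values used in these hunks are assumptions:

namespace X86 {

// Sketch only: Size16 and Size32 are the values that appear in the hunks below;
// the commit message implies a Size64 value will follow for the 64-bit work.
enum class OperandSize {
    Size16,
    Size32,
};

enum class AddressSize {
    Size16,
    Size32,
};

}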


@@ -234,7 +234,7 @@ int Emulator::exec()
while (!m_shutdown) {
if (m_steps_til_pause) [[likely]] {
m_cpu->save_base_eip();
auto insn = X86::Instruction::from_stream(*m_cpu, true, true);
auto insn = X86::Instruction::from_stream(*m_cpu, X86::OperandSize::Size32, X86::AddressSize::Size32);
// Exec cycle
if constexpr (trace) {
outln("{:p} \033[33;1m{}\033[0m", m_cpu->base_eip(), insn.to_string(m_cpu->base_eip(), symbol_provider));
@@ -301,7 +301,7 @@ void Emulator::handle_repl()
// FIXME: Function names (base, call, jump)
auto saved_eip = m_cpu->eip();
m_cpu->save_base_eip();
auto insn = X86::Instruction::from_stream(*m_cpu, true, true);
auto insn = X86::Instruction::from_stream(*m_cpu, X86::OperandSize::Size32, X86::AddressSize::Size32);
// FIXME: This does not respect inlining
// another way of getting the current function is at need
if (auto symbol = symbol_at(m_cpu->base_eip()); symbol.has_value()) {
@@ -311,7 +311,7 @@ void Emulator::handle_repl()
outln("==> {}", create_instruction_line(m_cpu->base_eip(), insn));
for (int i = 0; i < 7; ++i) {
m_cpu->save_base_eip();
insn = X86::Instruction::from_stream(*m_cpu, true, true);
insn = X86::Instruction::from_stream(*m_cpu, X86::OperandSize::Size32, X86::AddressSize::Size32);
outln(" {}", create_instruction_line(m_cpu->base_eip(), insn));
}
// We don't want to increase EIP here, we just want the instructions
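The matching LibX86 change is not shown in this view; presumably Instruction::from_stream() now takes the two enums in place of the old o32/a32 booleans, roughly as in this sketch (the signature is assumed, not copied from the commit):

// Assumed shape of the decoder entry point after this commit.
// The call sites above pass Size32/Size32 (32-bit protected mode),
// where the old call sites passed (stream, true, true).
static Instruction from_stream(InstructionStream&, OperandSize, AddressSize);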


@@ -258,9 +258,9 @@ void SoftCPU::do_once_or_repeat(const X86::Instruction& insn, Callback callback)
if (!insn.has_rep_prefix())
return callback();
while (loop_index(insn.a32()).value()) {
while (loop_index(insn.address_size()).value()) {
callback();
decrement_loop_index(insn.a32());
decrement_loop_index(insn.address_size());
if constexpr (check_zf) {
warn_if_flags_tainted("repz/repnz");
if (insn.rep_prefix() == X86::Prefix::REPZ && !zf())
@@ -1259,11 +1259,11 @@ ALWAYS_INLINE static void do_cmps(SoftCPU& cpu, const X86::Instruction& insn)
{
auto src_segment = cpu.segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS));
cpu.do_once_or_repeat<true>(insn, [&] {
auto src = cpu.read_memory<T>({ src_segment, cpu.source_index(insn.a32()).value() });
auto dest = cpu.read_memory<T>({ cpu.es(), cpu.destination_index(insn.a32()).value() });
auto src = cpu.read_memory<T>({ src_segment, cpu.source_index(insn.address_size()).value() });
auto dest = cpu.read_memory<T>({ cpu.es(), cpu.destination_index(insn.address_size()).value() });
op_sub(cpu, dest, src);
cpu.step_source_index(insn.a32(), sizeof(T));
cpu.step_destination_index(insn.a32(), sizeof(T));
cpu.step_source_index(insn.address_size(), sizeof(T));
cpu.step_destination_index(insn.address_size(), sizeof(T));
});
}
@@ -1771,14 +1771,19 @@ void SoftCPU::IRET(const X86::Instruction&) { TODO_INSN(); }
void SoftCPU::JCXZ_imm8(const X86::Instruction& insn)
{
if (insn.a32()) {
switch (insn.address_size()) {
case X86::AddressSize::Size32:
warn_if_uninitialized(ecx(), "jecxz imm8");
if (ecx().value() == 0)
set_eip(eip() + (i8)insn.imm8());
} else {
break;
case X86::AddressSize::Size16:
warn_if_uninitialized(cx(), "jcxz imm8");
if (cx().value() == 0)
set_eip(eip() + (i8)insn.imm8());
break;
default:
VERIFY_NOT_REACHED();
}
}
@@ -1865,9 +1870,9 @@ ALWAYS_INLINE static void do_lods(SoftCPU& cpu, const X86::Instruction& insn)
{
auto src_segment = cpu.segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS));
cpu.do_once_or_repeat<true>(insn, [&] {
auto src = cpu.read_memory<T>({ src_segment, cpu.source_index(insn.a32()).value() });
auto src = cpu.read_memory<T>({ src_segment, cpu.source_index(insn.address_size()).value() });
cpu.gpr<T>(X86::RegisterAL) = src;
cpu.step_source_index(insn.a32(), sizeof(T));
cpu.step_source_index(insn.address_size(), sizeof(T));
});
}
@@ -1889,40 +1894,55 @@ void SoftCPU::LODSW(const X86::Instruction& insn)
void SoftCPU::LOOPNZ_imm8(const X86::Instruction& insn)
{
warn_if_flags_tainted("loopnz");
if (insn.a32()) {
switch (insn.address_size()) {
case X86::AddressSize::Size32:
set_ecx({ ecx().value() - 1, ecx().shadow() });
if (ecx().value() != 0 && !zf())
set_eip(eip() + (i8)insn.imm8());
} else {
break;
case X86::AddressSize::Size16:
set_cx({ (u16)(cx().value() - 1), cx().shadow() });
if (cx().value() != 0 && !zf())
set_eip(eip() + (i8)insn.imm8());
break;
default:
VERIFY_NOT_REACHED();
}
}
void SoftCPU::LOOPZ_imm8(const X86::Instruction& insn)
{
warn_if_flags_tainted("loopz");
if (insn.a32()) {
switch (insn.address_size()) {
case X86::AddressSize::Size32:
set_ecx({ ecx().value() - 1, ecx().shadow() });
if (ecx().value() != 0 && zf())
set_eip(eip() + (i8)insn.imm8());
} else {
break;
case X86::AddressSize::Size16:
set_cx({ (u16)(cx().value() - 1), cx().shadow() });
if (cx().value() != 0 && zf())
set_eip(eip() + (i8)insn.imm8());
break;
default:
VERIFY_NOT_REACHED();
}
}
void SoftCPU::LOOP_imm8(const X86::Instruction& insn)
{
if (insn.a32()) {
switch (insn.address_size()) {
case X86::AddressSize::Size32:
set_ecx({ ecx().value() - 1, ecx().shadow() });
if (ecx().value() != 0)
set_eip(eip() + (i8)insn.imm8());
} else {
break;
case X86::AddressSize::Size16:
set_cx({ (u16)(cx().value() - 1), cx().shadow() });
if (cx().value() != 0)
set_eip(eip() + (i8)insn.imm8());
break;
default:
VERIFY_NOT_REACHED();
}
}
@@ -1937,10 +1957,10 @@ ALWAYS_INLINE static void do_movs(SoftCPU& cpu, const X86::Instruction& insn)
{
auto src_segment = cpu.segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS));
cpu.do_once_or_repeat<false>(insn, [&] {
auto src = cpu.read_memory<T>({ src_segment, cpu.source_index(insn.a32()).value() });
cpu.write_memory<T>({ cpu.es(), cpu.destination_index(insn.a32()).value() }, src);
cpu.step_source_index(insn.a32(), sizeof(T));
cpu.step_destination_index(insn.a32(), sizeof(T));
auto src = cpu.read_memory<T>({ src_segment, cpu.source_index(insn.address_size()).value() });
cpu.write_memory<T>({ cpu.es(), cpu.destination_index(insn.address_size()).value() }, src);
cpu.step_source_index(insn.address_size(), sizeof(T));
cpu.step_destination_index(insn.address_size(), sizeof(T));
});
}
@@ -2638,9 +2658,9 @@ ALWAYS_INLINE static void do_scas(SoftCPU& cpu, const X86::Instruction& insn)
{
cpu.do_once_or_repeat<true>(insn, [&] {
auto src = cpu.const_gpr<T>(X86::RegisterAL);
auto dest = cpu.read_memory<T>({ cpu.es(), cpu.destination_index(insn.a32()).value() });
auto dest = cpu.read_memory<T>({ cpu.es(), cpu.destination_index(insn.address_size()).value() });
op_sub(cpu, dest, src);
cpu.step_destination_index(insn.a32(), sizeof(T));
cpu.step_destination_index(insn.address_size(), sizeof(T));
});
}
@@ -2731,23 +2751,28 @@ void SoftCPU::STOSB(const X86::Instruction& insn)
{
if (insn.has_rep_prefix() && !df()) {
// Fast path for 8-bit forward memory fill.
if (m_emulator.mmu().fast_fill_memory8({ es(), destination_index(insn.a32()).value() }, ecx().value(), al())) {
if (insn.a32()) {
if (m_emulator.mmu().fast_fill_memory8({ es(), destination_index(insn.address_size()).value() }, ecx().value(), al())) {
switch (insn.address_size()) {
case X86::AddressSize::Size32:
// FIXME: Should an uninitialized ECX taint EDI here?
set_edi({ (u32)(edi().value() + ecx().value()), edi().shadow() });
set_ecx(shadow_wrap_as_initialized<u32>(0));
} else {
break;
case X86::AddressSize::Size16:
// FIXME: Should an uninitialized CX taint DI here?
set_di({ (u16)(di().value() + cx().value()), di().shadow() });
set_cx(shadow_wrap_as_initialized<u16>(0));
break;
default:
VERIFY_NOT_REACHED();
}
return;
}
}
do_once_or_repeat<false>(insn, [&] {
write_memory8({ es(), destination_index(insn.a32()).value() }, al());
step_destination_index(insn.a32(), 1);
write_memory8({ es(), destination_index(insn.address_size()).value() }, al());
step_destination_index(insn.address_size(), 1);
});
}
@@ -2755,31 +2780,36 @@ void SoftCPU::STOSD(const X86::Instruction& insn)
{
if (insn.has_rep_prefix() && !df()) {
// Fast path for 32-bit forward memory fill.
if (m_emulator.mmu().fast_fill_memory32({ es(), destination_index(insn.a32()).value() }, ecx().value(), eax())) {
if (insn.a32()) {
if (m_emulator.mmu().fast_fill_memory32({ es(), destination_index(insn.address_size()).value() }, ecx().value(), eax())) {
switch (insn.address_size()) {
case X86::AddressSize::Size32:
// FIXME: Should an uninitialized ECX taint EDI here?
set_edi({ (u32)(edi().value() + (ecx().value() * sizeof(u32))), edi().shadow() });
set_ecx(shadow_wrap_as_initialized<u32>(0));
} else {
break;
case X86::AddressSize::Size16:
// FIXME: Should an uninitialized CX taint DI here?
set_di({ (u16)(di().value() + (cx().value() * sizeof(u32))), di().shadow() });
set_cx(shadow_wrap_as_initialized<u16>(0));
break;
default:
VERIFY_NOT_REACHED();
}
return;
}
}
do_once_or_repeat<false>(insn, [&] {
write_memory32({ es(), destination_index(insn.a32()).value() }, eax());
step_destination_index(insn.a32(), 4);
write_memory32({ es(), destination_index(insn.address_size()).value() }, eax());
step_destination_index(insn.address_size(), 4);
});
}
void SoftCPU::STOSW(const X86::Instruction& insn)
{
do_once_or_repeat<false>(insn, [&] {
write_memory16({ es(), destination_index(insn.a32()).value() }, ax());
step_destination_index(insn.a32(), 2);
write_memory16({ es(), destination_index(insn.address_size()).value() }, ax());
step_destination_index(insn.address_size(), 2);
});
}
@@ -2856,12 +2886,20 @@ void SoftCPU::XCHG_reg8_RM8(const X86::Instruction& insn)
void SoftCPU::XLAT(const X86::Instruction& insn)
{
if (insn.a32())
u32 offset;
switch (insn.address_size()) {
case X86::AddressSize::Size32:
warn_if_uninitialized(ebx(), "xlat ebx");
else
offset = ebx().value() + al().value();
break;
case X86::AddressSize::Size16:
warn_if_uninitialized(bx(), "xlat bx");
offset = bx().value() + al().value();
break;
default:
VERIFY_NOT_REACHED();
}
warn_if_uninitialized(al(), "xlat al");
u32 offset = (insn.a32() ? ebx().value() : bx().value()) + al().value();
set_al(read_memory8({ segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS)), offset }));
}


@@ -175,64 +175,83 @@ public:
return gpr32((X86::RegisterIndex32)register_index);
}
ValueWithShadow<u32> source_index(bool a32) const
ValueWithShadow<u32> source_index(X86::AddressSize address_size) const
{
if (a32)
if (address_size == X86::AddressSize::Size32)
return esi();
return { si().value(), (u32)si().shadow_as_value() & 0xffff };
if (address_size == X86::AddressSize::Size16)
return { si().value(), (u32)si().shadow_as_value() & 0xffff };
VERIFY_NOT_REACHED();
}
ValueWithShadow<u32> destination_index(bool a32) const
ValueWithShadow<u32> destination_index(X86::AddressSize address_size) const
{
if (a32)
if (address_size == X86::AddressSize::Size32)
return edi();
return { di().value(), (u32)di().shadow_as_value() & 0xffff };
if (address_size == X86::AddressSize::Size16)
return { di().value(), (u32)di().shadow_as_value() & 0xffff };
VERIFY_NOT_REACHED();
}
ValueWithShadow<u32> loop_index(bool a32) const
ValueWithShadow<u32> loop_index(X86::AddressSize address_size) const
{
if (a32)
if (address_size == X86::AddressSize::Size32)
return ecx();
return { cx().value(), (u32)cx().shadow_as_value() & 0xffff };
if (address_size == X86::AddressSize::Size16)
return { cx().value(), (u32)cx().shadow_as_value() & 0xffff };
VERIFY_NOT_REACHED();
}
bool decrement_loop_index(bool a32)
bool decrement_loop_index(X86::AddressSize address_size)
{
if (a32) {
switch (address_size) {
case X86::AddressSize::Size32:
set_ecx({ ecx().value() - 1, ecx().shadow() });
return ecx().value() == 0;
case X86::AddressSize::Size16:
set_cx(ValueWithShadow<u16>(cx().value() - 1, cx().shadow()));
return cx().value() == 0;
}
set_cx(ValueWithShadow<u16>(cx().value() - 1, cx().shadow()));
return cx().value() == 0;
VERIFY_NOT_REACHED();
}
ALWAYS_INLINE void step_source_index(bool a32, u32 step)
ALWAYS_INLINE void step_source_index(X86::AddressSize address_size, u32 step)
{
if (a32) {
switch (address_size) {
case X86::AddressSize::Size32:
if (df())
set_esi({ esi().value() - step, esi().shadow() });
else
set_esi({ esi().value() + step, esi().shadow() });
} else {
break;
case X86::AddressSize::Size16:
if (df())
set_si(ValueWithShadow<u16>(si().value() - step, si().shadow()));
else
set_si(ValueWithShadow<u16>(si().value() + step, si().shadow()));
break;
default:
VERIFY_NOT_REACHED();
}
}
ALWAYS_INLINE void step_destination_index(bool a32, u32 step)
ALWAYS_INLINE void step_destination_index(X86::AddressSize address_size, u32 step)
{
if (a32) {
switch (address_size) {
case X86::AddressSize::Size32:
if (df())
set_edi({ edi().value() - step, edi().shadow() });
else
set_edi({ edi().value() + step, edi().shadow() });
} else {
break;
case X86::AddressSize::Size16:
if (df())
set_di(ValueWithShadow<u16>(di().value() - step, di().shadow()));
else
set_di(ValueWithShadow<u16>(di().value() + step, di().shadow()));
break;
default:
VERIFY_NOT_REACHED();
}
}
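For readers of the hunks above: the per-instruction query changes from the boolean insn.a32() to insn.address_size(). A hedged sketch of how the two presumably relate on X86::Instruction (accessor and member names assumed; the real declarations live in LibX86):

// Sketch, not the actual LibX86 declaration.
AddressSize address_size() const { return m_address_size; }
OperandSize operand_size() const { return m_operand_size; }

// The old boolean queries map onto the enums as:
//     a32() == (address_size() == AddressSize::Size32)
//     o32() == (operand_size() == OperandSize::Size32)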