Mirror of https://github.com/RGBCube/serenity, synced 2025-07-27 21:27:45 +00:00
Everywhere: Replace dbgln<flag>(...) with dbgln_if(flag, ...)
Replacement made by `find Kernel Userland -name '*.h' -o -name '*.cpp' | sed -i -Ee 's/dbgln\b<(\w+)>\(/dbgln_if(\1, /g'`
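The change at each call site is purely syntactic: the debug flag moves from a template argument of `dbgln` to the first ordinary argument of `dbgln_if`, and the message still compiles away when the flag is disabled. As a minimal sketch (not the actual AK implementation, which is not part of this diff), assuming the per-subsystem flags such as `GZIP_DEBUG` and `TLS_DEBUG` are compile-time boolean constants, `dbgln_if` can be thought of as:

```cpp
// Hypothetical sketch only: the real definition lives in AK and is not shown here.
#define dbgln_if(flag, fmt, ...)       \
    do {                               \
        if constexpr (flag)            \
            dbgln(fmt, ##__VA_ARGS__); \
    } while (0)

// Call sites before and after this commit:
//   dbgln<GZIP_DEBUG>("Gzip::decompress: size={}", data.size());    // old
//   dbgln_if(GZIP_DEBUG, "Gzip::decompress: size={}", data.size()); // new
```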
This commit is contained in:
parent 1f8a633cc7
commit 09a43969ba
95 changed files with 427 additions and 425 deletions
@@ -93,13 +93,13 @@ void __cxa_finalize(void* dso_handle)

     ssize_t entry_index = atexit_entry_count;

-    dbgln<GLOBAL_DTORS_DEBUG>("__cxa_finalize: {} entries in the finalizer list", entry_index);
+    dbgln_if(GLOBAL_DTORS_DEBUG, "__cxa_finalize: {} entries in the finalizer list", entry_index);

     while (--entry_index >= 0) {
         auto& exit_entry = atexit_entries[entry_index];
         bool needs_calling = !exit_entry.has_been_called && (!dso_handle || dso_handle == exit_entry.dso_handle);
         if (needs_calling) {
-            dbgln<GLOBAL_DTORS_DEBUG>("__cxa_finalize: calling entry[{}] {:p}({:p}) dso: {:p}", entry_index, exit_entry.method, exit_entry.parameter, exit_entry.dso_handle);
+            dbgln_if(GLOBAL_DTORS_DEBUG, "__cxa_finalize: calling entry[{}] {:p}({:p}) dso: {:p}", entry_index, exit_entry.method, exit_entry.parameter, exit_entry.dso_handle);
             exit_entry.method(exit_entry.parameter);
             unlock_atexit_handlers();
             exit_entry.has_been_called = true;

@@ -102,7 +102,7 @@ static Optional<ByteBuffer> get_gzip_payload(const ByteBuffer& data)
     }

     auto new_size = data.size() - current;
-    dbgln<GZIP_DEBUG>("get_gzip_payload: Returning slice from {} with size {}", current, new_size);
+    dbgln_if(GZIP_DEBUG, "get_gzip_payload: Returning slice from {} with size {}", current, new_size);
     return data.slice(current, new_size);
 }

@@ -110,7 +110,7 @@ Optional<ByteBuffer> Gzip::decompress(const ByteBuffer& data)
 {
     ASSERT(is_compressed(data));

-    dbgln<GZIP_DEBUG>("Gzip::decompress: Decompressing gzip compressed data. size={}", data.size());
+    dbgln_if(GZIP_DEBUG, "Gzip::decompress: Decompressing gzip compressed data. size={}", data.size());
     auto optional_payload = get_gzip_payload(data);
     if (!optional_payload.has_value()) {
         return Optional<ByteBuffer>();

@@ -55,7 +55,7 @@ void NetworkJob::did_finish(NonnullRefPtr<NetworkResponse>&& response)
     NonnullRefPtr<NetworkJob> protector(*this);

     m_response = move(response);
-    dbgln<CNETWORKJOB_DEBUG>("{} job did_finish", *this);
+    dbgln_if(CNETWORKJOB_DEBUG, "{} job did_finish", *this);
     ASSERT(on_finish);
     on_finish(true);
     shutdown();

@@ -79,7 +79,7 @@ bool Socket::connect(const String& hostname, int port)
     }

     IPv4Address host_address((const u8*)hostent->h_addr_list[0]);
-    dbgln<CSOCKET_DEBUG>("Socket::connect: Resolved '{}' to {}", hostname, host_address);
+    dbgln_if(CSOCKET_DEBUG, "Socket::connect: Resolved '{}' to {}", hostname, host_address);
     return connect(host_address, port);
 }

@@ -98,7 +98,7 @@ bool Socket::connect(const SocketAddress& address, int port)
 {
     ASSERT(!is_connected());
     ASSERT(address.type() == SocketAddress::Type::IPv4);
-    dbgln<CSOCKET_DEBUG>("{} connecting to {}...", *this, address);
+    dbgln_if(CSOCKET_DEBUG, "{} connecting to {}...", *this, address);

     ASSERT(port > 0 && port <= 65535);

@@ -119,7 +119,7 @@ bool Socket::connect(const SocketAddress& address)
 {
     ASSERT(!is_connected());
     ASSERT(address.type() == SocketAddress::Type::Local);
-    dbgln<CSOCKET_DEBUG>("{} connecting to {}...", *this, address);
+    dbgln_if(CSOCKET_DEBUG, "{} connecting to {}...", *this, address);

     sockaddr_un saddr;
     saddr.sun_family = AF_LOCAL;
@@ -138,7 +138,7 @@ bool Socket::connect(const SocketAddress& address)
 bool Socket::common_connect(const struct sockaddr* addr, socklen_t addrlen)
 {
     auto connected = [this] {
-        dbgln<CSOCKET_DEBUG>("{} connected!", *this);
+        dbgln_if(CSOCKET_DEBUG, "{} connected!", *this);
         if (!m_connected) {
             m_connected = true;
             ensure_read_notifier();
@@ -153,7 +153,7 @@ bool Socket::common_connect(const struct sockaddr* addr, socklen_t addrlen)
     int rc = ::connect(fd(), addr, addrlen);
     if (rc < 0) {
         if (errno == EINPROGRESS) {
-            dbgln<CSOCKET_DEBUG>("{} connection in progress (EINPROGRESS)", *this);
+            dbgln_if(CSOCKET_DEBUG, "{} connection in progress (EINPROGRESS)", *this);
             m_notifier = Notifier::construct(fd(), Notifier::Event::Write, this);
             m_notifier->on_ready_to_write = move(connected);
             return true;
@@ -163,7 +163,7 @@ bool Socket::common_connect(const struct sockaddr* addr, socklen_t addrlen)
         errno = saved_errno;
         return false;
     }
-    dbgln<CSOCKET_DEBUG>("{} connected ok!", *this);
+    dbgln_if(CSOCKET_DEBUG, "{} connected ok!", *this);
     connected();
     return true;
 }

@@ -44,7 +44,7 @@ inline int safe_syscall(Syscall syscall, Args&&... args)
     if (sysret == -1) {
         if constexpr (SAFE_SYSCALL_DEBUG) {
             int saved_errno = errno;
-            dbgln<SAFE_SYSCALL_DEBUG>("Core::safe_syscall: {} ({}: {})", sysret, saved_errno, strerror(saved_errno));
+            dbgln_if(SAFE_SYSCALL_DEBUG, "Core::safe_syscall: {} ({}: {})", sysret, saved_errno, strerror(saved_errno));
         }

         if (errno == EINTR)

@@ -83,7 +83,7 @@ void SyntaxHighlighter::rehighlight(Gfx::Palette palette)

     Vector<GUI::TextDocumentSpan> spans;
     for (auto& token : tokens) {
-        dbgln<SYNTAX_HIGHLIGHTING_DEBUG>("{} @ {}:{} - {}:{}", token.to_string(), token.m_start.line, token.m_start.column, token.m_end.line, token.m_end.column);
+        dbgln_if(SYNTAX_HIGHLIGHTING_DEBUG, "{} @ {}:{} - {}:{}", token.to_string(), token.m_start.line, token.m_start.column, token.m_end.line, token.m_end.column);
         GUI::TextDocumentSpan span;
         span.range.set_start({ token.m_start.line, token.m_start.column });
         span.range.set_end({ token.m_end.line, token.m_end.column });

@@ -100,7 +100,7 @@ GHash::TagType GHash::process(ReadonlyBytes aad, ReadonlyBytes cipher)
     tag[2] ^= high(cipher_bits);
     tag[3] ^= low(cipher_bits);

-    dbgln<GHASH_PROCESS_DEBUG>("Tag bits: {} : {} : {} : {}", tag[0], tag[1], tag[2], tag[3]);
+    dbgln_if(GHASH_PROCESS_DEBUG, "Tag bits: {} : {} : {} : {}", tag[0], tag[1], tag[2], tag[3]);

     galois_multiply(tag, m_key, tag);

@@ -231,7 +231,7 @@ UnsignedBigInteger LCM(const UnsignedBigInteger& a, const UnsignedBigInteger& b)
     UnsignedBigInteger::divide_without_allocation(a, gcd_output, temp_1, temp_2, temp_3, temp_4, temp_quotient, temp_remainder);
     UnsignedBigInteger::multiply_without_allocation(temp_quotient, b, temp_1, temp_2, temp_3, temp_4, output);

-    dbgln<NT_DEBUG>("quot: {} rem: {} out: {}", temp_quotient, temp_remainder, output);
+    dbgln_if(NT_DEBUG, "quot: {} rem: {} out: {}", temp_quotient, temp_remainder, output);

     return output;
 }

@@ -116,7 +116,7 @@ RSA::KeyPairType RSA::parse_rsa_key(ReadonlyBytes in)

 void RSA::encrypt(ReadonlyBytes in, Bytes& out)
 {
-    dbgln<CRYPTO_DEBUG>("in size: {}", in.size());
+    dbgln_if(CRYPTO_DEBUG, "in size: {}", in.size());
     auto in_integer = UnsignedBigInteger::import_data(in.data(), in.size());
     if (!(in_integer < m_public_key.modulus())) {
         dbgln("value too large for key");
@@ -230,7 +230,7 @@ VerificationConsistency RSA_EMSA_PSS<HashFunction>::verify(ReadonlyBytes in)
 void RSA_PKCS1_EME::encrypt(ReadonlyBytes in, Bytes& out)
 {
     auto mod_len = (m_public_key.modulus().trimmed_length() * sizeof(u32) * 8 + 7) / 8;
-    dbgln<CRYPTO_DEBUG>("key size: {}", mod_len);
+    dbgln_if(CRYPTO_DEBUG, "key size: {}", mod_len);
     if (in.size() > mod_len - 11) {
         dbgln("message too long :(");
         out = out.trim(0);
@@ -262,7 +262,7 @@ void RSA_PKCS1_EME::encrypt(ReadonlyBytes in, Bytes& out)
     out.overwrite(3 + ps_length, in.data(), in.size());
     out = out.trim(3 + ps_length + in.size()); // should be a single block

-    dbgln<CRYPTO_DEBUG>("padded output size: {} buffer size: {}", 3 + ps_length + in.size(), out.size());
+    dbgln_if(CRYPTO_DEBUG, "padded output size: {} buffer size: {}", 3 + ps_length + in.size(), out.size());

     RSA::encrypt(out, out);
 }

@@ -251,7 +251,7 @@ void LineProgram::run_program()
     u8 opcode = 0;
     m_stream >> opcode;

-    dbgln<DWARF_DEBUG>("{:p}: opcode: {}", m_stream.offset() - 1, opcode);
+    dbgln_if(DWARF_DEBUG, "{:p}: opcode: {}", m_stream.offset() - 1, opcode);

     if (opcode == 0) {
         handle_extended_opcode();

@ -125,10 +125,10 @@ static Vector<String> get_dependencies(const String& name)
|
|||
|
||||
static void map_dependencies(const String& name)
|
||||
{
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("mapping dependencies for: {}", name);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "mapping dependencies for: {}", name);
|
||||
|
||||
for (const auto& needed_name : get_dependencies(name)) {
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("needed library: {}", needed_name.characters());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "needed library: {}", needed_name.characters());
|
||||
String library_name = get_library_name(needed_name);
|
||||
|
||||
if (!g_loaders.contains(library_name)) {
|
||||
|
@ -136,19 +136,19 @@ static void map_dependencies(const String& name)
|
|||
map_dependencies(library_name);
|
||||
}
|
||||
}
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("mapped dependencies for {}", name);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "mapped dependencies for {}", name);
|
||||
}
|
||||
|
||||
static void allocate_tls()
|
||||
{
|
||||
size_t total_tls_size = 0;
|
||||
for (const auto& data : g_loaders) {
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("{}: TLS Size: {}", data.key, data.value->tls_size());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "{}: TLS Size: {}", data.key, data.value->tls_size());
|
||||
total_tls_size += data.value->tls_size();
|
||||
}
|
||||
if (total_tls_size) {
|
||||
[[maybe_unused]] void* tls_address = ::allocate_tls(total_tls_size);
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("from userspace, tls_address: {:p}", tls_address);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "from userspace, tls_address: {:p}", tls_address);
|
||||
}
|
||||
g_total_tls_size = total_tls_size;
|
||||
}
|
||||
|
@ -180,14 +180,14 @@ static void initialize_libc(DynamicObject& libc)
|
|||
|
||||
static void load_elf(const String& name)
|
||||
{
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("load_elf: {}", name);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "load_elf: {}", name);
|
||||
auto loader = g_loaders.get(name).value();
|
||||
|
||||
auto dynamic_object = loader->map();
|
||||
ASSERT(dynamic_object);
|
||||
|
||||
for (const auto& needed_name : get_dependencies(name)) {
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("needed library: {}", needed_name);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "needed library: {}", needed_name);
|
||||
String library_name = get_library_name(needed_name);
|
||||
if (!g_loaded_objects.contains(library_name)) {
|
||||
load_elf(library_name);
|
||||
|
@ -200,7 +200,7 @@ static void load_elf(const String& name)
|
|||
g_loaded_objects.set(name, *dynamic_object);
|
||||
g_global_objects.append(*dynamic_object);
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("load_elf: done {}", name);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "load_elf: done {}", name);
|
||||
}
|
||||
|
||||
static NonnullRefPtr<DynamicLoader> commit_elf(const String& name)
|
||||
|
@ -249,9 +249,9 @@ void ELF::DynamicLinker::linker_main(String&& main_program_name, int main_progra
|
|||
map_library(main_program_name, main_program_fd);
|
||||
map_dependencies(main_program_name);
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("loaded all dependencies");
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "loaded all dependencies");
|
||||
for ([[maybe_unused]] auto& lib : g_loaders) {
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("{} - tls size: {}, tls offset: {}", lib.key, lib.value->tls_size(), lib.value->tls_offset());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "{} - tls size: {}, tls offset: {}", lib.key, lib.value->tls_size(), lib.value->tls_offset());
|
||||
}
|
||||
|
||||
allocate_tls();
|
||||
|
@ -263,11 +263,11 @@ void ELF::DynamicLinker::linker_main(String&& main_program_name, int main_progra
|
|||
if (main_program_lib->is_dynamic())
|
||||
entry_point += reinterpret_cast<FlatPtr>(main_program_lib->text_segment_load_address().as_ptr());
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("entry point: {:p}", (void*)entry_point);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "entry point: {:p}", (void*)entry_point);
|
||||
g_loaders.clear();
|
||||
|
||||
MainFunction main_function = (MainFunction)(entry_point);
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("jumping to main program entry point: {:p}", main_function);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "jumping to main program entry point: {:p}", main_function);
|
||||
if (g_do_breakpoint_trap_before_entry) {
|
||||
asm("int3");
|
||||
}
|
||||
|
@ -278,7 +278,7 @@ void ELF::DynamicLinker::linker_main(String&& main_program_name, int main_progra
|
|||
}
|
||||
|
||||
rc = main_function(argc, argv, envp);
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("rc: {}", rc);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "rc: {}", rc);
|
||||
if (g_libc_exit != nullptr) {
|
||||
g_libc_exit(rc);
|
||||
} else {
|
||||
|
|
|
@ -230,7 +230,7 @@ RefPtr<DynamicObject> DynamicLoader::load_stage_3(unsigned flags, size_t total_t
|
|||
|
||||
call_object_init_functions();
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Loaded {}", m_filename);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Loaded {}", m_filename);
|
||||
return m_dynamic_object;
|
||||
}
|
||||
|
||||
|
@ -359,25 +359,25 @@ void DynamicLoader::load_program_headers()
|
|||
|
||||
DynamicLoader::RelocationResult DynamicLoader::do_relocation(size_t total_tls_size, ELF::DynamicObject::Relocation relocation)
|
||||
{
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Relocation symbol: {}, type: {}", relocation.symbol().name(), relocation.type());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Relocation symbol: {}, type: {}", relocation.symbol().name(), relocation.type());
|
||||
FlatPtr* patch_ptr = nullptr;
|
||||
if (is_dynamic())
|
||||
patch_ptr = (FlatPtr*)(m_dynamic_object->base_address().as_ptr() + relocation.offset());
|
||||
else
|
||||
patch_ptr = (FlatPtr*)(FlatPtr)relocation.offset();
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("dynamic object base address: {:p}", m_dynamic_object->base_address().as_ptr());
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("relocation offset: {:#08x}", relocation.offset());
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("patch_ptr: {:p}", patch_ptr);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "dynamic object base address: {:p}", m_dynamic_object->base_address().as_ptr());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "relocation offset: {:#08x}", relocation.offset());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "patch_ptr: {:p}", patch_ptr);
|
||||
switch (relocation.type()) {
|
||||
case R_386_NONE:
|
||||
// Apparently most loaders will just skip these?
|
||||
// Seems if the 'link editor' generates one something is funky with your code
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("None relocation. No symbol, no nothing.");
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "None relocation. No symbol, no nothing.");
|
||||
break;
|
||||
case R_386_32: {
|
||||
auto symbol = relocation.symbol();
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Absolute relocation: name: '{}', value: {}", symbol.name(), symbol.value());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Absolute relocation: name: '{}', value: {}", symbol.name(), symbol.value());
|
||||
auto res = lookup_symbol(symbol);
|
||||
if (!res.has_value()) {
|
||||
if (symbol.bind() == STB_WEAK)
|
||||
|
@ -387,22 +387,22 @@ DynamicLoader::RelocationResult DynamicLoader::do_relocation(size_t total_tls_si
|
|||
}
|
||||
u32 symbol_address = res.value().address;
|
||||
*patch_ptr += symbol_address;
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>(" Symbol address: {:p}", *patch_ptr);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, " Symbol address: {:p}", *patch_ptr);
|
||||
break;
|
||||
}
|
||||
case R_386_PC32: {
|
||||
auto symbol = relocation.symbol();
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("PC-relative relocation: '{}', value: {:p}", symbol.name(), symbol.value());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "PC-relative relocation: '{}', value: {:p}", symbol.name(), symbol.value());
|
||||
auto res = lookup_symbol(symbol);
|
||||
ASSERT(res.has_value());
|
||||
u32 relative_offset = (res.value().address - (FlatPtr)(m_dynamic_object->base_address().as_ptr() + relocation.offset()));
|
||||
*patch_ptr += relative_offset;
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>(" Symbol address: {:p}", *patch_ptr);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, " Symbol address: {:p}", *patch_ptr);
|
||||
break;
|
||||
}
|
||||
case R_386_GLOB_DAT: {
|
||||
auto symbol = relocation.symbol();
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Global data relocation: '{}', value: {:p}", symbol.name(), symbol.value());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Global data relocation: '{}', value: {:p}", symbol.name(), symbol.value());
|
||||
auto res = lookup_symbol(symbol);
|
||||
if (!res.has_value()) {
|
||||
// We do not support these
|
||||
|
@ -419,46 +419,46 @@ DynamicLoader::RelocationResult DynamicLoader::do_relocation(size_t total_tls_si
|
|||
// Symbol not found
|
||||
return RelocationResult::Failed;
|
||||
}
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("symbol found, location: {:#08x}", res.value().address);
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("object: {}", m_filename);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "symbol found, location: {:#08x}", res.value().address);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "object: {}", m_filename);
|
||||
|
||||
u32 symbol_location = res.value().address;
|
||||
ASSERT(symbol_location != (FlatPtr)m_dynamic_object->base_address().as_ptr());
|
||||
*patch_ptr = symbol_location;
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>(" Symbol address: {:p}", *patch_ptr);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, " Symbol address: {:p}", *patch_ptr);
|
||||
break;
|
||||
}
|
||||
case R_386_RELATIVE: {
|
||||
// FIXME: According to the spec, R_386_relative ones must be done first.
|
||||
// We could explicitly do them first using m_number_of_relocatoins from DT_RELCOUNT
|
||||
// However, our compiler is nice enough to put them at the front of the relocations for us :)
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Load address relocation at offset {:#08x}", relocation.offset());
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>(" patch ptr == {:p}, adding load base address ({:p}) to it and storing {:p}", *patch_ptr, m_dynamic_object->base_address().as_ptr(), *patch_ptr + m_dynamic_object->base_address().as_ptr());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Load address relocation at offset {:#08x}", relocation.offset());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, " patch ptr == {:p}, adding load base address ({:p}) to it and storing {:p}", *patch_ptr, m_dynamic_object->base_address().as_ptr(), *patch_ptr + m_dynamic_object->base_address().as_ptr());
|
||||
*patch_ptr += (FlatPtr)m_dynamic_object->base_address().as_ptr(); // + addend for RelA (addend for Rel is stored at addr)
|
||||
break;
|
||||
}
|
||||
case R_386_TLS_TPOFF32:
|
||||
case R_386_TLS_TPOFF: {
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Relocation type: R_386_TLS_TPOFF at offset {:#08x}", relocation.offset());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Relocation type: R_386_TLS_TPOFF at offset {:#08x}", relocation.offset());
|
||||
auto symbol = relocation.symbol();
|
||||
// For some reason, LibC has a R_386_TLS_TPOFF that refers to the undefined symbol.. huh
|
||||
if (relocation.symbol_index() == 0)
|
||||
break;
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Symbol index: {}", symbol.index());
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Symbol is_undefined?: {}", symbol.is_undefined());
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("TLS relocation: '{}', value: {:p}", symbol.name(), symbol.value());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Symbol index: {}", symbol.index());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Symbol is_undefined?: {}", symbol.is_undefined());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "TLS relocation: '{}', value: {:p}", symbol.name(), symbol.value());
|
||||
auto res = lookup_symbol(symbol);
|
||||
if (!res.has_value())
|
||||
break;
|
||||
u32 symbol_value = res.value().value;
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("symbol value: {}", symbol_value);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "symbol value: {}", symbol_value);
|
||||
auto* dynamic_object_of_symbol = res.value().dynamic_object;
|
||||
ASSERT(dynamic_object_of_symbol);
|
||||
size_t offset_of_tls_end = dynamic_object_of_symbol->tls_offset().value() + dynamic_object_of_symbol->tls_size().value();
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("patch ptr: {:p}", patch_ptr);
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("tls end offset: {}, total tls size: {}", offset_of_tls_end, total_tls_size);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "patch ptr: {:p}", patch_ptr);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "tls end offset: {}, total tls size: {}", offset_of_tls_end, total_tls_size);
|
||||
*patch_ptr = (offset_of_tls_end - total_tls_size - symbol_value - sizeof(Elf32_Addr));
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("*patch ptr: {}", (i32)*patch_ptr);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "*patch ptr: {}", (i32)*patch_ptr);
|
||||
break;
|
||||
}
|
||||
case R_386_JMP_SLOT: {
|
||||
|
@ -466,7 +466,7 @@ DynamicLoader::RelocationResult DynamicLoader::do_relocation(size_t total_tls_si
|
|||
if (m_dynamic_object->must_bind_now() || s_always_bind_now) {
|
||||
// Eagerly BIND_NOW the PLT entries, doing all the symbol looking goodness
|
||||
// The patch method returns the address for the LAZY fixup path, but we don't need it here
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("patching plt reloaction: {:p}", relocation.offset_in_section());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "patching plt reloaction: {:p}", relocation.offset_in_section());
|
||||
[[maybe_unused]] auto rc = m_dynamic_object->patch_plt_entry(relocation.offset_in_section());
|
||||
} else {
|
||||
u8* relocation_address = relocation.address().as_ptr();
|
||||
|
@ -499,7 +499,7 @@ void DynamicLoader::setup_plt_trampoline()
|
|||
got_ptr[1] = (FlatPtr)m_dynamic_object.ptr();
|
||||
got_ptr[2] = (FlatPtr)&_plt_trampoline;
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Set GOT PLT entries at {:p}: [0] = {:p} [1] = {:p}, [2] = {:p}", got_ptr, (void*)got_ptr[0], (void*)got_ptr[1], (void*)got_ptr[2]);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Set GOT PLT entries at {:p}: [0] = {:p} [1] = {:p}, [2] = {:p}", got_ptr, (void*)got_ptr[0], (void*)got_ptr[1], (void*)got_ptr[2]);
|
||||
}
|
||||
|
||||
// Called from our ASM routine _plt_trampoline.
|
||||
|
@ -517,7 +517,7 @@ void DynamicLoader::call_object_init_functions()
|
|||
if (m_dynamic_object->has_init_section()) {
|
||||
auto init_function = (InitFunc)(m_dynamic_object->init_section().address().as_ptr());
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Calling DT_INIT at {:p}", init_function);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Calling DT_INIT at {:p}", init_function);
|
||||
(init_function)();
|
||||
}
|
||||
|
||||
|
@ -531,7 +531,7 @@ void DynamicLoader::call_object_init_functions()
|
|||
// 0 definitely shows up. Apparently 0/-1 are valid? Confusing.
|
||||
if (!*init_begin || ((FlatPtr)*init_begin == (FlatPtr)-1))
|
||||
continue;
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Calling DT_INITARRAY entry at {:p}", *init_begin);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Calling DT_INITARRAY entry at {:p}", *init_begin);
|
||||
(*init_begin)();
|
||||
++init_begin;
|
||||
}
|
||||
|
|
|
@ -73,8 +73,8 @@ void DynamicObject::dump() const
|
|||
if (m_has_soname)
|
||||
builder.appendf("DT_SONAME: %s\n", soname()); // FIXME: Valdidate that this string is null terminated?
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Dynamic section at address {} contains {} entries:", m_dynamic_address.as_ptr(), num_dynamic_sections);
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("{}", builder.string_view());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Dynamic section at address {} contains {} entries:", m_dynamic_address.as_ptr(), num_dynamic_sections);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "{}", builder.string_view());
|
||||
}
|
||||
|
||||
void DynamicObject::parse()
|
||||
|
@ -303,7 +303,7 @@ const DynamicObject::Symbol DynamicObject::HashSection::lookup_elf_symbol(const
|
|||
for (u32 i = buckets[hash_value % num_buckets]; i; i = chains[i]) {
|
||||
auto symbol = m_dynamic.symbol(i);
|
||||
if (strcmp(name, symbol.name()) == 0) {
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Returning SYSV dynamic symbol with index {} for {}: {}", i, symbol.name(), symbol.address().as_ptr());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Returning SYSV dynamic symbol with index {} for {}: {}", i, symbol.name(), symbol.address().as_ptr());
|
||||
return symbol;
|
||||
}
|
||||
}
|
||||
|
@ -348,7 +348,7 @@ const DynamicObject::Symbol DynamicObject::HashSection::lookup_gnu_symbol(const
|
|||
hash2 = *(current_chain++);
|
||||
const auto symbol = m_dynamic.symbol(current_sym);
|
||||
if ((hash1 == (hash2 & ~1)) && strcmp(name, symbol.name()) == 0) {
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("Returning GNU dynamic symbol with index {} for {}: {}", current_sym, symbol.name(), symbol.address().as_ptr());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "Returning GNU dynamic symbol with index {} for {}: {}", current_sym, symbol.name(), symbol.address().as_ptr());
|
||||
return symbol;
|
||||
}
|
||||
if (hash2 & 1) {
|
||||
|
@ -494,7 +494,7 @@ Elf32_Addr DynamicObject::patch_plt_entry(u32 relocation_offset)
|
|||
|
||||
u32 symbol_location = result.value().address;
|
||||
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("DynamicLoader: Jump slot relocation: putting {} ({:p}) into PLT at {}", sym.name(), symbol_location, (void*)relocation_address);
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "DynamicLoader: Jump slot relocation: putting {} ({:p}) into PLT at {}", sym.name(), symbol_location, (void*)relocation_address);
|
||||
|
||||
*(u32*)relocation_address = symbol_location;
|
||||
|
||||
|
@ -503,12 +503,12 @@ Elf32_Addr DynamicObject::patch_plt_entry(u32 relocation_offset)
|
|||
|
||||
Optional<DynamicObject::SymbolLookupResult> DynamicObject::lookup_symbol(const ELF::DynamicObject::Symbol& symbol) const
|
||||
{
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("looking up symbol: {}", symbol.name());
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "looking up symbol: {}", symbol.name());
|
||||
if (symbol.is_undefined() || symbol.bind() == STB_WEAK)
|
||||
return DynamicLinker::lookup_global_symbol(symbol.name());
|
||||
|
||||
if (!symbol.is_undefined()) {
|
||||
dbgln<DYNAMIC_LOAD_DEBUG>("symbol is defined in its object");
|
||||
dbgln_if(DYNAMIC_LOAD_DEBUG, "symbol is defined in its object");
|
||||
return SymbolLookupResult { symbol.value(), (FlatPtr)symbol.address().as_ptr(), symbol.bind(), &symbol.object() };
|
||||
}
|
||||
return DynamicLinker::lookup_global_symbol(symbol.name());
|
||||
|
|
|
@ -140,25 +140,25 @@ void WindowServerConnection::handle(const Messages::WindowClient::KeyDown& messa
|
|||
auto key_event = make<KeyEvent>(Event::KeyDown, (KeyCode)message.key(), message.modifiers(), message.code_point(), message.scancode());
|
||||
Action* action = nullptr;
|
||||
|
||||
dbgln<KEYBOARD_SHORTCUTS_DEBUG>("Looking up action for {}", key_event->to_string());
|
||||
dbgln_if(KEYBOARD_SHORTCUTS_DEBUG, "Looking up action for {}", key_event->to_string());
|
||||
|
||||
if (auto* focused_widget = window->focused_widget()) {
|
||||
for (auto* widget = focused_widget; widget && !action; widget = widget->parent_widget()) {
|
||||
action = widget->action_for_key_event(*key_event);
|
||||
|
||||
dbgln<KEYBOARD_SHORTCUTS_DEBUG>(" > Focused widget {} gave action: {}", *widget, action);
|
||||
dbgln_if(KEYBOARD_SHORTCUTS_DEBUG, " > Focused widget {} gave action: {}", *widget, action);
|
||||
}
|
||||
}
|
||||
|
||||
if (!action) {
|
||||
action = window->action_for_key_event(*key_event);
|
||||
dbgln<KEYBOARD_SHORTCUTS_DEBUG>(" > Asked window {}, got action: {}", *window, action);
|
||||
dbgln_if(KEYBOARD_SHORTCUTS_DEBUG, " > Asked window {}, got action: {}", *window, action);
|
||||
}
|
||||
|
||||
// NOTE: Application-global shortcuts are ignored while a modal window is up.
|
||||
if (!action && !window->is_modal()) {
|
||||
action = Application::the()->action_for_key_event(*key_event);
|
||||
dbgln<KEYBOARD_SHORTCUTS_DEBUG>(" > Asked application, got action: {}", action);
|
||||
dbgln_if(KEYBOARD_SHORTCUTS_DEBUG, " > Asked application, got action: {}", action);
|
||||
}
|
||||
|
||||
if (action) {
|
||||
|
|
|
@ -317,7 +317,7 @@ static u8 get_scaled_color(u32 data, u8 mask_size, i8 mask_shift)
|
|||
// to scale the values in order to reach the proper value of 255.
|
||||
static u32 int_to_scaled_rgb(BMPLoadingContext& context, u32 data)
|
||||
{
|
||||
dbgln<BMP_DEBUG>("DIB info sizes before access: #masks={}, #mask_sizes={}, #mask_shifts={}",
|
||||
dbgln_if(BMP_DEBUG, "DIB info sizes before access: #masks={}, #mask_sizes={}, #mask_shifts={}",
|
||||
context.dib.info.masks.size(),
|
||||
context.dib.info.mask_sizes.size(),
|
||||
context.dib.info.mask_shifts.size());
|
||||
|
@ -471,7 +471,7 @@ static bool decode_bmp_header(BMPLoadingContext& context)
|
|||
return true;
|
||||
|
||||
if (!context.file_bytes || context.file_size < bmp_header_size) {
|
||||
dbgln<BMP_DEBUG>("Missing BMP header");
|
||||
dbgln_if(BMP_DEBUG, "Missing BMP header");
|
||||
context.state = BMPLoadingContext::State::Error;
|
||||
return false;
|
||||
}
|
||||
|
@ -480,7 +480,7 @@ static bool decode_bmp_header(BMPLoadingContext& context)
|
|||
|
||||
u16 header = streamer.read_u16();
|
||||
if (header != 0x4d42) {
|
||||
dbgln<BMP_DEBUG>("BMP has invalid magic header number: {:#04x}", header);
|
||||
dbgln_if(BMP_DEBUG, "BMP has invalid magic header number: {:#04x}", header);
|
||||
context.state = BMPLoadingContext::State::Error;
|
||||
return false;
|
||||
}
|
||||
|
@ -502,7 +502,7 @@ static bool decode_bmp_header(BMPLoadingContext& context)
|
|||
}
|
||||
|
||||
if (context.data_offset >= context.file_size) {
|
||||
dbgln<BMP_DEBUG>("BMP data offset is beyond file end?!");
|
||||
dbgln_if(BMP_DEBUG, "BMP data offset is beyond file end?!");
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -725,12 +725,12 @@ static bool decode_bmp_v3_dib(BMPLoadingContext& context, Streamer& streamer)
|
|||
// suite results.
|
||||
if (context.dib.info.compression == Compression::ALPHABITFIELDS) {
|
||||
context.dib.info.masks.append(streamer.read_u32());
|
||||
dbgln<BMP_DEBUG>("BMP alpha mask: {:#08x}", context.dib.info.masks[3]);
|
||||
dbgln_if(BMP_DEBUG, "BMP alpha mask: {:#08x}", context.dib.info.masks[3]);
|
||||
} else if (context.dib_size() >= 56 && context.dib.core.bpp >= 16) {
|
||||
auto mask = streamer.read_u32();
|
||||
if ((context.dib.core.bpp == 32 && mask != 0) || context.dib.core.bpp == 16) {
|
||||
context.dib.info.masks.append(mask);
|
||||
dbgln<BMP_DEBUG>("BMP alpha mask: {:#08x}", mask);
|
||||
dbgln_if(BMP_DEBUG, "BMP alpha mask: {:#08x}", mask);
|
||||
}
|
||||
} else {
|
||||
streamer.drop_bytes(4);
|
||||
|
@ -807,7 +807,7 @@ static bool decode_bmp_dib(BMPLoadingContext& context)
|
|||
|
||||
streamer = Streamer(context.file_bytes + bmp_header_size + 4, context.data_offset - bmp_header_size - 4);
|
||||
|
||||
dbgln<BMP_DEBUG>("BMP dib size: {}", dib_size);
|
||||
dbgln_if(BMP_DEBUG, "BMP dib size: {}", dib_size);
|
||||
|
||||
bool error = false;
|
||||
|
||||
|
@ -937,7 +937,7 @@ static bool uncompress_bmp_rle_data(BMPLoadingContext& context, ByteBuffer& buff
|
|||
{
|
||||
// RLE-compressed images cannot be stored top-down
|
||||
if (context.dib.core.height < 0) {
|
||||
dbgln<BMP_DEBUG>("BMP is top-down and RLE compressed");
|
||||
dbgln_if(BMP_DEBUG, "BMP is top-down and RLE compressed");
|
||||
context.state = BMPLoadingContext::State::Error;
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -212,13 +212,13 @@ public:
|
|||
}
|
||||
|
||||
if (m_current_code > m_code_table.size()) {
|
||||
dbgln<GIF_DEBUG>("Corrupted LZW stream, invalid code: {} at bit index {}, code table size: {}",
|
||||
dbgln_if(GIF_DEBUG, "Corrupted LZW stream, invalid code: {} at bit index {}, code table size: {}",
|
||||
m_current_code,
|
||||
m_current_bit_index,
|
||||
m_code_table.size());
|
||||
return {};
|
||||
} else if (m_current_code == m_code_table.size() && m_output.is_empty()) {
|
||||
dbgln<GIF_DEBUG>("Corrupted LZW stream, valid new code but output buffer is empty: {} at bit index {}, code table size: {}",
|
||||
dbgln_if(GIF_DEBUG, "Corrupted LZW stream, valid new code but output buffer is empty: {} at bit index {}, code table size: {}",
|
||||
m_current_code,
|
||||
m_current_bit_index,
|
||||
m_code_table.size());
|
||||
|
@ -541,12 +541,12 @@ static bool load_gif_frame_descriptors(GIFLoadingContext& context)
|
|||
|
||||
if (extension_type == 0xFF) {
|
||||
if (sub_block.size() != 14) {
|
||||
dbgln<GIF_DEBUG>("Unexpected application extension size: {}", sub_block.size());
|
||||
dbgln_if(GIF_DEBUG, "Unexpected application extension size: {}", sub_block.size());
|
||||
continue;
|
||||
}
|
||||
|
||||
if (sub_block[11] != 1) {
|
||||
dbgln<GIF_DEBUG>("Unexpected application extension format");
|
||||
dbgln_if(GIF_DEBUG, "Unexpected application extension format");
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
|
@ -230,13 +230,13 @@ static void generate_huffman_codes(HuffmanTableSpec& table)
|
|||
static Optional<size_t> read_huffman_bits(HuffmanStreamState& hstream, size_t count = 1)
|
||||
{
|
||||
if (count > (8 * sizeof(size_t))) {
|
||||
dbgln<JPG_DEBUG>("Can't read {} bits at once!", count);
|
||||
dbgln_if(JPG_DEBUG, "Can't read {} bits at once!", count);
|
||||
return {};
|
||||
}
|
||||
size_t value = 0;
|
||||
while (count--) {
|
||||
if (hstream.byte_offset >= hstream.stream.size()) {
|
||||
dbgln<JPG_DEBUG>("Huffman stream exhausted. This could be an error!");
|
||||
dbgln_if(JPG_DEBUG, "Huffman stream exhausted. This could be an error!");
|
||||
return {};
|
||||
}
|
||||
u8 current_byte = hstream.stream[hstream.byte_offset];
|
||||
|
@ -313,7 +313,7 @@ static bool build_macroblocks(JPGLoadingContext& context, Vector<Macroblock>& ma
|
|||
// For DC coefficients, symbol encodes the length of the coefficient.
|
||||
auto dc_length = symbol_or_error.release_value();
|
||||
if (dc_length > 11) {
|
||||
dbgln<JPG_DEBUG>("DC coefficient too long: {}!", dc_length);
|
||||
dbgln_if(JPG_DEBUG, "DC coefficient too long: {}!", dc_length);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -350,13 +350,13 @@ static bool build_macroblocks(JPGLoadingContext& context, Vector<Macroblock>& ma
|
|||
j += run_length;
|
||||
|
||||
if (j >= 64) {
|
||||
dbgln<JPG_DEBUG>("Run-length exceeded boundaries. Cursor: {}, Skipping: {}!", j, run_length);
|
||||
dbgln_if(JPG_DEBUG, "Run-length exceeded boundaries. Cursor: {}, Skipping: {}!", j, run_length);
|
||||
return false;
|
||||
}
|
||||
|
||||
u8 coeff_length = ac_symbol & 0x0F;
|
||||
if (coeff_length > 10) {
|
||||
dbgln<JPG_DEBUG>("AC coefficient too long: {}!", coeff_length);
|
||||
dbgln_if(JPG_DEBUG, "AC coefficient too long: {}!", coeff_length);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -445,7 +445,7 @@ static inline bool is_valid_marker(const Marker marker)
|
|||
if (marker >= JPG_APPN0 && marker <= JPG_APPNF) {
|
||||
|
||||
if (marker != JPG_APPN0)
|
||||
dbgln<JPG_DEBUG>("{:#04x} not supported yet. The decoder may fail!", marker);
|
||||
dbgln_if(JPG_DEBUG, "{:#04x} not supported yet. The decoder may fail!", marker);
|
||||
return true;
|
||||
}
|
||||
if (marker >= JPG_RESERVED1 && marker <= JPG_RESERVEDD)
|
||||
|
@ -467,7 +467,7 @@ static inline bool is_valid_marker(const Marker marker)
|
|||
|
||||
if (marker >= 0xFFC0 && marker <= 0xFFCF) {
|
||||
if (marker != 0xFFC4 && marker != 0xFFC8 && marker != 0xFFCC) {
|
||||
dbgln<JPG_DEBUG>("Decoding this frame-type (SOF{}) is not currently supported. Decoder will fail!", marker & 0xf);
|
||||
dbgln_if(JPG_DEBUG, "Decoding this frame-type (SOF{}) is not currently supported. Decoder will fail!", marker & 0xf);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -504,7 +504,7 @@ static inline Marker read_marker_at_cursor(InputMemoryStream& stream)
|
|||
static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& context)
|
||||
{
|
||||
if (context.state < JPGLoadingContext::State::FrameDecoded) {
|
||||
dbgln<JPG_DEBUG>("{}: SOS found before reading a SOF!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: SOS found before reading a SOF!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -519,7 +519,7 @@ static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& con
|
|||
if (stream.handle_any_error())
|
||||
return false;
|
||||
if (component_count != context.component_count) {
|
||||
dbgln<JPG_DEBUG>("{}: Unsupported number of components: {}!", stream.offset(), component_count);
|
||||
dbgln_if(JPG_DEBUG, "{}: Unsupported number of components: {}!", stream.offset(), component_count);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -538,7 +538,7 @@ static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& con
|
|||
return false;
|
||||
}
|
||||
} else {
|
||||
dbgln<JPG_DEBUG>("{}: Unsupported component id: {}!", stream.offset(), component_id);
|
||||
dbgln_if(JPG_DEBUG, "{}: Unsupported component id: {}!", stream.offset(), component_id);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -551,17 +551,17 @@ static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& con
|
|||
component->ac_destination_id = table_ids & 0x0F;
|
||||
|
||||
if (context.dc_tables.size() != context.ac_tables.size()) {
|
||||
dbgln<JPG_DEBUG>("{}: DC & AC table count mismatch!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: DC & AC table count mismatch!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!context.dc_tables.contains(component->dc_destination_id)) {
|
||||
dbgln<JPG_DEBUG>("DC table (id: {}) does not exist!", component->dc_destination_id);
|
||||
dbgln_if(JPG_DEBUG, "DC table (id: {}) does not exist!", component->dc_destination_id);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!context.ac_tables.contains(component->ac_destination_id)) {
|
||||
dbgln<JPG_DEBUG>("AC table (id: {}) does not exist!", component->ac_destination_id);
|
||||
dbgln_if(JPG_DEBUG, "AC table (id: {}) does not exist!", component->ac_destination_id);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -580,7 +580,7 @@ static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& con
|
|||
return false;
|
||||
// The three values should be fixed for baseline JPEGs utilizing sequential DCT.
|
||||
if (spectral_selection_start != 0 || spectral_selection_end != 63 || successive_approximation != 0) {
|
||||
dbgln<JPG_DEBUG>("{}: ERROR! Start of Selection: {}, End of Selection: {}, Successive Approximation: {}!",
|
||||
dbgln_if(JPG_DEBUG, "{}: ERROR! Start of Selection: {}, End of Selection: {}, Successive Approximation: {}!",
|
||||
stream.offset(),
|
||||
spectral_selection_start,
|
||||
spectral_selection_end,
|
||||
|
@ -597,7 +597,7 @@ static bool read_reset_marker(InputMemoryStream& stream, JPGLoadingContext& cont
|
|||
return false;
|
||||
bytes_to_read -= 2;
|
||||
if (bytes_to_read != 2) {
|
||||
dbgln<JPG_DEBUG>("{}: Malformed reset marker found!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: Malformed reset marker found!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
context.dc_reset_interval = read_be_word(stream);
|
||||
|
@ -623,11 +623,11 @@ static bool read_huffman_table(InputMemoryStream& stream, JPGLoadingContext& con
|
|||
u8 table_type = table_info >> 4;
|
||||
u8 table_destination_id = table_info & 0x0F;
|
||||
if (table_type > 1) {
|
||||
dbgln<JPG_DEBUG>("{}: Unrecognized huffman table: {}!", stream.offset(), table_type);
|
||||
dbgln_if(JPG_DEBUG, "{}: Unrecognized huffman table: {}!", stream.offset(), table_type);
|
||||
return false;
|
||||
}
|
||||
if (table_destination_id > 1) {
|
||||
dbgln<JPG_DEBUG>("{}: Invalid huffman table destination id: {}!", stream.offset(), table_destination_id);
|
||||
dbgln_if(JPG_DEBUG, "{}: Invalid huffman table destination id: {}!", stream.offset(), table_destination_id);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -667,7 +667,7 @@ static bool read_huffman_table(InputMemoryStream& stream, JPGLoadingContext& con
|
|||
}
|
||||
|
||||
if (bytes_to_read != 0) {
|
||||
dbgln<JPG_DEBUG>("{}: Extra bytes detected in huffman header!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: Extra bytes detected in huffman header!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
|
@ -705,7 +705,7 @@ static inline void set_macroblock_metadata(JPGLoadingContext& context)
|
|||
static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& context)
|
||||
{
|
||||
if (context.state == JPGLoadingContext::FrameDecoded) {
|
||||
dbgln<JPG_DEBUG>("{}: SOF repeated!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: SOF repeated!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -721,7 +721,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co
|
|||
if (stream.handle_any_error())
|
||||
return false;
|
||||
if (context.frame.precision != 8) {
|
||||
dbgln<JPG_DEBUG>("{}: SOF precision != 8!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: SOF precision != 8!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -732,7 +732,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co
|
|||
if (stream.handle_any_error())
|
||||
return false;
|
||||
if (!context.frame.width || !context.frame.height) {
|
||||
dbgln<JPG_DEBUG>("{}: ERROR! Image height: {}, Image width: {}!", stream.offset(), context.frame.height, context.frame.width);
|
||||
dbgln_if(JPG_DEBUG, "{}: ERROR! Image height: {}, Image width: {}!", stream.offset(), context.frame.height, context.frame.width);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -747,7 +747,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co
|
|||
if (stream.handle_any_error())
|
||||
return false;
|
||||
if (context.component_count != 1 && context.component_count != 3) {
|
||||
dbgln<JPG_DEBUG>("{}: Unsupported number of components in SOF: {}!", stream.offset(), context.component_count);
|
||||
dbgln_if(JPG_DEBUG, "{}: Unsupported number of components in SOF: {}!", stream.offset(), context.component_count);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -770,7 +770,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co
|
|||
// By convention, downsampling is applied only on chroma components. So we should
|
||||
// hope to see the maximum sampling factor in the luma component.
|
||||
if (!validate_luma_and_modify_context(component, context)) {
|
||||
dbgln<JPG_DEBUG>("{}: Unsupported luma subsampling factors: horizontal: {}, vertical: {}",
|
||||
dbgln_if(JPG_DEBUG, "{}: Unsupported luma subsampling factors: horizontal: {}, vertical: {}",
|
||||
stream.offset(),
|
||||
component.hsample_factor,
|
||||
component.vsample_factor);
|
||||
|
@ -778,7 +778,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co
|
|||
}
|
||||
} else {
|
||||
if (component.hsample_factor != 1 || component.vsample_factor != 1) {
|
||||
dbgln<JPG_DEBUG>("{}: Unsupported chroma subsampling factors: horizontal: {}, vertical: {}",
|
||||
dbgln_if(JPG_DEBUG, "{}: Unsupported chroma subsampling factors: horizontal: {}, vertical: {}",
|
||||
stream.offset(),
|
||||
component.hsample_factor,
|
||||
component.vsample_factor);
|
||||
|
@ -790,7 +790,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co
|
|||
if (stream.handle_any_error())
|
||||
return false;
|
||||
if (component.qtable_id > 1) {
|
||||
dbgln<JPG_DEBUG>("{}: Unsupported quantization table id: {}!", stream.offset(), component.qtable_id);
|
||||
dbgln_if(JPG_DEBUG, "{}: Unsupported quantization table id: {}!", stream.offset(), component.qtable_id);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -815,12 +815,12 @@ static bool read_quantization_table(InputMemoryStream& stream, JPGLoadingContext
|
|||
return false;
|
||||
u8 element_unit_hint = info_byte >> 4;
|
||||
if (element_unit_hint > 1) {
|
||||
dbgln<JPG_DEBUG>("{}: Unsupported unit hint in quantization table: {}!", stream.offset(), element_unit_hint);
|
||||
dbgln_if(JPG_DEBUG, "{}: Unsupported unit hint in quantization table: {}!", stream.offset(), element_unit_hint);
|
||||
return false;
|
||||
}
|
||||
u8 table_id = info_byte & 0x0F;
|
||||
if (table_id > 1) {
|
||||
dbgln<JPG_DEBUG>("{}: Unsupported quantization table id: {}!", stream.offset(), table_id);
|
||||
dbgln_if(JPG_DEBUG, "{}: Unsupported quantization table id: {}!", stream.offset(), table_id);
|
||||
return false;
|
||||
}
|
||||
u32* table = table_id == 0 ? context.luma_table : context.chroma_table;
|
||||
|
@ -843,7 +843,7 @@ static bool read_quantization_table(InputMemoryStream& stream, JPGLoadingContext
|
|||
bytes_to_read -= 1 + (element_unit_hint == 0 ? 64 : 128);
|
||||
}
|
||||
if (bytes_to_read != 0) {
|
||||
dbgln<JPG_DEBUG>("{}: Invalid length for one or more quantization tables!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: Invalid length for one or more quantization tables!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -1109,7 +1109,7 @@ static bool parse_header(InputMemoryStream& stream, JPGLoadingContext& context)
|
|||
if (stream.handle_any_error())
|
||||
return false;
|
||||
if (marker != JPG_SOI) {
|
||||
dbgln<JPG_DEBUG>("{}: SOI not found: {:x}!", stream.offset(), marker);
|
||||
dbgln_if(JPG_DEBUG, "{}: SOI not found: {:x}!", stream.offset(), marker);
|
||||
return false;
|
||||
}
|
||||
for (;;) {
|
||||
|
@ -1137,7 +1137,7 @@ static bool parse_header(InputMemoryStream& stream, JPGLoadingContext& context)
|
|||
case JPG_RST7:
|
||||
case JPG_SOI:
|
||||
case JPG_EOI:
|
||||
dbgln<JPG_DEBUG>("{}: Unexpected marker {:x}!", stream.offset(), marker);
|
||||
dbgln_if(JPG_DEBUG, "{}: Unexpected marker {:x}!", stream.offset(), marker);
|
||||
return false;
|
||||
case JPG_SOF0:
|
||||
if (!read_start_of_frame(stream, context))
|
||||
|
@ -1160,7 +1160,7 @@ static bool parse_header(InputMemoryStream& stream, JPGLoadingContext& context)
|
|||
return read_start_of_scan(stream, context);
|
||||
default:
|
||||
if (!skip_marker_with_length(stream)) {
|
||||
dbgln<JPG_DEBUG>("{}: Error skipping marker: {:x}!", stream.offset(), marker);
|
||||
dbgln_if(JPG_DEBUG, "{}: Error skipping marker: {:x}!", stream.offset(), marker);
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
|
@ -1182,7 +1182,7 @@ static bool scan_huffman_stream(InputMemoryStream& stream, JPGLoadingContext& co
|
|||
last_byte = current_byte;
|
||||
stream >> current_byte;
|
||||
if (stream.handle_any_error()) {
|
||||
dbgln<JPG_DEBUG>("{}: EOI not found!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: EOI not found!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -1206,7 +1206,7 @@ static bool scan_huffman_stream(InputMemoryStream& stream, JPGLoadingContext& co
|
|||
return false;
|
||||
continue;
|
||||
}
|
||||
dbgln<JPG_DEBUG>("{}: Invalid marker: {:x}!", stream.offset(), marker);
|
||||
dbgln_if(JPG_DEBUG, "{}: Invalid marker: {:x}!", stream.offset(), marker);
|
||||
return false;
|
||||
} else {
|
||||
context.huffman_stream.stream.append(last_byte);
|
||||
|
@ -1227,7 +1227,7 @@ static bool decode_jpg(JPGLoadingContext& context)
|
|||
|
||||
auto result = decode_huffman_stream(context);
|
||||
if (!result.has_value()) {
|
||||
dbgln<JPG_DEBUG>("{}: Failed to decode Macroblocks!", stream.offset());
|
||||
dbgln_if(JPG_DEBUG, "{}: Failed to decode Macroblocks!", stream.offset());
|
||||
return false;
|
||||
}
|
||||
|
||||
|
|
|
@ -613,7 +613,7 @@ static bool decode_png_bitmap_simple(PNGLoadingContext& context)
|
|||
}
|
||||
|
||||
if (filter > 4) {
|
||||
dbgln<PNG_DEBUG>("Invalid PNG filter: {}", filter);
|
||||
dbgln_if(PNG_DEBUG, "Invalid PNG filter: {}", filter);
|
||||
context.state = PNGLoadingContext::State::Error;
|
||||
return false;
|
||||
}
|
||||
|
@ -715,7 +715,7 @@ static bool decode_adam7_pass(PNGLoadingContext& context, Streamer& streamer, in
|
|||
}
|
||||
|
||||
if (filter > 4) {
|
||||
dbgln<PNG_DEBUG>("Invalid PNG filter: {}", filter);
|
||||
dbgln_if(PNG_DEBUG, "Invalid PNG filter: {}", filter);
|
||||
context.state = PNGLoadingContext::State::Error;
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -947,7 +947,7 @@ void Painter::draw_glyph_or_emoji(const IntPoint& point, u32 code_point, const F
|
|||
// Perhaps it's an emoji?
|
||||
auto* emoji = Emoji::emoji_for_code_point(code_point);
|
||||
if (emoji == nullptr) {
|
||||
dbgln<EMOJI_DEBUG>("Failed to find an emoji for code_point {}", code_point);
|
||||
dbgln_if(EMOJI_DEBUG, "Failed to find an emoji for code_point {}", code_point);
|
||||
draw_glyph(point, '?', font, color);
|
||||
return;
|
||||
}
|
||||
|
@ -1661,7 +1661,7 @@ void Painter::fill_path(Path& path, Color color, WindingRule winding_rule)
|
|||
// The points between this segment and the previous are
|
||||
// inside the shape
|
||||
|
||||
dbgln<FILL_PATH_DEBUG>("y={}: {} at {}: {} -- {}", scanline, winding_number, i, from, to);
|
||||
dbgln_if(FILL_PATH_DEBUG, "y={}: {} at {}: {} -- {}", scanline, winding_number, i, from, to);
|
||||
draw_line(from, to, color, 1);
|
||||
}
|
||||
|
||||
|
|
|
@ -104,14 +104,14 @@ static bool read_magic_number(TContext& context, Streamer& streamer)
|
|||
|
||||
if (!context.data || context.data_size < 2) {
|
||||
context.state = TContext::State::Error;
|
||||
dbgln<PORTABLE_IMAGE_LOADER_DEBUG>("There is no enough data for {}", TContext::image_type);
|
||||
dbgln_if(PORTABLE_IMAGE_LOADER_DEBUG, "There is no enough data for {}", TContext::image_type);
|
||||
return false;
|
||||
}
|
||||
|
||||
u8 magic_number[2] {};
|
||||
if (!streamer.read_bytes(magic_number, 2)) {
|
||||
context.state = TContext::State::Error;
|
||||
dbgln<PORTABLE_IMAGE_LOADER_DEBUG>("We can't read magic number for {}", TContext::image_type);
|
||||
dbgln_if(PORTABLE_IMAGE_LOADER_DEBUG, "We can't read magic number for {}", TContext::image_type);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -128,7 +128,7 @@ static bool read_magic_number(TContext& context, Streamer& streamer)
|
|||
}
|
||||
|
||||
context.state = TContext::State::Error;
|
||||
dbgln<PORTABLE_IMAGE_LOADER_DEBUG>("Magic number is not valid for {}{}{}", magic_number[0], magic_number[1], TContext::image_type);
|
||||
dbgln_if(PORTABLE_IMAGE_LOADER_DEBUG, "Magic number is not valid for {}{}{}", magic_number[0], magic_number[1], TContext::image_type);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -186,7 +186,7 @@ static bool read_max_val(TContext& context, Streamer& streamer)
|
|||
}
|
||||
|
||||
if (context.max_val > 255) {
|
||||
dbgln<PORTABLE_IMAGE_LOADER_DEBUG>("We can't parse 2 byte color for {}", TContext::image_type);
|
||||
dbgln_if(PORTABLE_IMAGE_LOADER_DEBUG, "We can't parse 2 byte color for {}", TContext::image_type);
|
||||
context.state = TContext::Error;
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -36,14 +36,14 @@ namespace HTTP {
|
|||
|
||||
static ByteBuffer handle_content_encoding(const ByteBuffer& buf, const String& content_encoding)
|
||||
{
|
||||
dbgln<JOB_DEBUG>("Job::handle_content_encoding: buf has content_encoding={}", content_encoding);
|
||||
dbgln_if(JOB_DEBUG, "Job::handle_content_encoding: buf has content_encoding={}", content_encoding);
|
||||
|
||||
if (content_encoding == "gzip") {
|
||||
if (!Core::Gzip::is_compressed(buf)) {
|
||||
dbgln("Job::handle_content_encoding: buf is not gzip compressed!");
|
||||
}
|
||||
|
||||
dbgln<JOB_DEBUG>("Job::handle_content_encoding: buf is gzip compressed!");
|
||||
dbgln_if(JOB_DEBUG, "Job::handle_content_encoding: buf is gzip compressed!");
|
||||
|
||||
auto uncompressed = Core::Gzip::decompress(buf);
|
||||
if (!uncompressed.has_value()) {
|
||||
|
@ -77,7 +77,7 @@ void Job::flush_received_buffers()
|
|||
{
|
||||
if (!m_can_stream_response || m_buffered_size == 0)
|
||||
return;
|
||||
dbgln<JOB_DEBUG>("Job: Flushing received buffers: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
|
||||
dbgln_if(JOB_DEBUG, "Job: Flushing received buffers: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
|
||||
for (size_t i = 0; i < m_received_buffers.size(); ++i) {
|
||||
auto& payload = m_received_buffers[i];
|
||||
auto written = do_write(payload);
|
||||
|
@ -92,7 +92,7 @@ void Job::flush_received_buffers()
|
|||
payload = payload.slice(written, payload.size() - written);
|
||||
break;
|
||||
}
|
||||
dbgln<JOB_DEBUG>("Job: Flushing received buffers done: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
|
||||
dbgln_if(JOB_DEBUG, "Job: Flushing received buffers done: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
|
||||
}
|
||||
|
||||
void Job::on_socket_connected()
|
||||
|
@ -198,10 +198,10 @@ void Job::on_socket_connected()
|
|||
m_headers.set(name, value);
|
||||
if (name.equals_ignoring_case("Content-Encoding")) {
|
||||
// Assume that any content-encoding means that we can't decode it as a stream :(
|
||||
dbgln<JOB_DEBUG>("Content-Encoding {} detected, cannot stream output :(", value);
|
||||
dbgln_if(JOB_DEBUG, "Content-Encoding {} detected, cannot stream output :(", value);
|
||||
m_can_stream_response = false;
|
||||
}
|
||||
dbgln<JOB_DEBUG>("Job: [{}] = '{}'", name, value);
|
||||
dbgln_if(JOB_DEBUG, "Job: [{}] = '{}'", name, value);
|
||||
return;
|
||||
}
|
||||
ASSERT(m_state == State::InBody);
|
||||
|
@ -216,7 +216,7 @@ void Job::on_socket_connected()
|
|||
// read size
|
||||
auto size_data = read_line(PAGE_SIZE);
|
||||
auto size_lines = size_data.view().lines();
|
||||
dbgln<JOB_DEBUG>("Job: Received a chunk with size '{}'", size_data);
|
||||
dbgln_if(JOB_DEBUG, "Job: Received a chunk with size '{}'", size_data);
|
||||
if (size_lines.size() == 0) {
|
||||
dbgln("Job: Reached end of stream");
|
||||
finish_up();
|
||||
|
@ -239,26 +239,26 @@ void Job::on_socket_connected()
|
|||
m_current_chunk_total_size = 0;
|
||||
m_current_chunk_remaining_size = 0;
|
||||
|
||||
dbgln<JOB_DEBUG>("Job: Received the last chunk with extensions '{}'", size_string.substring_view(1, size_string.length() - 1));
|
||||
dbgln_if(JOB_DEBUG, "Job: Received the last chunk with extensions '{}'", size_string.substring_view(1, size_string.length() - 1));
|
||||
} else {
|
||||
m_current_chunk_total_size = size;
|
||||
m_current_chunk_remaining_size = size;
|
||||
read_size = size;
|
||||
|
||||
dbgln<JOB_DEBUG>("Job: Chunk of size '{}' started", size);
|
||||
dbgln_if(JOB_DEBUG, "Job: Chunk of size '{}' started", size);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
read_size = remaining;
|
||||
|
||||
dbgln<JOB_DEBUG>("Job: Resuming chunk with '{}' bytes left over", remaining);
|
||||
dbgln_if(JOB_DEBUG, "Job: Resuming chunk with '{}' bytes left over", remaining);
|
||||
}
|
||||
} else {
|
||||
auto transfer_encoding = m_headers.get("Transfer-Encoding");
|
||||
if (transfer_encoding.has_value()) {
|
||||
auto encoding = transfer_encoding.value();
|
||||
|
||||
dbgln<JOB_DEBUG>("Job: This content has transfer encoding '{}'", encoding);
|
||||
dbgln_if(JOB_DEBUG, "Job: This content has transfer encoding '{}'", encoding);
|
||||
if (encoding.equals_ignoring_case("chunked")) {
|
||||
m_current_chunk_remaining_size = -1;
|
||||
goto read_chunk_size;
|
||||
|
@ -289,9 +289,9 @@ void Job::on_socket_connected()
|
|||
if (m_current_chunk_remaining_size.has_value()) {
|
||||
auto size = m_current_chunk_remaining_size.value() - payload.size();
|
||||
|
||||
dbgln<JOB_DEBUG>("Job: We have {} bytes left over in this chunk", size);
|
||||
dbgln_if(JOB_DEBUG, "Job: We have {} bytes left over in this chunk", size);
|
||||
if (size == 0) {
|
||||
dbgln<JOB_DEBUG>("Job: Finished a chunk of {} bytes", m_current_chunk_total_size.value());
|
||||
dbgln_if(JOB_DEBUG, "Job: Finished a chunk of {} bytes", m_current_chunk_total_size.value());
|
||||
|
||||
if (m_current_chunk_total_size.value() == 0) {
|
||||
m_state = State::Trailers;
|
||||
|
|
|
@@ -107,7 +107,7 @@ void SyntaxHighlighter::rehighlight(Gfx::Palette palette)
spans.append(span);
advance_position(str[str.length() - 1]);

dbgln<SYNTAX_HIGHLIGHTING_DEBUG>("{}{} @ '{}' {}:{} - {}:{}",
dbgln_if(SYNTAX_HIGHLIGHTING_DEBUG, "{}{} @ '{}' {}:{} - {}:{}",
token.name(),
is_trivia ? " (trivia)" : "",
token.value(),
@@ -182,7 +182,7 @@ OwnPtr<Table> Table::parse(Vector<StringView>::ConstIterator& lines)
size_t relative_width = delimiter.length();
for (auto ch : delimiter) {
if (ch != '-') {
dbgln<MARKDOWN_DEBUG>("Invalid character _{}_ in table heading delimiter (ignored)", ch);
dbgln_if(MARKDOWN_DEBUG, "Invalid character _{}_ in table heading delimiter (ignored)", ch);
--relative_width;
}
}
@@ -148,7 +148,7 @@ RegexResult Matcher<Parser>::match(const Vector<RegexStringView> views, Optional

for (auto& view : views) {
input.view = view;
dbgln<REGEX_DEBUG>("[match] Starting match with view ({}): _{}_", view.length(), view);
dbgln_if(REGEX_DEBUG, "[match] Starting match with view ({}): _{}_", view.length(), view);

auto view_length = view.length();
size_t view_index = m_pattern.start_offset;
@@ -112,7 +112,7 @@ ssize_t TLSv12::handle_hello(ReadonlyBytes buffer, WritePacketStage& write_packe
return (i8)Error::NoCommonCipher;
}
m_context.cipher = cipher;
dbgln<TLS_DEBUG>("Cipher: {}", (u16)cipher);
dbgln_if(TLS_DEBUG, "Cipher: {}", (u16)cipher);

// The handshake hash function is _always_ SHA256
m_context.handshake_hash.initialize(Crypto::Hash::HashKind::SHA256);

@@ -146,7 +146,7 @@ ssize_t TLSv12::handle_hello(ReadonlyBytes buffer, WritePacketStage& write_packe
u16 extension_length = AK::convert_between_host_and_network_endian(*(const u16*)buffer.offset_pointer(res));
res += 2;

dbgln<TLS_DEBUG>("extension {} with length {}", (u16)extension_type, extension_length);
dbgln_if(TLS_DEBUG, "extension {} with length {}", (u16)extension_type, extension_length);

if (extension_length) {
if (buffer.size() - res < extension_length) {

@@ -218,12 +218,12 @@ ssize_t TLSv12::handle_finished(ReadonlyBytes buffer, WritePacketStage& write_pa
u32 size = buffer[0] * 0x10000 + buffer[1] * 0x100 + buffer[2];

if (size < 12) {
dbgln<TLS_DEBUG>("finished packet smaller than minimum size: {}", size);
dbgln_if(TLS_DEBUG, "finished packet smaller than minimum size: {}", size);
return (i8)Error::BrokenPacket;
}

if (size < buffer.size() - index) {
dbgln<TLS_DEBUG>("not enough data after length: {} > {}", size, buffer.size() - index);
dbgln_if(TLS_DEBUG, "not enough data after length: {} > {}", size, buffer.size() - index);
return (i8)Error::NeedMoreData;
}

@@ -324,7 +324,7 @@ ssize_t TLSv12::handle_payload(ReadonlyBytes vbuffer)
auto type = buffer[0];
auto write_packets { WritePacketStage::Initial };
size_t payload_size = buffer[1] * 0x10000 + buffer[2] * 0x100 + buffer[3] + 3;
dbgln<TLS_DEBUG>("payload size: {} buffer length: {}", payload_size, buffer_length);
dbgln_if(TLS_DEBUG, "payload size: {} buffer length: {}", payload_size, buffer_length);
if (payload_size + 1 > buffer_length)
return (i8)Error::NeedMoreData;
@@ -39,12 +39,12 @@ void TLSv12::write_packet(ByteBuffer& packet)
m_context.tls_buffer.append(packet.data(), packet.size());
if (m_context.connection_status > ConnectionStatus::Disconnected) {
if (!m_has_scheduled_write_flush) {
dbgln<TLS_DEBUG>("Scheduling write of {}", m_context.tls_buffer.size());
dbgln_if(TLS_DEBUG, "Scheduling write of {}", m_context.tls_buffer.size());
deferred_invoke([this](auto&) { write_into_socket(); });
m_has_scheduled_write_flush = true;
} else {
// multiple packet are available, let's flush some out
dbgln<TLS_DEBUG>("Flushing scheduled write of {}", m_context.tls_buffer.size());
dbgln_if(TLS_DEBUG, "Flushing scheduled write of {}", m_context.tls_buffer.size());
write_into_socket();
// the deferred invoke is still in place
m_has_scheduled_write_flush = true;

@@ -230,7 +230,7 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer)
size_t header_size = res;
ssize_t payload_res = 0;

dbgln<TLS_DEBUG>("buffer size: {}", buffer.size());
dbgln_if(TLS_DEBUG, "buffer size: {}", buffer.size());

if (buffer.size() < 5) {
return (i8)Error::NeedMoreData;

@@ -249,15 +249,15 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer)
buffer_position += 2;

auto length = AK::convert_between_host_and_network_endian(*(const u16*)buffer.offset_pointer(buffer_position));
dbgln<TLS_DEBUG>("record length: {} at offset: {}", length, buffer_position);
dbgln_if(TLS_DEBUG, "record length: {} at offset: {}", length, buffer_position);
buffer_position += 2;

if (buffer_position + length > buffer.size()) {
dbgln<TLS_DEBUG>("record length more than what we have: {}", buffer.size());
dbgln_if(TLS_DEBUG, "record length more than what we have: {}", buffer.size());
return (i8)Error::NeedMoreData;
}

dbgln<TLS_DEBUG>("message type: {}, length: {}", (u8)type, length);
dbgln_if(TLS_DEBUG, "message type: {}, length: {}", (u8)type, length);
auto plain = buffer.slice(buffer_position, buffer.size() - buffer_position);

ByteBuffer decrypted;

@@ -389,7 +389,7 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer)
auto packet = build_alert(true, (u8)AlertDescription::UnexpectedMessage);
write_packet(packet);
} else {
dbgln<TLS_DEBUG>("application data message of size {}", plain.size());
dbgln_if(TLS_DEBUG, "application data message of size {}", plain.size());

m_context.application_buffer.append(plain.data(), plain.size());
}

@@ -414,7 +414,7 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer)
}
break;
case MessageType::Alert:
dbgln<TLS_DEBUG>("alert message of length {}", length);
dbgln_if(TLS_DEBUG, "alert message of length {}", length);
if (length >= 2) {
if constexpr (TLS_DEBUG)
print_buffer(plain);
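The handle_message() hunks above walk a buffer of TLS records, where every record begins with a 5-byte header: one content-type byte, a two-byte protocol version, and a two-byte big-endian payload length (the size_offset of 3 and header_size of 5 in the consume() hunk further down correspond to that layout). A minimal standalone sketch of that header parse, assuming nothing about LibTLS itself (RecordHeader and parse_record_header are hypothetical names), might be:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Hypothetical sketch of the 5-byte TLS record header:
    // 1 byte content type, 2 bytes protocol version, 2 bytes big-endian length.
    struct RecordHeader {
        uint8_t type;
        uint16_t version;
        uint16_t length;
    };

    static bool parse_record_header(const uint8_t* buffer, size_t buffer_size, RecordHeader& out)
    {
        if (buffer_size < 5)
            return false; // need more data, as in the diff's Error::NeedMoreData path
        out.type = buffer[0];
        out.version = static_cast<uint16_t>((buffer[1] << 8) | buffer[2]);
        out.length = static_cast<uint16_t>((buffer[3] << 8) | buffer[4]);
        // The record is complete only once the whole payload is buffered as well.
        return buffer_size >= 5u + out.length;
    }

    int main()
    {
        // 0x16 = handshake, version 3.3 (TLS 1.2), length 4, then 4 payload bytes.
        const uint8_t sample[] = { 0x16, 0x03, 0x03, 0x00, 0x04, 0xde, 0xad, 0xbe, 0xef };
        RecordHeader header {};
        if (parse_record_header(sample, sizeof(sample), header))
            printf("type=%u version=%04x length=%u\n", header.type, header.version, header.length);
        return 0;
    }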
@@ -174,7 +174,7 @@ void TLSv12::read_from_socket()

void TLSv12::write_into_socket()
{
dbgln<TLS_DEBUG>("Flushing cached records: {} established? {}", m_context.tls_buffer.size(), is_established());
dbgln_if(TLS_DEBUG, "Flushing cached records: {} established? {}", m_context.tls_buffer.size(), is_established());

m_has_scheduled_write_flush = false;
if (!check_connection_state(false))

@@ -199,7 +199,7 @@ bool TLSv12::check_connection_state(bool read)
m_context.connection_finished = true;
}
if (m_context.critical_error) {
dbgln<TLS_DEBUG>("CRITICAL ERROR {} :(", m_context.critical_error);
dbgln_if(TLS_DEBUG, "CRITICAL ERROR {} :(", m_context.critical_error);

if (on_tls_error)
on_tls_error((AlertDescription)m_context.critical_error);

@@ -211,7 +211,7 @@ bool TLSv12::check_connection_state(bool read)
on_tls_finished();
}
if (m_context.tls_buffer.size()) {
dbgln<TLS_DEBUG>("connection closed without finishing data transfer, {} bytes still in buffer and {} bytes in application buffer",
dbgln_if(TLS_DEBUG, "connection closed without finishing data transfer, {} bytes still in buffer and {} bytes in application buffer",
m_context.tls_buffer.size(),
m_context.application_buffer.size());
} else {

@@ -247,7 +247,7 @@ bool TLSv12::flush()
}
if (m_context.send_retries++ == 10) {
// drop the records, we can't send
dbgln<TLS_DEBUG>("Dropping {} bytes worth of TLS records as max retries has been reached", write_buffer().size());
dbgln_if(TLS_DEBUG, "Dropping {} bytes worth of TLS records as max retries has been reached", write_buffer().size());
write_buffer().clear();
m_context.send_retries = 0;
}
@@ -406,7 +406,7 @@ static ssize_t _parse_asn1(const Context& context, Certificate& cert, const u8*
hash.initialize(Crypto::Hash::HashKind::SHA512);
break;
default:
dbgln<TLS_DEBUG>("Unsupported hash mode {}", (u32)cert.key_algorithm);
dbgln_if(TLS_DEBUG, "Unsupported hash mode {}", (u32)cert.key_algorithm);
// fallback to md5, it will fail later
hash.initialize(Crypto::Hash::HashKind::MD5);
break;

@@ -436,7 +436,7 @@ Optional<Certificate> TLSv12::parse_asn1(ReadonlyBytes buffer, bool) const

_parse_asn1(m_context, cert, buffer.data(), buffer.size(), 1, fields, nullptr, 0, nullptr, nullptr);

dbgln<TLS_DEBUG>("Certificate issued for {} by {}", cert.subject, cert.issuer_subject);
dbgln_if(TLS_DEBUG, "Certificate issued for {} by {}", cert.subject, cert.issuer_subject);

return cert;
}

@@ -454,7 +454,7 @@ ssize_t TLSv12::handle_certificate(ReadonlyBytes buffer)

u32 certificate_total_length = buffer[0] * 0x10000 + buffer[1] * 0x100 + buffer[2];

dbgln<TLS_DEBUG>("total length: {}", certificate_total_length);
dbgln_if(TLS_DEBUG, "total length: {}", certificate_total_length);

if (certificate_total_length <= 4)
return 3 * certificate_total_length;

@@ -549,7 +549,7 @@ void TLSv12::consume(ReadonlyBytes record)
return;
}

dbgln<TLS_DEBUG>("Consuming {} bytes", record.size());
dbgln_if(TLS_DEBUG, "Consuming {} bytes", record.size());

m_context.message_buffer.append(record.data(), record.size());

@@ -559,12 +559,12 @@ void TLSv12::consume(ReadonlyBytes record)
size_t size_offset { 3 }; // read the common record header
size_t header_size { 5 };

dbgln<TLS_DEBUG>("message buffer length {}", buffer_length);
dbgln_if(TLS_DEBUG, "message buffer length {}", buffer_length);

while (buffer_length >= 5) {
auto length = AK::convert_between_host_and_network_endian(*(u16*)m_context.message_buffer.offset_pointer(index + size_offset)) + header_size;
if (length > buffer_length) {
dbgln<TLS_DEBUG>("Need more data: {} > {}", length, buffer_length);
dbgln_if(TLS_DEBUG, "Need more data: {} > {}", length, buffer_length);
break;
}
auto consumed = handle_message(m_context.message_buffer.bytes().slice(index, length));
@@ -85,9 +85,9 @@ void HTMLScriptElement::execute_script()
document().set_current_script({}, nullptr);

if (m_from_an_external_file)
dbgln<HTML_SCRIPT_DEBUG>("HTMLScriptElement: Running script {}", attribute(HTML::AttributeNames::src));
dbgln_if(HTML_SCRIPT_DEBUG, "HTMLScriptElement: Running script {}", attribute(HTML::AttributeNames::src));
else
dbgln<HTML_SCRIPT_DEBUG>("HTMLScriptElement: Running inline script");
dbgln_if(HTML_SCRIPT_DEBUG, "HTMLScriptElement: Running inline script");

document().run_javascript(m_script_source);
@@ -141,7 +141,7 @@ void HTMLDocumentParser::run(const URL& url)
break;
auto& token = optional_token.value();

dbgln<PARSER_DEBUG>("[{}] {}", insertion_mode_name(), token.to_string());
dbgln_if(PARSER_DEBUG, "[{}] {}", insertion_mode_name(), token.to_string());

// FIXME: If the adjusted current node is a MathML text integration point and the token is a start tag whose tag name is neither "mglyph" nor "malignmark"
// FIXME: If the adjusted current node is a MathML text integration point and the token is a character token

@@ -157,7 +157,7 @@ void HTMLDocumentParser::run(const URL& url)
}

if (m_stop_parsing) {
dbgln<PARSER_DEBUG>("Stop parsing{}! :^)", m_parsing_fragment ? " fragment" : "");
dbgln_if(PARSER_DEBUG, "Stop parsing{}! :^)", m_parsing_fragment ? " fragment" : "");
break;
}
}
@@ -221,7 +221,7 @@ Optional<u32> HTMLTokenizer::next_code_point()
return {};
m_prev_utf8_iterator = m_utf8_iterator;
++m_utf8_iterator;
dbgln<TOKENIZER_TRACE_DEBUG>("(Tokenizer) Next code_point: {}", (char)*m_prev_utf8_iterator);
dbgln_if(TOKENIZER_TRACE_DEBUG, "(Tokenizer) Next code_point: {}", (char)*m_prev_utf8_iterator);
return *m_prev_utf8_iterator;
}

@@ -2618,17 +2618,17 @@ HTMLTokenizer::HTMLTokenizer(const StringView& input, const String& encoding)

void HTMLTokenizer::will_switch_to([[maybe_unused]] State new_state)
{
dbgln<TOKENIZER_TRACE_DEBUG>("[{}] Switch to {}", state_name(m_state), state_name(new_state));
dbgln_if(TOKENIZER_TRACE_DEBUG, "[{}] Switch to {}", state_name(m_state), state_name(new_state));
}

void HTMLTokenizer::will_reconsume_in([[maybe_unused]] State new_state)
{
dbgln<TOKENIZER_TRACE_DEBUG>("[{}] Reconsume in {}", state_name(m_state), state_name(new_state));
dbgln_if(TOKENIZER_TRACE_DEBUG, "[{}] Reconsume in {}", state_name(m_state), state_name(new_state));
}

void HTMLTokenizer::switch_to(Badge<HTMLDocumentParser>, State new_state)
{
dbgln<TOKENIZER_TRACE_DEBUG>("[{}] Parser switches tokenizer state to {}", state_name(m_state), state_name(new_state));
dbgln_if(TOKENIZER_TRACE_DEBUG, "[{}] Parser switches tokenizer state to {}", state_name(m_state), state_name(new_state));
m_state = new_state;
}
@@ -100,7 +100,7 @@ void Resource::did_load(Badge<ResourceLoader>, ReadonlyBytes data, const HashMap
m_encoding = encoding_from_content_type(content_type.value());
m_mime_type = mime_type_from_content_type(content_type.value());
} else if (url().protocol() == "data" && !url().data_mime_type().is_empty()) {
dbgln<RESOURCE_DEBUG>("This is a data URL with mime-type _{}_", url().data_mime_type());
dbgln_if(RESOURCE_DEBUG, "This is a data URL with mime-type _{}_", url().data_mime_type());
m_encoding = "utf-8"; // FIXME: This doesn't seem nice.
m_mime_type = url().data_mime_type();
} else {
@@ -86,7 +86,7 @@ RefPtr<Resource> ResourceLoader::load_resource(Resource::Type type, const LoadRe
if (it->value->type() != type) {
dbgln("FIXME: Not using cached resource for {} since there's a type mismatch.", request.url());
} else {
dbgln<CACHE_DEBUG>("Reusing cached resource for: {}", request.url());
dbgln_if(CACHE_DEBUG, "Reusing cached resource for: {}", request.url());
return it->value;
}
}
@@ -60,7 +60,7 @@ void WebContentClient::handle([[maybe_unused]] const Messages::WebContentClient:

void WebContentClient::handle(const Messages::WebContentClient::DidInvalidateContentRect& message)
{
dbgln<SPAM_DEBUG>("handle: WebContentClient::DidInvalidateContentRect! content_rect={}", message.content_rect());
dbgln_if(SPAM_DEBUG, "handle: WebContentClient::DidInvalidateContentRect! content_rect={}", message.content_rect());

// FIXME: Figure out a way to coalesce these messages to reduce unnecessary painting
m_view.notify_server_did_invalidate_content_rect({}, message.content_rect());

@@ -76,25 +76,25 @@ void WebContentClient::handle(const Messages::WebContentClient::DidChangeSelecti

void WebContentClient::handle(const Messages::WebContentClient::DidLayout& message)
{
dbgln<SPAM_DEBUG>("handle: WebContentClient::DidLayout! content_size={}", message.content_size());
dbgln_if(SPAM_DEBUG, "handle: WebContentClient::DidLayout! content_size={}", message.content_size());
m_view.notify_server_did_layout({}, message.content_size());
}

void WebContentClient::handle(const Messages::WebContentClient::DidChangeTitle& message)
{
dbgln<SPAM_DEBUG>("handle: WebContentClient::DidChangeTitle! title={}", message.title());
dbgln_if(SPAM_DEBUG, "handle: WebContentClient::DidChangeTitle! title={}", message.title());
m_view.notify_server_did_change_title({}, message.title());
}

void WebContentClient::handle(const Messages::WebContentClient::DidRequestScrollIntoView& message)
{
dbgln<SPAM_DEBUG>("handle: WebContentClient::DidRequestScrollIntoView! rect={}", message.rect());
dbgln_if(SPAM_DEBUG, "handle: WebContentClient::DidRequestScrollIntoView! rect={}", message.rect());
m_view.notify_server_did_request_scroll_into_view({}, message.rect());
}

void WebContentClient::handle(const Messages::WebContentClient::DidHoverLink& message)
{
dbgln<SPAM_DEBUG>("handle: WebContentClient::DidHoverLink! url={}", message.url());
dbgln_if(SPAM_DEBUG, "handle: WebContentClient::DidHoverLink! url={}", message.url());
m_view.notify_server_did_hover_link({}, message.url());
}
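Every changed pair in the hunks above follows the same shape: the compile-time debug flag moves out of the template argument list of dbgln<FLAG>(...) and becomes the first ordinary argument of dbgln_if(FLAG, ...). As a rough illustration only of that call shape (debug_log, debug_log_if and JOB_DEBUG_LIKE_FLAG are hypothetical stand-ins, not AK's actual definitions), a minimal sketch could look like this; with a constant-false flag the guarded call reduces to a branch on a constant and is typically optimized out:

    #include <cstdio>
    #include <utility>

    static constexpr bool JOB_DEBUG_LIKE_FLAG = false; // hypothetical compile-time flag

    // Stand-in for dbgln(); just forwards to printf here.
    template<typename... Args>
    void debug_log(const char* format, Args&&... args)
    {
        printf(format, std::forward<Args>(args)...);
        printf("\n");
    }

    // Mirrors the dbgln_if(FLAG, ...) call shape used in the hunks above:
    // the flag is an ordinary first argument instead of a template parameter.
    template<typename... Args>
    void debug_log_if(bool flag_enabled, const char* format, Args&&... args)
    {
        if (flag_enabled)
            debug_log(format, std::forward<Args>(args)...);
    }

    int main()
    {
        debug_log_if(JOB_DEBUG_LIKE_FLAG, "payload size: %d buffer length: %d", 5, 16);
        return 0;
    }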