Everywhere: Run clang-format

commit 086969277e (parent 0376c127f6)

1665 changed files with 8479 additions and 8479 deletions
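The change is purely one of token order: clang-format moves cv-qualifiers to the right of the type ("east const"), which does not alter any declared type. A minimal stand-alone sketch (not part of the commit) showing that the before/after spellings in the hunks below are equivalent:

    // equivalence_sketch.cpp -- illustrative only, not from the repository
    #include <type_traits>

    // "const char*" and "char const*" both mean: pointer to const char.
    static_assert(std::is_same_v<const char*, char const*>);

    // The volatile moves in the hunks below are likewise pure reordering.
    static_assert(std::is_same_v<volatile unsigned*, unsigned volatile*>);

    int main() { }

Accordingly, the 8479 additions mirror the 8479 deletions line for line; no behavior changes.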
@@ -11,7 +11,7 @@
 using namespace Kernel;
 
-void __assertion_failed(const char* msg, const char* file, unsigned line, const char* func)
+void __assertion_failed(char const* msg, char const* file, unsigned line, char const* func)
 {
     asm volatile("cli");
     critical_dmesgln("ASSERTION FAILED: {}", msg);
@@ -174,7 +174,7 @@ static EntropySource s_entropy_source_interrupts { EntropySource::Static::Interr
 // clang-format on
 
-static void dump(const RegisterState& regs)
+static void dump(RegisterState const& regs)
 {
 #if ARCH(I386)
     u16 ss;
@@ -522,7 +522,7 @@ void handle_interrupt(TrapFrame* trap)
     handler->eoi();
 }
 
-const DescriptorTablePointer& get_idtr()
+DescriptorTablePointer const& get_idtr()
 {
     return s_idtr;
 }
@@ -38,7 +38,7 @@ READONLY_AFTER_INIT FPUState Processor::s_clean_fpu_state;
 READONLY_AFTER_INIT static ProcessorContainer s_processors {};
 READONLY_AFTER_INIT Atomic<u32> Processor::g_total_processors;
-READONLY_AFTER_INIT static volatile bool s_smp_enabled;
+READONLY_AFTER_INIT static bool volatile s_smp_enabled;
 
 static Atomic<ProcessorMessage*> s_message_pool;
 Atomic<u32> Processor::s_idle_cpu_mask { 0 };
@@ -775,7 +775,7 @@ void Processor::flush_gdt()
         : "memory");
 }
 
-const DescriptorTablePointer& Processor::get_gdtr()
+DescriptorTablePointer const& Processor::get_gdtr()
 {
     return m_gdtr;
 }
@@ -53,7 +53,7 @@ ALWAYS_INLINE bool validate_canonical_address(size_t address)
 }
 
 CODE_SECTION(".text.safemem")
-NEVER_INLINE bool safe_memcpy(void* dest_ptr, const void* src_ptr, size_t n, void*& fault_at)
+NEVER_INLINE bool safe_memcpy(void* dest_ptr, void const* src_ptr, size_t n, void*& fault_at)
 {
     fault_at = nullptr;
     size_t dest = (size_t)dest_ptr;
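The bool result and fault_at out-parameter in the signature above suggest the calling convention for these fault-tolerant helpers: the copy reports success, and on failure the faulting address is handed back to the caller. A hedged usage sketch under that assumption; the stand-in below simply wraps memcpy and never faults, unlike the kernel's real implementation:

    // safe_memcpy_usage_sketch.cpp -- illustrative stand-in, not the kernel code
    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    // Same shape as the kernel helper: copy n bytes, return success,
    // and (in the real version) report the faulting address via fault_at.
    bool safe_memcpy(void* dest_ptr, void const* src_ptr, size_t n, void*& fault_at)
    {
        fault_at = nullptr;
        std::memcpy(dest_ptr, src_ptr, n); // stand-in: assumes the access never faults
        return true;
    }

    int main()
    {
        char src[8] = "hello";
        char dest[8] = {};
        void* fault_at = nullptr;
        if (!safe_memcpy(dest, src, sizeof(src), fault_at))
            std::printf("fault at %p\n", fault_at);
        else
            std::printf("copied: %s\n", dest);
    }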
|
@@ -115,7 +115,7 @@ NEVER_INLINE bool safe_memcpy(void* dest_ptr, const void* src_ptr, size_t n, voi
 }
 
 CODE_SECTION(".text.safemem")
-NEVER_INLINE ssize_t safe_strnlen(const char* str, size_t max_n, void*& fault_at)
+NEVER_INLINE ssize_t safe_strnlen(char const* str, size_t max_n, void*& fault_at)
 {
     if (!validate_canonical_address((size_t)str)) {
         fault_at = const_cast<char*>(str);
@@ -210,7 +210,7 @@ NEVER_INLINE bool safe_memset(void* dest_ptr, int c, size_t n, void*& fault_at)
 }
 
 CODE_SECTION(".text.safemem.atomic")
-NEVER_INLINE Optional<u32> safe_atomic_fetch_add_relaxed(volatile u32* var, u32 val)
+NEVER_INLINE Optional<u32> safe_atomic_fetch_add_relaxed(u32 volatile* var, u32 val)
 {
     u32 result;
     bool error;
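For readers unfamiliar with the _relaxed suffix on these helpers: they perform atomic operations with relaxed memory ordering, and the Optional<u32> return type suggests that a faulting access yields an empty value instead of a result. A hedged sketch of the underlying fetch-add semantics using the GCC/Clang __atomic builtins, without the kernel's fault handling:

    // relaxed_fetch_add_sketch.cpp -- illustrative only
    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    int main()
    {
        uint32_t counter = 41;
        // Atomically add 1 with relaxed ordering; returns the previous value.
        uint32_t previous = __atomic_fetch_add(&counter, 1u, __ATOMIC_RELAXED);
        std::printf("previous=%" PRIu32 " now=%" PRIu32 "\n", previous, counter);
    }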
|
@@ -230,7 +230,7 @@ NEVER_INLINE Optional<u32> safe_atomic_fetch_add_relaxed(volatile u32* var, u32
 }
 
 CODE_SECTION(".text.safemem.atomic")
-NEVER_INLINE Optional<u32> safe_atomic_exchange_relaxed(volatile u32* var, u32 val)
+NEVER_INLINE Optional<u32> safe_atomic_exchange_relaxed(u32 volatile* var, u32 val)
 {
     u32 result;
     bool error;
@@ -250,7 +250,7 @@ NEVER_INLINE Optional<u32> safe_atomic_exchange_relaxed(volatile u32* var, u32 v
 }
 
 CODE_SECTION(".text.safemem.atomic")
-NEVER_INLINE Optional<u32> safe_atomic_load_relaxed(volatile u32* var)
+NEVER_INLINE Optional<u32> safe_atomic_load_relaxed(u32 volatile* var)
 {
     u32 result;
     bool error;
@@ -270,7 +270,7 @@ NEVER_INLINE Optional<u32> safe_atomic_load_relaxed(volatile u32* var)
 }
 
 CODE_SECTION(".text.safemem.atomic")
-NEVER_INLINE bool safe_atomic_store_relaxed(volatile u32* var, u32 val)
+NEVER_INLINE bool safe_atomic_store_relaxed(u32 volatile* var, u32 val)
 {
     bool error;
     asm volatile(
@@ -287,7 +287,7 @@ NEVER_INLINE bool safe_atomic_store_relaxed(volatile u32* var, u32 val)
 }
 
 CODE_SECTION(".text.safemem.atomic")
-NEVER_INLINE Optional<bool> safe_atomic_compare_exchange_relaxed(volatile u32* var, u32& expected, u32 val)
+NEVER_INLINE Optional<bool> safe_atomic_compare_exchange_relaxed(u32 volatile* var, u32& expected, u32 val)
 {
     // NOTE: accessing expected is NOT protected as it should always point
     // to a valid location in kernel memory!
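The final hunk touches the compare-exchange helper. A hedged sketch of the relaxed compare-exchange semantics implied by its name, expressed with the __atomic_compare_exchange_n builtin; the kernel version presumably also reports a faulting access through its Optional<bool> return, which this sketch does not model:

    // relaxed_cas_sketch.cpp -- illustrative only
    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    int main()
    {
        uint32_t value = 5;
        uint32_t expected = 5;

        // If value == expected, store 9; otherwise load the current value into expected.
        bool swapped = __atomic_compare_exchange_n(
            &value, &expected, 9u,
            /* weak */ false, __ATOMIC_RELAXED, __ATOMIC_RELAXED);

        std::printf("swapped=%d value=%" PRIu32 " expected=%" PRIu32 "\n",
            swapped, value, expected);
    }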