From 1f9b8f0e7c732ec70a77db7eaf22071f43ef46e9 Mon Sep 17 00:00:00 2001
From: Andreas Kling
Date: Wed, 7 Aug 2019 20:43:54 +0200
Subject: [PATCH] Kernel: Don't create Function objects in the scheduling code

Each Function is a heap allocation, so let's make an effort to avoid
doing that during scheduling.

Because of header dependencies, I had to put the runnables iteration
helpers in Thread.h, which is a bit meh but at least this cuts out all
the kmalloc() traffic in pick_next().
---
 Kernel/Scheduler.cpp | 44 +-------------------------------------------
 Kernel/Scheduler.h   | 20 ++++----------------
 Kernel/Thread.h      | 44 ++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 49 insertions(+), 59 deletions(-)

diff --git a/Kernel/Scheduler.cpp b/Kernel/Scheduler.cpp
index 9f03e68f4b..30e06d2403 100644
--- a/Kernel/Scheduler.cpp
+++ b/Kernel/Scheduler.cpp
@@ -6,21 +6,7 @@
 #include
 #include
 
-struct SchedulerData {
-    typedef IntrusiveList ThreadList;
-
-    ThreadList m_runnable_threads;
-    ThreadList m_nonrunnable_threads;
-
-    ThreadList& thread_list_for_state(Thread::State state)
-    {
-        if (Thread::is_runnable_state(state))
-            return m_runnable_threads;
-        return m_nonrunnable_threads;
-    }
-};
-
-static SchedulerData* g_scheduler_data;
+SchedulerData* g_scheduler_data;
 
 void Scheduler::init_thread(Thread& thread)
 {
@@ -37,34 +23,6 @@ void Scheduler::update_state_for_thread(Thread& thread)
     list.append(thread);
 }
 
-IterationDecision Scheduler::for_each_runnable_func(Function<IterationDecision(Thread&)>&& callback)
-{
-    ASSERT_INTERRUPTS_DISABLED();
-    auto& tl = g_scheduler_data->m_runnable_threads;
-    for (auto it = tl.begin(); it != tl.end();) {
-        auto thread = *it;
-        it = ++it;
-        if (callback(*thread) == IterationDecision::Break)
-            return IterationDecision::Break;
-    }
-
-    return IterationDecision::Continue;
-}
-
-IterationDecision Scheduler::for_each_nonrunnable_func(Function<IterationDecision(Thread&)>&& callback)
-{
-    ASSERT_INTERRUPTS_DISABLED();
-    auto& tl = g_scheduler_data->m_nonrunnable_threads;
-    for (auto it = tl.begin(); it != tl.end();) {
-        auto thread = *it;
-        it = ++it;
-        if (callback(*thread) == IterationDecision::Break)
-            return IterationDecision::Break;
-    }
-
-    return IterationDecision::Continue;
-}
-
 //#define LOG_EVERY_CONTEXT_SWITCH
 //#define SCHEDULER_DEBUG
 //#define SCHEDULER_RUNNABLE_DEBUG
diff --git a/Kernel/Scheduler.h b/Kernel/Scheduler.h
index 5363e00d9f..bbde794c1c 100644
--- a/Kernel/Scheduler.h
+++ b/Kernel/Scheduler.h
@@ -8,11 +8,13 @@
 class Process;
 class Thread;
 struct RegisterDump;
+struct SchedulerData;
 
 extern Thread* current;
 extern Thread* g_last_fpu_thread;
 extern Thread* g_finalizer;
 extern u64 g_uptime;
+extern SchedulerData* g_scheduler_data;
 
 class Scheduler {
 public:
@@ -30,28 +32,14 @@ public:
     static void beep();
 
     template<typename Callback>
-    static inline IterationDecision for_each_runnable(Callback callback)
-    {
-        return for_each_runnable_func([callback](Thread& thread) {
-            return callback(thread);
-        });
-    }
+    static inline IterationDecision for_each_runnable(Callback);
 
     template<typename Callback>
-    static inline IterationDecision for_each_nonrunnable(Callback callback)
-    {
-        return for_each_nonrunnable_func([callback](Thread& thread) {
-            return callback(thread);
-        });
-    }
+    static inline IterationDecision for_each_nonrunnable(Callback);
 
     static void init_thread(Thread& thread);
     static void update_state_for_thread(Thread& thread);
 
 private:
     static void prepare_for_iret_to_new_process();
-    static IterationDecision for_each_runnable_func(Function<IterationDecision(Thread&)>&& callback);
-    static IterationDecision for_each_nonrunnable_func(Function<IterationDecision(Thread&)>&& callback);
-
 };
-
diff --git a/Kernel/Thread.h b/Kernel/Thread.h
index 89a5ac29a9..18bfbb7bf6 100644
--- a/Kernel/Thread.h
+++ b/Kernel/Thread.h
@@ -384,3 +384,47 @@ inline const LogStream& operator<<(const LogStream& stream, const Thread& value)
 {
     return stream << "Thread{" << &value << "}(" << value.pid() << ":" << value.tid() << ")";
 }
+
+struct SchedulerData {
+    typedef IntrusiveList ThreadList;
+
+    ThreadList m_runnable_threads;
+    ThreadList m_nonrunnable_threads;
+
+    ThreadList& thread_list_for_state(Thread::State state)
+    {
+        if (Thread::is_runnable_state(state))
+            return m_runnable_threads;
+        return m_nonrunnable_threads;
+    }
+};
+
+template<typename Callback>
+inline IterationDecision Scheduler::for_each_runnable(Callback callback)
+{
+    ASSERT_INTERRUPTS_DISABLED();
+    auto& tl = g_scheduler_data->m_runnable_threads;
+    for (auto it = tl.begin(); it != tl.end();) {
+        auto thread = *it;
+        it = ++it;
+        if (callback(*thread) == IterationDecision::Break)
+            return IterationDecision::Break;
+    }
+
+    return IterationDecision::Continue;
+}
+
+template<typename Callback>
+inline IterationDecision Scheduler::for_each_nonrunnable(Callback callback)
+{
+    ASSERT_INTERRUPTS_DISABLED();
+    auto& tl = g_scheduler_data->m_nonrunnable_threads;
+    for (auto it = tl.begin(); it != tl.end();) {
+        auto thread = *it;
+        it = ++it;
+        if (callback(*thread) == IterationDecision::Break)
+            return IterationDecision::Break;
+    }
+
+    return IterationDecision::Continue;
+}
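
Note (not part of the patch): the sketch below illustrates the general technique the
commit applies, using std::function and std::list from the standard library as
stand-ins for AK::Function and the kernel's intrusive thread lists; DemoThread,
for_each_erased and for_each_inline are hypothetical names. Per the commit message,
each Function is a heap allocation; taking the callback as a template parameter
instead lets each call site instantiate the iteration loop with the concrete lambda
type, so the callback is invoked directly and no type-erased wrapper needs to be
allocated on the scheduler's hot path.

// Illustration only -- standard-library stand-ins, not the kernel's types.
#include <cstdio>
#include <functional>
#include <list>

enum class IterationDecision { Continue, Break };

struct DemoThread {
    int tid;
};

static std::list<DemoThread> g_demo_threads;

// Type-erased variant: the callable is wrapped in a std::function, which may
// heap-allocate to store the lambda's captures (AK::Function always did,
// per the commit message).
static IterationDecision for_each_erased(std::function<IterationDecision(DemoThread&)> callback)
{
    for (auto& thread : g_demo_threads) {
        if (callback(thread) == IterationDecision::Break)
            return IterationDecision::Break;
    }
    return IterationDecision::Continue;
}

// Template variant (the approach the patch takes): the loop is stamped out for
// the concrete lambda type, so the callback is called directly and no wrapper
// object has to be allocated.
template<typename Callback>
static IterationDecision for_each_inline(Callback callback)
{
    for (auto& thread : g_demo_threads) {
        if (callback(thread) == IterationDecision::Break)
            return IterationDecision::Break;
    }
    return IterationDecision::Continue;
}

int main()
{
    g_demo_threads = { { 1 }, { 2 }, { 3 } };
    int wanted_tid = 2;

    // Same call-site shape as Scheduler::for_each_runnable(): return Break to stop early.
    for_each_erased([&](DemoThread& t) {
        return t.tid == wanted_tid ? IterationDecision::Break : IterationDecision::Continue;
    });

    for_each_inline([&](DemoThread& t) {
        if (t.tid == wanted_tid) {
            std::printf("found tid %d\n", t.tid);
            return IterationDecision::Break;
        }
        return IterationDecision::Continue;
    });
    return 0;
}

The trade-off is that a template body must be visible to every call site, which is
why the patch moves the iteration helpers into Thread.h instead of keeping them as
out-of-line functions in Scheduler.cpp.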