
Move runnable/non-runnable list control entirely over to Scheduler

This way, we can change how the scheduler works without having to change Thread too.
Robin Burchell authored on 2019-07-19 17:21:13 +02:00; committed by Andreas Kling
parent c1ed16c8e8
commit 342f7a6b0f
4 changed files with 102 additions and 75 deletions
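
At a glance the change is purely structural: the runnable and non-runnable thread lists move out of static members of Thread and into a SchedulerData object owned by the Scheduler, and Thread now calls Scheduler::init_thread() and Scheduler::update_state_for_thread() instead of manipulating the lists itself. Below is a minimal, self-contained sketch of that ownership pattern, not SerenityOS code: std::list of pointers stands in for AK::IntrusiveList, IterationDecision and State are pared-down stand-ins, and set_state() takes the Scheduler explicitly rather than reaching it through a global.

// Sketch only: the scheduler, not the thread, owns the runnable/non-runnable
// lists, and threads merely report state changes back to it.
#include <algorithm>
#include <cstdio>
#include <functional>
#include <list>

enum class IterationDecision { Continue, Break };
enum class State { Runnable, Blocked };

class Scheduler;

struct Thread {
    int tid { -1 };
    State state { State::Blocked };

    // Thread no longer touches any thread list itself; see definition below.
    void set_state(Scheduler&, State new_state);
};

class Scheduler {
public:
    // New threads start out on the non-runnable list.
    void init_thread(Thread& thread) { m_nonrunnable.push_back(&thread); }

    // Keep the thread on the list that matches its current state. The kernel's
    // AK::IntrusiveList keeps a thread on at most one list at a time; with
    // std::list we remove it from the other list explicitly.
    void update_state_for_thread(Thread& thread)
    {
        auto& from = (thread.state == State::Runnable) ? m_nonrunnable : m_runnable;
        auto& to = (thread.state == State::Runnable) ? m_runnable : m_nonrunnable;
        from.remove(&thread);
        if (std::find(to.begin(), to.end(), &thread) == to.end())
            to.push_back(&thread);
    }

    // Iterate runnable threads, stopping early when the callback asks to.
    IterationDecision for_each_runnable(const std::function<IterationDecision(Thread&)>& callback)
    {
        for (auto* thread : m_runnable) {
            if (callback(*thread) == IterationDecision::Break)
                return IterationDecision::Break;
        }
        return IterationDecision::Continue;
    }

private:
    std::list<Thread*> m_runnable;
    std::list<Thread*> m_nonrunnable;
};

void Thread::set_state(Scheduler& scheduler, State new_state)
{
    state = new_state;
    scheduler.update_state_for_thread(*this);
}

int main()
{
    Scheduler scheduler;
    Thread thread;
    thread.tid = 1;
    scheduler.init_thread(thread);
    thread.set_state(scheduler, State::Runnable);
    scheduler.for_each_runnable([](Thread& t) {
        std::printf("runnable thread %d\n", t.tid);
        return IterationDecision::Continue;
    });
    return 0;
}

Because the lists now sit behind the Scheduler's interface, the scheduling policy (for example, the number or ordering of the queues) can change later without touching Thread again, which is exactly the rationale in the commit message above.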

Kernel/Scheduler.cpp

@@ -6,6 +6,65 @@
 #include <Kernel/RTC.h>
 #include <Kernel/Scheduler.h>
 
+struct SchedulerData {
+    typedef IntrusiveList<Thread, &Thread::m_runnable_list_node> ThreadList;
+    ThreadList m_runnable_threads;
+    ThreadList m_nonrunnable_threads;
+
+    ThreadList& thread_list_for_state(Thread::State state)
+    {
+        if (Thread::is_runnable_state(state))
+            return m_runnable_threads;
+        return m_nonrunnable_threads;
+    }
+};
+
+static SchedulerData* g_scheduler_data;
+
+void Scheduler::init_thread(Thread& thread)
+{
+    g_scheduler_data->m_nonrunnable_threads.append(thread);
+}
+
+void Scheduler::update_state_for_thread(Thread& thread)
+{
+    auto& list = g_scheduler_data->thread_list_for_state(thread.state());
+    if (list.contains(thread))
+        return;
+    list.append(thread);
+}
+
+IterationDecision Scheduler::for_each_runnable_func(Function<IterationDecision(Thread&)>&& callback)
+{
+    ASSERT_INTERRUPTS_DISABLED();
+    auto& tl = g_scheduler_data->m_runnable_threads;
+    for (auto it = tl.begin(); it != tl.end();) {
+        auto thread = *it;
+        it = ++it;
+        if (callback(*thread) == IterationDecision::Break)
+            return IterationDecision::Break;
+    }
+    return IterationDecision::Continue;
+}
+
+IterationDecision Scheduler::for_each_nonrunnable_func(Function<IterationDecision(Thread&)>&& callback)
+{
+    ASSERT_INTERRUPTS_DISABLED();
+    auto& tl = g_scheduler_data->m_nonrunnable_threads;
+    for (auto it = tl.begin(); it != tl.end();) {
+        auto thread = *it;
+        it = ++it;
+        if (callback(*thread) == IterationDecision::Break)
+            return IterationDecision::Break;
+    }
+    return IterationDecision::Continue;
+}
+
 //#define LOG_EVERY_CONTEXT_SWITCH
 //#define SCHEDULER_DEBUG
 //#define SCHEDULER_RUNNABLE_DEBUG
@@ -261,7 +320,7 @@ bool Scheduler::pick_next()
     auto now_usec = now.tv_usec;
 
     // Check and unblock threads whose wait conditions have been met.
-    Thread::for_each_nonrunnable([&](Thread& thread) {
+    Scheduler::for_each_nonrunnable([&](Thread& thread) {
         thread.consider_unblock(now_sec, now_usec);
         return IterationDecision::Continue;
     });
@@ -330,7 +389,7 @@ bool Scheduler::pick_next()
     });
 #endif
 
-    auto& runnable_list = *Thread::g_runnable_threads;
+    auto& runnable_list = g_scheduler_data->m_runnable_threads;
     if (runnable_list.is_empty())
        return context_switch(s_colonel_process->main_thread());
@@ -488,6 +547,7 @@ Process* Scheduler::colonel()
 
 void Scheduler::initialize()
 {
+    g_scheduler_data = new SchedulerData;
     s_redirection.selector = gdt_alloc_entry();
     initialize_redirection();
     s_colonel_process = Process::create_kernel_process("colonel", nullptr);

Kernel/Scheduler.h

@@ -2,6 +2,8 @@
 
 #include <AK/Assertions.h>
 #include <AK/Types.h>
+#include <AK/Function.h>
+#include <AK/IntrusiveList.h>
 
 class Process;
 class Thread;
@@ -27,6 +29,29 @@ public:
     static bool is_active();
     static void beep();
 
+    template<typename Callback>
+    static inline IterationDecision for_each_runnable(Callback callback)
+    {
+        return for_each_runnable_func([callback](Thread& thread) {
+            return callback(thread);
+        });
+    }
+
+    template<typename Callback>
+    static inline IterationDecision for_each_nonrunnable(Callback callback)
+    {
+        return for_each_nonrunnable_func([callback](Thread& thread) {
+            return callback(thread);
+        });
+    }
+
+    static void init_thread(Thread& thread);
+    static void update_state_for_thread(Thread& thread);
+
 private:
     static void prepare_for_iret_to_new_process();
+
+    static IterationDecision for_each_runnable_func(Function<IterationDecision(Thread&)>&& callback);
+    static IterationDecision for_each_nonrunnable_func(Function<IterationDecision(Thread&)>&& callback);
 };

Kernel/Thread.cpp

@@ -16,9 +16,6 @@ HashTable<Thread*>& thread_table()
     return *table;
 }
 
-Thread::SchedulerThreadList* Thread::g_runnable_threads;
-Thread::SchedulerThreadList* Thread::g_nonrunnable_threads;
-
 static const u32 default_kernel_stack_size = 65536;
 static const u32 default_userspace_stack_size = 65536;
@@ -75,7 +72,7 @@ Thread::Thread(Process& process)
     if (m_process.pid() != 0) {
         InterruptDisabler disabler;
         thread_table().set(this);
-        g_nonrunnable_threads->append(*this);
+        Scheduler::init_thread(*this);
     }
 }
@@ -514,8 +511,6 @@ Thread* Thread::clone(Process& process)
 
 void Thread::initialize()
 {
-    g_runnable_threads = new SchedulerThreadList;
-    g_nonrunnable_threads = new SchedulerThreadList;
     Scheduler::initialize();
 }
@@ -545,15 +540,6 @@ void Thread::set_state(State new_state)
     m_state = new_state;
 
     if (m_process.pid() != 0) {
-        SchedulerThreadList* list = nullptr;
-        if (is_runnable_state(new_state))
-            list = g_runnable_threads;
-        else
-            list = g_nonrunnable_threads;
-
-        if (list->contains(*this))
-            return;
-
-        list->append(*this);
+        Scheduler::update_state_for_thread(*this);
     }
 }

Kernel/Thread.h

@@ -6,6 +6,7 @@
 #include <AK/OwnPtr.h>
 #include <AK/RefPtr.h>
 #include <AK/Vector.h>
+#include <Kernel/Scheduler.h>
 #include <Kernel/Arch/i386/CPU.h>
 #include <Kernel/KResult.h>
 #include <Kernel/UnixTypes.h>
@@ -16,7 +17,6 @@ class Alarm;
 class FileDescription;
 class Process;
 class Region;
-class Thread;
 
 enum class ShouldUnblockThread {
     No = 0,
@@ -299,10 +299,6 @@ public:
     template<typename Callback>
     static IterationDecision for_each_living(Callback);
     template<typename Callback>
-    static IterationDecision for_each_runnable(Callback);
-    template<typename Callback>
-    static IterationDecision for_each_nonrunnable(Callback);
-    template<typename Callback>
     static IterationDecision for_each(Callback);
 
     static bool is_runnable_state(Thread::State state)
@@ -313,19 +309,8 @@ public:
 private:
     IntrusiveListNode m_runnable_list_node;
 
-    typedef IntrusiveList<Thread, &Thread::m_runnable_list_node> SchedulerThreadList;
-
-public:
-    static SchedulerThreadList* g_runnable_threads;
-    static SchedulerThreadList* g_nonrunnable_threads;
-    static SchedulerThreadList* thread_list_for_state(Thread::State state)
-    {
-        if (is_runnable_state(state))
-            return g_runnable_threads;
-        return g_nonrunnable_threads;
-    }
-
 private:
+    friend class SchedulerData;
     Process& m_process;
     int m_tid { -1 };
     TSS32 m_tss;
@@ -351,20 +336,6 @@ private:
 
 HashTable<Thread*>& thread_table();
 
-template<typename Callback>
-inline IterationDecision Thread::for_each_in_state(State state, Callback callback)
-{
-    ASSERT_INTERRUPTS_DISABLED();
-    auto new_callback = [=](Thread& thread) -> IterationDecision {
-        if (thread.state() == state)
-            return callback(thread);
-        return IterationDecision::Continue;
-    };
-    if (is_runnable_state(state))
-        return for_each_runnable(new_callback);
-    return for_each_nonrunnable(new_callback);
-}
-
 template<typename Callback>
 inline IterationDecision Thread::for_each_living(Callback callback)
 {
@@ -380,38 +351,23 @@ template<typename Callback>
 inline IterationDecision Thread::for_each(Callback callback)
 {
     ASSERT_INTERRUPTS_DISABLED();
-    auto ret = for_each_runnable(callback);
+    auto ret = Scheduler::for_each_runnable(callback);
     if (ret == IterationDecision::Break)
         return ret;
-    return for_each_nonrunnable(callback);
+    return Scheduler::for_each_nonrunnable(callback);
 }
 
 template<typename Callback>
-inline IterationDecision Thread::for_each_runnable(Callback callback)
+inline IterationDecision Thread::for_each_in_state(State state, Callback callback)
 {
     ASSERT_INTERRUPTS_DISABLED();
-    auto& tl = *g_runnable_threads;
-    for (auto it = tl.begin(); it != tl.end();) {
-        auto thread = *it;
-        it = ++it;
-        if (callback(*thread) == IterationDecision::Break)
-            return IterationDecision::Break;
-    }
-    return IterationDecision::Continue;
-}
-
-template<typename Callback>
-inline IterationDecision Thread::for_each_nonrunnable(Callback callback)
-{
-    ASSERT_INTERRUPTS_DISABLED();
-    auto& tl = *g_nonrunnable_threads;
-    for (auto it = tl.begin(); it != tl.end();) {
-        auto thread = *it;
-        it = ++it;
-        if (callback(*thread) == IterationDecision::Break)
-            return IterationDecision::Break;
-    }
-    return IterationDecision::Continue;
-}
+    auto new_callback = [=](Thread& thread) -> IterationDecision {
+        if (thread.state() == state)
+            return callback(thread);
+        return IterationDecision::Continue;
+    };
+    if (is_runnable_state(state))
+        return Scheduler::for_each_runnable(new_callback);
+    return Scheduler::for_each_nonrunnable(new_callback);
 }