From 53262cd08b08f3d4d2b77cff9c348e84b1bf5eb9 Mon Sep 17 00:00:00 2001
From: Robin Burchell
Date: Fri, 19 Jul 2019 13:04:42 +0200
Subject: [PATCH] AK: Introduce IntrusiveList

And use it in the scheduler.

IntrusiveList is similar to InlineLinkedList, except that rather than
making assertions about the type (and requiring inheritance), it provides
an IntrusiveListNode type that can be used to put an instance into many
different lists at once.

As a proof of concept, port the scheduler over to use it. The only
downside here is that the "list" global needs to know the position of the
IntrusiveListNode member, so we have to position things a little awkwardly
to make that happen. We also move the runnable lists to Thread, to avoid
having to publicize the node.
---
 AK/IntrusiveList.h   | 258 +++++++++++++++++++++++++++++++++++++++++++
 Kernel/Scheduler.cpp |   9 +-
 Kernel/Thread.cpp    |  39 +++----
 Kernel/Thread.h      |  39 ++++---
 4 files changed, 299 insertions(+), 46 deletions(-)
 create mode 100644 AK/IntrusiveList.h

diff --git a/AK/IntrusiveList.h b/AK/IntrusiveList.h
new file mode 100644
index 0000000000..59edf40c9d
--- /dev/null
+++ b/AK/IntrusiveList.h
@@ -0,0 +1,258 @@
+#pragma once
+
+namespace AK {
+
+class IntrusiveListNode;
+class IntrusiveListStorage {
+private:
+    friend class IntrusiveListNode;
+    template<class T, IntrusiveListNode T::*member>
+    friend class IntrusiveList;
+    IntrusiveListNode* m_first { nullptr };
+    IntrusiveListNode* m_last { nullptr };
+};
+
+template<class T, IntrusiveListNode T::*member>
+class IntrusiveList {
+public:
+    IntrusiveList();
+    ~IntrusiveList();
+
+    void clear();
+    bool is_empty() const;
+    void append(T& n);
+    void prepend(T& n);
+    void remove(T& n);
+    bool contains(const T&) const;
+    T* first() const;
+    T* last() const;
+
+    class Iterator {
+    public:
+        Iterator();
+        Iterator(T* value);
+
+        T* operator*() const;
+        T* operator->() const;
+        bool operator==(const Iterator& other) const;
+        bool operator!=(const Iterator& other) const { return !(*this == other); }
+        Iterator& operator++();
+        Iterator& erase();
+
+    private:
+        T* m_value { nullptr };
+    };
+
+    Iterator begin();
+    Iterator end();
+
+private:
+    static T* next(T* current);
+    static T* node_to_value(IntrusiveListNode& node);
+    IntrusiveListStorage m_storage;
+};
+
+class IntrusiveListNode {
+public:
+    ~IntrusiveListNode();
+    void remove();
+    bool is_in_list() const;
+
+private:
+    template<class T, IntrusiveListNode T::*member>
+    friend class IntrusiveList;
+    IntrusiveListStorage* m_storage = nullptr;
+    IntrusiveListNode* m_next = nullptr;
+    IntrusiveListNode* m_prev = nullptr;
+};
+
+template<class T, IntrusiveListNode T::*member>
+inline IntrusiveList<T, member>::Iterator::Iterator()
+{
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline IntrusiveList<T, member>::Iterator::Iterator(T* value)
+    : m_value(value)
+{
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::Iterator::operator*() const
+{
+    return m_value;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::Iterator::operator->() const
+{
+    return m_value;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline bool IntrusiveList<T, member>::Iterator::operator==(const Iterator& other) const
+{
+    return other.m_value == m_value;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline typename IntrusiveList<T, member>::Iterator& IntrusiveList<T, member>::Iterator::operator++()
+{
+    m_value = IntrusiveList<T, member>::next(m_value);
+    return *this;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline typename IntrusiveList<T, member>::Iterator& IntrusiveList<T, member>::Iterator::erase()
+{
+    T* old = m_value;
+    m_value = IntrusiveList<T, member>::next(m_value);
+    (old->*member).remove();
+    return *this;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline IntrusiveList<T, member>::IntrusiveList()
+{
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline IntrusiveList<T, member>::~IntrusiveList()
+{
+    clear();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline void IntrusiveList<T, member>::clear()
+{
+    while (m_storage.m_first)
+        m_storage.m_first->remove();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline bool IntrusiveList<T, member>::is_empty() const
+{
+    return m_storage.m_first == nullptr;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline void IntrusiveList<T, member>::append(T& n)
+{
+    auto& nnode = n.*member;
+    if (nnode.m_storage)
+        nnode.remove();
+
+    nnode.m_storage = &m_storage;
+    nnode.m_prev = m_storage.m_last;
+    nnode.m_next = nullptr;
+
+    if (m_storage.m_last)
+        m_storage.m_last->m_next = &nnode;
+    m_storage.m_last = &nnode;
+    if (!m_storage.m_first)
+        m_storage.m_first = &nnode;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline void IntrusiveList<T, member>::prepend(T& n)
+{
+    auto& nnode = n.*member;
+    if (nnode.m_storage)
+        nnode.remove();
+
+    nnode.m_storage = &m_storage;
+    nnode.m_prev = nullptr;
+    nnode.m_next = m_storage.m_first;
+
+    if (m_storage.m_first)
+        m_storage.m_first->m_prev = &nnode;
+    m_storage.m_first = &nnode;
+    if (!m_storage.m_last)
+        m_storage.m_last = &nnode;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline void IntrusiveList<T, member>::remove(T& n)
+{
+    auto& nnode = n.*member;
+    if (nnode.m_storage)
+        nnode.remove();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline bool IntrusiveList<T, member>::contains(const T& n) const
+{
+    auto& nnode = n.*member;
+    return nnode.m_storage == &m_storage;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::first() const
+{
+    return m_storage.m_first ? node_to_value(*m_storage.m_first) : nullptr;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::last() const
+{
+    return m_storage.m_last ? node_to_value(*m_storage.m_last) : nullptr;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::next(T* current)
+{
+    auto& nextnode = (current->*member).m_next;
+    T* nextstruct = nextnode ? node_to_value(*nextnode) : nullptr;
+    return nextstruct;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline typename IntrusiveList<T, member>::Iterator IntrusiveList<T, member>::begin()
+{
+    return m_storage.m_first ? Iterator(node_to_value(*m_storage.m_first)) : Iterator();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline typename IntrusiveList<T, member>::Iterator IntrusiveList<T, member>::end()
+{
+    return Iterator();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::node_to_value(IntrusiveListNode& node)
+{
+    return (T*)((char*)&node - ((char*)&(((T*)nullptr)->*member) - (char*)nullptr));
+}
+
+inline IntrusiveListNode::~IntrusiveListNode()
+{
+    if (m_storage)
+        remove();
+}
+
+inline void IntrusiveListNode::remove()
+{
+    ASSERT(m_storage);
+    if (m_storage->m_first == this)
+        m_storage->m_first = m_next;
+    if (m_storage->m_last == this)
+        m_storage->m_last = m_prev;
+    if (m_prev)
+        m_prev->m_next = m_next;
+    if (m_next)
+        m_next->m_prev = m_prev;
+    m_prev = nullptr;
+    m_next = nullptr;
+    m_storage = nullptr;
+}
+
+inline bool IntrusiveListNode::is_in_list() const
+{
+    return m_storage != nullptr;
+}
+
+}
+
+using AK::IntrusiveList;
+using AK::IntrusiveListNode;
diff --git a/Kernel/Scheduler.cpp b/Kernel/Scheduler.cpp
index 7bef7efc55..fbae58cd0b 100644
--- a/Kernel/Scheduler.cpp
+++ b/Kernel/Scheduler.cpp
@@ -331,14 +331,15 @@ bool Scheduler::pick_next()
     });
 #endif
 
-    if (g_runnable_threads->is_empty())
+    auto& runnable_list = *Thread::g_runnable_threads;
+    if (runnable_list.is_empty())
         return context_switch(s_colonel_process->main_thread());
 
-    auto* previous_head = g_runnable_threads->head();
+    auto* previous_head = runnable_list.first();
     for (;;) {
         // Move head to tail.
-        g_runnable_threads->append(g_runnable_threads->remove_head());
-        auto* thread = g_runnable_threads->head();
+        runnable_list.append(*previous_head);
+        auto* thread = runnable_list.first();
 
         if (!thread->process().is_being_inspected() && (thread->state() == Thread::Runnable || thread->state() == Thread::Running)) {
 #ifdef SCHEDULER_DEBUG
diff --git a/Kernel/Thread.cpp b/Kernel/Thread.cpp
index fafdac3536..c82e54a042 100644
--- a/Kernel/Thread.cpp
+++ b/Kernel/Thread.cpp
@@ -16,8 +16,8 @@ HashTable<Thread*>& thread_table()
     return *table;
 }
 
-InlineLinkedList<Thread>* g_runnable_threads;
-InlineLinkedList<Thread>* g_nonrunnable_threads;
+Thread::SchedulerThreadList* Thread::g_runnable_threads;
+Thread::SchedulerThreadList* Thread::g_nonrunnable_threads;
 
 static const u32 default_kernel_stack_size = 65536;
 static const u32 default_userspace_stack_size = 65536;
@@ -75,7 +75,7 @@ Thread::Thread(Process& process)
     if (m_process.pid() != 0) {
         InterruptDisabler disabler;
         thread_table().set(this);
-        set_thread_list(g_nonrunnable_threads);
+        g_nonrunnable_threads->append(*this);
     }
 }
 
@@ -85,8 +85,6 @@ Thread::~Thread()
     kfree_aligned(m_fpu_state);
     {
         InterruptDisabler disabler;
-        if (m_thread_list)
-            m_thread_list->remove(this);
         thread_table().remove(this);
     }
 
@@ -534,8 +532,8 @@ KResult Thread::wait_for_connect(FileDescription& description)
 
 void Thread::initialize()
 {
-    g_runnable_threads = new InlineLinkedList<Thread>;
-    g_nonrunnable_threads = new InlineLinkedList<Thread>;
+    g_runnable_threads = new SchedulerThreadList;
+    g_nonrunnable_threads = new SchedulerThreadList;
     Scheduler::initialize();
 }
 
@@ -555,23 +553,20 @@ bool Thread::is_thread(void* ptr)
     return thread_table().contains((Thread*)ptr);
 }
 
-void Thread::set_thread_list(InlineLinkedList<Thread>* thread_list)
-{
-    ASSERT_INTERRUPTS_DISABLED();
-    ASSERT(pid() != 0);
-    if (m_thread_list == thread_list)
-        return;
-    if (m_thread_list)
-        m_thread_list->remove(this);
-    if (thread_list)
-        thread_list->append(this);
-    m_thread_list = thread_list;
-}
-
 void Thread::set_state(State new_state)
 {
     InterruptDisabler disabler;
     m_state = new_state;
-    if (m_process.pid() != 0)
-        set_thread_list(thread_list_for_state(new_state));
+    if (m_process.pid() != 0) {
+        SchedulerThreadList* list = nullptr;
+        if (is_runnable_state(new_state))
+            list = g_runnable_threads;
+        else
+            list = g_nonrunnable_threads;
+
+        if (list->contains(*this))
+            return;
+
+        list->append(*this);
+    }
 }
diff --git a/Kernel/Thread.h b/Kernel/Thread.h
index 45a170e05d..e31e39f3a9 100644
--- a/Kernel/Thread.h
+++ b/Kernel/Thread.h
@@ -2,7 +2,7 @@
 #include 
 #include 
-#include <AK/InlineLinkedList.h>
+#include <AK/IntrusiveList.h>
 #include 
 #include 
 #include 
@@ -29,10 +29,7 @@ struct SignalActionData {
     int flags { 0 };
 };
 
-extern InlineLinkedList<Thread>* g_runnable_threads;
-extern InlineLinkedList<Thread>* g_nonrunnable_threads;
-
-class Thread : public InlineLinkedListNode<Thread> {
+class Thread {
     friend class Process;
     friend class Scheduler;
 
@@ -253,13 +250,6 @@ public:
 
     Thread* clone(Process&);
 
-    // For InlineLinkedList
-    Thread* m_prev { nullptr };
-    Thread* m_next { nullptr };
-
-    InlineLinkedList<Thread>* thread_list() { return m_thread_list; }
-    void set_thread_list(InlineLinkedList<Thread>*);
-
     template<typename Callback>
     static IterationDecision for_each_in_state(State, Callback);
     template<typename Callback>
@@ -276,7 +266,15 @@ public:
         return state == Thread::State::Running || state == Thread::State::Runnable;
     }
-    static InlineLinkedList<Thread>* thread_list_for_state(Thread::State state)
+private:
+    IntrusiveListNode m_runnable_list_node;
+
+    typedef IntrusiveList<Thread, &Thread::m_runnable_list_node> SchedulerThreadList;
+
+public:
+    static SchedulerThreadList* g_runnable_threads;
+    static SchedulerThreadList* g_nonrunnable_threads;
+    static SchedulerThreadList* thread_list_for_state(Thread::State state)
     {
         if (is_runnable_state(state))
             return g_runnable_threads;
         return g_nonrunnable_threads;
@@ -301,7 +299,6 @@ private:
     Region* m_signal_stack_user_region { nullptr };
     Blocker* m_blocker { nullptr };
     FPUState* m_fpu_state { nullptr };
-    InlineLinkedList<Thread>* m_thread_list { nullptr };
     State m_state { Invalid };
     bool m_has_used_fpu { false };
     bool m_was_interrupted_while_blocked { false };
@@ -345,11 +342,12 @@ template<typename Callback>
 inline IterationDecision Thread::for_each_runnable(Callback callback)
 {
     ASSERT_INTERRUPTS_DISABLED();
-    for (auto* thread = g_runnable_threads->head(); thread;) {
-        auto* next_thread = thread->next();
+    auto& tl = *g_runnable_threads;
+    for (auto it = tl.begin(); it != tl.end();) {
+        auto thread = *it;
+        ++it;
         if (callback(*thread) == IterationDecision::Break)
             return IterationDecision::Break;
-        thread = next_thread;
     }
 
     return IterationDecision::Continue;
@@ -359,11 +357,12 @@ template<typename Callback>
 inline IterationDecision Thread::for_each_nonrunnable(Callback callback)
 {
     ASSERT_INTERRUPTS_DISABLED();
-    for (auto* thread = g_nonrunnable_threads->head(); thread;) {
-        auto* next_thread = thread->next();
+    auto& tl = *g_nonrunnable_threads;
+    for (auto it = tl.begin(); it != tl.end();) {
+        auto thread = *it;
+        ++it;
         if (callback(*thread) == IterationDecision::Break)
             return IterationDecision::Break;
-        thread = next_thread;
     }
 
     return IterationDecision::Continue;
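
For readers new to the pattern, here is a minimal usage sketch (illustrative
only, not part of the patch). It assumes the AK/IntrusiveList.h added above
plus an ASSERT definition (e.g. from AK/Assertions.h); the Task type, the
list names, and the main() driver are hypothetical, chosen to mirror how
Thread.h embeds m_runnable_list_node and declares SchedulerThreadList over it.

// Hypothetical example of the embedded-node pattern.
#include <AK/Assertions.h>
#include <AK/IntrusiveList.h>

struct Task {
    int id { 0 };

    // One node per list this object can be on; list membership needs no
    // heap allocation and no list-specific base class.
    IntrusiveListNode m_ready_node;
    IntrusiveListNode m_all_node;
};

// The list type names the member it links through, which is why the node
// member must be declared before the list type that refers to it (the same
// ordering constraint the commit message mentions for Thread.h).
typedef IntrusiveList<Task, &Task::m_ready_node> ReadyList;
typedef IntrusiveList<Task, &Task::m_all_node> AllTasksList;

int main()
{
    Task a, b;
    a.id = 1;
    b.id = 2;

    AllTasksList all_tasks;
    ReadyList ready;

    // The same object can sit on both lists at once, via different nodes.
    all_tasks.append(a);
    all_tasks.append(b);
    ready.append(a);

    // Iteration yields T* directly; node_to_value() recovers the Task from
    // the embedded node by subtracting the member's byte offset within T.
    for (auto* task : ready)
        (void)task->id;

    // append() first unlinks a node from whatever list it currently sits on,
    // which is what lets Thread::set_state() simply append to the target
    // list without an explicit remove from the old one.
    return 0;
}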