
Assistant: Cache results for each query by provider

Previously, results were cached for each query in a single list.
The majority of CPU time was spent determining which items in the
cache had been seen previously. This commit removes the need to
check previous results by holding a separate list of results for each
provider type.

This makes Assistant feel much more responsive to user input,
especially when the filesystem has a lot of files.
Authored by Tim Ledbetter on 2023-01-07 17:46:34 +00:00; committed by Jelle Raaijmakers
parent 634d1e0197
commit d9aa7eacc6
3 changed files with 52 additions and 37 deletions
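The core of the change is the cache layout: instead of one flat list of results per query, the cache now holds one slot per provider for each query, so a provider's results can be stored and displayed without re-checking what has already been cached. Below is a minimal standalone sketch of that layout in standard C++ (std::map, std::array and std::optional stand in for the AK HashMap, Array and OwnPtr types used in the commit; the two-provider setup and the Result fields are invented for illustration):

// Minimal sketch of a per-provider result cache (standard C++, not the AK/LibThreading types).
#include <array>
#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <vector>

struct Result {
    std::string title;
    int score { 0 };
};

// Hypothetical: pretend there are only two providers (e.g. apps and files).
constexpr size_t provider_count = 2;

using ResultList = std::vector<Result>;
// One optional list per provider; an empty slot means "this provider has not answered yet".
using CacheEntry = std::array<std::optional<ResultList>, provider_count>;

int main()
{
    std::map<std::string, CacheEntry> result_cache;

    // Each provider writes its results for a query into its own slot,
    // so nothing ever has to be compared against previously cached items.
    result_cache["ls"][0] = ResultList { { "ls (application)", 50 } };
    result_cache["ls"][1] = ResultList { { "/bin/ls", 40 } };

    // Displaying results just merges whichever slots are filled so far.
    ResultList all_results;
    for (auto const& slot : result_cache["ls"]) {
        if (!slot.has_value())
            continue;
        all_results.insert(all_results.end(), slot->begin(), slot->end());
    }

    for (auto const& result : all_results)
        std::cout << result.title << " (score " << result.score << ")\n";
}

In the actual commit the per-provider slots are filled asynchronously by each provider's query() callback under a mutex, and an empty slot is what triggers a fresh query, as shown in the diffs below.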


@@ -126,13 +126,13 @@ void FileProvider::query(DeprecatedString const& query, Function<void(NonnullRef
     if (m_fuzzy_match_work)
         m_fuzzy_match_work->cancel();
-    m_fuzzy_match_work = Threading::BackgroundAction<NonnullRefPtrVector<Result>>::construct(
-        [this, query](auto& task) {
+    m_fuzzy_match_work = Threading::BackgroundAction<Optional<NonnullRefPtrVector<Result>>>::construct(
+        [this, query](auto& task) -> Optional<NonnullRefPtrVector<Result>> {
             NonnullRefPtrVector<Result> results;
             for (auto& path : m_full_path_cache) {
                 if (task.is_cancelled())
-                    return results;
+                    return {};
                 auto match_result = fuzzy_match(query, path);
                 if (!match_result.matched)
@@ -145,7 +145,9 @@ void FileProvider::query(DeprecatedString const& query, Function<void(NonnullRef
             return results;
         },
         [on_complete = move(on_complete)](auto results) -> ErrorOr<void> {
-            on_complete(move(results));
+            if (results.has_value())
+                on_complete(move(results.value()));
             return {};
         });
 }


@@ -153,7 +153,7 @@ public:
     void build_filesystem_cache();

 private:
-    RefPtr<Threading::BackgroundAction<NonnullRefPtrVector<Result>>> m_fuzzy_match_work;
+    RefPtr<Threading::BackgroundAction<Optional<NonnullRefPtrVector<Result>>>> m_fuzzy_match_work;
     bool m_building_cache { false };
     Vector<DeprecatedString> m_full_path_cache;
     Queue<DeprecatedString> m_work_queue;


@@ -1,11 +1,12 @@
 /*
  * Copyright (c) 2021, Spencer Dixon <spencercdixon@gmail.com>
- * Copyright (c) 2022, the SerenityOS developers.
+ * Copyright (c) 2022-2023, the SerenityOS developers.
  *
  * SPDX-License-Identifier: BSD-2-Clause
  */

 #include "Providers.h"
+#include <AK/Array.h>
 #include <AK/DeprecatedString.h>
 #include <AK/Error.h>
 #include <AK/LexicalPath.h>
@@ -73,46 +74,44 @@ class ResultRow final : public GUI::Button {
     RefPtr<GUI::Menu> m_context_menu;
 };

+template<size_t ProviderCount>
 class Database {
 public:
-    explicit Database(AppState& state)
+    explicit Database(AppState& state, Array<NonnullRefPtr<Provider>, ProviderCount>& providers)
         : m_state(state)
+        , m_providers(providers)
     {
-        m_providers.append(make_ref_counted<AppProvider>());
-        m_providers.append(make_ref_counted<CalculatorProvider>());
-        m_providers.append(make_ref_counted<FileProvider>());
-        m_providers.append(make_ref_counted<TerminalProvider>());
-        m_providers.append(make_ref_counted<URLProvider>());
     }

     Function<void(NonnullRefPtrVector<Result>)> on_new_results;

     void search(DeprecatedString const& query)
     {
-        for (auto& provider : m_providers) {
-            provider.query(query, [=, this](auto results) {
-                did_receive_results(query, results);
-            });
+        auto should_display_precached_results = false;
+        for (size_t i = 0; i < ProviderCount; ++i) {
+            auto& result_array = m_result_cache.ensure(query);
+            if (result_array.at(i) == nullptr) {
+                m_providers[i]->query(query, [this, query, i](auto results) {
+                    {
+                        Threading::MutexLocker db_locker(m_mutex);
+                        auto& result_array = m_result_cache.ensure(query);
+                        if (result_array.at(i) != nullptr)
+                            return;
+                        result_array[i] = make<NonnullRefPtrVector<Result>>(results);
+                    }
+                    on_result_cache_updated();
+                });
+            } else {
+                should_display_precached_results = true;
+            }
         }
+        if (should_display_precached_results)
+            on_result_cache_updated();
     }

 private:
-    void did_receive_results(DeprecatedString const& query, NonnullRefPtrVector<Result> const& results)
+    void on_result_cache_updated()
     {
-        {
-            Threading::MutexLocker db_locker(m_mutex);
-            auto& cache_entry = m_result_cache.ensure(query);
-            for (auto& result : results) {
-                auto found = cache_entry.find_if([&result](auto& other) {
-                    return result.equals(other);
-                });
-                if (found.is_end())
-                    cache_entry.append(result);
-            }
-        }
         Threading::MutexLocker state_locker(m_state.lock);
         auto new_results = m_result_cache.find(m_state.last_query);
         if (new_results == m_result_cache.end())
@@ -121,21 +120,28 @@ private:
         // NonnullRefPtrVector will provide dual_pivot_quick_sort references rather than pointers,
         // and dual_pivot_quick_sort requires being able to construct the underlying type on the
         // stack. Assistant::Result is pure virtual, thus cannot be constructed on the stack.
-        auto& sortable_results = static_cast<Vector<NonnullRefPtr<Result>>&>(new_results->value);
+        Vector<NonnullRefPtr<Result>> all_results;
+        for (auto const& results_for_provider : new_results->value) {
+            if (results_for_provider == nullptr)
+                continue;
+            for (auto const& result : *results_for_provider) {
+                all_results.append(result);
+            }
+        }

-        dual_pivot_quick_sort(sortable_results, 0, static_cast<int>(sortable_results.size() - 1), [](auto& a, auto& b) {
+        dual_pivot_quick_sort(all_results, 0, static_cast<int>(all_results.size() - 1), [](auto& a, auto& b) {
             return a->score() > b->score();
         });

-        on_new_results(new_results->value);
+        on_new_results(all_results);
     }

     AppState& m_state;
-    NonnullRefPtrVector<Provider> m_providers;
+    Array<NonnullRefPtr<Provider>, ProviderCount> m_providers;
     Threading::Mutex m_mutex;
-    HashMap<DeprecatedString, NonnullRefPtrVector<Result>> m_result_cache;
+    HashMap<DeprecatedString, Array<OwnPtr<NonnullRefPtrVector<Result>>, ProviderCount>> m_result_cache;
 };

 }
@@ -163,7 +169,14 @@ ErrorOr<int> serenity_main(Main::Arguments arguments)
     window->set_minimizable(false);

     Assistant::AppState app_state;
-    Assistant::Database db { app_state };
+    Array<NonnullRefPtr<Assistant::Provider>, 5> providers = {
+        make_ref_counted<Assistant::AppProvider>(),
+        make_ref_counted<Assistant::CalculatorProvider>(),
+        make_ref_counted<Assistant::TerminalProvider>(),
+        make_ref_counted<Assistant::URLProvider>(),
+        make_ref_counted<Assistant::FileProvider>()
+    };
+    Assistant::Database db { app_state, providers };

     auto container = TRY(window->set_main_widget<GUI::Frame>());
     container->set_fill_with_background_color(true);