mirror of
https://github.com/RGBCube/serenity
synced 2025-05-31 22:18:12 +00:00
LibSQL: Move Lexer and Parser machinery to AST directory
The SQL engine is expected to be a fairly sizeable piece of software. Therefore we're starting to restructure the codebase for growth.
This commit is contained in:
parent
e0f1c237d2
commit
4198f7e1af
24 changed files with 281 additions and 278 deletions
|
@ -1,112 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2021, Dylan Katz <dykatz@uw.edu>
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*/
|
||||
|
||||
#include <AK/Debug.h>
|
||||
#include <LibGfx/Palette.h>
|
||||
#include <LibSQL/Lexer.h>
|
||||
#include <LibSQL/SyntaxHighlighter.h>
|
||||
|
||||
namespace SQL {
|
||||
|
||||
// Maps a token's category to the palette color (and boldness) used when
// highlighting it; unknown/invalid categories fall back to plain base text.
static Syntax::TextStyle style_for_token_type(Gfx::Palette const& palette, TokenType type)
{
    bool bold = false;
    auto color = palette.base_text();

    switch (Token::category(type)) {
    case TokenCategory::Keyword:
        color = palette.syntax_keyword();
        bold = true; // Keywords are the only bold category.
        break;
    case TokenCategory::Identifier:
        color = palette.syntax_identifier();
        break;
    case TokenCategory::Number:
        color = palette.syntax_number();
        break;
    case TokenCategory::Blob:
    case TokenCategory::String:
        // Blobs and strings share the string style.
        color = palette.syntax_string();
        break;
    case TokenCategory::Operator:
        color = palette.syntax_operator();
        break;
    case TokenCategory::Punctuation:
        color = palette.syntax_punctuation();
        break;
    case TokenCategory::Invalid:
    default:
        break; // Keep the base-text fallback.
    }

    return { color, bold };
}
|
||||
|
||||
// Returns true if the opaque span-data value encodes an SQL identifier token.
bool SyntaxHighlighter::is_identifier(u64 token) const
{
    return static_cast<SQL::TokenType>(static_cast<size_t>(token)) == SQL::TokenType::Identifier;
}
|
||||
|
||||
// Re-lexes the whole document and rebuilds the highlighting spans from scratch.
void SyntaxHighlighter::rehighlight(Palette const& palette)
{
    auto source = m_client->get_text();

    SQL::Lexer lexer(source);

    Vector<GUI::TextDocumentSpan> highlighting_spans;

    // Records one span for a non-empty token. We walk the token's text
    // character by character so that tokens spanning multiple lines end up
    // with the correct end line/column.
    auto highlight_token = [&](StringView lexeme, SQL::Token const& token) {
        if (lexeme.is_empty())
            return;

        // The lexer reports 1-based positions; the text document is 0-based.
        GUI::TextPosition cursor { token.line_number() - 1, token.line_column() - 1 };
        for (auto ch : lexeme) {
            if (ch == '\n') {
                cursor.set_line(cursor.line() + 1);
                cursor.set_column(0);
            } else
                cursor.set_column(cursor.column() + 1);
        }

        GUI::TextDocumentSpan span;
        span.range.set_start({ token.line_number() - 1, token.line_column() - 1 });
        span.range.set_end({ cursor.line(), cursor.column() });
        auto style = style_for_token_type(palette, token.type());
        span.attributes.color = style.color;
        span.attributes.bold = style.bold;
        // Stash the token type so is_identifier()/token_types_equal() can
        // recover it from the span later.
        span.data = static_cast<u64>(token.type());
        highlighting_spans.append(span);

        dbgln_if(SYNTAX_HIGHLIGHTING_DEBUG, "{} @ '{}' {}:{} - {}:{}",
            token.name(),
            token.value(),
            span.range.start().line(), span.range.start().column(),
            span.range.end().line(), span.range.end().column());
    };

    // Consume tokens until (and including) EOF.
    while (true) {
        auto token = lexer.next();
        highlight_token(token.value(), token);
        if (token.type() == SQL::TokenType::Eof)
            break;
    }

    m_client->do_set_spans(move(highlighting_spans));

    // Brace-buddy state is stale after a rehighlight; recompute it.
    m_has_brace_buddies = false;
    highlight_matching_token_pair();

    m_client->do_update();
}
|
||||
|
||||
// SQL only pairs parentheses; the list is built lazily once and then reused.
Vector<SyntaxHighlighter::MatchingTokenPair> SyntaxHighlighter::matching_token_pairs_impl() const
{
    static Vector<SyntaxHighlighter::MatchingTokenPair> s_pairs;
    if (s_pairs.is_empty())
        s_pairs.append({ static_cast<u64>(TokenType::ParenOpen), static_cast<u64>(TokenType::ParenClose) });
    return s_pairs;
}
|
||||
|
||||
// Compares two opaque span-data values as SQL token types.
bool SyntaxHighlighter::token_types_equal(u64 token1, u64 token2) const
{
    auto lhs = static_cast<TokenType>(token1);
    auto rhs = static_cast<TokenType>(token2);
    return lhs == rhs;
}
|
||||
|
||||
// Out-of-line destructor; defaulted because there is nothing to clean up.
// (`= default` is the idiomatic replacement for an empty user-written body.)
SyntaxHighlighter::~SyntaxHighlighter() = default;
|
||||
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue