Mirror of https://github.com/RGBCube/serenity (synced 2025-07-25 16:17:45 +00:00)

LibCpp: Use lex_iterable() where applicable

This commit is contained in:
Itamar 2021-08-21 16:48:21 +03:00 committed by Andreas Kling
parent 606e05852f
commit f91974677c
3 changed files with 12 additions and 16 deletions

View file

@@ -244,13 +244,13 @@ size_t Preprocessor::do_substitution(Vector<Token> const& tokens, size_t token_i
     m_substitutions.append({ original_tokens, defined_value, processed_value });
     Lexer lexer(processed_value);
-    for (auto& token : lexer.lex()) {
+    lexer.lex_iterable([&](auto token) {
         if (token.type() == Token::Type::Whitespace)
-            continue;
+            return;
         token.set_start(original_tokens.first().start());
         token.set_end(original_tokens.first().end());
         m_processed_tokens.append(token);
-    }
+    });
     return macro_call->end_token_index;
 }
@@ -363,26 +363,25 @@ String Preprocessor::evaluate_macro_call(MacroCall const& macro_call, Definition
     }
     Lexer lexer { definition.value };
-    auto tokens = lexer.lex();
     StringBuilder processed_value;
-    for (auto& token : tokens) {
+    lexer.lex_iterable([&](auto token) {
         if (token.type() != Token::Type::Identifier) {
             processed_value.append(token.text());
-            continue;
+            return;
         }
         auto param_index = definition.parameters.find_first_index(token.text());
         if (!param_index.has_value()) {
             processed_value.append(token.text());
-            continue;
+            return;
         }
         auto& argument = macro_call.arguments[*param_index];
         for (auto& arg_token : argument.tokens) {
             processed_value.append(arg_token.text());
         }
-    }
+    });
     return processed_value.to_string();
 }

View file

@@ -59,10 +59,9 @@ void SyntaxHighlighter::rehighlight(Palette const& palette)
 {
     auto text = m_client->get_text();
     Cpp::Lexer lexer(text);
-    auto tokens = lexer.lex();
     Vector<GUI::TextDocumentSpan> spans;
-    for (auto& token : tokens) {
+    lexer.lex_iterable([&](auto token) {
         // FIXME: The +1 for the token end column is a quick hack due to not wanting to modify the lexer (which is also used by the parser). Maybe there's a better way to do this.
         dbgln_if(SYNTAX_HIGHLIGHTING_DEBUG, "{} @ {}:{} - {}:{}", token.type_as_string(), token.start().line, token.start().column, token.end().line, token.end().column + 1);
         GUI::TextDocumentSpan span;
@@ -74,7 +73,7 @@ void SyntaxHighlighter::rehighlight(Palette const& palette)
         span.is_skippable = token.type() == Cpp::Token::Type::Whitespace;
         span.data = static_cast<u64>(token.type());
         spans.append(span);
-    }
+    });
     m_client->do_set_spans(move(spans));
     m_has_brace_buddies = false;

View file

@@ -24,9 +24,7 @@ int main(int argc, char** argv)
     StringView content_view(content);
     Cpp::Lexer lexer(content);
-    auto tokens = lexer.lex();
-    for (auto& token : tokens) {
+    lexer.lex_iterable([](auto token) {
         outln("{}", token.to_string());
-    }
+    });
 }