Mirror of https://github.com/RGBCube/serenity, synced 2025-05-31 14:18:12 +00:00
LibCpp: Use lex_iterable() where applicable
parent 606e05852f
commit f91974677c
3 changed files with 12 additions and 16 deletions
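The change replaces the eager pattern, where lexer.lex() builds a Vector<Token> that is then walked with a range-for loop, with lex_iterable(), which hands each token to a callback as the lexer produces it. At every call site the `auto tokens = lexer.lex();` line disappears, and each `continue;` inside the old loops becomes a `return;` from the lambda. Below is a minimal sketch of the new call shape, assuming only what the call sites in this diff show; the function name, includes, and lambda body are illustrative and not part of the commit:

#include <AK/Format.h>
#include <AK/StringView.h>
#include <LibCpp/Lexer.h>

// Sketch: print every identifier in a C++ source fragment. Tokens are handed
// to the lambda one at a time; 'return' skips to the next token, the way
// 'continue' did in the old range-for loops over lexer.lex().
static void print_identifiers(StringView source)
{
    Cpp::Lexer lexer(source);
    lexer.lex_iterable([](auto token) {
        if (token.type() != Cpp::Token::Type::Identifier)
            return;
        outln("{}", token.text());
    });
}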
@@ -244,13 +244,13 @@ size_t Preprocessor::do_substitution(Vector<Token> const& tokens, size_t token_i
     m_substitutions.append({ original_tokens, defined_value, processed_value });

     Lexer lexer(processed_value);
-    for (auto& token : lexer.lex()) {
+    lexer.lex_iterable([&](auto token) {
         if (token.type() == Token::Type::Whitespace)
-            continue;
+            return;
         token.set_start(original_tokens.first().start());
         token.set_end(original_tokens.first().end());
         m_processed_tokens.append(token);
-    }
+    });
     return macro_call->end_token_index;
 }

@@ -363,26 +363,25 @@ String Preprocessor::evaluate_macro_call(MacroCall const& macro_call, Definition
     }

     Lexer lexer { definition.value };
-    auto tokens = lexer.lex();

     StringBuilder processed_value;
-    for (auto& token : tokens) {
+    lexer.lex_iterable([&](auto token) {
         if (token.type() != Token::Type::Identifier) {
             processed_value.append(token.text());
-            continue;
+            return;
         }

         auto param_index = definition.parameters.find_first_index(token.text());
         if (!param_index.has_value()) {
             processed_value.append(token.text());
-            continue;
+            return;
         }

         auto& argument = macro_call.arguments[*param_index];
         for (auto& arg_token : argument.tokens) {
             processed_value.append(arg_token.text());
         }
-    }
+    });

     return processed_value.to_string();
 }
@@ -59,10 +59,9 @@ void SyntaxHighlighter::rehighlight(Palette const& palette)
 {
     auto text = m_client->get_text();
     Cpp::Lexer lexer(text);
-    auto tokens = lexer.lex();

     Vector<GUI::TextDocumentSpan> spans;
-    for (auto& token : tokens) {
+    lexer.lex_iterable([&](auto token) {
         // FIXME: The +1 for the token end column is a quick hack due to not wanting to modify the lexer (which is also used by the parser). Maybe there's a better way to do this.
         dbgln_if(SYNTAX_HIGHLIGHTING_DEBUG, "{} @ {}:{} - {}:{}", token.type_as_string(), token.start().line, token.start().column, token.end().line, token.end().column + 1);
         GUI::TextDocumentSpan span;
@@ -74,7 +73,7 @@ void SyntaxHighlighter::rehighlight(Palette const& palette)
         span.is_skippable = token.type() == Cpp::Token::Type::Whitespace;
         span.data = static_cast<u64>(token.type());
         spans.append(span);
-    }
+    });
     m_client->do_set_spans(move(spans));

     m_has_brace_buddies = false;
@@ -24,9 +24,7 @@ int main(int argc, char** argv)
     StringView content_view(content);

     Cpp::Lexer lexer(content);
-    auto tokens = lexer.lex();
-
-    for (auto& token : tokens) {
+    lexer.lex_iterable([](auto token) {
         outln("{}", token.to_string());
-    }
+    });
 }