LibCpp: Make Preprocessor::handle_preprocessor_line return keyword
This also moves most of the logic that was in handle_preprocessor_line into handle_preprocessor_keyword.
parent 1c3c043cd3
commit 14e0011825
2 changed files with 30 additions and 21 deletions
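The shape introduced by the diff below: handle_preprocessor_line now reports which directive the line contained, and the per-keyword work moves into a separate handle_preprocessor_keyword. A minimal standalone sketch of that split, using only std:: types rather than Serenity's AK/GenericLexer API (the names and parsing details here are illustrative, not the LibCpp implementation):

    // Standalone illustration only -- std:: types, not the AK/LibCpp API used in the diff.
    #include <cctype>
    #include <iostream>
    #include <string_view>

    using PreprocessorKeyword = std::string_view;

    // Drop leading whitespace from a view (stand-in for the diff's consume_whitespace()).
    static std::string_view trim_leading_space(std::string_view s)
    {
        while (!s.empty() && std::isspace(static_cast<unsigned char>(s.front())))
            s.remove_prefix(1);
        return s;
    }

    // Handles a single directive once the keyword has been split off.
    static void handle_preprocessor_keyword(std::string_view keyword, std::string_view rest)
    {
        if (keyword == "include")
            std::cout << "would include: " << trim_leading_space(rest) << '\n';
        // "define", "undef", "ifdef", ... would be dispatched here the same way.
    }

    // Parses one '#' line, dispatches it, and returns the keyword to the caller.
    static PreprocessorKeyword handle_preprocessor_line(std::string_view line)
    {
        line = trim_leading_space(line);
        if (line.empty() || line.front() != '#')
            return {};
        line = trim_leading_space(line.substr(1));
        auto space = line.find(' ');
        auto keyword = line.substr(0, space);
        if (keyword.empty())
            return {};
        handle_preprocessor_keyword(keyword, space == std::string_view::npos ? std::string_view {} : line.substr(space + 1));
        return keyword; // the caller now knows which directive this line was
    }

    int main()
    {
        auto keyword = handle_preprocessor_line("#  include <AK/String.h>");
        std::cout << "line was a '" << keyword << "' directive\n";
    }

In the real class both functions are private members and PreprocessorKeyword is simply an alias for StringView, so the returned keyword is a non-owning view into the line being processed.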
Preprocessor.cpp

@@ -34,26 +34,33 @@ const String& Preprocessor::process()
     return m_processed_text;
 }
 
-void Preprocessor::handle_preprocessor_line(const StringView& line)
+static void consume_whitespace(GenericLexer& lexer)
+{
+    lexer.ignore_while([](char ch) { return isspace(ch); });
+    if (lexer.peek() == '/' && lexer.peek(1) == '/')
+        lexer.ignore_until([](char ch) { return ch == '\n'; });
+}
+
+Preprocessor::PreprocessorKeyword Preprocessor::handle_preprocessor_line(const StringView& line)
 {
     GenericLexer lexer(line);
 
-    auto consume_whitespace = [&] {
-        lexer.ignore_while([](char ch) { return isspace(ch); });
-        if (lexer.peek() == '/' && lexer.peek(1) == '/')
-            lexer.ignore_until([](char ch) { return ch == '\n'; });
-    };
-
-    consume_whitespace();
+    consume_whitespace(lexer);
     lexer.consume_specific('#');
-    consume_whitespace();
+    consume_whitespace(lexer);
     auto keyword = lexer.consume_until(' ');
     if (keyword.is_empty() || keyword.is_null() || keyword.is_whitespace())
-        return;
+        return {};
 
+    handle_preprocessor_keyword(keyword, lexer);
+    return keyword;
+}
+
+void Preprocessor::handle_preprocessor_keyword(const StringView& keyword, GenericLexer& line_lexer)
+{
     if (keyword == "include") {
-        consume_whitespace();
-        m_included_paths.append(lexer.consume_all());
+        consume_whitespace(line_lexer);
+        m_included_paths.append(line_lexer.consume_all());
         return;
     }
 
@@ -84,14 +91,14 @@ void Preprocessor::handle_preprocessor_line(const StringView& line)
 
     if (keyword == "define") {
         if (m_state == State::Normal) {
-            auto key = lexer.consume_until(' ');
-            consume_whitespace();
+            auto key = line_lexer.consume_until(' ');
+            consume_whitespace(line_lexer);
 
             DefinedValue value;
             value.filename = m_filename;
             value.line = m_line_index;
 
-            auto string_value = lexer.consume_all();
+            auto string_value = line_lexer.consume_all();
             if (!string_value.is_empty())
                 value.value = string_value;
 
@@ -101,8 +108,8 @@ void Preprocessor::handle_preprocessor_line(const StringView& line)
     }
     if (keyword == "undef") {
         if (m_state == State::Normal) {
-            auto key = lexer.consume_until(' ');
-            lexer.consume_all();
+            auto key = line_lexer.consume_until(' ');
+            line_lexer.consume_all();
             m_definitions.remove(key);
         }
         return;
@@ -110,7 +117,7 @@ void Preprocessor::handle_preprocessor_line(const StringView& line)
     if (keyword == "ifdef") {
         ++m_current_depth;
         if (m_state == State::Normal) {
-            auto key = lexer.consume_until(' ');
+            auto key = line_lexer.consume_until(' ');
             if (m_definitions.contains(key)) {
                 m_depths_of_taken_branches.append(m_current_depth - 1);
                 return;
@@ -125,7 +132,7 @@ void Preprocessor::handle_preprocessor_line(const StringView& line)
     if (keyword == "ifndef") {
         ++m_current_depth;
         if (m_state == State::Normal) {
-            auto key = lexer.consume_until(' ');
+            auto key = line_lexer.consume_until(' ');
             if (!m_definitions.contains(key)) {
                 m_depths_of_taken_branches.append(m_current_depth - 1);
                 return;
@@ -161,7 +168,7 @@ void Preprocessor::handle_preprocessor_line(const StringView& line)
         return;
     }
     if (keyword == "pragma") {
-        lexer.consume_all();
+        line_lexer.consume_all();
        return;
     }
 

Preprocessor.h

@@ -35,7 +35,9 @@ public:
     void set_ignore_unsupported_keywords(bool ignore) { m_options.ignore_unsupported_keywords = ignore; }
 
 private:
-    void handle_preprocessor_line(const StringView&);
+    using PreprocessorKeyword = StringView;
+    PreprocessorKeyword handle_preprocessor_line(const StringView&);
+    void handle_preprocessor_keyword(const StringView& keyword, GenericLexer& line_lexer);
 
     Definitions m_definitions;
     const String m_filename;