AK: Standardize the behaviour of GenericLexer::consume_until overloads
Before this commit, all consume_until overloads except the Predicate one would consume (and ignore) the stop char/string, while the Predicate overload would not. To keep the behaviour consistent, the other overloads no longer consume the stop char/string either.
parent d49d2c7ec4
commit 67ce9e28a5
7 changed files with 18 additions and 14 deletions
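In practice, consume_until() now leaves the stop char/string in the input, and callers that want to skip it call ignore() themselves. A minimal sketch of the new calling convention (the input string and variable names are made up for illustration, not taken from the patch):

#include <AK/GenericLexer.h>

// Hypothetical usage sketch; not part of the commit.
static void example()
{
    GenericLexer lexer("key value"sv);

    auto key = lexer.consume_until(' '); // "key"; the ' ' is no longer consumed
    lexer.ignore();                      // callers skip the stop character explicitly
    auto rest = lexer.consume_all();     // "value"
}
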
@@ -53,7 +53,6 @@ StringView GenericLexer::consume_line()
 }
 
 // Consume and return characters until `stop` is peek'd
-// The `stop` character is ignored, as it is user-defined
 StringView GenericLexer::consume_until(char stop)
 {
     size_t start = m_index;
@@ -61,15 +60,12 @@ StringView GenericLexer::consume_until(char stop)
         m_index++;
     size_t length = m_index - start;
 
-    ignore();
-
     if (length == 0)
         return {};
     return m_input.substring_view(start, length);
 }
 
 // Consume and return characters until the string `stop` is found
-// The `stop` string is ignored, as it is user-defined
 StringView GenericLexer::consume_until(const char* stop)
 {
     size_t start = m_index;
@@ -77,15 +73,12 @@ StringView GenericLexer::consume_until(const char* stop)
         m_index++;
     size_t length = m_index - start;
 
-    ignore(__builtin_strlen(stop));
-
     if (length == 0)
         return {};
     return m_input.substring_view(start, length);
 }
 
 // Consume and return characters until the string `stop` is found
-// The `stop` string is ignored, as it is user-defined
 StringView GenericLexer::consume_until(StringView stop)
 {
     size_t start = m_index;
@@ -93,8 +86,6 @@ StringView GenericLexer::consume_until(StringView stop)
         m_index++;
     size_t length = m_index - start;
 
-    ignore(stop.length());
-
     if (length == 0)
         return {};
     return m_input.substring_view(start, length);

@@ -865,7 +865,7 @@ ErrorOr<NonnullRefPtr<Custody>> VirtualFileSystem::resolve_path_without_veil(Str
         if (path_lexer.is_eof())
             extra_iteration = false;
         auto part = path_lexer.consume_until('/');
-        path_lexer.consume_specific('/');
+        path_lexer.ignore();
 
         Custody& parent = custody;
         auto parent_metadata = parent.inode().metadata();

@@ -230,6 +230,7 @@ static NonnullOwnPtr<Interface> parse_interface(StringView filename, StringView
 
         if (lexer.consume_specific("//")) {
             lexer.consume_until('\n');
+            lexer.ignore();
             consumed = true;
         }
     }
@@ -276,7 +277,9 @@ static NonnullOwnPtr<Interface> parse_interface(StringView filename, StringView
     while (lexer.consume_specific("#import")) {
         consume_whitespace();
         assert_specific('<');
-        imports.append(resolve_import(lexer.consume_until('>')));
+        auto path = lexer.consume_until('>');
+        lexer.ignore();
+        imports.append(resolve_import(path));
         consume_whitespace();
     }
 

@@ -88,10 +88,12 @@ parse_state_machine(StringView input)
             num = 16 * num + get_hex_value(c);
         } else {
             lexer.consume_specific('\'');
-            if (lexer.next_is('\\'))
+            if (lexer.next_is('\\')) {
                 num = (int)lexer.consume_escaped_character('\\');
-            else
+            } else {
                 num = lexer.consume_until('\'').to_int().value();
+                lexer.ignore();
+            }
             lexer.consume_specific('\'');
         }
         return num;

@@ -506,6 +506,7 @@ extern "C" int vsscanf(const char* input, const char* format, va_list ap)
         case '[':
             format_lexer.consume();
             scanlist = format_lexer.consume_until(']');
+            format_lexer.ignore();
             if (scanlist.starts_with('^')) {
                 scanlist = scanlist.substring_view(1);
                 invert_scanlist = true;

@@ -94,6 +94,7 @@ void Preprocessor::handle_preprocessor_statement(StringView line)
     lexer.consume_specific('#');
     consume_whitespace(lexer);
     auto keyword = lexer.consume_until(' ');
+    lexer.ignore();
     if (keyword.is_empty() || keyword.is_null() || keyword.is_whitespace())
         return;
 
@@ -165,6 +166,7 @@ void Preprocessor::handle_preprocessor_keyword(StringView keyword, GenericLexer&
         ++m_current_depth;
         if (m_state == State::Normal) {
             auto key = line_lexer.consume_until(' ');
+            line_lexer.ignore();
             if (m_definitions.contains(key)) {
                 m_depths_of_taken_branches.append(m_current_depth - 1);
                 return;
@@ -180,6 +182,7 @@ void Preprocessor::handle_preprocessor_keyword(StringView keyword, GenericLexer&
         ++m_current_depth;
         if (m_state == State::Normal) {
             auto key = line_lexer.consume_until(' ');
+            line_lexer.ignore();
             if (!m_definitions.contains(key)) {
                 m_depths_of_taken_branches.append(m_current_depth - 1);
                 return;
@@ -353,10 +356,12 @@ Optional<Preprocessor::Definition> Preprocessor::create_definition(StringView li
 
 String Preprocessor::remove_escaped_newlines(StringView value)
 {
+    static constexpr auto escaped_newline = "\\\n"sv;
     AK::StringBuilder processed_value;
     GenericLexer lexer { value };
     while (!lexer.is_eof()) {
-        processed_value.append(lexer.consume_until("\\\n"sv));
+        processed_value.append(lexer.consume_until(escaped_newline));
+        lexer.ignore(escaped_newline.length());
     }
     return processed_value.to_string();
 }

@@ -31,6 +31,7 @@ static bool parse_name(StringView name, OpenFile& file)
 {
     GenericLexer lexer(name);
     auto component1 = lexer.consume_until(':');
+    lexer.ignore();
 
     if (lexer.tell_remaining() == 0) {
         file.name = component1;
@@ -50,6 +51,7 @@ static bool parse_name(StringView name, OpenFile& file)
     }
 
     auto component3 = lexer.consume_until(')');
+    lexer.ignore();
     if (lexer.tell_remaining() != 0) {
         dbgln("parse_name: expected EOF");
         return false;