AK: Remove ByteString from GenericLexer
A bunch of users used consume_specific with a constant ByteString literal, which can be replaced by an allocation-free StringView literal. The generic consume_while overload gains a requires clause so that consume_specific("abc") causes a more understandable and actionable error.
commit eada4f2ee8
parent 7c88ab2836
16 changed files with 89 additions and 86 deletions
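
For context on the pattern applied throughout the diff below: a "..."sv suffix produces a StringView literal backed by static storage, so consume_specific can match it without any heap allocation, whereas the old call sites went through a temporary ByteString. Constraining the generic predicate overload means a caller who passes the wrong kind of argument gets a short "constraints not satisfied" error instead of a deep template instantiation failure. The snippet below is a self-contained sketch of that idea in standard C++; TinyLexer, std::string_view, and std::predicate are illustrative stand-ins, not the actual AK::GenericLexer code.

// Illustrative sketch only -- a tiny stand-in for GenericLexer, not the AK sources.
#include <concepts>
#include <cstddef>
#include <string_view>

struct TinyLexer {
    std::string_view input;
    std::size_t index { 0 };

    // Allocation-free exact match, analogous to consume_specific(StringView).
    bool consume_specific(std::string_view next)
    {
        if (input.substr(index, next.length()) != next)
            return false;
        index += next.length();
        return true;
    }

    // Generic predicate overload, analogous to consume_while(TPredicate).
    // The requires clause rejects non-callable arguments up front, so a stray
    // string literal passed here fails the constraint with a short diagnostic
    // instead of erroring deep inside the loop body.
    template<typename TPredicate>
    requires std::predicate<TPredicate, char>
    std::string_view consume_while(TPredicate pred)
    {
        std::size_t start = index;
        while (index < input.length() && pred(input[index]))
            ++index;
        return input.substr(start, index - start);
    }
};

int main()
{
    using namespace std::string_view_literals;

    TinyLexer lexer { "-----BEGIN CERTIFICATE-----"sv };
    bool began = lexer.consume_specific("-----BEGIN"sv); // "sv" literal: no heap allocation
    auto spaces = lexer.consume_while([](char c) { return c == ' '; });
    return (began && spaces.length() == 1) ? 0 : 1;
}

In this sketch, a call like consume_while("abc") is stopped at the requires clause rather than producing pages of substitution errors, which is the kind of understandable, actionable diagnostic the commit message describes.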
@@ -24,12 +24,12 @@ ByteBuffer decode_pem(ReadonlyBytes data)
     while (!lexer.is_eof()) {
         switch (state) {
         case PreStartData:
-            if (lexer.consume_specific("-----BEGIN"))
+            if (lexer.consume_specific("-----BEGIN"sv))
                 state = Started;
             lexer.consume_line();
             break;
         case Started: {
-            if (lexer.consume_specific("-----END")) {
+            if (lexer.consume_specific("-----END"sv)) {
                 state = Ended;
                 lexer.consume_line();
                 break;
@@ -69,12 +69,12 @@ ErrorOr<Vector<ByteBuffer>> decode_pems(ReadonlyBytes data)
     while (!lexer.is_eof()) {
         switch (state) {
         case Junk:
-            if (lexer.consume_specific("-----BEGIN"))
+            if (lexer.consume_specific("-----BEGIN"sv))
                 state = Parsing;
             lexer.consume_line();
             break;
         case Parsing: {
-            if (lexer.consume_specific("-----END")) {
+            if (lexer.consume_specific("-----END"sv)) {
                 state = Junk;
                 lexer.consume_line();
                 TRY(pems.try_append(decoded));
@@ -26,7 +26,7 @@ Optional<HunkLocation> Parser::consume_unified_location()
         return true;
     };
 
-    if (!consume_specific("@@ -"))
+    if (!consume_specific("@@ -"sv))
         return {};
 
     HunkLocation location;
@@ -34,13 +34,13 @@ Optional<HunkLocation> Parser::consume_unified_location()
     if (!consume_range(location.old_range))
         return {};
 
-    if (!consume_specific(" +"))
+    if (!consume_specific(" +"sv))
         return {};
 
     if (!consume_range(location.new_range))
         return {};
 
-    if (!consume_specific(" @@"))
+    if (!consume_specific(" @@"sv))
         return {};
 
     return location;
@@ -101,12 +101,12 @@ ErrorOr<Header> Parser::parse_header(Optional<size_t> const& strip_count)
 
     while (!is_eof()) {
 
-        if (consume_specific("+++ ")) {
+        if (consume_specific("+++ "sv)) {
             header.new_file_path = TRY(parse_file_line(strip_count));
             continue;
         }
 
-        if (consume_specific("--- ")) {
+        if (consume_specific("--- "sv)) {
             header.old_file_path = TRY(parse_file_line(strip_count));
             continue;
         }
@@ -90,7 +90,7 @@ void Parser::consume_whitespace()
     while (consumed) {
         consumed = lexer.consume_while(is_ascii_space).length() > 0;
 
-        if (lexer.consume_specific("//")) {
+        if (lexer.consume_specific("//"sv)) {
             lexer.consume_until('\n');
             lexer.ignore();
             consumed = true;
@@ -178,7 +178,7 @@ NonnullRefPtr<Type const> Parser::parse_type()
         union_member_types.append(parse_type());
         consume_whitespace();
 
-        while (lexer.consume_specific("or")) {
+        while (lexer.consume_specific("or"sv)) {
             consume_whitespace();
             union_member_types.append(parse_type());
             consume_whitespace();
@@ -199,12 +199,12 @@ NonnullRefPtr<Type const> Parser::parse_type()
         return type;
     }
 
-    bool unsigned_ = lexer.consume_specific("unsigned");
+    bool unsigned_ = lexer.consume_specific("unsigned"sv);
     if (unsigned_)
         consume_whitespace();
 
     // FIXME: Actually treat "unrestricted" and normal floats/doubles differently.
-    if (lexer.consume_specific("unrestricted"))
+    if (lexer.consume_specific("unrestricted"sv))
         consume_whitespace();
 
     auto name = lexer.consume_until([](auto ch) { return !is_ascii_alphanumeric(ch) && ch != '_'; });
@@ -262,15 +262,15 @@ NonnullRefPtr<Type const> Parser::parse_type()
 
 void Parser::parse_attribute(HashMap<ByteString, ByteString>& extended_attributes, Interface& interface)
 {
-    bool inherit = lexer.consume_specific("inherit");
+    bool inherit = lexer.consume_specific("inherit"sv);
     if (inherit)
         consume_whitespace();
 
-    bool readonly = lexer.consume_specific("readonly");
+    bool readonly = lexer.consume_specific("readonly"sv);
     if (readonly)
         consume_whitespace();
 
-    if (lexer.consume_specific("attribute"))
+    if (lexer.consume_specific("attribute"sv))
         consume_whitespace();
     else
         report_parsing_error("expected 'attribute'"sv, filename, input, lexer.tell());
@@ -300,7 +300,7 @@ void Parser::parse_attribute(HashMap<ByteString, ByteString>& extended_attribute
 
 void Parser::parse_constant(Interface& interface)
 {
-    lexer.consume_specific("const");
+    lexer.consume_specific("const"sv);
     consume_whitespace();
 
     auto type = parse_type();
@@ -331,7 +331,7 @@ Vector<Parameter> Parser::parse_parameters()
         HashMap<ByteString, ByteString> extended_attributes;
         if (lexer.consume_specific('['))
             extended_attributes = parse_extended_attributes();
-        bool optional = lexer.consume_specific("optional");
+        bool optional = lexer.consume_specific("optional"sv);
         if (optional)
             consume_whitespace();
         if (lexer.consume_specific('[')) {
@@ -373,7 +373,7 @@ Vector<Parameter> Parser::parse_parameters()
 Function Parser::parse_function(HashMap<ByteString, ByteString>& extended_attributes, Interface& interface, IsSpecialOperation is_special_operation)
 {
     bool static_ = false;
-    if (lexer.consume_specific("static")) {
+    if (lexer.consume_specific("static"sv)) {
         static_ = true;
         consume_whitespace();
     }
@@ -766,7 +766,7 @@ void Parser::parse_dictionary(Interface& interface)
         bool required = false;
         HashMap<ByteString, ByteString> extended_attributes;
 
-        if (lexer.consume_specific("required")) {
+        if (lexer.consume_specific("required"sv)) {
            required = true;
            consume_whitespace();
        }
@@ -876,7 +876,7 @@ void Parser::parse_non_interface_entities(bool allow_interface, Interface& inter
         auto current_offset = lexer.tell();
         auto name = lexer.consume_until([](auto ch) { return is_ascii_space(ch); });
         consume_whitespace();
-        if (lexer.consume_specific("includes")) {
+        if (lexer.consume_specific("includes"sv)) {
            consume_whitespace();
            auto mixin_name = lexer.consume_until([](auto ch) { return is_ascii_space(ch) || ch == ';'; });
            interface.included_mixins.ensure(name).set(mixin_name);
@@ -977,7 +977,7 @@ Interface& Parser::parse()
     Vector<Interface&> imports;
     {
         HashTable<ByteString> required_imported_paths;
-        while (lexer.consume_specific("#import")) {
+        while (lexer.consume_specific("#import"sv)) {
             consume_whitespace();
             assert_specific('<');
             auto path = lexer.consume_until('>');
@@ -995,9 +995,9 @@ Interface& Parser::parse()
 
     parse_non_interface_entities(true, interface);
 
-    if (lexer.consume_specific("interface"))
+    if (lexer.consume_specific("interface"sv))
         parse_interface(interface);
-    else if (lexer.consume_specific("namespace"))
+    else if (lexer.consume_specific("namespace"sv))
         parse_namespace(interface);
 
     parse_non_interface_entities(false, interface);
@@ -89,19 +89,19 @@ Configuration Configuration::from_config(StringView libname)
                 escape = false;
             } else {
                 if (key_lexer.next_is("alt+")) {
-                    alt = key_lexer.consume_specific("alt+");
+                    alt = key_lexer.consume_specific("alt+"sv);
                     continue;
                 }
                 if (key_lexer.next_is("^[")) {
-                    alt = key_lexer.consume_specific("^[");
+                    alt = key_lexer.consume_specific("^["sv);
                     continue;
                 }
                 if (key_lexer.next_is("^")) {
-                    has_ctrl = key_lexer.consume_specific("^");
+                    has_ctrl = key_lexer.consume_specific("^"sv);
                     continue;
                 }
                 if (key_lexer.next_is("ctrl+")) {
-                    has_ctrl = key_lexer.consume_specific("ctrl+");
+                    has_ctrl = key_lexer.consume_specific("ctrl+"sv);
                     continue;
                 }
                 if (key_lexer.next_is("\\")) {
@@ -1145,23 +1145,23 @@ ErrorOr<AttributeListDeclaration::Definition, ParseError> Parser::parse_attribut
     // EnumeratedType ::= NotationType | Enumeration
     // NotationType ::= 'NOTATION' S '(' S? Name (S? '|' S? Name)* S? ')'
     // Enumeration ::= '(' S? Nmtoken (S? '|' S? Nmtoken)* S? ')'
-    if (m_lexer.consume_specific("CDATA")) {
+    if (m_lexer.consume_specific("CDATA"sv)) {
         type = AttributeListDeclaration::StringType::CData;
-    } else if (m_lexer.consume_specific("IDREFS")) {
+    } else if (m_lexer.consume_specific("IDREFS"sv)) {
         type = AttributeListDeclaration::TokenizedType::IDRefs;
-    } else if (m_lexer.consume_specific("IDREF")) {
+    } else if (m_lexer.consume_specific("IDREF"sv)) {
         type = AttributeListDeclaration::TokenizedType::IDRef;
-    } else if (m_lexer.consume_specific("ID")) {
+    } else if (m_lexer.consume_specific("ID"sv)) {
         type = AttributeListDeclaration::TokenizedType::ID;
-    } else if (m_lexer.consume_specific("ENTITIES")) {
+    } else if (m_lexer.consume_specific("ENTITIES"sv)) {
         type = AttributeListDeclaration::TokenizedType::Entities;
-    } else if (m_lexer.consume_specific("ENTITY")) {
+    } else if (m_lexer.consume_specific("ENTITY"sv)) {
         type = AttributeListDeclaration::TokenizedType::Entity;
-    } else if (m_lexer.consume_specific("NMTOKENS")) {
+    } else if (m_lexer.consume_specific("NMTOKENS"sv)) {
         type = AttributeListDeclaration::TokenizedType::NMTokens;
-    } else if (m_lexer.consume_specific("NMTOKEN")) {
+    } else if (m_lexer.consume_specific("NMTOKEN"sv)) {
         type = AttributeListDeclaration::TokenizedType::NMToken;
-    } else if (m_lexer.consume_specific("NOTATION")) {
+    } else if (m_lexer.consume_specific("NOTATION"sv)) {
         HashTable<Name> names;
         TRY(skip_whitespace(Required::Yes));
         TRY(expect("("sv));
@@ -1198,13 +1198,13 @@ ErrorOr<AttributeListDeclaration::Definition, ParseError> Parser::parse_attribut
 
     // DefaultDecl ::= '#REQUIRED' | '#IMPLIED'
     // | (('#FIXED' S)? AttValue)
-    if (m_lexer.consume_specific("#REQUIRED")) {
+    if (m_lexer.consume_specific("#REQUIRED"sv)) {
         default_ = AttributeListDeclaration::Required {};
-    } else if (m_lexer.consume_specific("#IMPLIED")) {
+    } else if (m_lexer.consume_specific("#IMPLIED"sv)) {
         default_ = AttributeListDeclaration::Implied {};
     } else {
         bool fixed = false;
-        if (m_lexer.consume_specific("#FIXED")) {
+        if (m_lexer.consume_specific("#FIXED"sv)) {
             TRY(skip_whitespace(Required::Yes));
             fixed = true;
         }
@@ -1273,19 +1273,19 @@ ErrorOr<ElementDeclaration::ContentSpec, ParseError> Parser::parse_content_spec(
     Optional<ElementDeclaration::ContentSpec> content_spec;
 
     // contentspec ::= 'EMPTY' | 'ANY' | Mixed | children
-    if (m_lexer.consume_specific("EMPTY")) {
+    if (m_lexer.consume_specific("EMPTY"sv)) {
         content_spec = ElementDeclaration::Empty {};
-    } else if (m_lexer.consume_specific("ANY")) {
+    } else if (m_lexer.consume_specific("ANY"sv)) {
         content_spec = ElementDeclaration::Any {};
     } else {
         TRY(expect("("sv));
         TRY(skip_whitespace());
-        if (m_lexer.consume_specific("#PCDATA")) {
+        if (m_lexer.consume_specific("#PCDATA"sv)) {
             HashTable<Name> names;
             // Mixed ::= '(' S? '#PCDATA' (S? '|' S? Name)* S? ')*'
             // | '(' S? '#PCDATA' S? ')'
             TRY(skip_whitespace());
-            if (m_lexer.consume_specific(")*")) {
+            if (m_lexer.consume_specific(")*"sv)) {
                 content_spec = ElementDeclaration::Mixed { .types = {}, .many = true };
             } else if (m_lexer.consume_specific(')')) {
                 content_spec = ElementDeclaration::Mixed { .types = {}, .many = false };
@@ -1599,7 +1599,7 @@ ErrorOr<ExternalID, ParseError> Parser::parse_external_id()
     Optional<PublicID> public_id;
     SystemID system_id;
 
-    if (m_lexer.consume_specific("SYSTEM")) {
+    if (m_lexer.consume_specific("SYSTEM"sv)) {
         auto accept = accept_rule();
         TRY(skip_whitespace(Required::Yes));
         system_id = SystemID { TRY(parse_system_id_literal()) };