diff --git a/Userland/Libraries/LibWeb/CSS/Parser/Parser.cpp b/Userland/Libraries/LibWeb/CSS/Parser/Parser.cpp
index 3b51b7a822..9067744016 100644
--- a/Userland/Libraries/LibWeb/CSS/Parser/Parser.cpp
+++ b/Userland/Libraries/LibWeb/CSS/Parser/Parser.cpp
@@ -1869,60 +1869,85 @@ Optional<Declaration> Parser::consume_a_declaration(TokenStream<T>& tok
     return declaration;
 }
 
+// 5.4.5. Consume a list of declarations
+// https://www.w3.org/TR/css-syntax-3/#consume-list-of-declarations
 template<typename T>
 Vector<DeclarationOrAtRule> Parser::consume_a_list_of_declarations(TokenStream<T>& tokens)
 {
-    Vector<DeclarationOrAtRule> list;
+    // To consume a list of declarations:
+    // Create an initially empty list of declarations.
+    Vector<DeclarationOrAtRule> list_of_declarations;
+
+    // Repeatedly consume the next input token:
     for (;;) {
         auto& token = tokens.next_token();
+
+        // <whitespace-token>
+        // <semicolon-token>
         if (token.is(Token::Type::Whitespace) || token.is(Token::Type::Semicolon)) {
+            // Do nothing.
             continue;
         }
 
+        // <EOF-token>
        if (token.is(Token::Type::EndOfFile)) {
-            return list;
+            // Return the list of declarations.
+            return list_of_declarations;
         }
 
+        // <at-keyword-token>
         if (token.is(Token::Type::AtKeyword)) {
+            // Reconsume the current input token.
             tokens.reconsume_current_input_token();
-            list.append(DeclarationOrAtRule(consume_an_at_rule(tokens)));
+
+            // Consume an at-rule. Append the returned rule to the list of declarations.
+            list_of_declarations.append(DeclarationOrAtRule(consume_an_at_rule(tokens)));
             continue;
         }
 
+        // <ident-token>
         if (token.is(Token::Type::Ident)) {
-            Vector<ComponentValue> temp;
-            temp.append(token);
+            // Initialize a temporary list initially filled with the current input token.
+            Vector<ComponentValue> temporary_list;
+            temporary_list.append(token);
 
+            // As long as the next input token is anything other than a <semicolon-token> or <EOF-token>,
+            // consume a component value and append it to the temporary list.
             for (;;) {
                 auto& peek = tokens.peek_token();
-                if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile)) {
+                if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile))
                     break;
-                }
-                temp.append(consume_a_component_value(tokens));
+                temporary_list.append(consume_a_component_value(tokens));
             }
 
-            auto token_stream = TokenStream(temp);
-            auto maybe_declaration = consume_a_declaration(token_stream);
-            if (maybe_declaration.has_value()) {
-                list.append(DeclarationOrAtRule(maybe_declaration.value()));
-            }
+            // Consume a declaration from the temporary list. If anything was returned, append it to the list of declarations.
+            auto token_stream = TokenStream(temporary_list);
+            if (auto maybe_declaration = consume_a_declaration(token_stream); maybe_declaration.has_value())
+                list_of_declarations.append(DeclarationOrAtRule(maybe_declaration.value()));
+
             continue;
         }
 
-        log_parse_error();
-        tokens.reconsume_current_input_token();
+        // anything else
+        {
+            // This is a parse error.
+            log_parse_error();
 
-        for (;;) {
-            auto& peek = tokens.peek_token();
-            if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile))
-                break;
-            dbgln_if(CSS_PARSER_DEBUG, "Discarding token: '{}'", peek.to_debug_string());
-            (void)consume_a_component_value(tokens);
+            // Reconsume the current input token.
+            tokens.reconsume_current_input_token();
+
+            // As long as the next input token is anything other than a <semicolon-token> or <EOF-token>,
+            // consume a component value and throw away the returned value.
+            for (;;) {
+                auto& peek = tokens.peek_token();
+                if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile))
+                    break;
+                dbgln_if(CSS_PARSER_DEBUG, "Discarding token: '{}'", peek.to_debug_string());
+                (void)consume_a_component_value(tokens);
+            }
         }
     }
-
-    return list;
 }
 
 RefPtr<CSSRule> Parser::parse_as_css_rule()