
LibWeb: Add CSS Tokenizer::consume_as_much_whitespace_as_possible()

This is a step in the spec in 3 places, and we had it implemented
differently in each one. This unifies them and makes it clearer what
we're doing.
Sam Atkins 2021-10-21 16:53:49 +01:00 committed by Andreas Kling
parent dfbdc20f87
commit 9a2eecaca4
2 changed files with 12 additions and 21 deletions

@@ -490,13 +490,7 @@ String Tokenizer::consume_a_name()
 Token Tokenizer::consume_a_url_token()
 {
     auto token = create_new_token(Token::Type::Url);
-    for (;;) {
-        if (!is_whitespace(peek_code_point())) {
-            break;
-        }
-        (void)next_code_point();
-    }
+    consume_as_much_whitespace_as_possible();

     for (;;) {
@@ -512,13 +506,8 @@ Token Tokenizer::consume_a_url_token()
         }

         if (is_whitespace(input)) {
-            for (;;) {
-                if (!is_whitespace(peek_code_point())) {
-                    break;
-                }
-                input = next_code_point();
-            }
+            consume_as_much_whitespace_as_possible();
             input = peek_code_point();

             if (is_eof(input)) {
                 log_parse_error();
@@ -580,6 +569,13 @@ void Tokenizer::consume_the_remnants_of_a_bad_url()
     }
 }

+void Tokenizer::consume_as_much_whitespace_as_possible()
+{
+    while (is_whitespace(peek_code_point())) {
+        (void)next_code_point();
+    }
+}
+
 void Tokenizer::reconsume_current_input_code_point()
 {
     m_utf8_iterator = m_prev_utf8_iterator;
@@ -763,13 +759,7 @@ Token Tokenizer::consume_a_token()
     if (is_whitespace(input)) {
         dbgln_if(CSS_TOKENIZER_DEBUG, "is whitespace");

-        auto next = peek_code_point();
-        while (is_whitespace(next)) {
-            (void)next_code_point();
-            next = peek_code_point();
-        }
+        consume_as_much_whitespace_as_possible();
         return create_new_token(Token::Type::Whitespace);
     }
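
For readers without the spec open: "consume as much whitespace as possible" just means advancing the input stream until the next code point is no longer whitespace. The sketch below shows that step in isolation; TinyTokenizer and its peek()/next() helpers are hypothetical stand-ins for the real tokenizer's peek_code_point()/next_code_point(), not SerenityOS code.

#include <cstddef>
#include <cstdio>
#include <string>

// Hypothetical stand-in for the CSS Tokenizer, reduced to plain chars so the
// whitespace-skipping step can be shown on its own.
struct TinyTokenizer {
    std::string input;
    std::size_t index { 0 };

    // Stand-in for peek_code_point(); returns '\0' at end of input.
    char peek() const { return index < input.size() ? input[index] : '\0'; }

    // Stand-in for next_code_point(); consumes and returns the next code point.
    char next() { return index < input.size() ? input[index++] : '\0'; }

    // CSS whitespace is a space, tab, or newline (simplified to single chars here).
    static bool is_whitespace(char c) { return c == ' ' || c == '\t' || c == '\n'; }

    // The unified spec step: keep consuming while the next code point is whitespace.
    void consume_as_much_whitespace_as_possible()
    {
        while (is_whitespace(peek()))
            (void)next();
    }
};

int main()
{
    TinyTokenizer tokenizer { "  \t\n  url(foo)" };
    tokenizer.consume_as_much_whitespace_as_possible();
    std::printf("next code point: '%c'\n", tokenizer.peek()); // prints 'u'
}

After this change, the three call sites in Tokenizer.cpp all share the single loop above (as consume_as_much_whitespace_as_possible()) instead of each spelling it out slightly differently.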