LibWeb: Use setter for HTMLToken::m_{start,end}_position
Commit f2e3c770f9 (parent 8b31e41692)
2 changed files with 16 additions and 11 deletions
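
In short: HTMLTokenizer previously wrote straight to HTMLToken's private m_start_position and m_end_position members through its friend access; this commit routes those writes through new badge-gated setters. The change in call shape, quoted from the first HTMLTokenizer.cpp hunk below:

    // Before: direct write to a private member, permitted by "friend class HTMLTokenizer;".
    m_current_token.m_start_position = nth_last_position(2);

    // After: a public setter gated on Badge<HTMLTokenizer>, constructed in place as "{}".
    m_current_token.set_start_position({}, nth_last_position(2));

A self-contained sketch of the badge idiom follows the HTMLToken.h hunks below.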
HTMLToken.h

@@ -16,6 +16,8 @@
 
 namespace Web::HTML {
 
+class HTMLTokenizer;
+
 class HTMLToken {
     friend class HTMLDocumentParser;
     friend class HTMLTokenizer;

@@ -282,6 +284,9 @@ public:
     Position const& start_position() const { return m_start_position; }
     Position const& end_position() const { return m_end_position; }
 
+    void set_start_position(Badge<HTMLTokenizer>, Position start_position) { m_start_position = start_position; }
+    void set_end_position(Badge<HTMLTokenizer>, Position end_position) { m_end_position = end_position; }
+
 private:
     Type m_type { Type::Invalid };
 
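The new setters take Badge<HTMLTokenizer> as their first parameter. The badge idiom turns "who may call this public function" into a compile-time check: only the class named in the badge can construct a badge value, so only that class can supply the required argument. Below is a minimal, self-contained sketch of the idiom for illustration; it is not the actual AK::Badge or the LibWeb classes, and Badge, Position, Token and Tokenizer here are placeholder types.

    #include <cstddef>

    // Minimal sketch of the badge idiom (illustrative; not the actual AK::Badge).
    template<typename T>
    class Badge {
    private:
        friend T;                    // Only T may construct a Badge<T>.
        constexpr Badge() = default;

        Badge(Badge const&) = delete;
        Badge& operator=(Badge const&) = delete;
    };

    struct Position {
        std::size_t line { 0 };
        std::size_t column { 0 };
    };

    class Tokenizer; // Forward declaration, like "class HTMLTokenizer;" in the hunk above.

    class Token {
    public:
        Position const& start_position() const { return m_start_position; }

        // Public, but callable only with a Badge<Tokenizer> -- and only
        // Tokenizer can construct one.
        void set_start_position(Badge<Tokenizer>, Position position) { m_start_position = position; }

    private:
        Position m_start_position;
    };

    class Tokenizer {
    public:
        void mark_token_start(Token& token, Position position)
        {
            // Inside a Tokenizer member function, "{}" default-constructs the
            // badge in place; the private constructor is reachable via "friend T".
            token.set_start_position({}, position);
        }
    };

    // Anywhere outside Tokenizer, token.set_start_position({}, ...) fails to
    // compile, because Badge<Tokenizer>'s constructors are inaccessible there.

Note that the diff keeps the existing friend declarations, so HTMLTokenizer could still touch the members directly; one plausible reading of the design choice is that the badge-gated setters make each position write explicit at the call site while keeping the data members private.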
HTMLTokenizer.cpp

@@ -284,7 +284,7 @@ _StartOfFunction:
             {
                 log_parse_error();
                 create_new_token(HTMLToken::Type::Comment);
-                m_current_token.m_start_position = nth_last_position(2);
+                m_current_token.set_start_position({}, nth_last_position(2));
                 RECONSUME_IN(BogusComment);
             }
             ON_EOF

@@ -306,44 +306,44 @@ _StartOfFunction:
             ON_WHITESPACE
             {
                 m_current_token.set_tag_name(consume_current_builder());
-                m_current_token.m_end_position = nth_last_position(1);
+                m_current_token.set_end_position({}, nth_last_position(1));
                 SWITCH_TO(BeforeAttributeName);
             }
             ON('/')
             {
                 m_current_token.set_tag_name(consume_current_builder());
-                m_current_token.m_end_position = nth_last_position(0);
+                m_current_token.set_end_position({}, nth_last_position(0));
                 SWITCH_TO(SelfClosingStartTag);
             }
             ON('>')
             {
                 m_current_token.set_tag_name(consume_current_builder());
-                m_current_token.m_end_position = nth_last_position(1);
+                m_current_token.set_end_position({}, nth_last_position(1));
                 SWITCH_TO_AND_EMIT_CURRENT_TOKEN(Data);
             }
             ON_ASCII_UPPER_ALPHA
             {
                 m_current_builder.append_code_point(to_ascii_lowercase(current_input_character.value()));
-                m_current_token.m_end_position = nth_last_position(0);
+                m_current_token.set_end_position({}, nth_last_position(0));
                 continue;
             }
             ON(0)
             {
                 log_parse_error();
                 m_current_builder.append_code_point(0xFFFD);
-                m_current_token.m_end_position = nth_last_position(0);
+                m_current_token.set_end_position({}, nth_last_position(0));
                 continue;
             }
             ON_EOF
             {
                 log_parse_error();
-                m_current_token.m_end_position = nth_last_position(0);
+                m_current_token.set_end_position({}, nth_last_position(0));
                 EMIT_EOF;
             }
             ANYTHING_ELSE
             {
                 m_current_builder.append_code_point(current_input_character.value());
-                m_current_token.m_end_position = nth_last_position(0);
+                m_current_token.set_end_position({}, nth_last_position(0));
                 continue;
             }
         }

@@ -382,7 +382,7 @@ _StartOfFunction:
             DONT_CONSUME_NEXT_INPUT_CHARACTER;
             if (consume_next_if_match("--")) {
                 create_new_token(HTMLToken::Type::Comment);
-                m_current_token.m_start_position = nth_last_position(4);
+                m_current_token.set_start_position({}, nth_last_position(4));
                 SWITCH_TO(CommentStart);
             }
             if (consume_next_if_match("DOCTYPE", CaseSensitivity::CaseInsensitive)) {

@@ -2679,7 +2679,7 @@ void HTMLTokenizer::create_new_token(HTMLToken::Type type)
         break;
     }
 
-    m_current_token.m_start_position = nth_last_position(offset);
+    m_current_token.set_start_position({}, nth_last_position(offset));
 }
 
 HTMLTokenizer::HTMLTokenizer(StringView const& input, String const& encoding)

@@ -2712,7 +2712,7 @@ void HTMLTokenizer::will_emit(HTMLToken& token)
 {
     if (token.is_start_tag())
         m_last_emitted_start_tag_name = token.tag_name();
-    token.m_end_position = nth_last_position(0);
+    token.set_end_position({}, nth_last_position(0));
 }
 
 bool HTMLTokenizer::current_end_tag_token_is_appropriate() const