Everywhere: Rename ASSERT => VERIFY
(...and ASSERT_NOT_REACHED => VERIFY_NOT_REACHED)

Since all of these checks are done in release builds as well, let's rename them to VERIFY to prevent confusion, as everyone is used to assertions being compiled out in release.

We can introduce a new ASSERT macro that is specifically for debug checks, but I'm doing this wholesale conversion first since we've accumulated thousands of these already, and it's not immediately obvious which ones are suitable for ASSERT.
parent b33a6a443e
commit 5d180d1f99

725 changed files with 3448 additions and 3448 deletions
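As a rough illustration of the distinction the commit message draws, here is a minimal sketch, not the actual AK/Assertions.h definitions: an always-on VERIFY next to a hypothetical debug-only ASSERT. The helper verification_failed and the CHECKS_DEBUG guard are assumptions made up for this example, not SerenityOS API.

// Sketch only; not the real AK/Assertions.h. It illustrates the semantics the
// commit message describes: VERIFY fires in every build, while a debug-only
// ASSERT would compile out in release.
#include <cstdio>
#include <cstdlib>

// Hypothetical failure handler, used only for this example.
[[noreturn]] inline void verification_failed(const char* expr, const char* file, int line)
{
    std::fprintf(stderr, "VERIFICATION FAILED: %s at %s:%d\n", expr, file, line);
    std::abort();
}

// Checked in all builds, including release.
#define VERIFY(expr) \
    ((expr) ? (void)0 : verification_failed(#expr, __FILE__, __LINE__))

#define VERIFY_NOT_REACHED() VERIFY(false)

// A possible future debug-only ASSERT, as the message suggests: active only
// when a debug flag (here, CHECKS_DEBUG) is defined, otherwise a no-op.
#ifdef CHECKS_DEBUG
#    define ASSERT(expr) VERIFY(expr)
#else
#    define ASSERT(expr) ((void)0)
#endif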
@@ -192,7 +192,7 @@ void BinaryExpression::dump(size_t indent) const
 
     m_lhs->dump(indent + 1);
     print_indent(indent + 1);
-    ASSERT(op_string);
+    VERIFY(op_string);
     outln("{}", op_string);
     m_rhs->dump(indent + 1);
 }
@@ -216,7 +216,7 @@ void AssignmentExpression::dump(size_t indent) const
 
     m_lhs->dump(indent + 1);
     print_indent(indent + 1);
-    ASSERT(op_string);
+    VERIFY(op_string);
     outln("{}", op_string);
     m_rhs->dump(indent + 1);
 }
@@ -301,7 +301,7 @@ void UnaryExpression::dump(size_t indent) const
         op_string = "<invalid>";
     }
 
-    ASSERT(op_string);
+    VERIFY(op_string);
     print_indent(indent + 1);
     outln("{}", op_string);
     m_lhs->dump(indent + 1);
@@ -53,12 +53,12 @@ public:
     ASTNode* parent() const { return m_parent; }
     Position start() const
     {
-        ASSERT(m_start.has_value());
+        VERIFY(m_start.has_value());
         return m_start.value();
     }
     Position end() const
     {
-        ASSERT(m_end.has_value());
+        VERIFY(m_end.has_value());
         return m_end.value();
     }
     const FlyString& filename() const
@@ -46,7 +46,7 @@ char Lexer::peek(size_t offset) const
 
 char Lexer::consume()
 {
-    ASSERT(m_index < m_input.length());
+    VERIFY(m_index < m_input.length());
     char ch = m_input[m_index++];
     m_previous_position = m_position;
     if (ch == '\n') {
@@ -660,8 +660,8 @@ Vector<Token> Lexer::lex()
             StringView prefix_string = m_input.substring_view(prefix_start, m_index - prefix_start);
             while (peek()) {
                 if (consume() == '"') {
-                    ASSERT(m_index >= prefix_string.length() + 2);
-                    ASSERT(m_input[m_index - 1] == '"');
+                    VERIFY(m_index >= prefix_string.length() + 2);
+                    VERIFY(m_input[m_index - 1] == '"');
                     if (m_input[m_index - 1 - prefix_string.length() - 1] == ')') {
                         StringView suffix_string = m_input.substring_view(m_index - 1 - prefix_string.length(), prefix_string.length());
                         if (prefix_string == suffix_string)
@@ -124,7 +124,7 @@ struct Token {
             FOR_EACH_TOKEN_TYPE
 #undef __TOKEN
         }
-        ASSERT_NOT_REACHED();
+        VERIFY_NOT_REACHED();
     }
 
     const char* to_string() const
@@ -657,7 +657,7 @@ void Parser::load_state()
 
 Optional<Parser::DeclarationType> Parser::match_declaration_in_function_definition()
 {
-    ASSERT_NOT_REACHED();
+    VERIFY_NOT_REACHED();
 }
 
 bool Parser::done()
@@ -667,8 +667,8 @@ bool Parser::done()
 
 StringView Parser::text_of_token(const Cpp::Token& token) const
 {
-    ASSERT(token.m_start.line == token.m_end.line);
-    ASSERT(token.m_start.column <= token.m_end.column);
+    VERIFY(token.m_start.line == token.m_end.line);
+    VERIFY(token.m_start.column <= token.m_end.column);
     return m_lines[token.m_start.line].substring_view(token.m_start.column, token.m_end.column - token.m_start.column + 1);
 }
 
@@ -680,7 +680,7 @@ StringView Parser::text_of_node(const ASTNode& node) const
 StringView Parser::text_of_range(Position start, Position end) const
 {
     if (start.line == end.line) {
-        ASSERT(start.column <= end.column);
+        VERIFY(start.column <= end.column);
         return m_lines[start.line].substring_view(start.column, end.column - start.column + 1);
     }
 
@@ -694,7 +694,7 @@ StringView Parser::text_of_range(Position start, Position end) const
     });
     auto start_index = index_of_position(start);
     auto end_index = index_of_position(end);
-    ASSERT(end_index >= start_index);
+    VERIFY(end_index >= start_index);
     return m_program.substring_view(start_index, end_index - start_index);
 }
 
@@ -741,13 +741,13 @@ Position Parser::position() const
 
 RefPtr<ASTNode> Parser::eof_node() const
 {
-    ASSERT(m_tokens.size());
+    VERIFY(m_tokens.size());
     return node_at(m_tokens.last().m_end);
 }
 
 RefPtr<ASTNode> Parser::node_at(Position pos) const
 {
-    ASSERT(!m_tokens.is_empty());
+    VERIFY(!m_tokens.is_empty());
     RefPtr<ASTNode> match_node;
     for (auto& node : m_nodes) {
         if (node.start() > pos || node.end() < pos)
@@ -827,7 +827,7 @@ NonnullRefPtr<StringLiteral> Parser::parse_string_literal(ASTNode& parent)
     while (!eof()) {
         auto token = peek();
         if (token.type() != Token::Type::DoubleQuotedString && token.type() != Token::Type::EscapeSequence) {
-            ASSERT(start_token_index.has_value());
+            VERIFY(start_token_index.has_value());
             end_token_index = m_state.token_index - 1;
             break;
         }
@@ -841,8 +841,8 @@ NonnullRefPtr<StringLiteral> Parser::parse_string_literal(ASTNode& parent)
         end_token_index = m_tokens.size() - 1;
     }
 
-    ASSERT(start_token_index.has_value());
-    ASSERT(end_token_index.has_value());
+    VERIFY(start_token_index.has_value());
+    VERIFY(end_token_index.has_value());
 
     Token start_token = m_tokens[start_token_index.value()];
     Token end_token = m_tokens[end_token_index.value()];
@@ -75,7 +75,7 @@ void Preprocessor::handle_preprocessor_line(const StringView& line)
     }
 
     if (keyword == "else") {
-        ASSERT(m_current_depth > 0);
+        VERIFY(m_current_depth > 0);
         if (m_depths_of_not_taken_branches.contains_slow(m_current_depth - 1)) {
            m_depths_of_not_taken_branches.remove_all_matching([this](auto x) { return x == m_current_depth - 1; });
            m_state = State::Normal;
@@ -88,7 +88,7 @@ void Preprocessor::handle_preprocessor_line(const StringView& line)
     }
 
     if (keyword == "endif") {
-        ASSERT(m_current_depth > 0);
+        VERIFY(m_current_depth > 0);
         --m_current_depth;
         if (m_depths_of_not_taken_branches.contains_slow(m_current_depth)) {
             m_depths_of_not_taken_branches.remove_all_matching([this](auto x) { return x == m_current_depth; });
@@ -164,7 +164,7 @@ void Preprocessor::handle_preprocessor_line(const StringView& line)
         return;
     }
     dbgln("Unsupported preprocessor keyword: {}", keyword);
-    ASSERT_NOT_REACHED();
+    VERIFY_NOT_REACHED();
 }
 
 };