Mirror of https://github.com/RGBCube/serenity, synced 2025-07-27 09:17:35 +00:00

Everywhere: Rename ASSERT => VERIFY

(...and ASSERT_NOT_REACHED => VERIFY_NOT_REACHED)

Since all of these checks are done in release builds as well, let's rename them to VERIFY to prevent confusion, as everyone is used to assertions being compiled out in release. We can introduce a new ASSERT macro that is specifically for debug checks, but I'm doing this wholesale conversion first since we've accumulated thousands of these already, and it's not immediately obvious which ones are suitable for ASSERT.

parent b33a6a443e
commit 5d180d1f99
725 changed files with 3448 additions and 3448 deletions
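The distinction the commit message draws is between checks that must stay active in release builds (VERIFY) and a possible future ASSERT that would only run in debug builds. As a rough illustration of that split, here is a minimal, hypothetical sketch; it is not the contents of the repository's actual AK/Assertions.h, and the debug-only ASSERT shown is the macro the commit message says may be introduced later, not one that exists in this commit:

// Minimal sketch only: hypothetical definitions illustrating the split the
// commit message describes, not the repository's real assertion machinery.
#include <cstdio>
#include <cstdlib>

// VERIFY stays active in every build type.
#define VERIFY(expr)                                                                         \
    do {                                                                                     \
        if (!(expr)) {                                                                       \
            std::fprintf(stderr, "VERIFY(%s) failed at %s:%d\n", #expr, __FILE__, __LINE__); \
            std::abort();                                                                    \
        }                                                                                    \
    } while (0)

#define VERIFY_NOT_REACHED() VERIFY(false)

// A hypothetical debug-only ASSERT: compiled out entirely when NDEBUG is set.
#ifdef NDEBUG
#    define ASSERT(expr) ((void)0)
#else
#    define ASSERT(expr) VERIFY(expr)
#endif

int main()
{
    int index = 3;
    VERIFY(index < 4);  // checked in release and debug builds alike
    ASSERT(index >= 0); // would be checked only in debug builds
    return 0;
}

The hunks below show the rename landing mechanically across the AK/ headers on this page.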
@@ -43,12 +43,12 @@ struct Array {
     constexpr const T& at(size_t index) const
     {
-        ASSERT(index < size());
+        VERIFY(index < size());
         return (*this)[index];
     }
     constexpr T& at(size_t index)
     {
-        ASSERT(index < size());
+        VERIFY(index < size());
         return (*this)[index];
     }
@@ -31,9 +31,9 @@
 #else
 #    include <assert.h>
 #    ifndef __serenity__
-#        define ASSERT assert
-#        define ASSERT_NOT_REACHED() assert(false)
+#        define VERIFY assert
+#        define VERIFY_NOT_REACHED() assert(false)
 #        define RELEASE_ASSERT assert
-#        define TODO ASSERT_NOT_REACHED
+#        define TODO VERIFY_NOT_REACHED
 #    endif
 #endif
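Note what the non-__serenity__ branch above implies: on other host systems the renamed macros fall back to the libc assert, so there (unlike on Serenity itself) a VERIFY still disappears when NDEBUG is defined. The following is a small host-side sketch of that fallback, using only the names visible in the hunk above; it is an illustration, not code from the commit:

// Host-side fallback sketch mirroring the #ifndef __serenity__ branch above.
#include <assert.h>

#define VERIFY assert
#define VERIFY_NOT_REACHED() assert(false)

int main()
{
    VERIFY(1 + 1 == 2);      // with NDEBUG defined, libc assert expands to nothing here
    // VERIFY_NOT_REACHED(); // would call assert(false) in a debug host build
    return 0;
}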
AK/Bitmap.h (30 changed lines)
@@ -85,12 +85,12 @@ public:
     size_t size_in_bytes() const { return ceil_div(m_size, static_cast<size_t>(8)); }
     bool get(size_t index) const
     {
-        ASSERT(index < m_size);
+        VERIFY(index < m_size);
         return 0 != (m_data[index / 8] & (1u << (index % 8)));
     }
     void set(size_t index, bool value) const
     {
-        ASSERT(index < m_size);
+        VERIFY(index < m_size);
         if (value)
             m_data[index / 8] |= static_cast<u8>((1u << (index % 8)));
         else
@@ -104,8 +104,8 @@ public:

     size_t count_in_range(size_t start, size_t len, bool value) const
     {
-        ASSERT(start < m_size);
-        ASSERT(start + len <= m_size);
+        VERIFY(start < m_size);
+        VERIFY(start + len <= m_size);
         if (len == 0)
             return 0;

@@ -153,8 +153,8 @@ public:

     void grow(size_t size, bool default_value)
     {
-        ASSERT(m_owned);
-        ASSERT(size > m_size);
+        VERIFY(m_owned);
+        VERIFY(size > m_size);

         auto previous_size_bytes = size_in_bytes();
         auto previous_size = m_size;
@@ -176,8 +176,8 @@ public:
     template<bool VALUE>
     void set_range(size_t start, size_t len)
     {
-        ASSERT(start < m_size);
-        ASSERT(start + len <= m_size);
+        VERIFY(start < m_size);
+        VERIFY(start + len <= m_size);
         if (len == 0)
             return;

@@ -228,7 +228,7 @@ public:
     template<bool VALUE>
     Optional<size_t> find_one_anywhere(size_t hint = 0) const
     {
-        ASSERT(hint < m_size);
+        VERIFY(hint < m_size);
         const u8* end = &m_data[m_size / 8];

         for (;;) {
@@ -249,7 +249,7 @@ public:
                 byte = m_data[i];
                 if constexpr (!VALUE)
                     byte = ~byte;
-                ASSERT(byte != 0);
+                VERIFY(byte != 0);
                 return i * 8 + __builtin_ffs(byte) - 1;
             }
         }
@@ -264,7 +264,7 @@ public:
             u8 byte = VALUE ? 0x00 : 0xff;
             size_t i = (const u8*)ptr32 - &m_data[0];
             size_t byte_count = m_size / 8;
-            ASSERT(i <= byte_count);
+            VERIFY(i <= byte_count);
             while (i < byte_count && m_data[i] == byte)
                 i++;
             if (i == byte_count) {
@@ -279,7 +279,7 @@ public:
                 byte = m_data[i];
                 if constexpr (!VALUE)
                     byte = ~byte;
-                ASSERT(byte != 0);
+                VERIFY(byte != 0);
                 return i * 8 + __builtin_ffs(byte) - 1;
             }

@@ -288,7 +288,7 @@ public:
             val32 = *ptr32;
             if constexpr (!VALUE)
                 val32 = ~val32;
-            ASSERT(val32 != 0);
+            VERIFY(val32 != 0);
             return ((const u8*)ptr32 - &m_data[0]) * 8 + __builtin_ffsl(val32) - 1;
         }
     }
@@ -317,7 +317,7 @@ public:
                 byte = m_data[i];
                 if constexpr (!VALUE)
                     byte = ~byte;
-                ASSERT(byte != 0);
+                VERIFY(byte != 0);
                 return i * 8 + __builtin_ffs(byte) - 1;
             }

@@ -509,7 +509,7 @@ public:
         : m_size(size)
         , m_owned(true)
     {
-        ASSERT(m_size != 0);
+        VERIFY(m_size != 0);
         m_data = reinterpret_cast<u8*>(kmalloc(size_in_bytes()));
         fill(default_value);
     }
@@ -54,12 +54,12 @@ public:

     u8& operator[](size_t i)
     {
-        ASSERT(i < m_size);
+        VERIFY(i < m_size);
         return m_data[i];
     }
     const u8& operator[](size_t i) const
     {
-        ASSERT(i < m_size);
+        VERIFY(i < m_size);
         return m_data[i];
     }
     bool is_empty() const { return !m_size; }
@@ -83,7 +83,7 @@ public:
     // NOTE: trim() does not reallocate.
     void trim(size_t size)
     {
-        ASSERT(size <= m_size);
+        VERIFY(size <= m_size);
         m_size = size;
     }

@@ -145,12 +145,12 @@ public:

     u8& operator[](size_t i)
     {
-        ASSERT(m_impl);
+        VERIFY(m_impl);
         return (*m_impl)[i];
     }
     u8 operator[](size_t i) const
     {
-        ASSERT(m_impl);
+        VERIFY(m_impl);
         return (*m_impl)[i];
     }
     bool is_empty() const { return !m_impl || m_impl->is_empty(); }
@@ -215,7 +215,7 @@ public:
             return {};

         // I cannot hand you a slice I don't have
-        ASSERT(offset + size <= this->size());
+        VERIFY(offset + size <= this->size());

         return copy(offset_pointer(offset), size);
     }
@@ -232,7 +232,7 @@ public:
     {
         if (data_size == 0)
             return;
-        ASSERT(data != nullptr);
+        VERIFY(data != nullptr);
         int old_size = size();
         grow(size() + data_size);
         __builtin_memcpy(this->data() + old_size, data, data_size);
@@ -246,7 +246,7 @@ public:
     void overwrite(size_t offset, const void* data, size_t data_size)
     {
         // make sure we're not told to write past the end
-        ASSERT(offset + data_size <= size());
+        VERIFY(offset + data_size <= size());
         __builtin_memcpy(this->data() + offset, data, data_size);
     }

@@ -285,7 +285,7 @@ inline ByteBufferImpl::ByteBufferImpl(const void* data, size_t size)

 inline void ByteBufferImpl::grow(size_t size)
 {
-    ASSERT(size > m_size);
+    VERIFY(size > m_size);
     if (size == 0) {
         if (m_data)
             kfree(m_data);
@ -156,13 +156,13 @@ public:
|
|||
|
||||
ALWAYS_INLINE constexpr bool operator!() const
|
||||
{
|
||||
ASSERT(!m_overflow);
|
||||
VERIFY(!m_overflow);
|
||||
return !m_value;
|
||||
}
|
||||
|
||||
ALWAYS_INLINE constexpr T value() const
|
||||
{
|
||||
ASSERT(!m_overflow);
|
||||
VERIFY(!m_overflow);
|
||||
return m_value;
|
||||
}
|
||||
|
||||
|
|
|
@ -42,7 +42,7 @@
|
|||
#ifndef DBGLN_NO_COMPILETIME_FORMAT_CHECK
|
||||
namespace AK::Format::Detail {
|
||||
|
||||
// We have to define a local "purely constexpr" Array that doesn't lead back to us (via e.g. ASSERT)
|
||||
// We have to define a local "purely constexpr" Array that doesn't lead back to us (via e.g. VERIFY)
|
||||
template<typename T, size_t Size>
|
||||
struct Array {
|
||||
constexpr static size_t size() { return Size; }
|
||||
|
|
|
@ -50,7 +50,7 @@ public:
|
|||
|
||||
T dequeue_end()
|
||||
{
|
||||
ASSERT(!this->is_empty());
|
||||
VERIFY(!this->is_empty());
|
||||
auto& slot = this->elements()[(this->m_head + this->m_size - 1) % Capacity];
|
||||
T value = move(slot);
|
||||
slot.~T();
|
||||
|
|
|
@ -55,7 +55,7 @@ public:
|
|||
}
|
||||
|
||||
const auto nwritten = write(bytes);
|
||||
ASSERT(nwritten == bytes.size());
|
||||
VERIFY(nwritten == bytes.size());
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -123,7 +123,7 @@ public:
|
|||
|
||||
Bytes reserve_contigous_space(size_t count)
|
||||
{
|
||||
ASSERT(count <= remaining_contigous_space());
|
||||
VERIFY(count <= remaining_contigous_space());
|
||||
|
||||
Bytes bytes { m_queue.m_storage + (m_queue.head_index() + m_queue.size()) % Capacity, count };
|
||||
|
||||
|
|
|
@ -76,7 +76,7 @@ public:
|
|||
|
||||
T dequeue()
|
||||
{
|
||||
ASSERT(!is_empty());
|
||||
VERIFY(!is_empty());
|
||||
auto& slot = elements()[m_head];
|
||||
T value = move(slot);
|
||||
slot.~T();
|
||||
|
|
|
@ -91,22 +91,22 @@ public:
|
|||
|
||||
T& first()
|
||||
{
|
||||
ASSERT(m_head);
|
||||
VERIFY(m_head);
|
||||
return m_head->value;
|
||||
}
|
||||
const T& first() const
|
||||
{
|
||||
ASSERT(m_head);
|
||||
VERIFY(m_head);
|
||||
return m_head->value;
|
||||
}
|
||||
T& last()
|
||||
{
|
||||
ASSERT(m_head);
|
||||
VERIFY(m_head);
|
||||
return m_tail->value;
|
||||
}
|
||||
const T& last() const
|
||||
{
|
||||
ASSERT(m_head);
|
||||
VERIFY(m_head);
|
||||
return m_tail->value;
|
||||
}
|
||||
|
||||
|
@ -117,13 +117,13 @@ public:
|
|||
requires { T(value); }, "Conversion operator is missing.");
|
||||
auto* node = new Node(forward<U>(value));
|
||||
if (!m_head) {
|
||||
ASSERT(!m_tail);
|
||||
VERIFY(!m_tail);
|
||||
m_head = node;
|
||||
m_tail = node;
|
||||
return;
|
||||
}
|
||||
ASSERT(m_tail);
|
||||
ASSERT(!node->next);
|
||||
VERIFY(m_tail);
|
||||
VERIFY(!node->next);
|
||||
m_tail->next = node;
|
||||
node->prev = m_tail;
|
||||
m_tail = node;
|
||||
|
@ -135,13 +135,13 @@ public:
|
|||
static_assert(IsSame<T, U>::value);
|
||||
auto* node = new Node(forward<U>(value));
|
||||
if (!m_head) {
|
||||
ASSERT(!m_tail);
|
||||
VERIFY(!m_tail);
|
||||
m_head = node;
|
||||
m_tail = node;
|
||||
return;
|
||||
}
|
||||
ASSERT(m_tail);
|
||||
ASSERT(!node->prev);
|
||||
VERIFY(m_tail);
|
||||
VERIFY(!node->prev);
|
||||
m_head->prev = node;
|
||||
node->next = m_head;
|
||||
m_head = node;
|
||||
|
@ -174,20 +174,20 @@ public:
|
|||
|
||||
void remove(Iterator it)
|
||||
{
|
||||
ASSERT(it.m_node);
|
||||
VERIFY(it.m_node);
|
||||
auto* node = it.m_node;
|
||||
if (node->prev) {
|
||||
ASSERT(node != m_head);
|
||||
VERIFY(node != m_head);
|
||||
node->prev->next = node->next;
|
||||
} else {
|
||||
ASSERT(node == m_head);
|
||||
VERIFY(node == m_head);
|
||||
m_head = node->next;
|
||||
}
|
||||
if (node->next) {
|
||||
ASSERT(node != m_tail);
|
||||
VERIFY(node != m_tail);
|
||||
node->next->prev = node->prev;
|
||||
} else {
|
||||
ASSERT(node == m_tail);
|
||||
VERIFY(node == m_tail);
|
||||
m_tail = node->prev;
|
||||
}
|
||||
delete node;
|
||||
|
|
|
@ -38,8 +38,8 @@ struct FlyStringImplTraits : public AK::Traits<StringImpl*> {
|
|||
static unsigned hash(const StringImpl* s) { return s ? s->hash() : 0; }
|
||||
static bool equals(const StringImpl* a, const StringImpl* b)
|
||||
{
|
||||
ASSERT(a);
|
||||
ASSERT(b);
|
||||
VERIFY(a);
|
||||
VERIFY(b);
|
||||
return *a == *b;
|
||||
}
|
||||
};
|
||||
|
@ -70,7 +70,7 @@ FlyString::FlyString(const String& string)
|
|||
string.impl()->set_fly({}, true);
|
||||
m_impl = string.impl();
|
||||
} else {
|
||||
ASSERT((*it)->is_fly());
|
||||
VERIFY((*it)->is_fly());
|
||||
m_impl = *it;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -47,7 +47,7 @@ constexpr size_t use_next_index = NumericLimits<size_t>::max();
|
|||
// 65 bytes. Choosing a larger power of two won't hurt and is a bit of mitigation against out-of-bounds accesses.
|
||||
inline size_t convert_unsigned_to_string(u64 value, Array<u8, 128>& buffer, u8 base, bool upper_case)
|
||||
{
|
||||
ASSERT(base >= 2 && base <= 16);
|
||||
VERIFY(base >= 2 && base <= 16);
|
||||
|
||||
static constexpr const char* lowercase_lookup = "0123456789abcdef";
|
||||
static constexpr const char* uppercase_lookup = "0123456789ABCDEF";
|
||||
|
@ -80,7 +80,7 @@ void vformat_impl(TypeErasedFormatParams& params, FormatBuilder& builder, Format
|
|||
|
||||
FormatParser::FormatSpecifier specifier;
|
||||
if (!parser.consume_specifier(specifier)) {
|
||||
ASSERT(parser.is_eof());
|
||||
VERIFY(parser.is_eof());
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -118,9 +118,9 @@ size_t TypeErasedParameter::to_size() const
|
|||
else if (type == TypeErasedParameter::Type::Int64)
|
||||
svalue = *reinterpret_cast<const i64*>(value);
|
||||
else
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
|
||||
ASSERT(svalue >= 0);
|
||||
VERIFY(svalue >= 0);
|
||||
|
||||
return static_cast<size_t>(svalue);
|
||||
}
|
||||
|
@ -163,7 +163,7 @@ bool FormatParser::consume_number(size_t& value)
|
|||
}
|
||||
bool FormatParser::consume_specifier(FormatSpecifier& specifier)
|
||||
{
|
||||
ASSERT(!next_is('}'));
|
||||
VERIFY(!next_is('}'));
|
||||
|
||||
if (!consume_specific('{'))
|
||||
return false;
|
||||
|
@ -176,7 +176,7 @@ bool FormatParser::consume_specifier(FormatSpecifier& specifier)
|
|||
|
||||
size_t level = 1;
|
||||
while (level > 0) {
|
||||
ASSERT(!is_eof());
|
||||
VERIFY(!is_eof());
|
||||
|
||||
if (consume_specific('{')) {
|
||||
++level;
|
||||
|
@ -194,7 +194,7 @@ bool FormatParser::consume_specifier(FormatSpecifier& specifier)
|
|||
specifier.flags = m_input.substring_view(begin, tell() - begin - 1);
|
||||
} else {
|
||||
if (!consume_specific('}'))
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
|
||||
specifier.flags = "";
|
||||
}
|
||||
|
@ -210,7 +210,7 @@ bool FormatParser::consume_replacement_field(size_t& index)
|
|||
index = use_next_index;
|
||||
|
||||
if (!consume_specific('}'))
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -426,7 +426,7 @@ void vformat(const LogStream& stream, StringView fmtstr, TypeErasedFormatParams
|
|||
void StandardFormatter::parse(TypeErasedFormatParams& params, FormatParser& parser)
|
||||
{
|
||||
if (StringView { "<^>" }.contains(parser.peek(1))) {
|
||||
ASSERT(!parser.next_is(is_any_of("{}")));
|
||||
VERIFY(!parser.next_is(is_any_of("{}")));
|
||||
m_fill = parser.consume();
|
||||
}
|
||||
|
||||
|
@ -498,21 +498,21 @@ void StandardFormatter::parse(TypeErasedFormatParams& params, FormatParser& pars
|
|||
if (!parser.is_eof())
|
||||
dbgln("{} did not consume '{}'", __PRETTY_FUNCTION__, parser.remaining());
|
||||
|
||||
ASSERT(parser.is_eof());
|
||||
VERIFY(parser.is_eof());
|
||||
}
|
||||
|
||||
void Formatter<StringView>::format(FormatBuilder& builder, StringView value)
|
||||
{
|
||||
if (m_sign_mode != FormatBuilder::SignMode::Default)
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
if (m_alternative_form)
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
if (m_zero_pad)
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
if (m_mode != Mode::Default && m_mode != Mode::String && m_mode != Mode::Character)
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
if (m_width.has_value() && m_precision.has_value())
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
|
||||
m_width = m_width.value_or(0);
|
||||
m_precision = m_precision.value_or(NumericLimits<size_t>::max());
|
||||
|
@ -530,7 +530,7 @@ void Formatter<T, typename EnableIf<IsIntegral<T>::value>::Type>::format(FormatB
|
|||
{
|
||||
if (m_mode == Mode::Character) {
|
||||
// FIXME: We just support ASCII for now, in the future maybe unicode?
|
||||
ASSERT(value >= 0 && value <= 127);
|
||||
VERIFY(value >= 0 && value <= 127);
|
||||
|
||||
m_mode = Mode::String;
|
||||
|
||||
|
@ -539,17 +539,17 @@ void Formatter<T, typename EnableIf<IsIntegral<T>::value>::Type>::format(FormatB
|
|||
}
|
||||
|
||||
if (m_precision.has_value())
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
|
||||
if (m_mode == Mode::Pointer) {
|
||||
if (m_sign_mode != FormatBuilder::SignMode::Default)
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
if (m_align != FormatBuilder::Align::Default)
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
if (m_alternative_form)
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
if (m_width.has_value())
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
|
||||
m_mode = Mode::Hexadecimal;
|
||||
m_alternative_form = true;
|
||||
|
@ -574,7 +574,7 @@ void Formatter<T, typename EnableIf<IsIntegral<T>::value>::Type>::format(FormatB
|
|||
base = 16;
|
||||
upper_case = true;
|
||||
} else {
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
|
||||
m_width = m_width.value_or(0);
|
||||
|
@ -621,7 +621,7 @@ void Formatter<double>::format(FormatBuilder& builder, double value)
|
|||
base = 16;
|
||||
upper_case = true;
|
||||
} else {
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
|
||||
m_width = m_width.value_or(0);
|
||||
|
@ -647,7 +647,7 @@ void vout(FILE* file, StringView fmtstr, TypeErasedFormatParams params, bool new
|
|||
|
||||
const auto string = builder.string_view();
|
||||
const auto retval = ::fwrite(string.characters_without_null_termination(), 1, string.length(), file);
|
||||
ASSERT(static_cast<size_t>(retval) == string.length());
|
||||
VERIFY(static_cast<size_t>(retval) == string.length());
|
||||
}
|
||||
#endif
|
||||
|
||||
|
|
|
@ -87,7 +87,7 @@ struct TypeErasedParameter {
|
|||
return Type::Int64;
|
||||
}
|
||||
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
|
|
|
@ -53,7 +53,7 @@ public:
|
|||
|
||||
Out operator()(In... in) const
|
||||
{
|
||||
ASSERT(m_callable_wrapper);
|
||||
VERIFY(m_callable_wrapper);
|
||||
return m_callable_wrapper->call(forward<In>(in)...);
|
||||
}
|
||||
|
||||
|
|
|
@ -79,14 +79,14 @@ bool GenericLexer::next_is(const char* expected) const
|
|||
// Go back to the previous character
|
||||
void GenericLexer::retreat()
|
||||
{
|
||||
ASSERT(m_index > 0);
|
||||
VERIFY(m_index > 0);
|
||||
m_index--;
|
||||
}
|
||||
|
||||
// Consume a character and advance the parser index
|
||||
char GenericLexer::consume()
|
||||
{
|
||||
ASSERT(!is_eof());
|
||||
VERIFY(!is_eof());
|
||||
return m_input[m_index++];
|
||||
}
|
||||
|
||||
|
|
|
@ -157,7 +157,7 @@ public:
|
|||
|
||||
void ensure_capacity(size_t capacity)
|
||||
{
|
||||
ASSERT(capacity >= size());
|
||||
VERIFY(capacity >= size());
|
||||
rehash(capacity * 2);
|
||||
}
|
||||
|
||||
|
@ -256,11 +256,11 @@ public:
|
|||
|
||||
void remove(Iterator iterator)
|
||||
{
|
||||
ASSERT(iterator.m_bucket);
|
||||
VERIFY(iterator.m_bucket);
|
||||
auto& bucket = *iterator.m_bucket;
|
||||
ASSERT(bucket.used);
|
||||
ASSERT(!bucket.end);
|
||||
ASSERT(!bucket.deleted);
|
||||
VERIFY(bucket.used);
|
||||
VERIFY(!bucket.end);
|
||||
VERIFY(!bucket.deleted);
|
||||
bucket.slot()->~T();
|
||||
bucket.used = false;
|
||||
bucket.deleted = true;
|
||||
|
|
|
@ -49,7 +49,7 @@ public:
|
|||
return allocated_id;
|
||||
}
|
||||
}
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
|
||||
void deallocate(int id)
|
||||
|
|
|
@ -65,7 +65,7 @@ public:
|
|||
|
||||
constexpr u8 operator[](int i) const
|
||||
{
|
||||
ASSERT(i >= 0 && i < 4);
|
||||
VERIFY(i >= 0 && i < 4);
|
||||
return octet(SubnetClass(i));
|
||||
}
|
||||
|
||||
|
|
|
@ -178,7 +178,7 @@ template<typename T>
|
|||
inline void InlineLinkedList<T>::prepend(T* node)
|
||||
{
|
||||
if (!m_head) {
|
||||
ASSERT(!m_tail);
|
||||
VERIFY(!m_tail);
|
||||
m_head = node;
|
||||
m_tail = node;
|
||||
node->set_prev(0);
|
||||
|
@ -186,7 +186,7 @@ inline void InlineLinkedList<T>::prepend(T* node)
|
|||
return;
|
||||
}
|
||||
|
||||
ASSERT(m_tail);
|
||||
VERIFY(m_tail);
|
||||
m_head->set_prev(node);
|
||||
node->set_next(m_head);
|
||||
node->set_prev(0);
|
||||
|
@ -197,7 +197,7 @@ template<typename T>
|
|||
inline void InlineLinkedList<T>::append(T* node)
|
||||
{
|
||||
if (!m_tail) {
|
||||
ASSERT(!m_head);
|
||||
VERIFY(!m_head);
|
||||
m_head = node;
|
||||
m_tail = node;
|
||||
node->set_prev(0);
|
||||
|
@ -205,7 +205,7 @@ inline void InlineLinkedList<T>::append(T* node)
|
|||
return;
|
||||
}
|
||||
|
||||
ASSERT(m_head);
|
||||
VERIFY(m_head);
|
||||
m_tail->set_next(node);
|
||||
node->set_prev(m_tail);
|
||||
node->set_next(0);
|
||||
|
@ -215,18 +215,18 @@ inline void InlineLinkedList<T>::append(T* node)
|
|||
template<typename T>
|
||||
inline void InlineLinkedList<T>::insert_before(T* before_node, T* node)
|
||||
{
|
||||
ASSERT(before_node);
|
||||
ASSERT(node);
|
||||
ASSERT(before_node != node);
|
||||
ASSERT(!is_empty());
|
||||
VERIFY(before_node);
|
||||
VERIFY(node);
|
||||
VERIFY(before_node != node);
|
||||
VERIFY(!is_empty());
|
||||
if (m_head == before_node) {
|
||||
ASSERT(!before_node->prev());
|
||||
VERIFY(!before_node->prev());
|
||||
m_head = node;
|
||||
node->set_prev(0);
|
||||
node->set_next(before_node);
|
||||
before_node->set_prev(node);
|
||||
} else {
|
||||
ASSERT(before_node->prev());
|
||||
VERIFY(before_node->prev());
|
||||
node->set_prev(before_node->prev());
|
||||
before_node->prev()->set_next(node);
|
||||
node->set_next(before_node);
|
||||
|
@ -237,18 +237,18 @@ inline void InlineLinkedList<T>::insert_before(T* before_node, T* node)
|
|||
template<typename T>
|
||||
inline void InlineLinkedList<T>::insert_after(T* after_node, T* node)
|
||||
{
|
||||
ASSERT(after_node);
|
||||
ASSERT(node);
|
||||
ASSERT(after_node != node);
|
||||
ASSERT(!is_empty());
|
||||
VERIFY(after_node);
|
||||
VERIFY(node);
|
||||
VERIFY(after_node != node);
|
||||
VERIFY(!is_empty());
|
||||
if (m_tail == after_node) {
|
||||
ASSERT(!after_node->next());
|
||||
VERIFY(!after_node->next());
|
||||
m_tail = node;
|
||||
node->set_prev(after_node);
|
||||
node->set_next(0);
|
||||
after_node->set_next(node);
|
||||
} else {
|
||||
ASSERT(after_node->next());
|
||||
VERIFY(after_node->next());
|
||||
node->set_prev(after_node);
|
||||
node->set_next(after_node->next());
|
||||
after_node->next()->set_prev(node);
|
||||
|
@ -260,18 +260,18 @@ template<typename T>
|
|||
inline void InlineLinkedList<T>::remove(T* node)
|
||||
{
|
||||
if (node->prev()) {
|
||||
ASSERT(node != m_head);
|
||||
VERIFY(node != m_head);
|
||||
node->prev()->set_next(node->next());
|
||||
} else {
|
||||
ASSERT(node == m_head);
|
||||
VERIFY(node == m_head);
|
||||
m_head = node->next();
|
||||
}
|
||||
|
||||
if (node->next()) {
|
||||
ASSERT(node != m_tail);
|
||||
VERIFY(node != m_tail);
|
||||
node->next()->set_prev(node->prev());
|
||||
} else {
|
||||
ASSERT(node == m_tail);
|
||||
VERIFY(node == m_tail);
|
||||
m_tail = node->prev();
|
||||
}
|
||||
|
||||
|
@ -310,15 +310,15 @@ inline void InlineLinkedList<T>::append(InlineLinkedList<T>& other)
|
|||
return;
|
||||
}
|
||||
|
||||
ASSERT(tail());
|
||||
ASSERT(other.head());
|
||||
VERIFY(tail());
|
||||
VERIFY(other.head());
|
||||
T* other_head = other.head();
|
||||
T* other_tail = other.tail();
|
||||
other.clear();
|
||||
|
||||
ASSERT(!m_tail->next());
|
||||
VERIFY(!m_tail->next());
|
||||
m_tail->set_next(other_head);
|
||||
ASSERT(!other_head->prev());
|
||||
VERIFY(!other_head->prev());
|
||||
other_head->set_prev(m_tail);
|
||||
m_tail = other_tail;
|
||||
}
|
||||
|
|
|
@ -284,7 +284,7 @@ inline IntrusiveListNode::~IntrusiveListNode()
|
|||
|
||||
inline void IntrusiveListNode::remove()
|
||||
{
|
||||
ASSERT(m_storage);
|
||||
VERIFY(m_storage);
|
||||
if (m_storage->m_first == this)
|
||||
m_storage->m_first = m_next;
|
||||
if (m_storage->m_last == this)
|
||||
|
|
|
@ -92,7 +92,7 @@ public:
|
|||
|
||||
void finish()
|
||||
{
|
||||
ASSERT(!m_finished);
|
||||
VERIFY(!m_finished);
|
||||
m_finished = true;
|
||||
m_builder.append(']');
|
||||
}
|
||||
|
|
|
@ -191,7 +191,7 @@ inline void JsonValue::serialize(Builder& builder) const
|
|||
builder.append("null");
|
||||
break;
|
||||
default:
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -141,7 +141,7 @@ public:
|
|||
|
||||
void finish()
|
||||
{
|
||||
ASSERT(!m_finished);
|
||||
VERIFY(!m_finished);
|
||||
m_finished = true;
|
||||
m_builder.append('}');
|
||||
}
|
||||
|
|
|
@ -45,7 +45,7 @@ JsonValue JsonPath::resolve(const JsonValue& top_root) const
|
|||
root = JsonValue { root.as_array().at(element.index()) };
|
||||
break;
|
||||
default:
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
}
|
||||
return root;
|
||||
|
|
|
@ -56,13 +56,13 @@ public:
|
|||
Kind kind() const { return m_kind; }
|
||||
const String& key() const
|
||||
{
|
||||
ASSERT(m_kind == Kind::Key);
|
||||
VERIFY(m_kind == Kind::Key);
|
||||
return m_key;
|
||||
}
|
||||
|
||||
size_t index() const
|
||||
{
|
||||
ASSERT(m_kind == Kind::Index);
|
||||
VERIFY(m_kind == Kind::Index);
|
||||
return m_index;
|
||||
}
|
||||
|
||||
|
|
|
@ -55,7 +55,7 @@ void JsonValue::copy_from(const JsonValue& other)
|
|||
m_type = other.m_type;
|
||||
switch (m_type) {
|
||||
case Type::String:
|
||||
ASSERT(!m_value.as_string);
|
||||
VERIFY(!m_value.as_string);
|
||||
m_value.as_string = other.m_value.as_string;
|
||||
m_value.as_string->ref();
|
||||
break;
|
||||
|
|
|
@ -127,56 +127,56 @@ public:
|
|||
|
||||
i32 as_i32() const
|
||||
{
|
||||
ASSERT(is_i32());
|
||||
VERIFY(is_i32());
|
||||
return m_value.as_i32;
|
||||
}
|
||||
|
||||
u32 as_u32() const
|
||||
{
|
||||
ASSERT(is_u32());
|
||||
VERIFY(is_u32());
|
||||
return m_value.as_u32;
|
||||
}
|
||||
|
||||
i64 as_i64() const
|
||||
{
|
||||
ASSERT(is_i64());
|
||||
VERIFY(is_i64());
|
||||
return m_value.as_i64;
|
||||
}
|
||||
|
||||
u64 as_u64() const
|
||||
{
|
||||
ASSERT(is_u64());
|
||||
VERIFY(is_u64());
|
||||
return m_value.as_u64;
|
||||
}
|
||||
|
||||
int as_bool() const
|
||||
{
|
||||
ASSERT(is_bool());
|
||||
VERIFY(is_bool());
|
||||
return m_value.as_bool;
|
||||
}
|
||||
|
||||
String as_string() const
|
||||
{
|
||||
ASSERT(is_string());
|
||||
VERIFY(is_string());
|
||||
return *m_value.as_string;
|
||||
}
|
||||
|
||||
const JsonObject& as_object() const
|
||||
{
|
||||
ASSERT(is_object());
|
||||
VERIFY(is_object());
|
||||
return *m_value.as_object;
|
||||
}
|
||||
|
||||
const JsonArray& as_array() const
|
||||
{
|
||||
ASSERT(is_array());
|
||||
VERIFY(is_array());
|
||||
return *m_value.as_array;
|
||||
}
|
||||
|
||||
#if !defined(KERNEL)
|
||||
double as_double() const
|
||||
{
|
||||
ASSERT(is_double());
|
||||
VERIFY(is_double());
|
||||
return m_value.as_double;
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -52,13 +52,13 @@ public:
|
|||
|
||||
constexpr const u8& operator[](unsigned i) const
|
||||
{
|
||||
ASSERT(i < s_mac_address_length);
|
||||
VERIFY(i < s_mac_address_length);
|
||||
return m_data[i];
|
||||
}
|
||||
|
||||
constexpr u8& operator[](unsigned i)
|
||||
{
|
||||
ASSERT(i < s_mac_address_length);
|
||||
VERIFY(i < s_mac_address_length);
|
||||
return m_data[i];
|
||||
}
|
||||
|
||||
|
|
|
@ -68,7 +68,7 @@ MappedFile::MappedFile(void* ptr, size_t size)
|
|||
MappedFile::~MappedFile()
|
||||
{
|
||||
auto rc = munmap(m_data, m_size);
|
||||
ASSERT(rc == 0);
|
||||
VERIFY(rc == 0);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -37,7 +37,7 @@ namespace AK {
|
|||
namespace {
|
||||
const static void* bitap_bitwise(const void* haystack, size_t haystack_length, const void* needle, size_t needle_length)
|
||||
{
|
||||
ASSERT(needle_length < 32);
|
||||
VERIFY(needle_length < 32);
|
||||
|
||||
u64 lookup = 0xfffffffe;
|
||||
|
||||
|
|
|
@ -79,7 +79,7 @@ public:
|
|||
|
||||
void seek(size_t offset)
|
||||
{
|
||||
ASSERT(offset < m_bytes.size());
|
||||
VERIFY(offset < m_bytes.size());
|
||||
m_offset = offset;
|
||||
}
|
||||
|
||||
|
@ -309,7 +309,7 @@ public:
|
|||
auto buffer = ByteBuffer::create_uninitialized(size());
|
||||
|
||||
const auto nread = read_without_consuming(buffer);
|
||||
ASSERT(nread == buffer.size());
|
||||
VERIFY(nread == buffer.size());
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
|
|
@ -59,13 +59,13 @@ public:
|
|||
NonnullOwnPtr(NonnullOwnPtr&& other)
|
||||
: m_ptr(other.leak_ptr())
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
}
|
||||
template<typename U>
|
||||
NonnullOwnPtr(NonnullOwnPtr<U>&& other)
|
||||
: m_ptr(other.leak_ptr())
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
}
|
||||
~NonnullOwnPtr()
|
||||
{
|
||||
|
@ -147,7 +147,7 @@ public:
|
|||
template<typename U>
|
||||
NonnullOwnPtr<U> release_nonnull()
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
return NonnullOwnPtr<U>(NonnullOwnPtr<U>::Adopt, static_cast<U&>(*leak_ptr()));
|
||||
}
|
||||
|
||||
|
|
|
@ -72,42 +72,42 @@ public:
|
|||
ALWAYS_INLINE NonnullRefPtr(const T& object)
|
||||
: m_bits((FlatPtr)&object)
|
||||
{
|
||||
ASSERT(!(m_bits & 1));
|
||||
VERIFY(!(m_bits & 1));
|
||||
const_cast<T&>(object).ref();
|
||||
}
|
||||
template<typename U>
|
||||
ALWAYS_INLINE NonnullRefPtr(const U& object)
|
||||
: m_bits((FlatPtr) static_cast<const T*>(&object))
|
||||
{
|
||||
ASSERT(!(m_bits & 1));
|
||||
VERIFY(!(m_bits & 1));
|
||||
const_cast<T&>(static_cast<const T&>(object)).ref();
|
||||
}
|
||||
ALWAYS_INLINE NonnullRefPtr(AdoptTag, T& object)
|
||||
: m_bits((FlatPtr)&object)
|
||||
{
|
||||
ASSERT(!(m_bits & 1));
|
||||
VERIFY(!(m_bits & 1));
|
||||
}
|
||||
ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr&& other)
|
||||
: m_bits((FlatPtr)&other.leak_ref())
|
||||
{
|
||||
ASSERT(!(m_bits & 1));
|
||||
VERIFY(!(m_bits & 1));
|
||||
}
|
||||
template<typename U>
|
||||
ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr<U>&& other)
|
||||
: m_bits((FlatPtr)&other.leak_ref())
|
||||
{
|
||||
ASSERT(!(m_bits & 1));
|
||||
VERIFY(!(m_bits & 1));
|
||||
}
|
||||
ALWAYS_INLINE NonnullRefPtr(const NonnullRefPtr& other)
|
||||
: m_bits((FlatPtr)other.add_ref())
|
||||
{
|
||||
ASSERT(!(m_bits & 1));
|
||||
VERIFY(!(m_bits & 1));
|
||||
}
|
||||
template<typename U>
|
||||
ALWAYS_INLINE NonnullRefPtr(const NonnullRefPtr<U>& other)
|
||||
: m_bits((FlatPtr)other.add_ref())
|
||||
{
|
||||
ASSERT(!(m_bits & 1));
|
||||
VERIFY(!(m_bits & 1));
|
||||
}
|
||||
ALWAYS_INLINE ~NonnullRefPtr()
|
||||
{
|
||||
|
@ -170,7 +170,7 @@ public:
|
|||
[[nodiscard]] ALWAYS_INLINE T& leak_ref()
|
||||
{
|
||||
T* ptr = exchange(nullptr);
|
||||
ASSERT(ptr);
|
||||
VERIFY(ptr);
|
||||
return *ptr;
|
||||
}
|
||||
|
||||
|
@ -253,7 +253,7 @@ private:
|
|||
ALWAYS_INLINE T* as_nonnull_ptr() const
|
||||
{
|
||||
T* ptr = (T*)(m_bits.load(AK::MemoryOrder::memory_order_relaxed) & ~(FlatPtr)1);
|
||||
ASSERT(ptr);
|
||||
VERIFY(ptr);
|
||||
return ptr;
|
||||
}
|
||||
|
||||
|
@ -273,7 +273,7 @@ private:
|
|||
Kernel::Processor::wait_check();
|
||||
#endif
|
||||
}
|
||||
ASSERT(!(bits & 1));
|
||||
VERIFY(!(bits & 1));
|
||||
f((T*)bits);
|
||||
m_bits.store(bits, AK::MemoryOrder::memory_order_release);
|
||||
}
|
||||
|
@ -286,7 +286,7 @@ private:
|
|||
|
||||
ALWAYS_INLINE T* exchange(T* new_ptr)
|
||||
{
|
||||
ASSERT(!((FlatPtr)new_ptr & 1));
|
||||
VERIFY(!((FlatPtr)new_ptr & 1));
|
||||
#ifdef KERNEL
|
||||
// We don't want to be pre-empted while we have the lock bit set
|
||||
Kernel::ScopedCritical critical;
|
||||
|
@ -301,7 +301,7 @@ private:
|
|||
Kernel::Processor::wait_check();
|
||||
#endif
|
||||
}
|
||||
ASSERT(!(expected & 1));
|
||||
VERIFY(!(expected & 1));
|
||||
return (T*)expected;
|
||||
}
|
||||
|
||||
|
|
|
@ -128,19 +128,19 @@ public:
|
|||
|
||||
[[nodiscard]] ALWAYS_INLINE T& value()
|
||||
{
|
||||
ASSERT(m_has_value);
|
||||
VERIFY(m_has_value);
|
||||
return *reinterpret_cast<T*>(&m_storage);
|
||||
}
|
||||
|
||||
[[nodiscard]] ALWAYS_INLINE const T& value() const
|
||||
{
|
||||
ASSERT(m_has_value);
|
||||
VERIFY(m_has_value);
|
||||
return *reinterpret_cast<const T*>(&m_storage);
|
||||
}
|
||||
|
||||
[[nodiscard]] T release_value()
|
||||
{
|
||||
ASSERT(m_has_value);
|
||||
VERIFY(m_has_value);
|
||||
T released_value = move(value());
|
||||
value().~T();
|
||||
m_has_value = false;
|
||||
|
|
AK/OwnPtr.h (14 changed lines)
|
@ -112,7 +112,7 @@ public:
|
|||
{
|
||||
OwnPtr ptr(move(other));
|
||||
swap(ptr);
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
return *this;
|
||||
}
|
||||
|
||||
|
@ -147,14 +147,14 @@ public:
|
|||
|
||||
NonnullOwnPtr<T> release_nonnull()
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
return NonnullOwnPtr<T>(NonnullOwnPtr<T>::Adopt, *leak_ptr());
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
NonnullOwnPtr<U> release_nonnull()
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
return NonnullOwnPtr<U>(NonnullOwnPtr<U>::Adopt, static_cast<U&>(*leak_ptr()));
|
||||
}
|
||||
|
||||
|
@ -163,25 +163,25 @@ public:
|
|||
|
||||
T* operator->()
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
return m_ptr;
|
||||
}
|
||||
|
||||
const T* operator->() const
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
return m_ptr;
|
||||
}
|
||||
|
||||
T& operator*()
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
return *m_ptr;
|
||||
}
|
||||
|
||||
const T& operator*() const
|
||||
{
|
||||
ASSERT(m_ptr);
|
||||
VERIFY(m_ptr);
|
||||
return *m_ptr;
|
||||
}
|
||||
|
||||
|
|
|
@ -52,7 +52,7 @@ public:
|
|||
|
||||
T dequeue()
|
||||
{
|
||||
ASSERT(!is_empty());
|
||||
VERIFY(!is_empty());
|
||||
auto value = move((*m_segments.first())[m_index_into_first++]);
|
||||
if (m_index_into_first == segment_size) {
|
||||
m_segments.take_first();
|
||||
|
@ -64,7 +64,7 @@ public:
|
|||
|
||||
const T& head() const
|
||||
{
|
||||
ASSERT(!is_empty());
|
||||
VERIFY(!is_empty());
|
||||
return (*m_segments.first())[m_index_into_first];
|
||||
}
|
||||
|
||||
|
|
|
@ -70,8 +70,8 @@ public:
|
|||
ALWAYS_INLINE void ref() const
|
||||
{
|
||||
auto old_ref_count = m_ref_count.fetch_add(1, AK::MemoryOrder::memory_order_relaxed);
|
||||
ASSERT(old_ref_count > 0);
|
||||
ASSERT(!Checked<RefCountType>::addition_would_overflow(old_ref_count, 1));
|
||||
VERIFY(old_ref_count > 0);
|
||||
VERIFY(!Checked<RefCountType>::addition_would_overflow(old_ref_count, 1));
|
||||
}
|
||||
|
||||
[[nodiscard]] ALWAYS_INLINE bool try_ref() const
|
||||
|
@ -80,7 +80,7 @@ public:
|
|||
for (;;) {
|
||||
if (expected == 0)
|
||||
return false;
|
||||
ASSERT(!Checked<RefCountType>::addition_would_overflow(expected, 1));
|
||||
VERIFY(!Checked<RefCountType>::addition_would_overflow(expected, 1));
|
||||
if (m_ref_count.compare_exchange_strong(expected, expected + 1, AK::MemoryOrder::memory_order_acquire))
|
||||
return true;
|
||||
}
|
||||
|
@ -95,13 +95,13 @@ protected:
|
|||
RefCountedBase() = default;
|
||||
ALWAYS_INLINE ~RefCountedBase()
|
||||
{
|
||||
ASSERT(m_ref_count.load(AK::MemoryOrder::memory_order_relaxed) == 0);
|
||||
VERIFY(m_ref_count.load(AK::MemoryOrder::memory_order_relaxed) == 0);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE RefCountType deref_base() const
|
||||
{
|
||||
auto old_ref_count = m_ref_count.fetch_sub(1, AK::MemoryOrder::memory_order_acq_rel);
|
||||
ASSERT(old_ref_count > 0);
|
||||
VERIFY(old_ref_count > 0);
|
||||
return old_ref_count - 1;
|
||||
}
|
||||
|
||||
|
|
AK/RefPtr.h (26 changed lines)
|
@ -50,7 +50,7 @@ struct RefPtrTraits {
|
|||
|
||||
ALWAYS_INLINE static FlatPtr as_bits(T* ptr)
|
||||
{
|
||||
ASSERT(!((FlatPtr)ptr & 1));
|
||||
VERIFY(!((FlatPtr)ptr & 1));
|
||||
return (FlatPtr)ptr;
|
||||
}
|
||||
|
||||
|
@ -70,7 +70,7 @@ struct RefPtrTraits {
|
|||
ALWAYS_INLINE static FlatPtr exchange(Atomic<FlatPtr>& atomic_var, FlatPtr new_value)
|
||||
{
|
||||
// Only exchange when lock is not held
|
||||
ASSERT(!(new_value & 1));
|
||||
VERIFY(!(new_value & 1));
|
||||
FlatPtr expected = atomic_var.load(AK::MemoryOrder::memory_order_relaxed);
|
||||
for (;;) {
|
||||
expected &= ~(FlatPtr)1; // only if lock bit is not set
|
||||
|
@ -86,7 +86,7 @@ struct RefPtrTraits {
|
|||
ALWAYS_INLINE static bool exchange_if_null(Atomic<FlatPtr>& atomic_var, FlatPtr new_value)
|
||||
{
|
||||
// Only exchange when lock is not held
|
||||
ASSERT(!(new_value & 1));
|
||||
VERIFY(!(new_value & 1));
|
||||
for (;;) {
|
||||
FlatPtr expected = default_null_value; // only if lock bit is not set
|
||||
if (atomic_var.compare_exchange_strong(expected, new_value, AK::MemoryOrder::memory_order_acq_rel))
|
||||
|
@ -116,13 +116,13 @@ struct RefPtrTraits {
|
|||
Kernel::Processor::wait_check();
|
||||
#endif
|
||||
}
|
||||
ASSERT(!(bits & 1));
|
||||
VERIFY(!(bits & 1));
|
||||
return bits;
|
||||
}
|
||||
|
||||
ALWAYS_INLINE static void unlock(Atomic<FlatPtr>& atomic_var, FlatPtr new_value)
|
||||
{
|
||||
ASSERT(!(new_value & 1));
|
||||
VERIFY(!(new_value & 1));
|
||||
atomic_var.store(new_value, AK::MemoryOrder::memory_order_release);
|
||||
}
|
||||
|
||||
|
@ -153,14 +153,14 @@ public:
|
|||
: m_bits(PtrTraits::as_bits(const_cast<T*>(&object)))
|
||||
{
|
||||
T* ptr = const_cast<T*>(&object);
|
||||
ASSERT(ptr);
|
||||
ASSERT(!is_null());
|
||||
VERIFY(ptr);
|
||||
VERIFY(!is_null());
|
||||
ptr->ref();
|
||||
}
|
||||
RefPtr(AdoptTag, T& object)
|
||||
: m_bits(PtrTraits::as_bits(&object))
|
||||
{
|
||||
ASSERT(!is_null());
|
||||
VERIFY(!is_null());
|
||||
}
|
||||
RefPtr(RefPtr&& other)
|
||||
: m_bits(other.leak_ref_raw())
|
||||
|
@ -179,7 +179,7 @@ public:
|
|||
ALWAYS_INLINE RefPtr(NonnullRefPtr<U>&& other)
|
||||
: m_bits(PtrTraits::as_bits(&other.leak_ref()))
|
||||
{
|
||||
ASSERT(!is_null());
|
||||
VERIFY(!is_null());
|
||||
}
|
||||
template<typename U, typename P = RefPtrTraits<U>>
|
||||
RefPtr(RefPtr<U, P>&& other)
|
||||
|
@ -330,7 +330,7 @@ public:
|
|||
NonnullRefPtr<T> release_nonnull()
|
||||
{
|
||||
FlatPtr bits = PtrTraits::exchange(m_bits, PtrTraits::default_null_value);
|
||||
ASSERT(!PtrTraits::is_null(bits));
|
||||
VERIFY(!PtrTraits::is_null(bits));
|
||||
return NonnullRefPtr<T>(NonnullRefPtr<T>::Adopt, *PtrTraits::as_ptr(bits));
|
||||
}
|
||||
|
||||
|
@ -384,7 +384,7 @@ public:
|
|||
{
|
||||
// make sure we are holding a null value
|
||||
FlatPtr bits = m_bits.load(AK::MemoryOrder::memory_order_relaxed);
|
||||
ASSERT(PtrTraits::is_null(bits));
|
||||
VERIFY(PtrTraits::is_null(bits));
|
||||
return PtrTraits::to_null_value(bits);
|
||||
}
|
||||
template<typename U = T, typename EnableIf<IsSame<U, T>::value && !IsNullPointer<typename PtrTraits::NullType>::value>::Type* = nullptr>
|
||||
|
@ -392,7 +392,7 @@ public:
|
|||
{
|
||||
// make sure that new null value would be interpreted as a null value
|
||||
FlatPtr bits = PtrTraits::from_null_value(value);
|
||||
ASSERT(PtrTraits::is_null(bits));
|
||||
VERIFY(PtrTraits::is_null(bits));
|
||||
assign_raw(bits);
|
||||
}
|
||||
|
||||
|
@ -454,7 +454,7 @@ private:
|
|||
|
||||
ALWAYS_INLINE T* as_nonnull_ptr(FlatPtr bits) const
|
||||
{
|
||||
ASSERT(!PtrTraits::is_null(bits));
|
||||
VERIFY(!PtrTraits::is_null(bits));
|
||||
return PtrTraits::as_ptr(bits);
|
||||
}
|
||||
|
||||
|
|
|
@ -83,9 +83,9 @@ public:
|
|||
}
|
||||
if constexpr (allow_create) {
|
||||
// We should always return an instance if we allow creating one
|
||||
ASSERT(obj != nullptr);
|
||||
VERIFY(obj != nullptr);
|
||||
}
|
||||
ASSERT(obj != (T*)0x1);
|
||||
VERIFY(obj != (T*)0x1);
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
|
|
@ -104,28 +104,28 @@ public:
|
|||
|
||||
T& first()
|
||||
{
|
||||
ASSERT(head());
|
||||
VERIFY(head());
|
||||
return head()->value;
|
||||
}
|
||||
const T& first() const
|
||||
{
|
||||
ASSERT(head());
|
||||
VERIFY(head());
|
||||
return head()->value;
|
||||
}
|
||||
T& last()
|
||||
{
|
||||
ASSERT(head());
|
||||
VERIFY(head());
|
||||
return tail()->value;
|
||||
}
|
||||
const T& last() const
|
||||
{
|
||||
ASSERT(head());
|
||||
VERIFY(head());
|
||||
return tail()->value;
|
||||
}
|
||||
|
||||
T take_first()
|
||||
{
|
||||
ASSERT(m_head);
|
||||
VERIFY(m_head);
|
||||
auto* prev_head = m_head;
|
||||
T value = move(first());
|
||||
if (m_tail == m_head)
|
||||
|
@ -187,7 +187,7 @@ public:
|
|||
|
||||
void remove(Iterator iterator)
|
||||
{
|
||||
ASSERT(!iterator.is_end());
|
||||
VERIFY(!iterator.is_end());
|
||||
if (m_head == iterator.m_node)
|
||||
m_head = iterator.m_node->next;
|
||||
if (m_tail == iterator.m_node)
|
||||
|
|
|
@ -78,11 +78,11 @@ public:
|
|||
const auto placeholder = consume_until_without_consuming_stop_character(m_closing);
|
||||
|
||||
if (!lexer.consume_specific(m_closing))
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
|
||||
m_builder.append(get(placeholder));
|
||||
} else {
|
||||
ASSERT(lexer.is_eof());
|
||||
VERIFY(lexer.is_eof());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
AK/Span.h (14 changed lines)
|
@ -140,12 +140,12 @@ public:
|
|||
|
||||
[[nodiscard]] ALWAYS_INLINE constexpr Span slice(size_t start, size_t length) const
|
||||
{
|
||||
ASSERT(start + length <= size());
|
||||
VERIFY(start + length <= size());
|
||||
return { this->m_values + start, length };
|
||||
}
|
||||
[[nodiscard]] ALWAYS_INLINE constexpr Span slice(size_t start) const
|
||||
{
|
||||
ASSERT(start <= size());
|
||||
VERIFY(start <= size());
|
||||
return { this->m_values + start, size() - start };
|
||||
}
|
||||
|
||||
|
@ -156,20 +156,20 @@ public:
|
|||
|
||||
ALWAYS_INLINE constexpr T* offset(size_t start) const
|
||||
{
|
||||
ASSERT(start < this->m_size);
|
||||
VERIFY(start < this->m_size);
|
||||
return this->m_values + start;
|
||||
}
|
||||
|
||||
ALWAYS_INLINE constexpr void overwrite(size_t offset, const void* data, size_t data_size)
|
||||
{
|
||||
// make sure we're not told to write past the end
|
||||
ASSERT(offset + data_size <= size());
|
||||
VERIFY(offset + data_size <= size());
|
||||
__builtin_memcpy(this->data() + offset, data, data_size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE constexpr size_t copy_to(Span<typename RemoveConst<T>::Type> other) const
|
||||
{
|
||||
ASSERT(other.size() >= size());
|
||||
VERIFY(other.size() >= size());
|
||||
return TypedTransfer<typename RemoveConst<T>::Type>::copy(other.data(), data(), size());
|
||||
}
|
||||
|
||||
|
@ -198,12 +198,12 @@ public:
|
|||
|
||||
ALWAYS_INLINE constexpr const T& at(size_t index) const
|
||||
{
|
||||
ASSERT(index < this->m_size);
|
||||
VERIFY(index < this->m_size);
|
||||
return this->m_values[index];
|
||||
}
|
||||
ALWAYS_INLINE constexpr T& at(size_t index)
|
||||
{
|
||||
ASSERT(index < this->m_size);
|
||||
VERIFY(index < this->m_size);
|
||||
return this->m_values[index];
|
||||
}
|
||||
|
||||
|
|
|
@ -42,17 +42,17 @@ StackInfo::StackInfo()
|
|||
#ifdef __serenity__
|
||||
if (get_stack_bounds(&m_base, &m_size) < 0) {
|
||||
perror("get_stack_bounds");
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
#elif __linux__
|
||||
pthread_attr_t attr = {};
|
||||
if (int rc = pthread_getattr_np(pthread_self(), &attr) != 0) {
|
||||
fprintf(stderr, "pthread_getattr_np: %s\n", strerror(-rc));
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
if (int rc = pthread_attr_getstack(&attr, (void**)&m_base, &m_size) != 0) {
|
||||
fprintf(stderr, "pthread_attr_getstack: %s\n", strerror(-rc));
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
pthread_attr_destroy(&attr);
|
||||
#elif __APPLE__
|
||||
|
@ -73,7 +73,7 @@ StackInfo::StackInfo()
|
|||
}
|
||||
m_base = top_of_stack - m_size;
|
||||
#else
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
#endif
|
||||
|
||||
m_top = m_base + m_size;
|
||||
|
|
|
@ -57,7 +57,7 @@ constexpr T max(const T& a, const T& b)
|
|||
template<typename T>
|
||||
constexpr T clamp(const T& value, const T& min, const T& max)
|
||||
{
|
||||
ASSERT(max >= min);
|
||||
VERIFY(max >= min);
|
||||
if (value > max)
|
||||
return max;
|
||||
if (value < min)
|
||||
|
|
|
@ -37,7 +37,7 @@ namespace AK::Detail {
|
|||
|
||||
class Stream {
|
||||
public:
|
||||
virtual ~Stream() { ASSERT(!has_any_error()); }
|
||||
virtual ~Stream() { VERIFY(!has_any_error()); }
|
||||
|
||||
virtual bool has_recoverable_error() const { return m_recoverable_error; }
|
||||
virtual bool has_fatal_error() const { return m_fatal_error; }
|
||||
|
@ -45,7 +45,7 @@ public:
|
|||
|
||||
virtual bool handle_recoverable_error()
|
||||
{
|
||||
ASSERT(!has_fatal_error());
|
||||
VERIFY(!has_fatal_error());
|
||||
return exchange(m_recoverable_error, false);
|
||||
}
|
||||
virtual bool handle_fatal_error() { return exchange(m_fatal_error, false); }
|
||||
|
|
|
@ -104,7 +104,7 @@ String String::empty()
|
|||
bool String::copy_characters_to_buffer(char* buffer, size_t buffer_size) const
|
||||
{
|
||||
// We must fit at least the NUL-terminator.
|
||||
ASSERT(buffer_size > 0);
|
||||
VERIFY(buffer_size > 0);
|
||||
|
||||
size_t characters_to_copy = min(length(), buffer_size - 1);
|
||||
__builtin_memcpy(buffer, characters(), characters_to_copy);
|
||||
|
@ -127,8 +127,8 @@ String String::isolated_copy() const
|
|||
|
||||
String String::substring(size_t start) const
|
||||
{
|
||||
ASSERT(m_impl);
|
||||
ASSERT(start <= length());
|
||||
VERIFY(m_impl);
|
||||
VERIFY(start <= length());
|
||||
return { characters() + start, length() - start };
|
||||
}
|
||||
|
||||
|
@ -136,24 +136,24 @@ String String::substring(size_t start, size_t length) const
|
|||
{
|
||||
if (!length)
|
||||
return "";
|
||||
ASSERT(m_impl);
|
||||
ASSERT(start + length <= m_impl->length());
|
||||
VERIFY(m_impl);
|
||||
VERIFY(start + length <= m_impl->length());
|
||||
// FIXME: This needs some input bounds checking.
|
||||
return { characters() + start, length };
|
||||
}
|
||||
|
||||
StringView String::substring_view(size_t start, size_t length) const
|
||||
{
|
||||
ASSERT(m_impl);
|
||||
ASSERT(start + length <= m_impl->length());
|
||||
VERIFY(m_impl);
|
||||
VERIFY(start + length <= m_impl->length());
|
||||
// FIXME: This needs some input bounds checking.
|
||||
return { characters() + start, length };
|
||||
}
|
||||
|
||||
StringView String::substring_view(size_t start) const
|
||||
{
|
||||
ASSERT(m_impl);
|
||||
ASSERT(start <= length());
|
||||
VERIFY(m_impl);
|
||||
VERIFY(start <= length());
|
||||
return { characters() + start, length() - start };
|
||||
}
|
||||
|
||||
|
|
|
@ -40,12 +40,12 @@ inline void StringBuilder::will_append(size_t size)
|
|||
{
|
||||
Checked<size_t> needed_capacity = m_length;
|
||||
needed_capacity += size;
|
||||
ASSERT(!needed_capacity.has_overflow());
|
||||
VERIFY(!needed_capacity.has_overflow());
|
||||
if (needed_capacity < inline_capacity)
|
||||
return;
|
||||
Checked<size_t> expanded_capacity = needed_capacity;
|
||||
expanded_capacity *= 2;
|
||||
ASSERT(!expanded_capacity.has_overflow());
|
||||
VERIFY(!expanded_capacity.has_overflow());
|
||||
if (m_buffer.is_null()) {
|
||||
m_buffer.grow(expanded_capacity.value());
|
||||
memcpy(m_buffer.data(), m_inline_buffer, m_length);
|
||||
|
|
|
@ -88,9 +88,9 @@ static inline size_t allocation_size_for_stringimpl(size_t length)
|
|||
|
||||
NonnullRefPtr<StringImpl> StringImpl::create_uninitialized(size_t length, char*& buffer)
|
||||
{
|
||||
ASSERT(length);
|
||||
VERIFY(length);
|
||||
void* slot = kmalloc(allocation_size_for_stringimpl(length));
|
||||
ASSERT(slot);
|
||||
VERIFY(slot);
|
||||
auto new_stringimpl = adopt(*new (slot) StringImpl(ConstructWithInlineBuffer, length));
|
||||
buffer = const_cast<char*>(new_stringimpl->characters());
|
||||
buffer[length] = '\0';
|
||||
|
|
|
@ -66,7 +66,7 @@ public:
|
|||
|
||||
const char& operator[](size_t i) const
|
||||
{
|
||||
ASSERT(i < m_length);
|
||||
VERIFY(i < m_length);
|
||||
return characters()[i];
|
||||
}
|
||||
|
||||
|
|
|
@ -79,7 +79,7 @@ Vector<StringView> StringView::split_view(const char separator, bool keep_empty)
|
|||
|
||||
Vector<StringView> StringView::split_view(const StringView& separator, bool keep_empty) const
|
||||
{
|
||||
ASSERT(!separator.is_empty());
|
||||
VERIFY(!separator.is_empty());
|
||||
|
||||
if (is_empty())
|
||||
return {};
|
||||
|
@ -197,20 +197,20 @@ bool StringView::equals_ignoring_case(const StringView& other) const
|
|||
|
||||
StringView StringView::substring_view(size_t start, size_t length) const
|
||||
{
|
||||
ASSERT(start + length <= m_length);
|
||||
VERIFY(start + length <= m_length);
|
||||
return { m_characters + start, length };
|
||||
}
|
||||
StringView StringView::substring_view(size_t start) const
|
||||
{
|
||||
ASSERT(start <= m_length);
|
||||
VERIFY(start <= m_length);
|
||||
return { m_characters + start, length() - start };
|
||||
}
|
||||
|
||||
StringView StringView::substring_view_starting_from_substring(const StringView& substring) const
|
||||
{
|
||||
const char* remaining_characters = substring.characters_without_null_termination();
|
||||
ASSERT(remaining_characters >= m_characters);
|
||||
ASSERT(remaining_characters <= m_characters + m_length);
|
||||
VERIFY(remaining_characters >= m_characters);
|
||||
VERIFY(remaining_characters <= m_characters + m_length);
|
||||
size_t remaining_length = m_length - (remaining_characters - m_characters);
|
||||
return { remaining_characters, remaining_length };
|
||||
}
|
||||
|
@ -218,8 +218,8 @@ StringView StringView::substring_view_starting_from_substring(const StringView&
|
|||
StringView StringView::substring_view_starting_after_substring(const StringView& substring) const
|
||||
{
|
||||
const char* remaining_characters = substring.characters_without_null_termination() + substring.length();
|
||||
ASSERT(remaining_characters >= m_characters);
|
||||
ASSERT(remaining_characters <= m_characters + m_length);
|
||||
VERIFY(remaining_characters >= m_characters);
|
||||
VERIFY(remaining_characters <= m_characters + m_length);
|
||||
size_t remaining_length = m_length - (remaining_characters - m_characters);
|
||||
return { remaining_characters, remaining_length };
|
||||
}
|
||||
|
|
|
@ -42,13 +42,13 @@ public:
|
|||
: m_characters(characters)
|
||||
, m_length(length)
|
||||
{
|
||||
ASSERT(!Checked<uintptr_t>::addition_would_overflow((uintptr_t)characters, length));
|
||||
VERIFY(!Checked<uintptr_t>::addition_would_overflow((uintptr_t)characters, length));
|
||||
}
|
||||
ALWAYS_INLINE StringView(const unsigned char* characters, size_t length)
|
||||
: m_characters((const char*)characters)
|
||||
, m_length(length)
|
||||
{
|
||||
ASSERT(!Checked<uintptr_t>::addition_would_overflow((uintptr_t)characters, length));
|
||||
VERIFY(!Checked<uintptr_t>::addition_would_overflow((uintptr_t)characters, length));
|
||||
}
|
||||
ALWAYS_INLINE constexpr StringView(const char* cstring)
|
||||
: m_characters(cstring)
|
||||
|
|
|
@@ -38,11 +38,11 @@ void warnln(CheckedFormatString<Parameters...>&& fmtstr, const Parameters&...);

 using AK::warnln;

-#undef ASSERT
-#define ASSERT(x) \
+#undef VERIFY
+#define VERIFY(x) \
     do { \
         if (!(x)) \
-            ::AK::warnln("\033[31;1mFAIL\033[0m: {}:{}: ASSERT({}) failed", __FILE__, __LINE__, #x); \
+            ::AK::warnln("\033[31;1mFAIL\033[0m: {}:{}: VERIFY({}) failed", __FILE__, __LINE__, #x); \
     } while (false)

 #undef RELEASE_ASSERT
@@ -52,10 +52,10 @@ using AK::warnln;
             ::AK::warnln("\033[31;1mFAIL\033[0m: {}:{}: RELEASE_ASSERT({}) failed", __FILE__, __LINE__, #x); \
     } while (false)

-#undef ASSERT_NOT_REACHED
-#define ASSERT_NOT_REACHED() \
+#undef VERIFY_NOT_REACHED
+#define VERIFY_NOT_REACHED() \
     do { \
-        ::AK::warnln("\033[31;1mFAIL\033[0m: {}:{}: ASSERT_NOT_REACHED() called", __FILE__, __LINE__); \
+        ::AK::warnln("\033[31;1mFAIL\033[0m: {}:{}: VERIFY_NOT_REACHED() called", __FILE__, __LINE__); \
         ::abort(); \
     } while (false)

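For context, the test-harness variants in the hunks above behave differently from the rest of the tree: a failing VERIFY here prints a FAIL line via warnln and lets the test keep running, while VERIFY_NOT_REACHED() prints and then calls ::abort(). A hypothetical test, not part of this commit and with the include paths assumed, would read like this after the rename:

// Hypothetical example showing how the renamed macros read under the test harness above.
#include <AK/TestSuite.h> // assumed path of the harness whose macros are shown above
#include <AK/Vector.h>

TEST_CASE(verify_in_tests)
{
    Vector<int> numbers;
    numbers.append(42);

    VERIFY(!numbers.is_empty());    // on failure: prints "FAIL: ...: VERIFY(...) failed" and continues
    EXPECT_EQ(numbers.first(), 42); // ordinary test expectation

    if (numbers.size() > 1)
        VERIFY_NOT_REACHED();       // would print FAIL and abort the test binary
}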
|
|
@ -177,7 +177,7 @@ TEST_CASE(pointers)
|
|||
EXPECT_EQ(String::formatted("{:p}", ptr), "0x0000000000004000");
|
||||
EXPECT_EQ(String::formatted("{}", ptr), "0x0000000000004000");
|
||||
} else {
|
||||
ASSERT_NOT_REACHED();
|
||||
VERIFY_NOT_REACHED();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
TEST_CASE(load_form)
|
||||
{
|
||||
FILE* fp = fopen("test.frm", "r");
|
||||
ASSERT(fp);
|
||||
VERIFY(fp);
|
||||
|
||||
StringBuilder builder;
|
||||
for (;;) {
|
||||
|
@ -69,7 +69,7 @@ TEST_CASE(load_form)
|
|||
BENCHMARK_CASE(load_4chan_catalog)
|
||||
{
|
||||
FILE* fp = fopen("4chan_catalog.json", "r");
|
||||
ASSERT(fp);
|
||||
VERIFY(fp);
|
||||
|
||||
StringBuilder builder;
|
||||
for (;;) {
|
||||
|
|
|
@ -38,7 +38,7 @@ TEST_CASE(decode_ascii)
|
|||
|
||||
size_t i = 0;
|
||||
for (u32 code_point : utf8) {
|
||||
ASSERT(i < expected_size);
|
||||
VERIFY(i < expected_size);
|
||||
EXPECT_EQ(code_point, expected[i]);
|
||||
i++;
|
||||
}
|
||||
|
@ -57,7 +57,7 @@ TEST_CASE(decode_utf8)
|
|||
|
||||
size_t i = 0;
|
||||
for (u32 code_point : utf8) {
|
||||
ASSERT(i < expected_size);
|
||||
VERIFY(i < expected_size);
|
||||
EXPECT_EQ(code_point, expected[i]);
|
||||
i++;
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ namespace AK {
|
|||
|
||||
int day_of_year(int year, unsigned month, int day)
|
||||
{
|
||||
ASSERT(month >= 1 && month <= 12);
|
||||
VERIFY(month >= 1 && month <= 12);
|
||||
|
||||
static const int seek_table[] = { 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334 };
|
||||
int day_of_year = seek_table[month - 1] + day - 1;
|
||||
|
@ -44,7 +44,7 @@ int day_of_year(int year, unsigned month, int day)
|
|||
|
||||
int days_in_month(int year, unsigned month)
|
||||
{
|
||||
ASSERT(month >= 1 && month <= 12);
|
||||
VERIFY(month >= 1 && month <= 12);
|
||||
if (month == 2)
|
||||
return is_leap_year(year) ? 29 : 28;
|
||||
|
||||
|
@ -54,7 +54,7 @@ int days_in_month(int year, unsigned month)
|
|||
|
||||
unsigned day_of_week(int year, unsigned month, int day)
|
||||
{
|
||||
ASSERT(month >= 1 && month <= 12);
|
||||
VERIFY(month >= 1 && month <= 12);
|
||||
static const int seek_table[] = { 0, 3, 2, 5, 0, 3, 5, 1, 4, 6, 2, 4 };
|
||||
if (month < 3)
|
||||
--year;
|
||||
|
|
|
@ -51,7 +51,7 @@ template<typename OutputType, typename InputType>
|
|||
ALWAYS_INLINE CopyConst<InputType, OutputType>* downcast(InputType* input)
|
||||
{
|
||||
static_assert(IsBaseOf<InputType, OutputType>::value);
|
||||
ASSERT(!input || is<OutputType>(*input));
|
||||
VERIFY(!input || is<OutputType>(*input));
|
||||
return static_cast<CopyConst<InputType, OutputType>*>(input);
|
||||
}
|
||||
|
||||
|
@ -59,7 +59,7 @@ template<typename OutputType, typename InputType>
|
|||
ALWAYS_INLINE CopyConst<InputType, OutputType>& downcast(InputType& input)
|
||||
{
|
||||
static_assert(IsBaseOf<InputType, OutputType>::value);
|
||||
ASSERT(is<OutputType>(input));
|
||||
VERIFY(is<OutputType>(input));
|
||||
return static_cast<CopyConst<InputType, OutputType>&>(input);
|
||||
}
|
||||
|
||||
|
|
|
@ -41,7 +41,7 @@ UUID::UUID(Array<u8, 16> uuid_buffer)
|
|||
|
||||
void UUID::convert_string_view_to_uuid(const StringView& uuid_string_view)
|
||||
{
|
||||
ASSERT(uuid_string_view.length() == 36);
|
||||
VERIFY(uuid_string_view.length() == 36);
|
||||
auto first_unit = decode_hex(uuid_string_view.substring_view(0, 8));
|
||||
auto second_unit = decode_hex(uuid_string_view.substring_view(9, 4));
|
||||
auto third_unit = decode_hex(uuid_string_view.substring_view(14, 4));
|
||||
|
|
|
@ -51,7 +51,7 @@ public:
|
|||
}
|
||||
Utf32CodepointIterator& operator++()
|
||||
{
|
||||
ASSERT(m_length > 0);
|
||||
VERIFY(m_length > 0);
|
||||
m_ptr++;
|
||||
m_length--;
|
||||
return *this;
|
||||
|
@ -62,7 +62,7 @@ public:
|
|||
}
|
||||
u32 operator*() const
|
||||
{
|
||||
ASSERT(m_length > 0);
|
||||
VERIFY(m_length > 0);
|
||||
return *m_ptr;
|
||||
}
|
||||
|
||||
|
@ -88,7 +88,7 @@ public:
|
|||
: m_code_points(code_points)
|
||||
, m_length(length)
|
||||
{
|
||||
ASSERT(code_points || length == 0);
|
||||
VERIFY(code_points || length == 0);
|
||||
}
|
||||
|
||||
Utf32CodepointIterator begin() const
|
||||
|
@ -107,8 +107,8 @@ public:
|
|||
|
||||
size_t iterator_offset(const Utf32CodepointIterator& it) const
|
||||
{
|
||||
ASSERT(it.m_ptr >= m_code_points);
|
||||
ASSERT(it.m_ptr < m_code_points + m_length);
|
||||
VERIFY(it.m_ptr >= m_code_points);
|
||||
VERIFY(it.m_ptr < m_code_points + m_length);
|
||||
return ((ptrdiff_t)it.m_ptr - (ptrdiff_t)m_code_points) / sizeof(u32);
|
||||
}
|
||||
|
||||
|
@ -116,9 +116,9 @@ public:
|
|||
{
|
||||
if (length == 0)
|
||||
return {};
|
||||
ASSERT(offset < m_length);
|
||||
ASSERT(!Checked<size_t>::addition_would_overflow(offset, length));
|
||||
ASSERT((offset + length) <= m_length);
|
||||
VERIFY(offset < m_length);
|
||||
VERIFY(!Checked<size_t>::addition_would_overflow(offset, length));
|
||||
VERIFY((offset + length) <= m_length);
|
||||
return Utf32View(m_code_points + offset, length);
|
||||
}
|
||||
|
||||
|
|
|
@ -67,8 +67,8 @@ Utf8CodepointIterator Utf8View::end() const
|
|||
|
||||
size_t Utf8View::byte_offset_of(const Utf8CodepointIterator& it) const
|
||||
{
|
||||
ASSERT(it.m_ptr >= begin_ptr());
|
||||
ASSERT(it.m_ptr <= end_ptr());
|
||||
VERIFY(it.m_ptr >= begin_ptr());
|
||||
VERIFY(it.m_ptr <= end_ptr());
|
||||
|
||||
return it.m_ptr - begin_ptr();
|
||||
}
|
||||
|
@ -162,15 +162,15 @@ bool Utf8CodepointIterator::operator!=(const Utf8CodepointIterator& other) const
|
|||
|
||||
Utf8CodepointIterator& Utf8CodepointIterator::operator++()
|
||||
{
|
||||
ASSERT(m_length > 0);
|
||||
VERIFY(m_length > 0);
|
||||
|
||||
size_t code_point_length_in_bytes = 0;
|
||||
u32 value;
|
||||
bool first_byte_makes_sense = decode_first_byte(*m_ptr, code_point_length_in_bytes, value);
|
||||
|
||||
ASSERT(first_byte_makes_sense);
|
||||
VERIFY(first_byte_makes_sense);
|
||||
|
||||
ASSERT(code_point_length_in_bytes <= m_length);
|
||||
VERIFY(code_point_length_in_bytes <= m_length);
|
||||
m_ptr += code_point_length_in_bytes;
|
||||
m_length -= code_point_length_in_bytes;
|
||||
|
||||
|
@ -179,17 +179,17 @@ Utf8CodepointIterator& Utf8CodepointIterator::operator++()
|
|||
|
||||
size_t Utf8CodepointIterator::code_point_length_in_bytes() const
|
||||
{
|
||||
ASSERT(m_length > 0);
|
||||
VERIFY(m_length > 0);
|
||||
size_t code_point_length_in_bytes = 0;
|
||||
u32 value;
|
||||
bool first_byte_makes_sense = decode_first_byte(*m_ptr, code_point_length_in_bytes, value);
|
||||
ASSERT(first_byte_makes_sense);
|
||||
VERIFY(first_byte_makes_sense);
|
||||
return code_point_length_in_bytes;
|
||||
}
|
||||
|
||||
u32 Utf8CodepointIterator::operator*() const
|
||||
{
|
||||
ASSERT(m_length > 0);
|
||||
VERIFY(m_length > 0);
|
||||
|
||||
u32 code_point_value_so_far = 0;
|
||||
size_t code_point_length_in_bytes = 0;
|
||||
|
@ -197,13 +197,13 @@ u32 Utf8CodepointIterator::operator*() const
|
|||
bool first_byte_makes_sense = decode_first_byte(m_ptr[0], code_point_length_in_bytes, code_point_value_so_far);
|
||||
if (!first_byte_makes_sense)
|
||||
dbgln("First byte doesn't make sense, bytes: {}", StringView { (const char*)m_ptr, m_length });
|
||||
ASSERT(first_byte_makes_sense);
|
||||
VERIFY(first_byte_makes_sense);
|
||||
if (code_point_length_in_bytes > m_length)
|
||||
dbgln("Not enough bytes (need {}, have {}), first byte is: {:#02x}, '{}'", code_point_length_in_bytes, m_length, m_ptr[0], (const char*)m_ptr);
|
||||
ASSERT(code_point_length_in_bytes <= m_length);
|
||||
VERIFY(code_point_length_in_bytes <= m_length);
|
||||
|
||||
for (size_t offset = 1; offset < code_point_length_in_bytes; offset++) {
|
||||
ASSERT(m_ptr[offset] >> 6 == 2);
|
||||
VERIFY(m_ptr[offset] >> 6 == 2);
|
||||
code_point_value_so_far <<= 6;
|
||||
code_point_value_so_far |= m_ptr[offset] & 63;
|
||||
}
|
||||
|
|
AK/Vector.h (22 changed lines)
|
@ -191,12 +191,12 @@ public:
|
|||
|
||||
ALWAYS_INLINE const T& at(size_t i) const
|
||||
{
|
||||
ASSERT(i < m_size);
|
||||
VERIFY(i < m_size);
|
||||
return data()[i];
|
||||
}
|
||||
ALWAYS_INLINE T& at(size_t i)
|
||||
{
|
||||
ASSERT(i < m_size);
|
||||
VERIFY(i < m_size);
|
||||
return data()[i];
|
||||
}
|
||||
|
||||
|
@ -211,7 +211,7 @@ public:
|
|||
|
||||
T take_last()
|
||||
{
|
||||
ASSERT(!is_empty());
|
||||
VERIFY(!is_empty());
|
||||
T value = move(last());
|
||||
last().~T();
|
||||
--m_size;
|
||||
|
@ -220,7 +220,7 @@ public:
|
|||
|
||||
T take_first()
|
||||
{
|
||||
ASSERT(!is_empty());
|
||||
VERIFY(!is_empty());
|
||||
T value = move(first());
|
||||
remove(0);
|
||||
return value;
|
||||
|
@ -235,14 +235,14 @@ public:
|
|||
|
||||
T unstable_take(size_t index)
|
||||
{
|
||||
ASSERT(index < m_size);
|
||||
VERIFY(index < m_size);
|
||||
swap(at(index), at(m_size - 1));
|
||||
return take_last();
|
||||
}
|
||||
|
||||
void remove(size_t index)
|
||||
{
|
||||
ASSERT(index < m_size);
|
||||
VERIFY(index < m_size);
|
||||
|
||||
if constexpr (Traits<T>::is_trivial()) {
|
||||
TypedTransfer<T>::copy(slot(index), slot(index + 1), m_size - index - 1);
|
||||
|
@ -261,8 +261,8 @@ public:
|
|||
{
|
||||
if (count == 0)
|
||||
return;
|
||||
ASSERT(index + count > index);
|
||||
ASSERT(index + count <= m_size);
|
||||
VERIFY(index + count > index);
|
||||
VERIFY(index + count <= m_size);
|
||||
|
||||
if constexpr (Traits<T>::is_trivial()) {
|
||||
TypedTransfer<T>::copy(slot(index), slot(index + count), m_size - index - count);
|
||||
|
@ -281,7 +281,7 @@ public:
|
|||
template<typename U = T>
|
||||
void insert(size_t index, U&& value)
|
||||
{
|
||||
ASSERT(index <= size());
|
||||
VERIFY(index <= size());
|
||||
if (index == size())
|
||||
return append(forward<U>(value));
|
||||
grow_capacity(size() + 1);
|
||||
|
@ -403,7 +403,7 @@ public:
|
|||
template<typename U = T>
|
||||
ALWAYS_INLINE void unchecked_append(U&& value)
|
||||
{
|
||||
ASSERT((size() + 1) <= capacity());
|
||||
VERIFY((size() + 1) <= capacity());
|
||||
new (slot(m_size)) T(forward<U>(value));
|
||||
++m_size;
|
||||
}
|
||||
|
@ -506,7 +506,7 @@ public:
|
|||
|
||||
void shrink(size_t new_size, bool keep_capacity = false)
|
||||
{
|
||||
ASSERT(new_size <= size());
|
||||
VERIFY(new_size <= size());
|
||||
if (new_size == size())
|
||||
return;
|
||||
|
||||
|
|
|
@ -229,7 +229,7 @@ inline WeakPtr<U> Weakable<T>::make_weak_ptr() const
|
|||
if (static_cast<const T*>(this)->unref()) {
|
||||
// We just dropped the last reference, which should have called
|
||||
// revoke_weak_ptrs, which should have invalidated our weak_ptr
|
||||
ASSERT(!weak_ptr.strong_ref());
|
||||
VERIFY(!weak_ptr.strong_ref());
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -91,7 +91,7 @@ public:
|
|||
void revoke()
|
||||
{
|
||||
auto current_consumers = m_consumers.fetch_or(1u, AK::MemoryOrder::memory_order_relaxed);
|
||||
ASSERT(!(current_consumers & 1u));
|
||||
VERIFY(!(current_consumers & 1u));
|
||||
// We flagged revokation, now wait until everyone trying to obtain
|
||||
// a strong reference is done
|
||||
while (current_consumers > 0) {
|
||||
|
|