diff --git a/AK/Atomic.h b/AK/Atomic.h
index f06eca1203..68d3b55ec9 100644
--- a/AK/Atomic.h
+++ b/AK/Atomic.h
@@ -257,6 +257,108 @@ public:
     }
 };
 
+template<typename T>
+class AtomicRef
+{
+    T* m_value { nullptr };
+
+public:
+    AtomicRef() noexcept = delete;
+    AtomicRef(const AtomicRef&) = delete;
+    AtomicRef& operator=(const AtomicRef&) volatile = delete;
+
+    AtomicRef(T* val) noexcept
+        : m_value(val)
+    {
+    }
+
+    AtomicRef(T& val) noexcept
+        : m_value(&val)
+    {
+    }
+
+    T exchange(T desired, MemoryOrder order = memory_order_seq_cst) volatile noexcept
+    {
+        return __atomic_exchange_n(m_value, desired, order);
+    }
+
+    bool compare_exchange_strong(T& expected, T desired, MemoryOrder order = memory_order_seq_cst) volatile noexcept
+    {
+        // release/acq_rel are not valid failure orderings, so fall back to acquire on failure.
+        if (order == memory_order_acq_rel || order == memory_order_release)
+            return __atomic_compare_exchange_n(m_value, &expected, desired, false, memory_order_acq_rel, memory_order_acquire);
+        else
+            return __atomic_compare_exchange_n(m_value, &expected, desired, false, order, order);
+    }
+
+    T operator++() volatile noexcept
+    {
+        return fetch_add(1) + 1;
+    }
+
+    T operator++(int) volatile noexcept
+    {
+        return fetch_add(1);
+    }
+
+    T operator+=(T val) volatile noexcept
+    {
+        return fetch_add(val) + val;
+    }
+
+    T fetch_add(T val, MemoryOrder order = memory_order_seq_cst) volatile noexcept
+    {
+        return __atomic_fetch_add(m_value, val, order);
+    }
+
+    T operator--() volatile noexcept
+    {
+        return fetch_sub(1) - 1;
+    }
+
+    T operator--(int) volatile noexcept
+    {
+        return fetch_sub(1);
+    }
+
+    T operator-=(T val) volatile noexcept
+    {
+        return fetch_sub(val) - val;
+    }
+
+    T fetch_sub(T val, MemoryOrder order = memory_order_seq_cst) volatile noexcept
+    {
+        return __atomic_fetch_sub(m_value, val, order);
+    }
+
+    operator T() const volatile noexcept
+    {
+        return load();
+    }
+
+    T load(MemoryOrder order = memory_order_seq_cst) const volatile noexcept
+    {
+        return __atomic_load_n(m_value, order);
+    }
+
+    T operator=(T desired) volatile noexcept
+    {
+        store(desired);
+        return desired;
+    }
+
+    void store(T desired, MemoryOrder order = memory_order_seq_cst) volatile noexcept
+    {
+        __atomic_store_n(m_value, desired, order);
+    }
+
+    bool is_lock_free() const volatile noexcept
+    {
+        return __atomic_is_lock_free(sizeof(*m_value), m_value);
+    }
+};
+
 }
 
 using AK::Atomic;
+using AK::AtomicRef;
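
A minimal usage sketch, assuming this patch is applied to AK/Atomic.h; the names s_counter and bump are illustrative, not part of the patch. Since AtomicRef is non-owning, the referenced storage must outlive it:

    #include <AK/Atomic.h>

    static int s_counter = 0;                           // storage owned elsewhere
    static AtomicRef<int> s_atomic_counter(s_counter);  // refers to s_counter, does not copy it

    void bump()
    {
        ++s_atomic_counter; // atomic increment of s_counter via fetch_add
        int expected = 5;
        // Swap in 100 only if the referenced value currently equals 5;
        // on failure, `expected` is updated to the value that was observed.
        s_atomic_counter.compare_exchange_strong(expected, 100);
    }

Unlike Atomic<T>, which owns its value, AtomicRef layers atomic access onto existing storage, so the same int can also be reached through other (non-atomic) paths; callers are responsible for keeping those accesses race-free.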