#ifndef SCUDO_ATOMIC_H_
#define SCUDO_ATOMIC_H_
#include "internal_defs.h"
namespace scudo {
// Memory-ordering constants consumed by every helper in this header.
// NOTE(review): enumerator list elided in this view — presumably these
// mirror the compiler's __ATOMIC_* builtin ordering values (the
// static_asserts below appear to pin that correspondence); confirm
// against the full definition.
enum memory_order { … };
// Compile-time checks on the memory_order enumerators — six asserts,
// one per ordering. NOTE(review): conditions elided in this view;
// presumably each verifies that a memory_order value equals the
// corresponding __ATOMIC_* builtin constant so the enum can be passed
// straight to the __atomic_* intrinsics — confirm against the full file.
static_assert …;
static_assert …;
static_assert …;
static_assert …;
static_assert …;
static_assert …;
// Fixed-width atomic wrapper types, one per supported integer width,
// plus a pointer-sized variant. The function templates below rely on a
// nested `Type` member (see `typename T::Type` in their signatures).
// NOTE(review): members elided in this view — presumably each struct
// holds a suitably aligned `Type ValDoNotUse`-style field; confirm
// alignment guarantees against the full definitions.
struct atomic_u8 { … };
struct atomic_u16 { … };
struct atomic_s32 { … };
struct atomic_u32 { … };
struct atomic_u64 { … };
struct atomic_uptr { … };
// Atomically loads and returns the value stored in *A using ordering MO.
// NOTE(review): body elided in this view — behavior inferred from the
// signature only.
template <typename T>
inline typename T::Type atomic_load(const volatile T *A, memory_order MO) { … }
// Atomically stores V into *A using ordering MO. Returns nothing.
// NOTE(review): body elided in this view — behavior inferred from the
// signature only.
template <typename T>
inline void atomic_store(volatile T *A, typename T::Type V, memory_order MO) { … }
// Issues a thread fence. NOTE(review): the memory_order parameter is
// unnamed, so the elided body either ignores it or hard-codes a single
// ordering (commonly seq_cst in implementations of this shape) —
// confirm against the full definition before relying on weaker fences.
inline void atomic_thread_fence(memory_order) { … }
// Atomically adds V to *A with ordering MO and returns the value *A
// held immediately before the addition (fetch-then-add semantics,
// inferred from the `fetch_add` name — body elided in this view).
template <typename T>
inline typename T::Type atomic_fetch_add(volatile T *A, typename T::Type V,
memory_order MO) { … }
// Atomically subtracts V from *A with ordering MO and returns the prior
// value of *A (fetch-then-subtract semantics, inferred from the name —
// body elided in this view).
template <typename T>
inline typename T::Type atomic_fetch_sub(volatile T *A, typename T::Type V,
memory_order MO) { … }
// Atomically applies bitwise AND of V into *A with ordering MO and
// returns the prior value of *A (inferred from the name — body elided
// in this view).
template <typename T>
inline typename T::Type atomic_fetch_and(volatile T *A, typename T::Type V,
memory_order MO) { … }
// Atomically applies bitwise OR of V into *A with ordering MO and
// returns the prior value of *A (inferred from the name — body elided
// in this view).
template <typename T>
inline typename T::Type atomic_fetch_or(volatile T *A, typename T::Type V,
memory_order MO) { … }
// Atomically replaces the value in *A with V using ordering MO and
// returns the value that was previously stored (inferred from the
// `exchange` name — body elided in this view).
template <typename T>
inline typename T::Type atomic_exchange(volatile T *A, typename T::Type V,
memory_order MO) { … }
// Strong compare-and-swap: if *A equals *Cmp, stores Xchg into *A and
// returns true; otherwise presumably writes the observed value back
// into *Cmp and returns false (the standard CAS contract implied by the
// `Cmp` out-pointer — body elided in this view, confirm).
// MO is the ordering used on success; no separate failure ordering is
// taken, so the elided body chooses one itself.
template <typename T>
inline bool atomic_compare_exchange_strong(volatile T *A, typename T::Type *Cmp,
typename T::Type Xchg,
memory_order MO) { … }
// Convenience wrapper: load from *A with relaxed ordering (inferred
// from the name; presumably forwards to atomic_load with the relaxed
// order — body elided in this view).
template <typename T>
inline typename T::Type atomic_load_relaxed(const volatile T *A) { … }
// Convenience wrapper: store V into *A with relaxed ordering (inferred
// from the name; presumably forwards to atomic_store with the relaxed
// order — body elided in this view).
template <typename T>
inline void atomic_store_relaxed(volatile T *A, typename T::Type V) { … }
// Value-returning CAS overload: attempts to replace *A with Xchg when it
// equals Cmp, and presumably returns the value observed in *A (equal to
// Cmp on success) — body elided in this view, confirm.
// NOTE(review): this overloads the bool/pointer form above with
// different semantics (Cmp by value, returns T::Type instead of bool).
// Legal C++, but the shared name is error-prone for callers — consider
// a distinct name such as `atomic_compare_exchange` to disambiguate.
template <typename T>
inline typename T::Type
atomic_compare_exchange_strong(volatile T *A, typename T::Type Cmp,
typename T::Type Xchg, memory_order MO) { … }
}
#endif