linux/arch/x86/include/asm/atomic64_64.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */
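/*
 * All read-modify-write ops below use LOCK_PREFIX'd instructions so the
 * whole RMW is atomic across CPUs; LOCK_PREFIX (from <asm/alternative.h>)
 * emits the "lock" prefix on SMP kernels and expands to nothing on UP.
 */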

#define ATOMIC64_INIT(i)	{ (i) }

static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __READ_ONCE((v)->counter);
}

static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__WRITE_ONCE(v->counter, i);
}

static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

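/*
 * Ops below that re-define their own name (e.g. "#define arch_atomic64_inc
 * arch_atomic64_inc") do so to tell the generic atomic fallback headers that
 * this architecture provides the operation, so no fallback is generated.
 */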
static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test

static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_inc arch_atomic64_inc

static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_dec arch_atomic64_dec

static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test

static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

static __always_inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
}
#define arch_atomic64_add_negative arch_atomic64_add_negative

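/*
 * The *_return and fetch_add variants are built on the "xadd" instruction,
 * which atomically adds the operand to memory and returns the previous
 * value of the memory location in a register.
 */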
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}
#define arch_atomic64_add_return arch_atomic64_add_return

#define arch_atomic64_sub_return(i, v) arch_atomic64_add_return(-(i), v)

static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

#define arch_atomic64_fetch_sub(i, v) arch_atomic64_fetch_add(-(i), v)

static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

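/*
 * try_cmpxchg() returns true when the exchange happened and, on failure,
 * updates *old with the value observed in memory; this makes it the natural
 * building block for the cmpxchg loops in the fetch_{and,or,xor} ops below.
 */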
static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic64_xchg arch_atomic64_xchg

static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

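/*
 * There is no x86 instruction that both applies a logical op to memory and
 * returns the old value, so the fetch_{and,or,xor} variants are implemented
 * as try_cmpxchg() loops around the plain ops.
 */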
static __always_inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static __always_inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static __always_inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static __always_inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#endif /* _ASM_X86_ATOMIC64_64_H */