#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H
#include <linux/compiler.h>
#include <linux/types.h>
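/*
 * 64-bit atomic operations for 32-bit x86.  The type and the operations are
 * backed either by LOCK CMPXCHG8B or, on CPUs without that instruction, by
 * out-of-line helpers (see arch/x86/lib/atomic64_*.S).
 */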
/* A 64-bit atomic type, kept 8-byte aligned so cmpxchg8b can operate on it. */
typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;
#define ATOMIC64_INIT(val) …
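/*
 * Read an atomic64_t non-atomically: the load may be torn on 32-bit.  Only
 * use this where a subsequent atomic operation checks the value anyway,
 * e.g. to prime an unconditional try_cmpxchg() loop:
 *
 *	s64 val = arch_atomic64_read_nonatomic(v);
 *	do { } while (!arch_atomic64_try_cmpxchg(v, &val, val OP i));
 */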
static __always_inline s64 arch_atomic64_read_nonatomic(const atomic64_t *v)
{ … }
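/*
 * Declarations for the out-of-line helpers that implement the operations.
 * ATOMIC64_DECL_ONE() declares a single helper and, when ATOMIC64_EXPORT is
 * defined, also emits an export for it.
 */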
#define __ATOMIC64_DECL …
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE …
#else
#define ATOMIC64_DECL_ONE …
#endif
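/*
 * With CONFIG_X86_CMPXCHG64 the target CPU is known to have CMPXCHG8B, so
 * the _cx8 helpers are called directly.  Otherwise alternative_call() picks
 * between the _386 fallbacks and the _cx8 helpers at runtime based on
 * X86_FEATURE_CX8.
 */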
#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64 …
#define ATOMIC64_DECL …
#else
#define __alternative_atomic64 …
#define ATOMIC64_DECL …
ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif
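/* Convenience wrapper: use the same helper name for the 386 and cx8 cases. */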
#define alternative_atomic64 …
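/* Declare the out-of-line helper(s) for each supported operation. */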
ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);
#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT
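/*
 * Exchange and compare-and-exchange.  Defining each arch_atomic64_* name to
 * itself tells the generic atomic fallback code that the architecture
 * provides the operation directly.
 */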
static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{ … }
#define arch_atomic64_cmpxchg …
static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{ … }
#define arch_atomic64_try_cmpxchg …
static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{ … }
#define arch_atomic64_xchg …
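/*
 * Plain set and read: on 32-bit a 64-bit store/load is not naturally atomic,
 * so both go through the helpers to guarantee a single atomic access.
 */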
static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{ … }
static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{ … }
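/* Arithmetic ops that return the new value of *v. */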
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{ … }
#define arch_atomic64_add_return …
static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{ … }
#define arch_atomic64_sub_return …
static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v)
{ … }
#define arch_atomic64_inc_return …
static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v)
{ … }
#define arch_atomic64_dec_return …
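/* Arithmetic ops that do not return a value. */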
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{ … }
static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{ … }
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{ … }
#define arch_atomic64_inc …
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{ … }
#define arch_atomic64_dec …
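/*
 * Conditional ops: add_unless() adds @a unless @v was @u, inc_not_zero()
 * increments unless @v was zero (both return non-zero if the change was
 * made), and dec_if_positive() decrements only if the result stays
 * non-negative, returning the decremented value either way.
 */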
static __always_inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{ … }
#define arch_atomic64_add_unless …
static __always_inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{ … }
#define arch_atomic64_inc_not_zero …
static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{ … }
#define arch_atomic64_dec_if_positive …
#undef alternative_atomic64
#undef __alternative_atomic64
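/*
 * The bitwise ops and the fetch_* variants below are implemented as
 * compare-and-exchange loops; fetch_* returns the value of *v before the
 * operation.
 */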
static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v)
{ … }
static __always_inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{ … }
#define arch_atomic64_fetch_and …
static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v)
{ … }
static __always_inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{ … }
#define arch_atomic64_fetch_or …
static __always_inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{ … }
static __always_inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{ … }
#define arch_atomic64_fetch_xor …
static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{ … }
#define arch_atomic64_fetch_add …
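/* fetch_sub is expressed as fetch_add of the negated operand. */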
#define arch_atomic64_fetch_sub(i, v) …
#endif