#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H
union __u64_halves {
	u64 full;
	struct {
		u32 low, high;
	};
};
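/*
 * cmpxchg8b compares the 64-bit value at the memory operand with EDX:EAX
 * and, if they are equal, stores ECX:EBX there; the value observed in
 * memory ends up in EDX:EAX either way.  __u64_halves lets the macros
 * below feed the two 32-bit halves of a u64 into those register pairs.
 */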
#define __arch_cmpxchg64(_ptr, _old, _new, _lock) …
static __always_inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{ … }
static __always_inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{ … }
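/*
 * Usage sketch only, not part of the mainline header: with the
 * value-returning form, the observed value comes back as the return
 * value, so a retry loop needs no separate re-read.  "counter" is a
 * hypothetical u64 used purely for illustration; real users go through
 * cmpxchg64()/arch_cmpxchg64() rather than these internal helpers.
 *
 *	u64 old = READ_ONCE(counter), prev;
 *
 *	for (;;) {
 *		prev = __cmpxchg64(&counter, old, old + 1);
 *		if (prev == old)
 *			break;
 *		old = prev;
 *	}
 */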
#define __arch_try_cmpxchg64(_ptr, _oldp, _new, _lock) …
static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{ … }
static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{ … }
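/*
 * The try_ forms follow the kernel-wide try_cmpxchg contract: they return
 * true when the exchange happened and, on failure, update the value
 * pointed to by @oldp with what was found in memory.  Hedged sketch with
 * a hypothetical "counter":
 *
 *	u64 old = READ_ONCE(counter);
 *
 *	while (!__try_cmpxchg64(&counter, &old, old + 1))
 *		;
 */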
#ifdef CONFIG_X86_CMPXCHG64
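/*
 * CONFIG_X86_CMPXCHG64 means the kernel is built only for CPUs that are
 * guaranteed to implement cmpxchg8b (CX8), so the arch_ entry points can
 * simply map to the native wrappers above.
 */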
#define arch_cmpxchg64 …
#define arch_cmpxchg64_local …
#define arch_try_cmpxchg64 …
#define arch_try_cmpxchg64_local …
#else
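/*
 * Without CONFIG_X86_CMPXCHG64 the kernel may also run on 486-class CPUs
 * that lack cmpxchg8b.  The _emu variants below are expected to fall back
 * to the out-of-line cmpxchg8b_emu helper on such CPUs (in mainline via
 * an ALTERNATIVE keyed on X86_FEATURE_CX8) while still emitting the
 * native instruction when it is available.
 */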
#define __arch_cmpxchg64_emu …
static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_cmpxchg64 …
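/*
 * The define following each wrapper is, in mainline, the usual
 * self-referencing "#define arch_cmpxchg64 arch_cmpxchg64" idiom: the
 * generic atomic headers test whether the macro exists to decide if a
 * fallback implementation is needed.
 */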
static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
return __arch_cmpxchg64_emu(ptr, old, new, ,);
}
#define arch_cmpxchg64_local …
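/*
 * The trailing macro arguments select the lock prefix: the SMP-safe
 * wrappers pass LOCK_PREFIX_HERE and "lock; ", while the _local wrappers
 * pass empty arguments, so the _local forms skip the lock prefix and are
 * only atomic with respect to the issuing CPU.
 */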
#define __arch_try_cmpxchg64_emu …
static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_try_cmpxchg64 …
static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
}
#define arch_try_cmpxchg64_local …
#endif
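/*
 * system_has_cmpxchg64() lets callers check at runtime whether the CPU
 * actually implements cmpxchg8b; in mainline this is expected to be a
 * boot_cpu_has(X86_FEATURE_CX8) test.
 */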
#define system_has_cmpxchg64() …
#endif /* _ASM_X86_CMPXCHG_32_H */