/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H

/*
 * Note: if you use __cmpxchg64(), or their variants,
 *       you need to test for the feature in boot_cpu_data.
 */
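/*
 * A sketch of that test (example_val is a hypothetical variable, not
 * part of this file):
 *
 *	if (system_has_cmpxchg64())
 *		old = arch_cmpxchg64(&example_val, old, new);
 *	else
 *		... fall back to a lock-based scheme ...
 */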

/* cmpxchg8b operates on a u64 split into two 32-bit halves (EDX:EAX). */
union __u64_halves {
	u64 full;
	struct {
		u32 low, high;
	};
};

#define __arch_cmpxchg64(_ptr, _old, _new, _lock)			\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	/* Compare EDX:EAX with *_ptr; on match, store ECX:EBX. */	\
	asm volatile(_lock "cmpxchg8b %[ptr]"				\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})


static __always_inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new, LOCK_PREFIX);
}

static __always_inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	/* No LOCK prefix: the "local" variants are not SMP-safe. */
	return __arch_cmpxchg64(ptr, old, new,);
}

#define __arch_try_cmpxchg64(_ptr, _oldp, _new, _lock)			\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(_lock "cmpxchg8b %[ptr]"				\
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	/* On failure, hand the current value back through *_oldp. */	\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new, LOCK_PREFIX);
}

static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new,);
}
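
/*
 * Sketch of the intended calling pattern (not upstream code; the
 * example_add64() name is hypothetical, and the caller must ensure the
 * CPU has CMPXCHG8B): the try_cmpxchg() form is built for retry loops,
 * since a failed attempt already refreshes *oldp.
 */
static __always_inline void example_add64(volatile u64 *ptr, u64 inc)
{
	u64 old = *ptr;

	/* old is reloaded by __try_cmpxchg64() on each failure. */
	do { } while (!__try_cmpxchg64(ptr, &old, old + inc));
}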

#ifdef CONFIG_X86_CMPXCHG64

#define arch_cmpxchg64 __cmpxchg64

#define arch_cmpxchg64_local __cmpxchg64_local

#define arch_try_cmpxchg64 __try_cmpxchg64

#define arch_try_cmpxchg64_local __try_cmpxchg64_local

#else

/*
 * When building a kernel capable of running on the 80386 and 80486,
 * cmpxchg8b may have to be emulated, since those CPUs do not implement
 * the instruction.
 */
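
/*
 * Background (paraphrased from arch/x86/lib/cmpxchg8b_emu.S; treat the
 * details as assumptions and check that file): cmpxchg8b_emu takes the
 * pointer in %esi and the operands in the same registers as the real
 * instruction (old in %edx:%eax, new in %ecx:%ebx) and reports success
 * via ZF, so the call and the inline cmpxchg8b can be swapped in place
 * by ALTERNATIVE() below.
 */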

#define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	/* On !X86_FEATURE_CX8 CPUs, call the emulation instead. */	\
	asm volatile(ALTERNATIVE(_lock_loc				\
				 "call cmpxchg8b_emu",			\
				 _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8) \
		     : "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high),			\
		       [ptr] "S" (_ptr)					\
		     : "memory");					\
									\
	o.full;								\
})
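
/*
 * Note (an assumption based on <asm/alternative.h>): LOCK_PREFIX_HERE
 * emits only the .smp_locks annotation while "lock; " is the literal
 * prefix, so the two are passed separately to keep UP lock patching
 * working inside ALTERNATIVE(); the _local variants below simply pass
 * two empty arguments.
 */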

static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock; ");
}
/* The identity #define lets generic code detect arch_cmpxchg64 via #ifdef. */
#define arch_cmpxchg64 arch_cmpxchg64

static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, ,);
}
#define arch_cmpxchg64_local arch_cmpxchg64_local

#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(ALTERNATIVE(_lock_loc				\
				 "call cmpxchg8b_emu",			\
				 _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8) \
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high),			\
		       [ptr] "S" (_ptr)					\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_try_cmpxchg64 arch_try_cmpxchg64

static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
}
#define arch_try_cmpxchg64_local arch_try_cmpxchg64_local

#endif /* CONFIG_X86_CMPXCHG64 */

/* CMPXCHG8B availability is advertised via the CX8 CPUID feature bit. */
#define system_has_cmpxchg64()		boot_cpu_has(X86_FEATURE_CX8)

#endif /* _ASM_X86_CMPXCHG_32_H */