#ifndef KMP_OS_H
#define KMP_OS_H
#include "kmp_config.h"
#include <atomic>
#include <stdarg.h>
#include <stdlib.h>
#include <string.h>
#define KMP_FTN_PLAIN …
#define KMP_FTN_APPEND …
#define KMP_FTN_UPPER …
#define KMP_PTR_SKIP …
#define KMP_OFF …
#define KMP_ON …
#define KMP_MEM_CONS_VOLATILE …
#define KMP_MEM_CONS_FENCE …
#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL …
#endif
#ifndef __has_cpp_attribute
#define __has_cpp_attribute …
#endif
#ifndef __has_attribute
#define __has_attribute …
#endif
#define KMP_COMPILER_ICC …
#define KMP_COMPILER_GCC …
#define KMP_COMPILER_CLANG …
#define KMP_COMPILER_MSVC …
#define KMP_COMPILER_ICX …
#if __INTEL_CLANG_COMPILER
#undef KMP_COMPILER_ICX
#define KMP_COMPILER_ICX …
#elif defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC …
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG …
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC …
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC …
#else
#error Unknown compiler
#endif
#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD || KMP_OS_NETBSD || \
KMP_OS_DRAGONFLY || KMP_OS_AIX) && \
!KMP_OS_WASI && !KMP_OS_EMSCRIPTEN
#define KMP_AFFINITY_SUPPORTED …
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY …
#else
#define KMP_GROUP_AFFINITY …
#endif
#else
#define KMP_AFFINITY_SUPPORTED …
#define KMP_GROUP_AFFINITY …
#endif
#if (KMP_OS_LINUX || (KMP_OS_FREEBSD && __FreeBSD_version >= 1301000))
#define KMP_HAVE_SCHED_GETCPU …
#else
#define KMP_HAVE_SCHED_GETCPU …
#endif
#define KMP_HAVE_QUAD …
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD …
#elif KMP_COMPILER_CLANG
_Quad;
#elif KMP_COMPILER_GCC
#if (!KMP_OS_NETBSD || __GNUC__ >= 10)
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD …
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD …
#endif
#endif
#define KMP_USE_X87CONTROL …
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE …
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC …
#define KMP_UINT32_SPEC …
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC …
#define KMP_UINT64_SPEC …
#else
struct kmp_struct64 {
kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL …
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR …
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC …
#define KMP_UINTPTR_SPEC …
#endif
#endif
#if KMP_OS_UNIX
#define KMP_END_OF_LINE …
kmp_int8;
kmp_uint8;
kmp_int16;
kmp_uint16;
kmp_int32;
kmp_uint32;
kmp_int64;
kmp_uint64;
#define KMP_INT32_SPEC …
#define KMP_UINT32_SPEC …
#define KMP_INT64_SPEC …
#define KMP_UINT64_SPEC …
#endif
#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS || KMP_ARCH_WASM || \
KMP_ARCH_PPC || KMP_ARCH_AARCH64_32
#define KMP_SIZE_T_SPEC …
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || \
KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 || \
KMP_ARCH_VE || KMP_ARCH_S390X
#define KMP_SIZE_T_SPEC …
#else
#error "Can't determine size_t printf format specifier."
#endif
#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_WASM || KMP_ARCH_PPC
#define KMP_SIZE_T_MAX …
#else
#define KMP_SIZE_T_MAX …
#endif
kmp_size_t;
kmp_real32;
kmp_real64;
#ifndef KMP_INTPTR
#define KMP_INTPTR …
kmp_intptr_t;
kmp_uintptr_t;
#define KMP_INTPTR_SPEC …
#define KMP_UINTPTR_SPEC …
#endif
#ifdef BUILD_I8
kmp_int;
kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif
#define KMP_INT_MAX …
#define KMP_INT_MIN …
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64 || KMP_ARCH_WASM) && \
(KMP_OS_FREEBSD || KMP_OS_LINUX || KMP_OS_WASI)
kmp_va_list;
#define kmp_va_deref(ap) …
#define kmp_va_addr_of(ap) …
#else
typedef va_list kmp_va_list;
#define kmp_va_deref …
#define kmp_va_addr_of …
#endif
#ifdef __cplusplus
#define CCAST(type, var) …
#define RCAST(type, var) …
template <typename T> struct traits_t { … };
template <> struct traits_t<signed int> { … };
template <> struct traits_t<unsigned int> { … };
template <> struct traits_t<signed long> { … };
template <> struct traits_t<signed long long> { … };
template <> struct traits_t<unsigned long long> { … };
#else
#define CCAST …
#define RCAST …
#endif
#define KMP_EXPORT …
#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline …
#endif
#define KMP_HAVE_MWAIT …
#define KMP_HAVE_UMWAIT …
#if KMP_OS_WINDOWS
#define WIN32_NO_STATUS
#include <windows.h>
// Query the OS for the virtual-memory page size in bytes; Windows has no
// getpagesize(), so go through GetSystemInfo().
static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO info;
  GetSystemInfo(&info);
  return info.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() …
#endif
#define PAGE_ALIGNED(_addr) …
#define ALIGN_TO_PAGE(x) …
#ifdef __cplusplus
extern "C" {
#endif
#define INTERNODE_CACHE_LINE …
#ifndef CACHE_LINE
#define CACHE_LINE …
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
#warning CACHE_LINE is too small.
#endif
#endif
#define KMP_CACHE_PREFETCH(ADDR) …
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() …
#elif KMP_COMPILER_ICC
#define KMP_FALLTHROUGH …
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH …
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH …
#else
#define KMP_FALLTHROUGH …
#endif
#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG …
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG …
#endif
#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM …
#else
#define KMP_ATTRIBUTE_TARGET_RTM …
#endif
#if __cplusplus >= 201103L
#define KMP_NORETURN …
#elif KMP_OS_WINDOWS
#define KMP_NORETURN …
#else
#define KMP_NORETURN …
#endif
#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN …
#define KMP_THREAD_LOCAL …
#define KMP_ALIAS …
#else
#define KMP_ALIGN(bytes) …
#define KMP_THREAD_LOCAL …
#define KMP_ALIAS(alias_of) …
#endif
#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL …
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL …
#endif
#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL …
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL …
#endif
#ifndef KMP_STR
#define KMP_STR(x) …
#define _KMP_STR(x) …
#endif
#ifdef KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) …
#define _KMP_EXPAND_NAME(api_name) …
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) …
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) …
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str) …
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, \
default_ver) …
#else
#define KMP_EXPAND_NAME …
#define KMP_VERSION_SYMBOL …
#define KMP_VERSION_OMPC_SYMBOL …
#endif
#define KMP_DO_ALIGN(bytes) …
#define KMP_ALIGN_CACHE …
#define KMP_ALIGN_CACHE_INTERNODE …
enum kmp_mem_fence_type { … };
#if KMP_ASM_INTRINS && KMP_OS_WINDOWS && !((KMP_ARCH_AARCH64 || KMP_ARCH_ARM) && (KMP_COMPILER_CLANG || KMP_COMPILER_GCC))
#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#if !KMP_32_BIT_ARCH
#pragma intrinsic(InterlockedExchange64)
#endif
#endif
#define KMP_TEST_THEN_INC32 …
#define KMP_TEST_THEN_INC_ACQ32 …
#define KMP_TEST_THEN_ADD4_32 …
#define KMP_TEST_THEN_ADD4_ACQ32 …
#define KMP_TEST_THEN_DEC32 …
#define KMP_TEST_THEN_DEC_ACQ32 …
#define KMP_TEST_THEN_ADD32 …
#define KMP_COMPARE_AND_STORE_RET32 …
#define KMP_XCHG_FIXED32 …
#define KMP_XCHG_FIXED64 …
// Atomically exchange the 32-bit float at *p with v; returns the previous
// value. The float<->integer bit copies are done with memcpy rather than
// pointer casts: reading a float object through a long* (and vice versa)
// violates the strict-aliasing rule and is undefined behavior. memcpy of
// 4 bytes compiles to the same single move on this target.
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  long vi;
  memcpy(&vi, &v, sizeof(vi));
  long old = InterlockedExchange((volatile long *)p, vi);
  kmp_real32 result;
  memcpy(&result, &old, sizeof(result));
  return result;
}
#define KMP_TEST_THEN_OR8 …
#define KMP_TEST_THEN_AND8 …
#define KMP_TEST_THEN_OR32 …
#define KMP_TEST_THEN_AND32 …
#define KMP_TEST_THEN_OR64 …
#define KMP_TEST_THEN_AND64 …
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64 …
#define KMP_TEST_THEN_INC_ACQ64 …
#define KMP_TEST_THEN_ADD4_64 …
#define KMP_TEST_THEN_ADD64 …
#define KMP_COMPARE_AND_STORE_ACQ8 …
#define KMP_COMPARE_AND_STORE_REL8 …
#define KMP_COMPARE_AND_STORE_ACQ16 …
#define KMP_COMPARE_AND_STORE_ACQ32 …
#define KMP_COMPARE_AND_STORE_REL32 …
#define KMP_COMPARE_AND_STORE_ACQ64 …
#define KMP_COMPARE_AND_STORE_REL64 …
#define KMP_COMPARE_AND_STORE_PTR …
// 8-bit CAS with acquire ordering: store sv into *p iff *p == cv.
// Returns nonzero iff the swap took place.
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  kmp_int8 prev = _InterlockedCompareExchange8_acq(p, sv, cv);
  return prev == cv;
}
// 8-bit CAS with release ordering: store sv into *p iff *p == cv.
// Returns nonzero iff the swap took place.
inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  kmp_int8 prev = _InterlockedCompareExchange8_rel(p, sv, cv);
  return prev == cv;
}
// 16-bit CAS with acquire ordering: store sv into *p iff *p == cv.
// Returns nonzero iff the swap took place.
inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  kmp_int16 prev = _InterlockedCompareExchange16_acq(p, sv, cv);
  return prev == cv;
}
// 16-bit CAS with release ordering: store sv into *p iff *p == cv.
// Returns nonzero iff the swap took place.
inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  kmp_int16 prev = _InterlockedCompareExchange16_rel(p, sv, cv);
  return prev == cv;
}
// 32-bit CAS with acquire ordering: store sv into *p iff *p == cv.
// The intrinsic takes long*, hence the cast. Returns nonzero iff the swap
// took place.
inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  long prev = _InterlockedCompareExchange_acq((volatile long *)p, sv, cv);
  return prev == cv;
}
// 32-bit CAS with release ordering: store sv into *p iff *p == cv.
// The intrinsic takes long*, hence the cast. Returns nonzero iff the swap
// took place.
inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  long prev = _InterlockedCompareExchange_rel((volatile long *)p, sv, cv);
  return prev == cv;
}
// 64-bit CAS with acquire ordering: store sv into *p iff *p == cv.
// Returns nonzero iff the swap took place.
inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  kmp_int64 prev = _InterlockedCompareExchange64_acq(p, sv, cv);
  return prev == cv;
}
// 64-bit CAS with release ordering: store sv into *p iff *p == cv.
// Returns nonzero iff the swap took place.
inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  kmp_int64 prev = _InterlockedCompareExchange64_rel(p, sv, cv);
  return prev == cv;
}
// Pointer-sized CAS: store sv into *p iff *p == cv. Returns nonzero iff
// the swap took place.
inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  void *prev = _InterlockedCompareExchangePointer(p, sv, cv);
  return prev == cv;
}
#define KMP_COMPARE_AND_STORE_RET8 …
#define KMP_COMPARE_AND_STORE_RET16 …
#define KMP_COMPARE_AND_STORE_RET64 …
#define KMP_XCHG_FIXED8 …
#define KMP_XCHG_FIXED16 …
#define KMP_XCHG_REAL64 …
// Atomically exchange the 64-bit float at *p with v; returns the previous
// value. The float<->integer bit copies are done with memcpy rather than
// pointer casts: reading a double object through a kmp_int64* (and back)
// violates the strict-aliasing rule and is undefined behavior. memcpy of
// 8 bytes compiles to the same single move on this target.
inline kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 vi;
  memcpy(&vi, &v, sizeof(vi));
  kmp_int64 old = _InterlockedExchange64((volatile kmp_int64 *)p, vi);
  kmp_real64 result;
  memcpy(&result, &old, sizeof(result));
  return result;
}
#else
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
kmp_int64 cv, kmp_int64 sv);
extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
#define KMP_TEST_THEN_INC64 …
#define KMP_TEST_THEN_INC_ACQ64 …
#define KMP_TEST_THEN_ADD4_64 …
#define KMP_TEST_THEN_ADD4_ACQ64 …
#define KMP_TEST_THEN_DEC64 …
#define KMP_TEST_THEN_DEC_ACQ64 …
#define KMP_TEST_THEN_ADD8 …
#define KMP_TEST_THEN_ADD64 …
#define KMP_COMPARE_AND_STORE_ACQ8 …
#define KMP_COMPARE_AND_STORE_REL8 …
#define KMP_COMPARE_AND_STORE_ACQ16 …
#define KMP_COMPARE_AND_STORE_REL16 …
#define KMP_COMPARE_AND_STORE_ACQ32 …
#define KMP_COMPARE_AND_STORE_REL32 …
#define KMP_COMPARE_AND_STORE_ACQ64 …
#define KMP_COMPARE_AND_STORE_REL64 …
#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR …
#else
#define KMP_COMPARE_AND_STORE_PTR …
#endif
#define KMP_COMPARE_AND_STORE_RET8 …
#define KMP_COMPARE_AND_STORE_RET16 …
#define KMP_COMPARE_AND_STORE_RET64 …
#define KMP_XCHG_FIXED8 …
#define KMP_XCHG_FIXED16 …
#define KMP_XCHG_REAL64 …
#endif
#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
#define KMP_TEST_THEN_INC32(p) …
#define KMP_TEST_THEN_INC_ACQ32(p) …
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64 …
#define KMP_TEST_THEN_INC_ACQ64 …
#else
#define KMP_TEST_THEN_INC64(p) …
#define KMP_TEST_THEN_INC_ACQ64(p) …
#endif
#define KMP_TEST_THEN_ADD4_32(p) …
#define KMP_TEST_THEN_ADD4_ACQ32(p) …
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64 …
#define KMP_TEST_THEN_ADD4_ACQ64 …
#define KMP_TEST_THEN_DEC64 …
#define KMP_TEST_THEN_DEC_ACQ64 …
#else
#define KMP_TEST_THEN_ADD4_64(p) …
#define KMP_TEST_THEN_ADD4_ACQ64(p) …
#define KMP_TEST_THEN_DEC64(p) …
#define KMP_TEST_THEN_DEC_ACQ64(p) …
#endif
#define KMP_TEST_THEN_DEC32(p) …
#define KMP_TEST_THEN_DEC_ACQ32(p) …
#define KMP_TEST_THEN_ADD8(p, v) …
#define KMP_TEST_THEN_ADD32(p, v) …
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64 …
#else
#define KMP_TEST_THEN_ADD64(p, v) …
#endif
#define KMP_TEST_THEN_OR8(p, v) …
#define KMP_TEST_THEN_AND8(p, v) …
#define KMP_TEST_THEN_OR32(p, v) …
#define KMP_TEST_THEN_AND32(p, v) …
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64 …
#define KMP_TEST_THEN_AND64 …
#else
#define KMP_TEST_THEN_OR64(p, v) …
#define KMP_TEST_THEN_AND64(p, v) …
#endif
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) …
#if KMP_ARCH_MIPS
// 64-bit compare-and-swap for the MIPS-specific path, built on the GCC
// __atomic builtins: store sv into *p iff *p == cv.
// Returns true iff the swap took place.
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  kmp_uint64 expected = cv; // builtin may overwrite this on mismatch
  return __atomic_compare_exchange(p, &expected, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
// Value-returning 64-bit compare-and-swap for the MIPS-specific path:
// store sv into *p iff *p == cv. On mismatch the builtin writes the value
// it observed at *p back into cv, so cv holds the observed value either way
// (on success it equals the old value by definition).
// NOTE(review): the return type is bool, so the observed 64-bit value is
// truncated to 0/1 on return. That looks suspicious for a "val" CAS whose
// callers typically compare the result against cv — confirm against the
// KMP_COMPARE_AND_STORE_RET64 expansion before relying on the return value.
static inline bool mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                  kmp_uint64 cv,
                                                  kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64 …
#define KMP_COMPARE_AND_STORE_REL64 …
#define KMP_COMPARE_AND_STORE_RET64 …
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) …
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) …
#endif
#if KMP_OS_DARWIN && defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1800
#define KMP_XCHG_FIXED8 …
#else
#define KMP_XCHG_FIXED8(p, v) …
#endif
#define KMP_XCHG_FIXED16(p, v) …
#define KMP_XCHG_FIXED32(p, v) …
#define KMP_XCHG_FIXED64(p, v) …
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) { … }
inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) { … }
#else
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
kmp_int64 cv, kmp_int64 sv);
extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
#define KMP_TEST_THEN_INC32 …
#define KMP_TEST_THEN_INC_ACQ32 …
#define KMP_TEST_THEN_INC64 …
#define KMP_TEST_THEN_INC_ACQ64 …
#define KMP_TEST_THEN_ADD4_32 …
#define KMP_TEST_THEN_ADD4_ACQ32 …
#define KMP_TEST_THEN_ADD4_64 …
#define KMP_TEST_THEN_ADD4_ACQ64 …
#define KMP_TEST_THEN_DEC32 …
#define KMP_TEST_THEN_DEC_ACQ32 …
#define KMP_TEST_THEN_DEC64 …
#define KMP_TEST_THEN_DEC_ACQ64 …
#define KMP_TEST_THEN_ADD8 …
#define KMP_TEST_THEN_ADD32 …
#define KMP_TEST_THEN_ADD64 …
#define KMP_TEST_THEN_OR8 …
#define KMP_TEST_THEN_AND8 …
#define KMP_TEST_THEN_OR32 …
#define KMP_TEST_THEN_AND32 …
#define KMP_TEST_THEN_OR64 …
#define KMP_TEST_THEN_AND64 …
#define KMP_COMPARE_AND_STORE_ACQ8 …
#define KMP_COMPARE_AND_STORE_REL8 …
#define KMP_COMPARE_AND_STORE_ACQ16 …
#define KMP_COMPARE_AND_STORE_REL16 …
#define KMP_COMPARE_AND_STORE_ACQ32 …
#define KMP_COMPARE_AND_STORE_REL32 …
#define KMP_COMPARE_AND_STORE_ACQ64 …
#define KMP_COMPARE_AND_STORE_REL64 …
#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR …
#else
#define KMP_COMPARE_AND_STORE_PTR …
#endif
#define KMP_COMPARE_AND_STORE_RET8 …
#define KMP_COMPARE_AND_STORE_RET16 …
#define KMP_COMPARE_AND_STORE_RET32 …
#define KMP_COMPARE_AND_STORE_RET64 …
#define KMP_XCHG_FIXED8 …
#define KMP_XCHG_FIXED16 …
#define KMP_XCHG_FIXED32 …
#define KMP_XCHG_FIXED64 …
#define KMP_XCHG_REAL32 …
#define KMP_XCHG_REAL64 …
#endif
#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB …
#define KMP_IMB …
#else
#define KMP_MB …
#define KMP_IMB …
#endif
#endif
#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 || \
KMP_ARCH_VE || KMP_ARCH_S390X || KMP_ARCH_PPC || KMP_ARCH_AARCH64_32
#if KMP_OS_WINDOWS
#undef KMP_MB
#define KMP_MB …
#else
#define KMP_MB …
#endif
#endif
#ifndef KMP_MB
#define KMP_MB() …
#endif
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_MIC
#define KMP_MFENCE …
#define KMP_SFENCE …
#else
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
#define KMP_MFENCE_ …
#define KMP_SFENCE_ …
#elif KMP_COMPILER_MSVC
#define KMP_MFENCE_ …
#define KMP_SFENCE_ …
#else
#define KMP_MFENCE_() …
#define KMP_SFENCE_() …
#endif
#define KMP_MFENCE() …
#define KMP_SFENCE() …
#endif
#else
#define KMP_MFENCE …
#define KMP_SFENCE …
#endif
#ifndef KMP_IMB
#define KMP_IMB() …
#endif
#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) …
#endif
#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) …
#endif
#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) …
#endif
#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) …
#endif
#define TCR_1(a) …
#define TCW_1(a, b) …
#define TCR_4(a) …
#define TCW_4(a, b) …
#define TCI_4(a) …
#define TCD_4(a) …
#define TCR_8(a) …
#define TCW_8(a, b) …
#define TCI_8(a) …
#define TCD_8(a) …
#define TCR_SYNC_4(a) …
#define TCW_SYNC_4(a, b) …
#define TCX_SYNC_4(a, b, c) …
#define TCR_SYNC_8(a) …
#define TCW_SYNC_8(a, b) …
#define TCX_SYNC_8(a, b, c) …
#if KMP_ARCH_X86 || KMP_ARCH_MIPS || KMP_ARCH_WASM || KMP_ARCH_PPC
#define TCR_PTR …
#define TCW_PTR …
#define TCR_SYNC_PTR …
#define TCW_SYNC_PTR …
#define TCX_SYNC_PTR …
#else
#define TCR_PTR(a) …
#define TCW_PTR(a, b) …
#define TCR_SYNC_PTR(a) …
#define TCW_SYNC_PTR(a, b) …
#define TCX_SYNC_PTR(a, b, c) …
#endif
#ifndef FTN_TRUE
#define FTN_TRUE …
#endif
#ifndef FTN_FALSE
#define FTN_FALSE …
#endif
microtask_t;
#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST …
#else
#define VOLATILE_CAST(x) …
#endif
#define KMP_WAIT …
#define KMP_WAIT_PTR …
#define KMP_EQ …
#define KMP_NEQ …
#define KMP_LT …
#define KMP_GE …
#define KMP_LE …
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND …
#endif
#ifndef KMP_USE_BGET
#define KMP_USE_BGET …
#endif
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX …
#endif
#define KMP_USE_DYNAMIC_LOCK …
#if KMP_USE_DYNAMIC_LOCK
#define KMP_USE_TSX …
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS …
#endif
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME …
#endif
enum kmp_warnings_level { … };
#ifdef __cplusplus
}
#endif
#include "kmp_safe_c_api.h"
#define KMP_ATOMIC_LD(p, order) …
#define KMP_ATOMIC_OP(op, p, v, order) …
#define KMP_ATOMIC_LD_ACQ(p) …
#define KMP_ATOMIC_LD_RLX(p) …
#define KMP_ATOMIC_ST_REL(p, v) …
#define KMP_ATOMIC_ST_RLX(p, v) …
#define KMP_ATOMIC_ADD(p, v) …
#define KMP_ATOMIC_SUB(p, v) …
#define KMP_ATOMIC_AND(p, v) …
#define KMP_ATOMIC_OR(p, v) …
#define KMP_ATOMIC_INC(p) …
#define KMP_ATOMIC_DEC(p) …
#define KMP_ATOMIC_ADD_RLX(p, v) …
#define KMP_ATOMIC_INC_RLX(p) …
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) { … }
template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) { … }
template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) { … }
#if KMP_OS_WINDOWS
extern void *__kmp_lookup_symbol(const char *name, bool next = false);
#define KMP_DLSYM …
#define KMP_DLSYM_NEXT …
#elif KMP_OS_WASI || KMP_OS_EMSCRIPTEN
#define KMP_DLSYM …
#define KMP_DLSYM_NEXT …
#else
#define KMP_DLSYM(name) …
#define KMP_DLSYM_NEXT(name) …
#endif
#ifndef __has_builtin
#define __has_builtin …
#endif
#if __has_builtin(__builtin_unreachable) || defined(__GNUC__)
#define KMP_BUILTIN_UNREACHABLE …
#elif defined(_MSC_VER)
#define KMP_BUILTIN_UNREACHABLE …
#else
#define KMP_BUILTIN_UNREACHABLE
#endif
#endif