#define pr_fmt(fmt) …
#include <linux/atomic.h>
#include <linux/bug.h>
#include <linux/delay.h>
#include <linux/export.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/minmax.h>
#include <linux/moduleparam.h>
#include <linux/percpu.h>
#include <linux/preempt.h>
#include <linux/sched.h>
#include <linux/string.h>
#include <linux/uaccess.h>
#include "encoding.h"
#include "kcsan.h"
#include "permissive.h"
/*
 * Runtime-tunable parameters controlling when and how the KCSAN runtime
 * samples memory accesses. Initializer values are elided in this view;
 * presumably derived from the corresponding CONFIG_KCSAN_* Kconfig
 * options — confirm against Kconfig help text.
 */
static bool kcsan_early_enable = … IS_ENABLED(…);
unsigned int kcsan_udelay_task = …;      /* watchpoint stall (us) in task context */
unsigned int kcsan_udelay_interrupt = …; /* watchpoint stall (us) in IRQ context */
static long kcsan_skip_watch = …;        /* accesses to skip between watchpoints */
static bool kcsan_interrupt_watcher = … IS_ENABLED(…);
/*
 * Re-define the module parameter prefix so the params above are exposed
 * under a single namespace (presumably "kcsan." — value elided; verify).
 */
#ifdef MODULE_PARAM_PREFIX
#undef MODULE_PARAM_PREFIX
#endif
#define MODULE_PARAM_PREFIX …
module_param_named(early_enable, kcsan_early_enable, bool, 0);
module_param_named(udelay_task, kcsan_udelay_task, uint, 0644);
module_param_named(udelay_interrupt, kcsan_udelay_interrupt, uint, 0644);
module_param_named(skip_watch, kcsan_skip_watch, long, 0644);
/* 0444: interrupt_watcher is read-only at runtime, boot-time only. */
module_param_named(interrupt_watcher, kcsan_interrupt_watcher, bool, 0444);
#ifdef CONFIG_KCSAN_WEAK_MEMORY
static bool kcsan_weak_memory = …;
module_param_named(weak_memory, kcsan_weak_memory, bool, 0644);
#else
/* Without weak-memory modeling support, fold the flag to a constant. */
#define kcsan_weak_memory …
#endif
/* Global on/off switch for the runtime; set from kcsan_init()/debugfs. */
bool kcsan_enabled;
/* Per-CPU KCSAN context used when not in task context (initializer elided). */
static DEFINE_PER_CPU(struct kcsan_ctx, kcsan_cpu_ctx) = …;
/* Map an address-derived slot plus probe offset into the watchpoint array. */
#define SLOT_IDX(slot, i) …
/* Variant of SLOT_IDX for the fast path (expansion elided). */
#define SLOT_IDX_FAST(slot, i) …
/*
 * The watchpoint table. Sized with NUM_SLOTS-1 extra entries so that a
 * slot plus its neighboring probe offsets never indexes out of bounds.
 */
static atomic_long_t watchpoints[CONFIG_KCSAN_NUM_WATCHPOINTS + NUM_SLOTS-1];
/* Per-CPU countdown of accesses to skip before setting the next watchpoint. */
static DEFINE_PER_CPU(long, kcsan_skip);
/* Per-CPU PRNG state for randomized skip counts / delays. */
static DEFINE_PER_CPU(u32, kcsan_rand_state);
/*
 * Look up an existing watchpoint that covers [addr, addr+size) with a
 * conflicting access type. On a hit, the encoded slot value is returned
 * through @encoded_watchpoint. __always_inline: this runs on every
 * instrumented access (fast path). Body elided — presumably probes the
 * neighboring SLOT_IDX_FAST slots and returns NULL on no match; confirm.
 */
static __always_inline atomic_long_t *find_watchpoint(unsigned long addr,
size_t size,
bool expect_write,
long *encoded_watchpoint)
{ … }
/*
 * Claim a free slot in watchpoints[] and store the encoded (addr, size,
 * is_write) triple. Slow path (only when we decide to watch an access).
 * NOTE(review): return value on table-full case not visible — presumably
 * NULL; confirm before relying on it.
 */
static inline atomic_long_t *
insert_watchpoint(unsigned long addr, size_t size, bool is_write)
{ … }
/*
 * Atomically consume a watchpoint previously found via find_watchpoint()
 * (racing consumers: only one succeeds). Fast path, hence __always_inline.
 */
static __always_inline bool
try_consume_watchpoint(atomic_long_t *watchpoint, long encoded_watchpoint)
{ … }
/* Consume our own watchpoint after the stall window (slow path variant). */
static inline bool consume_watchpoint(atomic_long_t *watchpoint)
{ … }
/* Free the slot so it can be reused by a later insert_watchpoint(). */
static inline void remove_watchpoint(atomic_long_t *watchpoint)
{ … }
/*
 * Return the active kcsan_ctx: task context when in a task, otherwise the
 * per-CPU kcsan_cpu_ctx — body elided; confirm the in_task() split.
 */
static __always_inline struct kcsan_ctx *get_ctx(void)
{ … }
/* Forward declaration: check_access() and the helpers below are mutually
 * dependent (kcsan_check_scoped_accesses() re-checks via check_access()). */
static __always_inline void
check_access(const volatile void *ptr, size_t size, int type, unsigned long ip);
/*
 * Re-validate all scoped accesses registered on the current context
 * (ASSERT_EXCLUSIVE-style). noinline: keeps this off the common fast path.
 */
static noinline void kcsan_check_scoped_accesses(void)
{ … }
/*
 * Decide whether this access should be treated as atomic (and therefore
 * not raced-against): consults ctx flags such as atomic_nest_count /
 * atomic_next set by the kcsan_*_atomic_* API below — body elided.
 */
static __always_inline bool
is_atomic(struct kcsan_ctx *ctx, const volatile void *ptr, size_t size, int type)
{ … }
/*
 * Sampling decision: should a watchpoint be set up for this access?
 * Presumably combines the per-CPU kcsan_skip countdown with is_atomic();
 * confirm against the elided body.
 */
static __always_inline bool
should_watch(struct kcsan_ctx *ctx, const volatile void *ptr, size_t size, int type)
{ … }
/* Bounded pseudo-random number in [0, ep_ro) from kcsan_rand_state. */
static u32 kcsan_prandom_u32_max(u32 ep_ro)
{ … }
/* Re-arm the per-CPU skip counter (randomized around kcsan_skip_watch). */
static inline void reset_kcsan_skip(void)
{ … }
/* Combined check of the global kcsan_enabled switch and per-ctx disable
 * count — gate for all instrumentation work. */
static __always_inline bool kcsan_is_enabled(struct kcsan_ctx *ctx)
{ … }
/*
 * Stall after arming a watchpoint, giving other CPUs a window to hit it.
 * Delay length differs by context (kcsan_udelay_task vs. _interrupt).
 */
static void delay_access(int type)
{ … }
/*
 * Read up to 8 bytes from @ptr as a u64 for before/after value comparison
 * of a watched access. NOTE(review): behavior for sizes other than
 * 1/2/4/8 not visible in this view — confirm.
 */
static __always_inline u64 read_instrumented_memory(const volatile void *ptr, size_t size)
{ … }
/* Save the task's irqtrace state so KCSAN's own reporting machinery does
 * not corrupt lockdep's IRQ-tracing bookkeeping. */
void kcsan_save_irqtrace(struct task_struct *task)
{ … }
/* Restore irqtrace state saved by kcsan_save_irqtrace(). */
void kcsan_restore_irqtrace(struct task_struct *task)
{ … }
/* Current function-entry nesting depth (used by the weak-memory modeling
 * __tsan_func_entry/exit hooks below). */
static __always_inline int get_kcsan_stack_depth(void)
{ … }
/* Adjust the nesting depth by @val (+1 on entry, -1 on exit). */
static __always_inline void add_kcsan_stack_depth(int val)
{ … }
/*
 * Return the context's reorder_access slot used for weak-memory modeling,
 * or presumably NULL when CONFIG_KCSAN_WEAK_MEMORY is off — confirm.
 */
static __always_inline struct kcsan_scoped_access *get_reorder_access(struct kcsan_ctx *ctx)
{ … }
/*
 * Check whether this access matches a previously recorded "reorderable"
 * access, in which case it is re-checked as if delayed past later code.
 */
static __always_inline bool
find_reorder_access(struct kcsan_ctx *ctx, const volatile void *ptr, size_t size,
int type, unsigned long ip)
{ … }
/* Record this access as a candidate for simulated reordering (weak-memory
 * modeling); no-op semantics when weak memory is disabled — confirm. */
static inline void
set_reorder_access(struct kcsan_ctx *ctx, const volatile void *ptr, size_t size,
int type, unsigned long ip)
{ … }
/*
 * Slow path taken when the current access hits another CPU's armed
 * watchpoint: consume it and report the data race. noinline keeps the
 * report path out of the inlined fast path.
 */
static noinline void kcsan_found_watchpoint(const volatile void *ptr,
size_t size,
int type,
unsigned long ip,
atomic_long_t *watchpoint,
long encoded_watchpoint)
{ … }
/*
 * Slow path taken when should_watch() sampled this access: arm a
 * watchpoint, stall via delay_access(), then compare the value read
 * before and after the stall to detect a racing write. Body elided;
 * the intricate ordering here is exactly why it is left untouched.
 */
static noinline void
kcsan_setup_watchpoint(const volatile void *ptr, size_t size, int type, unsigned long ip)
{ … }
/*
 * The central fast-path entry: every instrumented access funnels through
 * here. Dispatches to kcsan_found_watchpoint() on a watchpoint hit or to
 * kcsan_setup_watchpoint() when sampling decides to watch this access.
 */
static __always_inline void
check_access(const volatile void *ptr, size_t size, int type, unsigned long ip)
{ … }
/*
 * Boot-time initialization of the KCSAN runtime; presumably seeds the
 * per-CPU PRNG state and enables KCSAN if kcsan_early_enable is set —
 * body elided, confirm. __init: discarded after boot.
 */
void __init kcsan_init(void)
{ … }
/* Disable KCSAN checking for the current context (nestable counter). */
void kcsan_disable_current(void)
{ … }
EXPORT_SYMBOL(…);
/* Re-enable after kcsan_disable_current(); presumably warns on underflow
 * (contrast with the _nowarn variant below) — confirm. */
void kcsan_enable_current(void)
{ … }
EXPORT_SYMBOL(…);
/* Like kcsan_enable_current(), but silently tolerates unbalanced calls. */
void kcsan_enable_current_nowarn(void)
{ … }
EXPORT_SYMBOL(…);
/* Begin a nestable region whose accesses are treated as atomic. */
void kcsan_nestable_atomic_begin(void)
{ … }
EXPORT_SYMBOL(…);
/* End a region opened by kcsan_nestable_atomic_begin(). */
void kcsan_nestable_atomic_end(void)
{ … }
EXPORT_SYMBOL(…);
/* Begin a non-nesting (flat) atomic region. */
void kcsan_flat_atomic_begin(void)
{ … }
EXPORT_SYMBOL(…);
/* End the flat atomic region. */
void kcsan_flat_atomic_end(void)
{ … }
EXPORT_SYMBOL(…);
/* Treat the next @n accesses in this context as atomic. */
void kcsan_atomic_next(int n)
{ … }
EXPORT_SYMBOL(…);
/* Restrict race detection on subsequent accesses to the bits in @mask
 * (used by bit-granular assertions). */
void kcsan_set_access_mask(unsigned long mask)
{ … }
EXPORT_SYMBOL(…);
/*
 * Register a scoped access on the current context: the range is then
 * re-checked on every subsequent access until kcsan_end_scoped_access().
 * Caller provides storage via @sa; returns it for convenience — confirm
 * against the elided body.
 */
struct kcsan_scoped_access *
kcsan_begin_scoped_access(const volatile void *ptr, size_t size, int type,
struct kcsan_scoped_access *sa)
{ … }
EXPORT_SYMBOL(…);
/* Unregister a scoped access registered by kcsan_begin_scoped_access(). */
void kcsan_end_scoped_access(struct kcsan_scoped_access *sa)
{ … }
EXPORT_SYMBOL(…);
/* Explicit (non-compiler-instrumented) access check, used by the
 * kcsan_check_*() / ASSERT_EXCLUSIVE_*() macros. */
void __kcsan_check_access(const volatile void *ptr, size_t size, int type)
{ … }
EXPORT_SYMBOL(…);
/*
 * Instrumented memory barriers: each DEFINE_MEMORY_BARRIER() invocation
 * (expansion elided) presumably emits a __kcsan_mb/wmb/rmb/release-style
 * hook used for weak-memory modeling — confirm against kcsan.h.
 */
#define DEFINE_MEMORY_BARRIER(name, order_before_cond) …
DEFINE_MEMORY_BARRIER(…);
DEFINE_MEMORY_BARRIER(…);
DEFINE_MEMORY_BARRIER(…);
DEFINE_MEMORY_BARRIER(…);
/*
 * Compiler-inserted (TSan ABI) plain read/write hooks, one pair per
 * access size; the expansion presumably forwards to check_access().
 */
#define DEFINE_TSAN_READ_WRITE(size) …
DEFINE_TSAN_READ_WRITE(…);
DEFINE_TSAN_READ_WRITE(…);
DEFINE_TSAN_READ_WRITE(…);
DEFINE_TSAN_READ_WRITE(…);
DEFINE_TSAN_READ_WRITE(…);
/* TSan ABI hook for a read of an arbitrary byte range. */
void __tsan_read_range(void *ptr, size_t size);
void __tsan_read_range(void *ptr, size_t size)
{ … }
EXPORT_SYMBOL(…);
/* TSan ABI hook for a write of an arbitrary byte range. */
void __tsan_write_range(void *ptr, size_t size);
void __tsan_write_range(void *ptr, size_t size)
{ … }
EXPORT_SYMBOL(…);
/* volatile-qualified access hooks, one pair per access size. */
#define DEFINE_TSAN_VOLATILE_READ_WRITE(size) …
DEFINE_TSAN_VOLATILE_READ_WRITE(…);
DEFINE_TSAN_VOLATILE_READ_WRITE(…);
DEFINE_TSAN_VOLATILE_READ_WRITE(…);
DEFINE_TSAN_VOLATILE_READ_WRITE(…);
DEFINE_TSAN_VOLATILE_READ_WRITE(…);
/* Function entry hook: tracks nesting depth for weak-memory modeling
 * (see get_kcsan_stack_depth()). noinline: must remain a real call. */
void __tsan_func_entry(void *call_pc);
noinline void __tsan_func_entry(void *call_pc)
{ … }
EXPORT_SYMBOL(…);
/* Function exit hook, paired with __tsan_func_entry(). */
void __tsan_func_exit(void);
noinline void __tsan_func_exit(void)
{ … }
EXPORT_SYMBOL(…);
/* TSan ABI requires this symbol; presumably a no-op here — confirm. */
void __tsan_init(void);
void __tsan_init(void)
{ … }
EXPORT_SYMBOL(…);
/* Translate a __ATOMIC_* memorder from instrumented builtins into the
 * corresponding KCSAN bookkeeping (expansion elided). */
static __always_inline void kcsan_atomic_builtin_memorder(int memorder)
{ … }
/*
 * TSan ABI hooks for instrumented __atomic_* builtins: loads/stores,
 * read-modify-write ops, and compare-exchange, stamped out per bit width.
 * Expansions elided; presumably each checks the access via check_access()
 * and performs the real atomic operation — confirm.
 */
#define DEFINE_TSAN_ATOMIC_LOAD_STORE(bits) …
#define DEFINE_TSAN_ATOMIC_RMW(op, bits, suffix) …
#define DEFINE_TSAN_ATOMIC_CMPXCHG(bits, strength, weak) …
#define DEFINE_TSAN_ATOMIC_CMPXCHG_VAL(bits) …
#define DEFINE_TSAN_ATOMIC_OPS(bits) …
DEFINE_TSAN_ATOMIC_OPS(…);
DEFINE_TSAN_ATOMIC_OPS(…);
DEFINE_TSAN_ATOMIC_OPS(…);
#ifdef CONFIG_64BIT
/* 64-bit atomics only exist natively on 64-bit kernels. */
DEFINE_TSAN_ATOMIC_OPS(…);
#endif
/* Hook for __atomic_thread_fence(memorder). */
void __tsan_atomic_thread_fence(int memorder);
void __tsan_atomic_thread_fence(int memorder)
{ … }
EXPORT_SYMBOL(…);
/* Hook for __atomic_signal_fence(memorder); noinline so the compiler
 * cannot elide the call. */
void __tsan_atomic_signal_fence(int memorder);
noinline void __tsan_atomic_signal_fence(int memorder)
{ … }
EXPORT_SYMBOL(…);
/*
 * Instrumented mem*() interceptors. With an arch-specific implementation
 * (__HAVE_ARCH_*), provide a wrapper that presumably checks the range and
 * forwards; otherwise alias the plain C implementation directly, which is
 * already instrumented — confirm against the elided wrapper bodies.
 */
#ifdef __HAVE_ARCH_MEMSET
void *__tsan_memset(void *s, int c, size_t count);
noinline void *__tsan_memset(void *s, int c, size_t count)
{ … }
#else
void *__tsan_memset(void *s, int c, size_t count) __alias(memset);
#endif
EXPORT_SYMBOL(…);
#ifdef __HAVE_ARCH_MEMMOVE
void *__tsan_memmove(void *dst, const void *src, size_t len);
noinline void *__tsan_memmove(void *dst, const void *src, size_t len)
{ … }
#else
void *__tsan_memmove(void *dst, const void *src, size_t len) __alias(memmove);
#endif
EXPORT_SYMBOL(…);
#ifdef __HAVE_ARCH_MEMCPY
void *__tsan_memcpy(void *dst, const void *src, size_t len);
noinline void *__tsan_memcpy(void *dst, const void *src, size_t len)
{ … }
#else
void *__tsan_memcpy(void *dst, const void *src, size_t len) __alias(memcpy);
#endif
EXPORT_SYMBOL(…);