#ifndef _ASM_X86_NOSPEC_BRANCH_H_
#define _ASM_X86_NOSPEC_BRANCH_H_
#include <linux/static_key.h>
#include <linux/objtool.h>
#include <linux/linkage.h>
#include <asm/alternative.h>
#include <asm/cpufeatures.h>
#include <asm/msr-index.h>
#include <asm/unwind_hints.h>
#include <asm/percpu.h>
#include <asm/current.h>
#define RET_DEPTH_SHIFT …
#define RSB_RET_STUFF_LOOPS …
#define RET_DEPTH_INIT …
#define RET_DEPTH_INIT_FROM_CALL …
#define RET_DEPTH_CREDIT …
#ifdef CONFIG_CALL_THUNKS_DEBUG
#define CALL_THUNKS_DEBUG_INC_CALLS …
#define CALL_THUNKS_DEBUG_INC_RETS …
#define CALL_THUNKS_DEBUG_INC_STUFFS …
#define CALL_THUNKS_DEBUG_INC_CTXSW …
#else
#define CALL_THUNKS_DEBUG_INC_CALLS
#define CALL_THUNKS_DEBUG_INC_RETS
#define CALL_THUNKS_DEBUG_INC_STUFFS
#define CALL_THUNKS_DEBUG_INC_CTXSW
#endif
#if defined(CONFIG_MITIGATION_CALL_DEPTH_TRACKING) && !defined(COMPILE_OFFSETS)
#include <asm/asm-offsets.h>
#define CREDIT_CALL_DEPTH …
#define RESET_CALL_DEPTH …
#define RESET_CALL_DEPTH_FROM_CALL …
#define INCREMENT_CALL_DEPTH …
#else
#define CREDIT_CALL_DEPTH
#define RESET_CALL_DEPTH
#define RESET_CALL_DEPTH_FROM_CALL
#define INCREMENT_CALL_DEPTH
#endif
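/*
 * High-level sketch of the call depth accounting used above (the exact
 * encodings live in the elided macros): a per-CPU counter approximates
 * the current call nesting depth.  INCREMENT_CALL_DEPTH accounts each
 * call, RESET_CALL_DEPTH* re-arms the counter on kernel entry, and
 * CREDIT_CALL_DEPTH grants full credit again once the RSB has been
 * refilled.  When returns exhaust the tracked depth, the call depth
 * return thunk stuffs the RSB with RSB_RET_STUFF_LOOPS dummy entries so
 * a return stack underflow cannot be redirected by an attacker.
 */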
#define RETPOLINE_THUNK_SIZE …
#define RSB_CLEAR_LOOPS …
#define __FILL_RETURN_SLOT …
#ifdef CONFIG_X86_64
#define __FILL_RETURN_BUFFER(reg, nr) …
#else
#define __FILL_RETURN_BUFFER(reg, nr) …
#endif
#define __FILL_ONE_RETURN …
#ifdef __ASSEMBLY__
.macro ANNOTATE_RETPOLINE_SAFE
.Lhere_\@:
.pushsection .discard.retpoline_safe
.long .Lhere_\@
.popsection
.endm
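/*
 * Illustrative use (not part of this header): mark an intentionally
 * unprotected indirect branch so objtool does not flag it under
 * CONFIG_MITIGATION_RETPOLINE:
 *
 *	ANNOTATE_RETPOLINE_SAFE
 *	jmp	*%rax
 */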
#define ANNOTATE_UNRET_SAFE …
.macro VALIDATE_UNRET_END
#if defined(CONFIG_NOINSTR_VALIDATION) && \
(defined(CONFIG_MITIGATION_UNRET_ENTRY) || defined(CONFIG_MITIGATION_SRSO))
ANNOTATE_RETPOLINE_SAFE
nop
#endif
.endm
.macro __CS_PREFIX reg:req
.irp rs,r8,r9,r10,r11,r12,r13,r14,r15
.ifc \reg,\rs
.byte 0x2e
.endif
.endr
.endm
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_MITIGATION_RETPOLINE
__CS_PREFIX \reg
jmp __x86_indirect_thunk_\reg
#else
jmp *%\reg
int3
#endif
.endm
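/*
 * Illustrative use: tail-call through a register, retpoline-protected
 * when CONFIG_MITIGATION_RETPOLINE is enabled.  The register is named
 * without the '%' prefix because it is pasted into the thunk symbol:
 *
 *	JMP_NOSPEC rdi
 */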
.macro CALL_NOSPEC reg:req
#ifdef CONFIG_MITIGATION_RETPOLINE
__CS_PREFIX \reg
call __x86_indirect_thunk_\reg
#else
call *%\reg
#endif
.endm
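/*
 * Illustrative use: indirect call from assembly.  With retpolines this
 * expands to "call __x86_indirect_thunk_<reg>", otherwise to a plain
 * "call *%<reg>":
 *
 *	CALL_NOSPEC rbx
 */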
.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2=ALT_NOT(X86_FEATURE_ALWAYS)
ALTERNATIVE_2 "jmp .Lskip_rsb_\@", \
__stringify(__FILL_RETURN_BUFFER(\reg,\nr)), \ftr, \
__stringify(nop;nop;__FILL_ONE_RETURN), \ftr2
.Lskip_rsb_\@:
.endm
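/*
 * Illustrative use (modelled on the context switch path, not part of
 * this header): overwrite the RSB with RSB_CLEAR_LOOPS dummy entries,
 * clobbering \reg as a loop counter, only when the given feature bit(s)
 * are enabled:
 *
 *	FILL_RETURN_BUFFER %r12, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_CTXSW
 */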
.macro CALL_UNTRAIN_RET
#if defined(CONFIG_MITIGATION_UNRET_ENTRY) || defined(CONFIG_MITIGATION_SRSO)
ALTERNATIVE_2 "", "call entry_untrain_ret", X86_FEATURE_UNRET, \
"call srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS
#endif
.endm
.macro __UNTRAIN_RET ibpb_feature, call_depth_insns
#if defined(CONFIG_MITIGATION_RETHUNK) || defined(CONFIG_MITIGATION_IBPB_ENTRY)
VALIDATE_UNRET_END
CALL_UNTRAIN_RET
ALTERNATIVE_2 "", \
"call entry_ibpb", \ibpb_feature, \
__stringify(\call_depth_insns), X86_FEATURE_CALL_DEPTH
#endif
.endm
#define UNTRAIN_RET …
#define UNTRAIN_RET_VM …
#define UNTRAIN_RET_FROM_CALL …
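/*
 * Illustrative placement (a sketch, not taken verbatim from entry code):
 * the UNTRAIN_RET* flavours are meant to run early on kernel entry or VM
 * exit, after the registers have been saved but before the first RET
 * that could be influenced by user or guest controlled branch history,
 * e.g.:
 *
 *	PUSH_AND_CLEAR_REGS
 *	UNTRAIN_RET
 */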
.macro CALL_DEPTH_ACCOUNT
#ifdef CONFIG_MITIGATION_CALL_DEPTH_TRACKING
ALTERNATIVE "", \
__stringify(INCREMENT_CALL_DEPTH), X86_FEATURE_CALL_DEPTH
#endif
.endm
.macro CLEAR_CPU_BUFFERS
ALTERNATIVE "", __stringify(verw _ASM_RIP(mds_verw_sel)), X86_FEATURE_CLEAR_CPU_BUF
.endm
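/*
 * Illustrative placement: CLEAR_CPU_BUFFERS issues VERW to flush CPU
 * buffers (MDS and related issues) and is intended to be one of the last
 * instructions before returning to user space or entering a guest, e.g.:
 *
 *	CLEAR_CPU_BUFFERS
 *	iretq
 */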
#ifdef CONFIG_X86_64
.macro CLEAR_BRANCH_HISTORY
ALTERNATIVE "", "call clear_bhb_loop", X86_FEATURE_CLEAR_BHB_LOOP
.endm
.macro CLEAR_BRANCH_HISTORY_VMEXIT
ALTERNATIVE "", "call clear_bhb_loop", X86_FEATURE_CLEAR_BHB_LOOP_ON_VMEXIT
.endm
#else
#define CLEAR_BRANCH_HISTORY
#define CLEAR_BRANCH_HISTORY_VMEXIT
#endif
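/*
 * CLEAR_BRANCH_HISTORY runs the clear_bhb_loop software sequence on
 * 64-bit kernel entry and CLEAR_BRANCH_HISTORY_VMEXIT does the same on
 * VM exit, each gated by its feature bit; both are no-ops on 32-bit.
 */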
#else
#define ANNOTATE_RETPOLINE_SAFE …
typedef u8 retpoline_thunk_t[RETPOLINE_THUNK_SIZE];
extern retpoline_thunk_t __x86_indirect_thunk_array[];
extern retpoline_thunk_t __x86_indirect_call_thunk_array[];
extern retpoline_thunk_t __x86_indirect_jump_thunk_array[];
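/*
 * The thunk arrays above are laid out as one RETPOLINE_THUNK_SIZE-byte
 * entry per register; users such as the alternatives code and the BPF
 * JIT index into them by register number to find the thunk for a given
 * destination register.
 */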
#ifdef CONFIG_MITIGATION_RETHUNK
extern void __x86_return_thunk(void);
#else
static inline void __x86_return_thunk(void) {}
#endif
#ifdef CONFIG_MITIGATION_UNRET_ENTRY
extern void retbleed_return_thunk(void);
#else
static inline void retbleed_return_thunk(void) {}
#endif
extern void srso_alias_untrain_ret(void);
#ifdef CONFIG_MITIGATION_SRSO
extern void srso_return_thunk(void);
extern void srso_alias_return_thunk(void);
#else
static inline void srso_return_thunk(void) {}
static inline void srso_alias_return_thunk(void) {}
#endif
extern void entry_untrain_ret(void);
extern void entry_ibpb(void);
#ifdef CONFIG_X86_64
extern void clear_bhb_loop(void);
#endif
extern void (*x86_return_thunk)(void);
extern void __warn_thunk(void);
#ifdef CONFIG_MITIGATION_CALL_DEPTH_TRACKING
extern void call_depth_return_thunk(void);
#define CALL_DEPTH_ACCOUNT …
#ifdef CONFIG_CALL_THUNKS_DEBUG
DECLARE_PER_CPU(u64, __x86_call_count);
DECLARE_PER_CPU(u64, __x86_ret_count);
DECLARE_PER_CPU(u64, __x86_stuffs_count);
DECLARE_PER_CPU(u64, __x86_ctxsw_count);
#endif
#else
static inline void call_depth_return_thunk(void) {}
#define CALL_DEPTH_ACCOUNT …
#endif
#ifdef CONFIG_MITIGATION_RETPOLINE
#define GEN …
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN …
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN …
#include <asm/GEN-for-each-reg.h>
#undef GEN
#ifdef CONFIG_X86_64
#define CALL_NOSPEC …
#define THUNK_TARGET(addr) …
#else
#define CALL_NOSPEC …
#define THUNK_TARGET(addr) …
#endif
#else
#define CALL_NOSPEC …
#define THUNK_TARGET(addr) …
#endif
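/*
 * Illustrative use of the C-side CALL_NOSPEC/THUNK_TARGET pair (a
 * sketch; "ret", "fn" and "arg" are placeholders, not names from this
 * header): issue a retpoline-aware indirect call from inline assembly,
 * passing the destination through the "thunk_target" operand:
 *
 *	asm volatile(CALL_NOSPEC
 *		     : "=a" (ret)
 *		     : THUNK_TARGET(fn), "D" (arg)
 *		     : "memory");
 */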
enum spectre_v2_mitigation { … };
enum spectre_v2_user_mitigation { … };
enum ssb_mitigation { … };
static __always_inline
void alternative_msr_write(unsigned int msr, u64 val, unsigned int feature)
{ … }
extern u64 x86_pred_cmd;
static inline void indirect_branch_prediction_barrier(void)
{ … }
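/*
 * indirect_branch_prediction_barrier() issues an IBPB through the
 * prediction-command MSR when the IBPB feature is in use; callers such
 * as the context switch and vCPU load paths rely on it to keep one
 * task's or guest's indirect branch predictions from steering another's.
 */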
extern u64 x86_spec_ctrl_base;
DECLARE_PER_CPU(u64, x86_spec_ctrl_current);
extern void update_spec_ctrl_cond(u64 val);
extern u64 spec_ctrl_current(void);
#define firmware_restrict_branch_speculation_start() …
#define firmware_restrict_branch_speculation_end() …
DECLARE_STATIC_KEY_FALSE(switch_to_cond_stibp);
DECLARE_STATIC_KEY_FALSE(switch_mm_cond_ibpb);
DECLARE_STATIC_KEY_FALSE(switch_mm_always_ibpb);
DECLARE_STATIC_KEY_FALSE(mds_idle_clear);
DECLARE_STATIC_KEY_FALSE(switch_mm_cond_l1d_flush);
DECLARE_STATIC_KEY_FALSE(mmio_stale_data_clear);
extern u16 mds_verw_sel;
#include <asm/segment.h>
static __always_inline void mds_clear_cpu_buffers(void)
{ … }
static __always_inline void mds_idle_clear_cpu_buffers(void)
{ … }
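/*
 * mds_clear_cpu_buffers() executes VERW on a kernel data segment
 * selector, which flushes the affected CPU buffers as documented for the
 * MDS mitigations; mds_idle_clear_cpu_buffers() does the same on the
 * idle path, gated by the mds_idle_clear static key declared above.
 */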
#endif
#endif