/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __LINUX_COMPILER_H
#define __LINUX_COMPILER_H

#include <linux/compiler_types.h>

#ifndef __ASSEMBLY__

#ifdef __KERNEL__

/*
 * Note: DISABLE_BRANCH_PROFILING can be used by special low-level code
 * to disable branch tracing on a per-file basis.
 */
#if defined(CONFIG_TRACE_BRANCH_PROFILING) \
    && !defined(DISABLE_BRANCH_PROFILING) && !defined(__CHECKER__)
void ftrace_likely_update(struct ftrace_likely_data *f, int val,
			  int expect, int is_constant);

#define likely_notrace(x)	__builtin_expect(!!(x), 1)
#define unlikely_notrace(x)	__builtin_expect(!!(x), 0)

#define __branch_check__(x, expect, is_constant) ({		\
		long ______r;					\
		static struct ftrace_likely_data		\
			__aligned(4)				\
			__section("_ftrace_annotated_branch")	\
			______f = {				\
			.data.func = __func__,			\
			.data.file = __FILE__,			\
			.data.line = __LINE__,			\
		};						\
		______r = __builtin_expect(!!(x), expect);	\
		ftrace_likely_update(&______f, ______r,		\
				     expect, is_constant);	\
		______r;					\
	})

/*
 * Using __builtin_constant_p(x) to ignore cases where the return
 * value is always the same.  This idea is taken from a similar patch
 * written by Daniel Walker.
 */
# ifndef likely
#  define likely(x)	(__branch_check__(x, 1, __builtin_constant_p(x)))
# endif
# ifndef unlikely
#  define unlikely(x)	(__branch_check__(x, 0, __builtin_constant_p(x)))
# endif

#ifdef CONFIG_PROFILE_ALL_BRANCHES
/*
 * "Define 'is'", Bill Clinton
 * "Define 'if'", Steven Rostedt
 */
#define if(cond, ...) if ( __trace_if_var( !!(cond , ## __VA_ARGS__) ) )

#define __trace_if_var(cond) (__builtin_constant_p(cond) ? (cond) : __trace_if_value(cond))

#define __trace_if_value(cond) ({			\
	static struct ftrace_branch_data		\
		__aligned(4)				\
		__section("_ftrace_branch")		\
		__if_trace = {				\
			.func = __func__,		\
			.file = __FILE__,		\
			.line = __LINE__,		\
		};					\
	(cond) ?					\
		(__if_trace.miss_hit[1]++, 1) :		\
		(__if_trace.miss_hit[0]++, 0);		\
})

#endif /* CONFIG_PROFILE_ALL_BRANCHES */

#else
# define likely(x)	__builtin_expect(!!(x), 1)
# define unlikely(x)	__builtin_expect(!!(x), 0)
#endif
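
/*
 * Usage sketch (illustrative, not part of this header): likely() and
 * unlikely() wrap conditions whose outcome is strongly biased one way,
 * letting the compiler lay out the expected path as the fall-through case.
 * The function and field names below are hypothetical.
 *
 *	int example_submit(struct example_req *req)
 *	{
 *		if (unlikely(!req))
 *			return -EINVAL;
 *		if (likely(req->ready))
 *			return example_fast_path(req);
 *		return example_slow_path(req);
 *	}
 */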

/* Optimization barrier */
#ifndef barrier
/* The "volatile" is due to gcc bugs */
# define barrier() __asm__ __volatile__("": : :"memory")
#endif

#ifndef barrier_data
/*
 * This version exists, for example, to prevent dead-store elimination
 * on @ptr where gcc and llvm may behave differently when otherwise using
 * normal barrier(): while gcc behavior gets along with a normal
 * barrier(), llvm needs an explicit input variable to be assumed
 * clobbered. The issue is as follows: while the inline asm might
 * access any memory it wants, the compiler could have kept all of
 * @ptr in registers instead, and since @ptr never escaped from there,
 * it could prove that the inline asm wasn't touching any of it. This
 * version works well with both compilers, i.e. we're telling the
 * compiler that the inline asm absolutely may see the contents
 * of @ptr. See also: https://llvm.org/bugs/show_bug.cgi?id=15495
 */
# define barrier_data(ptr) __asm__ __volatile__("": :"r"(ptr) :"memory")
#endif
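
/*
 * Usage sketch (illustrative, not part of this header): barrier_data() is
 * typically used to keep the compiler from eliding a memset() that wipes
 * sensitive data in a buffer which is dead afterwards; see
 * memzero_explicit() in lib/string.c for the in-tree user.  The buffer
 * name below is hypothetical.
 *
 *	u8 example_key[32];
 *
 *	// derive and use example_key, then:
 *	memset(example_key, 0, sizeof(example_key));
 *	barrier_data(example_key);	// the wipe must not be optimized away
 */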

/* workaround for GCC PR82365 if needed */
#ifndef barrier_before_unreachable
# define barrier_before_unreachable() do { } while (0)
#endif

/* Unreachable code */
#ifdef CONFIG_STACK_VALIDATION
/*
 * These macros help objtool understand GCC code flow for unreachable code.
 * The __COUNTER__ based labels are a hack to make each instance of the macros
 * unique, to convince GCC not to merge duplicate inline asm statements.
 */
#define annotate_reachable() ({						\
	asm volatile("%c0:\n\t"						\
		     ".pushsection .discard.reachable\n\t"		\
		     ".long %c0b - .\n\t"				\
		     ".popsection\n\t" : : "i" (__COUNTER__));		\
})
#define annotate_unreachable() ({					\
	asm volatile("%c0:\n\t"						\
		     ".pushsection .discard.unreachable\n\t"		\
		     ".long %c0b - .\n\t"				\
		     ".popsection\n\t" : : "i" (__COUNTER__));		\
})
#define ASM_UNREACHABLE							\
	"999:\n\t"							\
	".pushsection .discard.unreachable\n\t"				\
	".long 999b - .\n\t"						\
	".popsection\n\t"

/* Annotate a C jump table to allow objtool to follow the code flow */
#define __annotate_jump_table __section(".rodata..c_jump_table")

#else
#define annotate_reachable()
#define annotate_unreachable()
#define __annotate_jump_table
#endif

#ifndef ASM_UNREACHABLE
# define ASM_UNREACHABLE
#endif
#ifndef unreachable
# define unreachable() do {		\
	annotate_unreachable();		\
	__builtin_unreachable();	\
} while (0)
#endif
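
/*
 * Usage sketch (illustrative, not part of this header): unreachable() tells
 * the compiler (and objtool) that control cannot get past this point, e.g.
 * after a switch statement that covers every possible value.  The enum and
 * function below are hypothetical.
 *
 *	int example_mode_to_bits(enum example_mode mode)
 *	{
 *		switch (mode) {
 *		case EXAMPLE_MODE_A:
 *			return 1;
 *		case EXAMPLE_MODE_B:
 *			return 2;
 *		}
 *		unreachable();
 *	}
 */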

/*
 * KENTRY - kernel entry point
 * This can be used to annotate symbols (functions or data) that are used
 * without their linker symbol being referenced explicitly. For example,
 * interrupt vector handlers, or functions in the kernel image that are found
 * programmatically.
 *
 * Not required for symbols exported with EXPORT_SYMBOL, or initcalls. Those
 * are handled in their own way (with KEEP() in linker scripts).
 *
 * KENTRY can be avoided if the symbols in question are marked as KEEP() in
 * the linker script. For example, an architecture could KEEP() its entire
 * boot/exception vector code rather than annotate each function and data.
 */
#ifndef KENTRY
# define KENTRY(sym)						\
	extern typeof(sym) sym;					\
	static const unsigned long __kentry_##sym		\
	__used							\
	__attribute__((__section__("___kentry+" #sym)))	\
	= (unsigned long)&sym;
#endif
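
/*
 * Usage sketch (illustrative, not part of this header): keep a symbol that
 * is only discovered at run time, never referenced by name from C, from
 * being discarded at link time.  The symbol below is hypothetical.
 *
 *	extern void example_vector_entry(void);
 *	KENTRY(example_vector_entry)
 */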

#ifndef RELOC_HIDE
# define RELOC_HIDE(ptr, off)					\
	({ unsigned long __ptr;					\
	   __ptr = (unsigned long) (ptr);			\
	   (typeof(ptr)) (__ptr + (off)); })
#endif

#define absolute_pointer(val)	RELOC_HIDE((void *)(val), 0)
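
/*
 * Usage sketch (illustrative, not part of this header): absolute_pointer()
 * turns a fixed address into a pointer without letting the compiler reason
 * about which object it points into, avoiding e.g. spurious array-bounds
 * warnings when peeking at legacy/firmware memory.  The address and buffer
 * below are hypothetical.
 *
 *	#define EXAMPLE_BIOS_BASE	0x000f0000UL
 *
 *	memcpy(buf, absolute_pointer(EXAMPLE_BIOS_BASE), 16);
 */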

#ifndef OPTIMIZER_HIDE_VAR
/* Make the optimizer believe the variable can be manipulated arbitrarily. */
#define OPTIMIZER_HIDE_VAR(var)					\
	__asm__ ("" : "=r" (var) : "0" (var))
#endif
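
/*
 * Usage sketch (illustrative, not part of this header): hide a value from
 * the optimizer so code cannot be specialised or short-circuited based on
 * it, e.g. to help keep a comparison constant-time.  The names below are
 * hypothetical.
 *
 *	u8 diff = 0;
 *	size_t i;
 *
 *	for (i = 0; i < len; i++)
 *		diff |= a[i] ^ b[i];
 *	OPTIMIZER_HIDE_VAR(diff);
 *	return diff ? -EBADMSG : 0;
 */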

/* Not-quite-unique ID. */
#ifndef __UNIQUE_ID
# define __UNIQUE_ID(prefix) __PASTE(__PASTE(__UNIQUE_ID_, prefix), __LINE__)
#endif

/**
 * data_race - mark an expression as containing intentional data races
 *
 * This data_race() macro is useful for situations in which data races
 * should be forgiven.  One example is diagnostic code that accesses
 * shared variables but is not a part of the core synchronization design.
 *
 * This macro *does not* affect normal code generation, but is a hint
 * to tooling that data races here are to be ignored.
 */
#define data_race(expr)						\
({								\
	__unqual_scalar_typeof(({ expr; })) __v = ({		\
		__kcsan_disable_current();			\
		expr;						\
	});							\
	__kcsan_enable_current();				\
	__v;							\
})
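
/*
 * Usage sketch (illustrative, not part of this header): read a statistics
 * counter that other CPUs update concurrently, where an occasionally stale
 * or torn value is acceptable and KCSAN should stay quiet.  The field name
 * below is hypothetical.
 *
 *	pr_debug("drops so far: %lu\n", data_race(stats->drops));
 */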

#endif /* __KERNEL__ */

/*
 * Force the compiler to emit 'sym' as a symbol, so that we can reference
 * it from inline assembler.  Necessary in case 'sym' could be inlined
 * otherwise, or eliminated entirely due to lack of references that are
 * visible to the compiler.
 */
#define __ADDRESSABLE(sym) \
	static void * __section(".discard.addressable") __used \
		__UNIQUE_ID(__PASTE(__addressable_,sym)) = (void *)&sym;
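
/*
 * Usage sketch (illustrative, not part of this header): force a symbol that
 * is only referenced from inline assembly or looked up at run time to be
 * emitted by the compiler.  The function name below is hypothetical.
 *
 *	void example_asm_target(void);
 *	__ADDRESSABLE(example_asm_target)
 */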

/**
 * offset_to_ptr - convert a relative memory offset to an absolute pointer
 * @off: the address of the 32-bit offset value
 */
static inline void *offset_to_ptr(const int *off)
{
	return (void *)((unsigned long)off + *off);
}
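
/*
 * Usage sketch (illustrative, not part of this header): tables of 32-bit
 * place-relative offsets (each entry stores "target - &entry") are turned
 * back into real pointers with offset_to_ptr().  The table name below is
 * hypothetical.
 *
 *	extern const int example_offsets[];
 *
 *	void *target = offset_to_ptr(&example_offsets[i]);
 */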

#endif /* __ASSEMBLY__ */

/* &a[0] degrades to a pointer: a different type from an array */
#define __must_be_array(a)	BUILD_BUG_ON_ZERO(__same_type((a), &(a)[0]))
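
/*
 * Usage sketch: __must_be_array() is what lets a macro such as ARRAY_SIZE()
 * in <linux/kernel.h> reject plain pointers at compile time, roughly:
 *
 *	#define ARRAY_SIZE(arr)	\
 *		(sizeof(arr) / sizeof((arr)[0]) + __must_be_array(arr))
 *
 * Passing a pointer instead of an array then trips BUILD_BUG_ON_ZERO()
 * and breaks the build.
 */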

/*
 * This is needed in functions which generate the stack canary, see
 * arch/x86/kernel/smpboot.c::start_secondary() for an example.
 */
#define prevent_tail_call_optimization()	mb()
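
/*
 * Usage sketch (illustrative, modelled on the start_secondary() reference
 * above): placed as the last statement of a function so that the call
 * immediately before it cannot be turned into a tail call.
 * example_enter_idle() below is hypothetical.
 *
 *	boot_init_stack_canary();
 *	example_enter_idle();
 *	prevent_tail_call_optimization();
 */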

#include <asm/rwonce.h>

#endif /* __LINUX_COMPILER_H */