#ifndef __LINUX_COMPILER_H
#define __LINUX_COMPILER_H

#include <linux/compiler_types.h>

#ifndef __ASSEMBLY__

#ifdef __KERNEL__

#if defined(CONFIG_TRACE_BRANCH_PROFILING) \
    && !defined(DISABLE_BRANCH_PROFILING) && !defined(__CHECKER__)
void ftrace_likely_update(struct ftrace_likely_data *f, int val,
			  int expect, int is_constant);

#define likely_notrace(x)	__builtin_expect(!!(x), 1)
#define unlikely_notrace(x)	__builtin_expect(!!(x), 0)

#define __branch_check__(x, expect, is_constant) ({		\
	long ______r;						\
	static struct ftrace_likely_data			\
		__aligned(4)					\
		__section("_ftrace_annotated_branch")		\
		______f = {					\
			.data.func = __func__,			\
			.data.file = __FILE__,			\
			.data.line = __LINE__,			\
		};						\
	______r = __builtin_expect(!!(x), expect);		\
	ftrace_likely_update(&______f, ______r,			\
			     expect, is_constant);		\
	______r;						\
})

# ifndef likely
#  define likely(x)	(__branch_check__(x, 1, __builtin_constant_p(x)))
# endif
# ifndef unlikely
#  define unlikely(x)	(__branch_check__(x, 0, __builtin_constant_p(x)))
# endif

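/*
 * Illustrative sketch (not part of this header's API beyond likely() and
 * unlikely() themselves): the hints wrap a condition so the compiler can
 * favour the expected path, and with CONFIG_TRACE_BRANCH_PROFILING each
 * annotated site also gets a static ftrace_likely_data record (emitted into
 * "_ftrace_annotated_branch") that ftrace_likely_update() updates at run
 * time.  A hypothetical caller:
 *
 *	void *buf = kmalloc(len, GFP_KERNEL);
 *
 *	if (unlikely(!buf))		// expected to be false almost always
 *		return -ENOMEM;
 */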
#ifdef CONFIG_PROFILE_ALL_BRANCHES

#define if(cond, ...) if ( __trace_if_var( !!(cond , ## __VA_ARGS__) ) )

#define __trace_if_var(cond) (__builtin_constant_p(cond) ? (cond) : __trace_if_value(cond))

#define __trace_if_value(cond) ({			\
	static struct ftrace_branch_data		\
		__aligned(4)				\
		__section("_ftrace_branch")		\
		__if_trace = {				\
			.func = __func__,		\
			.file = __FILE__,		\
			.line = __LINE__,		\
		};					\
	(cond) ?					\
		(__if_trace.miss_hit[1]++, 1) :		\
		(__if_trace.miss_hit[0]++, 0);		\
})
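
/*
 * Rough sketch of what the redefined 'if' does (illustrative only): every
 * plain C 'if' in a CONFIG_PROFILE_ALL_BRANCHES build gains a per-site
 * counter pair, unless the condition is a compile-time constant.  So a
 * hypothetical
 *
 *	if (ret < 0)
 *		goto out;
 *
 * behaves as if written
 *
 *	if (__trace_if_var(!!(ret < 0)))
 *		goto out;
 *
 * where the taken path bumps __if_trace.miss_hit[1] and the not-taken path
 * bumps __if_trace.miss_hit[0].  The variadic (cond, ...) form keeps
 * conditions containing unparenthesised commas from breaking the macro.
 */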

#endif /* CONFIG_PROFILE_ALL_BRANCHES */

#else
# define likely(x)		__builtin_expect(!!(x), 1)
# define unlikely(x)		__builtin_expect(!!(x), 0)
# define likely_notrace(x)	likely(x)
# define unlikely_notrace(x)	unlikely(x)
#endif /* CONFIG_TRACE_BRANCH_PROFILING */

/* Optimization barrier */
#ifndef barrier
# define barrier() __asm__ __volatile__("": : :"memory")
#endif
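
/*
 * Sketch of intent (illustrative, not defined by this header): barrier() is
 * a compiler-only barrier; the empty asm with a "memory" clobber keeps the
 * compiler from caching memory values across it or reordering accesses
 * around it, but emits no CPU fence.  A hypothetical busy-wait:
 *
 *	while (!done_flag)
 *		barrier();	// force done_flag to be re-read each pass
 *
 * (Real kernel code would normally use READ_ONCE()/cpu_relax() here; this
 * only shows what the construct prevents the compiler from doing.)
 */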

#ifndef barrier_data
/*
 * barrier_data(): a compiler barrier that additionally hands the compiler a
 * pointer it must assume the asm consumes, so stores through that pointer
 * cannot be optimised away as dead (e.g. when wiping a buffer that is about
 * to go out of scope).
 */
# define barrier_data(ptr) __asm__ __volatile__("": :"r"(ptr) :"memory")
#endif
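
/*
 * Illustrative use (hypothetical helpers, not defined here): clearing a
 * stack buffer holding key material before returning.  Without the barrier
 * the compiler may delete the memset() as a dead store.
 *
 *	static void wipe_secret(void)
 *	{
 *		char key[32];
 *
 *		get_key_material(key, sizeof(key));	// hypothetical
 *		use_key(key, sizeof(key));		// hypothetical
 *		memset(key, 0, sizeof(key));
 *		barrier_data(key);	// keep the memset from being elided
 *	}
 *
 * The kernel's memzero_explicit() wraps this same pattern.
 */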

/*
 * barrier_before_unreachable() is a hook that compiler-specific headers can
 * override to emit something immediately before code annotated as
 * unreachable; the generic fallback is a no-op.
 */
#ifndef barrier_before_unreachable
# define barrier_before_unreachable() do { } while (0)
#endif

#ifdef CONFIG_STACK_VALIDATION
/*
 * These annotations drop a relative reference to the current location into
 * a .discard section so that objtool can tell whether the code following
 * the annotation is reachable.
 */
#define annotate_reachable() ({						\
	asm volatile("%c0:\n\t"						\
		     ".pushsection .discard.reachable\n\t"		\
		     ".long %c0b - .\n\t"				\
		     ".popsection\n\t" : : "i" (__COUNTER__));		\
})
#define annotate_unreachable() ({					\
	asm volatile("%c0:\n\t"						\
		     ".pushsection .discard.unreachable\n\t"		\
		     ".long %c0b - .\n\t"				\
		     ".popsection\n\t" : : "i" (__COUNTER__));		\
})
#define ASM_UNREACHABLE							\
	"999:\n\t"							\
	".pushsection .discard.unreachable\n\t"				\
	".long 999b - .\n\t"						\
	".popsection\n\t"

/* Annotate a C jump table so objtool can follow the switch dispatch. */
#define __annotate_jump_table __section(".rodata..c_jump_table")

#else
#define annotate_reachable()
#define annotate_unreachable()
#define __annotate_jump_table
#endif /* CONFIG_STACK_VALIDATION */

#ifndef ASM_UNREACHABLE
# define ASM_UNREACHABLE
#endif
#ifndef unreachable
# define unreachable() do {		\
	annotate_unreachable();		\
	__builtin_unreachable();	\
} while (0)
#endif
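
/*
 * Illustrative sketch: unreachable() is placed after a statement that can
 * never return, both to silence "control reaches end of non-void function"
 * warnings and, via annotate_unreachable(), to tell objtool not to expect
 * valid instructions past that point.  A hypothetical wrapper:
 *
 *	static __noreturn void die_with_code(int code)
 *	{
 *		machine_specific_halt(code);	// hypothetical, never returns
 *		unreachable();
 *	}
 */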

/*
 * KENTRY - kernel entry point: force a symbol to be emitted and kept even
 * if nothing else in the translation unit references it, by stashing its
 * address in a "___kentry" section entry that is marked __used.
 */
#ifndef KENTRY
# define KENTRY(sym)						\
	extern typeof(sym) sym;					\
	static const unsigned long __kentry_##sym		\
	__used							\
	__attribute__((__section__("___kentry+" #sym)))	\
	= (unsigned long)&sym;
#endif
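
/*
 * Hypothetical usage sketch: keeping an entry point alive that is only
 * reached indirectly (e.g. from assembly or a table the toolchain cannot
 * see), so that section garbage collection or LTO does not drop it.
 *
 *	void my_asm_entry(void);	// hypothetical, defined elsewhere
 *	KENTRY(my_asm_entry);
 */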

#ifndef RELOC_HIDE
# define RELOC_HIDE(ptr, off)					\
	({ unsigned long __ptr;					\
	   __ptr = (unsigned long) (ptr);			\
	   (typeof(ptr)) (__ptr + (off)); })
#endif
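
/*
 * Sketch of intent (illustrative): RELOC_HIDE() adds an offset to a pointer
 * via an unsigned long round-trip so the compiler cannot assume the result
 * still points into the original object, which matters when the "offset" is
 * really a relocation such as a per-CPU base.  A hypothetical use:
 *
 *	struct foo *remote = RELOC_HIDE(&local_template, cpu_base_offset);
 *
 * where local_template and cpu_base_offset are placeholders, not symbols
 * defined by this header.
 */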

#ifndef OPTIMIZER_HIDE_VAR
/* Make the optimizer believe the variable can be manipulated arbitrarily. */
#define OPTIMIZER_HIDE_VAR(var)					\
	__asm__ ("" : "=r" (var) : "0" (var))
#endif
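
/*
 * Illustrative sketch: routing a value through an empty asm breaks the
 * compiler's knowledge of it, defeating optimisations such as value
 * propagation or merging of identical-looking computations.  Hypothetical
 * constant-time comparison fragment:
 *
 *	u8 diff = a ^ b;
 *	OPTIMIZER_HIDE_VAR(diff);	// don't let the compiler short-cut on diff
 *	result |= diff;
 */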

#ifndef __UNIQUE_ID
# define __UNIQUE_ID(prefix) __PASTE(__PASTE(__UNIQUE_ID_, prefix), __LINE__)
#endif
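
/*
 * Sketch (illustrative): __UNIQUE_ID() builds an identifier that will not
 * clash with other uses of the same prefix in the file; this fallback
 * pastes in __LINE__, while compiler-specific headers may use __COUNTER__
 * instead.  For example, on line 42 of a source file,
 *
 *	static int __UNIQUE_ID(foo);
 *
 * declares a variable named __UNIQUE_ID_foo42.
 */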

/*
 * data_race(expr): document that a racy plain access to a shared variable
 * is intentional.  The expression is evaluated with KCSAN's checking for
 * the current context disabled, so the tool does not report it.
 */
#define data_race(expr)							\
({									\
	__unqual_scalar_typeof(({ expr; })) __v = ({			\
		__kcsan_disable_current();				\
		expr;							\
	});								\
	__kcsan_enable_current();					\
	__v;								\
})
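
/*
 * Illustrative sketch: a diagnostics-only read where a race is harmless and
 * deliberately tolerated, e.g.
 *
 *	pr_debug("pending=%lu\n", data_race(queue->nr_pending));
 *
 * (queue->nr_pending is a hypothetical field; the point is only that the
 * read may race with a writer and that this is acknowledged.)
 */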

#endif /* __KERNEL__ */

/*
 * Force the compiler to emit 'sym' as a symbol, so that it can be referenced
 * from inline assembler or similar, by storing its address in a __used
 * variable in the .discard.addressable section.
 */
#define __ADDRESSABLE(sym) \
	static void * __section(".discard.addressable") __used \
		__UNIQUE_ID(__PASTE(__addressable_,sym)) = (void *)&sym;
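
/*
 * Illustrative sketch: referencing a function from inline asm only.  Without
 * __ADDRESSABLE() the compiler may not emit the symbol at all (e.g. if it
 * inlines every C caller), and the asm reference would then fail to link.
 *
 *	static void helper(void) { ... }	// hypothetical
 *	__ADDRESSABLE(helper);
 */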

/*
 * offset_to_ptr - convert a relative memory offset to an absolute pointer
 * @off: the address of the 32-bit offset, from which the offset is relative
 */
static inline void *offset_to_ptr(const int *off)
{
	return (void *)((unsigned long)off + *off);
}
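
/*
 * Worked example (illustrative): if a 4-byte slot at address A stores the
 * value (T - A), where T is the address of some target object, then
 * offset_to_ptr(&slot) returns A + (T - A) == T.  This is the access
 * pattern for relative-reference tables that store an s32 offset instead of
 * a full pointer:
 *
 *	const int *entry = &reloc_table[i];	// hypothetical table of s32 offsets
 *	struct item *target = offset_to_ptr(entry);	// hypothetical type
 */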

#endif /* __ASSEMBLY__ */

/* &a[0] degrades to a pointer: a different type from an array */
#define __must_be_array(a)	BUILD_BUG_ON_ZERO(__same_type((a), &(a)[0]))
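
/*
 * Illustrative sketch: __must_be_array() evaluates to 0 for a true array
 * (typeof(a) is T[N] while typeof(&a[0]) is T *, so the types differ) but
 * forces a build error when handed a plain pointer, where the two types
 * match.  This is how ARRAY_SIZE()-style macros reject pointers, e.g. a
 * hypothetical
 *
 *	#define MY_ARRAY_SIZE(arr) \
 *		(sizeof(arr) / sizeof((arr)[0]) + __must_be_array(arr))
 *
 *	int tbl[8];
 *	size_t n = MY_ARRAY_SIZE(tbl);		// 8
 *	size_t m = MY_ARRAY_SIZE(&tbl[0]);	// build error, not 1
 */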

/*
 * Keep the compiler from turning the last function call into a tail call:
 * the mb() after the call site means the call cannot be the final thing the
 * function does.  Needed e.g. in functions that set up the stack canary and
 * then never return.
 */
#define prevent_tail_call_optimization()	mb()
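
/*
 * Illustrative sketch (hypothetical function): placed after the last call so
 * the caller's stack frame, including its canary, stays live across it.
 *
 *	static void start_worker(void)
 *	{
 *		boot_init_stack_canary();	// arch helper on some architectures
 *		run_forever();			// hypothetical, must not become a tail call
 *		prevent_tail_call_optimization();
 *	}
 */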

#include <asm/rwonce.h>

#endif /* __LINUX_COMPILER_H */