#ifndef __LINUX_COMPILER_H
#define __LINUX_COMPILER_H

#include <linux/compiler_types.h>

#ifndef __ASSEMBLY__

#ifdef __KERNEL__

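/*
 * Branch profiling: when CONFIG_TRACE_BRANCH_PROFILING is enabled, the
 * likely()/unlikely() annotations below are instrumented so the tracer can
 * report how often each prediction was right.  DISABLE_BRANCH_PROFILING can
 * be defined by special lowlevel code to opt out on a per-file basis, and
 * sparse (__CHECKER__) never sees the instrumented versions.
 */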
#if defined(CONFIG_TRACE_BRANCH_PROFILING) \
    && !defined(DISABLE_BRANCH_PROFILING) && !defined(__CHECKER__)
void ftrace_likely_update(struct ftrace_likely_data *f, int val,
			  int expect, int is_constant);

#define likely_notrace(x)	__builtin_expect(!!(x), 1)
#define unlikely_notrace(x)	__builtin_expect(!!(x), 0)

#define __branch_check__(x, expect, is_constant) ({			\
			long ______r;					\
			static struct ftrace_likely_data		\
				__aligned(4)				\
				__section("_ftrace_annotated_branch")	\
				______f = {				\
				.data.func = __func__,			\
				.data.file = __FILE__,			\
				.data.line = __LINE__,			\
			};						\
			______r = __builtin_expect(!!(x), expect);	\
			ftrace_likely_update(&______f, ______r,		\
					     expect, is_constant);	\
			______r;					\
		})

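/*
 * Passing __builtin_constant_p(x) along lets the profiler tell apart
 * branches whose condition folds to a compile-time constant (and which the
 * compiler may therefore eliminate entirely) from genuinely dynamic ones.
 */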
# ifndef likely
#  define likely(x)	(__branch_check__(x, 1, __builtin_constant_p(x)))
# endif
# ifndef unlikely
#  define unlikely(x)	(__branch_check__(x, 0, __builtin_constant_p(x)))
# endif
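
/*
 * Illustrative use (a sketch, not part of this header): annotate the
 * outcome the caller expects so the compiler lays out the hot path first,
 * e.g.
 *
 *	p = kmalloc(size, GFP_KERNEL);
 *	if (unlikely(!p))
 *		return -ENOMEM;
 *
 * With branch profiling enabled, each such site expands to
 * __branch_check__() and its hit/miss statistics become visible to the
 * tracer.
 */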

#ifdef CONFIG_PROFILE_ALL_BRANCHES
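/*
 * CONFIG_PROFILE_ALL_BRANCHES instruments every if () in the kernel, not
 * just the annotated ones, by redefining the 'if' keyword itself.  The
 * variadic parameter list keeps conditions containing top-level commas
 * preprocessing correctly, and __trace_if_var() bypasses the counters for
 * compile-time-constant conditions so dead branches can still be optimized
 * away.
 */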
#define if(cond, ...) if ( __trace_if_var( !!(cond , ## __VA_ARGS__) ) )

#define __trace_if_var(cond) (__builtin_constant_p(cond) ? (cond) : __trace_if_value(cond))

#define __trace_if_value(cond) ({			\
	static struct ftrace_branch_data		\
		__aligned(4)				\
		__section("_ftrace_branch")		\
		__if_trace = {				\
			.func = __func__,		\
			.file = __FILE__,		\
			.line = __LINE__,		\
		};					\
	(cond) ?					\
		(__if_trace.miss_hit[1]++,1) :		\
		(__if_trace.miss_hit[0]++,0);		\
})
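
/*
 * Expansion sketch (illustrative only):
 *
 *	if (x > 0)
 *		do_work();
 *
 * becomes, with CONFIG_PROFILE_ALL_BRANCHES,
 *
 *	if (__trace_if_var(!!(x > 0)))
 *		do_work();
 *
 * so every evaluation bumps miss_hit[1] (taken) or miss_hit[0] (not taken)
 * in a per-site ftrace_branch_data record.  'do_work()' is a made-up name.
 */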

#endif /* CONFIG_PROFILE_ALL_BRANCHES */

#else
# define likely(x)	__builtin_expect(!!(x), 1)
# define unlikely(x)	__builtin_expect(!!(x), 0)
#endif /* CONFIG_TRACE_BRANCH_PROFILING */

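/* Optimization barrier */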
#ifndef barrier
# define barrier() __memory_barrier()
#endif

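/*
 * barrier_data(): like barrier(), but additionally makes the compiler
 * assume the memory pointed to by @ptr is used, so stores to it cannot be
 * optimized away as dead (e.g. a memset() of sensitive data before free).
 */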
#ifndef barrier_data
# define barrier_data(ptr) barrier()
#endif

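/*
 * barrier_before_unreachable() gives compilers that need it a chance to
 * work around code-generation issues ahead of __builtin_unreachable(); it
 * is a no-op unless a compiler header overrides it.
 */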
#ifndef barrier_before_unreachable
# define barrier_before_unreachable() do { } while (0)
#endif

#ifdef CONFIG_STACK_VALIDATION
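/*
 * These macros help objtool understand GCC code flow for unreachable code.
 * The __COUNTER__ based labels are a hack to make each instance of the
 * macros unique, to convince GCC not to merge duplicate inline asm
 * statements.
 */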
#define annotate_reachable() ({						\
	asm volatile("%c0:\n\t"						\
		     ".pushsection .discard.reachable\n\t"		\
		     ".long %c0b - .\n\t"				\
		     ".popsection\n\t" : : "i" (__COUNTER__));		\
})
#define annotate_unreachable() ({					\
	asm volatile("%c0:\n\t"						\
		     ".pushsection .discard.unreachable\n\t"		\
		     ".long %c0b - .\n\t"				\
		     ".popsection\n\t" : : "i" (__COUNTER__));		\
})
#define ASM_UNREACHABLE							\
	"999:\n\t"							\
	".pushsection .discard.unreachable\n\t"				\
	".long 999b - .\n\t"						\
	".popsection\n\t"

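/* Annotate a C jump table to allow objtool to follow the code flow */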
#define __annotate_jump_table __section(".rodata..c_jump_table")

#else
#define annotate_reachable()
#define annotate_unreachable()
#define __annotate_jump_table
#endif /* CONFIG_STACK_VALIDATION */

#ifndef ASM_UNREACHABLE
# define ASM_UNREACHABLE
#endif
#ifndef unreachable
# define unreachable() do {		\
	annotate_unreachable();		\
	__builtin_unreachable();	\
} while (0)
#endif
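
/*
 * Illustrative use (a sketch, not part of this header): tell the compiler
 * that control flow cannot reach a given point, e.g. after a switch that
 * covers every case:
 *
 *	switch (state) {
 *	case STATE_A: return handle_a();
 *	case STATE_B: return handle_b();
 *	}
 *	unreachable();
 *
 * STATE_A/handle_a() etc. are made-up names for the example.
 */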
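
/*
 * KENTRY - kernel entry point
 * This can be used to annotate symbols (functions or data) that are used
 * without their linker symbol being referenced explicitly, for example
 * symbols that are only found programmatically at runtime.  KENTRY forces
 * the symbol to be emitted and keeps it from being discarded as unused.
 * Not required for symbols exported with EXPORT_SYMBOL or for initcalls;
 * those are handled in their own way (with KEEP() in linker scripts).
 */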
#ifndef KENTRY
# define KENTRY(sym)						\
	extern typeof(sym) sym;					\
	static const unsigned long __kentry_##sym		\
	__used							\
	__section("___kentry" "+" #sym )			\
	= (unsigned long)&sym;
#endif
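
/*
 * Sketch (hypothetical names): keep a handler that is only located via a
 * symbol-table scan from being discarded by the linker:
 *
 *	void my_probe_handler(void);	(defined elsewhere)
 *	KENTRY(my_probe_handler);
 *
 * This emits a __used pointer to the symbol in a dedicated ___kentry
 * section, so the reference survives aggressive section garbage collection.
 */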
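
/*
 * RELOC_HIDE() performs pointer arithmetic behind the compiler's back: the
 * round trip through unsigned long keeps the compiler from drawing any
 * conclusions about the resulting address (used e.g. by per-CPU accessors).
 */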
#ifndef RELOC_HIDE
# define RELOC_HIDE(ptr, off)					\
  ({ unsigned long __ptr;					\
     __ptr = (unsigned long) (ptr);				\
    (typeof(ptr)) (__ptr + (off)); })
#endif

#ifndef OPTIMIZER_HIDE_VAR
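/* Make the optimizer believe the variable can be manipulated arbitrarily. */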
#define OPTIMIZER_HIDE_VAR(var)						\
	__asm__ ("" : "=r" (var) : "0" (var))
#endif
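
/*
 * Not-quite-unique ID: __UNIQUE_ID(foo) pastes the prefix with __LINE__,
 * so on line 42 it expands to __UNIQUE_ID_foo42 (illustrative), which is
 * unique enough for one declaration per line per translation unit.
 */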
#ifndef __UNIQUE_ID
# define __UNIQUE_ID(prefix) __PASTE(__PASTE(__UNIQUE_ID_, prefix), __LINE__)
#endif

#include <linux/types.h>

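/*
 * __READ_ONCE_SIZE is the common body of __read_once_size() and
 * __read_once_size_nocheck(): sizes that fit a machine word are read with
 * a single volatile access; anything larger falls back to a memcpy()
 * between barriers, which provides no single-copy atomicity.
 */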
#define __READ_ONCE_SIZE						\
({									\
	switch (size) {							\
	case 1: *(__u8 *)res = *(volatile __u8 *)p; break;		\
	case 2: *(__u16 *)res = *(volatile __u16 *)p; break;		\
	case 4: *(__u32 *)res = *(volatile __u32 *)p; break;		\
	case 8: *(__u64 *)res = *(volatile __u64 *)p; break;		\
	default:							\
		barrier();						\
		__builtin_memcpy((void *)res, (const void *)p, size);	\
		barrier();						\
	}								\
})

static __always_inline
void __read_once_size(const volatile void *p, void *res, int size)
{
	__READ_ONCE_SIZE;
}

#ifdef CONFIG_KASAN
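/*
 * We can't declare the function 'inline' here: __no_sanitize_address
 * conflicts with inlining, and attempting both may cause a build failure.
 * '__maybe_unused' avoids defined-but-not-used warnings.
 */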
# define __no_kasan_or_inline __no_sanitize_address notrace __maybe_unused
#else
# define __no_kasan_or_inline __always_inline
#endif

static __no_kasan_or_inline
void __read_once_size_nocheck(const volatile void *p, void *res, int size)
{
	__READ_ONCE_SIZE;
}

static __always_inline void __write_once_size(volatile void *p, void *res, int size)
{
	switch (size) {
	case 1: *(volatile __u8 *)p = *(__u8 *)res; break;
	case 2: *(volatile __u16 *)p = *(__u16 *)res; break;
	case 4: *(volatile __u32 *)p = *(__u32 *)res; break;
	case 8: *(volatile __u64 *)p = *(__u64 *)res; break;
	default:
		barrier();
		__builtin_memcpy((void *)p, (const void *)res, size);
		barrier();
	}
}
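
/*
 * Prevent the compiler from merging or refetching reads or writes.  The
 * compiler is also forbidden from reordering successive instances of
 * READ_ONCE and WRITE_ONCE, but only when the compiler is aware of some
 * particular ordering.  One way to make the compiler aware of ordering is
 * to put the two invocations of READ_ONCE or WRITE_ONCE in different C
 * statements.
 *
 * These two macros will also work on aggregate data types like structs or
 * unions.  If the size of the accessed data type exceeds the word size of
 * the machine (e.g. 32 bits or 64 bits) the access falls back to a
 * barrier-bracketed memcpy(), which provides no single-copy atomicity.
 *
 * Their major use case is mediating communication with code that runs
 * asynchronously (interrupt handlers, other CPUs), where a plain access
 * would let the compiler tear, fuse or refetch loads and stores.
 */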
#define __READ_ONCE(x, check)						\
({									\
	union { typeof(x) __val; char __c[1]; } __u;			\
	if (check)							\
		__read_once_size(&(x), __u.__c, sizeof(x));		\
	else								\
		__read_once_size_nocheck(&(x), __u.__c, sizeof(x));	\
	__u.__val;							\
})
#define READ_ONCE(x) __READ_ONCE(x, 1)
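
/*
 * Use READ_ONCE_NOCHECK() instead of READ_ONCE() if you need to hide the
 * memory access from KASAN.
 */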
#define READ_ONCE_NOCHECK(x) __READ_ONCE(x, 0)
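
/*
 * read_word_at_a_time(): read a full word without KASAN instrumentation,
 * for word-at-a-time string processing that may deliberately read a few
 * bytes past the end of a buffer (the caller guarantees this is safe).
 */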
static __no_kasan_or_inline
unsigned long read_word_at_a_time(const void *addr)
{
	return *(unsigned long *)addr;
}

#define WRITE_ONCE(x, val) \
({							\
	union { typeof(x) __val; char __c[1]; } __u =	\
		{ .__val = (__force typeof(x)) (val) }; \
	__write_once_size(&(x), __u.__c, sizeof(x));	\
	__u.__val;					\
})
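
/*
 * Illustrative pairing (a sketch, not part of this header): one side
 * publishes a flag, the other polls it, without locks:
 *
 *	WRITE_ONCE(shared->flag, 1);		writer
 *
 *	while (!READ_ONCE(shared->flag))	reader
 *		cpu_relax();
 *
 * 'shared->flag' is a made-up field.  READ_ONCE/WRITE_ONCE stop the
 * compiler from caching, tearing or refetching the accesses, but provide
 * no CPU-level memory ordering on their own.
 */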

#endif /* __KERNEL__ */

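/*
 * Force the compiler to emit 'sym' as a symbol, so that we can reference
 * it from inline assembler.  Useful, for example, for LTO.
 */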
#define __ADDRESSABLE(sym) \
	static void * __section(".discard.addressable") __used \
		__UNIQUE_ID(__PASTE(__addressable_,sym)) = (void *)&sym;
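
/**
 * offset_to_ptr - convert a relative memory offset to an absolute pointer
 * @off:	the address of the 32-bit offset value
 */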
static inline void *offset_to_ptr(const int *off)
{
	return (void *)((unsigned long)off + *off);
}

#endif /* __ASSEMBLY__ */
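
/* Compile time object size, -1 for unknown */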
#ifndef __compiletime_object_size
# define __compiletime_object_size(obj) -1
#endif
#ifndef __compiletime_warning
# define __compiletime_warning(message)
#endif
#ifndef __compiletime_error
# define __compiletime_error(message)
#endif

#ifdef __OPTIMIZE__
# define __compiletime_assert(condition, msg, prefix, suffix)		\
	do {								\
		extern void prefix ## suffix(void) __compiletime_error(msg); \
		if (!(condition))					\
			prefix ## suffix();				\
	} while (0)
#else
# define __compiletime_assert(condition, msg, prefix, suffix) do { } while (0)
#endif

#define _compiletime_assert(condition, msg, prefix, suffix) \
	__compiletime_assert(condition, msg, prefix, suffix)
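
/**
 * compiletime_assert - break build and emit msg if condition is false
 * @condition: a compile-time constant condition to check
 * @msg:       a message to emit if condition is false
 *
 * In tradition of POSIX assert, this macro will break the build if the
 * condition is false, emitting the supplied error message if the compiler
 * has support to do so.  The _compiletime_assert() indirection exists so
 * that __COUNTER__ is expanded before pasting, giving each assertion a
 * unique error-function name.
 */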
#define compiletime_assert(condition, msg) \
	_compiletime_assert(condition, msg, __compiletime_assert_, __COUNTER__)

#define compiletime_assert_atomic_type(t)				\
	compiletime_assert(__native_word(t),				\
		"Need native word sized stores/loads for atomicity.")
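
/* &a[0] degrades to a pointer: a different type from an array */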
#define __must_be_array(a)	BUILD_BUG_ON_ZERO(__same_type((a), &(a)[0]))

#endif /* __LINUX_COMPILER_H */