/*
 * linux/percpu-defs.h - basic definitions for percpu areas
 *
 * DO NOT INCLUDE DIRECTLY OUTSIDE PERCPU IMPLEMENTATION PROPER.
 *
 * This file is separate from linux/percpu.h to avoid cyclic inclusion
 * dependency from arch header files.  Only to be included from
 * asm/percpu.h.
 *
 * This file includes macros necessary to declare, define and access
 * percpu variables and the size-generic [raw|__this|this]_cpu
 * operations built on top of them.
 */
#ifndef _LINUX_PERCPU_DEFS_H
#define _LINUX_PERCPU_DEFS_H

#ifdef CONFIG_SMP

#ifdef MODULE
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION "..first"

#else

#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_FIRST_SECTION ""

#endif

/*
 * Base implementations of per-cpu variable declarations and definitions, where
 * the section in which the variable is to be placed is provided by the
 * 'sec' argument.  This may be used to affect the parameters governing the
 * variable's storage.
 *
 * NOTE!  The sections for the DECLARE and for the DEFINE must match, lest
 * linkage errors occur due to the compiler generating the wrong code to
 * access that section.
 */
#define __PCPU_ATTRS(sec)						\
	__percpu __attribute__((section(PER_CPU_BASE_SECTION sec)))	\
	PER_CPU_ATTRIBUTES

#define __PCPU_DUMMY_ATTRS						\
	__attribute__((section(".discard"), unused))

/*
 * s390 and alpha modules require percpu variables to be defined as
 * weak to force the compiler to generate GOT based external
 * references for them.  This is necessary because percpu sections
 * will be located outside of the usually addressable area.
 *
 * This definition puts the following two extra restrictions when
 * defining percpu variables.
 *
 * 1. The symbol must be globally unique.  The compiler can't grok the
 *    duplicate symbol definitions used by static percpu variables.
 *
 * 2. Static percpu variables cannot be defined inside a function.
 *    Weak symbols must have external linkage, which a function-local
 *    static cannot have.
 *
 * Archs which need weak percpu definitions should define
 * ARCH_NEEDS_WEAK_PER_CPU in asm/percpu.h when necessary.
 *
 * To ensure that the generic code observes the above two restrictions,
 * the weak definition is used for all cases if
 * CONFIG_DEBUG_FORCE_WEAK_PER_CPU is set.
 */
#if defined(ARCH_NEEDS_WEAK_PER_CPU) || defined(CONFIG_DEBUG_FORCE_WEAK_PER_CPU)

/*
 * __pcpu_scope_* dummy variable is used to enforce scope.  It
 * receives the static modifier when it's used in front of
 * DEFINE_PER_CPU() and will trigger build failure if
 * DECLARE_PER_CPU() is used for the same variable.
 *
 * __pcpu_unique_* dummy variable is used to enforce symbol uniqueness
 * which will trigger build failure if DEFINE_PER_CPU() is used twice
 * for the same variable.
 */
#define DECLARE_PER_CPU_SECTION(type, name, sec)			\
	extern __PCPU_DUMMY_ATTRS char __pcpu_scope_##name;		\
	extern __PCPU_ATTRS(sec) __typeof__(type) name

#define DEFINE_PER_CPU_SECTION(type, name, sec)				\
	__PCPU_DUMMY_ATTRS char __pcpu_scope_##name;			\
	extern __PCPU_DUMMY_ATTRS char __pcpu_unique_##name;		\
	__PCPU_DUMMY_ATTRS char __pcpu_unique_##name;			\
	extern __PCPU_ATTRS(sec) __typeof__(type) name;			\
	__PCPU_ATTRS(sec) PER_CPU_DEF_ATTRIBUTES __weak			\
	__typeof__(type) name
#else

/*
 * Normal declaration and definition macros.
 */
#define DECLARE_PER_CPU_SECTION(type, name, sec)			\
	extern __PCPU_ATTRS(sec) __typeof__(type) name

#define DEFINE_PER_CPU_SECTION(type, name, sec)				\
	__PCPU_ATTRS(sec) PER_CPU_DEF_ATTRIBUTES			\
	__typeof__(type) name
#endif

/*
 * Variant on the per-cpu variable declaration/definition theme.
 * The plain variants place the variable in the default percpu section.
 */
#define DECLARE_PER_CPU(type, name)					\
	DECLARE_PER_CPU_SECTION(type, name, "")

#define DEFINE_PER_CPU(type, name)					\
	DEFINE_PER_CPU_SECTION(type, name, "")
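
/*
 * Illustrative sketch ("pkt_count" is a hypothetical variable, not part
 * of this header): a per-cpu counter is declared once for all users and
 * defined in exactly one translation unit.
 *
 *	DECLARE_PER_CPU(unsigned long, pkt_count);	- in a shared header
 *	DEFINE_PER_CPU(unsigned long, pkt_count);	- in exactly one .c file
 */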

/*
 * Declaration/definition used for per-cpu variables that must come first in
 * the set of variables.
 */
#define DECLARE_PER_CPU_FIRST(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_FIRST_SECTION)

#define DEFINE_PER_CPU_FIRST(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_FIRST_SECTION)

/*
 * Declaration/definition used for per-cpu variables that must be cacheline
 * aligned under SMP conditions so that, whilst a particular instance of the
 * data corresponds to a particular CPU, inefficiencies due to direct access
 * by other CPUs are avoided by ensuring that the data is not shared with
 * other compiler-generated variables in the same cacheline.
 *
 * The _SHARED_ALIGNED variants align only when CONFIG_SMP is set
 * (____cacheline_aligned_in_smp); the _ALIGNED variants are cacheline
 * aligned unconditionally.
 */
#define DECLARE_PER_CPU_SHARED_ALIGNED(type, name)			\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_SHARED_ALIGNED_SECTION) \
	____cacheline_aligned_in_smp

#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name)			\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_SHARED_ALIGNED_SECTION) \
	____cacheline_aligned_in_smp

#define DECLARE_PER_CPU_ALIGNED(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_ALIGNED_SECTION)	\
	____cacheline_aligned

#define DEFINE_PER_CPU_ALIGNED(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_ALIGNED_SECTION)	\
	____cacheline_aligned

/*
 * Declaration/definition used for per-cpu variables that must be page
 * aligned.
 */
#define DECLARE_PER_CPU_PAGE_ALIGNED(type, name)			\
	DECLARE_PER_CPU_SECTION(type, name, "..page_aligned")		\
	__aligned(PAGE_SIZE)

#define DEFINE_PER_CPU_PAGE_ALIGNED(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, "..page_aligned")		\
	__aligned(PAGE_SIZE)

/*
 * Declaration/definition used for per-cpu variables that are frequently
 * read but seldom written; grouping them in "..read_mostly" avoids false
 * sharing with frequently written data.
 */
#define DECLARE_PER_CPU_READ_MOSTLY(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, "..read_mostly")

#define DEFINE_PER_CPU_READ_MOSTLY(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, "..read_mostly")

/*
 * Intermodule exports for per-cpu variables.  sparse forgets about
 * address space across EXPORT_SYMBOL(), change EXPORT_PER_CPU_SYMBOL()
 * to noop for __CHECKER__.
 */
#ifndef __CHECKER__
#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(var)
#else
#define EXPORT_PER_CPU_SYMBOL(var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var)
#endif
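
/*
 * Illustrative sketch (hypothetical variable): making a per-cpu variable
 * accessible from modules.
 *
 *	DEFINE_PER_CPU(unsigned long, pkt_count);
 *	EXPORT_PER_CPU_SYMBOL(pkt_count);
 */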

/*
 * Accessors and operations.
 */
#ifndef __ASSEMBLY__

/*
 * __verify_pcpu_ptr() verifies @ptr is a percpu pointer without evaluating
 * @ptr and is invoked once before a percpu area is accessed by all
 * accessors and operations.  This is performed in the generic part of
 * accessors and operations; archs may add further verification in their
 * arch-specific parts.
 *
 * + 0 is required in order to convert the pointer type from a
 * potential array type to a pointer to a single item of the array.
 */
#define __verify_pcpu_ptr(ptr)						\
do {									\
	const void __percpu *__vpp_verify = (typeof((ptr) + 0))NULL;	\
	(void)__vpp_verify;						\
} while (0)
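
/*
 * Illustrative sketch: __verify_pcpu_ptr() never evaluates its argument,
 * it only type-checks it.  Passing a plain kernel pointer where a
 * __percpu pointer is expected makes the NULL-cast initializer above
 * trip sparse's address space checking.
 *
 *	int *p;
 *	__verify_pcpu_ptr(p);	- sparse warns: p is not __percpu
 */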

#ifdef CONFIG_SMP

/*
 * Add an offset to a pointer but keep the pointer as-is.  Use RELOC_HIDE()
 * to prevent the compiler from making incorrect assumptions about the
 * pointer value.  The weird cast keeps both GCC and sparse happy.
 */
#define SHIFT_PERCPU_PTR(__p, __offset)					\
	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset))

#define per_cpu_ptr(ptr, cpu)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	SHIFT_PERCPU_PTR((ptr), per_cpu_offset((cpu)));			\
})

#define raw_cpu_ptr(ptr)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	arch_raw_cpu_ptr(ptr);						\
})

#ifdef CONFIG_DEBUG_PREEMPT
#define this_cpu_ptr(ptr)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	SHIFT_PERCPU_PTR(ptr, my_cpu_offset);				\
})
#else
#define this_cpu_ptr(ptr) raw_cpu_ptr(ptr)
#endif

#else	/* !CONFIG_SMP */

#define VERIFY_PERCPU_PTR(__p)						\
({									\
	__verify_pcpu_ptr(__p);						\
	(typeof(*(__p)) __kernel __force *)(__p);			\
})

#define per_cpu_ptr(ptr, cpu)	({ (void)(cpu); VERIFY_PERCPU_PTR(ptr); })
#define raw_cpu_ptr(ptr)	per_cpu_ptr(ptr, 0)
#define this_cpu_ptr(ptr)	raw_cpu_ptr(ptr)

#endif	/* CONFIG_SMP */

#define per_cpu(var, cpu)	(*per_cpu_ptr(&(var), cpu))
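
/*
 * Illustrative sketch (hypothetical variable; for_each_possible_cpu()
 * comes from linux/cpumask.h): summing a per-cpu counter across CPUs.
 *
 *	unsigned long sum = 0;
 *	int cpu;
 *
 *	for_each_possible_cpu(cpu)
 *		sum += per_cpu(pkt_count, cpu);
 */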

/*
 * Must be an lvalue.  Since @var must be a simple identifier,
 * we force a syntax error here if it isn't.
 */
#define get_cpu_var(var)						\
(*({									\
	preempt_disable();						\
	this_cpu_ptr(&var);						\
}))

/*
 * The weird & is necessary because sparse considers (void)(var) to be
 * a direct dereference of percpu variable (var).
 */
#define put_cpu_var(var)						\
do {									\
	(void)&(var);							\
	preempt_enable();						\
} while (0)

#define get_cpu_ptr(var)						\
({									\
	preempt_disable();						\
	this_cpu_ptr(var);						\
})

#define put_cpu_ptr(var)						\
do {									\
	(void)(var);							\
	preempt_enable();						\
} while (0)
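
/*
 * Illustrative sketch (hypothetical variable): get_cpu_var() disables
 * preemption, so the "current CPU" cannot change until the matching
 * put_cpu_var().
 *
 *	get_cpu_var(pkt_count)++;
 *	put_cpu_var(pkt_count);
 */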

/*
 * The size-generic operations below dispatch on sizeof(variable).
 * Unsupported sizes become a link-time error via the deliberately
 * undefined __bad_size_call_parameter().
 */
extern void __bad_size_call_parameter(void);

#ifdef CONFIG_DEBUG_PREEMPT
extern void __this_cpu_preempt_check(const char *op);
#else
static inline void __this_cpu_preempt_check(const char *op) { }
#endif
302
303#define __pcpu_size_call_return(stem, variable) \
304({ \
305 typeof(variable) pscr_ret__; \
306 __verify_pcpu_ptr(&(variable)); \
307 switch(sizeof(variable)) { \
308 case 1: pscr_ret__ = stem##1(variable); break; \
309 case 2: pscr_ret__ = stem##2(variable); break; \
310 case 4: pscr_ret__ = stem##4(variable); break; \
311 case 8: pscr_ret__ = stem##8(variable); break; \
312 default: \
313 __bad_size_call_parameter(); break; \
314 } \
315 pscr_ret__; \
316})
317
318#define __pcpu_size_call_return2(stem, variable, ...) \
319({ \
320 typeof(variable) pscr2_ret__; \
321 __verify_pcpu_ptr(&(variable)); \
322 switch(sizeof(variable)) { \
323 case 1: pscr2_ret__ = stem##1(variable, __VA_ARGS__); break; \
324 case 2: pscr2_ret__ = stem##2(variable, __VA_ARGS__); break; \
325 case 4: pscr2_ret__ = stem##4(variable, __VA_ARGS__); break; \
326 case 8: pscr2_ret__ = stem##8(variable, __VA_ARGS__); break; \
327 default: \
328 __bad_size_call_parameter(); break; \
329 } \
330 pscr2_ret__; \
331})

/*
 * Special handling for cmpxchg_double.  cmpxchg_double is passed two
 * percpu variables.  The first has to be aligned to a double word
 * boundary and the second has to follow directly thereafter.
 * We enforce this on all architectures even if they don't support
 * a double cmpxchg instruction, since it's a cheap requirement, and it
 * avoids breaking the requirement for architectures with the instruction.
 */
#define __pcpu_double_call_return_bool(stem, pcp1, pcp2, ...)		\
({									\
	bool pdcrb_ret__;						\
	__verify_pcpu_ptr(&(pcp1));					\
	BUILD_BUG_ON(sizeof(pcp1) != sizeof(pcp2));			\
	VM_BUG_ON((unsigned long)(&(pcp1)) % (2 * sizeof(pcp1)));	\
	VM_BUG_ON((unsigned long)(&(pcp2)) !=				\
		  (unsigned long)(&(pcp1)) + sizeof(pcp1));		\
	switch (sizeof(pcp1)) {						\
	case 1: pdcrb_ret__ = stem##1(pcp1, pcp2, __VA_ARGS__); break;	\
	case 2: pdcrb_ret__ = stem##2(pcp1, pcp2, __VA_ARGS__); break;	\
	case 4: pdcrb_ret__ = stem##4(pcp1, pcp2, __VA_ARGS__); break;	\
	case 8: pdcrb_ret__ = stem##8(pcp1, pcp2, __VA_ARGS__); break;	\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
	pdcrb_ret__;							\
})

#define __pcpu_size_call(stem, variable, ...)				\
do {									\
	__verify_pcpu_ptr(&(variable));					\
	switch (sizeof(variable)) {					\
	case 1: stem##1(variable, __VA_ARGS__); break;			\
	case 2: stem##2(variable, __VA_ARGS__); break;			\
	case 4: stem##4(variable, __VA_ARGS__); break;			\
	case 8: stem##8(variable, __VA_ARGS__); break;			\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
} while (0)

/*
 * this_cpu operations (C) 2008-2013 Christoph Lameter <cl@linux.com>
 *
 * Optimized manipulation for memory allocated through the per cpu
 * allocator or for addresses of per cpu variables.
 *
 * These operations guarantee exclusivity of access for other operations
 * on the same per cpu variable.  This means that as long as the context
 * is a this_cpu operation, interrupts, preemption and migration cannot
 * corrupt a read-modify-write on the local CPU's instance.
 *
 * Three flavors are provided:
 *
 * raw_cpu_*()	  - no debug checking; the caller must already have
 *		    excluded concurrent updaters, e.g. by disabling
 *		    preemption (and interrupts, if IRQ handlers also
 *		    update the variable).  Unless strictly necessary,
 *		    always use [__]this_cpu_*() instead.
 * __this_cpu_*() - like raw_cpu_*(), but verifies under
 *		    CONFIG_DEBUG_PREEMPT that preemption is disabled.
 * this_cpu_*()	  - usable from any context; the operation itself is
 *		    protected against preemption and interrupts.
 */
#define raw_cpu_read(pcp)		__pcpu_size_call_return(raw_cpu_read_, pcp)
#define raw_cpu_write(pcp, val)		__pcpu_size_call(raw_cpu_write_, pcp, val)
#define raw_cpu_add(pcp, val)		__pcpu_size_call(raw_cpu_add_, pcp, val)
#define raw_cpu_and(pcp, val)		__pcpu_size_call(raw_cpu_and_, pcp, val)
#define raw_cpu_or(pcp, val)		__pcpu_size_call(raw_cpu_or_, pcp, val)
#define raw_cpu_add_return(pcp, val)	__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)
#define raw_cpu_xchg(pcp, nval)		__pcpu_size_call_return2(raw_cpu_xchg_, pcp, nval)
#define raw_cpu_cmpxchg(pcp, oval, nval) \
	__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
#define raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)

/* cast through typeof(pcp) so an unsigned @val negates correctly */
#define raw_cpu_sub(pcp, val)		raw_cpu_add(pcp, -(typeof(pcp))(val))
#define raw_cpu_inc(pcp)		raw_cpu_add(pcp, 1)
#define raw_cpu_dec(pcp)		raw_cpu_sub(pcp, 1)
#define raw_cpu_sub_return(pcp, val)	raw_cpu_add_return(pcp, -(typeof(pcp))(val))
#define raw_cpu_inc_return(pcp)		raw_cpu_add_return(pcp, 1)
#define raw_cpu_dec_return(pcp)		raw_cpu_add_return(pcp, -1)
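
/*
 * Illustrative sketch (hypothetical variable): raw_cpu_*() performs no
 * checking at all, so it is only correct when the context already
 * excludes concurrent updaters, e.g. with preemption and, if needed,
 * interrupts disabled.
 *
 *	raw_cpu_inc(pkt_count);
 */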

/*
 * Operations for contexts that are safe from preemption/interrupts.  These
 * operations verify that preemption is disabled.
 */
#define __this_cpu_read(pcp)						\
({									\
	__this_cpu_preempt_check("read");				\
	raw_cpu_read(pcp);						\
})

#define __this_cpu_write(pcp, val)					\
({									\
	__this_cpu_preempt_check("write");				\
	raw_cpu_write(pcp, val);					\
})

#define __this_cpu_add(pcp, val)					\
({									\
	__this_cpu_preempt_check("add");				\
	raw_cpu_add(pcp, val);						\
})

#define __this_cpu_and(pcp, val)					\
({									\
	__this_cpu_preempt_check("and");				\
	raw_cpu_and(pcp, val);						\
})

#define __this_cpu_or(pcp, val)						\
({									\
	__this_cpu_preempt_check("or");					\
	raw_cpu_or(pcp, val);						\
})

#define __this_cpu_add_return(pcp, val)					\
({									\
	__this_cpu_preempt_check("add_return");				\
	raw_cpu_add_return(pcp, val);					\
})

#define __this_cpu_xchg(pcp, nval)					\
({									\
	__this_cpu_preempt_check("xchg");				\
	raw_cpu_xchg(pcp, nval);					\
})

#define __this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	__this_cpu_preempt_check("cmpxchg");				\
	raw_cpu_cmpxchg(pcp, oval, nval);				\
})

#define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	__this_cpu_preempt_check("cmpxchg_double");			\
	raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2);	\
})

#define __this_cpu_sub(pcp, val)	__this_cpu_add(pcp, -(typeof(pcp))(val))
#define __this_cpu_inc(pcp)		__this_cpu_add(pcp, 1)
#define __this_cpu_dec(pcp)		__this_cpu_sub(pcp, 1)
#define __this_cpu_sub_return(pcp, val)	__this_cpu_add_return(pcp, -(typeof(pcp))(val))
#define __this_cpu_inc_return(pcp)	__this_cpu_add_return(pcp, 1)
#define __this_cpu_dec_return(pcp)	__this_cpu_add_return(pcp, -1)
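
/*
 * Illustrative sketch (hypothetical variable): __this_cpu_*() expects
 * preemption to be disabled already and, with CONFIG_DEBUG_PREEMPT,
 * complains if it is not.
 *
 *	preempt_disable();
 *	__this_cpu_inc(pkt_count);
 *	preempt_enable();
 */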

/*
 * Operations with implied preemption/interrupt protection.  These
 * operations can be used without worrying about preemption or interrupt.
 */
#define this_cpu_read(pcp)		__pcpu_size_call_return(this_cpu_read_, pcp)
#define this_cpu_write(pcp, val)	__pcpu_size_call(this_cpu_write_, pcp, val)
#define this_cpu_add(pcp, val)		__pcpu_size_call(this_cpu_add_, pcp, val)
#define this_cpu_and(pcp, val)		__pcpu_size_call(this_cpu_and_, pcp, val)
#define this_cpu_or(pcp, val)		__pcpu_size_call(this_cpu_or_, pcp, val)
#define this_cpu_add_return(pcp, val)	__pcpu_size_call_return2(this_cpu_add_return_, pcp, val)
#define this_cpu_xchg(pcp, nval)	__pcpu_size_call_return2(this_cpu_xchg_, pcp, nval)
#define this_cpu_cmpxchg(pcp, oval, nval) \
	__pcpu_size_call_return2(this_cpu_cmpxchg_, pcp, oval, nval)
#define this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	__pcpu_double_call_return_bool(this_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)

#define this_cpu_sub(pcp, val)		this_cpu_add(pcp, -(typeof(pcp))(val))
#define this_cpu_inc(pcp)		this_cpu_add(pcp, 1)
#define this_cpu_dec(pcp)		this_cpu_sub(pcp, 1)
#define this_cpu_sub_return(pcp, val)	this_cpu_add_return(pcp, -(typeof(pcp))(val))
#define this_cpu_inc_return(pcp)	this_cpu_add_return(pcp, 1)
#define this_cpu_dec_return(pcp)	this_cpu_add_return(pcp, -1)
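
/*
 * Illustrative sketch (hypothetical variable): this_cpu_*() is safe from
 * any context; the update cannot be torn by an interrupt or by migration
 * to another CPU.
 *
 *	this_cpu_inc(pkt_count);
 */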

#endif /* __ASSEMBLY__ */
#endif /* _LINUX_PERCPU_DEFS_H */