1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
#ifndef _LINUX_PERCPU_DEFS_H
#define _LINUX_PERCPU_DEFS_H

#ifdef CONFIG_SMP

#ifdef MODULE
/*
 * Modules get no section suffix and fall back to the base per-cpu
 * section.  NOTE(review): presumably because the module loader only
 * populates the base per-cpu section -- confirm against the module
 * per-cpu allocator before relying on this.
 */
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ""
#else
/* core kernel: both aligned flavours share the ..shared_aligned section */
#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION "..first"

#else	/* !CONFIG_SMP */

/*
 * UP: SHARED_ALIGNED and FIRST placement buy nothing (there is a single
 * copy of each variable), but the plain ALIGNED variant still gets its
 * own section.
 */
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_FIRST_SECTION ""

#endif
38
39
40
41
42
43
44
45
46
47
48
/*
 * Base attribute lists for per-cpu variables.
 *
 * __PCPU_ATTRS(sec) places a variable into the per-cpu base section plus
 * the given sub-section suffix and carries the __percpu annotation (used
 * by static checkers to track the per-cpu address space) plus any
 * arch-supplied PER_CPU_ATTRIBUTES.
 *
 * __PCPU_DUMMY_ATTRS is for the dummy scope/uniqueness marker symbols
 * emitted by the weak-definition macros below; .discard keeps them out
 * of the final image.
 */
#define __PCPU_ATTRS(sec)						\
	__percpu __attribute__((section(PER_CPU_BASE_SECTION sec)))	\
	PER_CPU_ATTRIBUTES

#define __PCPU_DUMMY_ATTRS						\
	__attribute__((section(".discard"), unused))
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
#if defined(ARCH_NEEDS_WEAK_PER_CPU) || defined(CONFIG_DEBUG_FORCE_WEAK_PER_CPU)
/*
 * Some archs need per-cpu variables defined __weak so the compiler emits
 * indirect references to them -- NOTE(review): reportedly because their
 * per-cpu area can live outside the normally addressable range; confirm
 * against the asm/percpu.h of archs that set ARCH_NEEDS_WEAK_PER_CPU.
 * CONFIG_DEBUG_FORCE_WEAK_PER_CPU forces this mode everywhere so generic
 * code keeps honouring the restrictions it imposes:
 *
 * 1. Per-cpu symbol names must be globally unique, even static ones:
 *    enforced by the non-weak __pcpu_unique_##name companion -- two
 *    definitions of the same name collide at link time.
 * 2. Definitions cannot live inside a function: enforced by
 *    __pcpu_scope_##name, which is declared extern and must therefore
 *    be defined at file scope.
 *
 * Both helper symbols use __PCPU_DUMMY_ATTRS (.discard), so they occupy
 * no space in the final image.
 */
#define DECLARE_PER_CPU_SECTION(type, name, sec)			\
	extern __PCPU_DUMMY_ATTRS char __pcpu_scope_##name;		\
	extern __PCPU_ATTRS(sec) __typeof__(type) name

#define DEFINE_PER_CPU_SECTION(type, name, sec)				\
	__PCPU_DUMMY_ATTRS char __pcpu_scope_##name;			\
	extern __PCPU_DUMMY_ATTRS char __pcpu_unique_##name;		\
	__PCPU_DUMMY_ATTRS char __pcpu_unique_##name;			\
	extern __PCPU_ATTRS(sec) __typeof__(type) name;			\
	__PCPU_ATTRS(sec) __weak __typeof__(type) name
#else
/*
 * Normal declaration/definition: a plain variable in the requested
 * per-cpu (sub-)section.
 */
#define DECLARE_PER_CPU_SECTION(type, name, sec)			\
	extern __PCPU_ATTRS(sec) __typeof__(type) name

#define DEFINE_PER_CPU_SECTION(type, name, sec)				\
	__PCPU_ATTRS(sec) __typeof__(type) name
#endif
106
107
108
109
110
/*
 * Basic declaration/definition of a per-cpu variable: empty sub-section
 * suffix, i.e. the base per-cpu section.
 */
#define DECLARE_PER_CPU(type, name)					\
	DECLARE_PER_CPU_SECTION(type, name, "")

#define DEFINE_PER_CPU(type, name)					\
	DEFINE_PER_CPU_SECTION(type, name, "")

/*
 * Variant for per-cpu variables that must be placed first in the per-cpu
 * area (PER_CPU_FIRST_SECTION; empty suffix on UP and in modules).
 */
#define DECLARE_PER_CPU_FIRST(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_FIRST_SECTION)

#define DEFINE_PER_CPU_FIRST(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_FIRST_SECTION)
126
127
128
129
130
131
132
133
134
135
136
137
/*
 * Per-cpu variables that must be cacheline aligned under SMP so that each
 * CPU's instance sits on its own cacheline and accesses by other CPUs do
 * not cause false sharing.  The SHARED_ALIGNED flavour only aligns when
 * SMP is enabled (____cacheline_aligned_in_smp); the plain ALIGNED
 * flavour aligns unconditionally (____cacheline_aligned).
 */
#define DECLARE_PER_CPU_SHARED_ALIGNED(type, name)			\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_SHARED_ALIGNED_SECTION) \
	____cacheline_aligned_in_smp

#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name)			\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_SHARED_ALIGNED_SECTION) \
	____cacheline_aligned_in_smp

#define DECLARE_PER_CPU_ALIGNED(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_ALIGNED_SECTION)	\
	____cacheline_aligned

#define DEFINE_PER_CPU_ALIGNED(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_ALIGNED_SECTION)	\
	____cacheline_aligned

/*
 * Per-cpu variables that are page aligned: placed in the ..page_aligned
 * sub-section and given PAGE_SIZE alignment.
 */
#define DECLARE_PER_CPU_PAGE_ALIGNED(type, name)			\
	DECLARE_PER_CPU_SECTION(type, name, "..page_aligned")		\
	__aligned(PAGE_SIZE)

#define DEFINE_PER_CPU_PAGE_ALIGNED(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, "..page_aligned")		\
	__aligned(PAGE_SIZE)
164
165
166
167
/*
 * Per-cpu variables placed in the ..read_mostly sub-section -- for data
 * that is read far more often than written, keeping it away from
 * frequently-modified per-cpu data.
 */
#define DECLARE_PER_CPU_READ_MOSTLY(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, "..read_mostly")

#define DEFINE_PER_CPU_READ_MOSTLY(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, "..read_mostly")
173
174
175
176
177
/*
 * Per-cpu variables placed in the ..decrypted sub-section -- presumably
 * so they can be mapped unencrypted and shared with a hypervisor under
 * AMD memory encryption; confirm against the ..decrypted section setup.
 * Only meaningful when both CONFIG_VIRTUALIZATION and
 * CONFIG_AMD_MEM_ENCRYPT are enabled; otherwise they degrade to plain
 * per-cpu variables.
 */
#if defined(CONFIG_VIRTUALIZATION) && defined(CONFIG_AMD_MEM_ENCRYPT)
#define DECLARE_PER_CPU_DECRYPTED(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, "..decrypted")

#define DEFINE_PER_CPU_DECRYPTED(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, "..decrypted")
#else
/*
 * Fallbacks: provide the DECLARE as well as the DEFINE variant so that
 * declaration sites keep building when the encryption support is
 * configured out.  (Previously only the DEFINE fallback existed, so any
 * unconditional DECLARE_PER_CPU_DECRYPTED user failed to compile.)
 */
#define DECLARE_PER_CPU_DECRYPTED(type, name)	DECLARE_PER_CPU(type, name)
#define DEFINE_PER_CPU_DECRYPTED(type, name)	DEFINE_PER_CPU(type, name)
#endif
188
189
190
191
192
193
/*
 * Intermodule exports for per-cpu variables.  NOTE(review): __CHECKER__
 * is set by static checkers (sparse); the no-op variants avoid warnings
 * from the __percpu address-space annotation crossing the export macros
 * -- confirm against the checker setup.
 */
#ifndef __CHECKER__
#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(var)
#else
#define EXPORT_PER_CPU_SYMBOL(var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var)
#endif
201
202
203
204
/* accessors and operations: C only, not visible to assembly */
#ifndef __ASSEMBLY__

/*
 * __verify_pcpu_ptr() verifies that @ptr is a per-cpu pointer without
 * evaluating it: assigning to a const void __percpu * makes a checker
 * that tracks the __percpu address space complain when @ptr lacks the
 * annotation.  At runtime this compiles away entirely.
 *
 * "(ptr) + 0" decays a possible array type to a pointer to its first
 * element, so per-cpu arrays are accepted as well.
 */
#define __verify_pcpu_ptr(ptr)						\
do {									\
	const void __percpu *__vpp_verify = (typeof((ptr) + 0))NULL;	\
	(void)__vpp_verify;						\
} while (0)
223
#ifdef CONFIG_SMP

/*
 * Add @__offset to the per-cpu pointer @__p.  NOTE(review): RELOC_HIDE()
 * (from compiler*.h) performs the addition while hiding it from the
 * optimizer -- confirm its exact guarantees there.  The __kernel __force
 * cast strips the __percpu address-space annotation for the checker.
 */
#define SHIFT_PERCPU_PTR(__p, __offset)					\
	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset))

/* pointer to @cpu's instance of the per-cpu variable behind @ptr */
#define per_cpu_ptr(ptr, cpu)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	SHIFT_PERCPU_PTR((ptr), per_cpu_offset((cpu)));			\
})

/* pointer to this CPU's instance; no preemption-safety checking */
#define raw_cpu_ptr(ptr)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	arch_raw_cpu_ptr(ptr);						\
})

#ifdef CONFIG_DEBUG_PREEMPT
/*
 * Debug variant: goes through my_cpu_offset -- NOTE(review): presumably
 * the checked offset accessor that warns when preemption is enabled;
 * confirm in asm-generic/percpu.h.
 */
#define this_cpu_ptr(ptr)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	SHIFT_PERCPU_PTR(ptr, my_cpu_offset);				\
})
#else
#define this_cpu_ptr(ptr) raw_cpu_ptr(ptr)
#endif

#else	/* !CONFIG_SMP */

/*
 * UP: there is exactly one instance, so "translation" is just the
 * per-cpu annotation check plus a cast back to a plain kernel pointer.
 */
#define VERIFY_PERCPU_PTR(__p)						\
({									\
	__verify_pcpu_ptr(__p);						\
	(typeof(*(__p)) __kernel __force *)(__p);			\
})

#define per_cpu_ptr(ptr, cpu) ({ (void)(cpu); VERIFY_PERCPU_PTR(ptr); })
#define raw_cpu_ptr(ptr) per_cpu_ptr(ptr, 0)
#define this_cpu_ptr(ptr) raw_cpu_ptr(ptr)

#endif	/* CONFIG_SMP */
269
/* lvalue access to @cpu's instance of per-cpu variable @var */
#define per_cpu(var, cpu)	(*per_cpu_ptr(&(var), cpu))

/*
 * get_cpu_var()/put_cpu_var() bracket access to this CPU's instance of
 * @var with preemption disabled, so the task cannot migrate to another
 * CPU in between.  get_cpu_var() yields an lvalue.
 */
#define get_cpu_var(var)						\
(*({									\
	preempt_disable();						\
	this_cpu_ptr(&var);						\
}))

/*
 * (void)&(var) only references the argument (keeps it checked/used);
 * the real work is re-enabling the preemption disabled in get_cpu_var().
 */
#define put_cpu_var(var)						\
do {									\
	(void)&(var);							\
	preempt_enable();						\
} while (0)

/* pointer analogues of get_cpu_var()/put_cpu_var() */
#define get_cpu_ptr(var)						\
({									\
	preempt_disable();						\
	this_cpu_ptr(var);						\
})

#define put_cpu_ptr(var)						\
do {									\
	(void)(var);							\
	preempt_enable();						\
} while (0)
303
304
305
306
307
308
/*
 * Referenced from the default: branches of the size dispatchers below to
 * turn an unsupported per-cpu operand size into an error.
 * NOTE(review): no definition is visible here -- presumably intentionally
 * undefined so the failure surfaces at link time; confirm.
 */
extern void __bad_size_call_parameter(void);

#ifdef CONFIG_DEBUG_PREEMPT
/*
 * Debug hook invoked by the __this_cpu_*() wrappers with the operation
 * name; out of line when CONFIG_DEBUG_PREEMPT, otherwise a no-op.
 */
extern void __this_cpu_preempt_check(const char *op);
#else
static inline void __this_cpu_preempt_check(const char *op) { }
#endif
316
/*
 * Dispatch a value-returning per-cpu operation to the size-specific
 * implementation stem##1/2/4/8 based on sizeof(variable).  Unsupported
 * sizes call __bad_size_call_parameter() (see above).  The variable is
 * first checked for the __percpu annotation.
 */
#define __pcpu_size_call_return(stem, variable)				\
({									\
	typeof(variable) pscr_ret__;					\
	__verify_pcpu_ptr(&(variable));					\
	switch(sizeof(variable)) {					\
	case 1: pscr_ret__ = stem##1(variable); break;			\
	case 2: pscr_ret__ = stem##2(variable); break;			\
	case 4: pscr_ret__ = stem##4(variable); break;			\
	case 8: pscr_ret__ = stem##8(variable); break;			\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
	pscr_ret__;							\
})

/* as __pcpu_size_call_return(), for operations taking extra operands */
#define __pcpu_size_call_return2(stem, variable, ...)			\
({									\
	typeof(variable) pscr2_ret__;					\
	__verify_pcpu_ptr(&(variable));					\
	switch(sizeof(variable)) {					\
	case 1: pscr2_ret__ = stem##1(variable, __VA_ARGS__); break;	\
	case 2: pscr2_ret__ = stem##2(variable, __VA_ARGS__); break;	\
	case 4: pscr2_ret__ = stem##4(variable, __VA_ARGS__); break;	\
	case 8: pscr2_ret__ = stem##8(variable, __VA_ARGS__); break;	\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
	pscr2_ret__;							\
})
346
347
348
349
350
351
352
353
354
/*
 * Size dispatcher for cmpxchg_double: @pcp1 and @pcp2 must be per-cpu
 * variables of equal size, with @pcp2 immediately following @pcp1 in
 * memory and the pair aligned to twice the element size -- enforced by
 * the BUILD_BUG_ON/VM_BUG_ON checks below.  Returns bool (success).
 */
#define __pcpu_double_call_return_bool(stem, pcp1, pcp2, ...)		\
({									\
	bool pdcrb_ret__;						\
	__verify_pcpu_ptr(&(pcp1));					\
	BUILD_BUG_ON(sizeof(pcp1) != sizeof(pcp2));			\
	VM_BUG_ON((unsigned long)(&(pcp1)) % (2 * sizeof(pcp1)));	\
	VM_BUG_ON((unsigned long)(&(pcp2)) !=				\
		  (unsigned long)(&(pcp1)) + sizeof(pcp1));		\
	switch(sizeof(pcp1)) {						\
	case 1: pdcrb_ret__ = stem##1(pcp1, pcp2, __VA_ARGS__); break;	\
	case 2: pdcrb_ret__ = stem##2(pcp1, pcp2, __VA_ARGS__); break;	\
	case 4: pdcrb_ret__ = stem##4(pcp1, pcp2, __VA_ARGS__); break;	\
	case 8: pdcrb_ret__ = stem##8(pcp1, pcp2, __VA_ARGS__); break;	\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
	pdcrb_ret__;							\
})

/* size dispatcher for operations that return nothing */
#define __pcpu_size_call(stem, variable, ...)				\
do {									\
	__verify_pcpu_ptr(&(variable));					\
	switch(sizeof(variable)) {					\
	case 1: stem##1(variable, __VA_ARGS__);break;			\
	case 2: stem##2(variable, __VA_ARGS__);break;			\
	case 4: stem##4(variable, __VA_ARGS__);break;			\
	case 8: stem##8(variable, __VA_ARGS__);break;			\
	default: 							\
		__bad_size_call_parameter();break;			\
	}								\
} while (0)
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
/*
 * raw_cpu_*() operations: no checking that the context is safe against
 * preemption or interrupts.  Unless that is known to be acceptable,
 * use the __this_cpu_*()/this_cpu_*() variants instead.
 */
#define raw_cpu_read(pcp)		__pcpu_size_call_return(raw_cpu_read_, pcp)
#define raw_cpu_write(pcp, val)		__pcpu_size_call(raw_cpu_write_, pcp, val)
#define raw_cpu_add(pcp, val)		__pcpu_size_call(raw_cpu_add_, pcp, val)
#define raw_cpu_and(pcp, val)		__pcpu_size_call(raw_cpu_and_, pcp, val)
#define raw_cpu_or(pcp, val)		__pcpu_size_call(raw_cpu_or_, pcp, val)
#define raw_cpu_add_return(pcp, val)	__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)
#define raw_cpu_xchg(pcp, nval)		__pcpu_size_call_return2(raw_cpu_xchg_, pcp, nval)
#define raw_cpu_cmpxchg(pcp, oval, nval) \
	__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
#define raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)

/* convenience wrappers built on the primitives above */
#define raw_cpu_sub(pcp, val)		raw_cpu_add(pcp, -(val))
#define raw_cpu_inc(pcp)		raw_cpu_add(pcp, 1)
#define raw_cpu_dec(pcp)		raw_cpu_sub(pcp, 1)
/* cast keeps the negation well-defined for unsigned pcp types */
#define raw_cpu_sub_return(pcp, val)	raw_cpu_add_return(pcp, -(typeof(pcp))(val))
#define raw_cpu_inc_return(pcp)		raw_cpu_add_return(pcp, 1)
#define raw_cpu_dec_return(pcp)		raw_cpu_add_return(pcp, -1)
439
440
441
442
443
/*
 * __this_cpu_*() operations: each wrapper first runs
 * __this_cpu_preempt_check() (debug hook, see above; a no-op unless
 * CONFIG_DEBUG_PREEMPT) and then performs the raw_cpu_*() operation.
 * For use in contexts already protected against preemption.
 */
#define __this_cpu_read(pcp)						\
({									\
	__this_cpu_preempt_check("read");				\
	raw_cpu_read(pcp);						\
})

#define __this_cpu_write(pcp, val)					\
({									\
	__this_cpu_preempt_check("write");				\
	raw_cpu_write(pcp, val);					\
})

#define __this_cpu_add(pcp, val)					\
({									\
	__this_cpu_preempt_check("add");				\
	raw_cpu_add(pcp, val);						\
})

#define __this_cpu_and(pcp, val)					\
({									\
	__this_cpu_preempt_check("and");				\
	raw_cpu_and(pcp, val);						\
})

#define __this_cpu_or(pcp, val)						\
({									\
	__this_cpu_preempt_check("or");					\
	raw_cpu_or(pcp, val);						\
})

#define __this_cpu_add_return(pcp, val)					\
({									\
	__this_cpu_preempt_check("add_return");			\
	raw_cpu_add_return(pcp, val);					\
})

#define __this_cpu_xchg(pcp, nval)					\
({									\
	__this_cpu_preempt_check("xchg");				\
	raw_cpu_xchg(pcp, nval);					\
})

#define __this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	__this_cpu_preempt_check("cmpxchg");				\
	raw_cpu_cmpxchg(pcp, oval, nval);				\
})

#define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({	__this_cpu_preempt_check("cmpxchg_double");			\
	raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2);	\
})

/* convenience wrappers; the cast keeps negation sane for unsigned pcp */
#define __this_cpu_sub(pcp, val)	__this_cpu_add(pcp, -(typeof(pcp))(val))
#define __this_cpu_inc(pcp)		__this_cpu_add(pcp, 1)
#define __this_cpu_dec(pcp)		__this_cpu_sub(pcp, 1)
#define __this_cpu_sub_return(pcp, val)	__this_cpu_add_return(pcp, -(typeof(pcp))(val))
#define __this_cpu_inc_return(pcp)	__this_cpu_add_return(pcp, 1)
#define __this_cpu_dec_return(pcp)	__this_cpu_add_return(pcp, -1)
503
504
505
506
507
/*
 * this_cpu_*() operations: dispatch to the this_cpu_*_N() size variants,
 * which provide their own protection -- NOTE(review): presumably each
 * arch/generic implementation makes the op safe against preemption and
 * interrupts; confirm in asm-generic/percpu.h or the arch override.
 */
#define this_cpu_read(pcp)		__pcpu_size_call_return(this_cpu_read_, pcp)
#define this_cpu_write(pcp, val)	__pcpu_size_call(this_cpu_write_, pcp, val)
#define this_cpu_add(pcp, val)		__pcpu_size_call(this_cpu_add_, pcp, val)
#define this_cpu_and(pcp, val)		__pcpu_size_call(this_cpu_and_, pcp, val)
#define this_cpu_or(pcp, val)		__pcpu_size_call(this_cpu_or_, pcp, val)
#define this_cpu_add_return(pcp, val)	__pcpu_size_call_return2(this_cpu_add_return_, pcp, val)
#define this_cpu_xchg(pcp, nval)	__pcpu_size_call_return2(this_cpu_xchg_, pcp, nval)
#define this_cpu_cmpxchg(pcp, oval, nval) \
	__pcpu_size_call_return2(this_cpu_cmpxchg_, pcp, oval, nval)
#define this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	__pcpu_double_call_return_bool(this_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)

/* convenience wrappers; the cast keeps negation sane for unsigned pcp */
#define this_cpu_sub(pcp, val)		this_cpu_add(pcp, -(typeof(pcp))(val))
#define this_cpu_inc(pcp)		this_cpu_add(pcp, 1)
#define this_cpu_dec(pcp)		this_cpu_sub(pcp, 1)
#define this_cpu_sub_return(pcp, val)	this_cpu_add_return(pcp, -(typeof(pcp))(val))
#define this_cpu_inc_return(pcp)	this_cpu_add_return(pcp, 1)
#define this_cpu_dec_return(pcp)	this_cpu_add_return(pcp, -1)
526
#endif /* __ASSEMBLY__ */
#endif /* _LINUX_PERCPU_DEFS_H */
529