1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16#ifndef _LINUX_PERCPU_DEFS_H
17#define _LINUX_PERCPU_DEFS_H
18
19#ifdef CONFIG_SMP
20
21#ifdef MODULE
22#define PER_CPU_SHARED_ALIGNED_SECTION ""
23#define PER_CPU_ALIGNED_SECTION ""
24#else
25#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
26#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
27#endif
28#define PER_CPU_FIRST_SECTION "..first"
29
30#else
31
32#define PER_CPU_SHARED_ALIGNED_SECTION ""
33#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
34#define PER_CPU_FIRST_SECTION ""
35
36#endif
37
38
39
40
41
42
43
44
45
46
47
/*
 * Base attributes for a per-cpu variable: place it in section
 * PER_CPU_BASE_SECTION + @sec and tag it __percpu so sparse can check
 * that it is only accessed through the percpu accessors.
 */
#define __PCPU_ATTRS(sec)						\
	__percpu __attribute__((section(PER_CPU_BASE_SECTION sec)))	\
	PER_CPU_ATTRIBUTES

/*
 * Attributes for the dummy guard symbols used by the weak-percpu
 * machinery below: kept in .discard so they never reach the final image.
 */
#define __PCPU_DUMMY_ATTRS						\
	__attribute__((section(".discard"), unused))
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
#if defined(ARCH_NEEDS_WEAK_PER_CPU) || defined(CONFIG_DEBUG_FORCE_WEAK_PER_CPU)
/*
 * Weak-percpu variant: the real variable is defined __weak, and two
 * discarded dummy char symbols guard against misuse:
 *
 * - __pcpu_scope_##name is declared extern by DECLARE and defined
 *   (non-static) by DEFINE, so mixing static and extern usage of the
 *   same percpu variable is caught at build/link time.
 * - __pcpu_unique_##name is defined strongly by every DEFINE, so two
 *   definitions of the same name still collide at link time even though
 *   the real variables are weak and would otherwise silently merge.
 */
#define DECLARE_PER_CPU_SECTION(type, name, sec)			\
	extern __PCPU_DUMMY_ATTRS char __pcpu_scope_##name;		\
	extern __PCPU_ATTRS(sec) __typeof__(type) name

#define DEFINE_PER_CPU_SECTION(type, name, sec)				\
	__PCPU_DUMMY_ATTRS char __pcpu_scope_##name;			\
	extern __PCPU_DUMMY_ATTRS char __pcpu_unique_##name;		\
	__PCPU_DUMMY_ATTRS char __pcpu_unique_##name;			\
	extern __PCPU_ATTRS(sec) __typeof__(type) name;			\
	__PCPU_ATTRS(sec) PER_CPU_DEF_ATTRIBUTES __weak			\
	__typeof__(type) name
#else
/*
 * Normal declaration/definition of a per-cpu variable in section @sec.
 */
#define DECLARE_PER_CPU_SECTION(type, name, sec)			\
	extern __PCPU_ATTRS(sec) __typeof__(type) name

#define DEFINE_PER_CPU_SECTION(type, name, sec)				\
	__PCPU_ATTRS(sec) PER_CPU_DEF_ATTRIBUTES			\
	__typeof__(type) name
#endif
107
108
109
110
111
/*
 * Variants of the basic per-cpu declaration/definition: plain (no
 * special section suffix) and "first" (placed at the start of the
 * per-cpu area, for variables whose offset must be minimal).
 */
#define DECLARE_PER_CPU(type, name)					\
	DECLARE_PER_CPU_SECTION(type, name, "")

#define DEFINE_PER_CPU(type, name)					\
	DEFINE_PER_CPU_SECTION(type, name, "")

#define DECLARE_PER_CPU_FIRST(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_FIRST_SECTION)

#define DEFINE_PER_CPU_FIRST(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_FIRST_SECTION)
127
128
129
130
131
132
133
134
135
136
137
138
/*
 * Cacheline-aligned variants.  _SHARED_ALIGNED aligns only on SMP
 * (____cacheline_aligned_in_smp) while _ALIGNED aligns unconditionally
 * (____cacheline_aligned); both use the section names selected at the
 * top of this file.
 */
#define DECLARE_PER_CPU_SHARED_ALIGNED(type, name)			\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_SHARED_ALIGNED_SECTION) \
	____cacheline_aligned_in_smp

#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name)			\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_SHARED_ALIGNED_SECTION) \
	____cacheline_aligned_in_smp

#define DECLARE_PER_CPU_ALIGNED(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, PER_CPU_ALIGNED_SECTION)	\
	____cacheline_aligned

#define DEFINE_PER_CPU_ALIGNED(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, PER_CPU_ALIGNED_SECTION)	\
	____cacheline_aligned

/*
 * Declaration/definition used for per-cpu variables that must be
 * page aligned.
 */
#define DECLARE_PER_CPU_PAGE_ALIGNED(type, name)			\
	DECLARE_PER_CPU_SECTION(type, name, "..page_aligned")		\
	__aligned(PAGE_SIZE)

#define DEFINE_PER_CPU_PAGE_ALIGNED(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, "..page_aligned")		\
	__aligned(PAGE_SIZE)

/*
 * Declaration/definition used for per-cpu variables that are
 * read mostly.
 */
#define DECLARE_PER_CPU_READ_MOSTLY(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, "..read_mostly")

#define DEFINE_PER_CPU_READ_MOSTLY(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, "..read_mostly")
174
175
176
177
178
/*
 * Declaration/definition used for per-cpu variables that should be
 * accessed as decrypted when memory encryption (AMD SME/SEV) is
 * enabled; only meaningful with virtualization + AMD_MEM_ENCRYPT,
 * otherwise DEFINE falls back to a plain per-cpu variable.
 */
#if defined(CONFIG_VIRTUALIZATION) && defined(CONFIG_AMD_MEM_ENCRYPT)

#define DECLARE_PER_CPU_DECRYPTED(type, name)				\
	DECLARE_PER_CPU_SECTION(type, name, "..decrypted")

#define DEFINE_PER_CPU_DECRYPTED(type, name)				\
	DEFINE_PER_CPU_SECTION(type, name, "..decrypted")
#else
#define DEFINE_PER_CPU_DECRYPTED(type, name)	DEFINE_PER_CPU(type, name)
#endif
189
190
191
192
193
194
/*
 * Intermodule exports for per-cpu variables.  sparse forgets about
 * address spaces across EXPORT_SYMBOL(), so hide the exports from the
 * checker entirely.
 */
#ifndef __CHECKER__
#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(var)
#else
#define EXPORT_PER_CPU_SYMBOL(var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var)
#endif
202
203
204
205
206#ifndef __ASSEMBLY__
207
208
209
210
211
212
213
214
215
216
217
218
219#define __verify_pcpu_ptr(ptr) \
220do { \
221 const void __percpu *__vpp_verify = (typeof((ptr) + 0))NULL; \
222 (void)__vpp_verify; \
223} while (0)
224
#ifdef CONFIG_SMP

/*
 * Add @__offset to the percpu pointer @__p.  RELOC_HIDE() keeps the
 * compiler from carrying object-identity assumptions across the shift,
 * and the __kernel __force cast drops the __percpu qualifier from the
 * result so the returned pointer can be dereferenced normally.
 */
#define SHIFT_PERCPU_PTR(__p, __offset)					\
	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset))

/* Pointer to @ptr's instance on @cpu. */
#define per_cpu_ptr(ptr, cpu)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	SHIFT_PERCPU_PTR((ptr), per_cpu_offset((cpu)));			\
})

/*
 * Pointer to the current cpu's instance, with no preemption checking;
 * the caller is responsible for keeping the task on this cpu.
 */
#define raw_cpu_ptr(ptr)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	arch_raw_cpu_ptr(ptr);						\
})

#ifdef CONFIG_DEBUG_PREEMPT
/* my_cpu_offset adds a preemption-state check under DEBUG_PREEMPT. */
#define this_cpu_ptr(ptr)						\
({									\
	__verify_pcpu_ptr(ptr);						\
	SHIFT_PERCPU_PTR(ptr, my_cpu_offset);				\
})
#else
#define this_cpu_ptr(ptr) raw_cpu_ptr(ptr)
#endif

#else	/* CONFIG_SMP */

/*
 * UP: there is a single copy of every "percpu" variable, so the
 * accessors only verify the pointer and strip the __percpu qualifier.
 */
#define VERIFY_PERCPU_PTR(__p)						\
({									\
	__verify_pcpu_ptr(__p);						\
	(typeof(*(__p)) __kernel __force *)(__p);			\
})

#define per_cpu_ptr(ptr, cpu)	({ (void)(cpu); VERIFY_PERCPU_PTR(ptr); })
#define raw_cpu_ptr(ptr)	per_cpu_ptr(ptr, 0)
#define this_cpu_ptr(ptr)	raw_cpu_ptr(ptr)

#endif	/* CONFIG_SMP */
270
/* lvalue for @var's instance on @cpu. */
#define per_cpu(var, cpu)	(*per_cpu_ptr(&(var), cpu))

/*
 * get_cpu_var()/put_cpu_var() - access the local cpu's instance of @var
 * with preemption disabled for the duration of the access.  Must be
 * paired; put_cpu_var() re-enables preemption.
 */
#define get_cpu_var(var)						\
(*({									\
	preempt_disable();						\
	this_cpu_ptr(&var);						\
}))

#define put_cpu_var(var)						\
do {									\
	(void)&(var);							\
	preempt_enable();						\
} while (0)

/*
 * get_cpu_ptr()/put_cpu_ptr() - same pairing for a percpu pointer
 * instead of a percpu variable.
 */
#define get_cpu_ptr(var)						\
({									\
	preempt_disable();						\
	this_cpu_ptr(var);						\
})

#define put_cpu_ptr(var)						\
do {									\
	(void)(var);							\
	preempt_enable();						\
} while (0)
304
305
306
307
308
309
/*
 * Referenced by the size-dispatch macros below for operand sizes other
 * than 1, 2, 4 or 8 bytes; intentionally left without a definition so
 * such uses fail at link time.
 */
extern void __bad_size_call_parameter(void);

#ifdef CONFIG_DEBUG_PREEMPT
extern void __this_cpu_preempt_check(const char *op);
#else
static inline void __this_cpu_preempt_check(const char *op) { }
#endif
317
/*
 * Dispatch a value-returning percpu operation to the size-specific
 * implementation stem##1/2/4/8 based on sizeof(@variable).
 */
#define __pcpu_size_call_return(stem, variable)				\
({									\
	typeof(variable) pscr_ret__;					\
	__verify_pcpu_ptr(&(variable));					\
	switch(sizeof(variable)) {					\
	case 1: pscr_ret__ = stem##1(variable); break;			\
	case 2: pscr_ret__ = stem##2(variable); break;			\
	case 4: pscr_ret__ = stem##4(variable); break;			\
	case 8: pscr_ret__ = stem##8(variable); break;			\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
	pscr_ret__;							\
})

/* Same as __pcpu_size_call_return() but with extra arguments. */
#define __pcpu_size_call_return2(stem, variable, ...)			\
({									\
	typeof(variable) pscr2_ret__;					\
	__verify_pcpu_ptr(&(variable));					\
	switch(sizeof(variable)) {					\
	case 1: pscr2_ret__ = stem##1(variable, __VA_ARGS__); break;	\
	case 2: pscr2_ret__ = stem##2(variable, __VA_ARGS__); break;	\
	case 4: pscr2_ret__ = stem##4(variable, __VA_ARGS__); break;	\
	case 8: pscr2_ret__ = stem##8(variable, __VA_ARGS__); break;	\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
	pscr2_ret__;							\
})
347
348
349
350
351
352
353
354
355
/*
 * Dispatch for double-word cmpxchg-style operations on the pair
 * @pcp1/@pcp2.  The two words must be the same size, @pcp1 must be
 * aligned to twice its size and @pcp2 must immediately follow @pcp1 in
 * memory — the VM_BUG_ON()s enforce exactly that layout.
 */
#define __pcpu_double_call_return_bool(stem, pcp1, pcp2, ...)		\
({									\
	bool pdcrb_ret__;						\
	__verify_pcpu_ptr(&(pcp1));					\
	BUILD_BUG_ON(sizeof(pcp1) != sizeof(pcp2));			\
	VM_BUG_ON((unsigned long)(&(pcp1)) % (2 * sizeof(pcp1)));	\
	VM_BUG_ON((unsigned long)(&(pcp2)) !=				\
		  (unsigned long)(&(pcp1)) + sizeof(pcp1));		\
	switch(sizeof(pcp1)) {						\
	case 1: pdcrb_ret__ = stem##1(pcp1, pcp2, __VA_ARGS__); break;	\
	case 2: pdcrb_ret__ = stem##2(pcp1, pcp2, __VA_ARGS__); break;	\
	case 4: pdcrb_ret__ = stem##4(pcp1, pcp2, __VA_ARGS__); break;	\
	case 8: pdcrb_ret__ = stem##8(pcp1, pcp2, __VA_ARGS__); break;	\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
	pdcrb_ret__;							\
})

/* Size dispatch for percpu operations that return nothing. */
#define __pcpu_size_call(stem, variable, ...)				\
do {									\
	__verify_pcpu_ptr(&(variable));					\
	switch(sizeof(variable)) {					\
	case 1: stem##1(variable, __VA_ARGS__);break;			\
	case 2: stem##2(variable, __VA_ARGS__);break;			\
	case 4: stem##4(variable, __VA_ARGS__);break;			\
	case 8: stem##8(variable, __VA_ARGS__);break;			\
	default: 							\
		__bad_size_call_parameter();break;			\
	}								\
} while (0)
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
/*
 * Operations on the current cpu's instance of @pcp with no preemption
 * or interrupt checking; the caller must guarantee the task stays on
 * this cpu (e.g. preemption already disabled).
 */
#define raw_cpu_read(pcp)		__pcpu_size_call_return(raw_cpu_read_, pcp)
#define raw_cpu_write(pcp, val)		__pcpu_size_call(raw_cpu_write_, pcp, val)
#define raw_cpu_add(pcp, val)		__pcpu_size_call(raw_cpu_add_, pcp, val)
#define raw_cpu_and(pcp, val)		__pcpu_size_call(raw_cpu_and_, pcp, val)
#define raw_cpu_or(pcp, val)		__pcpu_size_call(raw_cpu_or_, pcp, val)
#define raw_cpu_add_return(pcp, val)	__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)
#define raw_cpu_xchg(pcp, nval)		__pcpu_size_call_return2(raw_cpu_xchg_, pcp, nval)
#define raw_cpu_cmpxchg(pcp, oval, nval) \
	__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
#define raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)

/* Convenience wrappers built on the primitives above. */
#define raw_cpu_sub(pcp, val)		raw_cpu_add(pcp, -(val))
#define raw_cpu_inc(pcp)		raw_cpu_add(pcp, 1)
#define raw_cpu_dec(pcp)		raw_cpu_sub(pcp, 1)
#define raw_cpu_sub_return(pcp, val)	raw_cpu_add_return(pcp, -(typeof(pcp))(val))
#define raw_cpu_inc_return(pcp)		raw_cpu_add_return(pcp, 1)
#define raw_cpu_dec_return(pcp)		raw_cpu_add_return(pcp, -1)
440
441
442
443
444
/*
 * Operations for contexts where we do not want to do any checks for
 * preemption.  Unless strictly necessary, always use [__]this_cpu_*()
 * instead; these verify (with CONFIG_DEBUG_PREEMPT) that preemption is
 * disabled before delegating to the raw_cpu_*() primitive.
 */
#define __this_cpu_read(pcp)						\
({									\
	__this_cpu_preempt_check("read");				\
	raw_cpu_read(pcp);						\
})

#define __this_cpu_write(pcp, val)					\
({									\
	__this_cpu_preempt_check("write");				\
	raw_cpu_write(pcp, val);					\
})

#define __this_cpu_add(pcp, val)					\
({									\
	__this_cpu_preempt_check("add");				\
	raw_cpu_add(pcp, val);						\
})

#define __this_cpu_and(pcp, val)					\
({									\
	__this_cpu_preempt_check("and");				\
	raw_cpu_and(pcp, val);						\
})

#define __this_cpu_or(pcp, val)						\
({									\
	__this_cpu_preempt_check("or");					\
	raw_cpu_or(pcp, val);						\
})

#define __this_cpu_add_return(pcp, val)					\
({									\
	__this_cpu_preempt_check("add_return");				\
	raw_cpu_add_return(pcp, val);					\
})

#define __this_cpu_xchg(pcp, nval)					\
({									\
	__this_cpu_preempt_check("xchg");				\
	raw_cpu_xchg(pcp, nval);					\
})

#define __this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	__this_cpu_preempt_check("cmpxchg");				\
	raw_cpu_cmpxchg(pcp, oval, nval);				\
})

#define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({	__this_cpu_preempt_check("cmpxchg_double");			\
	raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2);	\
})

/* Convenience wrappers built on the primitives above. */
#define __this_cpu_sub(pcp, val)	__this_cpu_add(pcp, -(typeof(pcp))(val))
#define __this_cpu_inc(pcp)		__this_cpu_add(pcp, 1)
#define __this_cpu_dec(pcp)		__this_cpu_sub(pcp, 1)
#define __this_cpu_sub_return(pcp, val)	__this_cpu_add_return(pcp, -(typeof(pcp))(val))
#define __this_cpu_inc_return(pcp)	__this_cpu_add_return(pcp, 1)
#define __this_cpu_dec_return(pcp)	__this_cpu_add_return(pcp, -1)
504
505
506
507
508
509#define this_cpu_read(pcp) __pcpu_size_call_return(this_cpu_read_, pcp)
510#define this_cpu_write(pcp, val) __pcpu_size_call(this_cpu_write_, pcp, val)
511#define this_cpu_add(pcp, val) __pcpu_size_call(this_cpu_add_, pcp, val)
512#define this_cpu_and(pcp, val) __pcpu_size_call(this_cpu_and_, pcp, val)
513#define this_cpu_or(pcp, val) __pcpu_size_call(this_cpu_or_, pcp, val)
514#define this_cpu_add_return(pcp, val) __pcpu_size_call_return2(this_cpu_add_return_, pcp, val)
515#define this_cpu_xchg(pcp, nval) __pcpu_size_call_return2(this_cpu_xchg_, pcp, nval)
516#define this_cpu_cmpxchg(pcp, oval, nval) \
517 __pcpu_size_call_return2(this_cpu_cmpxchg_, pcp, oval, nval)
518#define this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
519 __pcpu_double_call_return_bool(this_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)
520
521#define this_cpu_sub(pcp, val) this_cpu_add(pcp, -(typeof(pcp))(val))
522#define this_cpu_inc(pcp) this_cpu_add(pcp, 1)
523#define this_cpu_dec(pcp) this_cpu_sub(pcp, 1)
524#define this_cpu_sub_return(pcp, val) this_cpu_add_return(pcp, -(typeof(pcp))(val))
525#define this_cpu_inc_return(pcp) this_cpu_add_return(pcp, 1)
526#define this_cpu_dec_return(pcp) this_cpu_add_return(pcp, -1)
527
528#endif
529#endif
530