#ifndef _ASM_X86_PERCPU_H
#define _ASM_X86_PERCPU_H

#ifdef CONFIG_X86_64
#define __percpu_seg		gs
#define __percpu_mov_op		movq
#else
#define __percpu_seg		fs
#define __percpu_mov_op		movl
#endif

#ifdef __ASSEMBLY__

/*
 * PER_CPU(var, reg) computes the address of this CPU's instance of the
 * per-cpu variable "var" and leaves it in "reg" (a scratch register is
 * needed on SMP).  PER_CPU_VAR(var) yields a segment-prefixed memory
 * operand for "var" that can be used directly in an instruction.
 */
#ifdef CONFIG_SMP
#define PER_CPU(var, reg)						\
	__percpu_mov_op %__percpu_seg:this_cpu_off, reg;		\
	lea var(reg), reg
#define PER_CPU_VAR(var)	%__percpu_seg:var
#else /* !SMP */
#define PER_CPU(var, reg)	__percpu_mov_op $var, reg
#define PER_CPU_VAR(var)	var
#endif	/* SMP */
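
/*
 * Illustrative use from assembly code (not taken from this header; the
 * per-cpu variable name is hypothetical):
 *
 *	movq	PER_CPU_VAR(example_var), %rax	# load this CPU's copy
 *	PER_CPU(example_var, %rcx)		# %rcx = &example_var for this CPU
 */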

#ifdef CONFIG_X86_64_SMP
#define INIT_PER_CPU_VAR(var)	init_per_cpu__##var
#else
#define INIT_PER_CPU_VAR(var)	var
#endif

#else /* !__ASSEMBLY__ */

#include <linux/kernel.h>
#include <linux/stringify.h>

#ifdef CONFIG_SMP
#define __percpu_prefix		"%%"__stringify(__percpu_seg)":"
#define __my_cpu_offset		this_cpu_read(this_cpu_off)

/*
 * Compared to generating the pointer arithmetic separately, adding
 * this_cpu_off inside a single asm() saves an instruction and avoids
 * clobbering a temporary register.
 */
#define __this_cpu_ptr(ptr)					\
({								\
	unsigned long tcp_ptr__;				\
	__verify_pcpu_ptr(ptr);					\
	asm volatile("add " __percpu_arg(1) ", %0"		\
		     : "=r" (tcp_ptr__)				\
		     : "m" (this_cpu_off), "0" (ptr));		\
	(typeof(*(ptr)) __kernel __force *)tcp_ptr__;		\
})
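
/*
 * Illustrative use (not from this header; "example_count" stands for any
 * DEFINE_PER_CPU(int, example_count) variable):
 *
 *	int *p = __this_cpu_ptr(&example_count);
 *
 * p then points at the current CPU's instance of example_count.
 */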
#else
#define __percpu_prefix		""
#endif

#define __percpu_arg(x)		__percpu_prefix "%P" #x

/*
 * Initialized per-cpu variables that must be addressable on the boot
 * processor, before the per-cpu areas are set up, are accessed through
 * the init_per_cpu__ prefixed symbols on 64-bit SMP.
 */
#define DECLARE_INIT_PER_CPU(var) \
	extern typeof(var) init_per_cpu_var(var)

#ifdef CONFIG_X86_64_SMP
#define init_per_cpu_var(var)	init_per_cpu__##var
#else
#define init_per_cpu_var(var)	var
#endif

/*
 * For arch-specific code we can use these direct single-instruction
 * operations (they don't give an lvalue though).
 *
 * __bad_percpu_size() is deliberately left undefined: reaching the
 * default: case below for an unsupported operand size turns into a
 * link-time error.  The "if (0)" arms only type-check "val" against the
 * variable's type and generate no code.
 */
extern void __bad_percpu_size(void);

#define percpu_to_op(op, var, val)			\
do {							\
	typedef typeof(var) pto_T__;			\
	if (0) {					\
		pto_T__ pto_tmp__;			\
		pto_tmp__ = (val);			\
		(void)pto_tmp__;			\
	}						\
	switch (sizeof(var)) {				\
	case 1:						\
		asm(op "b %1,"__percpu_arg(0)		\
		    : "+m" (var)			\
		    : "qi" ((pto_T__)(val)));		\
		break;					\
	case 2:						\
		asm(op "w %1,"__percpu_arg(0)		\
		    : "+m" (var)			\
		    : "ri" ((pto_T__)(val)));		\
		break;					\
	case 4:						\
		asm(op "l %1,"__percpu_arg(0)		\
		    : "+m" (var)			\
		    : "ri" ((pto_T__)(val)));		\
		break;					\
	case 8:						\
		asm(op "q %1,"__percpu_arg(0)		\
		    : "+m" (var)			\
		    : "re" ((pto_T__)(val)));		\
		break;					\
	default: __bad_percpu_size();			\
	}						\
} while (0)
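
/*
 * Rough illustration (not part of the original header; the variable name
 * is hypothetical): on 64-bit SMP, for a 4-byte per-cpu variable,
 * percpu_to_op("mov", example_var, 5) compiles down to a single store
 * relative to the per-cpu segment base, roughly
 *
 *	movl $5, %gs:example_var
 */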

/*
 * Generate a per-cpu add-to-memory instruction, and optimize the code
 * when 1 is added or subtracted.
 */
#define percpu_add_op(var, val)						\
do {									\
	typedef typeof(var) pao_T__;					\
	const int pao_ID__ = (__builtin_constant_p(val) &&		\
			      ((val) == 1 || (val) == -1)) ? (val) : 0;	\
	if (0) {							\
		pao_T__ pao_tmp__;					\
		pao_tmp__ = (val);					\
		(void)pao_tmp__;					\
	}								\
	switch (sizeof(var)) {						\
	case 1:								\
		if (pao_ID__ == 1)					\
			asm("incb "__percpu_arg(0) : "+m" (var));	\
		else if (pao_ID__ == -1)				\
			asm("decb "__percpu_arg(0) : "+m" (var));	\
		else							\
			asm("addb %1, "__percpu_arg(0)			\
			    : "+m" (var)				\
			    : "qi" ((pao_T__)(val)));			\
		break;							\
	case 2:								\
		if (pao_ID__ == 1)					\
			asm("incw "__percpu_arg(0) : "+m" (var));	\
		else if (pao_ID__ == -1)				\
			asm("decw "__percpu_arg(0) : "+m" (var));	\
		else							\
			asm("addw %1, "__percpu_arg(0)			\
			    : "+m" (var)				\
			    : "ri" ((pao_T__)(val)));			\
		break;							\
	case 4:								\
		if (pao_ID__ == 1)					\
			asm("incl "__percpu_arg(0) : "+m" (var));	\
		else if (pao_ID__ == -1)				\
			asm("decl "__percpu_arg(0) : "+m" (var));	\
		else							\
			asm("addl %1, "__percpu_arg(0)			\
			    : "+m" (var)				\
			    : "ri" ((pao_T__)(val)));			\
		break;							\
	case 8:								\
		if (pao_ID__ == 1)					\
			asm("incq "__percpu_arg(0) : "+m" (var));	\
		else if (pao_ID__ == -1)				\
			asm("decq "__percpu_arg(0) : "+m" (var));	\
		else							\
			asm("addq %1, "__percpu_arg(0)			\
			    : "+m" (var)				\
			    : "re" ((pao_T__)(val)));			\
		break;							\
	default: __bad_percpu_size();					\
	}								\
} while (0)
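
/*
 * Illustration only (variable name is hypothetical): with a compile-time
 * constant of 1 or -1, pao_ID__ selects the shorter inc/dec encodings,
 * so for a 4-byte counter on 64-bit SMP:
 *
 *	percpu_add_op(example_counter, 1);	-> incl %gs:example_counter
 *	percpu_add_op(example_counter, -1);	-> decl %gs:example_counter
 *	percpu_add_op(example_counter, 7);	-> addl $7, %gs:example_counter
 */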

#define percpu_from_op(op, var, constraint)		\
({							\
	typeof(var) pfo_ret__;				\
	switch (sizeof(var)) {				\
	case 1:						\
		asm(op "b "__percpu_arg(1)",%0"		\
		    : "=q" (pfo_ret__)			\
		    : constraint);			\
		break;					\
	case 2:						\
		asm(op "w "__percpu_arg(1)",%0"		\
		    : "=r" (pfo_ret__)			\
		    : constraint);			\
		break;					\
	case 4:						\
		asm(op "l "__percpu_arg(1)",%0"		\
		    : "=r" (pfo_ret__)			\
		    : constraint);			\
		break;					\
	case 8:						\
		asm(op "q "__percpu_arg(1)",%0"		\
		    : "=r" (pfo_ret__)			\
		    : constraint);			\
		break;					\
	default: __bad_percpu_size();			\
	}						\
	pfo_ret__;					\
})

#define percpu_unary_op(op, var)			\
({							\
	switch (sizeof(var)) {				\
	case 1:						\
		asm(op "b "__percpu_arg(0)		\
		    : "+m" (var));			\
		break;					\
	case 2:						\
		asm(op "w "__percpu_arg(0)		\
		    : "+m" (var));			\
		break;					\
	case 4:						\
		asm(op "l "__percpu_arg(0)		\
		    : "+m" (var));			\
		break;					\
	case 8:						\
		asm(op "q "__percpu_arg(0)		\
		    : "+m" (var));			\
		break;					\
	default: __bad_percpu_size();			\
	}						\
})

/*
 * Add-return operation: xadd leaves the old value in the register, so
 * adding "val" once more yields the new value to return.
 */
#define percpu_add_return_op(var, val)				\
({								\
	typeof(var) paro_ret__ = val;				\
	switch (sizeof(var)) {					\
	case 1:							\
		asm("xaddb %0, "__percpu_arg(1)			\
		    : "+q" (paro_ret__), "+m" (var)		\
		    : : "memory");				\
		break;						\
	case 2:							\
		asm("xaddw %0, "__percpu_arg(1)			\
		    : "+r" (paro_ret__), "+m" (var)		\
		    : : "memory");				\
		break;						\
	case 4:							\
		asm("xaddl %0, "__percpu_arg(1)			\
		    : "+r" (paro_ret__), "+m" (var)		\
		    : : "memory");				\
		break;						\
	case 8:							\
		asm("xaddq %0, "__percpu_arg(1)			\
		    : "+re" (paro_ret__), "+m" (var)		\
		    : : "memory");				\
		break;						\
	default: __bad_percpu_size();				\
	}							\
	paro_ret__ += val;					\
	paro_ret__;						\
})
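
/*
 * Illustrative only (the generic wrapper comes from <linux/percpu.h>;
 * the variable is hypothetical): this is what backs e.g.
 *
 *	new = this_cpu_add_return(example_counter, 1);
 *
 * which returns the value of example_counter *after* the addition, in
 * line with the generic this_cpu_add_return() semantics.
 */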

/*
 * xchg is implemented using cmpxchg without a lock prefix.  A real xchg
 * is expensive due to its implied lock prefix, and the processor cannot
 * prefetch cachelines when xchg is used.
 */
#define percpu_xchg_op(var, nval)					\
({									\
	typeof(var) pxo_ret__;						\
	typeof(var) pxo_new__ = (nval);					\
	switch (sizeof(var)) {						\
	case 1:								\
		asm("\n\tmov "__percpu_arg(1)",%%al"			\
		    "\n1:\tcmpxchgb %2, "__percpu_arg(1)		\
		    "\n\tjnz 1b"					\
		    : "=&a" (pxo_ret__), "+m" (var)			\
		    : "q" (pxo_new__)					\
		    : "memory");					\
		break;							\
	case 2:								\
		asm("\n\tmov "__percpu_arg(1)",%%ax"			\
		    "\n1:\tcmpxchgw %2, "__percpu_arg(1)		\
		    "\n\tjnz 1b"					\
		    : "=&a" (pxo_ret__), "+m" (var)			\
		    : "r" (pxo_new__)					\
		    : "memory");					\
		break;							\
	case 4:								\
		asm("\n\tmov "__percpu_arg(1)",%%eax"			\
		    "\n1:\tcmpxchgl %2, "__percpu_arg(1)		\
		    "\n\tjnz 1b"					\
		    : "=&a" (pxo_ret__), "+m" (var)			\
		    : "r" (pxo_new__)					\
		    : "memory");					\
		break;							\
	case 8:								\
		asm("\n\tmov "__percpu_arg(1)",%%rax"			\
		    "\n1:\tcmpxchgq %2, "__percpu_arg(1)		\
		    "\n\tjnz 1b"					\
		    : "=&a" (pxo_ret__), "+m" (var)			\
		    : "r" (pxo_new__)					\
		    : "memory");					\
		break;							\
	default: __bad_percpu_size();					\
	}								\
	pxo_ret__;							\
})

/*
 * cmpxchg has no such implied lock semantics, so it is much more
 * efficient for cpu-local operations.
 */
#define percpu_cmpxchg_op(var, oval, nval)				\
({									\
	typeof(var) pco_ret__;						\
	typeof(var) pco_old__ = (oval);					\
	typeof(var) pco_new__ = (nval);					\
	switch (sizeof(var)) {						\
	case 1:								\
		asm("cmpxchgb %2, "__percpu_arg(1)			\
		    : "=a" (pco_ret__), "+m" (var)			\
		    : "q" (pco_new__), "0" (pco_old__)			\
		    : "memory");					\
		break;							\
	case 2:								\
		asm("cmpxchgw %2, "__percpu_arg(1)			\
		    : "=a" (pco_ret__), "+m" (var)			\
		    : "r" (pco_new__), "0" (pco_old__)			\
		    : "memory");					\
		break;							\
	case 4:								\
		asm("cmpxchgl %2, "__percpu_arg(1)			\
		    : "=a" (pco_ret__), "+m" (var)			\
		    : "r" (pco_new__), "0" (pco_old__)			\
		    : "memory");					\
		break;							\
	case 8:								\
		asm("cmpxchgq %2, "__percpu_arg(1)			\
		    : "=a" (pco_ret__), "+m" (var)			\
		    : "r" (pco_new__), "0" (pco_old__)			\
		    : "memory");					\
		break;							\
	default: __bad_percpu_size();					\
	}								\
	pco_ret__;							\
})
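
/*
 * Illustrative use (the generic wrapper and the variable are assumed,
 * not defined here):
 *
 *	old = this_cpu_cmpxchg(example_state, OLD, NEW);
 *
 * lands here as percpu_cmpxchg_op(example_state, OLD, NEW) and returns
 * the value found in example_state, whether or not the exchange took
 * place.
 */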

/*
 * this_cpu_read() makes gcc load the percpu variable every time it is
 * accessed, while this_cpu_read_stable() allows the value to be cached.
 * this_cpu_read_stable() is more efficient and can be used if the value
 * is guaranteed to be valid across cpus.  Typical users are per-thread
 * values implemented as per-cpu variables, which are stable for the
 * duration of the respective task.
 */
#define this_cpu_read_stable(var)	percpu_from_op("mov", var, "p" (&(var)))
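
/*
 * Sketch of the intended usage pattern (current_task is the canonical
 * example; whether it is wired up this way depends on the rest of the
 * tree):
 *
 *	struct task_struct *p = this_cpu_read_stable(current_task);
 *
 * The "p" (&(var)) input hands gcc only the address, so repeated reads
 * may be reused instead of reloading from the segment each time.
 */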

/*
 * A single segment-relative read-modify-write instruction cannot be
 * split by preemption or an interrupt on the local CPU, so the
 * __this_cpu_* and this_cpu_* variants can share the same
 * implementation.
 */
#define __this_cpu_read_1(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
#define __this_cpu_read_2(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
#define __this_cpu_read_4(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))

#define __this_cpu_write_1(pcp, val)	percpu_to_op("mov", (pcp), val)
#define __this_cpu_write_2(pcp, val)	percpu_to_op("mov", (pcp), val)
#define __this_cpu_write_4(pcp, val)	percpu_to_op("mov", (pcp), val)
#define __this_cpu_add_1(pcp, val)	percpu_add_op((pcp), val)
#define __this_cpu_add_2(pcp, val)	percpu_add_op((pcp), val)
#define __this_cpu_add_4(pcp, val)	percpu_add_op((pcp), val)
#define __this_cpu_and_1(pcp, val)	percpu_to_op("and", (pcp), val)
#define __this_cpu_and_2(pcp, val)	percpu_to_op("and", (pcp), val)
#define __this_cpu_and_4(pcp, val)	percpu_to_op("and", (pcp), val)
#define __this_cpu_or_1(pcp, val)	percpu_to_op("or", (pcp), val)
#define __this_cpu_or_2(pcp, val)	percpu_to_op("or", (pcp), val)
#define __this_cpu_or_4(pcp, val)	percpu_to_op("or", (pcp), val)
#define __this_cpu_xor_1(pcp, val)	percpu_to_op("xor", (pcp), val)
#define __this_cpu_xor_2(pcp, val)	percpu_to_op("xor", (pcp), val)
#define __this_cpu_xor_4(pcp, val)	percpu_to_op("xor", (pcp), val)
#define __this_cpu_xchg_1(pcp, val)	percpu_xchg_op(pcp, val)
#define __this_cpu_xchg_2(pcp, val)	percpu_xchg_op(pcp, val)
#define __this_cpu_xchg_4(pcp, val)	percpu_xchg_op(pcp, val)

#define this_cpu_read_1(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
#define this_cpu_read_2(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
#define this_cpu_read_4(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
#define this_cpu_write_1(pcp, val)	percpu_to_op("mov", (pcp), val)
#define this_cpu_write_2(pcp, val)	percpu_to_op("mov", (pcp), val)
#define this_cpu_write_4(pcp, val)	percpu_to_op("mov", (pcp), val)
#define this_cpu_add_1(pcp, val)	percpu_add_op((pcp), val)
#define this_cpu_add_2(pcp, val)	percpu_add_op((pcp), val)
#define this_cpu_add_4(pcp, val)	percpu_add_op((pcp), val)
#define this_cpu_and_1(pcp, val)	percpu_to_op("and", (pcp), val)
#define this_cpu_and_2(pcp, val)	percpu_to_op("and", (pcp), val)
#define this_cpu_and_4(pcp, val)	percpu_to_op("and", (pcp), val)
#define this_cpu_or_1(pcp, val)		percpu_to_op("or", (pcp), val)
#define this_cpu_or_2(pcp, val)		percpu_to_op("or", (pcp), val)
#define this_cpu_or_4(pcp, val)		percpu_to_op("or", (pcp), val)
#define this_cpu_xor_1(pcp, val)	percpu_to_op("xor", (pcp), val)
#define this_cpu_xor_2(pcp, val)	percpu_to_op("xor", (pcp), val)
#define this_cpu_xor_4(pcp, val)	percpu_to_op("xor", (pcp), val)
#define this_cpu_xchg_1(pcp, nval)	percpu_xchg_op(pcp, nval)
#define this_cpu_xchg_2(pcp, nval)	percpu_xchg_op(pcp, nval)
#define this_cpu_xchg_4(pcp, nval)	percpu_xchg_op(pcp, nval)

#ifndef CONFIG_M386		/* the 80386 lacks cmpxchg and xadd */
#define __this_cpu_add_return_1(pcp, val)	percpu_add_return_op(pcp, val)
#define __this_cpu_add_return_2(pcp, val)	percpu_add_return_op(pcp, val)
#define __this_cpu_add_return_4(pcp, val)	percpu_add_return_op(pcp, val)
#define __this_cpu_cmpxchg_1(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
#define __this_cpu_cmpxchg_2(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
#define __this_cpu_cmpxchg_4(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)

#define this_cpu_add_return_1(pcp, val)		percpu_add_return_op(pcp, val)
#define this_cpu_add_return_2(pcp, val)		percpu_add_return_op(pcp, val)
#define this_cpu_add_return_4(pcp, val)		percpu_add_return_op(pcp, val)
#define this_cpu_cmpxchg_1(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)

#endif /* !CONFIG_M386 */

#ifdef CONFIG_X86_CMPXCHG64
#define percpu_cmpxchg8b_double(pcp1, pcp2, o1, o2, n1, n2)		\
({									\
	bool __ret;							\
	typeof(pcp1) __o1 = (o1), __n1 = (n1);				\
	typeof(pcp2) __o2 = (o2), __n2 = (n2);				\
	asm volatile("cmpxchg8b "__percpu_arg(1)"\n\tsetz %0\n\t"	\
		    : "=a" (__ret), "+m" (pcp1), "+m" (pcp2), "+d" (__o2) \
		    : "b" (__n1), "c" (__n2), "a" (__o1));		\
	__ret;								\
})

#define __this_cpu_cmpxchg_double_4	percpu_cmpxchg8b_double
#define this_cpu_cmpxchg_double_4	percpu_cmpxchg8b_double
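
/*
 * Sketch of the calling convention (via the generic
 * this_cpu_cmpxchg_double() wrapper; the names are illustrative):
 * pcp1 and pcp2 must be two adjacent per-cpu words forming one
 * naturally aligned double-word, e.g.
 *
 *	ok = this_cpu_cmpxchg_double(s.word0, s.word1,
 *				     old0, old1, new0, new1);
 *
 * The result is true if both words matched and were replaced.
 */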
#endif /* CONFIG_X86_CMPXCHG64 */

/*
 * Per-cpu atomic 64-bit operations are only available under 64-bit.
 * 32-bit must fall back to generic operations.
 */
#ifdef CONFIG_X86_64
#define __this_cpu_read_8(pcp)			percpu_from_op("mov", (pcp), "m"(pcp))
#define __this_cpu_write_8(pcp, val)		percpu_to_op("mov", (pcp), val)
#define __this_cpu_add_8(pcp, val)		percpu_add_op((pcp), val)
#define __this_cpu_and_8(pcp, val)		percpu_to_op("and", (pcp), val)
#define __this_cpu_or_8(pcp, val)		percpu_to_op("or", (pcp), val)
#define __this_cpu_xor_8(pcp, val)		percpu_to_op("xor", (pcp), val)
#define __this_cpu_add_return_8(pcp, val)	percpu_add_return_op(pcp, val)
#define __this_cpu_xchg_8(pcp, nval)		percpu_xchg_op(pcp, nval)
#define __this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)

#define this_cpu_read_8(pcp)			percpu_from_op("mov", (pcp), "m"(pcp))
#define this_cpu_write_8(pcp, val)		percpu_to_op("mov", (pcp), val)
#define this_cpu_add_8(pcp, val)		percpu_add_op((pcp), val)
#define this_cpu_and_8(pcp, val)		percpu_to_op("and", (pcp), val)
#define this_cpu_or_8(pcp, val)			percpu_to_op("or", (pcp), val)
#define this_cpu_xor_8(pcp, val)		percpu_to_op("xor", (pcp), val)
#define this_cpu_add_return_8(pcp, val)		percpu_add_return_op(pcp, val)
#define this_cpu_xchg_8(pcp, nval)		percpu_xchg_op(pcp, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)

/*
 * Fairly complex macro to generate a cmpxchg16b instruction.  The
 * instruction is not supported on early AMD64 processors
 * (X86_FEATURE_CX16 absent), so we must be able to emulate it in
 * software via this_cpu_cmpxchg16b_emu.  The memory operand of
 * cmpxchg16b must be aligned to a 16-byte boundary.
 */
#define percpu_cmpxchg16b_double(pcp1, pcp2, o1, o2, n1, n2)		\
({									\
	bool __ret;							\
	typeof(pcp1) __o1 = (o1), __n1 = (n1);				\
	typeof(pcp2) __o2 = (o2), __n2 = (n2);				\
	alternative_io("leaq %P1,%%rsi\n\tcall this_cpu_cmpxchg16b_emu\n\t", \
		       "cmpxchg16b " __percpu_arg(1) "\n\tsetz %0\n\t",	\
		       X86_FEATURE_CX16,				\
		       ASM_OUTPUT2("=a" (__ret), "+m" (pcp1),		\
				   "+m" (pcp2), "+d" (__o2)),		\
		       "b" (__n1), "c" (__n2), "a" (__o1) : "rsi");	\
	__ret;								\
})

#define __this_cpu_cmpxchg_double_8	percpu_cmpxchg16b_double
#define this_cpu_cmpxchg_double_8	percpu_cmpxchg16b_double

#endif /* CONFIG_X86_64 */

/* Not atomic against other CPUs -- the caller must keep preemption disabled. */
#define x86_test_and_clear_bit_percpu(bit, var)				\
({									\
	int old__;							\
	asm volatile("btr %2,"__percpu_arg(1)"\n\tsbbl %0,%0"		\
		     : "=r" (old__), "+m" (var)				\
		     : "dIr" (bit));					\
	old__;								\
})

static __always_inline int x86_this_cpu_constant_test_bit(unsigned int nr,
			const unsigned long __percpu *addr)
{
	unsigned long __percpu *a = (unsigned long *)addr + nr / BITS_PER_LONG;

#ifdef CONFIG_X86_64
	return ((1UL << (nr % BITS_PER_LONG)) & __this_cpu_read_8(*a)) != 0;
#else
	return ((1UL << (nr % BITS_PER_LONG)) & __this_cpu_read_4(*a)) != 0;
#endif
}

static inline int x86_this_cpu_variable_test_bit(int nr,
			const unsigned long __percpu *addr)
{
	int oldbit;

	asm volatile("bt "__percpu_arg(2)",%1\n\t"
			"sbb %0,%0"
			: "=r" (oldbit)
			: "m" (*(unsigned long *)addr), "Ir" (nr));

	return oldbit;
}

#define x86_this_cpu_test_bit(nr, addr)			\
	(__builtin_constant_p((nr))			\
	 ? x86_this_cpu_constant_test_bit((nr), (addr))	\
	 : x86_this_cpu_variable_test_bit((nr), (addr)))
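
/*
 * Illustrative use (both names below are hypothetical):
 *
 *	DEFINE_PER_CPU(unsigned long, example_flags);
 *	...
 *	if (x86_this_cpu_test_bit(EXAMPLE_BIT, &example_flags))
 *		...
 *
 * With a constant bit number the test compiles to a plain read and
 * mask; otherwise a "bt" instruction is used.
 */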

#include <asm-generic/percpu.h>

/* Offset of this CPU's per-cpu area; used directly for the local CPU (faster). */
DECLARE_PER_CPU(unsigned long, this_cpu_off);

#endif /* !__ASSEMBLY__ */

#ifdef CONFIG_SMP

/*
 * The "EARLY_PER_CPU" macros are used for per-cpu variables that are
 * initialized and accessed before the per-cpu areas are allocated.
 * Until then, accesses go through a per-CPU __initdata array via the
 * _early_ptr pointer.
 */
#define	DEFINE_EARLY_PER_CPU(_type, _name, _initvalue)			\
	DEFINE_PER_CPU(_type, _name) = _initvalue;			\
	__typeof__(_type) _name##_early_map[NR_CPUS] __initdata =	\
				{ [0 ... NR_CPUS-1] = _initvalue };	\
	__typeof__(_type) *_name##_early_ptr __refdata = _name##_early_map

#define DEFINE_EARLY_PER_CPU_READ_MOSTLY(_type, _name, _initvalue)	\
	DEFINE_PER_CPU_READ_MOSTLY(_type, _name) = _initvalue;		\
	__typeof__(_type) _name##_early_map[NR_CPUS] __initdata =	\
				{ [0 ... NR_CPUS-1] = _initvalue };	\
	__typeof__(_type) *_name##_early_ptr __refdata = _name##_early_map

#define EXPORT_EARLY_PER_CPU_SYMBOL(_name)			\
	EXPORT_PER_CPU_SYMBOL(_name)

#define DECLARE_EARLY_PER_CPU(_type, _name)			\
	DECLARE_PER_CPU(_type, _name);				\
	extern __typeof__(_type) *_name##_early_ptr;		\
	extern __typeof__(_type)  _name##_early_map[]

#define DECLARE_EARLY_PER_CPU_READ_MOSTLY(_type, _name)		\
	DECLARE_PER_CPU_READ_MOSTLY(_type, _name);		\
	extern __typeof__(_type) *_name##_early_ptr;		\
	extern __typeof__(_type)  _name##_early_map[]

#define	early_per_cpu_ptr(_name) (_name##_early_ptr)
#define	early_per_cpu_map(_name, _idx) (_name##_early_map[_idx])
#define	early_per_cpu(_name, _cpu)				\
	*(early_per_cpu_ptr(_name) ?				\
		&early_per_cpu_ptr(_name)[_cpu] :		\
		&per_cpu(_name, _cpu))
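
/*
 * Illustrative flow (names are placeholders, not definitions from this
 * header): the definition site provides both the per-cpu variable and
 * an __initdata array indexed by CPU,
 *
 *	DEFINE_EARLY_PER_CPU_READ_MOSTLY(u16, example_map, 0);
 *
 * and early_per_cpu(example_map, cpu) transparently uses the array for
 * as long as the _early_ptr is non-NULL, switching to the real per-cpu
 * instance once that pointer is cleared.
 */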

#else	/* !CONFIG_SMP */
#define	DEFINE_EARLY_PER_CPU(_type, _name, _initvalue)		\
	DEFINE_PER_CPU(_type, _name) = _initvalue

#define DEFINE_EARLY_PER_CPU_READ_MOSTLY(_type, _name, _initvalue)	\
	DEFINE_PER_CPU_READ_MOSTLY(_type, _name) = _initvalue

#define EXPORT_EARLY_PER_CPU_SYMBOL(_name)			\
	EXPORT_PER_CPU_SYMBOL(_name)

#define DECLARE_EARLY_PER_CPU(_type, _name)			\
	DECLARE_PER_CPU(_type, _name)

#define DECLARE_EARLY_PER_CPU_READ_MOSTLY(_type, _name)		\
	DECLARE_PER_CPU_READ_MOSTLY(_type, _name)

#define	early_per_cpu(_name, _cpu) per_cpu(_name, _cpu)
#define	early_per_cpu_ptr(_name) NULL
/* no early_per_cpu_map() in the !SMP case */

#endif	/* !CONFIG_SMP */

#endif /* _ASM_X86_PERCPU_H */