#ifndef __LINUX_CPUMASK_H
#define __LINUX_CPUMASK_H

/*
 * Cpumasks provide a bitmap suitable for representing the set of CPUs in a
 * system, one bit position per CPU number.  In general, only nr_cpu_ids
 * (<= NR_CPUS) bits are valid.
 */
#include <linux/kernel.h>
#include <linux/threads.h>
#include <linux/bitmap.h>
#include <linux/atomic.h>
#include <linux/bug.h>

/* Don't assign or return these: may not be this big! */
typedef struct cpumask { DECLARE_BITMAP(bits, NR_CPUS); } cpumask_t;

/**
 * cpumask_bits - get the bits in a cpumask
 * @maskp: the struct cpumask *
 *
 * You should only assume nr_cpu_ids bits of this mask are valid.  This is
 * used primarily to pass cpumasks to the bitmap_* operations.
 */
#define cpumask_bits(maskp) ((maskp)->bits)

/**
 * cpumask_pr_args - printf args to output a cpumask
 * @maskp: cpumask to be printed
 *
 * Can be used to provide arguments for '%*pb[l]' when printing a cpumask.
 */
#define cpumask_pr_args(maskp) nr_cpu_ids, cpumask_bits(maskp)
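
/*
 * Example (illustrative, not part of this header's API): printing a cpumask
 * with printk()/pr_info() using the '%*pb' (hex) or '%*pbl' (CPU list)
 * format specifiers:
 *
 *	pr_info("enabled cpus: %*pbl\n", cpumask_pr_args(cpu_online_mask));
 */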

#if NR_CPUS == 1
#define nr_cpu_ids 1U
#else
extern unsigned int nr_cpu_ids;
#endif

#ifdef CONFIG_CPUMASK_OFFSTACK
/*
 * Assuming NR_CPUS is huge, a runtime limit is more efficient.  Also,
 * not all bits may be allocated.
 */
#define nr_cpumask_bits nr_cpu_ids
#else
#define nr_cpumask_bits ((unsigned int)NR_CPUS)
#endif

/*
 * The following particular system cpumasks and operations manage
 * possible, present, active and online cpus.
 *
 *     cpu_possible_mask - has bit 'cpu' set iff cpu is populatable
 *     cpu_present_mask  - has bit 'cpu' set iff cpu is populated
 *     cpu_online_mask   - has bit 'cpu' set iff cpu available to scheduler
 *     cpu_active_mask   - has bit 'cpu' set iff cpu available to migration
 *
 * If !CONFIG_HOTPLUG_CPU, present == possible, and active == online.
 *
 * The cpu_possible_mask is fixed at boot time, as the set of CPU IDs
 * that could ever be plugged in during the life of that system boot.
 * The cpu_present_mask is dynamic, representing which CPUs are
 * currently plugged in.  The cpu_online_mask is the dynamic subset of
 * cpu_present_mask, indicating those CPUs available for scheduling,
 * while cpu_active_mask tells whether a CPU may be used as a migration
 * target.
 */
extern struct cpumask __cpu_possible_mask;
extern struct cpumask __cpu_online_mask;
extern struct cpumask __cpu_present_mask;
extern struct cpumask __cpu_active_mask;
#define cpu_possible_mask ((const struct cpumask *)&__cpu_possible_mask)
#define cpu_online_mask   ((const struct cpumask *)&__cpu_online_mask)
#define cpu_present_mask  ((const struct cpumask *)&__cpu_present_mask)
#define cpu_active_mask   ((const struct cpumask *)&__cpu_active_mask)

extern atomic_t __num_online_cpus;

#if NR_CPUS > 1
/**
 * num_online_cpus() - Read the number of online CPUs
 *
 * Despite the fact that __num_online_cpus is of type atomic_t, this
 * interface gives only a momentary snapshot and is not protected against
 * concurrent CPU hotplug operations unless invoked from a cpuhp_lock held
 * region.
 */
static inline unsigned int num_online_cpus(void)
{
	return atomic_read(&__num_online_cpus);
}
#define num_possible_cpus()	cpumask_weight(cpu_possible_mask)
#define num_present_cpus()	cpumask_weight(cpu_present_mask)
#define num_active_cpus()	cpumask_weight(cpu_active_mask)
#define cpu_online(cpu)		cpumask_test_cpu((cpu), cpu_online_mask)
#define cpu_possible(cpu)	cpumask_test_cpu((cpu), cpu_possible_mask)
#define cpu_present(cpu)	cpumask_test_cpu((cpu), cpu_present_mask)
#define cpu_active(cpu)		cpumask_test_cpu((cpu), cpu_active_mask)
#else
#define num_online_cpus()	1U
#define num_possible_cpus()	1U
#define num_present_cpus()	1U
#define num_active_cpus()	1U
#define cpu_online(cpu)		((cpu) == 0)
#define cpu_possible(cpu)	((cpu) == 0)
#define cpu_present(cpu)	((cpu) == 0)
#define cpu_active(cpu)		((cpu) == 0)
#endif
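
/*
 * Example (illustrative sketch): dividing work across the CPUs currently
 * online.  "total_items" is a hypothetical caller variable; the value
 * returned by num_online_cpus() is only a snapshot unless the caller holds
 * off CPU hotplug (e.g. via cpus_read_lock()).
 *
 *	unsigned int per_cpu_items = total_items / num_online_cpus();
 */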

extern cpumask_t cpus_booted_once_mask;

static inline void cpu_max_bits_warn(unsigned int cpu, unsigned int bits)
{
#ifdef CONFIG_DEBUG_PER_CPU_MAPS
	WARN_ON_ONCE(cpu >= bits);
#endif
}

/* verify cpu argument to cpumask_* operators */
static inline unsigned int cpumask_check(unsigned int cpu)
{
	cpu_max_bits_warn(cpu, nr_cpumask_bits);
	return cpu;
}

#if NR_CPUS == 1
/* Uniprocessor: the only valid CPU number is 0, so these degenerate. */
static inline unsigned int cpumask_first(const struct cpumask *srcp)
{
	return 0;
}

static inline unsigned int cpumask_last(const struct cpumask *srcp)
{
	return 0;
}

/* Valid inputs for n are -1 and 0. */
static inline unsigned int cpumask_next(int n, const struct cpumask *srcp)
{
	return n+1;
}

static inline unsigned int cpumask_next_zero(int n, const struct cpumask *srcp)
{
	return n+1;
}

static inline unsigned int cpumask_next_and(int n,
					    const struct cpumask *srcp,
					    const struct cpumask *andp)
{
	return n+1;
}

static inline unsigned int cpumask_next_wrap(int n, const struct cpumask *mask,
					     int start, bool wrap)
{
	/* cpu0 unless stop condition, wrap and at cpu0, then nr_cpumask_bits */
	return (wrap && n == 0);
}

/* cpu must be a valid cpu, ie 0, so there's no other choice. */
static inline unsigned int cpumask_any_but(const struct cpumask *mask,
					   unsigned int cpu)
{
	return 1;
}

static inline unsigned int cpumask_local_spread(unsigned int i, int node)
{
	return 0;
}

static inline int cpumask_any_and_distribute(const struct cpumask *src1p,
					     const struct cpumask *src2p)
{
	return cpumask_next_and(-1, src1p, src2p);
}

#define for_each_cpu(cpu, mask)			\
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask)
#define for_each_cpu_not(cpu, mask)		\
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask)
#define for_each_cpu_wrap(cpu, mask, start)	\
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask, (void)(start))
#define for_each_cpu_and(cpu, mask1, mask2)	\
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask1, (void)mask2)
#else
/**
 * cpumask_first - get the first cpu in a cpumask
 * @srcp: the cpumask pointer
 *
 * Returns >= nr_cpu_ids if no cpus set.
 */
static inline unsigned int cpumask_first(const struct cpumask *srcp)
{
	return find_first_bit(cpumask_bits(srcp), nr_cpumask_bits);
}

/**
 * cpumask_last - get the last CPU in a cpumask
 * @srcp: the cpumask pointer
 *
 * Returns >= nr_cpumask_bits if no CPUs set.
 */
static inline unsigned int cpumask_last(const struct cpumask *srcp)
{
	return find_last_bit(cpumask_bits(srcp), nr_cpumask_bits);
}

unsigned int cpumask_next(int n, const struct cpumask *srcp);

/**
 * cpumask_next_zero - get the next unset cpu in a cpumask
 * @n: the cpu prior to the place to search (ie. return will be > @n)
 * @srcp: the cpumask pointer
 *
 * Returns >= nr_cpu_ids if no further cpus unset.
 */
static inline unsigned int cpumask_next_zero(int n, const struct cpumask *srcp)
{
	/* -1 is a legal arg here. */
	if (n != -1)
		cpumask_check(n);
	return find_next_zero_bit(cpumask_bits(srcp), nr_cpumask_bits, n+1);
}

int cpumask_next_and(int n, const struct cpumask *, const struct cpumask *);
int cpumask_any_but(const struct cpumask *mask, unsigned int cpu);
unsigned int cpumask_local_spread(unsigned int i, int node);
int cpumask_any_and_distribute(const struct cpumask *src1p,
			       const struct cpumask *src2p);

/**
 * for_each_cpu - iterate over every cpu in a mask
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu(cpu, mask)				\
	for ((cpu) = -1;				\
		(cpu) = cpumask_next((cpu), (mask)),	\
		(cpu) < nr_cpu_ids;)

/**
 * for_each_cpu_not - iterate over every cpu in a complemented mask
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu_not(cpu, mask)				\
	for ((cpu) = -1;					\
		(cpu) = cpumask_next_zero((cpu), (mask)),	\
		(cpu) < nr_cpu_ids;)

extern int cpumask_next_wrap(int n, const struct cpumask *mask, int start, bool wrap);

/**
 * for_each_cpu_wrap - iterate over every cpu in a mask, starting at a specified location
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 * @start: the start location
 *
 * The implementation does not assume any bit in @mask is set (including @start).
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu_wrap(cpu, mask, start)					\
	for ((cpu) = cpumask_next_wrap((start)-1, (mask), (start), false);	\
	     (cpu) < nr_cpumask_bits;						\
	     (cpu) = cpumask_next_wrap((cpu), (mask), (start), true))

/**
 * for_each_cpu_and - iterate over every cpu in both masks
 * @cpu: the (optionally unsigned) integer iterator
 * @mask1: the first cpumask pointer
 * @mask2: the second cpumask pointer
 *
 * This saves a temporary CPU mask in many places.  After the loop, cpu is
 * >= nr_cpu_ids.
 */
#define for_each_cpu_and(cpu, mask1, mask2)				\
	for ((cpu) = -1;						\
		(cpu) = cpumask_next_and((cpu), (mask1), (mask2)),	\
		(cpu) < nr_cpu_ids;)
#endif /* SMP */
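
/*
 * Example (illustrative sketch): counting the CPUs that are both online and
 * present in a caller-supplied affinity mask.  The function name and the
 * "affinity" argument are hypothetical.
 *
 *	static unsigned int count_usable_cpus(const struct cpumask *affinity)
 *	{
 *		unsigned int cpu, n = 0;
 *
 *		for_each_cpu_and(cpu, affinity, cpu_online_mask)
 *			n++;
 *		return n;
 *	}
 */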

#define CPU_BITS_NONE						\
{								\
	[0 ... BITS_TO_LONGS(NR_CPUS)-1] = 0UL			\
}

#define CPU_BITS_CPU0						\
{								\
	[0] = 1UL						\
}

/**
 * cpumask_set_cpu - set a cpu in a cpumask
 * @cpu: cpu number (< nr_cpu_ids)
 * @dstp: the cpumask pointer
 */
static inline void cpumask_set_cpu(unsigned int cpu, struct cpumask *dstp)
{
	set_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

static inline void __cpumask_set_cpu(unsigned int cpu, struct cpumask *dstp)
{
	__set_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/**
 * cpumask_clear_cpu - clear a cpu in a cpumask
 * @cpu: cpu number (< nr_cpu_ids)
 * @dstp: the cpumask pointer
 */
static inline void cpumask_clear_cpu(int cpu, struct cpumask *dstp)
{
	clear_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

static inline void __cpumask_clear_cpu(int cpu, struct cpumask *dstp)
{
	__clear_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/**
 * cpumask_test_cpu - test for a cpu in a cpumask
 * @cpu: cpu number (< nr_cpu_ids)
 * @cpumask: the cpumask pointer
 *
 * Returns 1 if @cpu is set in @cpumask, else returns 0
 */
static inline int cpumask_test_cpu(int cpu, const struct cpumask *cpumask)
{
	return test_bit(cpumask_check(cpu), cpumask_bits((cpumask)));
}

/**
 * cpumask_test_and_set_cpu - atomically test and set a cpu in a cpumask
 * @cpu: cpu number (< nr_cpu_ids)
 * @cpumask: the cpumask pointer
 *
 * Returns 1 if @cpu was set in the old bitmap of @cpumask, else returns 0
 *
 * test_and_set_bit wrapper for cpumasks.
 */
static inline int cpumask_test_and_set_cpu(int cpu, struct cpumask *cpumask)
{
	return test_and_set_bit(cpumask_check(cpu), cpumask_bits(cpumask));
}

/**
 * cpumask_test_and_clear_cpu - atomically test and clear a cpu in a cpumask
 * @cpu: cpu number (< nr_cpu_ids)
 * @cpumask: the cpumask pointer
 *
 * Returns 1 if @cpu was set in the old bitmap of @cpumask, else returns 0
 *
 * test_and_clear_bit wrapper for cpumasks.
 */
static inline int cpumask_test_and_clear_cpu(int cpu, struct cpumask *cpumask)
{
	return test_and_clear_bit(cpumask_check(cpu), cpumask_bits(cpumask));
}
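
/*
 * Example (illustrative sketch): atomically claiming per-CPU ownership so
 * that only one context processes a given CPU's work.  "pending_mask" is a
 * hypothetical mask owned by the caller and queue_work_for() is a
 * hypothetical helper.
 *
 *	if (!cpumask_test_and_set_cpu(cpu, &pending_mask))
 *		queue_work_for(cpu);
 */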

/**
 * cpumask_setall - set all cpus (< nr_cpu_ids) in a cpumask
 * @dstp: the cpumask pointer
 */
static inline void cpumask_setall(struct cpumask *dstp)
{
	bitmap_fill(cpumask_bits(dstp), nr_cpumask_bits);
}

/**
 * cpumask_clear - clear all cpus (< nr_cpu_ids) in a cpumask
 * @dstp: the cpumask pointer
 */
static inline void cpumask_clear(struct cpumask *dstp)
{
	bitmap_zero(cpumask_bits(dstp), nr_cpumask_bits);
}

/**
 * cpumask_and - *dstp = *src1p & *src2p
 * @dstp: the cpumask result
 * @src1p: the first input
 * @src2p: the second input
 *
 * If *@dstp is empty, returns 0, else returns 1
 */
static inline int cpumask_and(struct cpumask *dstp,
			      const struct cpumask *src1p,
			      const struct cpumask *src2p)
{
	return bitmap_and(cpumask_bits(dstp), cpumask_bits(src1p),
			  cpumask_bits(src2p), nr_cpumask_bits);
}

/**
 * cpumask_or - *dstp = *src1p | *src2p
 * @dstp: the cpumask result
 * @src1p: the first input
 * @src2p: the second input
 */
static inline void cpumask_or(struct cpumask *dstp, const struct cpumask *src1p,
			      const struct cpumask *src2p)
{
	bitmap_or(cpumask_bits(dstp), cpumask_bits(src1p),
		  cpumask_bits(src2p), nr_cpumask_bits);
}

/**
 * cpumask_xor - *dstp = *src1p ^ *src2p
 * @dstp: the cpumask result
 * @src1p: the first input
 * @src2p: the second input
 */
static inline void cpumask_xor(struct cpumask *dstp,
			       const struct cpumask *src1p,
			       const struct cpumask *src2p)
{
	bitmap_xor(cpumask_bits(dstp), cpumask_bits(src1p),
		   cpumask_bits(src2p), nr_cpumask_bits);
}

/**
 * cpumask_andnot - *dstp = *src1p & ~*src2p
 * @dstp: the cpumask result
 * @src1p: the first input
 * @src2p: the second input
 *
 * If *@dstp is empty, returns 0, else returns 1
 */
static inline int cpumask_andnot(struct cpumask *dstp,
				 const struct cpumask *src1p,
				 const struct cpumask *src2p)
{
	return bitmap_andnot(cpumask_bits(dstp), cpumask_bits(src1p),
			     cpumask_bits(src2p), nr_cpumask_bits);
}
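
/*
 * Example (illustrative sketch): restricting a requested affinity to the
 * CPUs that are currently online, rejecting an empty intersection.
 * "new_mask" and "requested" are hypothetical caller-owned masks.
 *
 *	if (!cpumask_and(new_mask, requested, cpu_online_mask))
 *		return -EINVAL;
 */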

/**
 * cpumask_complement - *dstp = ~*srcp
 * @dstp: the cpumask result
 * @srcp: the input to invert
 */
static inline void cpumask_complement(struct cpumask *dstp,
				      const struct cpumask *srcp)
{
	bitmap_complement(cpumask_bits(dstp), cpumask_bits(srcp),
			  nr_cpumask_bits);
}

/**
 * cpumask_equal - *src1p == *src2p
 * @src1p: the first input
 * @src2p: the second input
 */
static inline bool cpumask_equal(const struct cpumask *src1p,
				 const struct cpumask *src2p)
{
	return bitmap_equal(cpumask_bits(src1p), cpumask_bits(src2p),
			    nr_cpumask_bits);
}

/**
 * cpumask_or_equal - *src1p | *src2p == *src3p
 * @src1p: the first input
 * @src2p: the second input
 * @src3p: the third input
 */
static inline bool cpumask_or_equal(const struct cpumask *src1p,
				    const struct cpumask *src2p,
				    const struct cpumask *src3p)
{
	return bitmap_or_equal(cpumask_bits(src1p), cpumask_bits(src2p),
			       cpumask_bits(src3p), nr_cpumask_bits);
}

/**
 * cpumask_intersects - (*src1p & *src2p) != empty
 * @src1p: the first input
 * @src2p: the second input
 */
static inline bool cpumask_intersects(const struct cpumask *src1p,
				      const struct cpumask *src2p)
{
	return bitmap_intersects(cpumask_bits(src1p), cpumask_bits(src2p),
				 nr_cpumask_bits);
}

/**
 * cpumask_subset - (*src1p & ~*src2p) == empty
 * @src1p: the first input
 * @src2p: the second input
 *
 * Returns 1 if *@src1p is a subset of *@src2p, else returns 0
 */
static inline int cpumask_subset(const struct cpumask *src1p,
				 const struct cpumask *src2p)
{
	return bitmap_subset(cpumask_bits(src1p), cpumask_bits(src2p),
			     nr_cpumask_bits);
}

/**
 * cpumask_empty - *srcp == empty
 * @srcp: the cpumask to check
 */
static inline bool cpumask_empty(const struct cpumask *srcp)
{
	return bitmap_empty(cpumask_bits(srcp), nr_cpumask_bits);
}

/**
 * cpumask_full - *srcp == all cpus (< nr_cpu_ids) set
 * @srcp: the cpumask to check
 */
static inline bool cpumask_full(const struct cpumask *srcp)
{
	return bitmap_full(cpumask_bits(srcp), nr_cpumask_bits);
}

/**
 * cpumask_weight - number of cpus set in *srcp
 * @srcp: the cpumask to count
 */
static inline unsigned int cpumask_weight(const struct cpumask *srcp)
{
	return bitmap_weight(cpumask_bits(srcp), nr_cpumask_bits);
}

/**
 * cpumask_shift_right - *dstp = *srcp >> n
 * @dstp: the cpumask result
 * @srcp: the input to shift
 * @n: the number of bits to shift by
 */
static inline void cpumask_shift_right(struct cpumask *dstp,
				       const struct cpumask *srcp, int n)
{
	bitmap_shift_right(cpumask_bits(dstp), cpumask_bits(srcp), n,
			   nr_cpumask_bits);
}

/**
 * cpumask_shift_left - *dstp = *srcp << n
 * @dstp: the cpumask result
 * @srcp: the input to shift
 * @n: the number of bits to shift by
 */
static inline void cpumask_shift_left(struct cpumask *dstp,
				      const struct cpumask *srcp, int n)
{
	bitmap_shift_left(cpumask_bits(dstp), cpumask_bits(srcp), n,
			  nr_cpumask_bits);
}

/**
 * cpumask_copy - *dstp = *srcp
 * @dstp: the result
 * @srcp: the input cpumask
 */
static inline void cpumask_copy(struct cpumask *dstp,
				const struct cpumask *srcp)
{
	bitmap_copy(cpumask_bits(dstp), cpumask_bits(srcp), nr_cpumask_bits);
}

/**
 * cpumask_any - pick a "random" cpu from *srcp
 * @srcp: the input cpumask
 *
 * Returns >= nr_cpu_ids if no cpus set.
 */
#define cpumask_any(srcp) cpumask_first(srcp)

/**
 * cpumask_first_and - return the first cpu from *src1p & *src2p
 * @src1p: the first input
 * @src2p: the second input
 *
 * Returns >= nr_cpu_ids if no cpus set in both.  See also cpumask_next_and().
 */
#define cpumask_first_and(src1p, src2p) cpumask_next_and(-1, (src1p), (src2p))

/**
 * cpumask_any_and - pick a "random" cpu from *mask1 & *mask2
 * @mask1: the first input cpumask
 * @mask2: the second input cpumask
 *
 * Returns >= nr_cpu_ids if no cpus set.
 */
#define cpumask_any_and(mask1, mask2) cpumask_first_and((mask1), (mask2))

/**
 * cpumask_of - the cpumask containing just a given cpu
 * @cpu: the cpu (<= nr_cpu_ids)
 */
#define cpumask_of(cpu) (get_cpu_mask(cpu))
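
/*
 * Example (illustrative sketch): pinning a task to one CPU by handing the
 * scheduler's set_cpus_allowed_ptr() helper the constant single-CPU mask.
 * "task" and "cpu" are hypothetical caller variables.
 *
 *	ret = set_cpus_allowed_ptr(task, cpumask_of(cpu));
 */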

/**
 * cpumask_parse_user - extract a cpumask from a user string
 * @buf: the buffer to extract from
 * @len: the length of the buffer
 * @dstp: the cpumask to set.
 *
 * Returns -errno, or 0 for success.
 */
static inline int cpumask_parse_user(const char __user *buf, int len,
				     struct cpumask *dstp)
{
	return bitmap_parse_user(buf, len, cpumask_bits(dstp), nr_cpumask_bits);
}

/**
 * cpumask_parselist_user - extract a cpumask from a user string of ranges
 * @buf: the buffer to extract from
 * @len: the length of the buffer
 * @dstp: the cpumask to set.
 *
 * Returns -errno, or 0 for success.
 */
static inline int cpumask_parselist_user(const char __user *buf, int len,
					 struct cpumask *dstp)
{
	return bitmap_parselist_user(buf, len, cpumask_bits(dstp),
				     nr_cpumask_bits);
}

/**
 * cpumask_parse - extract a cpumask from a string
 * @buf: the buffer to extract from
 * @dstp: the cpumask to set.
 *
 * Returns -errno, or 0 for success.
 */
static inline int cpumask_parse(const char *buf, struct cpumask *dstp)
{
	return bitmap_parse(buf, UINT_MAX, cpumask_bits(dstp), nr_cpumask_bits);
}

/**
 * cpulist_parse - extract a cpumask from a user string of ranges
 * @buf: the buffer to extract from
 * @dstp: the cpumask to set.
 *
 * Returns -errno, or 0 for success.
 */
static inline int cpulist_parse(const char *buf, struct cpumask *dstp)
{
	return bitmap_parselist(buf, cpumask_bits(dstp), nr_cpumask_bits);
}
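
/*
 * Example (illustrative sketch): accepting a CPU list from a module
 * parameter or sysfs write, e.g. "0-3,8".  "cpu_list_str" and "my_mask"
 * are hypothetical.
 *
 *	err = cpulist_parse(cpu_list_str, &my_mask);
 *	if (err)
 *		return err;
 */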

/**
 * cpumask_size - size to allocate for a 'struct cpumask' in bytes
 */
static inline unsigned int cpumask_size(void)
{
	return BITS_TO_LONGS(nr_cpumask_bits) * sizeof(long);
}
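
/*
 * Example (illustrative sketch): allocating a bare struct cpumask by hand.
 * Only cpumask_size() bytes are needed, which may be smaller than
 * sizeof(struct cpumask) when CONFIG_CPUMASK_OFFSTACK limits the valid
 * bits to nr_cpu_ids.
 *
 *	struct cpumask *mask = kzalloc(cpumask_size(), GFP_KERNEL);
 *
 *	if (!mask)
 *		return -ENOMEM;
 */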

/*
 * cpumask_var_t: struct cpumask for stack usage.
 *
 * Oh, the wicked games we play!  In order to make kernel coding a
 * little more difficult, we typedef cpumask_var_t to an array or a
 * pointer: doing &mask on an array then gets you the address of the
 * array (which can be handed to a struct cpumask * as it works out),
 * but doing &mask on a pointer gets you the address of the pointer,
 * which is not what the cpumask operations want.
 *
 * So it is a pointer when CONFIG_CPUMASK_OFFSTACK is defined (so that
 * huge NR_CPUS masks are allocated dynamically rather than placed on
 * the stack), and a one-element array otherwise, in which case the
 * alloc/free calls degenerate to no-ops.  Either way, callers must
 * check the return value of the alloc functions and pair every
 * successful allocation with free_cpumask_var().
 */
#ifdef CONFIG_CPUMASK_OFFSTACK
typedef struct cpumask *cpumask_var_t;

#define this_cpu_cpumask_var_ptr(x)	this_cpu_read(x)
#define __cpumask_var_read_mostly	__read_mostly

bool alloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node);
bool alloc_cpumask_var(cpumask_var_t *mask, gfp_t flags);
bool zalloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node);
bool zalloc_cpumask_var(cpumask_var_t *mask, gfp_t flags);
void alloc_bootmem_cpumask_var(cpumask_var_t *mask);
void free_cpumask_var(cpumask_var_t mask);
void free_bootmem_cpumask_var(cpumask_var_t mask);

static inline bool cpumask_available(cpumask_var_t mask)
{
	return mask != NULL;
}

#else
typedef struct cpumask cpumask_var_t[1];

#define this_cpu_cpumask_var_ptr(x)	this_cpu_ptr(x)
#define __cpumask_var_read_mostly

static inline bool alloc_cpumask_var(cpumask_var_t *mask, gfp_t flags)
{
	return true;
}

static inline bool alloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags,
					  int node)
{
	return true;
}

static inline bool zalloc_cpumask_var(cpumask_var_t *mask, gfp_t flags)
{
	cpumask_clear(*mask);
	return true;
}

static inline bool zalloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags,
					   int node)
{
	cpumask_clear(*mask);
	return true;
}

static inline void alloc_bootmem_cpumask_var(cpumask_var_t *mask)
{
}

static inline void free_cpumask_var(cpumask_var_t mask)
{
}

static inline void free_bootmem_cpumask_var(cpumask_var_t mask)
{
}

static inline bool cpumask_available(cpumask_var_t mask)
{
	return true;
}
#endif /* CONFIG_CPUMASK_OFFSTACK */
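
/*
 * Example (illustrative sketch): the usual allocate/use/free pattern for
 * cpumask_var_t.  With CONFIG_CPUMASK_OFFSTACK this really allocates
 * memory (and can fail); otherwise it is a no-op over the on-stack
 * storage, which is why the return value must be checked either way.
 * "src" is a hypothetical cpumask supplied by the caller.
 *
 *	cpumask_var_t tmp;
 *
 *	if (!zalloc_cpumask_var(&tmp, GFP_KERNEL))
 *		return -ENOMEM;
 *
 *	cpumask_and(tmp, src, cpu_online_mask);
 *	... use tmp ...
 *	free_cpumask_var(tmp);
 */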

/*
 * It's common to want to use cpu_all_mask in struct member initializers,
 * so it has to refer to an address rather than a pointer.
 */
extern const DECLARE_BITMAP(cpu_all_bits, NR_CPUS);
#define cpu_all_mask to_cpumask(cpu_all_bits)

/* First bits of cpu_bit_bitmap are in fact unset. */
#define cpu_none_mask to_cpumask(cpu_bit_bitmap[0])

#define for_each_possible_cpu(cpu) for_each_cpu((cpu), cpu_possible_mask)
#define for_each_online_cpu(cpu)   for_each_cpu((cpu), cpu_online_mask)
#define for_each_present_cpu(cpu)  for_each_cpu((cpu), cpu_present_mask)

/* Wrappers for arch boot code to manipulate normally-constant masks */
void init_cpu_present(const struct cpumask *src);
void init_cpu_possible(const struct cpumask *src);
void init_cpu_online(const struct cpumask *src);

static inline void reset_cpu_possible_mask(void)
{
	bitmap_zero(cpumask_bits(&__cpu_possible_mask), NR_CPUS);
}

static inline void
set_cpu_possible(unsigned int cpu, bool possible)
{
	if (possible)
		cpumask_set_cpu(cpu, &__cpu_possible_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_possible_mask);
}

static inline void
set_cpu_present(unsigned int cpu, bool present)
{
	if (present)
		cpumask_set_cpu(cpu, &__cpu_present_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_present_mask);
}

void set_cpu_online(unsigned int cpu, bool online);

static inline void
set_cpu_active(unsigned int cpu, bool active)
{
	if (active)
		cpumask_set_cpu(cpu, &__cpu_active_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_active_mask);
}

/**
 * to_cpumask - convert an NR_CPUS bitmap to a struct cpumask *
 * @bitmap: the bitmap
 *
 * There are a few places where cpumask_var_t isn't appropriate and
 * static cpumasks must be used (eg. very early boot), yet we don't
 * expose the definition of 'struct cpumask'.
 *
 * This does the conversion, and can be used as a constant initializer.
 */
#define to_cpumask(bitmap)						\
	((struct cpumask *)(1 ? (bitmap)				\
			    : (void *)sizeof(__check_is_bitmap(bitmap))))

static inline int __check_is_bitmap(const unsigned long *bitmap)
{
	return 1;
}

/*
 * Special-case data structure for "single bit set only" constant CPU masks.
 *
 * We pre-generate all the 64 (or 32) possible bit positions, with enough
 * padding to the left and the right, and return the constant pointer
 * appropriately offset.
 */
extern const unsigned long
	cpu_bit_bitmap[BITS_PER_LONG+1][BITS_TO_LONGS(NR_CPUS)];

static inline const struct cpumask *get_cpu_mask(unsigned int cpu)
{
	const unsigned long *p = cpu_bit_bitmap[1 + cpu % BITS_PER_LONG];
	p -= cpu / BITS_PER_LONG;
	return to_cpumask(p);
}

#define cpu_is_offline(cpu)	unlikely(!cpu_online(cpu))

#if NR_CPUS <= BITS_PER_LONG
#define CPU_BITS_ALL						\
{								\
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS)	\
}

#else /* NR_CPUS > BITS_PER_LONG */

#define CPU_BITS_ALL						\
{								\
	[0 ... BITS_TO_LONGS(NR_CPUS)-2] = ~0UL,		\
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS)	\
}
#endif /* NR_CPUS > BITS_PER_LONG */

/**
 * cpumap_print_to_pagebuf - copies the cpumask into the buffer either
 *	as comma-separated list of cpus or hex values of cpumask
 * @list: indicates whether the cpumap must be list
 * @buf: the buffer to copy into
 * @mask: the cpumask to copy
 *
 * Returns the length of the (null-terminated) @buf string, zero if
 * nothing is copied.
 */
static inline ssize_t
cpumap_print_to_pagebuf(bool list, char *buf, const struct cpumask *mask)
{
	return bitmap_print_to_pagebuf(list, buf, cpumask_bits(mask),
				       nr_cpu_ids);
}
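
/*
 * Example (illustrative sketch): a sysfs "show" method that prints a mask
 * as a CPU list ("0-3,8") rather than hex.  The device attribute and the
 * "my_cpus" mask are hypothetical.
 *
 *	static ssize_t cpus_show(struct device *dev,
 *				 struct device_attribute *attr, char *buf)
 *	{
 *		return cpumap_print_to_pagebuf(true, buf, &my_cpus);
 *	}
 */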

#if NR_CPUS <= BITS_PER_LONG
#define CPU_MASK_ALL							\
(cpumask_t) { {								\
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS)	\
} }
#else
#define CPU_MASK_ALL							\
(cpumask_t) { {								\
	[0 ... BITS_TO_LONGS(NR_CPUS)-2] = ~0UL,			\
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS)	\
} }
#endif /* NR_CPUS > BITS_PER_LONG */

#define CPU_MASK_NONE							\
(cpumask_t) { {								\
	[0 ... BITS_TO_LONGS(NR_CPUS)-1] = 0UL				\
} }

#define CPU_MASK_CPU0							\
(cpumask_t) { {								\
	[0] = 1UL							\
} }

#endif /* __LINUX_CPUMASK_H */