1
2#ifndef __LINUX_CPUMASK_H
3#define __LINUX_CPUMASK_H
4
5
6
7
8
9
10#include <linux/kernel.h>
11#include <linux/threads.h>
12#include <linux/bitmap.h>
13#include <linux/atomic.h>
14#include <linux/bug.h>
15
16
/* Don't assign or return these: may not be this big! */
typedef struct cpumask { DECLARE_BITMAP(bits, NR_CPUS); } cpumask_t;
18
19
20
21
22
23
24
25
/**
 * cpumask_bits - get the bits in a cpumask
 * @maskp: the struct cpumask *
 *
 * You should only assume nr_cpu_ids bits of this mask are valid.  This is
 * used primarily to pass cpumasks into the bitmap_*() APIs below.
 */
#define cpumask_bits(maskp) ((maskp)->bits)
27
28
29
30
31
32
33
/**
 * cpumask_pr_args - printf args to output a cpumask
 * @maskp: the cpumask to print
 *
 * Expands to "field width, bits pointer" — intended for use with the
 * kernel's "%*pb[l]" bitmap format specifiers.
 */
#define cpumask_pr_args(maskp) nr_cpu_ids, cpumask_bits(maskp)
35
/* nr_cpu_ids: number of possible CPU ids; a compile-time 1 on UP builds. */
#if NR_CPUS == 1
#define nr_cpu_ids 1U
#else
extern unsigned int nr_cpu_ids;
#endif
41
#ifdef CONFIG_CPUMASK_OFFSTACK
/*
 * Assuming NR_CPUS is huge, a runtime limit is more efficient.  Also,
 * not all bits may be allocated for off-stack cpumasks, so bound bitmap
 * operations by the runtime nr_cpu_ids rather than the static NR_CPUS.
 */
#define nr_cpumask_bits nr_cpu_ids
#else
#define nr_cpumask_bits ((unsigned int)NR_CPUS)
#endif
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
/*
 * The system-wide CPU masks.  The writable definitions live in the core
 * CPU code; everyone else gets read-only access through the const-cast
 * aliases below, and must use the set_cpu_*() mutators in this header.
 */
extern struct cpumask __cpu_possible_mask;
extern struct cpumask __cpu_online_mask;
extern struct cpumask __cpu_present_mask;
extern struct cpumask __cpu_active_mask;
extern struct cpumask __cpu_dying_mask;
#define cpu_possible_mask ((const struct cpumask *)&__cpu_possible_mask)
#define cpu_online_mask   ((const struct cpumask *)&__cpu_online_mask)
#define cpu_present_mask  ((const struct cpumask *)&__cpu_present_mask)
#define cpu_active_mask   ((const struct cpumask *)&__cpu_active_mask)
#define cpu_dying_mask    ((const struct cpumask *)&__cpu_dying_mask)
100
/* Cached count of online CPUs; read by num_online_cpus() below. */
extern atomic_t __num_online_cpus;

/* NOTE(review): presumably tracks CPUs ever booted — confirm in kernel/cpu.c. */
extern cpumask_t cpus_booted_once_mask;
104
/*
 * Warn (once) if @cpu is out of range.  Compiles away entirely unless
 * CONFIG_DEBUG_PER_CPU_MAPS is enabled.
 */
static inline void cpu_max_bits_warn(unsigned int cpu, unsigned int bits)
{
#ifdef CONFIG_DEBUG_PER_CPU_MAPS
	WARN_ON_ONCE(cpu >= bits);
#endif
}
111
112
/* Verify a cpu argument to the cpumask_*() operators; returns @cpu unchanged. */
static inline unsigned int cpumask_check(unsigned int cpu)
{
	cpu_max_bits_warn(cpu, nr_cpumask_bits);
	return cpu;
}
118
#if NR_CPUS == 1
/*
 * Uniprocessor build: the only CPU is 0, so searches and iterations
 * collapse to trivial constant forms.  The (void)mask casts in the
 * iterator macros mark the arguments as used without evaluating them.
 */
static inline unsigned int cpumask_first(const struct cpumask *srcp)
{
	return 0;
}

static inline unsigned int cpumask_last(const struct cpumask *srcp)
{
	return 0;
}

/* Valid inputs for n are -1 and 0. */
static inline unsigned int cpumask_next(int n, const struct cpumask *srcp)
{
	return n+1;
}

static inline unsigned int cpumask_next_zero(int n, const struct cpumask *srcp)
{
	return n+1;
}

static inline unsigned int cpumask_next_and(int n,
					    const struct cpumask *srcp,
					    const struct cpumask *andp)
{
	return n+1;
}

static inline unsigned int cpumask_next_wrap(int n, const struct cpumask *mask,
					     int start, bool wrap)
{
	/* cpu0 unless stop condition, wrap and at cpu0, then nr_cpumask_bits */
	return (wrap && n == 0);
}

/* cpu must be a valid cpu, ie 0; so there's no other choice. */
static inline unsigned int cpumask_any_but(const struct cpumask *mask,
					   unsigned int cpu)
{
	return 1;
}

static inline unsigned int cpumask_local_spread(unsigned int i, int node)
{
	return 0;
}

static inline int cpumask_any_and_distribute(const struct cpumask *src1p,
					     const struct cpumask *src2p) {
	return cpumask_next_and(-1, src1p, src2p);
}

static inline int cpumask_any_distribute(const struct cpumask *srcp)
{
	return cpumask_first(srcp);
}

#define for_each_cpu(cpu, mask)			\
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask)
#define for_each_cpu_not(cpu, mask)		\
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask)
#define for_each_cpu_wrap(cpu, mask, start)	\
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask, (void)(start))
#define for_each_cpu_and(cpu, mask1, mask2)	\
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask1, (void)mask2)
186#else
187
188
189
190
191
192
/**
 * cpumask_first - get the first cpu in a cpumask
 * @srcp: the cpumask pointer
 *
 * Returns >= nr_cpu_ids if no cpus set.
 */
static inline unsigned int cpumask_first(const struct cpumask *srcp)
{
	return find_first_bit(cpumask_bits(srcp), nr_cpumask_bits);
}
197
198
199
200
201
202
203
/**
 * cpumask_last - get the last CPU in a cpumask
 * @srcp: - the cpumask pointer
 *
 * Returns >= nr_cpumask_bits if no CPUs set.
 */
static inline unsigned int cpumask_last(const struct cpumask *srcp)
{
	return find_last_bit(cpumask_bits(srcp), nr_cpumask_bits);
}
208
/* Next cpu after @n in @srcp; >= nr_cpu_ids when none.  -1 starts the scan. */
unsigned int __pure cpumask_next(int n, const struct cpumask *srcp);
210
211
212
213
214
215
216
217
/**
 * cpumask_next_zero - get the next unset cpu in a cpumask
 * @n: the cpu prior to the place to search (ie. return will be > @n)
 * @srcp: the cpumask pointer
 *
 * Returns >= nr_cpu_ids if no further cpus unset.
 */
static inline unsigned int cpumask_next_zero(int n, const struct cpumask *srcp)
{
	/* -1 is a legal arg here. */
	if (n != -1)
		cpumask_check(n);
	return find_next_zero_bit(cpumask_bits(srcp), nr_cpumask_bits, n+1);
}
225
/* Next cpu set in both masks after @n (-1 to start); >= nr_cpu_ids when none. */
int __pure cpumask_next_and(int n, const struct cpumask *, const struct cpumask *);
/* Any cpu in @mask other than @cpu; >= nr_cpu_ids when none. */
int __pure cpumask_any_but(const struct cpumask *mask, unsigned int cpu);
/* i'th cpu, NUMA-locality-aware spread starting from @node. */
unsigned int cpumask_local_spread(unsigned int i, int node);
/* "Distribute" variants rotate the chosen cpu between calls to spread load. */
int cpumask_any_and_distribute(const struct cpumask *src1p,
			       const struct cpumask *src2p);
int cpumask_any_distribute(const struct cpumask *srcp);
232
233
234
235
236
237
238
239
/**
 * for_each_cpu - iterate over every cpu in a mask
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu(cpu, mask)				\
	for ((cpu) = -1;				\
		(cpu) = cpumask_next((cpu), (mask)),	\
		(cpu) < nr_cpu_ids;)
244
245
246
247
248
249
250
251
/**
 * for_each_cpu_not - iterate over every cpu in a complemented mask
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu_not(cpu, mask)				\
	for ((cpu) = -1;					\
		(cpu) = cpumask_next_zero((cpu), (mask)),	\
		(cpu) < nr_cpu_ids;)

/* Next cpu in @mask after @n, wrapping past the end back to @start. */
extern int cpumask_next_wrap(int n, const struct cpumask *mask, int start, bool wrap);
258
259
260
261
262
263
264
265
266
267
268
/**
 * for_each_cpu_wrap - iterate over every cpu in a mask, starting at a specified location
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 * @start: the start location
 *
 * The implementation does not assume any bit in @mask is set (including @start).
 * After the loop, cpu is >= nr_cpumask_bits.
 */
#define for_each_cpu_wrap(cpu, mask, start)					\
	for ((cpu) = cpumask_next_wrap((start)-1, (mask), (start), false);	\
	     (cpu) < nr_cpumask_bits;						\
	     (cpu) = cpumask_next_wrap((cpu), (mask), (start), true))
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
/**
 * for_each_cpu_and - iterate over every cpu in both masks
 * @cpu: the (optionally unsigned) integer iterator
 * @mask1: the first cpumask pointer
 * @mask2: the second cpumask pointer
 *
 * This saves a temporary CPU mask in many places.  It is equivalent to:
 *	struct cpumask tmp;
 *	cpumask_and(&tmp, &mask1, &mask2);
 *	for_each_cpu(cpu, &tmp);
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu_and(cpu, mask1, mask2)				\
	for ((cpu) = -1;						\
		(cpu) = cpumask_next_and((cpu), (mask1), (mask2)),	\
		(cpu) < nr_cpu_ids;)
#endif
292#endif
293
/* Static bitmap initializer: no CPUs set. */
#define CPU_BITS_NONE						\
{								\
	[0 ... BITS_TO_LONGS(NR_CPUS)-1] = 0UL			\
}

/* Static bitmap initializer: only CPU 0 set. */
#define CPU_BITS_CPU0						\
{								\
	[0] =  1UL						\
}
303
304
305
306
307
308
/**
 * cpumask_set_cpu - set a cpu in a cpumask (atomic)
 * @cpu: cpu number (< nr_cpu_ids)
 * @dstp: the cpumask pointer
 */
static inline void cpumask_set_cpu(unsigned int cpu, struct cpumask *dstp)
{
	set_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/* Non-atomic variant of cpumask_set_cpu(). */
static inline void __cpumask_set_cpu(unsigned int cpu, struct cpumask *dstp)
{
	__set_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/**
 * cpumask_clear_cpu - clear a cpu in a cpumask (atomic)
 * @cpu: cpu number (< nr_cpu_ids)
 * @dstp: the cpumask pointer
 */
static inline void cpumask_clear_cpu(int cpu, struct cpumask *dstp)
{
	clear_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/* Non-atomic variant of cpumask_clear_cpu(). */
static inline void __cpumask_clear_cpu(int cpu, struct cpumask *dstp)
{
	__clear_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/**
 * cpumask_test_cpu - test for a cpu in a cpumask
 * @cpu: cpu number (< nr_cpu_ids)
 * @cpumask: the cpumask pointer
 *
 * Returns 1 if @cpu is set in @cpumask, else returns 0
 */
static inline int cpumask_test_cpu(int cpu, const struct cpumask *cpumask)
{
	return test_bit(cpumask_check(cpu), cpumask_bits((cpumask)));
}

/**
 * cpumask_test_and_set_cpu - atomically test and set a cpu in a cpumask
 * @cpu: cpu number (< nr_cpu_ids)
 * @cpumask: the cpumask pointer
 *
 * Returns 1 if @cpu was set in @cpumask, else returns 0
 */
static inline int cpumask_test_and_set_cpu(int cpu, struct cpumask *cpumask)
{
	return test_and_set_bit(cpumask_check(cpu), cpumask_bits(cpumask));
}

/**
 * cpumask_test_and_clear_cpu - atomically test and clear a cpu in a cpumask
 * @cpu: cpu number (< nr_cpu_ids)
 * @cpumask: the cpumask pointer
 *
 * Returns 1 if @cpu was set in @cpumask, else returns 0
 */
static inline int cpumask_test_and_clear_cpu(int cpu, struct cpumask *cpumask)
{
	return test_and_clear_bit(cpumask_check(cpu), cpumask_bits(cpumask));
}
374
375
376
377
378
/**
 * cpumask_setall - set all cpus (< nr_cpu_ids) in a cpumask
 * @dstp: the cpumask pointer
 */
static inline void cpumask_setall(struct cpumask *dstp)
{
	bitmap_fill(cpumask_bits(dstp), nr_cpumask_bits);
}

/**
 * cpumask_clear - clear all cpus (< nr_cpu_ids) in a cpumask
 * @dstp: the cpumask pointer
 */
static inline void cpumask_clear(struct cpumask *dstp)
{
	bitmap_zero(cpumask_bits(dstp), nr_cpumask_bits);
}
392
393
394
395
396
397
398
399
400
/**
 * cpumask_and - *dstp = *src1p & *src2p
 * @dstp: the cpumask result
 * @src1p: the first input
 * @src2p: the second input
 *
 * If *@dstp is empty, returns 0, else returns 1
 */
static inline int cpumask_and(struct cpumask *dstp,
			       const struct cpumask *src1p,
			       const struct cpumask *src2p)
{
	return bitmap_and(cpumask_bits(dstp), cpumask_bits(src1p),
				       cpumask_bits(src2p), nr_cpumask_bits);
}

/**
 * cpumask_or - *dstp = *src1p | *src2p
 * @dstp: the cpumask result
 * @src1p: the first input
 * @src2p: the second input
 */
static inline void cpumask_or(struct cpumask *dstp, const struct cpumask *src1p,
			      const struct cpumask *src2p)
{
	bitmap_or(cpumask_bits(dstp), cpumask_bits(src1p),
				      cpumask_bits(src2p), nr_cpumask_bits);
}

/**
 * cpumask_xor - *dstp = *src1p ^ *src2p
 * @dstp: the cpumask result
 * @src1p: the first input
 * @src2p: the second input
 */
static inline void cpumask_xor(struct cpumask *dstp,
			       const struct cpumask *src1p,
			       const struct cpumask *src2p)
{
	bitmap_xor(cpumask_bits(dstp), cpumask_bits(src1p),
				       cpumask_bits(src2p), nr_cpumask_bits);
}

/**
 * cpumask_andnot - *dstp = *src1p & ~*src2p
 * @dstp: the cpumask result
 * @src1p: the first input
 * @src2p: the second input
 *
 * If *@dstp is empty, returns 0, else returns 1
 */
static inline int cpumask_andnot(struct cpumask *dstp,
				  const struct cpumask *src1p,
				  const struct cpumask *src2p)
{
	return bitmap_andnot(cpumask_bits(dstp), cpumask_bits(src1p),
					  cpumask_bits(src2p), nr_cpumask_bits);
}

/**
 * cpumask_complement - *dstp = ~*srcp
 * @dstp: the cpumask result
 * @srcp: the input to invert
 */
static inline void cpumask_complement(struct cpumask *dstp,
				      const struct cpumask *srcp)
{
	bitmap_complement(cpumask_bits(dstp), cpumask_bits(srcp),
					      nr_cpumask_bits);
}
463
464
465
466
467
468
/**
 * cpumask_equal - *src1p == *src2p
 * @src1p: the first input
 * @src2p: the second input
 */
static inline bool cpumask_equal(const struct cpumask *src1p,
				const struct cpumask *src2p)
{
	return bitmap_equal(cpumask_bits(src1p), cpumask_bits(src2p),
						 nr_cpumask_bits);
}

/**
 * cpumask_or_equal - *src1p | *src2p == *src3p
 * @src1p: the first input
 * @src2p: the second input
 * @src3p: the third input
 */
static inline bool cpumask_or_equal(const struct cpumask *src1p,
				    const struct cpumask *src2p,
				    const struct cpumask *src3p)
{
	return bitmap_or_equal(cpumask_bits(src1p), cpumask_bits(src2p),
			       cpumask_bits(src3p), nr_cpumask_bits);
}

/**
 * cpumask_intersects - (*src1p & *src2p) != 0
 * @src1p: the first input
 * @src2p: the second input
 */
static inline bool cpumask_intersects(const struct cpumask *src1p,
				     const struct cpumask *src2p)
{
	return bitmap_intersects(cpumask_bits(src1p), cpumask_bits(src2p),
						      nr_cpumask_bits);
}

/**
 * cpumask_subset - (*src1p & ~*src2p) == 0
 * @src1p: the first input
 * @src2p: the second input
 *
 * Returns 1 if *@src1p is a subset of *@src2p, else returns 0
 */
static inline int cpumask_subset(const struct cpumask *src1p,
				 const struct cpumask *src2p)
{
	return bitmap_subset(cpumask_bits(src1p), cpumask_bits(src2p),
						  nr_cpumask_bits);
}

/**
 * cpumask_empty - *srcp == 0
 * @srcp: the cpumask to that all cpus < nr_cpu_ids are clear.
 */
static inline bool cpumask_empty(const struct cpumask *srcp)
{
	return bitmap_empty(cpumask_bits(srcp), nr_cpumask_bits);
}

/**
 * cpumask_full - *srcp == 0xFFFFFFFF...
 * @srcp: the cpumask to that all cpus < nr_cpu_ids are set.
 */
static inline bool cpumask_full(const struct cpumask *srcp)
{
	return bitmap_full(cpumask_bits(srcp), nr_cpumask_bits);
}
533
534
535
536
537
/**
 * cpumask_weight - Count of bits in *srcp
 * @srcp: the cpumask to count bits (< nr_cpu_ids) in.
 */
static inline unsigned int cpumask_weight(const struct cpumask *srcp)
{
	return bitmap_weight(cpumask_bits(srcp), nr_cpumask_bits);
}

/**
 * cpumask_shift_right - *dstp = *srcp >> n
 * @dstp: the cpumask result
 * @srcp: the input to shift
 * @n: the number of bits to shift by
 */
static inline void cpumask_shift_right(struct cpumask *dstp,
				       const struct cpumask *srcp, int n)
{
	bitmap_shift_right(cpumask_bits(dstp), cpumask_bits(srcp), n,
					       nr_cpumask_bits);
}

/**
 * cpumask_shift_left - *dstp = *srcp << n
 * @dstp: the cpumask result
 * @srcp: the input to shift
 * @n: the number of bits to shift by
 */
static inline void cpumask_shift_left(struct cpumask *dstp,
				      const struct cpumask *srcp, int n)
{
	bitmap_shift_left(cpumask_bits(dstp), cpumask_bits(srcp), n,
					      nr_cpumask_bits);
}

/**
 * cpumask_copy - *dstp = *srcp
 * @dstp: the result
 * @srcp: the input cpumask
 */
static inline void cpumask_copy(struct cpumask *dstp,
				const struct cpumask *srcp)
{
	bitmap_copy(cpumask_bits(dstp), cpumask_bits(srcp), nr_cpumask_bits);
}
579
580
581
582
583
584
585
/**
 * cpumask_any - pick a "random" cpu from *srcp
 * @srcp: the input cpumask
 *
 * Returns >= nr_cpu_ids if no cpus set.
 */
#define cpumask_any(srcp) cpumask_first(srcp)

/**
 * cpumask_first_and - return the first cpu from *srcp1 & *srcp2
 * @src1p: the first input
 * @src2p: the second input
 *
 * Returns >= nr_cpu_ids if no cpus set in both.  See also cpumask_next_and().
 */
#define cpumask_first_and(src1p, src2p) cpumask_next_and(-1, (src1p), (src2p))

/**
 * cpumask_any_and - pick a "random" cpu from *mask1 & *mask2
 * @mask1: the first input cpumask
 * @mask2: the second input cpumask
 *
 * Returns >= nr_cpu_ids if no cpus set.
 */
#define cpumask_any_and(mask1, mask2) cpumask_first_and((mask1), (mask2))

/**
 * cpumask_of - the cpumask containing just a given cpu
 * @cpu: the cpu (<= nr_cpu_ids)
 */
#define cpumask_of(cpu) (get_cpu_mask(cpu))
611
612
613
614
615
616
617
618
619
/**
 * cpumask_parse_user - extract a cpumask from a user string of hex digits
 * @buf: the buffer to extract from
 * @len: the length of the buffer
 * @dstp: the cpumask to set.
 *
 * Returns -errno, or 0 for success.
 */
static inline int cpumask_parse_user(const char __user *buf, int len,
				     struct cpumask *dstp)
{
	return bitmap_parse_user(buf, len, cpumask_bits(dstp), nr_cpumask_bits);
}

/**
 * cpumask_parselist_user - extract a cpumask from a user string of ranges
 * @buf: the buffer to extract from
 * @len: the length of the buffer
 * @dstp: the cpumask to set.
 *
 * Returns -errno, or 0 for success.
 */
static inline int cpumask_parselist_user(const char __user *buf, int len,
				     struct cpumask *dstp)
{
	return bitmap_parselist_user(buf, len, cpumask_bits(dstp),
				     nr_cpumask_bits);
}

/**
 * cpumask_parse - extract a cpumask from a string of hex digits
 * @buf: the buffer to extract from
 * @dstp: the cpumask to set.
 *
 * Returns -errno, or 0 for success.
 */
static inline int cpumask_parse(const char *buf, struct cpumask *dstp)
{
	return bitmap_parse(buf, UINT_MAX, cpumask_bits(dstp), nr_cpumask_bits);
}

/**
 * cpulist_parse - extract a cpumask from a user string of ranges
 * @buf: the buffer to extract from
 * @dstp: the cpumask to set.
 *
 * Returns -errno, or 0 for success.
 */
static inline int cpulist_parse(const char *buf, struct cpumask *dstp)
{
	return bitmap_parselist(buf, cpumask_bits(dstp), nr_cpumask_bits);
}

/**
 * cpumask_size - size to allocate for a 'struct cpumask' in bytes
 */
static inline unsigned int cpumask_size(void)
{
	return BITS_TO_LONGS(nr_cpumask_bits) * sizeof(long);
}
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
/*
 * cpumask_var_t: a cpumask that may be heap-allocated.
 *
 * With CONFIG_CPUMASK_OFFSTACK it is a pointer that must be allocated
 * via the alloc/zalloc helpers (which can fail) and released with
 * free_cpumask_var().  Without it, it is an on-stack one-element array,
 * the allocators trivially succeed, and the free functions are no-ops —
 * so callers can be written once for both configurations.
 */
#ifdef CONFIG_CPUMASK_OFFSTACK
typedef struct cpumask *cpumask_var_t;

#define this_cpu_cpumask_var_ptr(x)	this_cpu_read(x)
#define __cpumask_var_read_mostly	__read_mostly

bool alloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node);
bool alloc_cpumask_var(cpumask_var_t *mask, gfp_t flags);
bool zalloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node);
bool zalloc_cpumask_var(cpumask_var_t *mask, gfp_t flags);
void alloc_bootmem_cpumask_var(cpumask_var_t *mask);
void free_cpumask_var(cpumask_var_t mask);
void free_bootmem_cpumask_var(cpumask_var_t mask);

/* Returns true if the allocation above succeeded (mask is non-NULL). */
static inline bool cpumask_available(cpumask_var_t mask)
{
	return mask != NULL;
}

#else
typedef struct cpumask cpumask_var_t[1];

#define this_cpu_cpumask_var_ptr(x) this_cpu_ptr(x)
#define __cpumask_var_read_mostly

static inline bool alloc_cpumask_var(cpumask_var_t *mask, gfp_t flags)
{
	return true;
}

static inline bool alloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags,
					  int node)
{
	return true;
}

static inline bool zalloc_cpumask_var(cpumask_var_t *mask, gfp_t flags)
{
	cpumask_clear(*mask);
	return true;
}

static inline bool zalloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags,
					  int node)
{
	cpumask_clear(*mask);
	return true;
}

static inline void alloc_bootmem_cpumask_var(cpumask_var_t *mask)
{
}

static inline void free_cpumask_var(cpumask_var_t mask)
{
}

static inline void free_bootmem_cpumask_var(cpumask_var_t mask)
{
}

static inline bool cpumask_available(cpumask_var_t mask)
{
	return true;
}
#endif /* CONFIG_CPUMASK_OFFSTACK */
779
780
781
/* It's common to want to use cpu_all_mask in struct member initializers,
 * so it has to refer to an address rather than a pointer. */
extern const DECLARE_BITMAP(cpu_all_bits, NR_CPUS);
#define cpu_all_mask to_cpumask(cpu_all_bits)

/* First entry of cpu_bit_bitmap is the all-zero row — an empty mask. */
#define cpu_none_mask to_cpumask(cpu_bit_bitmap[0])

/* Iterators over the system-wide masks defined above. */
#define for_each_possible_cpu(cpu) for_each_cpu((cpu), cpu_possible_mask)
#define for_each_online_cpu(cpu)   for_each_cpu((cpu), cpu_online_mask)
#define for_each_present_cpu(cpu)  for_each_cpu((cpu), cpu_present_mask)
791
792
/* Wrappers for arch boot code to manipulate normally-constant masks */
void init_cpu_present(const struct cpumask *src);
void init_cpu_possible(const struct cpumask *src);
void init_cpu_online(const struct cpumask *src);

/* Clear every bit (all NR_CPUS of them) of the possible mask. */
static inline void reset_cpu_possible_mask(void)
{
	bitmap_zero(cpumask_bits(&__cpu_possible_mask), NR_CPUS);
}
801
/* Add or remove @cpu from the possible mask according to @possible. */
static inline void
set_cpu_possible(unsigned int cpu, bool possible)
{
	if (possible)
		cpumask_set_cpu(cpu, &__cpu_possible_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_possible_mask);
}

/* Add or remove @cpu from the present mask according to @present. */
static inline void
set_cpu_present(unsigned int cpu, bool present)
{
	if (present)
		cpumask_set_cpu(cpu, &__cpu_present_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_present_mask);
}

/*
 * Out of line: the online case must also keep __num_online_cpus in sync
 * (see num_online_cpus()), so it is not a simple set/clear.
 */
void set_cpu_online(unsigned int cpu, bool online);

/* Add or remove @cpu from the active mask according to @active. */
static inline void
set_cpu_active(unsigned int cpu, bool active)
{
	if (active)
		cpumask_set_cpu(cpu, &__cpu_active_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_active_mask);
}

/* Add or remove @cpu from the dying mask according to @dying. */
static inline void
set_cpu_dying(unsigned int cpu, bool dying)
{
	if (dying)
		cpumask_set_cpu(cpu, &__cpu_dying_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_dying_mask);
}
839
840
841
842
843
844
845
846
847
848
849
/**
 * to_cpumask - convert an NR_CPUS bitmap to a struct cpumask *
 * @bitmap: the bitmap
 *
 * There are a few places where cpumask_var_t isn't appropriate and
 * static cpumasks must be used (eg. very early boot), yet we don't
 * expose the definition of 'struct cpumask'.
 *
 * The conditional-with-sizeof trick makes this a compile-time check that
 * @bitmap really is an unsigned long array, without ever evaluating
 * __check_is_bitmap() at runtime.
 */
#define to_cpumask(bitmap)						\
	((struct cpumask *)(1 ? (bitmap)				\
			    : (void *)sizeof(__check_is_bitmap(bitmap))))

static inline int __check_is_bitmap(const unsigned long *bitmap)
{
	return 1;
}
858
859
860
861
862
863
864
865
/*
 * Special-case data structure for "single bit set only" constant CPU masks.
 *
 * We pre-generate all the 64 (or 32) possible bit positions, with enough
 * padding to the left and the right, and return the constant pointer
 * appropriately offset.  Row k+1 of cpu_bit_bitmap has bit k set in its
 * first word (defined out of line — see the kernel's cpu setup code).
 */
extern const unsigned long
	cpu_bit_bitmap[BITS_PER_LONG+1][BITS_TO_LONGS(NR_CPUS)];

static inline const struct cpumask *get_cpu_mask(unsigned int cpu)
{
	/* Select the row whose first word has bit (cpu % BITS_PER_LONG) set,
	 * then step the pointer back so that word lands at word index
	 * cpu / BITS_PER_LONG — yielding a mask with exactly bit @cpu set. */
	const unsigned long *p = cpu_bit_bitmap[1 + cpu % BITS_PER_LONG];
	p -= cpu / BITS_PER_LONG;
	return to_cpumask(p);
}
875
#if NR_CPUS > 1
/**
 * num_online_cpus() - Read the number of online CPUs
 *
 * Reads the cached atomic counter rather than computing
 * cpumask_weight(cpu_online_mask); the counter is kept in sync by
 * set_cpu_online().
 */
static inline unsigned int num_online_cpus(void)
{
	return atomic_read(&__num_online_cpus);
}
#define num_possible_cpus()	cpumask_weight(cpu_possible_mask)
#define num_present_cpus()	cpumask_weight(cpu_present_mask)
#define num_active_cpus()	cpumask_weight(cpu_active_mask)

/* Per-CPU state predicates, one per system mask. */
static inline bool cpu_online(unsigned int cpu)
{
	return cpumask_test_cpu(cpu, cpu_online_mask);
}

static inline bool cpu_possible(unsigned int cpu)
{
	return cpumask_test_cpu(cpu, cpu_possible_mask);
}

static inline bool cpu_present(unsigned int cpu)
{
	return cpumask_test_cpu(cpu, cpu_present_mask);
}

static inline bool cpu_active(unsigned int cpu)
{
	return cpumask_test_cpu(cpu, cpu_active_mask);
}

static inline bool cpu_dying(unsigned int cpu)
{
	return cpumask_test_cpu(cpu, cpu_dying_mask);
}

#else
/* Uniprocessor: CPU 0 is the one and only CPU. */
#define num_online_cpus()	1U
#define num_possible_cpus()	1U
#define num_present_cpus()	1U
#define num_active_cpus()	1U

static inline bool cpu_online(unsigned int cpu)
{
	return cpu == 0;
}

static inline bool cpu_possible(unsigned int cpu)
{
	return cpu == 0;
}

static inline bool cpu_present(unsigned int cpu)
{
	return cpu == 0;
}

static inline bool cpu_active(unsigned int cpu)
{
	return cpu == 0;
}

static inline bool cpu_dying(unsigned int cpu)
{
	return false;
}

#endif /* NR_CPUS > 1 */
951
#define cpu_is_offline(cpu)	unlikely(!cpu_online(cpu))

/*
 * Static bitmap initializer: all NR_CPUS bits set.  The last word is
 * masked so bits above NR_CPUS stay clear; with NR_CPUS <= BITS_PER_LONG
 * there is only that one word.
 */
#if NR_CPUS <= BITS_PER_LONG
#define CPU_BITS_ALL						\
{								\
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS)	\
}

#else /* NR_CPUS > BITS_PER_LONG */

#define CPU_BITS_ALL						\
{								\
	[0 ... BITS_TO_LONGS(NR_CPUS)-2] = ~0UL,		\
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS)	\
}
#endif /* NR_CPUS > BITS_PER_LONG */
968
969
970
971
972
973
974
975
976
977
978
/**
 * cpumap_print_to_pagebuf  - copies the cpumask into the buffer either
 *	as comma-separated list of cpus or hex values of cpumask
 * @list: indicates whether the cpumap must be list
 * @mask: the cpumask to copy
 * @buf: the buffer to copy into
 *
 * Returns the length of the (null-terminated) @buf string, zero if
 * nothing is copied.  Limited to a single page of output.
 */
static inline ssize_t
cpumap_print_to_pagebuf(bool list, char *buf, const struct cpumask *mask)
{
	return bitmap_print_to_pagebuf(list, buf, cpumask_bits(mask),
				      nr_cpu_ids);
}

/**
 * cpumap_print_bitmask_to_buf  - copies the cpumask into the buffer as
 *	hex values of cpumask, supporting output windowed by @off/@count
 *	for masks larger than one page
 * @buf: the buffer to copy into
 * @mask: the cpumask to copy
 * @off: in the string from which we are copying, we copy to @buf
 * @count: the maximum number of bytes to print
 *
 * NOTE(review): the "- 1" appears to drop a trailing byte counted by the
 * bitmap helper — confirm against bitmap_print_bitmask_to_buf() in
 * lib/bitmap.c before relying on exact lengths.
 */
static inline ssize_t
cpumap_print_bitmask_to_buf(char *buf, const struct cpumask *mask,
		loff_t off, size_t count)
{
	return bitmap_print_bitmask_to_buf(buf, cpumask_bits(mask),
				   nr_cpu_ids, off, count) - 1;
}

/**
 * cpumap_print_list_to_buf  - copies the cpumask into the buffer as
 *	comma-separated list of cpus; otherwise identical to
 *	cpumap_print_bitmask_to_buf() above
 * @buf: the buffer to copy into
 * @mask: the cpumask to copy
 * @off: in the string from which we are copying, we copy to @buf
 * @count: the maximum number of bytes to print
 */
static inline ssize_t
cpumap_print_list_to_buf(char *buf, const struct cpumask *mask,
		loff_t off, size_t count)
{
	return bitmap_print_list_to_buf(buf, cpumask_bits(mask),
				   nr_cpu_ids, off, count) - 1;
}
1024
/*
 * cpumask_t (compound-literal) initializers, mirroring the CPU_BITS_*
 * bitmap initializers above: all CPUs, no CPUs, and CPU 0 only.
 */
#if NR_CPUS <= BITS_PER_LONG
#define CPU_MASK_ALL							\
(cpumask_t) { {								\
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS)	\
} }
#else
#define CPU_MASK_ALL							\
(cpumask_t) { {								\
	[0 ... BITS_TO_LONGS(NR_CPUS)-2] = ~0UL,			\
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS)	\
} }
#endif /* NR_CPUS > BITS_PER_LONG */

#define CPU_MASK_NONE							\
(cpumask_t) { {								\
	[0 ... BITS_TO_LONGS(NR_CPUS)-1] =  0UL				\
} }

#define CPU_MASK_CPU0							\
(cpumask_t) { {								\
	[0] =  1UL							\
} }

#endif /* __LINUX_CPUMASK_H */
1049