#ifndef __LINUX_CPUMASK_H
#define __LINUX_CPUMASK_H

/*
 * Cpumasks provide a bitmap suitable for representing the
 * set of CPUs in a system, one bit position per CPU number.  In general,
 * only nr_cpu_ids (<= NR_CPUS) bits are valid.
 */
#include <linux/kernel.h>
#include <linux/threads.h>
#include <linux/bitmap.h>
#include <linux/atomic.h>
#include <linux/bug.h>

/* Don't assign or return these: may not be this big! */
typedef struct cpumask { DECLARE_BITMAP(bits, NR_CPUS); } cpumask_t;

/* cpumask_bits - get the bits in a cpumask; only nr_cpu_ids bits are valid */
#define cpumask_bits(maskp) ((maskp)->bits)

/*
 * cpumask_pr_args - printf args to output a cpumask
 *
 * Provides the arguments for the '%*pb[l]' format specifiers when
 * printing a cpumask.
 */
#define cpumask_pr_args(maskp) nr_cpu_ids, cpumask_bits(maskp)
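
/*
 * Illustrative usage sketch (not part of the original header):
 * cpumask_pr_args() expands to the "field width, bitmap pointer" pair
 * expected by the kernel's %*pb / %*pbl printf extensions, e.g.:
 *
 *	pr_info("online CPUs (list): %*pbl\n", cpumask_pr_args(cpu_online_mask));
 *	pr_info("online CPUs (hex):  %*pb\n",  cpumask_pr_args(cpu_online_mask));
 */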

#if NR_CPUS == 1
#define nr_cpu_ids 1U
#else
extern unsigned int nr_cpu_ids;
#endif

#ifdef CONFIG_CPUMASK_OFFSTACK
/*
 * Assuming NR_CPUS is huge, a runtime limit is more efficient.  Also,
 * not all bits may be allocated.
 */
#define nr_cpumask_bits nr_cpu_ids
#else
#define nr_cpumask_bits ((unsigned int)NR_CPUS)
#endif

/*
 * The following particular system cpumasks and operations manage
 * possible, present, active and online cpus.
 *
 *     cpu_possible_mask - has bit 'cpu' set iff cpu is populatable
 *     cpu_present_mask  - has bit 'cpu' set iff cpu is populated
 *     cpu_online_mask   - has bit 'cpu' set iff cpu is available to the scheduler
 *     cpu_active_mask   - has bit 'cpu' set iff cpu is available to migration
 *
 * If !CONFIG_HOTPLUG_CPU, present == possible and active == online.
 *
 * cpu_possible_mask is fixed at boot time as the set of CPU ids that
 * could ever be plugged in during the life of that system boot.
 * cpu_present_mask is dynamic in the hotplug case, tracking which CPUs
 * are currently plugged in (without hotplug it is a copy of
 * cpu_possible_mask, hence also fixed at boot).  cpu_online_mask is the
 * dynamic subset of cpu_present_mask that is available for scheduling.
 */

extern struct cpumask __cpu_possible_mask;
extern struct cpumask __cpu_online_mask;
extern struct cpumask __cpu_present_mask;
extern struct cpumask __cpu_active_mask;
#define cpu_possible_mask ((const struct cpumask *)&__cpu_possible_mask)
#define cpu_online_mask   ((const struct cpumask *)&__cpu_online_mask)
#define cpu_present_mask  ((const struct cpumask *)&__cpu_present_mask)
#define cpu_active_mask   ((const struct cpumask *)&__cpu_active_mask)

extern atomic_t __num_online_cpus;

#if NR_CPUS > 1
/*
 * num_online_cpus() - read the number of online CPUs.
 *
 * Although __num_online_cpus is atomic_t, this interface gives only a
 * momentary snapshot and is not protected against concurrent CPU hotplug
 * operations unless the caller holds the CPU hotplug lock.
 */
static inline unsigned int num_online_cpus(void)
{
	return atomic_read(&__num_online_cpus);
}
#define num_possible_cpus()	cpumask_weight(cpu_possible_mask)
#define num_present_cpus()	cpumask_weight(cpu_present_mask)
#define num_active_cpus()	cpumask_weight(cpu_active_mask)
#define cpu_online(cpu)		cpumask_test_cpu((cpu), cpu_online_mask)
#define cpu_possible(cpu)	cpumask_test_cpu((cpu), cpu_possible_mask)
#define cpu_present(cpu)	cpumask_test_cpu((cpu), cpu_present_mask)
#define cpu_active(cpu)		cpumask_test_cpu((cpu), cpu_active_mask)
#else
#define num_online_cpus()	1U
#define num_possible_cpus()	1U
#define num_present_cpus()	1U
#define num_active_cpus()	1U
#define cpu_online(cpu)		((cpu) == 0)
#define cpu_possible(cpu)	((cpu) == 0)
#define cpu_present(cpu)	((cpu) == 0)
#define cpu_active(cpu)		((cpu) == 0)
#endif
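
/*
 * Illustrative usage sketch (not part of the original header): these
 * predicates and counters are the usual way to gate per-CPU work, e.g.:
 *
 *	if (num_online_cpus() > 1 && cpu_online(target_cpu))
 *		queue_work_on(target_cpu, wq, &work);
 *
 * where "target_cpu", "wq" and "work" are hypothetical caller-owned objects.
 */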

extern cpumask_t cpus_booted_once_mask;

static inline void cpu_max_bits_warn(unsigned int cpu, unsigned int bits)
{
#ifdef CONFIG_DEBUG_PER_CPU_MAPS
	WARN_ON_ONCE(cpu >= bits);
#endif /* CONFIG_DEBUG_PER_CPU_MAPS */
}

/* verify cpu argument to cpumask_* operators */
static inline unsigned int cpumask_check(unsigned int cpu)
{
	cpu_max_bits_warn(cpu, nr_cpumask_bits);
	return cpu;
}

#if NR_CPUS == 1
/* Uniprocessor.  Assume all masks are "1". */
static inline unsigned int cpumask_first(const struct cpumask *srcp)
{
	return 0;
}

static inline unsigned int cpumask_last(const struct cpumask *srcp)
{
	return 0;
}

/* Valid inputs for n are -1 and 0. */
static inline unsigned int cpumask_next(int n, const struct cpumask *srcp)
{
	return n+1;
}

static inline unsigned int cpumask_next_zero(int n, const struct cpumask *srcp)
{
	return n+1;
}

static inline unsigned int cpumask_next_and(int n,
					    const struct cpumask *srcp,
					    const struct cpumask *andp)
{
	return n+1;
}

static inline unsigned int cpumask_next_wrap(int n, const struct cpumask *mask,
					     int start, bool wrap)
{
	/* cpu0 unless stop condition, wrap and at cpu0, then nr_cpumask_bits */
	return (wrap && n == 0);
}

/* cpu must be a valid cpu, ie 0, so there's no other choice. */
static inline unsigned int cpumask_any_but(const struct cpumask *mask,
					   unsigned int cpu)
{
	return 1;
}

static inline unsigned int cpumask_local_spread(unsigned int i, int node)
{
	return 0;
}

static inline int cpumask_any_and_distribute(const struct cpumask *src1p,
					     const struct cpumask *src2p)
{
	return cpumask_next_and(-1, src1p, src2p);
}

static inline int cpumask_any_distribute(const struct cpumask *srcp)
{
	return cpumask_first(srcp);
}

#define for_each_cpu(cpu, mask) \
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask)
#define for_each_cpu_not(cpu, mask) \
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask)
#define for_each_cpu_wrap(cpu, mask, start) \
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask, (void)(start))
#define for_each_cpu_and(cpu, mask1, mask2) \
	for ((cpu) = 0; (cpu) < 1; (cpu)++, (void)mask1, (void)mask2)
#else
/*
 * cpumask_first - get the first cpu in a cpumask.
 * Returns >= nr_cpu_ids if no cpus are set.
 */
static inline unsigned int cpumask_first(const struct cpumask *srcp)
{
	return find_first_bit(cpumask_bits(srcp), nr_cpumask_bits);
}

/*
 * cpumask_last - get the last CPU in a cpumask.
 * Returns >= nr_cpumask_bits if no CPUs are set.
 */
static inline unsigned int cpumask_last(const struct cpumask *srcp)
{
	return find_last_bit(cpumask_bits(srcp), nr_cpumask_bits);
}

unsigned int cpumask_next(int n, const struct cpumask *srcp);

/*
 * cpumask_next_zero - get the next unset cpu in a cpumask.
 * @n is the cpu prior to the place to search (ie. the return will be > @n).
 * Returns >= nr_cpu_ids if no further cpus are unset.
 */
static inline unsigned int cpumask_next_zero(int n, const struct cpumask *srcp)
{
	/* -1 is a legal arg here. */
	if (n != -1)
		cpumask_check(n);
	return find_next_zero_bit(cpumask_bits(srcp), nr_cpumask_bits, n+1);
}

int cpumask_next_and(int n, const struct cpumask *, const struct cpumask *);
int cpumask_any_but(const struct cpumask *mask, unsigned int cpu);
unsigned int cpumask_local_spread(unsigned int i, int node);
int cpumask_any_and_distribute(const struct cpumask *src1p,
			       const struct cpumask *src2p);
int cpumask_any_distribute(const struct cpumask *srcp);

/*
 * for_each_cpu - iterate over every cpu in a mask.
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu(cpu, mask) \
	for ((cpu) = -1; \
		(cpu) = cpumask_next((cpu), (mask)), \
		(cpu) < nr_cpu_ids;)

/*
 * for_each_cpu_not - iterate over every cpu in a complemented mask.
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu_not(cpu, mask) \
	for ((cpu) = -1; \
		(cpu) = cpumask_next_zero((cpu), (mask)), \
		(cpu) < nr_cpu_ids;)

extern int cpumask_next_wrap(int n, const struct cpumask *mask, int start, bool wrap);

/*
 * for_each_cpu_wrap - iterate over every cpu in a mask, starting at a
 * specified location.
 * @cpu: the (optionally unsigned) integer iterator
 * @mask: the cpumask pointer
 * @start: the start location
 *
 * The implementation does not assume any bit in @mask is set (including
 * @start).  After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu_wrap(cpu, mask, start) \
	for ((cpu) = cpumask_next_wrap((start)-1, (mask), (start), false); \
	     (cpu) < nr_cpumask_bits; \
	     (cpu) = cpumask_next_wrap((cpu), (mask), (start), true))

/*
 * for_each_cpu_and - iterate over every cpu present in both masks.
 * @cpu: the (optionally unsigned) integer iterator
 * @mask1: the first cpumask pointer
 * @mask2: the second cpumask pointer
 *
 * This saves a temporary CPU mask in many places.  It is equivalent to:
 *	struct cpumask tmp;
 *	cpumask_and(&tmp, &mask1, &mask2);
 *	for_each_cpu(cpu, &tmp)
 *		<body>
 *
 * After the loop, cpu is >= nr_cpu_ids.
 */
#define for_each_cpu_and(cpu, mask1, mask2) \
	for ((cpu) = -1; \
		(cpu) = cpumask_next_and((cpu), (mask1), (mask2)), \
		(cpu) < nr_cpu_ids;)
#endif /* SMP */
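
/*
 * Illustrative iteration sketch (not part of the original header): the
 * for_each_cpu*() macros visit the set bits in ascending CPU order, e.g.:
 *
 *	unsigned int cpu;
 *
 *	for_each_cpu(cpu, cpu_possible_mask)
 *		pr_info("possible cpu %u\n", cpu);
 *
 *	for_each_cpu_and(cpu, cpu_online_mask, affinity_mask)
 *		pr_info("online cpu %u in the affinity mask\n", cpu);
 *
 * where "affinity_mask" is a hypothetical caller-provided cpumask.
 */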

#define CPU_BITS_NONE \
{ \
	[0 ... BITS_TO_LONGS(NR_CPUS)-1] = 0UL \
}

#define CPU_BITS_CPU0 \
{ \
	[0] = 1UL \
}

/* cpumask_set_cpu - set a cpu (< nr_cpu_ids) in a cpumask (atomic) */
static inline void cpumask_set_cpu(unsigned int cpu, struct cpumask *dstp)
{
	set_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/* Non-atomic version of cpumask_set_cpu(). */
static inline void __cpumask_set_cpu(unsigned int cpu, struct cpumask *dstp)
{
	__set_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/* cpumask_clear_cpu - clear a cpu (< nr_cpu_ids) in a cpumask (atomic) */
static inline void cpumask_clear_cpu(int cpu, struct cpumask *dstp)
{
	clear_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/* Non-atomic version of cpumask_clear_cpu(). */
static inline void __cpumask_clear_cpu(int cpu, struct cpumask *dstp)
{
	__clear_bit(cpumask_check(cpu), cpumask_bits(dstp));
}

/* cpumask_test_cpu - returns 1 if @cpu is set in @cpumask, else 0 */
static inline int cpumask_test_cpu(int cpu, const struct cpumask *cpumask)
{
	return test_bit(cpumask_check(cpu), cpumask_bits((cpumask)));
}

/*
 * cpumask_test_and_set_cpu - atomically test and set a cpu in a cpumask;
 * returns 1 if @cpu was set in the old value of @cpumask, else 0.
 */
static inline int cpumask_test_and_set_cpu(int cpu, struct cpumask *cpumask)
{
	return test_and_set_bit(cpumask_check(cpu), cpumask_bits(cpumask));
}

/*
 * cpumask_test_and_clear_cpu - atomically test and clear a cpu in a cpumask;
 * returns 1 if @cpu was set in the old value of @cpumask, else 0.
 */
static inline int cpumask_test_and_clear_cpu(int cpu, struct cpumask *cpumask)
{
	return test_and_clear_bit(cpumask_check(cpu), cpumask_bits(cpumask));
}
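
/*
 * Illustrative usage sketch (not part of the original header): the atomic
 * set/clear/test helpers above are safe against concurrent updates of the
 * same mask, e.g. a simple "pending CPUs" set:
 *
 *	static struct cpumask pending_mask;	// hypothetical example mask
 *
 *	static void mark_pending(unsigned int cpu)
 *	{
 *		cpumask_set_cpu(cpu, &pending_mask);
 *	}
 *
 *	static bool take_pending(unsigned int cpu)
 *	{
 *		return cpumask_test_and_clear_cpu(cpu, &pending_mask);
 *	}
 */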

/* cpumask_setall - set all cpus (< nr_cpu_ids) in a cpumask */
static inline void cpumask_setall(struct cpumask *dstp)
{
	bitmap_fill(cpumask_bits(dstp), nr_cpumask_bits);
}

/* cpumask_clear - clear all cpus (< nr_cpu_ids) in a cpumask */
static inline void cpumask_clear(struct cpumask *dstp)
{
	bitmap_zero(cpumask_bits(dstp), nr_cpumask_bits);
}

/* cpumask_and - *dstp = *src1p & *src2p; returns 0 if *dstp is empty, else 1 */
static inline int cpumask_and(struct cpumask *dstp,
			      const struct cpumask *src1p,
			      const struct cpumask *src2p)
{
	return bitmap_and(cpumask_bits(dstp), cpumask_bits(src1p),
			  cpumask_bits(src2p), nr_cpumask_bits);
}

/* cpumask_or - *dstp = *src1p | *src2p */
static inline void cpumask_or(struct cpumask *dstp, const struct cpumask *src1p,
			      const struct cpumask *src2p)
{
	bitmap_or(cpumask_bits(dstp), cpumask_bits(src1p),
		  cpumask_bits(src2p), nr_cpumask_bits);
}

/* cpumask_xor - *dstp = *src1p ^ *src2p */
static inline void cpumask_xor(struct cpumask *dstp,
			       const struct cpumask *src1p,
			       const struct cpumask *src2p)
{
	bitmap_xor(cpumask_bits(dstp), cpumask_bits(src1p),
		   cpumask_bits(src2p), nr_cpumask_bits);
}

/* cpumask_andnot - *dstp = *src1p & ~*src2p; returns 0 if *dstp is empty, else 1 */
static inline int cpumask_andnot(struct cpumask *dstp,
				 const struct cpumask *src1p,
				 const struct cpumask *src2p)
{
	return bitmap_andnot(cpumask_bits(dstp), cpumask_bits(src1p),
			     cpumask_bits(src2p), nr_cpumask_bits);
}
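
/*
 * Illustrative composition sketch (not part of the original header):
 * cpumask_and()/cpumask_andnot() report whether the destination ended up
 * non-empty, which makes "compute and test" patterns compact, e.g.:
 *
 *	static struct cpumask tmp;	// hypothetical scratch mask
 *
 *	// CPUs that are online but not in a hypothetical "banned_mask"
 *	if (cpumask_andnot(&tmp, cpu_online_mask, &banned_mask))
 *		pr_info("first usable cpu: %u\n", cpumask_first(&tmp));
 */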

/* cpumask_complement - *dstp = ~*srcp */
static inline void cpumask_complement(struct cpumask *dstp,
				      const struct cpumask *srcp)
{
	bitmap_complement(cpumask_bits(dstp), cpumask_bits(srcp),
			  nr_cpumask_bits);
}

/* cpumask_equal - true iff *src1p == *src2p */
static inline bool cpumask_equal(const struct cpumask *src1p,
				 const struct cpumask *src2p)
{
	return bitmap_equal(cpumask_bits(src1p), cpumask_bits(src2p),
			    nr_cpumask_bits);
}

/* cpumask_or_equal - true iff (*src1p | *src2p) == *src3p */
static inline bool cpumask_or_equal(const struct cpumask *src1p,
				    const struct cpumask *src2p,
				    const struct cpumask *src3p)
{
	return bitmap_or_equal(cpumask_bits(src1p), cpumask_bits(src2p),
			       cpumask_bits(src3p), nr_cpumask_bits);
}

/* cpumask_intersects - true iff (*src1p & *src2p) != 0 */
static inline bool cpumask_intersects(const struct cpumask *src1p,
				      const struct cpumask *src2p)
{
	return bitmap_intersects(cpumask_bits(src1p), cpumask_bits(src2p),
				 nr_cpumask_bits);
}

/* cpumask_subset - returns 1 if *src1p is a subset of *src2p, else 0 */
static inline int cpumask_subset(const struct cpumask *src1p,
				 const struct cpumask *src2p)
{
	return bitmap_subset(cpumask_bits(src1p), cpumask_bits(src2p),
			     nr_cpumask_bits);
}

/* cpumask_empty - true iff no cpu (< nr_cpu_ids) is set in *srcp */
static inline bool cpumask_empty(const struct cpumask *srcp)
{
	return bitmap_empty(cpumask_bits(srcp), nr_cpumask_bits);
}

/* cpumask_full - true iff every cpu (< nr_cpu_ids) is set in *srcp */
static inline bool cpumask_full(const struct cpumask *srcp)
{
	return bitmap_full(cpumask_bits(srcp), nr_cpumask_bits);
}

/* cpumask_weight - count of cpus (< nr_cpu_ids) set in *srcp */
static inline unsigned int cpumask_weight(const struct cpumask *srcp)
{
	return bitmap_weight(cpumask_bits(srcp), nr_cpumask_bits);
}
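
/*
 * Illustrative example (not part of the original header): cpumask_weight()
 * is what num_possible_cpus() and friends above are built on, e.g.:
 *
 *	unsigned int nr_isolated = cpumask_weight(&isolated_mask);
 *
 * where "isolated_mask" is a hypothetical cpumask maintained by the caller.
 */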

/* cpumask_shift_right - *dstp = *srcp >> n */
static inline void cpumask_shift_right(struct cpumask *dstp,
				       const struct cpumask *srcp, int n)
{
	bitmap_shift_right(cpumask_bits(dstp), cpumask_bits(srcp), n,
			   nr_cpumask_bits);
}

/* cpumask_shift_left - *dstp = *srcp << n */
static inline void cpumask_shift_left(struct cpumask *dstp,
				      const struct cpumask *srcp, int n)
{
	bitmap_shift_left(cpumask_bits(dstp), cpumask_bits(srcp), n,
			  nr_cpumask_bits);
}

/* cpumask_copy - *dstp = *srcp */
static inline void cpumask_copy(struct cpumask *dstp,
				const struct cpumask *srcp)
{
	bitmap_copy(cpumask_bits(dstp), cpumask_bits(srcp), nr_cpumask_bits);
}

/*
 * cpumask_any - pick a "random" cpu from *srcp.
 * Returns >= nr_cpu_ids if no cpus are set.
 */
#define cpumask_any(srcp) cpumask_first(srcp)

/*
 * cpumask_first_and - return the first cpu in *src1p & *src2p.
 * Returns >= nr_cpu_ids if no cpu is set in both.
 */
#define cpumask_first_and(src1p, src2p) cpumask_next_and(-1, (src1p), (src2p))

/*
 * cpumask_any_and - pick a "random" cpu from *mask1 & *mask2.
 * Returns >= nr_cpu_ids if no cpu is set in both.
 */
#define cpumask_any_and(mask1, mask2) cpumask_first_and((mask1), (mask2))

/* cpumask_of - the cpumask containing just the given cpu (< nr_cpu_ids) */
#define cpumask_of(cpu) (get_cpu_mask(cpu))
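
/*
 * Illustrative selection sketch (not part of the original header): picking
 * an arbitrary CPU from an intersection, with a fallback when it is empty:
 *
 *	unsigned int cpu = cpumask_any_and(cpu_online_mask, preferred_mask);
 *
 *	if (cpu >= nr_cpu_ids)
 *		cpu = cpumask_any(cpu_online_mask);	// no preferred CPU online
 *
 * where "preferred_mask" is a hypothetical caller-provided cpumask pointer.
 */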

/*
 * cpumask_parse_user - extract a cpumask from a user-space buffer of
 * hex words.  Returns -errno, or 0 for success.
 */
static inline int cpumask_parse_user(const char __user *buf, int len,
				     struct cpumask *dstp)
{
	return bitmap_parse_user(buf, len, cpumask_bits(dstp), nr_cpumask_bits);
}

/*
 * cpumask_parselist_user - extract a cpumask from a user-space buffer
 * holding a cpu list (e.g. "0-3,8").  Returns -errno, or 0 for success.
 */
static inline int cpumask_parselist_user(const char __user *buf, int len,
					 struct cpumask *dstp)
{
	return bitmap_parselist_user(buf, len, cpumask_bits(dstp),
				     nr_cpumask_bits);
}

/*
 * cpumask_parse - extract a cpumask from a kernel string of hex words.
 * Returns -errno, or 0 for success.
 */
static inline int cpumask_parse(const char *buf, struct cpumask *dstp)
{
	return bitmap_parse(buf, UINT_MAX, cpumask_bits(dstp), nr_cpumask_bits);
}

/*
 * cpulist_parse - extract a cpumask from a kernel string holding a cpu
 * list (e.g. "0-3,8").  Returns -errno, or 0 for success.
 */
static inline int cpulist_parse(const char *buf, struct cpumask *dstp)
{
	return bitmap_parselist(buf, cpumask_bits(dstp), nr_cpumask_bits);
}
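
/*
 * Illustrative parsing sketch (not part of the original header): the list
 * format accepted by cpulist_parse() is the usual "0-3,8" style, e.g.:
 *
 *	static struct cpumask new_mask;	// hypothetical; static to stay off the stack
 *
 *	static int set_cpus(const char *str)	// e.g. str = "0-3,8"
 *	{
 *		return cpulist_parse(str, &new_mask);	// -errno on malformed input
 *	}
 */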

/* cpumask_size - size in bytes to allocate for a 'struct cpumask' */
static inline unsigned int cpumask_size(void)
{
	return BITS_TO_LONGS(nr_cpumask_bits) * sizeof(long);
}

/*
 * cpumask_var_t: struct cpumask for stack usage.
 *
 * To keep stack frames small when NR_CPUS is large, cpumask_var_t is
 * typedef'd either to a one-element array (on-stack) or to a plain
 * pointer (off-stack, CONFIG_CPUMASK_OFFSTACK).  Taking &mask on an
 * array is a no-op, so the same calling convention works for both:
 *
 *	cpumask_var_t tmpmask;
 *	if (!alloc_cpumask_var(&tmpmask, GFP_KERNEL))
 *		return -ENOMEM;
 *
 *	... use 'tmpmask' like a normal struct cpumask * ...
 *
 *	free_cpumask_var(tmpmask);
 *
 * Note that alloc_cpumask_var() allocates only nr_cpumask_bits bits,
 * whereas a real cpumask_t always has NR_CPUS bits, so never copy a
 * cpumask_var_t by dereferencing and assigning it; use cpumask_copy()
 * instead, which copies only the valid bits.
 *
 * If a cpumask_var_t is a per-cpu variable, obtain its address with
 * this_cpu_cpumask_var_ptr() rather than this_cpu_ptr()/this_cpu_read(),
 * since the correct accessor depends on which implementation is
 * configured.  __cpumask_var_read_mostly can be used to declare the
 * cpumask_var_t variable itself (not its contents) as read-mostly.
 */
#ifdef CONFIG_CPUMASK_OFFSTACK
typedef struct cpumask *cpumask_var_t;

#define this_cpu_cpumask_var_ptr(x) this_cpu_read(x)
#define __cpumask_var_read_mostly __read_mostly

bool alloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node);
bool alloc_cpumask_var(cpumask_var_t *mask, gfp_t flags);
bool zalloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node);
bool zalloc_cpumask_var(cpumask_var_t *mask, gfp_t flags);
void alloc_bootmem_cpumask_var(cpumask_var_t *mask);
void free_cpumask_var(cpumask_var_t mask);
void free_bootmem_cpumask_var(cpumask_var_t mask);

static inline bool cpumask_available(cpumask_var_t mask)
{
	return mask != NULL;
}

#else
typedef struct cpumask cpumask_var_t[1];

#define this_cpu_cpumask_var_ptr(x) this_cpu_ptr(x)
#define __cpumask_var_read_mostly

static inline bool alloc_cpumask_var(cpumask_var_t *mask, gfp_t flags)
{
	return true;
}

static inline bool alloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags,
					  int node)
{
	return true;
}

static inline bool zalloc_cpumask_var(cpumask_var_t *mask, gfp_t flags)
{
	cpumask_clear(*mask);
	return true;
}

static inline bool zalloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags,
					   int node)
{
	cpumask_clear(*mask);
	return true;
}

static inline void alloc_bootmem_cpumask_var(cpumask_var_t *mask)
{
}

static inline void free_cpumask_var(cpumask_var_t mask)
{
}

static inline void free_bootmem_cpumask_var(cpumask_var_t mask)
{
}

static inline bool cpumask_available(cpumask_var_t mask)
{
	return true;
}
#endif /* CONFIG_CPUMASK_OFFSTACK */
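
/*
 * Illustrative per-cpu sketch (not part of the original header): a
 * hypothetical per-cpu scratch mask would be declared and accessed as:
 *
 *	static DEFINE_PER_CPU(cpumask_var_t, scratch_mask) __cpumask_var_read_mostly;
 *
 *	struct cpumask *mask = this_cpu_cpumask_var_ptr(scratch_mask);
 *
 *	cpumask_copy(mask, cpu_online_mask);
 *
 * The accessor hides whether the per-cpu slot holds a pointer
 * (CONFIG_CPUMASK_OFFSTACK, where each mask must still be allocated at
 * init time) or the struct cpumask itself.
 */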

/*
 * It's common to want to use cpu_all_mask in struct member initializers,
 * so it has to refer to an address rather than a pointer.
 */
extern const DECLARE_BITMAP(cpu_all_bits, NR_CPUS);
#define cpu_all_mask to_cpumask(cpu_all_bits)

/* First bits of cpu_bit_bitmap are in fact unset. */
#define cpu_none_mask to_cpumask(cpu_bit_bitmap[0])

#define for_each_possible_cpu(cpu) for_each_cpu((cpu), cpu_possible_mask)
#define for_each_online_cpu(cpu) for_each_cpu((cpu), cpu_online_mask)
#define for_each_present_cpu(cpu) for_each_cpu((cpu), cpu_present_mask)

/* Wrappers for arch boot code to manipulate normally-constant masks */
void init_cpu_present(const struct cpumask *src);
void init_cpu_possible(const struct cpumask *src);
void init_cpu_online(const struct cpumask *src);

static inline void reset_cpu_possible_mask(void)
{
	bitmap_zero(cpumask_bits(&__cpu_possible_mask), NR_CPUS);
}

static inline void
set_cpu_possible(unsigned int cpu, bool possible)
{
	if (possible)
		cpumask_set_cpu(cpu, &__cpu_possible_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_possible_mask);
}

static inline void
set_cpu_present(unsigned int cpu, bool present)
{
	if (present)
		cpumask_set_cpu(cpu, &__cpu_present_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_present_mask);
}

void set_cpu_online(unsigned int cpu, bool online);

static inline void
set_cpu_active(unsigned int cpu, bool active)
{
	if (active)
		cpumask_set_cpu(cpu, &__cpu_active_mask);
	else
		cpumask_clear_cpu(cpu, &__cpu_active_mask);
}

/*
 * to_cpumask - convert an NR_CPUS bitmap to a struct cpumask *
 *
 * There are a few places where cpumask_var_t isn't appropriate and
 * static cpumasks must be used (eg. very early boot), yet we don't
 * expose the definition of 'struct cpumask'.
 *
 * This does the conversion, and can be used as a constant initializer.
 */
#define to_cpumask(bitmap) \
	((struct cpumask *)(1 ? (bitmap) \
			    : (void *)sizeof(__check_is_bitmap(bitmap))))

static inline int __check_is_bitmap(const unsigned long *bitmap)
{
	return 1;
}

/*
 * Special-case data structure for "single bit set only" constant CPU masks.
 *
 * We pre-generate all the 64 (or 32) possible bit positions, with enough
 * padding to the left and the right, and return the constant pointer
 * appropriately offset.
 */
extern const unsigned long
	cpu_bit_bitmap[BITS_PER_LONG+1][BITS_TO_LONGS(NR_CPUS)];

static inline const struct cpumask *get_cpu_mask(unsigned int cpu)
{
	const unsigned long *p = cpu_bit_bitmap[1 + cpu % BITS_PER_LONG];
	p -= cpu / BITS_PER_LONG;
	return to_cpumask(p);
}

#define cpu_is_offline(cpu) unlikely(!cpu_online(cpu))

#if NR_CPUS <= BITS_PER_LONG
#define CPU_BITS_ALL \
{ \
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS) \
}

#else /* NR_CPUS > BITS_PER_LONG */

#define CPU_BITS_ALL \
{ \
	[0 ... BITS_TO_LONGS(NR_CPUS)-2] = ~0UL, \
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS) \
}
#endif /* NR_CPUS > BITS_PER_LONG */

/*
 * cpumap_print_to_pagebuf - copies the cpumask into the buffer either as
 * a comma-separated list of cpus (@list == true) or as hex values of the
 * cpumask.  @buf should be a page-sized buffer, such as a sysfs attribute
 * buffer.  Returns the length of the (null-terminated) @buf string, zero
 * if nothing is copied.
 */
static inline ssize_t
cpumap_print_to_pagebuf(bool list, char *buf, const struct cpumask *mask)
{
	return bitmap_print_to_pagebuf(list, buf, cpumask_bits(mask),
				       nr_cpu_ids);
}
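
/*
 * Illustrative sysfs sketch (not part of the original header): this helper
 * is typically called from a device attribute ->show() method, e.g.:
 *
 *	static ssize_t cpus_show(struct device *dev,
 *				 struct device_attribute *attr, char *buf)
 *	{
 *		return cpumap_print_to_pagebuf(true, buf, cpu_online_mask);
 *	}
 *
 * where "cpus_show" is a hypothetical attribute and "true" selects the
 * comma-separated list format.
 */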

#if NR_CPUS <= BITS_PER_LONG
#define CPU_MASK_ALL \
(cpumask_t) { { \
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS) \
} }
#else
#define CPU_MASK_ALL \
(cpumask_t) { { \
	[0 ... BITS_TO_LONGS(NR_CPUS)-2] = ~0UL, \
	[BITS_TO_LONGS(NR_CPUS)-1] = BITMAP_LAST_WORD_MASK(NR_CPUS) \
} }
#endif /* NR_CPUS > BITS_PER_LONG */

#define CPU_MASK_NONE \
(cpumask_t) { { \
	[0 ... BITS_TO_LONGS(NR_CPUS)-1] = 0UL \
} }

#define CPU_MASK_CPU0 \
(cpumask_t) { { \
	[0] = 1UL \
} }

#endif /* __LINUX_CPUMASK_H */