// SPDX-License-Identifier: GPL-2.0+
/*
 * XArray implementation
 * Copyright (c) 2017-2018 Microsoft Corporation
 * Author: Matthew Wilcox <willy@infradead.org>
 */

#include <linux/bitmap.h>
#include <linux/export.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/xarray.h>

/*
 * Coding conventions in this file:
 *
 * @xa is used to refer to the entire xarray.
 * @xas is the 'xarray operation state'.  It may be either a pointer to
 * an xa_state, or an xa_state stored on the stack.  This is an unfortunate
 * ambiguity.
 * @entry refers to something stored in a slot in the xarray
 * @index is the index of the entry being operated on
 * @mark is an xa_mark_t; a small number indicating one of the mark bits.
 * @node refers to an xa_node; usually the primary one being operated on by
 * this function.
 * @shift is the number of bits in the index that we need to shift down to
 *   find this node within the tree.
 */
static inline unsigned int xa_lock_type(const struct xarray *xa)
{
	return (__force unsigned int)xa->xa_flags & 3;
}

static inline void xas_lock_type(struct xa_state *xas, unsigned int lock_type)
{
	if (lock_type == XA_LOCK_IRQ)
		xas_lock_irq(xas);
	else if (lock_type == XA_LOCK_BH)
		xas_lock_bh(xas);
	else
		xas_lock(xas);
}

static inline void xas_unlock_type(struct xa_state *xas, unsigned int lock_type)
{
	if (lock_type == XA_LOCK_IRQ)
		xas_unlock_irq(xas);
	else if (lock_type == XA_LOCK_BH)
		xas_unlock_bh(xas);
	else
		xas_unlock(xas);
}

static inline bool xa_track_free(const struct xarray *xa)
{
	return xa->xa_flags & XA_FLAGS_TRACK_FREE;
}

static inline bool xa_zero_busy(const struct xarray *xa)
{
	return xa->xa_flags & XA_FLAGS_ZERO_BUSY;
}

static inline void xa_mark_set(struct xarray *xa, xa_mark_t mark)
{
	if (!(xa->xa_flags & XA_FLAGS_MARK(mark)))
		xa->xa_flags |= XA_FLAGS_MARK(mark);
}

static inline void xa_mark_clear(struct xarray *xa, xa_mark_t mark)
{
	if (xa->xa_flags & XA_FLAGS_MARK(mark))
		xa->xa_flags &= ~(XA_FLAGS_MARK(mark));
}

static inline unsigned long *node_marks(struct xa_node *node, xa_mark_t mark)
{
	return node->marks[(__force unsigned)mark];
}

static inline bool node_get_mark(struct xa_node *node,
		unsigned int offset, xa_mark_t mark)
{
	return test_bit(offset, node_marks(node, mark));
}

/* returns true if the bit was set */
static inline bool node_set_mark(struct xa_node *node, unsigned int offset,
				xa_mark_t mark)
{
	return __test_and_set_bit(offset, node_marks(node, mark));
}

/* returns true if the bit was set */
static inline bool node_clear_mark(struct xa_node *node, unsigned int offset,
				xa_mark_t mark)
{
	return __test_and_clear_bit(offset, node_marks(node, mark));
}

static inline bool node_any_mark(struct xa_node *node, xa_mark_t mark)
{
	return !bitmap_empty(node_marks(node, mark), XA_CHUNK_SIZE);
}

static inline void node_mark_all(struct xa_node *node, xa_mark_t mark)
{
	bitmap_fill(node_marks(node, mark), XA_CHUNK_SIZE);
}

#define mark_inc(mark) do { \
	mark = (__force xa_mark_t)((__force unsigned)(mark) + 1); \
} while (0)

/*
 * xas_squash_marks() - Merge all marks to the first entry
 * @xas: Array operation state.
 *
 * Set a mark on the first entry if any entry has it set.  Clear marks on
 * all sibling entries.
 */
static void xas_squash_marks(const struct xa_state *xas)
{
	unsigned int mark = 0;
	unsigned int limit = xas->xa_offset + xas->xa_sibs + 1;

	if (!xas->xa_sibs)
		return;

	do {
		unsigned long *marks = xas->xa_node->marks[mark];
		if (find_next_bit(marks, limit, xas->xa_offset + 1) == limit)
			continue;
		__set_bit(xas->xa_offset, marks);
		bitmap_clear(marks, xas->xa_offset + 1, xas->xa_sibs);
	} while (mark++ != (__force unsigned)XA_MARK_MAX);
}

/* extracts the offset within this node from the index */
static unsigned int get_offset(unsigned long index, struct xa_node *node)
{
	return (index >> node->shift) & XA_CHUNK_MASK;
}

static void xas_set_offset(struct xa_state *xas)
{
	xas->xa_offset = get_offset(xas->xa_index, xas->xa_node);
}

/* move the index either forwards (find) or backwards (sibling slot) */
static void xas_move_index(struct xa_state *xas, unsigned long offset)
{
	unsigned int shift = xas->xa_node->shift;
	xas->xa_index &= ~XA_CHUNK_MASK << shift;
	xas->xa_index += offset << shift;
}

static void xas_advance(struct xa_state *xas)
{
	xas->xa_offset++;
	xas_move_index(xas, xas->xa_offset);
}

static void *set_bounds(struct xa_state *xas)
{
	xas->xa_node = XAS_BOUNDS;
	return NULL;
}

/*
 * Starts a walk.  If the @xas is already valid, we assume that it's on
 * the right path and just return where we've got to.  If we're in an
 * error state, return NULL.  If the index is outside the current scope
 * of the xarray, return NULL without changing @xas->xa_node.  Otherwise
 * set @xas->xa_node to NULL and return the current head of the array.
 */
static void *xas_start(struct xa_state *xas)
{
	void *entry;

	if (xas_valid(xas))
		return xas_reload(xas);
	if (xas_error(xas))
		return NULL;

	entry = xa_head(xas->xa);
	if (!xa_is_node(entry)) {
		if (xas->xa_index)
			return set_bounds(xas);
	} else {
		if ((xas->xa_index >> xa_to_node(entry)->shift) > XA_CHUNK_MASK)
			return set_bounds(xas);
	}

	xas->xa_node = NULL;
	return entry;
}

static void *xas_descend(struct xa_state *xas, struct xa_node *node)
{
	unsigned int offset = get_offset(xas->xa_index, node);
	void *entry = xa_entry(xas->xa, node, offset);

	xas->xa_node = node;
	if (xa_is_sibling(entry)) {
		offset = xa_to_sibling(entry);
		entry = xa_entry(xas->xa, node, offset);
	}

	xas->xa_offset = offset;
	return entry;
}

/**
 * xas_load() - Load an entry from the XArray (advanced).
 * @xas: XArray operation state.
 *
 * Usually walks the @xas to the appropriate state to load the entry
 * stored at xa_index.  However, it will do nothing and return %NULL if
 * @xas is in an error state.  xas_load() will never expand the tree.
 *
 * If the xa_state is set up to operate on a multi-index entry, xas_load()
 * may return %NULL or an internal entry, even if there are entries
 * present within the range specified by @xas.
 *
 * Context: Any context.  The caller should hold the xa_lock or the RCU lock.
 * Return: Usually an entry in the XArray, but see description for exceptions.
 */
void *xas_load(struct xa_state *xas)
{
	void *entry = xas_start(xas);

	while (xa_is_node(entry)) {
		struct xa_node *node = xa_to_node(entry);

		if (xas->xa_shift > node->shift)
			break;
		entry = xas_descend(xas, node);
		if (node->shift == 0)
			break;
	}
	return entry;
}
EXPORT_SYMBOL_GPL(xas_load);
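
/*
 * Illustrative sketch (not part of the original file): a typical caller
 * of the advanced API walks the array under the RCU read lock and retries
 * on an %XA_RETRY_ENTRY.  The xarray @cache, the type 'struct foo' and the
 * function name are hypothetical.
 *
 *	struct foo *foo_lookup(struct xarray *cache, unsigned long id)
 *	{
 *		XA_STATE(xas, cache, id);
 *		struct foo *foo;
 *
 *		rcu_read_lock();
 *		do {
 *			foo = xas_load(&xas);
 *		} while (xas_retry(&xas, foo));
 *		rcu_read_unlock();
 *
 *		return foo;
 *	}
 */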

/* Move the radix tree node cache here */
extern struct kmem_cache *radix_tree_node_cachep;
extern void radix_tree_node_rcu_free(struct rcu_head *head);

#define XA_RCU_FREE	((struct xarray *)1)

static void xa_node_free(struct xa_node *node)
{
	XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
	node->array = XA_RCU_FREE;
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * xas_destroy() - Free any resources allocated during the XArray operation.
 * @xas: XArray operation state.
 *
 * Frees the node pre-allocated by xas_nomem(), if any.
 */
static void xas_destroy(struct xa_state *xas)
{
	struct xa_node *node = xas->xa_alloc;

	if (!node)
		return;
	XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
	kmem_cache_free(radix_tree_node_cachep, node);
	xas->xa_alloc = NULL;
}

/**
 * xas_nomem() - Allocate memory if needed.
 * @xas: XArray operation state.
 * @gfp: Memory allocation flags.
 *
 * If we need to add new nodes to the XArray, we try to allocate memory
 * with GFP_NOWAIT while holding the lock, which will usually succeed.
 * If it fails, @xas is flagged as needing memory to continue.  The caller
 * should drop the lock and call xas_nomem().  If xas_nomem() succeeds,
 * the caller should retry the operation.
 *
 * Forward progress is guaranteed as one node is allocated here and
 * stored in the xa_state where it will be found by xas_alloc().  More
 * nodes will likely be found in the slab allocator, but we do not tie
 * them up here.
 *
 * Return: true if memory was needed, and was successfully allocated.
 */
bool xas_nomem(struct xa_state *xas, gfp_t gfp)
{
	if (xas->xa_node != XA_ERROR(-ENOMEM)) {
		xas_destroy(xas);
		return false;
	}
	if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
		gfp |= __GFP_ACCOUNT;
	xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
	if (!xas->xa_alloc)
		return false;
	XA_NODE_BUG_ON(xas->xa_alloc, !list_empty(&xas->xa_alloc->private_list));
	xas->xa_node = XAS_RESTART;
	return true;
}
EXPORT_SYMBOL_GPL(xas_nomem);
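
/*
 * Illustrative sketch (not part of the original file): the canonical
 * store loop drops the lock and retries through xas_nomem() whenever the
 * store failed for lack of memory.  @array, @index and @item are
 * hypothetical.
 *
 *	XA_STATE(xas, &array, index);
 *
 *	do {
 *		xas_lock(&xas);
 *		xas_store(&xas, item);
 *		xas_unlock(&xas);
 *	} while (xas_nomem(&xas, GFP_KERNEL));
 *
 *	return xas_error(&xas);
 */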

/*
 * __xas_nomem() - Drop locks and allocate memory if needed.
 * @xas: XArray operation state.
 * @gfp: Memory allocation flags.
 *
 * Internal variant of xas_nomem().
 *
 * Return: true if memory was needed, and was successfully allocated.
 */
static bool __xas_nomem(struct xa_state *xas, gfp_t gfp)
	__must_hold(xas->xa->xa_lock)
{
	unsigned int lock_type = xa_lock_type(xas->xa);

	if (xas->xa_node != XA_ERROR(-ENOMEM)) {
		xas_destroy(xas);
		return false;
	}
	if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
		gfp |= __GFP_ACCOUNT;
	if (gfpflags_allow_blocking(gfp)) {
		xas_unlock_type(xas, lock_type);
		xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
		xas_lock_type(xas, lock_type);
	} else {
		xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
	}
	if (!xas->xa_alloc)
		return false;
	XA_NODE_BUG_ON(xas->xa_alloc, !list_empty(&xas->xa_alloc->private_list));
	xas->xa_node = XAS_RESTART;
	return true;
}

static void xas_update(struct xa_state *xas, struct xa_node *node)
{
	if (xas->xa_update)
		xas->xa_update(node);
	else
		XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
}

static void *xas_alloc(struct xa_state *xas, unsigned int shift)
{
	struct xa_node *parent = xas->xa_node;
	struct xa_node *node = xas->xa_alloc;

	if (xas_invalid(xas))
		return NULL;

	if (node) {
		xas->xa_alloc = NULL;
	} else {
		gfp_t gfp = GFP_NOWAIT | __GFP_NOWARN;

		if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
			gfp |= __GFP_ACCOUNT;

		node = kmem_cache_alloc(radix_tree_node_cachep, gfp);
		if (!node) {
			xas_set_err(xas, -ENOMEM);
			return NULL;
		}
	}

	if (parent) {
		node->offset = xas->xa_offset;
		parent->count++;
		XA_NODE_BUG_ON(node, parent->count > XA_CHUNK_SIZE);
		xas_update(xas, parent);
	}
	XA_NODE_BUG_ON(node, shift > BITS_PER_LONG);
	XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
	node->shift = shift;
	node->count = 0;
	node->nr_values = 0;
	RCU_INIT_POINTER(node->parent, xas->xa_node);
	node->array = xas->xa;

	return node;
}

#ifdef CONFIG_XARRAY_MULTI
/* Returns the number of indices covered by a given xa_state */
static unsigned long xas_size(const struct xa_state *xas)
{
	return (xas->xa_sibs + 1UL) << xas->xa_shift;
}
#endif

/*
 * Use this to calculate the maximum index that will need to be created
 * in order to add the entry described by @xas.  Because we cannot store a
 * multi-index entry at index 0, the calculation is a little more complex
 * than you might expect.
 */
static unsigned long xas_max(struct xa_state *xas)
{
	unsigned long max = xas->xa_index;

#ifdef CONFIG_XARRAY_MULTI
	if (xas->xa_shift || xas->xa_sibs) {
		unsigned long mask = xas_size(xas) - 1;
		max |= mask;
		if (mask == max)
			max++;
	}
#endif

	return max;
}

/* The maximum index that can be contained in the array without expanding it */
static unsigned long max_index(void *entry)
{
	if (!xa_is_node(entry))
		return 0;
	return (XA_CHUNK_SIZE << xa_to_node(entry)->shift) - 1;
}

static void xas_shrink(struct xa_state *xas)
{
	struct xarray *xa = xas->xa;
	struct xa_node *node = xas->xa_node;

	for (;;) {
		void *entry;

		XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
		if (node->count != 1)
			break;
		entry = xa_entry_locked(xa, node, 0);
		if (!entry)
			break;
		if (!xa_is_node(entry) && node->shift)
			break;
		if (xa_is_zero(entry) && xa_zero_busy(xa))
			entry = NULL;
		xas->xa_node = XAS_BOUNDS;

		RCU_INIT_POINTER(xa->xa_head, entry);
		if (xa_track_free(xa) && !node_get_mark(node, 0, XA_FREE_MARK))
			xa_mark_clear(xa, XA_FREE_MARK);

		node->count = 0;
		node->nr_values = 0;
		if (!xa_is_node(entry))
			RCU_INIT_POINTER(node->slots[0], XA_RETRY_ENTRY);
		xas_update(xas, node);
		xa_node_free(node);
		if (!xa_is_node(entry))
			break;
		node = xa_to_node(entry);
		node->parent = NULL;
	}
}

/*
 * xas_delete_node() - Attempt to delete an xa_node
 * @xas: Array operation state.
 *
 * Attempts to delete the @xas->xa_node.  This will fail if xa->node has
 * a non-zero reference count.
 */
static void xas_delete_node(struct xa_state *xas)
{
	struct xa_node *node = xas->xa_node;

	for (;;) {
		struct xa_node *parent;

		XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
		if (node->count)
			break;

		parent = xa_parent_locked(xas->xa, node);
		xas->xa_node = parent;
		xas->xa_offset = node->offset;
		xa_node_free(node);

		if (!parent) {
			xas->xa->xa_head = NULL;
			xas->xa_node = XAS_BOUNDS;
			return;
		}

		parent->slots[xas->xa_offset] = NULL;
		parent->count--;
		XA_NODE_BUG_ON(parent, parent->count > XA_CHUNK_SIZE);
		node = parent;
		xas_update(xas, node);
	}

	if (!node->parent)
		xas_shrink(xas);
}

/*
 * xas_free_nodes() - Free this node and all nodes that it references
 * @xas: Array operation state.
 * @top: Node to free
 *
 * This node has been removed from the tree.  We must now free it and all
 * of its subnodes.  There must be no entries which are values in this
 * subtree.
 */
static void xas_free_nodes(struct xa_state *xas, struct xa_node *top)
{
	unsigned int offset = 0;
	struct xa_node *node = top;

	for (;;) {
		void *entry = xa_entry_locked(xas->xa, node, offset);

		if (node->shift && xa_is_node(entry)) {
			node = xa_to_node(entry);
			offset = 0;
			continue;
		}
		if (entry)
			RCU_INIT_POINTER(node->slots[offset], XA_RETRY_ENTRY);
		offset++;
		while (offset == XA_CHUNK_SIZE) {
			struct xa_node *parent;

			parent = xa_parent_locked(xas->xa, node);
			offset = node->offset + 1;
			node->count = 0;
			node->nr_values = 0;
			xas_update(xas, node);
			xa_node_free(node);
			if (node == top)
				return;
			node = parent;
		}
	}
}

/*
 * xas_expand adds nodes to the head of the tree until it has reached
 * sufficient height to be able to contain @xas->xa_index
 */
static int xas_expand(struct xa_state *xas, void *head)
{
	struct xarray *xa = xas->xa;
	struct xa_node *node = NULL;
	unsigned int shift = 0;
	unsigned long max = xas_max(xas);

	if (!head) {
		if (max == 0)
			return 0;
		while ((max >> shift) >= XA_CHUNK_SIZE)
			shift += XA_CHUNK_SHIFT;
		return shift + XA_CHUNK_SHIFT;
	} else if (xa_is_node(head)) {
		node = xa_to_node(head);
		shift = node->shift + XA_CHUNK_SHIFT;
	}
	xas->xa_node = NULL;

	while (max > max_index(head)) {
		xa_mark_t mark = 0;

		XA_NODE_BUG_ON(node, shift > BITS_PER_LONG);
		node = xas_alloc(xas, shift);
		if (!node)
			return -ENOMEM;

		node->count = 1;
		if (xa_is_value(head))
			node->nr_values = 1;
		RCU_INIT_POINTER(node->slots[0], head);

		/* Propagate the aggregated mark info to the new child */
		for (;;) {
			if (xa_track_free(xa) && mark == XA_FREE_MARK) {
				node_mark_all(node, XA_FREE_MARK);
				if (!xa_marked(xa, XA_FREE_MARK)) {
					node_clear_mark(node, 0, XA_FREE_MARK);
					xa_mark_set(xa, XA_FREE_MARK);
				}
			} else if (xa_marked(xa, mark)) {
				node_set_mark(node, 0, mark);
			}
			if (mark == XA_MARK_MAX)
				break;
			mark_inc(mark);
		}

		/*
		 * Now that the new node is fully initialised, we can add
		 * it to the tree
		 */
		if (xa_is_node(head)) {
			xa_to_node(head)->offset = 0;
			rcu_assign_pointer(xa_to_node(head)->parent, node);
		}
		head = xa_mk_node(node);
		rcu_assign_pointer(xa->xa_head, head);
		xas_update(xas, node);

		shift += XA_CHUNK_SHIFT;
	}

	xas->xa_node = node;
	return shift;
}

/*
 * xas_create() - Create a slot to store an entry in.
 * @xas: XArray operation state.
 * @allow_root: %true if we can store the entry in the root directly
 *
 * Most users will not need to call this function directly, as it is called
 * by xas_store().  It is useful for doing conditional store operations
 * (see the xa_cmpxchg() implementation for an example).
 *
 * Return: If the slot already existed, returns the contents of this slot.
 * If the slot was newly created, returns %NULL.  If it failed to create the
 * slot, returns %NULL and indicates the error in @xas.
 */
static void *xas_create(struct xa_state *xas, bool allow_root)
{
	struct xarray *xa = xas->xa;
	void *entry;
	void __rcu **slot;
	struct xa_node *node = xas->xa_node;
	int shift;
	unsigned int order = xas->xa_shift;

	if (xas_top(node)) {
		entry = xa_head_locked(xa);
		xas->xa_node = NULL;
		if (!entry && xa_zero_busy(xa))
			entry = XA_ZERO_ENTRY;
		shift = xas_expand(xas, entry);
		if (shift < 0)
			return NULL;
		if (!shift && !allow_root)
			shift = XA_CHUNK_SHIFT;
		entry = xa_head_locked(xa);
		slot = &xa->xa_head;
	} else if (xas_error(xas)) {
		return NULL;
	} else if (node) {
		unsigned int offset = xas->xa_offset;

		shift = node->shift;
		entry = xa_entry_locked(xa, node, offset);
		slot = &node->slots[offset];
	} else {
		shift = 0;
		entry = xa_head_locked(xa);
		slot = &xa->xa_head;
	}

	while (shift > order) {
		shift -= XA_CHUNK_SHIFT;
		if (!entry) {
			node = xas_alloc(xas, shift);
			if (!node)
				break;
			if (xa_track_free(xa))
				node_mark_all(node, XA_FREE_MARK);
			rcu_assign_pointer(*slot, xa_mk_node(node));
		} else if (xa_is_node(entry)) {
			node = xa_to_node(entry);
		} else {
			break;
		}
		entry = xas_descend(xas, node);
		slot = &node->slots[xas->xa_offset];
	}

	return entry;
}

/**
 * xas_create_range() - Ensure that stores to this range will succeed
 * @xas: XArray operation state.
 *
 * Creates all of the slots in the range covered by @xas.  Sets @xas to
 * create single-index entries and positions it at the beginning of the
 * range.  This is for the benefit of users which have not yet been
 * converted to use multi-index entries.
 */
void xas_create_range(struct xa_state *xas)
{
	unsigned long index = xas->xa_index;
	unsigned char shift = xas->xa_shift;
	unsigned char sibs = xas->xa_sibs;

	/* 1UL forces the shift to be done in unsigned long arithmetic */
	xas->xa_index |= ((sibs + 1UL) << shift) - 1;
	if (xas_is_node(xas) && xas->xa_node->shift == xas->xa_shift)
		xas->xa_offset |= sibs;
	xas->xa_shift = 0;
	xas->xa_sibs = 0;

	for (;;) {
		xas_create(xas, true);
		if (xas_error(xas))
			goto restore;
		if (xas->xa_index <= (index | XA_CHUNK_MASK))
			goto success;
		xas->xa_index -= XA_CHUNK_SIZE;

		for (;;) {
			struct xa_node *node = xas->xa_node;
			xas->xa_node = xa_parent_locked(xas->xa, node);
			xas->xa_offset = node->offset - 1;
			if (node->offset != 0)
				break;
		}
	}

restore:
	xas->xa_shift = shift;
	xas->xa_sibs = sibs;
	xas->xa_index = index;
	return;
success:
	xas->xa_index = index;
	if (xas->xa_node)
		xas_set_offset(xas);
}
EXPORT_SYMBOL_GPL(xas_create_range);

static void update_node(struct xa_state *xas, struct xa_node *node,
		int count, int values)
{
	if (!node || (!count && !values))
		return;

	node->count += count;
	node->nr_values += values;
	XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
	XA_NODE_BUG_ON(node, node->nr_values > XA_CHUNK_SIZE);
	xas_update(xas, node);
	if (count < 0)
		xas_delete_node(xas);
}

/**
 * xas_store() - Store this entry in the XArray.
 * @xas: XArray operation state.
 * @entry: New entry.
 *
 * If @xas is operating on a multi-index entry, the entry returned by this
 * function is essentially meaningless (it may be an internal entry or it
 * may be %NULL, even if there are non-NULL entries at some of the indices
 * covered by the range).  This is not a problem for any current users,
 * and can be changed if needed.
 *
 * Return: The old entry at this index.
 */
void *xas_store(struct xa_state *xas, void *entry)
{
	struct xa_node *node;
	void __rcu **slot = &xas->xa->xa_head;
	unsigned int offset, max;
	int count = 0;
	int values = 0;
	void *first, *next;
	bool value = xa_is_value(entry);

	if (entry) {
		bool allow_root = !xa_is_node(entry) && !xa_is_zero(entry);
		first = xas_create(xas, allow_root);
	} else {
		first = xas_load(xas);
	}

	if (xas_invalid(xas))
		return first;
	node = xas->xa_node;
	if (node && (xas->xa_shift < node->shift))
		xas->xa_sibs = 0;
	if ((first == entry) && !xas->xa_sibs)
		return first;

	next = first;
	offset = xas->xa_offset;
	max = xas->xa_offset + xas->xa_sibs;
	if (node) {
		slot = &node->slots[offset];
		if (xas->xa_sibs)
			xas_squash_marks(xas);
	}
	if (!entry)
		xas_init_marks(xas);

	for (;;) {
		/*
		 * Must clear the marks before setting the entry to NULL,
		 * otherwise xas_for_each_marked may find a NULL entry and
		 * stop early.  rcu_assign_pointer contains a release barrier
		 * so the marks must be up to date before the entry is visible.
		 */
		rcu_assign_pointer(*slot, entry);
		if (xa_is_node(next) && (!node || node->shift))
			xas_free_nodes(xas, xa_to_node(next));
		if (!node)
			break;
		count += !next - !entry;
		values += !xa_is_value(first) - !value;
		if (entry) {
			if (offset == max)
				break;
			if (!xa_is_sibling(entry))
				entry = xa_mk_sibling(xas->xa_offset);
		} else {
			if (offset == XA_CHUNK_MASK)
				break;
		}
		next = xa_entry_locked(xas->xa, node, ++offset);
		if (!xa_is_sibling(next)) {
			if (!entry && (offset > max))
				break;
			first = next;
		}
		slot++;
	}

	update_node(xas, node, count, values);
	return first;
}
EXPORT_SYMBOL_GPL(xas_store);
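
/*
 * Illustrative sketch (not part of the original file): storing an entry
 * and setting a mark on it in one locked region.  @array, @index and
 * @item are hypothetical; a real caller would also handle -ENOMEM via
 * xas_nomem() as shown above.
 *
 *	XA_STATE(xas, &array, index);
 *
 *	xas_lock(&xas);
 *	xas_store(&xas, item);
 *	if (!xas_error(&xas))
 *		xas_set_mark(&xas, XA_MARK_0);
 *	xas_unlock(&xas);
 */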

/**
 * xas_get_mark() - Returns the state of this mark.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Return: true if the mark is set, false if the mark is clear or @xas
 * is in an error state.
 */
bool xas_get_mark(const struct xa_state *xas, xa_mark_t mark)
{
	if (xas_invalid(xas))
		return false;
	if (!xas->xa_node)
		return xa_marked(xas->xa, mark);
	return node_get_mark(xas->xa_node, xas->xa_offset, mark);
}
EXPORT_SYMBOL_GPL(xas_get_mark);

/**
 * xas_set_mark() - Sets the mark on this entry and its parents.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Sets the specified mark on this entry, and walks up the tree setting it
 * on all the ancestor entries.  Does nothing if @xas has not been walked to
 * an entry, or is in an error state.
 */
void xas_set_mark(const struct xa_state *xas, xa_mark_t mark)
{
	struct xa_node *node = xas->xa_node;
	unsigned int offset = xas->xa_offset;

	if (xas_invalid(xas))
		return;

	while (node) {
		if (node_set_mark(node, offset, mark))
			return;
		offset = node->offset;
		node = xa_parent_locked(xas->xa, node);
	}

	if (!xa_marked(xas->xa, mark))
		xa_mark_set(xas->xa, mark);
}
EXPORT_SYMBOL_GPL(xas_set_mark);

/**
 * xas_clear_mark() - Clears the mark on this entry and its parents.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Clears the specified mark on this entry, and walks back to the head
 * attempting to clear it on all the ancestor entries.  Does nothing if
 * @xas has not been walked to an entry, or is in an error state.
 */
void xas_clear_mark(const struct xa_state *xas, xa_mark_t mark)
{
	struct xa_node *node = xas->xa_node;
	unsigned int offset = xas->xa_offset;

	if (xas_invalid(xas))
		return;

	while (node) {
		if (!node_clear_mark(node, offset, mark))
			return;
		if (node_any_mark(node, mark))
			return;

		offset = node->offset;
		node = xa_parent_locked(xas->xa, node);
	}

	if (xa_marked(xas->xa, mark))
		xa_mark_clear(xas->xa, mark);
}
EXPORT_SYMBOL_GPL(xas_clear_mark);

/**
 * xas_init_marks() - Initialise all marks for the entry
 * @xas: Array operations state.
 *
 * Initialise all marks for the entry specified by @xas.  If we're tracking
 * free entries with a mark, we need to set it on all entries.  All other
 * marks are cleared.
 *
 * This implementation is not as efficient as it could be; we may walk
 * up the tree multiple times.
 */
void xas_init_marks(const struct xa_state *xas)
{
	xa_mark_t mark = 0;

	for (;;) {
		if (xa_track_free(xas->xa) && mark == XA_FREE_MARK)
			xas_set_mark(xas, mark);
		else
			xas_clear_mark(xas, mark);
		if (mark == XA_MARK_MAX)
			break;
		mark_inc(mark);
	}
}
EXPORT_SYMBOL_GPL(xas_init_marks);

/**
 * xas_pause() - Pause a walk to drop a lock.
 * @xas: XArray operation state.
 *
 * Some users need to pause a walk and drop the lock they're holding in
 * order to yield to a higher priority thread or carry out an operation
 * on an entry.  Those users should call this function before they drop
 * the lock.  It resets the @xas to be suitable for the next iteration
 * of the loop after the user has reacquired the lock.  If most entries
 * found during a walk require you to call xas_pause(), the xa_for_each()
 * iterator may be more appropriate.
 *
 * Note that xas_pause() only works for forward iteration.  If a user needs
 * to pause a reverse iteration, we will need a xas_pause_rev().
 */
void xas_pause(struct xa_state *xas)
{
	struct xa_node *node = xas->xa_node;

	if (xas_invalid(xas))
		return;

	xas->xa_node = XAS_RESTART;
	if (node) {
		unsigned int offset = xas->xa_offset;
		while (++offset < XA_CHUNK_SIZE) {
			if (!xa_is_sibling(xa_entry(xas->xa, node, offset)))
				break;
		}
		xas->xa_index += (offset - xas->xa_offset) << node->shift;
		if (xas->xa_index == 0)
			xas->xa_node = XAS_BOUNDS;
	} else {
		xas->xa_index++;
	}
}
EXPORT_SYMBOL_GPL(xas_pause);
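
/*
 * Illustrative sketch (not part of the original file): pausing an RCU
 * walk so the CPU can reschedule.  @array and the process() helper are
 * hypothetical.
 *
 *	XA_STATE(xas, &array, 0);
 *	void *entry;
 *
 *	rcu_read_lock();
 *	xas_for_each(&xas, entry, ULONG_MAX) {
 *		if (xas_retry(&xas, entry))
 *			continue;
 *		process(entry);
 *		if (need_resched()) {
 *			xas_pause(&xas);
 *			rcu_read_unlock();
 *			cond_resched();
 *			rcu_read_lock();
 *		}
 *	}
 *	rcu_read_unlock();
 */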

/*
 * __xas_prev() - Find the previous entry in the XArray.
 * @xas: XArray operation state.
 *
 * Helper function for xas_prev() which handles all the complicated cases
 * out of line.
 */
void *__xas_prev(struct xa_state *xas)
{
	void *entry;

	if (!xas_frozen(xas->xa_node))
		xas->xa_index--;
	if (!xas->xa_node)
		return set_bounds(xas);
	if (xas_not_node(xas->xa_node))
		return xas_load(xas);

	if (xas->xa_offset != get_offset(xas->xa_index, xas->xa_node))
		xas->xa_offset--;

	/* xa_offset is an unsigned char; decrementing 0 wraps to 255 */
	while (xas->xa_offset == 255) {
		xas->xa_offset = xas->xa_node->offset - 1;
		xas->xa_node = xa_parent(xas->xa, xas->xa_node);
		if (!xas->xa_node)
			return set_bounds(xas);
	}

	for (;;) {
		entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
		if (!xa_is_node(entry))
			return entry;

		xas->xa_node = xa_to_node(entry);
		xas_set_offset(xas);
	}
}
EXPORT_SYMBOL_GPL(__xas_prev);

/*
 * __xas_next() - Find the next entry in the XArray.
 * @xas: XArray operation state.
 *
 * Helper function for xas_next() which handles all the complicated cases
 * out of line.
 */
void *__xas_next(struct xa_state *xas)
{
	void *entry;

	if (!xas_frozen(xas->xa_node))
		xas->xa_index++;
	if (!xas->xa_node)
		return set_bounds(xas);
	if (xas_not_node(xas->xa_node))
		return xas_load(xas);

	if (xas->xa_offset != get_offset(xas->xa_index, xas->xa_node))
		xas->xa_offset++;

	while (xas->xa_offset == XA_CHUNK_SIZE) {
		xas->xa_offset = xas->xa_node->offset + 1;
		xas->xa_node = xa_parent(xas->xa, xas->xa_node);
		if (!xas->xa_node)
			return set_bounds(xas);
	}

	for (;;) {
		entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
		if (!xa_is_node(entry))
			return entry;

		xas->xa_node = xa_to_node(entry);
		xas_set_offset(xas);
	}
}
EXPORT_SYMBOL_GPL(__xas_next);

/**
 * xas_find() - Find the next present entry in the XArray.
 * @xas: XArray operation state.
 * @max: Highest index to return.
 *
 * If the @xas has not yet been walked to an entry, return the entry
 * which has an index >= xas.xa_index.  If it has been walked, the entry
 * currently being pointed at has been processed, and so we move to the
 * next entry.
 *
 * If no entry is found and the array is smaller than @max, the iterator
 * is set to the smallest index not yet in the array.  This allows @xas
 * to be immediately passed to xas_store().
 *
 * Return: The entry, if found, otherwise %NULL.
 */
void *xas_find(struct xa_state *xas, unsigned long max)
{
	void *entry;

	if (xas_error(xas) || xas->xa_node == XAS_BOUNDS)
		return NULL;
	if (xas->xa_index > max)
		return set_bounds(xas);

	if (!xas->xa_node) {
		xas->xa_index = 1;
		return set_bounds(xas);
	} else if (xas->xa_node == XAS_RESTART) {
		entry = xas_load(xas);
		if (entry || xas_not_node(xas->xa_node))
			return entry;
	} else if (!xas->xa_node->shift &&
		    xas->xa_offset != (xas->xa_index & XA_CHUNK_MASK)) {
		xas->xa_offset = ((xas->xa_index - 1) & XA_CHUNK_MASK) + 1;
	}

	xas_advance(xas);

	while (xas->xa_node && (xas->xa_index <= max)) {
		if (unlikely(xas->xa_offset == XA_CHUNK_SIZE)) {
			xas->xa_offset = xas->xa_node->offset + 1;
			xas->xa_node = xa_parent(xas->xa, xas->xa_node);
			continue;
		}

		entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
		if (xa_is_node(entry)) {
			xas->xa_node = xa_to_node(entry);
			xas->xa_offset = 0;
			continue;
		}
		if (entry && !xa_is_sibling(entry))
			return entry;

		xas_advance(xas);
	}

	if (!xas->xa_node)
		xas->xa_node = XAS_BOUNDS;
	return NULL;
}
EXPORT_SYMBOL_GPL(xas_find);
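
/*
 * Illustrative sketch (not part of the original file): calling xas_find()
 * directly rather than through the xas_for_each() wrapper.  On subsequent
 * calls the current entry counts as processed, so the walk advances.
 * @array is hypothetical.
 *
 *	XA_STATE(xas, &array, 0);
 *	void *entry;
 *
 *	rcu_read_lock();
 *	for (entry = xas_find(&xas, ULONG_MAX); entry;
 *	     entry = xas_find(&xas, ULONG_MAX)) {
 *		if (xas_retry(&xas, entry))
 *			continue;
 *		...
 *	}
 *	rcu_read_unlock();
 */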

/**
 * xas_find_marked() - Find the next marked entry in the XArray.
 * @xas: XArray operation state.
 * @max: Highest index to return.
 * @mark: Mark number to search for.
 *
 * If the @xas has not yet been walked to an entry, return the marked entry
 * which has an index >= xas.xa_index.  If it has been walked, the entry
 * currently being pointed at has been processed, and so we return the
 * first marked entry with an index > xas.xa_index.
 *
 * If no marked entry is found and the array is smaller than @max, @xas is
 * set to the bounds state and xas->xa_index is set to the smallest index
 * not yet in the array.  This allows @xas to be immediately passed to
 * xas_store().
 *
 * If no entry is found before @max is reached, @xas is set to the restart
 * state.
 *
 * Return: The entry, if found, otherwise %NULL.
 */
void *xas_find_marked(struct xa_state *xas, unsigned long max, xa_mark_t mark)
{
	bool advance = true;
	unsigned int offset;
	void *entry;

	if (xas_error(xas))
		return NULL;
	if (xas->xa_index > max)
		goto max;

	if (!xas->xa_node) {
		xas->xa_index = 1;
		goto out;
	} else if (xas_top(xas->xa_node)) {
		advance = false;
		entry = xa_head(xas->xa);
		xas->xa_node = NULL;
		if (xas->xa_index > max_index(entry))
			goto out;
		if (!xa_is_node(entry)) {
			if (xa_marked(xas->xa, mark))
				return entry;
			xas->xa_index = 1;
			goto out;
		}
		xas->xa_node = xa_to_node(entry);
		xas->xa_offset = xas->xa_index >> xas->xa_node->shift;
	}

	while (xas->xa_index <= max) {
		if (unlikely(xas->xa_offset == XA_CHUNK_SIZE)) {
			xas->xa_offset = xas->xa_node->offset + 1;
			xas->xa_node = xa_parent(xas->xa, xas->xa_node);
			if (!xas->xa_node)
				break;
			advance = false;
			continue;
		}

		if (!advance) {
			entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
			if (xa_is_sibling(entry)) {
				xas->xa_offset = xa_to_sibling(entry);
				xas_move_index(xas, xas->xa_offset);
			}
		}

		offset = xas_find_chunk(xas, advance, mark);
		if (offset > xas->xa_offset) {
			advance = false;
			xas_move_index(xas, offset);
			/* Mind the wrap */
			if ((xas->xa_index - 1) >= max)
				goto max;
			xas->xa_offset = offset;
			if (offset == XA_CHUNK_SIZE)
				continue;
		}

		entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
		if (!xa_is_node(entry))
			return entry;
		xas->xa_node = xa_to_node(entry);
		xas_set_offset(xas);
	}

out:
	if (xas->xa_index > max)
		goto max;
	return set_bounds(xas);
max:
	xas->xa_node = XAS_RESTART;
	return NULL;
}
EXPORT_SYMBOL_GPL(xas_find_marked);

/**
 * xas_find_conflict() - Find the next present entry in a range.
 * @xas: XArray operation state.
 *
 * The @xas describes both a range and a position within that range.
 *
 * Context: Any context.  Expects xa_lock to be held.
 * Return: The next entry in the range covered by @xas or %NULL.
 */
void *xas_find_conflict(struct xa_state *xas)
{
	void *curr;

	if (xas_error(xas))
		return NULL;

	if (!xas->xa_node)
		return NULL;

	if (xas_top(xas->xa_node)) {
		curr = xas_start(xas);
		if (!curr)
			return NULL;
		while (xa_is_node(curr)) {
			struct xa_node *node = xa_to_node(curr);
			curr = xas_descend(xas, node);
		}
		if (curr)
			return curr;
	}

	if (xas->xa_node->shift > xas->xa_shift)
		return NULL;

	for (;;) {
		if (xas->xa_node->shift == xas->xa_shift) {
			if ((xas->xa_offset & xas->xa_sibs) == xas->xa_sibs)
				break;
		} else if (xas->xa_offset == XA_CHUNK_MASK) {
			xas->xa_offset = xas->xa_node->offset;
			xas->xa_node = xa_parent_locked(xas->xa, xas->xa_node);
			if (!xas->xa_node)
				break;
			continue;
		}
		curr = xa_entry_locked(xas->xa, xas->xa_node, ++xas->xa_offset);
		if (xa_is_sibling(curr))
			continue;
		while (xa_is_node(curr)) {
			xas->xa_node = xa_to_node(curr);
			xas->xa_offset = 0;
			curr = xa_entry_locked(xas->xa, xas->xa_node, 0);
		}
		if (curr)
			return curr;
	}
	xas->xa_offset -= xas->xa_sibs;
	return NULL;
}
EXPORT_SYMBOL_GPL(xas_find_conflict);

/**
 * xa_load() - Load an entry from an XArray.
 * @xa: XArray.
 * @index: index into array.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The entry at @index in @xa.
 */
void *xa_load(struct xarray *xa, unsigned long index)
{
	XA_STATE(xas, xa, index);
	void *entry;

	rcu_read_lock();
	do {
		entry = xas_load(&xas);
		if (xa_is_zero(entry))
			entry = NULL;
	} while (xas_retry(&xas, entry));
	rcu_read_unlock();

	return entry;
}
EXPORT_SYMBOL(xa_load);
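
/*
 * Illustrative sketch (not part of the original file): the simplest use
 * of the normal API.  @foo_array and 'struct foo' are hypothetical.
 *
 *	struct foo *foo = xa_load(&foo_array, id);
 *
 *	if (!foo)
 *		return -ENOENT;
 */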

static void *xas_result(struct xa_state *xas, void *curr)
{
	if (xa_is_zero(curr))
		return NULL;
	if (xas_error(xas))
		curr = xas->xa_node;
	return curr;
}

/**
 * __xa_erase() - Erase this entry from the XArray while locked.
 * @xa: XArray.
 * @index: Index into array.
 *
 * After this function returns, loading from @index will return %NULL.
 * If the index is part of a multi-index entry, all indices will be erased
 * and none of the entries will be part of a multi-index entry.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 * Return: The entry which used to be at this index.
 */
void *__xa_erase(struct xarray *xa, unsigned long index)
{
	XA_STATE(xas, xa, index);
	return xas_result(&xas, xas_store(&xas, NULL));
}
EXPORT_SYMBOL(__xa_erase);

/**
 * xa_erase() - Erase this entry from the XArray.
 * @xa: XArray.
 * @index: Index of entry.
 *
 * After this function returns, loading from @index will return %NULL.
 * If the index is part of a multi-index entry, all indices will be erased
 * and none of the entries will be part of a multi-index entry.
 *
 * Context: Any context.  Takes and releases the xa_lock.
 * Return: The entry which used to be at this index.
 */
void *xa_erase(struct xarray *xa, unsigned long index)
{
	void *entry;

	xa_lock(xa);
	entry = __xa_erase(xa, index);
	xa_unlock(xa);

	return entry;
}
EXPORT_SYMBOL(xa_erase);
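
/*
 * Illustrative sketch (not part of the original file): xa_erase() returns
 * the old entry, so removal and freeing combine naturally.  @foo_array is
 * hypothetical.
 *
 *	struct foo *foo = xa_erase(&foo_array, id);
 *
 *	kfree(foo);
 */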

/**
 * __xa_store() - Store this entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * You must already be holding the xa_lock when calling this function.
 * It will drop the lock if needed to allocate memory, and then reacquire
 * it afterwards.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: The old entry at this index or xa_err() if an error happened.
 */
void *__xa_store(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
	XA_STATE(xas, xa, index);
	void *curr;

	if (WARN_ON_ONCE(xa_is_advanced(entry)))
		return XA_ERROR(-EINVAL);
	if (xa_track_free(xa) && !entry)
		entry = XA_ZERO_ENTRY;

	do {
		curr = xas_store(&xas, entry);
		if (xa_track_free(xa))
			xas_clear_mark(&xas, XA_FREE_MARK);
	} while (__xas_nomem(&xas, gfp));

	return xas_result(&xas, curr);
}
EXPORT_SYMBOL(__xa_store);

/**
 * xa_store() - Store this entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * After this function returns, loads from this index will return @entry.
 * Storing into an existing multi-index entry updates the entry of every index.
 * The marks associated with @index are unaffected unless @entry is %NULL.
 *
 * Context: Any context.  Takes and releases the xa_lock.
 * May sleep if the @gfp flags permit.
 * Return: The old entry at this index on success, xa_err(-EINVAL) if @entry
 * cannot be stored in an XArray, or xa_err(-ENOMEM) if memory allocation
 * failed.
 */
void *xa_store(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
	void *curr;

	xa_lock(xa);
	curr = __xa_store(xa, index, entry, gfp);
	xa_unlock(xa);

	return curr;
}
EXPORT_SYMBOL(xa_store);
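
/*
 * Illustrative sketch (not part of the original file): xa_store() reports
 * failure through an xa_err() pointer rather than a NULL return, so
 * callers should check with xa_err().  @array, @index and @item are
 * hypothetical.
 *
 *	void *curr = xa_store(&array, index, item, GFP_KERNEL);
 *	int err = xa_err(curr);
 *
 *	if (err)
 *		return err;
 */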

/**
 * __xa_cmpxchg() - Store this entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @old: Old value to test against.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * You must already be holding the xa_lock when calling this function.
 * It will drop the lock if needed to allocate memory, and then reacquire
 * it afterwards.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: The old entry at this index or xa_err() if an error happened.
 */
void *__xa_cmpxchg(struct xarray *xa, unsigned long index,
			void *old, void *entry, gfp_t gfp)
{
	XA_STATE(xas, xa, index);
	void *curr;

	if (WARN_ON_ONCE(xa_is_advanced(entry)))
		return XA_ERROR(-EINVAL);

	do {
		curr = xas_load(&xas);
		if (curr == old) {
			xas_store(&xas, entry);
			if (xa_track_free(xa) && entry && !curr)
				xas_clear_mark(&xas, XA_FREE_MARK);
		}
	} while (__xas_nomem(&xas, gfp));

	return xas_result(&xas, curr);
}
EXPORT_SYMBOL(__xa_cmpxchg);
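
/*
 * Illustrative sketch (not part of the original file): callers normally
 * use the xa_cmpxchg() wrapper, which takes the lock itself.  Replacing
 * NULL with @item only succeeds if the slot is still empty; @array and
 * @item are hypothetical.
 *
 *	void *curr = xa_cmpxchg(&array, index, NULL, item, GFP_KERNEL);
 *
 *	if (xa_is_err(curr))
 *		return xa_err(curr);
 *	if (curr)
 *		return -EBUSY;	(someone else stored an entry first)
 */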

/**
 * __xa_insert() - Store this entry in the XArray if no entry is present.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * Inserting a NULL entry will store a reserved entry (like xa_reserve())
 * if no entry is present.  Inserting will fail if a reserved entry is
 * present, even though loading from this index will return NULL.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 if the store succeeded.  -EBUSY if another entry was present.
 * -ENOMEM if memory could not be allocated.
 */
int __xa_insert(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
	XA_STATE(xas, xa, index);
	void *curr;

	if (WARN_ON_ONCE(xa_is_advanced(entry)))
		return -EINVAL;
	if (!entry)
		entry = XA_ZERO_ENTRY;

	do {
		curr = xas_load(&xas);
		if (!curr) {
			xas_store(&xas, entry);
			if (xa_track_free(xa))
				xas_clear_mark(&xas, XA_FREE_MARK);
		} else {
			xas_set_err(&xas, -EBUSY);
		}
	} while (__xas_nomem(&xas, gfp));

	return xas_error(&xas);
}
EXPORT_SYMBOL(__xa_insert);
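
/*
 * Illustrative sketch (not part of the original file): the xa_insert()
 * wrapper returns an errno directly, which makes "add only if absent"
 * straightforward.  @array and @item are hypothetical.
 *
 *	int err = xa_insert(&array, index, item, GFP_KERNEL);
 *
 *	if (err == -EBUSY)
 *		...		(index was already occupied)
 *	else if (err)
 *		return err;
 */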

#ifdef CONFIG_XARRAY_MULTI
static void xas_set_range(struct xa_state *xas, unsigned long first,
		unsigned long last)
{
	unsigned int shift = 0;
	unsigned long sibs = last - first;
	unsigned int offset = XA_CHUNK_MASK;

	xas_set(xas, first);

	while ((first & XA_CHUNK_MASK) == 0) {
		if (sibs < XA_CHUNK_MASK)
			break;
		if ((sibs == XA_CHUNK_MASK) && (offset < XA_CHUNK_MASK))
			break;
		shift += XA_CHUNK_SHIFT;
		if (offset == XA_CHUNK_MASK)
			offset = sibs & XA_CHUNK_MASK;
		sibs >>= XA_CHUNK_SHIFT;
		first >>= XA_CHUNK_SHIFT;
	}

	offset = first & XA_CHUNK_MASK;
	if (offset + sibs > XA_CHUNK_MASK)
		sibs = XA_CHUNK_MASK - offset;
	if ((((first + sibs + 1) << shift) - 1) > last)
		sibs -= 1;

	xas->xa_shift = shift;
	xas->xa_sibs = sibs;
}

/**
 * xa_store_range() - Store this entry at a range of indices in the XArray.
 * @xa: XArray.
 * @first: First index to affect.
 * @last: Last index to affect.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * After this function returns, loads from any index between @first and @last,
 * inclusive will return @entry.
 * Storing into an existing multi-index entry updates the entry of every index.
 * The marks associated with @index are unaffected unless @entry is %NULL.
 *
 * Context: Process context.  Takes and releases the xa_lock.  May sleep
 * if the @gfp flags permit.
 * Return: %NULL on success, xa_err(-EINVAL) if @entry cannot be stored in
 * an XArray, or xa_err(-ENOMEM) if memory allocation failed.
 */
void *xa_store_range(struct xarray *xa, unsigned long first,
		unsigned long last, void *entry, gfp_t gfp)
{
	XA_STATE(xas, xa, 0);

	if (WARN_ON_ONCE(xa_is_internal(entry)))
		return XA_ERROR(-EINVAL);
	if (last < first)
		return XA_ERROR(-EINVAL);

	do {
		xas_lock(&xas);
		if (entry) {
			unsigned int order = BITS_PER_LONG;
			if (last + 1)
				order = __ffs(last + 1);
			xas_set_order(&xas, last, order);
			xas_create(&xas, true);
			if (xas_error(&xas))
				goto unlock;
		}
		do {
			xas_set_range(&xas, first, last);
			xas_store(&xas, entry);
			if (xas_error(&xas))
				goto unlock;
			first += xas_size(&xas);
		} while (first <= last);
unlock:
		xas_unlock(&xas);
	} while (xas_nomem(&xas, gfp));

	return xas_result(&xas, NULL);
}
EXPORT_SYMBOL(xa_store_range);
#endif /* CONFIG_XARRAY_MULTI */
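
/*
 * Illustrative sketch (not part of the original file, and only available
 * with CONFIG_XARRAY_MULTI): covering a whole range of indices with one
 * entry.  @array, @start, @nr and @item are hypothetical.
 *
 *	void *ret = xa_store_range(&array, start, start + nr - 1, item,
 *				   GFP_KERNEL);
 *
 *	if (xa_err(ret))
 *		return xa_err(ret);
 */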

/**
 * __xa_alloc() - Find somewhere to store this entry in the XArray.
 * @xa: XArray.
 * @id: Pointer to ID.
 * @entry: New entry.
 * @limit: Range for allocated ID.
 * @gfp: Memory allocation flags.
 *
 * Finds an empty entry in @xa between @limit.min and @limit.max,
 * stores the index into the @id pointer, then stores the entry at
 * that index.  A concurrent lookup will not see an uninitialised @id.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 on success, -ENOMEM if memory could not be allocated or
 * -EBUSY if there are no free entries in @limit.
 */
int __xa_alloc(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, gfp_t gfp)
{
	XA_STATE(xas, xa, 0);

	if (WARN_ON_ONCE(xa_is_advanced(entry)))
		return -EINVAL;
	if (WARN_ON_ONCE(!xa_track_free(xa)))
		return -EINVAL;

	if (!entry)
		entry = XA_ZERO_ENTRY;

	do {
		xas.xa_index = limit.min;
		xas_find_marked(&xas, limit.max, XA_FREE_MARK);
		if (xas.xa_node == XAS_RESTART)
			xas_set_err(&xas, -EBUSY);
		else
			*id = xas.xa_index;
		xas_store(&xas, entry);
		xas_clear_mark(&xas, XA_FREE_MARK);
	} while (__xas_nomem(&xas, gfp));

	return xas_error(&xas);
}
EXPORT_SYMBOL(__xa_alloc);
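
/*
 * Illustrative sketch (not part of the original file): ID allocation via
 * the xa_alloc() wrapper on an allocating XArray.  @item is hypothetical.
 *
 *	static DEFINE_XARRAY_ALLOC(ids);
 *
 *	u32 id;
 *	int err = xa_alloc(&ids, &id, item, xa_limit_32b, GFP_KERNEL);
 *
 *	if (err)
 *		return err;
 *	(the newly allocated ID is now in @id)
 */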

/**
 * __xa_alloc_cyclic() - Find somewhere to store this entry in the XArray.
 * @xa: XArray.
 * @id: Pointer to ID.
 * @entry: New entry.
 * @limit: Range of allocated ID.
 * @next: Pointer to next ID to allocate.
 * @gfp: Memory allocation flags.
 *
 * Finds an empty entry in @xa between @limit.min and @limit.max,
 * stores the index into the @id pointer, then stores the entry at
 * that index.  A concurrent lookup will not see an uninitialised @id.
 * The search for an empty entry will start at @next and will wrap
 * around if necessary.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 if the allocation succeeded without wrapping.  1 if the
 * allocation succeeded after wrapping, -ENOMEM if memory could not be
 * allocated or -EBUSY if there are no free entries in @limit.
 */
int __xa_alloc_cyclic(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, u32 *next, gfp_t gfp)
{
	u32 min = limit.min;
	int ret;

	limit.min = max(min, *next);
	ret = __xa_alloc(xa, id, entry, limit, gfp);
	if ((xa->xa_flags & XA_FLAGS_ALLOC_WRAPPED) && ret == 0) {
		xa->xa_flags &= ~XA_FLAGS_ALLOC_WRAPPED;
		ret = 1;
	}

	if (ret < 0 && limit.min > min) {
		/* The upper part of the range is full; retry from @limit.min */
		limit.min = min;
		ret = __xa_alloc(xa, id, entry, limit, gfp);
		if (ret == 0)
			ret = 1;
	}

	if (ret >= 0) {
		*next = *id + 1;
		if (*next == 0)
			xa->xa_flags |= XA_FLAGS_ALLOC_WRAPPED;
	}
	return ret;
}
EXPORT_SYMBOL(__xa_alloc_cyclic);
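
/*
 * Illustrative sketch (not part of the original file): cyclic allocation
 * through the xa_alloc_cyclic() wrapper hands out increasing IDs and
 * wraps when the top of the range is reached.  @ids, @next_id and @item
 * are hypothetical.
 *
 *	static DEFINE_XARRAY_ALLOC(ids);
 *	static u32 next_id;
 *
 *	u32 id;
 *	int ret = xa_alloc_cyclic(&ids, &id, item, xa_limit_31b,
 *				  &next_id, GFP_KERNEL);
 *
 *	if (ret < 0)
 *		return ret;
 *	(ret == 1 means the ID space wrapped at least once)
 */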

/**
 * __xa_set_mark() - Set this mark on this entry while locked.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Attempting to set a mark on a %NULL entry does not succeed.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 */
void __xa_set_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	XA_STATE(xas, xa, index);
	void *entry = xas_load(&xas);

	if (entry)
		xas_set_mark(&xas, mark);
}
EXPORT_SYMBOL(__xa_set_mark);

/**
 * __xa_clear_mark() - Clear this mark on this entry while locked.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 */
void __xa_clear_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	XA_STATE(xas, xa, index);
	void *entry = xas_load(&xas);

	if (entry)
		xas_clear_mark(&xas, mark);
}
EXPORT_SYMBOL(__xa_clear_mark);

/**
 * xa_get_mark() - Inquire whether this mark is set on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * This function uses the RCU read lock, so the result may be out of date
 * by the time it returns.  If you need the result to be stable, use a lock.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: True if the entry at @index has this mark set, false if it doesn't.
 */
bool xa_get_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	XA_STATE(xas, xa, index);
	void *entry;

	rcu_read_lock();
	entry = xas_start(&xas);
	while (xas_get_mark(&xas, mark)) {
		if (!xa_is_node(entry))
			goto found;
		entry = xas_descend(&xas, xa_to_node(entry));
	}
	rcu_read_unlock();
	return false;
 found:
	rcu_read_unlock();
	return true;
}
EXPORT_SYMBOL(xa_get_mark);

/**
 * xa_set_mark() - Set this mark on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Attempting to set a mark on a %NULL entry does not succeed.
 *
 * Context: Process context.  Takes and releases the xa_lock.
 */
void xa_set_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	xa_lock(xa);
	__xa_set_mark(xa, index, mark);
	xa_unlock(xa);
}
EXPORT_SYMBOL(xa_set_mark);

/**
 * xa_clear_mark() - Clear this mark on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Clearing a mark always succeeds.
 *
 * Context: Process context.  Takes and releases the xa_lock.
 */
void xa_clear_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	xa_lock(xa);
	__xa_clear_mark(xa, index, mark);
	xa_unlock(xa);
}
EXPORT_SYMBOL(xa_clear_mark);
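
/*
 * Illustrative sketch (not part of the original file): marks act as
 * per-entry flag bits.  @array is hypothetical.
 *
 *	xa_set_mark(&array, index, XA_MARK_0);
 *	if (xa_get_mark(&array, index, XA_MARK_0))
 *		...		(the entry at @index is marked)
 *	xa_clear_mark(&array, index, XA_MARK_0);
 */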

/**
 * xa_find() - Search the XArray for an entry.
 * @xa: XArray.
 * @indexp: Pointer to an index.
 * @max: Maximum index to search to.
 * @filter: Selection criterion.
 *
 * Finds the entry in @xa which matches the @filter, and has the lowest
 * index that is at least @indexp and no more than @max.
 * If an entry is found, @indexp is updated to be the index of the entry.
 * This function is protected by the RCU read lock, so it may not find
 * entries which are being simultaneously added.  It will not return an
 * %XA_RETRY_ENTRY; if you need to see retry entries, use xas_find().
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The entry, if found, otherwise %NULL.
 */
void *xa_find(struct xarray *xa, unsigned long *indexp,
			unsigned long max, xa_mark_t filter)
{
	XA_STATE(xas, xa, *indexp);
	void *entry;

	rcu_read_lock();
	do {
		if ((__force unsigned int)filter < XA_MAX_MARKS)
			entry = xas_find_marked(&xas, max, filter);
		else
			entry = xas_find(&xas, max);
	} while (xas_retry(&xas, entry));
	rcu_read_unlock();

	if (entry)
		*indexp = xas.xa_index;
	return entry;
}
EXPORT_SYMBOL(xa_find);
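
/*
 * Illustrative sketch (not part of the original file): visiting every
 * present entry with xa_find()/xa_find_after(), the pair underlying the
 * xa_for_each() macro.  @array and process() are hypothetical.
 *
 *	unsigned long index = 0;
 *	void *entry;
 *
 *	entry = xa_find(&array, &index, ULONG_MAX, XA_PRESENT);
 *	while (entry) {
 *		process(index, entry);
 *		entry = xa_find_after(&array, &index, ULONG_MAX, XA_PRESENT);
 *	}
 */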

static bool xas_sibling(struct xa_state *xas)
{
	struct xa_node *node = xas->xa_node;
	unsigned long mask;

	if (!node)
		return false;
	mask = (XA_CHUNK_SIZE << node->shift) - 1;
	/* The cast avoids shifting an int by more than 31 bits */
	return (xas->xa_index & mask) >
		((unsigned long)xas->xa_offset << node->shift);
}

/**
 * xa_find_after() - Search the XArray for a present entry.
 * @xa: XArray.
 * @indexp: Pointer to an index.
 * @max: Maximum index to search to.
 * @filter: Selection criterion.
 *
 * Finds the entry in @xa which matches the @filter and has the lowest
 * index that is above @indexp and no more than @max.
 * If an entry is found, @indexp is updated to be the index of the entry.
 * This function is protected by the RCU read lock, so it may miss entries
 * which are being simultaneously added.  It will not return an
 * %XA_RETRY_ENTRY; if you need to see retry entries, use xas_find().
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The pointer, if found, otherwise %NULL.
 */
void *xa_find_after(struct xarray *xa, unsigned long *indexp,
			unsigned long max, xa_mark_t filter)
{
	XA_STATE(xas, xa, *indexp + 1);
	void *entry;

	if (xas.xa_index == 0)
		return NULL;

	rcu_read_lock();
	for (;;) {
		if ((__force unsigned int)filter < XA_MAX_MARKS)
			entry = xas_find_marked(&xas, max, filter);
		else
			entry = xas_find(&xas, max);

		if (xas_invalid(&xas))
			break;
		if (xas_sibling(&xas))
			continue;
		if (!xas_retry(&xas, entry))
			break;
	}
	rcu_read_unlock();

	if (entry)
		*indexp = xas.xa_index;
	return entry;
}
EXPORT_SYMBOL(xa_find_after);

static unsigned int xas_extract_present(struct xa_state *xas, void **dst,
			unsigned long max, unsigned int n)
{
	void *entry;
	unsigned int i = 0;

	rcu_read_lock();
	xas_for_each(xas, entry, max) {
		if (xas_retry(xas, entry))
			continue;
		dst[i++] = entry;
		if (i == n)
			break;
	}
	rcu_read_unlock();

	return i;
}

static unsigned int xas_extract_marked(struct xa_state *xas, void **dst,
			unsigned long max, unsigned int n, xa_mark_t mark)
{
	void *entry;
	unsigned int i = 0;

	rcu_read_lock();
	xas_for_each_marked(xas, entry, max, mark) {
		if (xas_retry(xas, entry))
			continue;
		dst[i++] = entry;
		if (i == n)
			break;
	}
	rcu_read_unlock();

	return i;
}

/**
 * xa_extract() - Copy selected entries from the XArray into a normal array.
 * @xa: The source XArray to copy from.
 * @dst: The buffer to copy entries into.
 * @start: The first index in the XArray eligible to be selected.
 * @max: The last index in the XArray eligible to be selected.
 * @n: The maximum number of entries to copy.
 * @filter: Selection criterion.
 *
 * Copies up to @n entries that match @filter from the XArray.  The
 * copied entries will have indices between @start and @max, inclusive.
 *
 * The @filter may be an XArray mark value, in which case entries which are
 * marked with that mark will be copied.  It may also be %XA_PRESENT, in
 * which case all entries which are not %NULL will be copied.
 *
 * The entries returned may not represent a snapshot of the XArray at a
 * moment in time.  For example, if another thread stores to index 5, then
 * index 10, calling xa_extract() may return the old contents of index 5
 * and the new contents of index 10.  Indices not modified while this
 * function is running will not be skipped.
 *
 * If you need stronger guarantees, holding the xa_lock across calls to this
 * function will prevent concurrent modification.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The number of entries copied.
 */
unsigned int xa_extract(struct xarray *xa, void **dst, unsigned long start,
			unsigned long max, unsigned int n, xa_mark_t filter)
{
	XA_STATE(xas, xa, start);

	if (!n)
		return 0;

	if ((__force unsigned int)filter < XA_MAX_MARKS)
		return xas_extract_marked(&xas, dst, max, n, filter);
	return xas_extract_present(&xas, dst, max, n);
}
EXPORT_SYMBOL(xa_extract);
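
/*
 * Illustrative sketch (not part of the original file): pulling a batch of
 * present entries into a plain array.  @array is hypothetical.
 *
 *	void *batch[16];
 *	unsigned int n;
 *
 *	n = xa_extract(&array, batch, 0, ULONG_MAX, 16, XA_PRESENT);
 *	(batch[0] .. batch[n - 1] now hold up to 16 entries)
 */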

/**
 * xa_destroy() - Free all internal data structures.
 * @xa: XArray.
 *
 * After calling this function, the XArray is empty and has freed all memory
 * allocated for its internal data structures.  You are responsible for
 * freeing the objects referenced by the XArray.
 *
 * Context: Any context.  Takes and releases the xa_lock, interrupt-safe.
 */
void xa_destroy(struct xarray *xa)
{
	XA_STATE(xas, xa, 0);
	unsigned long flags;
	void *entry;

	xas.xa_node = NULL;
	xas_lock_irqsave(&xas, flags);
	entry = xa_head_locked(xa);
	RCU_INIT_POINTER(xa->xa_head, NULL);
	xas_init_marks(&xas);
	if (xa_zero_busy(xa))
		xa_mark_clear(xa, XA_FREE_MARK);
	/* lockdep checks we're still holding the lock in xas_free_nodes() */
	if (xa_is_node(entry))
		xas_free_nodes(&xas, xa_to_node(entry));
	xas_unlock_irqrestore(&xas, flags);
}
EXPORT_SYMBOL(xa_destroy);

#ifdef XA_DEBUG
void xa_dump_node(const struct xa_node *node)
{
	unsigned i, j;

	if (!node)
		return;
	if ((unsigned long)node & 3) {
		pr_cont("node %px\n", node);
		return;
	}

	pr_cont("node %px %s %d parent %px shift %d count %d values %d "
		"array %px list %px %px marks",
		node, node->parent ? "offset" : "max", node->offset,
		node->parent, node->shift, node->count, node->nr_values,
		node->array, node->private_list.prev, node->private_list.next);
	for (i = 0; i < XA_MAX_MARKS; i++)
		for (j = 0; j < XA_MARK_LONGS; j++)
			pr_cont(" %lx", node->marks[i][j]);
	pr_cont("\n");
}

void xa_dump_index(unsigned long index, unsigned int shift)
{
	if (!shift)
		pr_info("%lu: ", index);
	else if (shift >= BITS_PER_LONG)
		pr_info("0-%lu: ", ~0UL);
	else
		pr_info("%lu-%lu: ", index, index | ((1UL << shift) - 1));
}

void xa_dump_entry(const void *entry, unsigned long index, unsigned long shift)
{
	if (!entry)
		return;

	xa_dump_index(index, shift);

	if (xa_is_node(entry)) {
		if (shift == 0) {
			pr_cont("%px\n", entry);
		} else {
			unsigned long i;
			struct xa_node *node = xa_to_node(entry);
			xa_dump_node(node);
			for (i = 0; i < XA_CHUNK_SIZE; i++)
				xa_dump_entry(node->slots[i],
				      index + (i << node->shift), node->shift);
		}
	} else if (xa_is_value(entry))
		pr_cont("value %ld (0x%lx) [%px]\n", xa_to_value(entry),
						xa_to_value(entry), entry);
	else if (!xa_is_internal(entry))
		pr_cont("%px\n", entry);
	else if (xa_is_retry(entry))
		pr_cont("retry (%ld)\n", xa_to_internal(entry));
	else if (xa_is_sibling(entry))
		pr_cont("sibling (slot %ld)\n", xa_to_sibling(entry));
	else if (xa_is_zero(entry))
		pr_cont("zero (%ld)\n", xa_to_internal(entry));
	else
		pr_cont("UNKNOWN ENTRY (%px)\n", entry);
}

void xa_dump(const struct xarray *xa)
{
	void *entry = xa->xa_head;
	unsigned int shift = 0;

	pr_info("xarray: %px head %px flags %x marks %d %d %d\n", xa, entry,
			xa->xa_flags, xa_marked(xa, XA_MARK_0),
			xa_marked(xa, XA_MARK_1), xa_marked(xa, XA_MARK_2));
	if (xa_is_node(entry))
		shift = xa_to_node(entry)->shift + XA_CHUNK_SHIFT;
	xa_dump_entry(entry, 0, shift);
}
#endif