1#include <linux/bitmap.h>
2#include <linux/bug.h>
3#include <linux/export.h>
4#include <linux/idr.h>
5#include <linux/slab.h>
6#include <linux/spinlock.h>
7#include <linux/xarray.h>
8
9DEFINE_PER_CPU(struct ida_bitmap *, ida_bitmap);
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
/**
 * idr_alloc_u32() - Allocate an ID.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @nextid: Pointer to an ID.
 * @max: The maximum ID to allocate (inclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @nextid and @max.
 * Note that @max is inclusive whereas the @end parameter to idr_alloc()
 * is exclusive.  The new ID is assigned to @nextid before the pointer
 * is inserted into the IDR.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.
 *
 * Return: 0 if an ID was allocated, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.  If an error occurred,
 * @nextid is unchanged.
 */
int idr_alloc_u32(struct idr *idr, void *ptr, u32 *nextid,
			unsigned long max, gfp_t gfp)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int base = idr->idr_base;
	unsigned int id = *nextid;

	/* Internal radix tree nodes must never be stored as user pointers. */
	if (WARN_ON_ONCE(radix_tree_is_internal_node(ptr)))
		return -EINVAL;
	/* Root was not initialised as an IDR (e.g. missing IDR_INIT); fix up. */
	if (WARN_ON_ONCE(!(idr->idr_rt.gfp_mask & ROOT_IS_IDR)))
		idr->idr_rt.gfp_mask |= IDR_RT_MARKER;

	/* The tree is indexed from 0; offset the requested ID by @base. */
	id = (id < base) ? 0 : id - base;
	radix_tree_iter_init(&iter, id);
	slot = idr_get_free(&idr->idr_rt, &iter, gfp, max - base);
	if (IS_ERR(slot))
		return PTR_ERR(slot);

	/* Publish the ID before the pointer becomes visible in the tree. */
	*nextid = iter.index + base;

	radix_tree_iter_replace(&idr->idr_rt, &iter, slot, ptr);
	radix_tree_iter_tag_clear(&idr->idr_rt, &iter, IDR_FREE);

	return 0;
}
EXPORT_SYMBOL_GPL(idr_alloc_u32);
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
83{
84 u32 id = start;
85 int ret;
86
87 if (WARN_ON_ONCE(start < 0))
88 return -EINVAL;
89
90 ret = idr_alloc_u32(idr, ptr, &id, end > 0 ? end - 1 : INT_MAX, gfp);
91 if (ret)
92 return ret;
93
94 return id;
95}
96EXPORT_SYMBOL_GPL(idr_alloc);
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
/**
 * idr_alloc_cyclic() - Allocate an ID cyclically.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @start: The minimum ID (inclusive).
 * @end: The maximum ID (exclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range [@start, @end).  If @end is <= 0,
 * it is treated as one larger than %INT_MAX.  The search for an unused
 * ID starts just past the last ID allocated and wraps back to @start if
 * no free ID is found before @end, so recently-freed low IDs are not
 * reused immediately.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.
 *
 * Return: The newly allocated ID, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.
 */
int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
{
	u32 id = idr->idr_next;		/* cursor: where the last search left off */
	int err, max = end > 0 ? end - 1 : INT_MAX;

	if ((int)id < start)
		id = start;

	err = idr_alloc_u32(idr, ptr, &id, max, gfp);
	/* Upper range exhausted: wrap around and retry from @start. */
	if ((err == -ENOSPC) && (id > start)) {
		id = start;
		err = idr_alloc_u32(idr, ptr, &id, max, gfp);
	}
	if (err)
		return err;

	/* Advance the cursor so the next allocation starts after this ID. */
	idr->idr_next = id + 1;
	return id;
}
EXPORT_SYMBOL(idr_alloc_cyclic);
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155void *idr_remove(struct idr *idr, unsigned long id)
156{
157 return radix_tree_delete_item(&idr->idr_rt, id - idr->idr_base, NULL);
158}
159EXPORT_SYMBOL_GPL(idr_remove);
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175void *idr_find(const struct idr *idr, unsigned long id)
176{
177 return radix_tree_lookup(&idr->idr_rt, id - idr->idr_base);
178}
179EXPORT_SYMBOL_GPL(idr_find);
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
/**
 * idr_for_each() - Iterate through all stored pointers.
 * @idr: IDR handle.
 * @fn: Function to be called for each pointer.
 * @data: Data passed to callback function.
 *
 * The callback function will be called for each entry in @idr, passing
 * the ID, the entry and @data.
 *
 * If @fn returns anything other than %0, the iteration stops and that
 * value is returned from this function.
 */
int idr_for_each(const struct idr *idr,
		int (*fn)(int id, void *p, void *data), void *data)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	int base = idr->idr_base;

	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, 0) {
		int ret;
		unsigned long id = iter.index + base;

		/* Callback takes an int ID; stop rather than truncate. */
		if (WARN_ON_ONCE(id > INT_MAX))
			break;
		ret = fn(id, rcu_dereference_raw(*slot), data);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL(idr_for_each);
219
220
221
222
223
224
225
226
227
228
229
/**
 * idr_get_next() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On exit, @nextid is
 * updated to the ID of the found value.  To use in a loop, the value
 * pointed to by nextid must be incremented by the user.
 */
void *idr_get_next(struct idr *idr, int *nextid)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned long base = idr->idr_base;
	unsigned long id = *nextid;

	/* Convert the user-visible ID into a tree index. */
	id = (id < base) ? 0 : id - base;
	slot = radix_tree_iter_find(&idr->idr_rt, &iter, id);
	if (!slot)
		return NULL;
	id = iter.index + base;

	/* @nextid is an int; an entry beyond INT_MAX cannot be reported. */
	if (WARN_ON_ONCE(id > INT_MAX))
		return NULL;

	*nextid = id;
	return rcu_dereference_raw(*slot);
}
EXPORT_SYMBOL(idr_get_next);
250
251
252
253
254
255
256
257
258
259
260
/**
 * idr_get_next_ul() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On exit, @nextid is
 * updated to the ID of the found value.  Unlike idr_get_next(), the full
 * unsigned long ID range is supported.
 */
void *idr_get_next_ul(struct idr *idr, unsigned long *nextid)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned long base = idr->idr_base;
	unsigned long id = *nextid;

	/* Convert the user-visible ID into a tree index. */
	id = (id < base) ? 0 : id - base;
	slot = radix_tree_iter_find(&idr->idr_rt, &iter, id);
	if (!slot)
		return NULL;

	*nextid = iter.index + base;
	return rcu_dereference_raw(*slot);
}
EXPORT_SYMBOL(idr_get_next_ul);
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
/**
 * idr_replace() - replace pointer for given ID.
 * @idr: IDR handle.
 * @ptr: New pointer to associate with the ID.
 * @id: ID to change.
 *
 * Replace the pointer registered with an ID and return the old value.
 * This function can be called under the RCU read lock concurrently with
 * idr_alloc() and idr_remove() (as long as the ID being removed is not
 * the one being replaced!).
 *
 * Returns: the old value on success.  %-ENOENT indicates that @id was not
 * found.  %-EINVAL indicates that @ptr was not valid.
 */
void *idr_replace(struct idr *idr, void *ptr, unsigned long id)
{
	struct radix_tree_node *node;
	void __rcu **slot = NULL;
	void *entry;

	/* Internal radix tree nodes must never be stored as user pointers. */
	if (WARN_ON_ONCE(radix_tree_is_internal_node(ptr)))
		return ERR_PTR(-EINVAL);
	id -= idr->idr_base;

	entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot);
	/* A slot tagged IDR_FREE exists in the tree but holds no allocation. */
	if (!slot || radix_tree_tag_get(&idr->idr_rt, id, IDR_FREE))
		return ERR_PTR(-ENOENT);

	__radix_tree_replace(&idr->idr_rt, node, slot, ptr, NULL);

	return entry;
}
EXPORT_SYMBOL(idr_replace);
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364#define IDA_MAX (0x80000000U / IDA_BITMAP_BITS - 1)
365
/*
 * Allocate the smallest unused ID >= @start and mark it as in use.
 *
 * The IDA is backed by a radix tree of bitmaps.  A sparsely-used chunk is
 * stored directly in the radix tree slot as an exceptional entry, using
 * the pointer bits above RADIX_TREE_EXCEPTIONAL_SHIFT as a small inline
 * bitmap; once a bit beyond BITS_PER_LONG is needed the slot is upgraded
 * to point at a full struct ida_bitmap (taken from the per-cpu
 * ida_bitmap cache, refilled by ida_pre_get()).
 *
 * Caller must hold the IDA lock.  Returns the new ID, -ENOSPC if the ID
 * would exceed INT_MAX, or -EAGAIN if the caller must pre-allocate memory
 * and retry.
 */
static int ida_get_new_above(struct ida *ida, int start)
{
	struct radix_tree_root *root = &ida->ida_rt;
	void __rcu **slot;
	struct radix_tree_iter iter;
	struct ida_bitmap *bitmap;
	unsigned long index;
	unsigned bit, ebit;
	int new;

	index = start / IDA_BITMAP_BITS;	/* tree index of the chunk holding @start */
	bit = start % IDA_BITMAP_BITS;		/* bit offset within a full bitmap */
	ebit = bit + RADIX_TREE_EXCEPTIONAL_SHIFT;	/* bit offset within an exceptional entry */

	slot = radix_tree_iter_init(&iter, index);
	for (;;) {
		if (slot)
			slot = radix_tree_next_slot(slot, &iter,
						RADIX_TREE_ITER_TAGGED);
		if (!slot) {
			/* No tagged-free chunk cached; search/extend the tree. */
			slot = idr_get_free(root, &iter, GFP_NOWAIT, IDA_MAX);
			if (IS_ERR(slot)) {
				if (slot == ERR_PTR(-ENOMEM))
					return -EAGAIN;	/* let caller ida_pre_get() and retry */
				return PTR_ERR(slot);
			}
		}
		if (iter.index > index) {
			/* Moved past the starting chunk; search from bit 0. */
			bit = 0;
			ebit = RADIX_TREE_EXCEPTIONAL_SHIFT;
		}
		new = iter.index * IDA_BITMAP_BITS;
		bitmap = rcu_dereference_raw(*slot);
		if (radix_tree_exception(bitmap)) {
			/* Inline bitmap packed into the slot pointer itself. */
			unsigned long tmp = (unsigned long)bitmap;
			ebit = find_next_zero_bit(&tmp, BITS_PER_LONG, ebit);
			if (ebit < BITS_PER_LONG) {
				tmp |= 1UL << ebit;
				rcu_assign_pointer(*slot, (void *)tmp);
				return new + ebit -
						RADIX_TREE_EXCEPTIONAL_SHIFT;
			}
			/* Inline bitmap exhausted: upgrade to a real bitmap. */
			bitmap = this_cpu_xchg(ida_bitmap, NULL);
			if (!bitmap)
				return -EAGAIN;
			bitmap->bitmap[0] = tmp >> RADIX_TREE_EXCEPTIONAL_SHIFT;
			rcu_assign_pointer(*slot, bitmap);
		}

		if (bitmap) {
			bit = find_next_zero_bit(bitmap->bitmap,
							IDA_BITMAP_BITS, bit);
			new += bit;
			if (new < 0)
				return -ENOSPC;	/* ID would exceed INT_MAX */
			if (bit == IDA_BITMAP_BITS)
				continue;	/* chunk full past @bit; try the next one */

			__set_bit(bit, bitmap->bitmap);
			/* Last free bit used: drop the IDR_FREE tag for this chunk. */
			if (bitmap_full(bitmap->bitmap, IDA_BITMAP_BITS))
				radix_tree_iter_tag_clear(root, &iter,
								IDR_FREE);
		} else {
			/* Empty slot: allocate @bit within a fresh chunk. */
			new += bit;
			if (new < 0)
				return -ENOSPC;
			if (ebit < BITS_PER_LONG) {
				/* Fits in an inline exceptional entry. */
				bitmap = (void *)((1UL << ebit) |
						RADIX_TREE_EXCEPTIONAL_ENTRY);
				radix_tree_iter_replace(root, &iter, slot,
						bitmap);
				return new;
			}
			bitmap = this_cpu_xchg(ida_bitmap, NULL);
			if (!bitmap)
				return -EAGAIN;
			__set_bit(bit, bitmap->bitmap);
			radix_tree_iter_replace(root, &iter, slot, bitmap);
		}

		return new;
	}
}
449
/*
 * Release @id back to the IDA.  Clears the corresponding bit in either
 * the inline exceptional entry or the full ida_bitmap, re-tags the chunk
 * as having free space, and deletes the slot (freeing the bitmap) once
 * the chunk is completely empty.
 *
 * Caller must hold the IDA lock.  Warns if @id was not allocated.
 */
static void ida_remove(struct ida *ida, int id)
{
	unsigned long index = id / IDA_BITMAP_BITS;
	unsigned offset = id % IDA_BITMAP_BITS;
	struct ida_bitmap *bitmap;
	unsigned long *btmp;
	struct radix_tree_iter iter;
	void __rcu **slot;

	slot = radix_tree_iter_lookup(&ida->ida_rt, &iter, index);
	if (!slot)
		goto err;

	bitmap = rcu_dereference_raw(*slot);
	if (radix_tree_exception(bitmap)) {
		/* Inline entry: the slot word itself is the bitmap. */
		btmp = (unsigned long *)slot;
		offset += RADIX_TREE_EXCEPTIONAL_SHIFT;
		if (offset >= BITS_PER_LONG)
			goto err;
	} else {
		btmp = bitmap->bitmap;
	}
	if (!test_bit(offset, btmp))
		goto err;	/* bit already clear: double free or bogus id */

	__clear_bit(offset, btmp);
	/* Chunk now has at least one free bit again. */
	radix_tree_iter_tag_set(&ida->ida_rt, &iter, IDR_FREE);
	if (radix_tree_exception(bitmap)) {
		/* Only the exceptional marker left: the chunk is empty. */
		if (rcu_dereference_raw(*slot) ==
					(void *)RADIX_TREE_EXCEPTIONAL_ENTRY)
			radix_tree_iter_delete(&ida->ida_rt, &iter, slot);
	} else if (bitmap_empty(btmp, IDA_BITMAP_BITS)) {
		kfree(bitmap);
		radix_tree_iter_delete(&ida->ida_rt, &iter, slot);
	}
	return;
 err:
	WARN(1, "ida_free called for id=%d which is not allocated.\n", id);
}
489
490
491
492
493
494
495
496
497
498
499
500
/**
 * ida_destroy() - Free all IDs.
 * @ida: IDA handle.
 *
 * Calling this function frees all IDs and releases all resources used
 * by an IDA.  When this call returns, the IDA is empty and can be reused
 * or be freed.  If the IDA is already empty, there is no need to call
 * this function.
 *
 * Context: Any context.
 */
void ida_destroy(struct ida *ida)
{
	unsigned long flags;
	struct radix_tree_iter iter;
	void __rcu **slot;

	xa_lock_irqsave(&ida->ida_rt, flags);
	radix_tree_for_each_slot(slot, &ida->ida_rt, &iter, 0) {
		struct ida_bitmap *bitmap = rcu_dereference_raw(*slot);
		/* Exceptional entries are stored inline; nothing to kfree. */
		if (!radix_tree_exception(bitmap))
			kfree(bitmap);
		radix_tree_iter_delete(&ida->ida_rt, &iter, slot);
	}
	xa_unlock_irqrestore(&ida->ida_rt, flags);
}
EXPORT_SYMBOL(ida_destroy);
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
/**
 * ida_alloc_range() - Allocate an unused ID.
 * @ida: IDA handle.
 * @min: Lowest ID to allocate.
 * @max: Highest ID to allocate.
 * @gfp: Memory allocation flags.
 *
 * Allocate an ID between @min and @max, inclusive.  The allocated ID
 * will not exceed %INT_MAX, even if @max is larger.
 *
 * Context: Any context.
 * Return: The allocated ID, or %-ENOMEM if memory could not be allocated,
 * or %-ENOSPC if there are no free IDs.
 */
int ida_alloc_range(struct ida *ida, unsigned int min, unsigned int max,
			gfp_t gfp)
{
	int id = 0;
	unsigned long flags;

	/* IDs above INT_MAX are never allocated. */
	if ((int)min < 0)
		return -ENOSPC;

	if ((int)max < 0)
		max = INT_MAX;

again:
	xa_lock_irqsave(&ida->ida_rt, flags);
	id = ida_get_new_above(ida, min);
	/* Got an ID above the caller's limit: give it back and fail. */
	if (id > (int)max) {
		ida_remove(ida, id);
		id = -ENOSPC;
	}
	xa_unlock_irqrestore(&ida->ida_rt, flags);

	/*
	 * -EAGAIN means ida_get_new_above() needed memory it could not
	 * allocate under the lock; preload outside the lock and retry.
	 */
	if (unlikely(id == -EAGAIN)) {
		if (!ida_pre_get(ida, gfp))
			return -ENOMEM;
		goto again;
	}

	return id;
}
EXPORT_SYMBOL(ida_alloc_range);
562
563
564
565
566
567
568
569
570void ida_free(struct ida *ida, unsigned int id)
571{
572 unsigned long flags;
573
574 BUG_ON((int)id < 0);
575 xa_lock_irqsave(&ida->ida_rt, flags);
576 ida_remove(ida, id);
577 xa_unlock_irqrestore(&ida->ida_rt, flags);
578}
579EXPORT_SYMBOL(ida_free);
580