1
2#include <linux/bitmap.h>
3#include <linux/bug.h>
4#include <linux/export.h>
5#include <linux/idr.h>
6#include <linux/slab.h>
7#include <linux/spinlock.h>
8#include <linux/xarray.h>
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
/**
 * idr_alloc_u32() - Allocate an ID.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @nextid: Pointer to an ID.  On success, updated to the ID actually
 *	allocated.
 * @max: The maximum ID to allocate (inclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range [*nextid, @max] and associates
 * @ptr with it.  The caller is responsible for serialising against
 * concurrent modification of the IDR — presumably via an external lock;
 * this function takes none itself (TODO confirm against callers).
 *
 * Return: 0 on success, or a negative errno propagated from
 * idr_get_free() (e.g. allocation failure or no free ID in range).
 */
int idr_alloc_u32(struct idr *idr, void *ptr, u32 *nextid,
			unsigned long max, gfp_t gfp)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int base = idr->idr_base;
	unsigned int id = *nextid;

	/*
	 * Repair an IDR whose root was initialised as a plain radix tree
	 * (missing the IDR marker flag), and warn so the caller gets fixed.
	 */
	if (WARN_ON_ONCE(!(idr->idr_rt.xa_flags & ROOT_IS_IDR)))
		idr->idr_rt.xa_flags |= IDR_RT_MARKER;

	/* The tree stores IDs relative to idr_base; convert before searching. */
	id = (id < base) ? 0 : id - base;
	radix_tree_iter_init(&iter, id);
	slot = idr_get_free(&idr->idr_rt, &iter, gfp, max - base);
	if (IS_ERR(slot))
		return PTR_ERR(slot);

	/* Report the user-visible ID (tree index shifted back by base). */
	*nextid = iter.index + base;

	/* Publish the entry, then clear IDR_FREE to mark the slot in use. */
	radix_tree_iter_replace(&idr->idr_rt, &iter, slot, ptr);
	radix_tree_iter_tag_clear(&idr->idr_rt, &iter, IDR_FREE);

	return 0;
}
EXPORT_SYMBOL_GPL(idr_alloc_u32);
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
80{
81 u32 id = start;
82 int ret;
83
84 if (WARN_ON_ONCE(start < 0))
85 return -EINVAL;
86
87 ret = idr_alloc_u32(idr, ptr, &id, end > 0 ? end - 1 : INT_MAX, gfp);
88 if (ret)
89 return ret;
90
91 return id;
92}
93EXPORT_SYMBOL_GPL(idr_alloc);
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
118{
119 u32 id = idr->idr_next;
120 int err, max = end > 0 ? end - 1 : INT_MAX;
121
122 if ((int)id < start)
123 id = start;
124
125 err = idr_alloc_u32(idr, ptr, &id, max, gfp);
126 if ((err == -ENOSPC) && (id > start)) {
127 id = start;
128 err = idr_alloc_u32(idr, ptr, &id, max, gfp);
129 }
130 if (err)
131 return err;
132
133 idr->idr_next = id + 1;
134 return id;
135}
136EXPORT_SYMBOL(idr_alloc_cyclic);
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152void *idr_remove(struct idr *idr, unsigned long id)
153{
154 return radix_tree_delete_item(&idr->idr_rt, id - idr->idr_base, NULL);
155}
156EXPORT_SYMBOL_GPL(idr_remove);
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172void *idr_find(const struct idr *idr, unsigned long id)
173{
174 return radix_tree_lookup(&idr->idr_rt, id - idr->idr_base);
175}
176EXPORT_SYMBOL_GPL(idr_find);
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
/**
 * idr_for_each() - Iterate through all stored pointers.
 * @idr: IDR handle.
 * @fn: Function to be called for each pointer.
 * @data: Data passed back to @fn.
 *
 * Calls @fn(id, entry, @data) for every populated ID in ascending order.
 * Iteration stops early if @fn returns non-zero, and that value is
 * returned to the caller.  IDs above INT_MAX cannot be represented in
 * @fn's int parameter, so iteration stops (with a one-time warning)
 * before reaching them.
 */
int idr_for_each(const struct idr *idr,
		int (*fn)(int id, void *p, void *data), void *data)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	int base = idr->idr_base;

	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, 0) {
		int ret;
		/* Convert the tree index back to the user-visible ID. */
		unsigned long id = iter.index + base;

		/* The callback takes an int; refuse IDs that don't fit. */
		if (WARN_ON_ONCE(id > INT_MAX))
			break;
		ret = fn(id, rcu_dereference_raw(*slot), data);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL(idr_for_each);
216
217
218
219
220
221
222
223
224
225
226
/**
 * idr_get_next() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On success, @nextid is
 * updated to the ID of the found entry.  Returns NULL if there are no
 * more entries, or if the next populated ID would exceed INT_MAX (which
 * cannot be returned through the int-sized @nextid).
 */
void *idr_get_next(struct idr *idr, int *nextid)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	void *entry = NULL;
	unsigned long base = idr->idr_base;
	unsigned long id = *nextid;

	/* Convert to a base-relative tree index before searching. */
	id = (id < base) ? 0 : id - base;
	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, id) {
		entry = rcu_dereference_raw(*slot);
		if (!entry)
			continue;
		/* A normal (non-internal) entry is a real user pointer. */
		if (!xa_is_internal(entry))
			break;
		/*
		 * Internal entries at the root slot, and retry entries
		 * anywhere, mean a concurrent modification (e.g. idr_remove)
		 * is in progress: restart the lookup at this index.
		 */
		if (slot != &idr->idr_rt.xa_head && !xa_is_retry(entry))
			break;
		slot = radix_tree_iter_retry(&iter);
	}
	if (!slot)
		return NULL;
	id = iter.index + base;

	/* @nextid is an int; IDs beyond INT_MAX cannot be reported. */
	if (WARN_ON_ONCE(id > INT_MAX))
		return NULL;

	*nextid = id;
	return entry;
}
EXPORT_SYMBOL(idr_get_next);
257
258
259
260
261
262
263
264
265
266
267
268void *idr_get_next_ul(struct idr *idr, unsigned long *nextid)
269{
270 struct radix_tree_iter iter;
271 void __rcu **slot;
272 unsigned long base = idr->idr_base;
273 unsigned long id = *nextid;
274
275 id = (id < base) ? 0 : id - base;
276 slot = radix_tree_iter_find(&idr->idr_rt, &iter, id);
277 if (!slot)
278 return NULL;
279
280 *nextid = iter.index + base;
281 return rcu_dereference_raw(*slot);
282}
283EXPORT_SYMBOL(idr_get_next_ul);
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
/**
 * idr_replace() - replace pointer for given ID.
 * @idr: IDR handle.
 * @ptr: New pointer to associate with the ID.
 * @id: ID to update.
 *
 * Replaces the entry stored at @id with @ptr without changing the ID's
 * allocated/free state.  The ID must currently be allocated: a missing
 * slot, or one still tagged IDR_FREE, is rejected.
 *
 * Return: the old value on success, or ERR_PTR(-ENOENT) if @id is not
 * currently allocated.
 */
void *idr_replace(struct idr *idr, void *ptr, unsigned long id)
{
	struct radix_tree_node *node;
	void __rcu **slot = NULL;
	void *entry;

	/* Convert to the base-relative index used inside the tree. */
	id -= idr->idr_base;

	entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot);
	/* Reject IDs that have no slot or were never allocated (still free). */
	if (!slot || radix_tree_tag_get(&idr->idr_rt, id, IDR_FREE))
		return ERR_PTR(-ENOENT);

	__radix_tree_replace(&idr->idr_rt, node, slot, ptr);

	return entry;
}
EXPORT_SYMBOL(idr_replace);
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
/**
 * ida_alloc_range() - Allocate an unused ID.
 * @ida: IDA handle.
 * @min: Lowest ID to allocate.
 * @max: Highest ID to allocate.
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range [@min, @max], taking the internal
 * xa lock (IRQ-safe).  Small bitmaps are stored inline as xarray value
 * entries (up to BITS_PER_XA_VALUE bits); larger ones use a heap-
 * allocated struct ida_bitmap.  Memory needed while holding the lock is
 * first tried with GFP_NOWAIT; on failure the lock is dropped, memory is
 * allocated with @gfp, and the whole search retries.
 *
 * Return: the allocated ID, -ENOMEM if memory could not be allocated,
 * or -ENOSPC if there are no free IDs in the range (including when @min
 * exceeds INT_MAX).
 */
int ida_alloc_range(struct ida *ida, unsigned int min, unsigned int max,
			gfp_t gfp)
{
	XA_STATE(xas, &ida->xa, min / IDA_BITMAP_BITS);
	unsigned bit = min % IDA_BITMAP_BITS;
	unsigned long flags;
	struct ida_bitmap *bitmap, *alloc = NULL;

	/* IDs are capped at INT_MAX; a min above that can never succeed. */
	if ((int)min < 0)
		return -ENOSPC;

	/* Clamp max to the representable ID range. */
	if ((int)max < 0)
		max = INT_MAX;

retry:
	xas_lock_irqsave(&xas, flags);
next:
	bitmap = xas_find_marked(&xas, max / IDA_BITMAP_BITS, XA_FREE_MARK);
	/* Only the first chunk starts mid-bitmap; later chunks start at 0. */
	if (xas.xa_index > min / IDA_BITMAP_BITS)
		bit = 0;
	if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
		goto nospc;

	if (xa_is_value(bitmap)) {
		/* Inline bitmap stored directly in the xarray entry. */
		unsigned long tmp = xa_to_value(bitmap);

		if (bit < BITS_PER_XA_VALUE) {
			bit = find_next_zero_bit(&tmp, BITS_PER_XA_VALUE, bit);
			if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
				goto nospc;
			if (bit < BITS_PER_XA_VALUE) {
				tmp |= 1UL << bit;
				xas_store(&xas, xa_mk_value(tmp));
				goto out;
			}
		}
		/*
		 * The requested bit doesn't fit in a value entry: upgrade
		 * to a real ida_bitmap, copying the existing inline bits.
		 */
		bitmap = alloc;
		if (!bitmap)
			bitmap = kzalloc(sizeof(*bitmap), GFP_NOWAIT);
		if (!bitmap)
			goto alloc;
		bitmap->bitmap[0] = tmp;
		xas_store(&xas, bitmap);
		if (xas_error(&xas)) {
			/* Store failed; wipe the copy so cleanup can reuse it. */
			bitmap->bitmap[0] = 0;
			goto out;
		}
	}

	if (bitmap) {
		bit = find_next_zero_bit(bitmap->bitmap, IDA_BITMAP_BITS, bit);
		if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
			goto nospc;
		/* This bitmap is full; move on to the next marked chunk. */
		if (bit == IDA_BITMAP_BITS)
			goto next;

		__set_bit(bit, bitmap->bitmap);
		/* Last free bit consumed: chunk no longer has free IDs. */
		if (bitmap_full(bitmap->bitmap, IDA_BITMAP_BITS))
			xas_clear_mark(&xas, XA_FREE_MARK);
	} else {
		/* Empty chunk: create a fresh entry holding just this bit. */
		if (bit < BITS_PER_XA_VALUE) {
			bitmap = xa_mk_value(1UL << bit);
		} else {
			bitmap = alloc;
			if (!bitmap)
				bitmap = kzalloc(sizeof(*bitmap), GFP_NOWAIT);
			if (!bitmap)
				goto alloc;
			__set_bit(bit, bitmap->bitmap);
		}
		xas_store(&xas, bitmap);
	}
out:
	xas_unlock_irqrestore(&xas, flags);
	/* xarray node allocation failed under the lock: allocate and retry. */
	if (xas_nomem(&xas, gfp)) {
		xas.xa_index = min / IDA_BITMAP_BITS;
		bit = min % IDA_BITMAP_BITS;
		goto retry;
	}
	/* Free the preallocated bitmap if it ended up unused. */
	if (bitmap != alloc)
		kfree(alloc);
	if (xas_error(&xas))
		return xas_error(&xas);
	return xas.xa_index * IDA_BITMAP_BITS + bit;
alloc:
	/* GFP_NOWAIT failed under the lock: drop it, allocate with @gfp. */
	xas_unlock_irqrestore(&xas, flags);
	alloc = kzalloc(sizeof(*bitmap), gfp);
	if (!alloc)
		return -ENOMEM;
	xas_set(&xas, min / IDA_BITMAP_BITS);
	bit = min % IDA_BITMAP_BITS;
	goto retry;
nospc:
	xas_unlock_irqrestore(&xas, flags);
	return -ENOSPC;
}
EXPORT_SYMBOL(ida_alloc_range);
485
486
487
488
489
490
491
492
493void ida_free(struct ida *ida, unsigned int id)
494{
495 XA_STATE(xas, &ida->xa, id / IDA_BITMAP_BITS);
496 unsigned bit = id % IDA_BITMAP_BITS;
497 struct ida_bitmap *bitmap;
498 unsigned long flags;
499
500 BUG_ON((int)id < 0);
501
502 xas_lock_irqsave(&xas, flags);
503 bitmap = xas_load(&xas);
504
505 if (xa_is_value(bitmap)) {
506 unsigned long v = xa_to_value(bitmap);
507 if (bit >= BITS_PER_XA_VALUE)
508 goto err;
509 if (!(v & (1UL << bit)))
510 goto err;
511 v &= ~(1UL << bit);
512 if (!v)
513 goto delete;
514 xas_store(&xas, xa_mk_value(v));
515 } else {
516 if (!test_bit(bit, bitmap->bitmap))
517 goto err;
518 __clear_bit(bit, bitmap->bitmap);
519 xas_set_mark(&xas, XA_FREE_MARK);
520 if (bitmap_empty(bitmap->bitmap, IDA_BITMAP_BITS)) {
521 kfree(bitmap);
522delete:
523 xas_store(&xas, NULL);
524 }
525 }
526 xas_unlock_irqrestore(&xas, flags);
527 return;
528 err:
529 xas_unlock_irqrestore(&xas, flags);
530 WARN(1, "ida_free called for id=%d which is not allocated.\n", id);
531}
532EXPORT_SYMBOL(ida_free);
533
534
535
536
537
538
539
540
541
542
543
544
/**
 * ida_destroy() - Free all IDs.
 * @ida: IDA handle.
 *
 * Releases every entry in the IDA, freeing any heap-allocated bitmaps
 * and emptying the underlying xarray.  Afterwards the IDA is empty and
 * can be reused.  The caller must ensure no other thread is using the
 * IDA concurrently (the internal lock only protects the traversal
 * itself).
 */
void ida_destroy(struct ida *ida)
{
	XA_STATE(xas, &ida->xa, 0);
	struct ida_bitmap *bitmap;
	unsigned long flags;

	xas_lock_irqsave(&xas, flags);
	xas_for_each(&xas, bitmap, ULONG_MAX) {
		/* Value entries carry their bits inline; nothing to kfree. */
		if (!xa_is_value(bitmap))
			kfree(bitmap);
		xas_store(&xas, NULL);
	}
	xas_unlock_irqrestore(&xas, flags);
}
EXPORT_SYMBOL(ida_destroy);
560
561#ifndef __KERNEL__
562extern void xa_dump_index(unsigned long index, unsigned int shift);
563#define IDA_CHUNK_SHIFT ilog2(IDA_BITMAP_BITS)
564
/*
 * Recursively dump one IDA entry (userspace test builds only — this is
 * compiled under #ifndef __KERNEL__).  Handles the three entry kinds:
 * internal xa_nodes (recurse into children), inline value entries, and
 * heap-allocated ida_bitmaps.
 */
static void ida_dump_entry(void *entry, unsigned long index)
{
	unsigned long i;

	if (!entry)
		return;

	if (xa_is_node(entry)) {
		struct xa_node *node = xa_to_node(entry);
		/* Shift in ID space: node shift plus the bits per bitmap chunk. */
		unsigned int shift = node->shift + IDA_CHUNK_SHIFT +
			XA_CHUNK_SHIFT;

		xa_dump_index(index * IDA_BITMAP_BITS, shift);
		xa_dump_node(node);
		for (i = 0; i < XA_CHUNK_SIZE; i++)
			ida_dump_entry(node->slots[i],
					index | (i << node->shift));
	} else if (xa_is_value(entry)) {
		/* Inline bitmap: the bits live in the value entry itself. */
		xa_dump_index(index * IDA_BITMAP_BITS, ilog2(BITS_PER_LONG));
		pr_cont("value: data %lx [%px]\n", xa_to_value(entry), entry);
	} else {
		/* Heap-allocated bitmap chunk: print every word. */
		struct ida_bitmap *bitmap = entry;

		xa_dump_index(index * IDA_BITMAP_BITS, IDA_CHUNK_SHIFT);
		pr_cont("bitmap: %p data", bitmap);
		for (i = 0; i < IDA_BITMAP_LONGS; i++)
			pr_cont(" %lx", bitmap->bitmap[i]);
		pr_cont("\n");
	}
}
595
/* Dump an entire IDA tree for debugging (userspace test builds only). */
static void ida_dump(struct ida *ida)
{
	struct xarray *xa = &ida->xa;
	/* The free-tag bits live above ROOT_TAG_SHIFT in xa_flags. */
	pr_debug("ida: %p node %p free %d\n", ida, xa->xa_head,
				xa->xa_flags >> ROOT_TAG_SHIFT);
	ida_dump_entry(xa->xa_head, 0);
}
603#endif
604