#include <linux/bitmap.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/idr.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/xarray.h>
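
/**
 * idr_alloc_u32() - Allocate an ID.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @nextid: Pointer to an ID.
 * @max: The maximum ID to allocate (inclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @nextid and @max.
 * Note that @max is inclusive whereas the @end parameter to idr_alloc()
 * is exclusive.  The new ID is assigned to @nextid before the pointer
 * is inserted into the IDR, so if @nextid points into the object pointed
 * to by @ptr, a concurrent lookup will not find an uninitialised ID.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.  Read-only
 * accesses to the IDR may be done under the RCU read lock or may
 * exclude simultaneous writes.
 *
 * Return: 0 if an ID was allocated, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.  If an error occurred,
 * @nextid is unchanged.
 */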
int idr_alloc_u32(struct idr *idr, void *ptr, u32 *nextid,
			unsigned long max, gfp_t gfp)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int base = idr->idr_base;
	unsigned int id = *nextid;

	if (WARN_ON_ONCE(!(idr->idr_rt.xa_flags & ROOT_IS_IDR)))
		idr->idr_rt.xa_flags |= IDR_RT_MARKER;

	id = (id < base) ? 0 : id - base;
	radix_tree_iter_init(&iter, id);
	slot = idr_get_free(&idr->idr_rt, &iter, gfp, max - base);
	if (IS_ERR(slot))
		return PTR_ERR(slot);

	*nextid = iter.index + base;
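	/* there is a memory barrier inside radix_tree_iter_replace() */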
	radix_tree_iter_replace(&idr->idr_rt, &iter, slot, ptr);
	radix_tree_iter_tag_clear(&idr->idr_rt, &iter, IDR_FREE);

	return 0;
}
EXPORT_SYMBOL_GPL(idr_alloc_u32);
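
/**
 * idr_alloc() - Allocate an ID.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @start: The minimum ID (inclusive).
 * @end: The maximum ID (exclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @start and @end.  If
 * @end is <= 0, it is treated as one larger than %INT_MAX.  This allows
 * callers to use @start + N as @end as long as N is within integer range.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.  Read-only
 * accesses to the IDR may be done under the RCU read lock or may
 * exclude simultaneous writes.
 *
 * Return: The newly allocated ID, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.
 */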
int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
{
	u32 id = start;
	int ret;

	if (WARN_ON_ONCE(start < 0))
		return -EINVAL;

	ret = idr_alloc_u32(idr, ptr, &id, end > 0 ? end - 1 : INT_MAX, gfp);
	if (ret)
		return ret;

	return id;
}
EXPORT_SYMBOL_GPL(idr_alloc);
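
/**
 * idr_alloc_cyclic() - Allocate an ID cyclically.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @start: The minimum ID (inclusive).
 * @end: The maximum ID (exclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @start and @end.  It
 * tries to allocate an ID larger than the one allocated most recently
 * from this IDR, wrapping back to @start if no larger ID is available.
 * This is useful to avoid reusing an ID soon after it has been freed.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.  Read-only
 * accesses to the IDR may be done under the RCU read lock or may
 * exclude simultaneous writes.
 *
 * Return: The newly allocated ID, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.
 */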
int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
{
	u32 id = idr->idr_next;
	int err, max = end > 0 ? end - 1 : INT_MAX;

	if ((int)id < start)
		id = start;

	err = idr_alloc_u32(idr, ptr, &id, max, gfp);
	if ((err == -ENOSPC) && (id > start)) {
		id = start;
		err = idr_alloc_u32(idr, ptr, &id, max, gfp);
	}
	if (err)
		return err;

	idr->idr_next = id + 1;
	return id;
}
EXPORT_SYMBOL(idr_alloc_cyclic);
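
/**
 * idr_remove() - Remove an ID from the IDR.
 * @idr: IDR handle.
 * @id: Pointer ID.
 *
 * Removes this ID from the IDR.  If the ID was not previously in the IDR,
 * this function returns %NULL.
 *
 * Since this function modifies the IDR, the caller should provide their
 * own locking to ensure that concurrent modification of the same IDR is
 * not possible.
 *
 * Return: The pointer formerly associated with this ID.
 */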
void *idr_remove(struct idr *idr, unsigned long id)
{
	return radix_tree_delete_item(&idr->idr_rt, id - idr->idr_base, NULL);
}
EXPORT_SYMBOL_GPL(idr_remove);
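
/**
 * idr_find() - Return pointer for given ID.
 * @idr: IDR handle.
 * @id: Pointer ID.
 *
 * Looks up the pointer associated with this ID.  A %NULL pointer may
 * indicate that @id is not allocated or that the %NULL pointer was
 * associated with this ID.
 *
 * This function can be called under rcu_read_lock(), given that the leaf
 * pointers' lifetimes are correctly managed.
 *
 * Return: The pointer associated with this ID.
 */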
void *idr_find(const struct idr *idr, unsigned long id)
{
	return radix_tree_lookup(&idr->idr_rt, id - idr->idr_base);
}
EXPORT_SYMBOL_GPL(idr_find);
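
/**
 * idr_for_each() - Iterate through all stored pointers.
 * @idr: IDR handle.
 * @fn: Function to be called for each pointer.
 * @data: Data passed to callback function.
 *
 * The callback function will be called for each entry in @idr, passing
 * the ID, the entry and @data.
 *
 * If @fn returns anything other than %0, the iteration stops and that
 * value is returned from this function.
 *
 * idr_for_each() can be called concurrently with idr_alloc() and
 * idr_remove() if protected by RCU.  Newly added entries may not be
 * seen and deleted entries may be seen, but adding and removing entries
 * will not cause other entries to be skipped, nor stale entries to
 * be seen.
 */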
int idr_for_each(const struct idr *idr,
		int (*fn)(int id, void *p, void *data), void *data)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	int base = idr->idr_base;

	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, 0) {
		int ret;
		unsigned long id = iter.index + base;

		if (WARN_ON_ONCE(id > INT_MAX))
			break;
		ret = fn(id, rcu_dereference_raw(*slot), data);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL(idr_for_each);
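
/**
 * idr_get_next_ul() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On exit, @nextid is updated
 * to the ID of the found value.  To use in a loop, the value pointed to by
 * nextid must be incremented by the user.
 */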
void *idr_get_next_ul(struct idr *idr, unsigned long *nextid)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	void *entry = NULL;
	unsigned long base = idr->idr_base;
	unsigned long id = *nextid;

	id = (id < base) ? 0 : id - base;
	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, id) {
		entry = rcu_dereference_raw(*slot);
		if (!entry)
			continue;
		if (!xa_is_internal(entry))
			break;
		if (slot != &idr->idr_rt.xa_head && !xa_is_retry(entry))
			break;
		slot = radix_tree_iter_retry(&iter);
	}
	if (!slot)
		return NULL;

	*nextid = iter.index + base;
	return entry;
}
EXPORT_SYMBOL(idr_get_next_ul);
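
/**
 * idr_get_next() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On exit, @nextid is updated
 * to the ID of the found value.  To use in a loop, the value pointed to by
 * nextid must be incremented by the user.
 */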
void *idr_get_next(struct idr *idr, int *nextid)
{
	unsigned long id = *nextid;
	void *entry = idr_get_next_ul(idr, &id);

	if (WARN_ON_ONCE(id > INT_MAX))
		return NULL;
	*nextid = id;
	return entry;
}
EXPORT_SYMBOL(idr_get_next);
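
/**
 * idr_replace() - Replace pointer for given ID.
 * @idr: IDR handle.
 * @ptr: New pointer to associate with the ID.
 * @id: ID to change.
 *
 * Replace the pointer registered with an ID and return the old value.
 * This function can be called under the RCU read lock concurrently with
 * idr_alloc() and idr_remove() (as long as the ID being removed is not
 * the one being replaced!).
 *
 * Return: the old value on success.  %-ENOENT indicates that @id was not
 * found.
 */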
void *idr_replace(struct idr *idr, void *ptr, unsigned long id)
{
	struct radix_tree_node *node;
	void __rcu **slot = NULL;
	void *entry;

	id -= idr->idr_base;

	entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot);
	if (!slot || radix_tree_tag_get(&idr->idr_rt, id, IDR_FREE))
		return ERR_PTR(-ENOENT);

	__radix_tree_replace(&idr->idr_rt, node, slot, ptr);

	return entry;
}
EXPORT_SYMBOL(idr_replace);
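
/*
 * The IDA is an ID allocator which does not associate an ID with a
 * pointer, so it only needs to store one bit per ID and is more space
 * efficient than an IDR.
 *
 * IDs are stored as bitmaps in an XArray, with each XArray slot covering
 * a block of IDA_BITMAP_BITS IDs.  While only bits below BITS_PER_XA_VALUE
 * are in use, the bitmap is stored directly in the slot as a value entry;
 * once a higher bit is needed, the slot is converted to point to an
 * allocated struct ida_bitmap.  XA_FREE_MARK is kept set on every slot
 * which still contains at least one free bit, so xas_find_marked() can
 * skip over fully-used blocks.
 */

/**
 * ida_alloc_range() - Allocate an unused ID.
 * @ida: IDA handle.
 * @min: Lowest ID to allocate.
 * @max: Highest ID to allocate.
 * @gfp: Memory allocation flags.
 *
 * Allocate an ID between @min and @max, inclusive.  The allocated ID will
 * not exceed %INT_MAX, even if @max is larger.
 *
 * Context: Any context.  It is safe to call this function without
 * locking in your code.
 * Return: The allocated ID, or %-ENOMEM if memory could not be allocated,
 * or %-ENOSPC if there are no free IDs.
 */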
int ida_alloc_range(struct ida *ida, unsigned int min, unsigned int max,
			gfp_t gfp)
{
	XA_STATE(xas, &ida->xa, min / IDA_BITMAP_BITS);
	unsigned bit = min % IDA_BITMAP_BITS;
	unsigned long flags;
	struct ida_bitmap *bitmap, *alloc = NULL;

	if ((int)min < 0)
		return -ENOSPC;

	if ((int)max < 0)
		max = INT_MAX;

retry:
	xas_lock_irqsave(&xas, flags);
next:
	bitmap = xas_find_marked(&xas, max / IDA_BITMAP_BITS, XA_FREE_MARK);
	if (xas.xa_index > min / IDA_BITMAP_BITS)
		bit = 0;
	if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
		goto nospc;

	if (xa_is_value(bitmap)) {
		unsigned long tmp = xa_to_value(bitmap);

		if (bit < BITS_PER_XA_VALUE) {
			bit = find_next_zero_bit(&tmp, BITS_PER_XA_VALUE, bit);
			if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
				goto nospc;
			if (bit < BITS_PER_XA_VALUE) {
				tmp |= 1UL << bit;
				xas_store(&xas, xa_mk_value(tmp));
				goto out;
			}
		}
		/* Grow the value entry into a full ida_bitmap. */
		bitmap = alloc;
		if (!bitmap)
			bitmap = kzalloc(sizeof(*bitmap), GFP_NOWAIT);
		if (!bitmap)
			goto alloc;
		bitmap->bitmap[0] = tmp;
		xas_store(&xas, bitmap);
		if (xas_error(&xas)) {
			bitmap->bitmap[0] = 0;
			goto out;
		}
	}

	if (bitmap) {
		bit = find_next_zero_bit(bitmap->bitmap, IDA_BITMAP_BITS, bit);
		if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
			goto nospc;
		if (bit == IDA_BITMAP_BITS)
			goto next;

		__set_bit(bit, bitmap->bitmap);
		if (bitmap_full(bitmap->bitmap, IDA_BITMAP_BITS))
			xas_clear_mark(&xas, XA_FREE_MARK);
	} else {
		if (bit < BITS_PER_XA_VALUE) {
			bitmap = xa_mk_value(1UL << bit);
		} else {
			bitmap = alloc;
			if (!bitmap)
				bitmap = kzalloc(sizeof(*bitmap), GFP_NOWAIT);
			if (!bitmap)
				goto alloc;
			__set_bit(bit, bitmap->bitmap);
		}
		xas_store(&xas, bitmap);
	}
out:
	xas_unlock_irqrestore(&xas, flags);
	if (xas_nomem(&xas, gfp)) {
		xas.xa_index = min / IDA_BITMAP_BITS;
		bit = min % IDA_BITMAP_BITS;
		goto retry;
	}
	if (bitmap != alloc)
		kfree(alloc);
	if (xas_error(&xas))
		return xas_error(&xas);
	return xas.xa_index * IDA_BITMAP_BITS + bit;
alloc:
	xas_unlock_irqrestore(&xas, flags);
	alloc = kzalloc(sizeof(*bitmap), gfp);
	if (!alloc)
		return -ENOMEM;
	xas_set(&xas, min / IDA_BITMAP_BITS);
	bit = min % IDA_BITMAP_BITS;
	goto retry;
nospc:
	xas_unlock_irqrestore(&xas, flags);
	kfree(alloc);
	return -ENOSPC;
}
EXPORT_SYMBOL(ida_alloc_range);
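
/**
 * ida_free() - Release an allocated ID.
 * @ida: IDA handle.
 * @id: Previously allocated ID.
 *
 * Context: Any context.  It is safe to call this function without
 * locking in your code.
 */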
void ida_free(struct ida *ida, unsigned int id)
{
	XA_STATE(xas, &ida->xa, id / IDA_BITMAP_BITS);
	unsigned bit = id % IDA_BITMAP_BITS;
	struct ida_bitmap *bitmap;
	unsigned long flags;

	if (WARN_ON_ONCE((int)id < 0))
		return;

	xas_lock_irqsave(&xas, flags);
	bitmap = xas_load(&xas);

	if (!bitmap)
		goto err;
	if (xa_is_value(bitmap)) {
		unsigned long v = xa_to_value(bitmap);
		if (bit >= BITS_PER_XA_VALUE)
			goto err;
		if (!(v & (1UL << bit)))
			goto err;
		v &= ~(1UL << bit);
		if (!v)
			goto delete;
		xas_store(&xas, xa_mk_value(v));
	} else {
		if (!test_bit(bit, bitmap->bitmap))
			goto err;
		__clear_bit(bit, bitmap->bitmap);
		xas_set_mark(&xas, XA_FREE_MARK);
		if (bitmap_empty(bitmap->bitmap, IDA_BITMAP_BITS)) {
			kfree(bitmap);
delete:
			xas_store(&xas, NULL);
		}
	}
	xas_unlock_irqrestore(&xas, flags);
	return;
 err:
	xas_unlock_irqrestore(&xas, flags);
	WARN(1, "ida_free called for id=%d which is not allocated.\n", id);
}
EXPORT_SYMBOL(ida_free);
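
/**
 * ida_destroy() - Free all IDs.
 * @ida: IDA handle.
 *
 * Calling this function frees all IDs and releases all resources used
 * by an IDA.  When this call returns, the IDA is empty and can be reused
 * or freed.  If the IDA is already empty, there is no need to call this
 * function.
 *
 * Context: Any context.  It is safe to call this function without
 * locking in your code.
 */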
void ida_destroy(struct ida *ida)
{
	XA_STATE(xas, &ida->xa, 0);
	struct ida_bitmap *bitmap;
	unsigned long flags;

	xas_lock_irqsave(&xas, flags);
	xas_for_each(&xas, bitmap, ULONG_MAX) {
		if (!xa_is_value(bitmap))
			kfree(bitmap);
		xas_store(&xas, NULL);
	}
	xas_unlock_irqrestore(&xas, flags);
}
EXPORT_SYMBOL(ida_destroy);
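
/*
 * Debug dump helpers, only built by the userspace test suite; they are
 * never compiled into the kernel.
 */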
#ifndef __KERNEL__
extern void xa_dump_index(unsigned long index, unsigned int shift);
#define IDA_CHUNK_SHIFT		ilog2(IDA_BITMAP_BITS)

static void ida_dump_entry(void *entry, unsigned long index)
{
	unsigned long i;

	if (!entry)
		return;

	if (xa_is_node(entry)) {
		struct xa_node *node = xa_to_node(entry);
		unsigned int shift = node->shift + IDA_CHUNK_SHIFT +
			XA_CHUNK_SHIFT;

		xa_dump_index(index * IDA_BITMAP_BITS, shift);
		xa_dump_node(node);
		for (i = 0; i < XA_CHUNK_SIZE; i++)
			ida_dump_entry(node->slots[i],
					index | (i << node->shift));
	} else if (xa_is_value(entry)) {
		xa_dump_index(index * IDA_BITMAP_BITS, ilog2(BITS_PER_LONG));
		pr_cont("value: data %lx [%px]\n", xa_to_value(entry), entry);
	} else {
		struct ida_bitmap *bitmap = entry;

		xa_dump_index(index * IDA_BITMAP_BITS, IDA_CHUNK_SHIFT);
		pr_cont("bitmap: %p data", bitmap);
		for (i = 0; i < IDA_BITMAP_LONGS; i++)
			pr_cont(" %lx", bitmap->bitmap[i]);
		pr_cont("\n");
	}
}

static void ida_dump(struct ida *ida)
{
	struct xarray *xa = &ida->xa;
	pr_debug("ida: %p node %p free %d\n", ida, xa->xa_head,
				xa->xa_flags >> ROOT_TAG_SHIFT);
	ida_dump_entry(xa->xa_head, 0);
}
#endif