#include <linux/bitmap.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/idr.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/xarray.h>
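
/**
 * idr_alloc_u32() - Allocate an ID.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @nextid: Pointer to an ID.
 * @max: The maximum ID to allocate (inclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @nextid and @max.
 * Note that @max is inclusive whereas the @end parameter to idr_alloc()
 * is exclusive.  The new ID is assigned to @nextid before the pointer
 * is inserted into the IDR.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.  Read-only
 * accesses to the IDR may be done under the RCU read lock or may
 * exclude simultaneous writers.
 *
 * Return: 0 if an ID was allocated, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.
 */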
int idr_alloc_u32(struct idr *idr, void *ptr, u32 *nextid,
			unsigned long max, gfp_t gfp)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int base = idr->idr_base;
	unsigned int id = *nextid;

	if (WARN_ON_ONCE(!(idr->idr_rt.xa_flags & ROOT_IS_IDR)))
		idr->idr_rt.xa_flags |= IDR_RT_MARKER;

	id = (id < base) ? 0 : id - base;
	radix_tree_iter_init(&iter, id);
	slot = idr_get_free(&idr->idr_rt, &iter, gfp, max - base);
	if (IS_ERR(slot))
		return PTR_ERR(slot);

	*nextid = iter.index + base;
	/* there is a memory barrier inside radix_tree_iter_replace() */
	radix_tree_iter_replace(&idr->idr_rt, &iter, slot, ptr);
	radix_tree_iter_tag_clear(&idr->idr_rt, &iter, IDR_FREE);

	return 0;
}
EXPORT_SYMBOL_GPL(idr_alloc_u32);
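
/**
 * idr_alloc() - Allocate an ID.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @start: The minimum ID (inclusive).
 * @end: The maximum ID (exclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @start and @end.  If
 * @end is <= 0, it is treated as one larger than %INT_MAX.  This allows
 * callers to use @start + N as @end as long as N is within integer range.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.
 *
 * Return: The newly allocated ID, -EINVAL if @start is negative,
 * -ENOMEM if memory allocation failed, or -ENOSPC if no free IDs
 * could be found.
 */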
int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
{
	u32 id = start;
	int ret;

	if (WARN_ON_ONCE(start < 0))
		return -EINVAL;

	ret = idr_alloc_u32(idr, ptr, &id, end > 0 ? end - 1 : INT_MAX, gfp);
	if (ret)
		return ret;

	return id;
}
EXPORT_SYMBOL_GPL(idr_alloc);
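
/**
 * idr_alloc_cyclic() - Allocate an ID cyclically.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @start: The minimum ID (inclusive).
 * @end: The maximum ID (exclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @start and @end.  It
 * tries to allocate an ID larger than the one last allocated from this
 * IDR; if it runs out of space, it wraps back to @start and searches
 * again.  If @end is <= 0, it is treated as one larger than %INT_MAX.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.
 *
 * Return: The newly allocated ID, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.
 */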
int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
{
	u32 id = idr->idr_next;
	int err, max = end > 0 ? end - 1 : INT_MAX;

	if ((int)id < start)
		id = start;

	err = idr_alloc_u32(idr, ptr, &id, max, gfp);
	if ((err == -ENOSPC) && (id > start)) {
		id = start;
		err = idr_alloc_u32(idr, ptr, &id, max, gfp);
	}
	if (err)
		return err;

	idr->idr_next = id + 1;
	return id;
}
EXPORT_SYMBOL(idr_alloc_cyclic);
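
/**
 * idr_remove() - Remove an ID from the IDR.
 * @idr: IDR handle.
 * @id: Pointer ID.
 *
 * Removes this ID from the IDR.  If the ID was not previously in the IDR,
 * this function returns %NULL.
 *
 * Since this function modifies the IDR, the caller should provide their
 * own locking to ensure that concurrent modification of the same IDR is
 * not possible.
 *
 * Return: The pointer formerly associated with this ID.
 */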
void *idr_remove(struct idr *idr, unsigned long id)
{
	return radix_tree_delete_item(&idr->idr_rt, id - idr->idr_base, NULL);
}
EXPORT_SYMBOL_GPL(idr_remove);
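
/**
 * idr_find() - Return pointer for given ID.
 * @idr: IDR handle.
 * @id: Pointer ID.
 *
 * Looks up the pointer associated with this ID.  A %NULL pointer may
 * indicate that @id is not allocated or that the %NULL pointer was
 * associated with this ID.
 *
 * This function can be called under rcu_read_lock(), given that the leaf
 * pointers' lifetimes are correctly managed.
 *
 * Return: The pointer associated with this ID.
 */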
void *idr_find(const struct idr *idr, unsigned long id)
{
	return radix_tree_lookup(&idr->idr_rt, id - idr->idr_base);
}
EXPORT_SYMBOL_GPL(idr_find);
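
/**
 * idr_for_each() - Iterate through all stored pointers.
 * @idr: IDR handle.
 * @fn: Function to be called for each pointer.
 * @data: Data passed to callback function.
 *
 * The callback function will be called for each entry in @idr, passing
 * the ID, the entry and @data.
 *
 * If @fn returns anything other than %0, the iteration stops and that
 * value is returned from this function.
 *
 * idr_for_each() can be called concurrently with idr_alloc() and
 * idr_remove() if protected by RCU.  Newly added entries may not be
 * seen and deleted entries may be seen, but adding and removing entries
 * will not cause other entries to be skipped, nor spurious ones to be seen.
 */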
int idr_for_each(const struct idr *idr,
		int (*fn)(int id, void *p, void *data), void *data)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	int base = idr->idr_base;

	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, 0) {
		int ret;
		unsigned long id = iter.index + base;

		if (WARN_ON_ONCE(id > INT_MAX))
			break;
		ret = fn(id, rcu_dereference_raw(*slot), data);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL(idr_for_each);
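
/**
 * idr_get_next_ul() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On exit, @nextid is updated
 * to the ID of the found value.  To use in a loop, the value pointed to by
 * nextid must be incremented by the user.
 */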
void *idr_get_next_ul(struct idr *idr, unsigned long *nextid)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	void *entry = NULL;
	unsigned long base = idr->idr_base;
	unsigned long id = *nextid;

	id = (id < base) ? 0 : id - base;
	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, id) {
		entry = rcu_dereference_raw(*slot);
		if (!entry)
			continue;
		if (!xa_is_internal(entry))
			break;
		if (slot != &idr->idr_rt.xa_head && !xa_is_retry(entry))
			break;
		slot = radix_tree_iter_retry(&iter);
	}
	if (!slot)
		return NULL;

	*nextid = iter.index + base;
	return entry;
}
EXPORT_SYMBOL(idr_get_next_ul);
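
/**
 * idr_get_next() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On exit, @nextid is updated
 * to the ID of the found value.  To use in a loop, the value pointed to by
 * nextid must be incremented by the user.  Returns %NULL if there are no
 * further entries with an ID that fits in an int.
 */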
void *idr_get_next(struct idr *idr, int *nextid)
{
	unsigned long id = *nextid;
	void *entry = idr_get_next_ul(idr, &id);

	if (WARN_ON_ONCE(id > INT_MAX))
		return NULL;
	*nextid = id;
	return entry;
}
EXPORT_SYMBOL(idr_get_next);
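
/**
 * idr_replace() - replace pointer for given ID.
 * @idr: IDR handle.
 * @ptr: New pointer to associate with the ID.
 * @id: ID to change.
 *
 * Replace the pointer registered with an ID and return the old value.
 * This function can be called under the RCU read lock concurrently with
 * idr_alloc() and idr_remove() (as long as the ID being removed is not
 * the one being replaced!).
 *
 * Returns: the old value on success.  %-ENOENT indicates that @id was not
 * found.
 */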
void *idr_replace(struct idr *idr, void *ptr, unsigned long id)
{
	struct radix_tree_node *node;
	void __rcu **slot = NULL;
	void *entry;

	id -= idr->idr_base;

	entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot);
	if (!slot || radix_tree_tag_get(&idr->idr_rt, id, IDR_FREE))
		return ERR_PTR(-ENOENT);

	__radix_tree_replace(&idr->idr_rt, node, slot, ptr);

	return entry;
}
EXPORT_SYMBOL(idr_replace);
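
/**
 * DOC: IDA description
 *
 * The IDA is an ID allocator which does not provide the ability to
 * associate an ID with a pointer.  It only needs to store one bit per
 * ID, and so is more space efficient than an IDR.  IDs are stored in
 * an XArray: a sparsely used chunk of IDA_BITMAP_BITS IDs is packed
 * into a value entry, while a denser chunk is stored as a full
 * struct ida_bitmap.  The XA_FREE_MARK is only cleared on an entry
 * once every bit in it is set.
 *
 * The IDA handles its own locking: it is safe to call any of the IDA
 * functions without synchronisation in your code.
 *
 * IDs are currently limited to the range [0-INT_MAX].
 */

/**
 * ida_alloc_range() - Allocate an unused ID.
 * @ida: IDA handle.
 * @min: Lowest ID to allocate.
 * @max: Highest ID to allocate.
 * @gfp: Memory allocation flags.
 *
 * Allocate an ID between @min and @max, inclusive.  The allocated ID
 * will not exceed %INT_MAX, even if @max is larger.
 *
 * Context: Any context. It is safe to call this function without
 * locking in your code.
 * Return: The allocated ID, or %-ENOMEM if memory could not be allocated,
 * or %-ENOSPC if there are no free IDs.
 */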
int ida_alloc_range(struct ida *ida, unsigned int min, unsigned int max,
			gfp_t gfp)
{
	XA_STATE(xas, &ida->xa, min / IDA_BITMAP_BITS);
	unsigned bit = min % IDA_BITMAP_BITS;
	unsigned long flags;
	struct ida_bitmap *bitmap, *alloc = NULL;

	if ((int)min < 0)
		return -ENOSPC;

	if ((int)max < 0)
		max = INT_MAX;

retry:
	xas_lock_irqsave(&xas, flags);
next:
	bitmap = xas_find_marked(&xas, max / IDA_BITMAP_BITS, XA_FREE_MARK);
	if (xas.xa_index > min / IDA_BITMAP_BITS)
		bit = 0;
	if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
		goto nospc;

	if (xa_is_value(bitmap)) {
		unsigned long tmp = xa_to_value(bitmap);

		if (bit < BITS_PER_XA_VALUE) {
			bit = find_next_zero_bit(&tmp, BITS_PER_XA_VALUE, bit);
			if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
				goto nospc;
			if (bit < BITS_PER_XA_VALUE) {
				tmp |= 1UL << bit;
				xas_store(&xas, xa_mk_value(tmp));
				goto out;
			}
		}
		/* Expand the value entry into a full bitmap */
		bitmap = alloc;
		if (!bitmap)
			bitmap = kzalloc(sizeof(*bitmap), GFP_NOWAIT);
		if (!bitmap)
			goto alloc;
		bitmap->bitmap[0] = tmp;
		xas_store(&xas, bitmap);
		if (xas_error(&xas)) {
			bitmap->bitmap[0] = 0;
			goto out;
		}
	}

	if (bitmap) {
		bit = find_next_zero_bit(bitmap->bitmap, IDA_BITMAP_BITS, bit);
		if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
			goto nospc;
		if (bit == IDA_BITMAP_BITS)
			goto next;

		__set_bit(bit, bitmap->bitmap);
		if (bitmap_full(bitmap->bitmap, IDA_BITMAP_BITS))
			xas_clear_mark(&xas, XA_FREE_MARK);
	} else {
		if (bit < BITS_PER_XA_VALUE) {
			bitmap = xa_mk_value(1UL << bit);
		} else {
			bitmap = alloc;
			if (!bitmap)
				bitmap = kzalloc(sizeof(*bitmap), GFP_NOWAIT);
			if (!bitmap)
				goto alloc;
			__set_bit(bit, bitmap->bitmap);
		}
		xas_store(&xas, bitmap);
	}
out:
	xas_unlock_irqrestore(&xas, flags);
	if (xas_nomem(&xas, gfp)) {
		xas.xa_index = min / IDA_BITMAP_BITS;
		bit = min % IDA_BITMAP_BITS;
		goto retry;
	}
	if (bitmap != alloc)
		kfree(alloc);
	if (xas_error(&xas))
		return xas_error(&xas);
	return xas.xa_index * IDA_BITMAP_BITS + bit;
alloc:
	/* Drop the lock to allocate a bitmap with the caller's gfp flags */
	xas_unlock_irqrestore(&xas, flags);
	alloc = kzalloc(sizeof(*bitmap), gfp);
	if (!alloc)
		return -ENOMEM;
	xas_set(&xas, min / IDA_BITMAP_BITS);
	bit = min % IDA_BITMAP_BITS;
	goto retry;
nospc:
	xas_unlock_irqrestore(&xas, flags);
	kfree(alloc);
	return -ENOSPC;
}
EXPORT_SYMBOL(ida_alloc_range);
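
/**
 * ida_free() - Release an allocated ID.
 * @ida: IDA handle.
 * @id: Previously allocated ID.
 *
 * Freeing an ID which was not previously allocated triggers a WARN().
 *
 * Context: Any context. It is safe to call this function without
 * locking in your code.
 */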
void ida_free(struct ida *ida, unsigned int id)
{
	XA_STATE(xas, &ida->xa, id / IDA_BITMAP_BITS);
	unsigned bit = id % IDA_BITMAP_BITS;
	struct ida_bitmap *bitmap;
	unsigned long flags;

	BUG_ON((int)id < 0);

	xas_lock_irqsave(&xas, flags);
	bitmap = xas_load(&xas);

	if (xa_is_value(bitmap)) {
		unsigned long v = xa_to_value(bitmap);
		if (bit >= BITS_PER_XA_VALUE)
			goto err;
		if (!(v & (1UL << bit)))
			goto err;
		v &= ~(1UL << bit);
		if (!v)
			goto delete;
		xas_store(&xas, xa_mk_value(v));
	} else {
		if (!test_bit(bit, bitmap->bitmap))
			goto err;
		__clear_bit(bit, bitmap->bitmap);
		xas_set_mark(&xas, XA_FREE_MARK);
		if (bitmap_empty(bitmap->bitmap, IDA_BITMAP_BITS)) {
			kfree(bitmap);
delete:
			xas_store(&xas, NULL);
		}
	}
	xas_unlock_irqrestore(&xas, flags);
	return;
 err:
	xas_unlock_irqrestore(&xas, flags);
	WARN(1, "ida_free called for id=%d which is not allocated.\n", id);
}
EXPORT_SYMBOL(ida_free);
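
/**
 * ida_destroy() - Free all IDs.
 * @ida: IDA handle.
 *
 * Calling this function frees all IDs and releases all resources used
 * by an IDA.  When this call returns, the IDA is empty and can be reused
 * or freed.  If the IDA is already empty, there is no need to call this
 * function.
 *
 * Context: Any context. It is safe to call this function without
 * locking in your code.
 */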
void ida_destroy(struct ida *ida)
{
	XA_STATE(xas, &ida->xa, 0);
	struct ida_bitmap *bitmap;
	unsigned long flags;

	xas_lock_irqsave(&xas, flags);
	xas_for_each(&xas, bitmap, ULONG_MAX) {
		if (!xa_is_value(bitmap))
			kfree(bitmap);
		xas_store(&xas, NULL);
	}
	xas_unlock_irqrestore(&xas, flags);
}
EXPORT_SYMBOL(ida_destroy);

/* The dump helpers below are only built in userspace (the radix tree test suite). */
#ifndef __KERNEL__
extern void xa_dump_index(unsigned long index, unsigned int shift);
#define IDA_CHUNK_SHIFT		ilog2(IDA_BITMAP_BITS)

static void ida_dump_entry(void *entry, unsigned long index)
{
	unsigned long i;

	if (!entry)
		return;

	if (xa_is_node(entry)) {
		struct xa_node *node = xa_to_node(entry);
		unsigned int shift = node->shift + IDA_CHUNK_SHIFT +
			XA_CHUNK_SHIFT;

		xa_dump_index(index * IDA_BITMAP_BITS, shift);
		xa_dump_node(node);
		for (i = 0; i < XA_CHUNK_SIZE; i++)
			ida_dump_entry(node->slots[i],
					index | (i << node->shift));
	} else if (xa_is_value(entry)) {
		xa_dump_index(index * IDA_BITMAP_BITS, ilog2(BITS_PER_LONG));
		pr_cont("value: data %lx [%px]\n", xa_to_value(entry), entry);
	} else {
		struct ida_bitmap *bitmap = entry;

		xa_dump_index(index * IDA_BITMAP_BITS, IDA_CHUNK_SHIFT);
		pr_cont("bitmap: %p data", bitmap);
		for (i = 0; i < IDA_BITMAP_LONGS; i++)
			pr_cont(" %lx", bitmap->bitmap[i]);
		pr_cont("\n");
	}
}

static void ida_dump(struct ida *ida)
{
	struct xarray *xa = &ida->xa;
	pr_debug("ida: %p node %p free %d\n", ida, xa->xa_head,
				xa->xa_flags >> ROOT_TAG_SHIFT);
	ida_dump_entry(xa->xa_head, 0);
}
#endif