#include <linux/bitmap.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/idr.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/xarray.h>
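/**
 * idr_alloc_u32() - Allocate an ID.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @nextid: Pointer to an ID.
 * @max: The maximum ID to allocate (inclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @nextid and @max.
 * Note that @max is inclusive whereas the @end parameter to idr_alloc()
 * is exclusive.  The new ID is assigned to @nextid before the pointer
 * is inserted into the IDR.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.  Read-only
 * accesses to the IDR may be done under the RCU read lock or may
 * exclude simultaneous writers.
 *
 * Return: 0 if an ID was allocated, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.  If an error occurred,
 * @nextid is unchanged.
 */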
int idr_alloc_u32(struct idr *idr, void *ptr, u32 *nextid,
			unsigned long max, gfp_t gfp)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int base = idr->idr_base;
	unsigned int id = *nextid;

	if (WARN_ON_ONCE(!(idr->idr_rt.xa_flags & ROOT_IS_IDR)))
		idr->idr_rt.xa_flags |= IDR_RT_MARKER;

	id = (id < base) ? 0 : id - base;
	radix_tree_iter_init(&iter, id);
	slot = idr_get_free(&idr->idr_rt, &iter, gfp, max - base);
	if (IS_ERR(slot))
		return PTR_ERR(slot);

	*nextid = iter.index + base;
	/* there is a memory barrier inside radix_tree_iter_replace() */
	radix_tree_iter_replace(&idr->idr_rt, &iter, slot, ptr);
	radix_tree_iter_tag_clear(&idr->idr_rt, &iter, IDR_FREE);

	return 0;
}
EXPORT_SYMBOL_GPL(idr_alloc_u32);
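/**
 * idr_alloc() - Allocate an ID.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @start: The minimum ID (inclusive).
 * @end: The maximum ID (exclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @start and @end.  If
 * @end is <= 0, it is treated as one larger than %INT_MAX.  This allows
 * callers to use @start + N as @end as long as N is within integer range.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.  Read-only
 * accesses to the IDR may be done under the RCU read lock or may
 * exclude simultaneous writers.
 *
 * Return: The newly allocated ID, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.
 */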
int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
{
	u32 id = start;
	int ret;

	if (WARN_ON_ONCE(start < 0))
		return -EINVAL;

	ret = idr_alloc_u32(idr, ptr, &id, end > 0 ? end - 1 : INT_MAX, gfp);
	if (ret)
		return ret;

	return id;
}
EXPORT_SYMBOL_GPL(idr_alloc);
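/**
 * idr_alloc_cyclic() - Allocate an ID cyclically.
 * @idr: IDR handle.
 * @ptr: Pointer to be associated with the new ID.
 * @start: The minimum ID (inclusive).
 * @end: The maximum ID (exclusive).
 * @gfp: Memory allocation flags.
 *
 * Allocates an unused ID in the range specified by @start and @end.  If
 * @end is <= 0, it is treated as one larger than %INT_MAX.  The search
 * for an unused ID starts at one above the last ID allocated and wraps
 * back to @start if no free IDs are found before reaching @end.
 *
 * The caller should provide their own locking to ensure that two
 * concurrent modifications to the IDR are not possible.
 *
 * Return: The newly allocated ID, -ENOMEM if memory allocation failed,
 * or -ENOSPC if no free IDs could be found.
 */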
int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
{
	u32 id = idr->idr_next;
	int err, max = end > 0 ? end - 1 : INT_MAX;

	if ((int)id < start)
		id = start;

	err = idr_alloc_u32(idr, ptr, &id, max, gfp);
	if ((err == -ENOSPC) && (id > start)) {
		id = start;
		err = idr_alloc_u32(idr, ptr, &id, max, gfp);
	}
	if (err)
		return err;

	idr->idr_next = id + 1;
	return id;
}
EXPORT_SYMBOL(idr_alloc_cyclic);
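/**
 * idr_remove() - Remove an ID from the IDR.
 * @idr: IDR handle.
 * @id: Pointer ID.
 *
 * Removes this ID from the IDR.  If the ID was not previously in the IDR,
 * this function returns %NULL.
 *
 * Since this function modifies the IDR, the caller should provide their
 * own locking to ensure that concurrent modification of the same IDR is
 * not possible.
 *
 * Return: The pointer formerly associated with this ID.
 */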
void *idr_remove(struct idr *idr, unsigned long id)
{
	return radix_tree_delete_item(&idr->idr_rt, id - idr->idr_base, NULL);
}
EXPORT_SYMBOL_GPL(idr_remove);
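/**
 * idr_find() - Return pointer for given ID.
 * @idr: IDR handle.
 * @id: Pointer ID.
 *
 * Looks up the pointer associated with this ID.  A %NULL pointer may
 * indicate that @id is not allocated or that the %NULL pointer was
 * associated with this ID.
 *
 * This function can be called under rcu_read_lock(), given that the leaf
 * pointers' lifetimes are correctly managed.
 *
 * Return: The pointer associated with this ID.
 */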
void *idr_find(const struct idr *idr, unsigned long id)
{
	return radix_tree_lookup(&idr->idr_rt, id - idr->idr_base);
}
EXPORT_SYMBOL_GPL(idr_find);
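/**
 * idr_for_each() - Iterate through all stored pointers.
 * @idr: IDR handle.
 * @fn: Function to be called for each pointer.
 * @data: Data passed to callback function.
 *
 * The callback function will be called for each entry in @idr, passing
 * the ID, the entry and @data.  If @fn returns anything other than %0,
 * the iteration stops and that value is returned from this function.
 *
 * idr_for_each() can be called concurrently with idr_alloc() and
 * idr_remove() if protected by RCU.  Newly added entries may not be
 * seen and deleted entries may be seen, but adding and removing entries
 * will not cause other entries to be skipped, nor spurious ones to be
 * seen.
 */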
int idr_for_each(const struct idr *idr,
		int (*fn)(int id, void *p, void *data), void *data)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	int base = idr->idr_base;

	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, 0) {
		int ret;
		unsigned long id = iter.index + base;

		if (WARN_ON_ONCE(id > INT_MAX))
			break;
		ret = fn(id, rcu_dereference_raw(*slot), data);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL(idr_for_each);
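/**
 * idr_get_next_ul() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On exit, @nextid is
 * updated to the ID of the found value.  To use in a loop, the value
 * pointed to by nextid must be incremented by the user.
 */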
void *idr_get_next_ul(struct idr *idr, unsigned long *nextid)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	void *entry = NULL;
	unsigned long base = idr->idr_base;
	unsigned long id = *nextid;

	id = (id < base) ? 0 : id - base;
	radix_tree_for_each_slot(slot, &idr->idr_rt, &iter, id) {
		entry = rcu_dereference_raw(*slot);
		if (!entry)
			continue;
		if (!xa_is_internal(entry))
			break;
		if (slot != &idr->idr_rt.xa_head && !xa_is_retry(entry))
			break;
		slot = radix_tree_iter_retry(&iter);
	}
	if (!slot)
		return NULL;

	*nextid = iter.index + base;
	return entry;
}
EXPORT_SYMBOL(idr_get_next_ul);
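/**
 * idr_get_next() - Find next populated entry.
 * @idr: IDR handle.
 * @nextid: Pointer to an ID.
 *
 * Returns the next populated entry in the tree with an ID greater than
 * or equal to the value pointed to by @nextid.  On exit, @nextid is
 * updated to the ID of the found value.  If the next populated entry has
 * an ID larger than %INT_MAX, %NULL is returned.
 */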
void *idr_get_next(struct idr *idr, int *nextid)
{
	unsigned long id = *nextid;
	void *entry = idr_get_next_ul(idr, &id);

	if (WARN_ON_ONCE(id > INT_MAX))
		return NULL;
	*nextid = id;
	return entry;
}
EXPORT_SYMBOL(idr_get_next);
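/**
 * idr_replace() - replace pointer for given ID.
 * @idr: IDR handle.
 * @ptr: New pointer to associate with the ID.
 * @id: ID to change.
 *
 * Replace the pointer registered with an ID and return the old value.
 * This function can be called under the RCU read lock concurrently with
 * idr_alloc() and idr_remove() (as long as the ID being removed is not
 * the one being replaced!).
 *
 * Returns: the old value on success.  %-ENOENT indicates that @id was
 * not found.
 */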
void *idr_replace(struct idr *idr, void *ptr, unsigned long id)
{
	struct radix_tree_node *node;
	void __rcu **slot = NULL;
	void *entry;

	id -= idr->idr_base;

	entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot);
	if (!slot || radix_tree_tag_get(&idr->idr_rt, id, IDR_FREE))
		return ERR_PTR(-ENOENT);

	__radix_tree_replace(&idr->idr_rt, node, slot, ptr);

	return entry;
}
EXPORT_SYMBOL(idr_replace);
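/*
 * Developer's notes on the IDA:
 *
 * The IDA uses the functionality provided by the XArray to store bitmaps
 * in each entry.  The ID is divided by IDA_BITMAP_BITS to find the XArray
 * index, so each entry in the array covers IDA_BITMAP_BITS IDs.  The
 * XA_FREE_MARK is only cleared when all bits in a bitmap are set.
 *
 * As an optimisation, if only a few low bits are set in a given leaf, the
 * bits are stored as a value entry instead of allocating a struct
 * ida_bitmap.  Value entries never have the XA_FREE_MARK cleared because
 * they can always be converted into a bitmap entry.
 */

/**
 * ida_alloc_range() - Allocate an unused ID.
 * @ida: IDA handle.
 * @min: Lowest ID to allocate.
 * @max: Highest ID to allocate.
 * @gfp: Memory allocation flags.
 *
 * Allocate an ID between @min and @max, inclusive.  The allocated ID will
 * not exceed %INT_MAX, even if @max is larger.
 *
 * Context: Any context. It is safe to call this function without
 * locking in your code.
 * Return: The allocated ID, or %-ENOMEM if memory could not be allocated,
 * or %-ENOSPC if there are no free IDs.
 */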
int ida_alloc_range(struct ida *ida, unsigned int min, unsigned int max,
			gfp_t gfp)
{
	XA_STATE(xas, &ida->xa, min / IDA_BITMAP_BITS);
	unsigned bit = min % IDA_BITMAP_BITS;
	unsigned long flags;
	struct ida_bitmap *bitmap, *alloc = NULL;

	if ((int)min < 0)
		return -ENOSPC;

	if ((int)max < 0)
		max = INT_MAX;

retry:
	xas_lock_irqsave(&xas, flags);
next:
	bitmap = xas_find_marked(&xas, max / IDA_BITMAP_BITS, XA_FREE_MARK);
	if (xas.xa_index > min / IDA_BITMAP_BITS)
		bit = 0;
	if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
		goto nospc;

	/* A value entry encodes a handful of low bits directly in the entry */
	if (xa_is_value(bitmap)) {
		unsigned long tmp = xa_to_value(bitmap);

		if (bit < BITS_PER_XA_VALUE) {
			bit = find_next_zero_bit(&tmp, BITS_PER_XA_VALUE, bit);
			if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
				goto nospc;
			if (bit < BITS_PER_XA_VALUE) {
				tmp |= 1UL << bit;
				xas_store(&xas, xa_mk_value(tmp));
				goto out;
			}
		}
		/* No suitable bit in the value entry; convert it to a bitmap */
		bitmap = alloc;
		if (!bitmap)
			bitmap = kzalloc(sizeof(*bitmap), GFP_NOWAIT);
		if (!bitmap)
			goto alloc;
		bitmap->bitmap[0] = tmp;
		xas_store(&xas, bitmap);
		if (xas_error(&xas)) {
			bitmap->bitmap[0] = 0;
			goto out;
		}
	}

	if (bitmap) {
		bit = find_next_zero_bit(bitmap->bitmap, IDA_BITMAP_BITS, bit);
		if (xas.xa_index * IDA_BITMAP_BITS + bit > max)
			goto nospc;
		if (bit == IDA_BITMAP_BITS)
			goto next;

		__set_bit(bit, bitmap->bitmap);
		if (bitmap_full(bitmap->bitmap, IDA_BITMAP_BITS))
			xas_clear_mark(&xas, XA_FREE_MARK);
	} else {
		if (bit < BITS_PER_XA_VALUE) {
			bitmap = xa_mk_value(1UL << bit);
		} else {
			bitmap = alloc;
			if (!bitmap)
				bitmap = kzalloc(sizeof(*bitmap), GFP_NOWAIT);
			if (!bitmap)
				goto alloc;
			__set_bit(bit, bitmap->bitmap);
		}
		xas_store(&xas, bitmap);
	}
out:
	xas_unlock_irqrestore(&xas, flags);
	/* If the XArray needed memory, allocate it with the lock dropped */
	if (xas_nomem(&xas, gfp)) {
		xas.xa_index = min / IDA_BITMAP_BITS;
		bit = min % IDA_BITMAP_BITS;
		goto retry;
	}
	if (bitmap != alloc)
		kfree(alloc);
	if (xas_error(&xas))
		return xas_error(&xas);
	return xas.xa_index * IDA_BITMAP_BITS + bit;
alloc:
	/* Allocate an ida_bitmap outside the lock, then retry from @min */
	xas_unlock_irqrestore(&xas, flags);
	alloc = kzalloc(sizeof(*bitmap), gfp);
	if (!alloc)
		return -ENOMEM;
	xas_set(&xas, min / IDA_BITMAP_BITS);
	bit = min % IDA_BITMAP_BITS;
	goto retry;
nospc:
	xas_unlock_irqrestore(&xas, flags);
	kfree(alloc);
	return -ENOSPC;
}
EXPORT_SYMBOL(ida_alloc_range);
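/**
 * ida_free() - Release an allocated ID.
 * @ida: IDA handle.
 * @id: Previously allocated ID.
 *
 * Context: Any context. It is safe to call this function without
 * locking in your code.
 */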
void ida_free(struct ida *ida, unsigned int id)
{
	XA_STATE(xas, &ida->xa, id / IDA_BITMAP_BITS);
	unsigned bit = id % IDA_BITMAP_BITS;
	struct ida_bitmap *bitmap;
	unsigned long flags;

	if ((int)id < 0)
		return;

	xas_lock_irqsave(&xas, flags);
	bitmap = xas_load(&xas);

	if (xa_is_value(bitmap)) {
		unsigned long v = xa_to_value(bitmap);
		if (bit >= BITS_PER_XA_VALUE)
			goto err;
		if (!(v & (1UL << bit)))
			goto err;
		v &= ~(1UL << bit);
		if (!v)
			goto delete;
		xas_store(&xas, xa_mk_value(v));
	} else {
		if (!test_bit(bit, bitmap->bitmap))
			goto err;
		__clear_bit(bit, bitmap->bitmap);
		xas_set_mark(&xas, XA_FREE_MARK);
		if (bitmap_empty(bitmap->bitmap, IDA_BITMAP_BITS)) {
			kfree(bitmap);
delete:
			xas_store(&xas, NULL);
		}
	}
	xas_unlock_irqrestore(&xas, flags);
	return;
 err:
	xas_unlock_irqrestore(&xas, flags);
	WARN(1, "ida_free called for id=%d which is not allocated.\n", id);
}
EXPORT_SYMBOL(ida_free);
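/**
 * ida_destroy() - Free all IDs.
 * @ida: IDA handle.
 *
 * Calling this function frees all IDs and releases all resources used
 * by an IDA.  When this call returns, the IDA is empty and can be reused
 * or freed.  If the IDA is already empty, there is no need to call this
 * function.
 *
 * Context: Any context. It is safe to call this function without
 * locking in your code.
 */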
void ida_destroy(struct ida *ida)
{
	XA_STATE(xas, &ida->xa, 0);
	struct ida_bitmap *bitmap;
	unsigned long flags;

	xas_lock_irqsave(&xas, flags);
	xas_for_each(&xas, bitmap, ULONG_MAX) {
		if (!xa_is_value(bitmap))
			kfree(bitmap);
		xas_store(&xas, NULL);
	}
	xas_unlock_irqrestore(&xas, flags);
}
EXPORT_SYMBOL(ida_destroy);

#ifndef __KERNEL__
extern void xa_dump_index(unsigned long index, unsigned int shift);
#define IDA_CHUNK_SHIFT		ilog2(IDA_BITMAP_BITS)

static void ida_dump_entry(void *entry, unsigned long index)
{
	unsigned long i;

	if (!entry)
		return;

	if (xa_is_node(entry)) {
		struct xa_node *node = xa_to_node(entry);
		unsigned int shift = node->shift + IDA_CHUNK_SHIFT +
			XA_CHUNK_SHIFT;

		xa_dump_index(index * IDA_BITMAP_BITS, shift);
		xa_dump_node(node);
		for (i = 0; i < XA_CHUNK_SIZE; i++)
			ida_dump_entry(node->slots[i],
					index | (i << node->shift));
	} else if (xa_is_value(entry)) {
		xa_dump_index(index * IDA_BITMAP_BITS, ilog2(BITS_PER_LONG));
		pr_cont("value: data %lx [%px]\n", xa_to_value(entry), entry);
	} else {
		struct ida_bitmap *bitmap = entry;

		xa_dump_index(index * IDA_BITMAP_BITS, IDA_CHUNK_SHIFT);
		pr_cont("bitmap: %p data", bitmap);
		for (i = 0; i < IDA_BITMAP_LONGS; i++)
			pr_cont(" %lx", bitmap->bitmap[i]);
		pr_cont("\n");
	}
}

static void ida_dump(struct ida *ida)
{
	struct xarray *xa = &ida->xa;
	pr_debug("ida: %p node %p free %d\n", ida, xa->xa_head,
				xa->xa_flags >> ROOT_TAG_SHIFT);
	ida_dump_entry(xa->xa_head, 0);
}
#endif