// SPDX-License-Identifier: GPL-2.0+
/*
 * XArray implementation
 * Author: Matthew Wilcox <willy@infradead.org>
 */

#include <linux/bitmap.h>
#include <linux/export.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/xarray.h>
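
/*
 * Coding conventions in this file:
 *
 * @xa is used to refer to the entire xarray.
 * @xas is the 'xarray operation state'.  It may be either a pointer to
 * an xa_state, or an xa_state stored on the stack.
 * @entry refers to something stored in a slot in the xarray.
 * @node refers to an xa_node; usually the one currently being operated on.
 * @offset is the index into the slots array inside an xa_node.
 */
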
static inline unsigned int xa_lock_type(const struct xarray *xa)
{
	return (__force unsigned int)xa->xa_flags & 3;
}

static inline void xas_lock_type(struct xa_state *xas, unsigned int lock_type)
{
	if (lock_type == XA_LOCK_IRQ)
		xas_lock_irq(xas);
	else if (lock_type == XA_LOCK_BH)
		xas_lock_bh(xas);
	else
		xas_lock(xas);
}

static inline void xas_unlock_type(struct xa_state *xas, unsigned int lock_type)
{
	if (lock_type == XA_LOCK_IRQ)
		xas_unlock_irq(xas);
	else if (lock_type == XA_LOCK_BH)
		xas_unlock_bh(xas);
	else
		xas_unlock(xas);
}

static inline bool xa_track_free(const struct xarray *xa)
{
	return xa->xa_flags & XA_FLAGS_TRACK_FREE;
}

static inline bool xa_zero_busy(const struct xarray *xa)
{
	return xa->xa_flags & XA_FLAGS_ZERO_BUSY;
}

static inline void xa_mark_set(struct xarray *xa, xa_mark_t mark)
{
	if (!(xa->xa_flags & XA_FLAGS_MARK(mark)))
		xa->xa_flags |= XA_FLAGS_MARK(mark);
}

static inline void xa_mark_clear(struct xarray *xa, xa_mark_t mark)
{
	if (xa->xa_flags & XA_FLAGS_MARK(mark))
		xa->xa_flags &= ~(XA_FLAGS_MARK(mark));
}

static inline unsigned long *node_marks(struct xa_node *node, xa_mark_t mark)
{
	return node->marks[(__force unsigned)mark];
}

static inline bool node_get_mark(struct xa_node *node,
		unsigned int offset, xa_mark_t mark)
{
	return test_bit(offset, node_marks(node, mark));
}

/* returns true if the bit was set */
static inline bool node_set_mark(struct xa_node *node, unsigned int offset,
				xa_mark_t mark)
{
	return __test_and_set_bit(offset, node_marks(node, mark));
}

/* returns true if the bit was set */
static inline bool node_clear_mark(struct xa_node *node, unsigned int offset,
				xa_mark_t mark)
{
	return __test_and_clear_bit(offset, node_marks(node, mark));
}

static inline bool node_any_mark(struct xa_node *node, xa_mark_t mark)
{
	return !bitmap_empty(node_marks(node, mark), XA_CHUNK_SIZE);
}

static inline void node_mark_all(struct xa_node *node, xa_mark_t mark)
{
	bitmap_fill(node_marks(node, mark), XA_CHUNK_SIZE);
}

#define mark_inc(mark) do { \
	mark = (__force xa_mark_t)((__force unsigned)(mark) + 1); \
} while (0)
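
/*
 * xas_squash_marks() - Merge all marks to the first entry
 * @xas: Array operation state.
 *
 * Set a mark on the first entry if any entry has it set.  Clear marks on
 * all sibling entries.
 */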
static void xas_squash_marks(const struct xa_state *xas)
{
	unsigned int mark = 0;
	unsigned int limit = xas->xa_offset + xas->xa_sibs + 1;

	if (!xas->xa_sibs)
		return;

	do {
		unsigned long *marks = xas->xa_node->marks[mark];
		if (find_next_bit(marks, limit, xas->xa_offset + 1) == limit)
			continue;
		__set_bit(xas->xa_offset, marks);
		bitmap_clear(marks, xas->xa_offset + 1, xas->xa_sibs);
	} while (mark++ != (__force unsigned)XA_MARK_MAX);
}

/* extracts the offset within this node from the index */
static unsigned int get_offset(unsigned long index, struct xa_node *node)
{
	return (index >> node->shift) & XA_CHUNK_MASK;
}

static void xas_set_offset(struct xa_state *xas)
{
	xas->xa_offset = get_offset(xas->xa_index, xas->xa_node);
}

/* move the index either forwards (find) or backwards (sibling slot) */
static void xas_move_index(struct xa_state *xas, unsigned long offset)
{
	unsigned int shift = xas->xa_node->shift;
	xas->xa_index &= ~XA_CHUNK_MASK << shift;
	xas->xa_index += offset << shift;
}

static void xas_next_offset(struct xa_state *xas)
{
	xas->xa_offset++;
	xas_move_index(xas, xas->xa_offset);
}

static void *set_bounds(struct xa_state *xas)
{
	xas->xa_node = XAS_BOUNDS;
	return NULL;
}
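
/*
 * Starts a walk.  If the @xas is already valid, we assume that it's on
 * the right path and just return where we've got to.  If we're in an
 * error state, return NULL.  If the index is outside the current scope
 * of the xarray, return NULL without changing @xas->xa_node.  Otherwise
 * set @xas->xa_node to NULL and return the current head of the array.
 */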
static void *xas_start(struct xa_state *xas)
{
	void *entry;

	if (xas_valid(xas))
		return xas_reload(xas);
	if (xas_error(xas))
		return NULL;

	entry = xa_head(xas->xa);
	if (!xa_is_node(entry)) {
		if (xas->xa_index)
			return set_bounds(xas);
	} else {
		if ((xas->xa_index >> xa_to_node(entry)->shift) > XA_CHUNK_MASK)
			return set_bounds(xas);
	}

	xas->xa_node = NULL;
	return entry;
}

static void *xas_descend(struct xa_state *xas, struct xa_node *node)
{
	unsigned int offset = get_offset(xas->xa_index, node);
	void *entry = xa_entry(xas->xa, node, offset);

	xas->xa_node = node;
	if (xa_is_sibling(entry)) {
		offset = xa_to_sibling(entry);
		entry = xa_entry(xas->xa, node, offset);
		if (node->shift && xa_is_node(entry))
			entry = XA_RETRY_ENTRY;
	}

	xas->xa_offset = offset;
	return entry;
}
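
/**
 * xas_load() - Load an entry from the XArray (advanced).
 * @xas: XArray operation state.
 *
 * Usually walks the @xas to the appropriate state to load the entry
 * stored at xa_index.  However, it will do nothing and return %NULL if
 * @xas is in an error state.  xas_load() will never expand the tree.
 *
 * If the xa_state is set up to operate on a multi-index entry, xas_load()
 * may return %NULL or an internal entry, even if there are entries
 * present within the range specified by @xas.
 *
 * Context: Any context.  The caller should hold the xa_lock or the RCU lock.
 * Return: Usually an entry in the XArray, but see description for exceptions.
 */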
void *xas_load(struct xa_state *xas)
{
	void *entry = xas_start(xas);

	while (xa_is_node(entry)) {
		struct xa_node *node = xa_to_node(entry);

		if (xas->xa_shift > node->shift)
			break;
		entry = xas_descend(xas, node);
		if (node->shift == 0)
			break;
	}
	return entry;
}
EXPORT_SYMBOL_GPL(xas_load);

/* Move the radix tree node cache here */
extern struct kmem_cache *radix_tree_node_cachep;
extern void radix_tree_node_rcu_free(struct rcu_head *head);

#define XA_RCU_FREE	((struct xarray *)1)

static void xa_node_free(struct xa_node *node)
{
	XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
	node->array = XA_RCU_FREE;
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}
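
/*
 * xas_destroy() - Free any resources allocated during the XArray operation.
 * @xas: XArray operation state.
 *
 * Most users will not need to call this function; it is called for you
 * by xas_nomem().
 */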
void xas_destroy(struct xa_state *xas)
{
	struct xa_node *next, *node = xas->xa_alloc;

	while (node) {
		XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
		next = rcu_dereference_raw(node->parent);
		radix_tree_node_rcu_free(&node->rcu_head);
		xas->xa_alloc = node = next;
	}
}
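
/**
 * xas_nomem() - Allocate memory if needed.
 * @xas: XArray operation state.
 * @gfp: Memory allocation flags.
 *
 * If we need to add new nodes to the XArray, we try to allocate memory
 * with GFP_NOWAIT while holding the lock, which will usually succeed.
 * If it fails, @xas is flagged as needing memory to continue.  The caller
 * should drop the lock and call xas_nomem().  If xas_nomem() succeeds,
 * the caller should retry the operation.
 *
 * Forward progress is guaranteed as one node is allocated here and
 * stored in the xa_state where it will be found by xas_alloc().  More
 * nodes will likely be allocated from the slab cache, but we do not tie
 * them up here.
 *
 * Return: true if memory was needed, and was successfully allocated.
 */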
bool xas_nomem(struct xa_state *xas, gfp_t gfp)
{
	if (xas->xa_node != XA_ERROR(-ENOMEM)) {
		xas_destroy(xas);
		return false;
	}
	if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
		gfp |= __GFP_ACCOUNT;
	xas->xa_alloc = kmem_cache_alloc_lru(radix_tree_node_cachep, xas->xa_lru, gfp);
	if (!xas->xa_alloc)
		return false;
	xas->xa_alloc->parent = NULL;
	XA_NODE_BUG_ON(xas->xa_alloc, !list_empty(&xas->xa_alloc->private_list));
	xas->xa_node = XAS_RESTART;
	return true;
}
EXPORT_SYMBOL_GPL(xas_nomem);

/*
 * __xas_nomem() - Drop locks and allocate memory if needed.
 * @xas: XArray operation state.
 * @gfp: Memory allocation flags.
 *
 * Internal variant of xas_nomem().
 *
 * Return: true if memory was needed, and was successfully allocated.
 */
static bool __xas_nomem(struct xa_state *xas, gfp_t gfp)
	__must_hold(xas->xa->xa_lock)
{
	unsigned int lock_type = xa_lock_type(xas->xa);

	if (xas->xa_node != XA_ERROR(-ENOMEM)) {
		xas_destroy(xas);
		return false;
	}
	if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
		gfp |= __GFP_ACCOUNT;
	if (gfpflags_allow_blocking(gfp)) {
		xas_unlock_type(xas, lock_type);
		xas->xa_alloc = kmem_cache_alloc_lru(radix_tree_node_cachep, xas->xa_lru, gfp);
		xas_lock_type(xas, lock_type);
	} else {
		xas->xa_alloc = kmem_cache_alloc_lru(radix_tree_node_cachep, xas->xa_lru, gfp);
	}
	if (!xas->xa_alloc)
		return false;
	xas->xa_alloc->parent = NULL;
	XA_NODE_BUG_ON(xas->xa_alloc, !list_empty(&xas->xa_alloc->private_list));
	xas->xa_node = XAS_RESTART;
	return true;
}

static void xas_update(struct xa_state *xas, struct xa_node *node)
{
	if (xas->xa_update)
		xas->xa_update(node);
	else
		XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
}

static void *xas_alloc(struct xa_state *xas, unsigned int shift)
{
	struct xa_node *parent = xas->xa_node;
	struct xa_node *node = xas->xa_alloc;

	if (xas_invalid(xas))
		return NULL;

	if (node) {
		xas->xa_alloc = NULL;
	} else {
		gfp_t gfp = GFP_NOWAIT | __GFP_NOWARN;

		if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
			gfp |= __GFP_ACCOUNT;

		node = kmem_cache_alloc_lru(radix_tree_node_cachep, xas->xa_lru, gfp);
		if (!node) {
			xas_set_err(xas, -ENOMEM);
			return NULL;
		}
	}

	if (parent) {
		node->offset = xas->xa_offset;
		parent->count++;
		XA_NODE_BUG_ON(node, parent->count > XA_CHUNK_SIZE);
		xas_update(xas, parent);
	}
	XA_NODE_BUG_ON(node, shift > BITS_PER_LONG);
	XA_NODE_BUG_ON(node, !list_empty(&node->private_list));
	node->shift = shift;
	node->count = 0;
	node->nr_values = 0;
	RCU_INIT_POINTER(node->parent, xas->xa_node);
	node->array = xas->xa;

	return node;
}

#ifdef CONFIG_XARRAY_MULTI
/* Returns the number of indices covered by a given xa_state */
static unsigned long xas_size(const struct xa_state *xas)
{
	return (xas->xa_sibs + 1UL) << xas->xa_shift;
}
#endif

/*
 * Use this to calculate the maximum index that will need to be created
 * in order to add the entry described by @xas.  Because we cannot store a
 * multi-index entry at index 0, the calculation is a little more complex
 * than you might expect.
 */
static unsigned long xas_max(struct xa_state *xas)
{
	unsigned long max = xas->xa_index;

#ifdef CONFIG_XARRAY_MULTI
	if (xas->xa_shift || xas->xa_sibs) {
		unsigned long mask = xas_size(xas) - 1;
		max |= mask;
		if (mask == max)
			max++;
	}
#endif

	return max;
}

/* The maximum index that can be contained in the array without expanding it */
static unsigned long max_index(void *entry)
{
	if (!xa_is_node(entry))
		return 0;
	return (XA_CHUNK_SIZE << xa_to_node(entry)->shift) - 1;
}

static void xas_shrink(struct xa_state *xas)
{
	struct xarray *xa = xas->xa;
	struct xa_node *node = xas->xa_node;

	for (;;) {
		void *entry;

		XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
		if (node->count != 1)
			break;
		entry = xa_entry_locked(xa, node, 0);
		if (!entry)
			break;
		if (!xa_is_node(entry) && node->shift)
			break;
		if (xa_is_zero(entry) && xa_zero_busy(xa))
			entry = NULL;
		xas->xa_node = XAS_BOUNDS;

		RCU_INIT_POINTER(xa->xa_head, entry);
		if (xa_track_free(xa) && !node_get_mark(node, 0, XA_FREE_MARK))
			xa_mark_clear(xa, XA_FREE_MARK);

		node->count = 0;
		node->nr_values = 0;
		if (!xa_is_node(entry))
			RCU_INIT_POINTER(node->slots[0], XA_RETRY_ENTRY);
		xas_update(xas, node);
		xa_node_free(node);
		if (!xa_is_node(entry))
			break;
		node = xa_to_node(entry);
		node->parent = NULL;
	}
}
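
/*
 * xas_delete_node() - Attempt to delete an xa_node
 * @xas: Array operation state.
 *
 * Attempts to delete the @xas->xa_node.  This will fail if xa->node has
 * a non-zero reference count.
 */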
static void xas_delete_node(struct xa_state *xas)
{
	struct xa_node *node = xas->xa_node;

	for (;;) {
		struct xa_node *parent;

		XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
		if (node->count)
			break;

		parent = xa_parent_locked(xas->xa, node);
		xas->xa_node = parent;
		xas->xa_offset = node->offset;
		xa_node_free(node);

		if (!parent) {
			xas->xa->xa_head = NULL;
			xas->xa_node = XAS_BOUNDS;
			return;
		}

		parent->slots[xas->xa_offset] = NULL;
		parent->count--;
		XA_NODE_BUG_ON(parent, parent->count > XA_CHUNK_SIZE);
		node = parent;
		xas_update(xas, node);
	}

	if (!node->parent)
		xas_shrink(xas);
}
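
/*
 * xas_free_nodes() - Free this node and all nodes that it references
 * @xas: Array operation state.
 * @top: Node to free
 *
 * This node has been removed from the tree.  We must now free it and all
 * of its subtree.  This code does not search through siblings; the caller
 * has already disconnected @top from the tree.
 */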
static void xas_free_nodes(struct xa_state *xas, struct xa_node *top)
{
	unsigned int offset = 0;
	struct xa_node *node = top;

	for (;;) {
		void *entry = xa_entry_locked(xas->xa, node, offset);

		if (node->shift && xa_is_node(entry)) {
			node = xa_to_node(entry);
			offset = 0;
			continue;
		}
		if (entry)
			RCU_INIT_POINTER(node->slots[offset], XA_RETRY_ENTRY);
		offset++;
		while (offset == XA_CHUNK_SIZE) {
			struct xa_node *parent;

			parent = xa_parent_locked(xas->xa, node);
			offset = node->offset + 1;
			node->count = 0;
			node->nr_values = 0;
			xas_update(xas, node);
			xa_node_free(node);
			if (node == top)
				return;
			node = parent;
		}
	}
}
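
/*
 * xas_expand adds nodes to the head of the tree until it has reached
 * sufficient height to be able to contain @xas->xa_index
 */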
static int xas_expand(struct xa_state *xas, void *head)
{
	struct xarray *xa = xas->xa;
	struct xa_node *node = NULL;
	unsigned int shift = 0;
	unsigned long max = xas_max(xas);

	if (!head) {
		if (max == 0)
			return 0;
		while ((max >> shift) >= XA_CHUNK_SIZE)
			shift += XA_CHUNK_SHIFT;
		return shift + XA_CHUNK_SHIFT;
	} else if (xa_is_node(head)) {
		node = xa_to_node(head);
		shift = node->shift + XA_CHUNK_SHIFT;
	}
	xas->xa_node = NULL;

	while (max > max_index(head)) {
		xa_mark_t mark = 0;

		XA_NODE_BUG_ON(node, shift > BITS_PER_LONG);
		node = xas_alloc(xas, shift);
		if (!node)
			return -ENOMEM;

		node->count = 1;
		if (xa_is_value(head))
			node->nr_values = 1;
		RCU_INIT_POINTER(node->slots[0], head);

		/* Propagate the aggregated mark info to the new child */
		for (;;) {
			if (xa_track_free(xa) && mark == XA_FREE_MARK) {
				node_mark_all(node, XA_FREE_MARK);
				if (!xa_marked(xa, XA_FREE_MARK)) {
					node_clear_mark(node, 0, XA_FREE_MARK);
					xa_mark_set(xa, XA_FREE_MARK);
				}
			} else if (xa_marked(xa, mark)) {
				node_set_mark(node, 0, mark);
			}
			if (mark == XA_MARK_MAX)
				break;
			mark_inc(mark);
		}

		/*
		 * Now that the new node is fully initialised, we can add
		 * it to the tree
		 */
		if (xa_is_node(head)) {
			xa_to_node(head)->offset = 0;
			rcu_assign_pointer(xa_to_node(head)->parent, node);
		}
		head = xa_mk_node(node);
		rcu_assign_pointer(xa->xa_head, head);
		xas_update(xas, node);

		shift += XA_CHUNK_SHIFT;
	}

	xas->xa_node = node;
	return shift;
}
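
/*
 * xas_create() - Create a slot to store an entry in.
 * @xas: XArray operation state.
 * @allow_root: %true if we can store the entry in the root directly
 *
 * Most users will not need to call this function directly, as it is called
 * by xas_store().  It is useful for doing conditional store operations
 * (see the xa_cmpxchg() implementation for an example).
 *
 * Return: If the slot already existed, returns the contents of this slot.
 * If the slot was newly created, returns %NULL.  If it failed to create the
 * slot, returns %NULL and indicates the error in @xas.
 */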
static void *xas_create(struct xa_state *xas, bool allow_root)
{
	struct xarray *xa = xas->xa;
	void *entry;
	void __rcu **slot;
	struct xa_node *node = xas->xa_node;
	int shift;
	unsigned int order = xas->xa_shift;

	if (xas_top(node)) {
		entry = xa_head_locked(xa);
		xas->xa_node = NULL;
		if (!entry && xa_zero_busy(xa))
			entry = XA_ZERO_ENTRY;
		shift = xas_expand(xas, entry);
		if (shift < 0)
			return NULL;
		if (!shift && !allow_root)
			shift = XA_CHUNK_SHIFT;
		entry = xa_head_locked(xa);
		slot = &xa->xa_head;
	} else if (xas_error(xas)) {
		return NULL;
	} else if (node) {
		unsigned int offset = xas->xa_offset;

		shift = node->shift;
		entry = xa_entry_locked(xa, node, offset);
		slot = &node->slots[offset];
	} else {
		shift = 0;
		entry = xa_head_locked(xa);
		slot = &xa->xa_head;
	}

	while (shift > order) {
		shift -= XA_CHUNK_SHIFT;
		if (!entry) {
			node = xas_alloc(xas, shift);
			if (!node)
				break;
			if (xa_track_free(xa))
				node_mark_all(node, XA_FREE_MARK);
			rcu_assign_pointer(*slot, xa_mk_node(node));
		} else if (xa_is_node(entry)) {
			node = xa_to_node(entry);
		} else {
			break;
		}
		entry = xas_descend(xas, node);
		slot = &node->slots[xas->xa_offset];
	}

	return entry;
}
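
/**
 * xas_create_range() - Ensure that stores to this range will succeed
 * @xas: XArray operation state.
 *
 * Creates all of the slots in the range covered by @xas.  Sets @xas to
 * create single-index entries and positions it at the beginning of the
 * range.  This is for the benefit of users which have not yet been
 * converted to use multi-index entries.
 */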
void xas_create_range(struct xa_state *xas)
{
	unsigned long index = xas->xa_index;
	unsigned char shift = xas->xa_shift;
	unsigned char sibs = xas->xa_sibs;

	xas->xa_index |= ((sibs + 1UL) << shift) - 1;
	if (xas_is_node(xas) && xas->xa_node->shift == xas->xa_shift)
		xas->xa_offset |= sibs;
	xas->xa_shift = 0;
	xas->xa_sibs = 0;

	for (;;) {
		xas_create(xas, true);
		if (xas_error(xas))
			goto restore;
		if (xas->xa_index <= (index | XA_CHUNK_MASK))
			goto success;
		xas->xa_index -= XA_CHUNK_SIZE;

		for (;;) {
			struct xa_node *node = xas->xa_node;
			if (node->shift >= shift)
				break;
			xas->xa_node = xa_parent_locked(xas->xa, node);
			xas->xa_offset = node->offset - 1;
			if (node->offset != 0)
				break;
		}
	}

restore:
	xas->xa_shift = shift;
	xas->xa_sibs = sibs;
	xas->xa_index = index;
	return;
success:
	xas->xa_index = index;
	if (xas->xa_node)
		xas_set_offset(xas);
}
EXPORT_SYMBOL_GPL(xas_create_range);

static void update_node(struct xa_state *xas, struct xa_node *node,
		int count, int values)
{
	if (!node || (!count && !values))
		return;

	node->count += count;
	node->nr_values += values;
	XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE);
	XA_NODE_BUG_ON(node, node->nr_values > XA_CHUNK_SIZE);
	xas_update(xas, node);
	if (count < 0)
		xas_delete_node(xas);
}
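
/**
 * xas_store() - Store this entry in the XArray.
 * @xas: XArray operation state.
 * @entry: New entry.
 *
 * If @xas is operating on a multi-index entry, the entry returned by this
 * function is essentially meaningless (it may be an internal entry or it
 * may be %NULL, even if there are non-NULL entries at some of the indices
 * covered by the range).  This is not a problem for any current users,
 * and can be changed if needed.
 *
 * Return: The old entry at this index.
 */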
void *xas_store(struct xa_state *xas, void *entry)
{
	struct xa_node *node;
	void __rcu **slot = &xas->xa->xa_head;
	unsigned int offset, max;
	int count = 0;
	int values = 0;
	void *first, *next;
	bool value = xa_is_value(entry);

	if (entry) {
		bool allow_root = !xa_is_node(entry) && !xa_is_zero(entry);
		first = xas_create(xas, allow_root);
	} else {
		first = xas_load(xas);
	}

	if (xas_invalid(xas))
		return first;
	node = xas->xa_node;
	if (node && (xas->xa_shift < node->shift))
		xas->xa_sibs = 0;
	if ((first == entry) && !xas->xa_sibs)
		return first;

	next = first;
	offset = xas->xa_offset;
	max = xas->xa_offset + xas->xa_sibs;
	if (node) {
		slot = &node->slots[offset];
		if (xas->xa_sibs)
			xas_squash_marks(xas);
	}
	if (!entry)
		xas_init_marks(xas);

	for (;;) {
		/*
		 * Must clear the marks before setting the entry to NULL,
		 * otherwise xas_for_each_marked may find a NULL entry and
		 * stop early.  rcu_assign_pointer contains a release barrier
		 * so the mark clearing will appear to happen before the
		 * entry is set to NULL.
		 */
		rcu_assign_pointer(*slot, entry);
		if (xa_is_node(next) && (!node || node->shift))
			xas_free_nodes(xas, xa_to_node(next));
		if (!node)
			break;
		count += !next - !entry;
		values += !xa_is_value(first) - !value;
		if (entry) {
			if (offset == max)
				break;
			if (!xa_is_sibling(entry))
				entry = xa_mk_sibling(xas->xa_offset);
		} else {
			if (offset == XA_CHUNK_MASK)
				break;
		}
		next = xa_entry_locked(xas->xa, node, ++offset);
		if (!xa_is_sibling(next)) {
			if (!entry && (offset > max))
				break;
			first = next;
		}
		slot++;
	}

	update_node(xas, node, count, values);
	return first;
}
EXPORT_SYMBOL_GPL(xas_store);

/**
 * xas_get_mark() - Returns the state of this mark.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Return: true if the mark is set, false if the mark is clear or @xas
 * is in an error state.
 */
bool xas_get_mark(const struct xa_state *xas, xa_mark_t mark)
{
	if (xas_invalid(xas))
		return false;
	if (!xas->xa_node)
		return xa_marked(xas->xa, mark);
	return node_get_mark(xas->xa_node, xas->xa_offset, mark);
}
EXPORT_SYMBOL_GPL(xas_get_mark);

/**
 * xas_set_mark() - Sets the mark on this entry and its parents.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Sets the specified mark on this entry, and walks up the tree setting it
 * on all the ancestor entries.  Does nothing if @xas has not been walked to
 * an entry, or is in an error state.
 */
void xas_set_mark(const struct xa_state *xas, xa_mark_t mark)
{
	struct xa_node *node = xas->xa_node;
	unsigned int offset = xas->xa_offset;

	if (xas_invalid(xas))
		return;

	while (node) {
		if (node_set_mark(node, offset, mark))
			return;
		offset = node->offset;
		node = xa_parent_locked(xas->xa, node);
	}

	if (!xa_marked(xas->xa, mark))
		xa_mark_set(xas->xa, mark);
}
EXPORT_SYMBOL_GPL(xas_set_mark);

/**
 * xas_clear_mark() - Clears the mark on this entry and its parents.
 * @xas: XArray operation state.
 * @mark: Mark number.
 *
 * Clears the specified mark on this entry, and walks back to the head
 * attempting to clear it on all the ancestor entries.  Does nothing if
 * @xas has not been walked to an entry, or is in an error state.
 */
void xas_clear_mark(const struct xa_state *xas, xa_mark_t mark)
{
	struct xa_node *node = xas->xa_node;
	unsigned int offset = xas->xa_offset;

	if (xas_invalid(xas))
		return;

	while (node) {
		if (!node_clear_mark(node, offset, mark))
			return;
		if (node_any_mark(node, mark))
			return;

		offset = node->offset;
		node = xa_parent_locked(xas->xa, node);
	}

	if (xa_marked(xas->xa, mark))
		xa_mark_clear(xas->xa, mark);
}
EXPORT_SYMBOL_GPL(xas_clear_mark);
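
/**
 * xas_init_marks() - Initialise all marks for the entry
 * @xas: Array operations state.
 *
 * Initialise all marks for the entry specified by @xas.  If we're tracking
 * free entries with a mark, we need to set it on all entries.  All other
 * marks are cleared.
 *
 * This implementation is not as efficient as it could be; we may walk
 * up the tree multiple times.
 */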
void xas_init_marks(const struct xa_state *xas)
{
	xa_mark_t mark = 0;

	for (;;) {
		if (xa_track_free(xas->xa) && mark == XA_FREE_MARK)
			xas_set_mark(xas, mark);
		else
			xas_clear_mark(xas, mark);
		if (mark == XA_MARK_MAX)
			break;
		mark_inc(mark);
	}
}
EXPORT_SYMBOL_GPL(xas_init_marks);

#ifdef CONFIG_XARRAY_MULTI
static unsigned int node_get_marks(struct xa_node *node, unsigned int offset)
{
	unsigned int marks = 0;
	xa_mark_t mark = XA_MARK_0;

	for (;;) {
		if (node_get_mark(node, offset, mark))
			marks |= 1 << (__force unsigned int)mark;
		if (mark == XA_MARK_MAX)
			break;
		mark_inc(mark);
	}

	return marks;
}

static void node_set_marks(struct xa_node *node, unsigned int offset,
			struct xa_node *child, unsigned int marks)
{
	xa_mark_t mark = XA_MARK_0;

	for (;;) {
		if (marks & (1 << (__force unsigned int)mark)) {
			node_set_mark(node, offset, mark);
			if (child)
				node_mark_all(child, mark);
		}
		if (mark == XA_MARK_MAX)
			break;
		mark_inc(mark);
	}
}

/**
 * xas_split_alloc() - Allocate memory for splitting an entry.
 * @xas: XArray operation state.
 * @entry: New entry which will be stored in the array.
 * @order: Current entry order.
 * @gfp: Memory allocation flags.
 *
 * This function should be called before calling xas_split().
 * If necessary, it will allocate new nodes (and fill them with @entry)
 * to prepare for the upcoming split of an entry of @order size into
 * entries of the order stored in the @xas.
 *
 * Context: May sleep if @gfp flags permit.
 */
void xas_split_alloc(struct xa_state *xas, void *entry, unsigned int order,
		gfp_t gfp)
{
	unsigned int sibs = (1 << (order % XA_CHUNK_SHIFT)) - 1;
	unsigned int mask = xas->xa_sibs;

	/* XXX: no support for splitting really large entries yet */
	if (WARN_ON(xas->xa_shift + 2 * XA_CHUNK_SHIFT < order))
		goto nomem;
	if (xas->xa_shift + XA_CHUNK_SHIFT > order)
		return;

	do {
		unsigned int i;
		void *sibling = NULL;
		struct xa_node *node;

		node = kmem_cache_alloc_lru(radix_tree_node_cachep, xas->xa_lru, gfp);
		if (!node)
			goto nomem;
		node->array = xas->xa;
		for (i = 0; i < XA_CHUNK_SIZE; i++) {
			if ((i & mask) == 0) {
				RCU_INIT_POINTER(node->slots[i], entry);
				sibling = xa_mk_sibling(i);
			} else {
				RCU_INIT_POINTER(node->slots[i], sibling);
			}
		}
		RCU_INIT_POINTER(node->parent, xas->xa_alloc);
		xas->xa_alloc = node;
	} while (sibs-- > 0);

	return;
nomem:
	xas_destroy(xas);
	xas_set_err(xas, -ENOMEM);
}
EXPORT_SYMBOL_GPL(xas_split_alloc);
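
/**
 * xas_split() - Split a multi-index entry into smaller entries.
 * @xas: XArray operation state.
 * @entry: New entry to store in the array.
 * @order: Current entry order.
 *
 * The size of the new entries is set in @xas.  The value in @entry is
 * copied to all the replacement entries.
 *
 * Context: Any context.  The caller should hold the xa_lock.
 */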
void xas_split(struct xa_state *xas, void *entry, unsigned int order)
{
	unsigned int sibs = (1 << (order % XA_CHUNK_SHIFT)) - 1;
	unsigned int offset, marks;
	struct xa_node *node;
	void *curr = xas_load(xas);
	int values = 0;

	node = xas->xa_node;
	if (xas_top(node))
		return;

	marks = node_get_marks(node, xas->xa_offset);

	offset = xas->xa_offset + sibs;
	do {
		if (xas->xa_shift < node->shift) {
			struct xa_node *child = xas->xa_alloc;

			xas->xa_alloc = rcu_dereference_raw(child->parent);
			child->shift = node->shift - XA_CHUNK_SHIFT;
			child->offset = offset;
			child->count = XA_CHUNK_SIZE;
			child->nr_values = xa_is_value(entry) ?
					XA_CHUNK_SIZE : 0;
			RCU_INIT_POINTER(child->parent, node);
			node_set_marks(node, offset, child, marks);
			rcu_assign_pointer(node->slots[offset],
					xa_mk_node(child));
			if (xa_is_value(curr))
				values--;
			xas_update(xas, child);
		} else {
			unsigned int canon = offset - xas->xa_sibs;

			node_set_marks(node, canon, NULL, marks);
			rcu_assign_pointer(node->slots[canon], entry);
			while (offset > canon)
				rcu_assign_pointer(node->slots[offset--],
						xa_mk_sibling(canon));
			values += (xa_is_value(entry) - xa_is_value(curr)) *
					(xas->xa_sibs + 1);
		}
	} while (offset-- > xas->xa_offset);

	node->nr_values += values;
	xas_update(xas, node);
}
EXPORT_SYMBOL_GPL(xas_split);
#endif
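
/**
 * xas_pause() - Pause a walk to drop a lock.
 * @xas: XArray operation state.
 *
 * Some users need to pause a walk and drop the lock they're holding in
 * order to yield to a higher priority thread or carry out an operation
 * on an entry.  Those users should call this function before they drop
 * the lock.  It resets the @xas to be suitable for the next iteration
 * of the loop after the user has reacquired the lock.  If most entries
 * found during a walk require you to call xas_pause(), the xa_for_each()
 * iterator may be more appropriate.
 *
 * Note that xas_pause() only works for forward iteration.  If a user needs
 * to pause a reverse iteration, we will need a xas_pause_rev().
 */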
void xas_pause(struct xa_state *xas)
{
	struct xa_node *node = xas->xa_node;

	if (xas_invalid(xas))
		return;

	xas->xa_node = XAS_RESTART;
	if (node) {
		unsigned long offset = xas->xa_offset;

		while (++offset < XA_CHUNK_SIZE) {
			if (!xa_is_sibling(xa_entry(xas->xa, node, offset)))
				break;
		}
		xas->xa_index += (offset - xas->xa_offset) << node->shift;
		if (xas->xa_index == 0)
			xas->xa_node = XAS_BOUNDS;
	} else {
		xas->xa_index++;
	}
}
EXPORT_SYMBOL_GPL(xas_pause);

/*
 * __xas_prev() - Find the previous entry in the XArray.
 * @xas: XArray operation state.
 *
 * Helper function for xas_prev() which handles all the complicated cases
 * out of line.
 */
void *__xas_prev(struct xa_state *xas)
{
	void *entry;

	if (!xas_frozen(xas->xa_node))
		xas->xa_index--;
	if (!xas->xa_node)
		return set_bounds(xas);
	if (xas_not_node(xas->xa_node))
		return xas_load(xas);

	if (xas->xa_offset != get_offset(xas->xa_index, xas->xa_node))
		xas->xa_offset--;

	/* xa_offset is an unsigned char; 255 means it wrapped below zero */
	while (xas->xa_offset == 255) {
		xas->xa_offset = xas->xa_node->offset - 1;
		xas->xa_node = xa_parent(xas->xa, xas->xa_node);
		if (!xas->xa_node)
			return set_bounds(xas);
	}

	for (;;) {
		entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
		if (!xa_is_node(entry))
			return entry;

		xas->xa_node = xa_to_node(entry);
		xas_set_offset(xas);
	}
}
EXPORT_SYMBOL_GPL(__xas_prev);

/*
 * __xas_next() - Find the next entry in the XArray.
 * @xas: XArray operation state.
 *
 * Helper function for xas_next() which handles all the complicated cases
 * out of line.
 */
void *__xas_next(struct xa_state *xas)
{
	void *entry;

	if (!xas_frozen(xas->xa_node))
		xas->xa_index++;
	if (!xas->xa_node)
		return set_bounds(xas);
	if (xas_not_node(xas->xa_node))
		return xas_load(xas);

	if (xas->xa_offset != get_offset(xas->xa_index, xas->xa_node))
		xas->xa_offset++;

	while (xas->xa_offset == XA_CHUNK_SIZE) {
		xas->xa_offset = xas->xa_node->offset + 1;
		xas->xa_node = xa_parent(xas->xa, xas->xa_node);
		if (!xas->xa_node)
			return set_bounds(xas);
	}

	for (;;) {
		entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
		if (!xa_is_node(entry))
			return entry;

		xas->xa_node = xa_to_node(entry);
		xas_set_offset(xas);
	}
}
EXPORT_SYMBOL_GPL(__xas_next);
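
/**
 * xas_find() - Find the next present entry in the XArray.
 * @xas: XArray operation state.
 * @max: Highest index to return.
 *
 * If the @xas has not yet been walked to an entry, return the entry
 * which has an index >= xas.xa_index.  If it has been walked, the entry
 * currently being pointed at has been processed, and so we move to the
 * next entry.
 *
 * If no entry is found and the array is smaller than @max, the iterator
 * is set to the smallest index not yet in the array.  This allows @xas
 * to be immediately passed to xas_store().
 *
 * Return: The entry, if found, otherwise %NULL.
 */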
void *xas_find(struct xa_state *xas, unsigned long max)
{
	void *entry;

	if (xas_error(xas) || xas->xa_node == XAS_BOUNDS)
		return NULL;
	if (xas->xa_index > max)
		return set_bounds(xas);

	if (!xas->xa_node) {
		xas->xa_index = 1;
		return set_bounds(xas);
	} else if (xas->xa_node == XAS_RESTART) {
		entry = xas_load(xas);
		if (entry || xas_not_node(xas->xa_node))
			return entry;
	} else if (!xas->xa_node->shift &&
		    xas->xa_offset != (xas->xa_index & XA_CHUNK_MASK)) {
		xas->xa_offset = ((xas->xa_index - 1) & XA_CHUNK_MASK) + 1;
	}

	xas_next_offset(xas);

	while (xas->xa_node && (xas->xa_index <= max)) {
		if (unlikely(xas->xa_offset == XA_CHUNK_SIZE)) {
			xas->xa_offset = xas->xa_node->offset + 1;
			xas->xa_node = xa_parent(xas->xa, xas->xa_node);
			continue;
		}

		entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
		if (xa_is_node(entry)) {
			xas->xa_node = xa_to_node(entry);
			xas->xa_offset = 0;
			continue;
		}
		if (entry && !xa_is_sibling(entry))
			return entry;

		xas_next_offset(xas);
	}

	if (!xas->xa_node)
		xas->xa_node = XAS_BOUNDS;
	return NULL;
}
EXPORT_SYMBOL_GPL(xas_find);
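
/**
 * xas_find_marked() - Find the next marked entry in the XArray.
 * @xas: XArray operation state.
 * @max: Highest index to return.
 * @mark: Mark number to search for.
 *
 * If the @xas has not yet been walked to an entry, return the marked entry
 * which has an index >= xas.xa_index.  If it has been walked, the entry
 * currently being pointed at has been processed, and so we return the
 * first marked entry with an index > xas.xa_index.
 *
 * If no marked entry is found and the array is smaller than @max, @xas is
 * set to the bounds state and xas->xa_index is set to the smallest index
 * not yet in the array.  This allows @xas to be immediately passed to
 * xas_store().
 *
 * If no entry is found before @max is reached, @xas is set to the restart
 * state.
 *
 * Return: The entry, if found, otherwise %NULL.
 */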
void *xas_find_marked(struct xa_state *xas, unsigned long max, xa_mark_t mark)
{
	bool advance = true;
	unsigned int offset;
	void *entry;

	if (xas_error(xas))
		return NULL;
	if (xas->xa_index > max)
		goto max;

	if (!xas->xa_node) {
		xas->xa_index = 1;
		goto out;
	} else if (xas_top(xas->xa_node)) {
		advance = false;
		entry = xa_head(xas->xa);
		xas->xa_node = NULL;
		if (xas->xa_index > max_index(entry))
			goto out;
		if (!xa_is_node(entry)) {
			if (xa_marked(xas->xa, mark))
				return entry;
			xas->xa_index = 1;
			goto out;
		}
		xas->xa_node = xa_to_node(entry);
		xas->xa_offset = xas->xa_index >> xas->xa_node->shift;
	}

	while (xas->xa_index <= max) {
		if (unlikely(xas->xa_offset == XA_CHUNK_SIZE)) {
			xas->xa_offset = xas->xa_node->offset + 1;
			xas->xa_node = xa_parent(xas->xa, xas->xa_node);
			if (!xas->xa_node)
				break;
			advance = false;
			continue;
		}

		if (!advance) {
			entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
			if (xa_is_sibling(entry)) {
				xas->xa_offset = xa_to_sibling(entry);
				xas_move_index(xas, xas->xa_offset);
			}
		}

		offset = xas_find_chunk(xas, advance, mark);
		if (offset > xas->xa_offset) {
			advance = false;
			xas_move_index(xas, offset);
			/* Mind the wrap */
			if ((xas->xa_index - 1) >= max)
				goto max;
			xas->xa_offset = offset;
			if (offset == XA_CHUNK_SIZE)
				continue;
		}

		entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
		if (!entry && !(xa_track_free(xas->xa) && mark == XA_FREE_MARK))
			continue;
		if (!xa_is_node(entry))
			return entry;
		xas->xa_node = xa_to_node(entry);
		xas_set_offset(xas);
	}

out:
	if (xas->xa_index > max)
		goto max;
	return set_bounds(xas);
max:
	xas->xa_node = XAS_RESTART;
	return NULL;
}
EXPORT_SYMBOL_GPL(xas_find_marked);
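
/**
 * xas_find_conflict() - Find the next present entry in a range.
 * @xas: XArray operation state.
 *
 * The @xas describes both a range and a position within that range.
 *
 * Context: Any context.  Expects xa_lock to be held.
 * Return: The next entry in the range covered by @xas or %NULL.
 */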
void *xas_find_conflict(struct xa_state *xas)
{
	void *curr;

	if (xas_error(xas))
		return NULL;

	if (!xas->xa_node)
		return NULL;

	if (xas_top(xas->xa_node)) {
		curr = xas_start(xas);
		if (!curr)
			return NULL;
		while (xa_is_node(curr)) {
			struct xa_node *node = xa_to_node(curr);
			curr = xas_descend(xas, node);
		}
		if (curr)
			return curr;
	}

	if (xas->xa_node->shift > xas->xa_shift)
		return NULL;

	for (;;) {
		if (xas->xa_node->shift == xas->xa_shift) {
			if ((xas->xa_offset & xas->xa_sibs) == xas->xa_sibs)
				break;
		} else if (xas->xa_offset == XA_CHUNK_MASK) {
			xas->xa_offset = xas->xa_node->offset;
			xas->xa_node = xa_parent_locked(xas->xa, xas->xa_node);
			if (!xas->xa_node)
				break;
			continue;
		}
		curr = xa_entry_locked(xas->xa, xas->xa_node, ++xas->xa_offset);
		if (xa_is_sibling(curr))
			continue;
		while (xa_is_node(curr)) {
			xas->xa_node = xa_to_node(curr);
			xas->xa_offset = 0;
			curr = xa_entry_locked(xas->xa, xas->xa_node, 0);
		}
		if (curr)
			return curr;
	}
	xas->xa_offset -= xas->xa_sibs;
	return NULL;
}
EXPORT_SYMBOL_GPL(xas_find_conflict);
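
/**
 * xa_load() - Load an entry from an XArray.
 * @xa: XArray.
 * @index: index into array.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The entry at @index in @xa.
 */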
void *xa_load(struct xarray *xa, unsigned long index)
{
	XA_STATE(xas, xa, index);
	void *entry;

	rcu_read_lock();
	do {
		entry = xas_load(&xas);
		if (xa_is_zero(entry))
			entry = NULL;
	} while (xas_retry(&xas, entry));
	rcu_read_unlock();

	return entry;
}
EXPORT_SYMBOL(xa_load);

static void *xas_result(struct xa_state *xas, void *curr)
{
	if (xa_is_zero(curr))
		return NULL;
	if (xas_error(xas))
		curr = xas->xa_node;
	return curr;
}

/**
 * __xa_erase() - Erase this entry from the XArray while locked.
 * @xa: XArray.
 * @index: Index into array.
 *
 * After this function returns, loading from @index will return %NULL.
 * If the index is part of a multi-index entry, all indices will be erased
 * and none of the entries will be part of a multi-index entry.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 * Return: The entry which used to be at this index.
 */
void *__xa_erase(struct xarray *xa, unsigned long index)
{
	XA_STATE(xas, xa, index);
	return xas_result(&xas, xas_store(&xas, NULL));
}
EXPORT_SYMBOL(__xa_erase);

/**
 * xa_erase() - Erase this entry from the XArray.
 * @xa: XArray.
 * @index: Index of entry.
 *
 * After this function returns, loading from @index will return %NULL.
 * If the index is part of a multi-index entry, all indices will be erased
 * and none of the entries will be part of a multi-index entry.
 *
 * Context: Any context.  Takes and releases the xa_lock.
 * Return: The entry which used to be at this index.
 */
void *xa_erase(struct xarray *xa, unsigned long index)
{
	void *entry;

	xa_lock(xa);
	entry = __xa_erase(xa, index);
	xa_unlock(xa);

	return entry;
}
EXPORT_SYMBOL(xa_erase);
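
/**
 * __xa_store() - Store this entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * You must already be holding the xa_lock when calling this function.
 * It will drop the lock if needed to allocate memory, and then reacquire
 * it afterwards.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: The old entry at this index or xa_err() if an error happened.
 */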
void *__xa_store(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
	XA_STATE(xas, xa, index);
	void *curr;

	if (WARN_ON_ONCE(xa_is_advanced(entry)))
		return XA_ERROR(-EINVAL);
	if (xa_track_free(xa) && !entry)
		entry = XA_ZERO_ENTRY;

	do {
		curr = xas_store(&xas, entry);
		if (xa_track_free(xa))
			xas_clear_mark(&xas, XA_FREE_MARK);
	} while (__xas_nomem(&xas, gfp));

	return xas_result(&xas, curr);
}
EXPORT_SYMBOL(__xa_store);

/**
 * xa_store() - Store this entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * After this function returns, loads from this index will return @entry.
 * Storing into an existing multi-index entry updates the entry of every index.
 * The marks associated with @index are unaffected unless @entry is %NULL.
 *
 * Context: Any context.  Takes and releases the xa_lock.
 * May sleep if the @gfp flags permit.
 * Return: The old entry at this index on success, xa_err(-EINVAL) if @entry
 * cannot be stored in an XArray, or xa_err(-ENOMEM) if memory allocation
 * failed.
 */
void *xa_store(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
	void *curr;

	xa_lock(xa);
	curr = __xa_store(xa, index, entry, gfp);
	xa_unlock(xa);

	return curr;
}
EXPORT_SYMBOL(xa_store);

/**
 * __xa_cmpxchg() - Conditionally replace an entry in the XArray.
 * @xa: XArray.
 * @index: Index into array.
 * @old: Old value to test against.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * You must already be holding the xa_lock when calling this function.
 * It will drop the lock if needed to allocate memory, and then reacquire
 * it afterwards.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: The old entry at this index or xa_err() if an error happened.
 */
void *__xa_cmpxchg(struct xarray *xa, unsigned long index,
			void *old, void *entry, gfp_t gfp)
{
	XA_STATE(xas, xa, index);
	void *curr;

	if (WARN_ON_ONCE(xa_is_advanced(entry)))
		return XA_ERROR(-EINVAL);

	do {
		curr = xas_load(&xas);
		if (curr == old) {
			xas_store(&xas, entry);
			if (xa_track_free(xa) && entry && !curr)
				xas_clear_mark(&xas, XA_FREE_MARK);
		}
	} while (__xas_nomem(&xas, gfp));

	return xas_result(&xas, curr);
}
EXPORT_SYMBOL(__xa_cmpxchg);
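
/**
 * __xa_insert() - Store this entry in the XArray if no entry is present.
 * @xa: XArray.
 * @index: Index into array.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * Inserting a NULL entry will store a reserved entry (like xa_reserve())
 * if no entry is present.  Inserting will fail if a reserved entry is
 * present, even though loading from this index will return NULL.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 if the store succeeded.  -EBUSY if another entry was present.
 * -ENOMEM if memory could not be allocated.
 */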
int __xa_insert(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp)
{
	XA_STATE(xas, xa, index);
	void *curr;

	if (WARN_ON_ONCE(xa_is_advanced(entry)))
		return -EINVAL;
	if (!entry)
		entry = XA_ZERO_ENTRY;

	do {
		curr = xas_load(&xas);
		if (!curr) {
			xas_store(&xas, entry);
			if (xa_track_free(xa))
				xas_clear_mark(&xas, XA_FREE_MARK);
		} else {
			xas_set_err(&xas, -EBUSY);
		}
	} while (__xas_nomem(&xas, gfp));

	return xas_error(&xas);
}
EXPORT_SYMBOL(__xa_insert);

#ifdef CONFIG_XARRAY_MULTI
static void xas_set_range(struct xa_state *xas, unsigned long first,
		unsigned long last)
{
	unsigned int shift = 0;
	unsigned long sibs = last - first;
	unsigned int offset = XA_CHUNK_MASK;

	xas_set(xas, first);

	while ((first & XA_CHUNK_MASK) == 0) {
		if (sibs < XA_CHUNK_MASK)
			break;
		if ((sibs == XA_CHUNK_MASK) && (offset < XA_CHUNK_MASK))
			break;
		shift += XA_CHUNK_SHIFT;
		if (offset == XA_CHUNK_MASK)
			offset = sibs & XA_CHUNK_MASK;
		sibs >>= XA_CHUNK_SHIFT;
		first >>= XA_CHUNK_SHIFT;
	}

	offset = first & XA_CHUNK_MASK;
	if (offset + sibs > XA_CHUNK_MASK)
		sibs = XA_CHUNK_MASK - offset;
	if ((((first + sibs + 1) << shift) - 1) > last)
		sibs -= 1;

	xas->xa_shift = shift;
	xas->xa_sibs = sibs;
}

/**
 * xa_store_range() - Store this entry at a range of indices.
 * @xa: XArray.
 * @first: First index to affect.
 * @last: Last index to affect.
 * @entry: New entry.
 * @gfp: Memory allocation flags.
 *
 * After this function returns, loads from any index between @first and @last,
 * inclusive will return @entry.
 * Storing into an existing multi-index entry updates the entry of every index.
 * The marks associated with @index are unaffected unless @entry is %NULL.
 *
 * Context: Process context.  Takes and releases the xa_lock.  May sleep
 * if the @gfp flags permit.
 * Return: %NULL on success, xa_err(-EINVAL) if @entry cannot be stored in
 * an XArray, or xa_err(-ENOMEM) if memory allocation failed.
 */
void *xa_store_range(struct xarray *xa, unsigned long first,
		unsigned long last, void *entry, gfp_t gfp)
{
	XA_STATE(xas, xa, 0);

	if (WARN_ON_ONCE(xa_is_internal(entry)))
		return XA_ERROR(-EINVAL);
	if (last < first)
		return XA_ERROR(-EINVAL);

	do {
		xas_lock(&xas);
		if (entry) {
			unsigned int order = BITS_PER_LONG;
			if (last + 1)
				order = __ffs(last + 1);
			xas_set_order(&xas, last, order);
			xas_create(&xas, true);
			if (xas_error(&xas))
				goto unlock;
		}
		do {
			xas_set_range(&xas, first, last);
			xas_store(&xas, entry);
			if (xas_error(&xas))
				goto unlock;
			first += xas_size(&xas);
		} while (first <= last);
unlock:
		xas_unlock(&xas);
	} while (xas_nomem(&xas, gfp));

	return xas_result(&xas, NULL);
}
EXPORT_SYMBOL(xa_store_range);

/**
 * xa_get_order() - Get the order of an entry.
 * @xa: XArray.
 * @index: Index of the entry.
 *
 * Return: A number between 0 and 63 indicating the order of the entry.
 */
int xa_get_order(struct xarray *xa, unsigned long index)
{
	XA_STATE(xas, xa, index);
	void *entry;
	int order = 0;

	rcu_read_lock();
	entry = xas_load(&xas);

	if (!entry)
		goto unlock;

	if (!xas.xa_node)
		goto unlock;

	for (;;) {
		unsigned int slot = xas.xa_offset + (1 << order);

		if (slot >= XA_CHUNK_SIZE)
			break;
		if (!xa_is_sibling(xas.xa_node->slots[slot]))
			break;
		order++;
	}

	order += xas.xa_node->shift;
unlock:
	rcu_read_unlock();

	return order;
}
EXPORT_SYMBOL(xa_get_order);
#endif
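
/**
 * __xa_alloc() - Find somewhere to store this entry in the XArray.
 * @xa: XArray.
 * @id: Pointer to ID.
 * @entry: New entry.
 * @limit: Range for allocated ID.
 * @gfp: Memory allocation flags.
 *
 * Finds an empty entry in @xa between @limit.min and @limit.max,
 * stores the index into the @id pointer, then stores the entry at
 * that index.  A concurrent lookup will not see an uninitialised @id.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 on success, -ENOMEM if memory could not be allocated or
 * -EBUSY if there are no free entries in @limit.
 */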
int __xa_alloc(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, gfp_t gfp)
{
	XA_STATE(xas, xa, 0);

	if (WARN_ON_ONCE(xa_is_advanced(entry)))
		return -EINVAL;
	if (WARN_ON_ONCE(!xa_track_free(xa)))
		return -EINVAL;

	if (!entry)
		entry = XA_ZERO_ENTRY;

	do {
		xas.xa_index = limit.min;
		xas_find_marked(&xas, limit.max, XA_FREE_MARK);
		if (xas.xa_node == XAS_RESTART)
			xas_set_err(&xas, -EBUSY);
		else
			*id = xas.xa_index;
		xas_store(&xas, entry);
		xas_clear_mark(&xas, XA_FREE_MARK);
	} while (__xas_nomem(&xas, gfp));

	return xas_error(&xas);
}
EXPORT_SYMBOL(__xa_alloc);
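
/**
 * __xa_alloc_cyclic() - Find somewhere to store this entry in the XArray.
 * @xa: XArray.
 * @id: Pointer to ID.
 * @entry: New entry.
 * @limit: Range of allocated ID.
 * @next: Pointer to next ID to allocate.
 * @gfp: Memory allocation flags.
 *
 * Finds an empty entry in @xa between @limit.min and @limit.max,
 * stores the index into the @id pointer, then stores the entry at
 * that index.  A concurrent lookup will not see an uninitialised @id.
 * The search for an empty entry will start at @next and will wrap
 * around if necessary.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.  May
 * release and reacquire xa_lock if @gfp flags permit.
 * Return: 0 if the allocation succeeded without wrapping.  1 if the
 * allocation succeeded after wrapping, -ENOMEM if memory could not be
 * allocated or -EBUSY if there are no free entries in @limit.
 */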
int __xa_alloc_cyclic(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, u32 *next, gfp_t gfp)
{
	u32 min = limit.min;
	int ret;

	limit.min = max(min, *next);
	ret = __xa_alloc(xa, id, entry, limit, gfp);
	if ((xa->xa_flags & XA_FLAGS_ALLOC_WRAPPED) && ret == 0) {
		xa->xa_flags &= ~XA_FLAGS_ALLOC_WRAPPED;
		ret = 1;
	}

	if (ret < 0 && limit.min > min) {
		limit.min = min;
		ret = __xa_alloc(xa, id, entry, limit, gfp);
		if (ret == 0)
			ret = 1;
	}

	if (ret >= 0) {
		*next = *id + 1;
		if (*next == 0)
			xa->xa_flags |= XA_FLAGS_ALLOC_WRAPPED;
	}
	return ret;
}
EXPORT_SYMBOL(__xa_alloc_cyclic);

/**
 * __xa_set_mark() - Set this mark on this entry while locked.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Attempting to set a mark on a %NULL entry does not succeed.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 */
void __xa_set_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	XA_STATE(xas, xa, index);
	void *entry = xas_load(&xas);

	if (entry)
		xas_set_mark(&xas, mark);
}
EXPORT_SYMBOL(__xa_set_mark);

/**
 * __xa_clear_mark() - Clear this mark on this entry while locked.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Context: Any context.  Expects xa_lock to be held on entry.
 */
void __xa_clear_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	XA_STATE(xas, xa, index);
	void *entry = xas_load(&xas);

	if (entry)
		xas_clear_mark(&xas, mark);
}
EXPORT_SYMBOL(__xa_clear_mark);

/**
 * xa_get_mark() - Inquire whether this mark is set on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * This function uses the RCU read lock, so the result may be out of date
 * by the time it returns.  If you need the result to be stable, use a lock.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: True if the entry at @index has this mark set, false if it doesn't.
 */
bool xa_get_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	XA_STATE(xas, xa, index);
	void *entry;

	rcu_read_lock();
	entry = xas_start(&xas);
	while (xas_get_mark(&xas, mark)) {
		if (!xa_is_node(entry))
			goto found;
		entry = xas_descend(&xas, xa_to_node(entry));
	}
	rcu_read_unlock();
	return false;
found:
	rcu_read_unlock();
	return true;
}
EXPORT_SYMBOL(xa_get_mark);

/**
 * xa_set_mark() - Set this mark on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Attempting to set a mark on a %NULL entry does not succeed.
 *
 * Context: Process context.  Takes and releases the xa_lock.
 */
void xa_set_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	xa_lock(xa);
	__xa_set_mark(xa, index, mark);
	xa_unlock(xa);
}
EXPORT_SYMBOL(xa_set_mark);

/**
 * xa_clear_mark() - Clear this mark on this entry.
 * @xa: XArray.
 * @index: Index of entry.
 * @mark: Mark number.
 *
 * Clearing a mark always succeeds.
 *
 * Context: Process context.  Takes and releases the xa_lock.
 */
void xa_clear_mark(struct xarray *xa, unsigned long index, xa_mark_t mark)
{
	xa_lock(xa);
	__xa_clear_mark(xa, index, mark);
	xa_unlock(xa);
}
EXPORT_SYMBOL(xa_clear_mark);

/**
 * xa_find() - Search the XArray for an entry.
 * @xa: XArray.
 * @indexp: Pointer to an index.
 * @max: Maximum index to search to.
 * @filter: Selection criterion.
 *
 * Finds the entry in @xa which matches the @filter, and has the lowest
 * index that is at least @indexp and no more than @max.
 * If an entry is found, @indexp is updated to be the index of the entry.
 * This function is protected by the RCU read lock, so it may not find
 * entries which are being simultaneously added.  It will not return an
 * %XA_RETRY_ENTRY; if you need to see retry entries, use xas_find().
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The entry, if found, otherwise %NULL.
 */
void *xa_find(struct xarray *xa, unsigned long *indexp,
			unsigned long max, xa_mark_t filter)
{
	XA_STATE(xas, xa, *indexp);
	void *entry;

	rcu_read_lock();
	do {
		if ((__force unsigned int)filter < XA_MAX_MARKS)
			entry = xas_find_marked(&xas, max, filter);
		else
			entry = xas_find(&xas, max);
	} while (xas_retry(&xas, entry));
	rcu_read_unlock();

	if (entry)
		*indexp = xas.xa_index;
	return entry;
}
EXPORT_SYMBOL(xa_find);

static bool xas_sibling(struct xa_state *xas)
{
	struct xa_node *node = xas->xa_node;
	unsigned long mask;

	if (!IS_ENABLED(CONFIG_XARRAY_MULTI) || !node)
		return false;
	mask = (XA_CHUNK_SIZE << node->shift) - 1;
	return (xas->xa_index & mask) >
		((unsigned long)xas->xa_offset << node->shift);
}

/**
 * xa_find_after() - Search the XArray for a present entry.
 * @xa: XArray.
 * @indexp: Pointer to an index.
 * @max: Maximum index to search to.
 * @filter: Selection criterion.
 *
 * Finds the entry in @xa which matches the @filter and has the lowest
 * index that is above @indexp and no more than @max.
 * If an entry is found, @indexp is updated to be the index of the entry.
 * This function is protected by the RCU read lock, so it may miss entries
 * which are being simultaneously added.  It will not return an
 * %XA_RETRY_ENTRY; if you need to see retry entries, use xas_find().
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The pointer, if found, otherwise %NULL.
 */
void *xa_find_after(struct xarray *xa, unsigned long *indexp,
			unsigned long max, xa_mark_t filter)
{
	XA_STATE(xas, xa, *indexp + 1);
	void *entry;

	if (xas.xa_index == 0)
		return NULL;

	rcu_read_lock();
	for (;;) {
		if ((__force unsigned int)filter < XA_MAX_MARKS)
			entry = xas_find_marked(&xas, max, filter);
		else
			entry = xas_find(&xas, max);

		if (xas_invalid(&xas))
			break;
		if (xas_sibling(&xas))
			continue;
		if (!xas_retry(&xas, entry))
			break;
	}
	rcu_read_unlock();

	if (entry)
		*indexp = xas.xa_index;
	return entry;
}
EXPORT_SYMBOL(xa_find_after);

static unsigned int xas_extract_present(struct xa_state *xas, void **dst,
			unsigned long max, unsigned int n)
{
	void *entry;
	unsigned int i = 0;

	rcu_read_lock();
	xas_for_each(xas, entry, max) {
		if (xas_retry(xas, entry))
			continue;
		dst[i++] = entry;
		if (i == n)
			break;
	}
	rcu_read_unlock();

	return i;
}

static unsigned int xas_extract_marked(struct xa_state *xas, void **dst,
			unsigned long max, unsigned int n, xa_mark_t mark)
{
	void *entry;
	unsigned int i = 0;

	rcu_read_lock();
	xas_for_each_marked(xas, entry, max, mark) {
		if (xas_retry(xas, entry))
			continue;
		dst[i++] = entry;
		if (i == n)
			break;
	}
	rcu_read_unlock();

	return i;
}
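
/**
 * xa_extract() - Copy selected entries from the XArray into a normal array.
 * @xa: The source XArray to copy from.
 * @dst: The buffer to copy entries into.
 * @start: The first index in the XArray eligible to be selected.
 * @max: The last index in the XArray eligible to be selected.
 * @n: The maximum number of entries to copy.
 * @filter: Selection criterion.
 *
 * Copies up to @n entries that match @filter from the XArray.  The
 * copied entries will have indices between @start and @max, inclusive.
 *
 * The @filter may be an XArray mark value, in which case entries which are
 * marked with that mark will be copied.  It may also be %XA_PRESENT, in
 * which case all entries which are not %NULL will be copied.
 *
 * The entries returned may not represent a snapshot of the XArray at a
 * moment in time.  For example, if another thread stores to index 5, then
 * index 10, calling xa_extract() may return the old contents of index 5
 * and the new contents of index 10.  Indices not modified while this
 * function is running will not be skipped.
 *
 * If you need stronger guarantees, holding the xa_lock across calls to this
 * function will prevent concurrent modification.
 *
 * Context: Any context.  Takes and releases the RCU lock.
 * Return: The number of entries copied.
 */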
unsigned int xa_extract(struct xarray *xa, void **dst, unsigned long start,
			unsigned long max, unsigned int n, xa_mark_t filter)
{
	XA_STATE(xas, xa, start);

	if (!n)
		return 0;

	if ((__force unsigned int)filter < XA_MAX_MARKS)
		return xas_extract_marked(&xas, dst, max, n, filter);
	return xas_extract_present(&xas, dst, max, n);
}
EXPORT_SYMBOL(xa_extract);

/**
 * xa_delete_node() - Private interface for workingset code.
 * @node: Node to be removed from the tree.
 * @update: Function to call to update ancestor nodes.
 *
 * Context: xa_lock must be held on entry and will not be released.
 */
void xa_delete_node(struct xa_node *node, xa_update_node_t update)
{
	struct xa_state xas = {
		.xa = node->array,
		.xa_index = (unsigned long)node->offset <<
				(node->shift + XA_CHUNK_SHIFT),
		.xa_shift = node->shift + XA_CHUNK_SHIFT,
		.xa_offset = node->offset,
		.xa_node = xa_parent_locked(node->array, node),
		.xa_update = update,
	};

	xas_store(&xas, NULL);
}
EXPORT_SYMBOL_GPL(xa_delete_node);
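
/**
 * xa_destroy() - Free all internal data structures.
 * @xa: XArray.
 *
 * After calling this function, the XArray is empty and has freed all memory
 * allocated for its internal data structures.  You are responsible for
 * freeing the objects referenced by the XArray.
 *
 * Context: Any context.  Takes and releases the xa_lock, interrupt-safe.
 */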
void xa_destroy(struct xarray *xa)
{
	XA_STATE(xas, xa, 0);
	unsigned long flags;
	void *entry;

	xas.xa_node = NULL;
	xas_lock_irqsave(&xas, flags);
	entry = xa_head_locked(xa);
	RCU_INIT_POINTER(xa->xa_head, NULL);
	xas_init_marks(&xas);
	if (xa_zero_busy(xa))
		xa_mark_clear(xa, XA_FREE_MARK);
	/* lockdep checks we're still holding the lock in xas_free_nodes() */
	if (xa_is_node(entry))
		xas_free_nodes(&xas, xa_to_node(entry));
	xas_unlock_irqrestore(&xas, flags);
}
EXPORT_SYMBOL(xa_destroy);

#ifdef XA_DEBUG
void xa_dump_node(const struct xa_node *node)
{
	unsigned i, j;

	if (!node)
		return;
	if ((unsigned long)node & 3) {
		pr_cont("node %px\n", node);
		return;
	}

	pr_cont("node %px %s %d parent %px shift %d count %d values %d "
		"array %px list %px %px marks",
		node, node->parent ? "offset" : "max", node->offset,
		node->parent, node->shift, node->count, node->nr_values,
		node->array, node->private_list.prev, node->private_list.next);
	for (i = 0; i < XA_MAX_MARKS; i++)
		for (j = 0; j < XA_MARK_LONGS; j++)
			pr_cont(" %lx", node->marks[i][j]);
	pr_cont("\n");
}

void xa_dump_index(unsigned long index, unsigned int shift)
{
	if (!shift)
		pr_info("%lu: ", index);
	else if (shift >= BITS_PER_LONG)
		pr_info("0-%lu: ", ~0UL);
	else
		pr_info("%lu-%lu: ", index, index | ((1UL << shift) - 1));
}

void xa_dump_entry(const void *entry, unsigned long index, unsigned long shift)
{
	if (!entry)
		return;

	xa_dump_index(index, shift);

	if (xa_is_node(entry)) {
		if (shift == 0) {
			pr_cont("%px\n", entry);
		} else {
			unsigned long i;
			struct xa_node *node = xa_to_node(entry);
			xa_dump_node(node);
			for (i = 0; i < XA_CHUNK_SIZE; i++)
				xa_dump_entry(node->slots[i],
				      index + (i << node->shift), node->shift);
		}
	} else if (xa_is_value(entry))
		pr_cont("value %ld (0x%lx) [%px]\n", xa_to_value(entry),
						xa_to_value(entry), entry);
	else if (!xa_is_internal(entry))
		pr_cont("%px\n", entry);
	else if (xa_is_retry(entry))
		pr_cont("retry (%ld)\n", xa_to_internal(entry));
	else if (xa_is_sibling(entry))
		pr_cont("sibling (slot %ld)\n", xa_to_sibling(entry));
	else if (xa_is_zero(entry))
		pr_cont("zero (%ld)\n", xa_to_internal(entry));
	else
		pr_cont("UNKNOWN ENTRY (%px)\n", entry);
}

void xa_dump(const struct xarray *xa)
{
	void *entry = xa->xa_head;
	unsigned int shift = 0;

	pr_info("xarray: %px head %px flags %x marks %d %d %d\n", xa, entry,
			xa->xa_flags, xa_marked(xa, XA_MARK_0),
			xa_marked(xa, XA_MARK_1), xa_marked(xa, XA_MARK_2));
	if (xa_is_node(entry))
		shift = xa_to_node(entry)->shift + XA_CHUNK_SHIFT;
	xa_dump_entry(entry, 0, shift);
}
#endif