// SPDX-License-Identifier: GPL-2.0-only
/*
 * multiorder.c: Multi-order radix tree entry testing
 * Copyright (c) 2016 Intel Corporation
 * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
 * Author: Matthew Wilcox <matthew.r.wilcox@intel.com>
 */
#include <linux/radix-tree.h>
#include <linux/slab.h>
#include <linux/errno.h>
#include <pthread.h>

#include "test.h"

/*
 * Insert a multi-order entry covering 2^order indices at @index, retrying
 * the store until the XArray has the memory it needs.  On failure the item
 * is freed and the negative errno from the xa_state is returned.
 */
static int item_insert_order(struct xarray *xa, unsigned long index,
			unsigned order)
{
	XA_STATE_ORDER(xas, xa, index, order);
	struct item *item = item_create(index, order);

	do {
		xas_lock(&xas);
		xas_store(&xas, item);
		xas_unlock(&xas);
	} while (xas_nomem(&xas, GFP_KERNEL));

	if (!xas_error(&xas))
		return 0;

	free(item);
	return xas_error(&xas);
}
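
/*
 * Illustrative sketch, not part of the original test and not called from
 * multiorder_checks(); the function name is hypothetical.  A multi-order
 * entry stored with the helper above is visible at every index it covers,
 * so an order-2 entry at index 4 answers lookups at 4, 5, 6 and 7 with the
 * same item.
 */
static void __attribute__((__unused__)) example_multiorder_lookup(struct xarray *xa)
{
	struct item *item;
	unsigned long i;

	assert(!item_insert_order(xa, 4, 2));
	item = xa_load(xa, 4);
	for (i = 4; i < 8; i++)
		assert(xa_load(xa, i) == item);	/* one entry covers 4..7 */
	assert(item->order == 2);
	item_kill_tree(xa);
}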

void multiorder_iteration(struct xarray *xa)
{
	XA_STATE(xas, xa, 0);
	struct item *item;
	int i, j, err;

#define NUM_ENTRIES 11
	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
	int order[NUM_ENTRIES] = {1, 1, 2, 3,  4,  1,  0,  1,  3,  0, 7};

	printv(1, "Multiorder iteration test\n");

	for (i = 0; i < NUM_ENTRIES; i++) {
		err = item_insert_order(xa, index[i], order[i]);
		assert(!err);
	}

	for (j = 0; j < 256; j++) {
		/* Find the first entry whose last covered index is >= j */
		for (i = 0; i < NUM_ENTRIES; i++)
			if (j <= (index[i] | ((1 << order[i]) - 1)))
				break;

		xas_set(&xas, j);
		xas_for_each(&xas, item, ULONG_MAX) {
			int height = order[i] / XA_CHUNK_SHIFT;
			int shift = height * XA_CHUNK_SHIFT;
			unsigned long mask = (1UL << order[i]) - 1;

			assert((xas.xa_index | mask) == (index[i] | mask));
			assert(xas.xa_node->shift == shift);
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (index[i] | mask));
			assert(item->order == order[i]);
			i++;
		}
	}

	item_kill_tree(xa);
}
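
/*
 * Illustrative sketch, not part of the original test: the index arithmetic
 * used above in miniature.  An order-3 entry at index 8 covers indices
 * 8..15, so its last covered index is 8 | ((1 << 3) - 1) == 15, and every
 * index inside the range compares equal to the start once OR-ed with the
 * same mask.
 */
static void __attribute__((__unused__)) example_order_mask_arithmetic(void)
{
	unsigned long index = 8;
	unsigned int order = 3;
	unsigned long mask = (1UL << order) - 1;

	assert((index | mask) == 15);			/* last covered index */
	assert((11UL | mask) == (index | mask));	/* 11 lies within 8..15 */
	assert((16UL | mask) != (index | mask));	/* 16 does not */
}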

void multiorder_tagged_iteration(struct xarray *xa)
{
	XA_STATE(xas, xa, 0);
	struct item *item;
	int i, j;

#define MT_NUM_ENTRIES 9
	int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
	int order[MT_NUM_ENTRIES] = {1, 0, 2, 4,  3,  1,  3,  0,   7};

#define TAG_ENTRIES 7
	int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};

	printv(1, "Multiorder tagged iteration test\n");

	for (i = 0; i < MT_NUM_ENTRIES; i++)
		assert(!item_insert_order(xa, index[i], order[i]));

	assert(!xa_marked(xa, XA_MARK_1));

	for (i = 0; i < TAG_ENTRIES; i++)
		xa_set_mark(xa, tag_index[i], XA_MARK_1);

	for (j = 0; j < 256; j++) {
		int k;

		/* Find the first tagged entry whose last covered index is >= j */
		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		xas_set(&xas, j);
		xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_1) {
			unsigned long mask;
			/* k is the slot of tag_index[i] in index[]/order[] */
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1UL << order[k]) - 1;

			assert((xas.xa_index | mask) == (tag_index[i] | mask));
			assert(!xa_is_internal(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/* Copy XA_MARK_1 to XA_MARK_2 on every tagged entry */
	assert(tag_tagged_items(xa, 0, ULONG_MAX, TAG_ENTRIES, XA_MARK_1,
				XA_MARK_2) == TAG_ENTRIES);

	for (j = 0; j < 256; j++) {
		int mask, k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		xas_set(&xas, j);
		xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_2) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1 << order[k]) - 1;

			assert((xas.xa_index | mask) == (tag_index[i] | mask));
			assert(!xa_is_internal(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/*
	 * Starting the range at index 1 still finds the order-1 entry at
	 * index 0, because that entry also covers index 1.
	 */
	assert(tag_tagged_items(xa, 1, ULONG_MAX, MT_NUM_ENTRIES * 2, XA_MARK_1,
				XA_MARK_0) == TAG_ENTRIES);
	i = 0;
	xas_set(&xas, 0);
	xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_0) {
		assert(xas.xa_index == tag_index[i]);
		i++;
	}
	assert(i == TAG_ENTRIES);

	item_kill_tree(xa);
}
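
/*
 * Illustrative sketch, not part of the original test: marks apply to a
 * multi-order entry as a whole, so setting a mark through any index the
 * entry covers makes it visible at every other covered index.  The
 * function name is hypothetical.
 */
static void __attribute__((__unused__)) example_multiorder_marks(struct xarray *xa)
{
	assert(!item_insert_order(xa, 8, 3));	/* covers 8..15 */

	xa_set_mark(xa, 11, XA_MARK_0);		/* mark via a covered index */
	assert(xa_get_mark(xa, 8, XA_MARK_0));
	assert(xa_get_mark(xa, 15, XA_MARK_0));
	assert(!xa_get_mark(xa, 16, XA_MARK_0));	/* outside the entry */

	item_kill_tree(xa);
}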

/*
 * Race test: one creator thread repeatedly inserts and deletes a
 * multi-order entry while the remaining threads iterate over the array
 * under the RCU read lock.
 */
bool stop_iteration = false;

static void *creator_func(void *ptr)
{
	/* 'order' is set up to ensure we have sibling entries */
	unsigned int order = RADIX_TREE_MAP_SHIFT - 1;
	struct radix_tree_root *tree = ptr;
	int i;

	for (i = 0; i < 10000; i++) {
		item_insert_order(tree, 0, order);
		item_delete_rcu(tree, 0);
	}

	stop_iteration = true;
	return NULL;
}

static void *iterator_func(void *ptr)
{
	XA_STATE(xas, ptr, 0);
	struct item *item;

	while (!stop_iteration) {
		rcu_read_lock();
		xas_for_each(&xas, item, ULONG_MAX) {
			/* Re-walk if we hit a retry entry left by a concurrent update */
			if (xas_retry(&xas, item))
				continue;

			item_sanity(item, xas.xa_index);
		}
		rcu_read_unlock();
	}
	return NULL;
}

static void multiorder_iteration_race(struct xarray *xa)
{
	const int num_threads = sysconf(_SC_NPROCESSORS_ONLN);
	pthread_t worker_thread[num_threads];
	int i;

	pthread_create(&worker_thread[0], NULL, &creator_func, xa);
	for (i = 1; i < num_threads; i++)
		pthread_create(&worker_thread[i], NULL, &iterator_func, xa);

	for (i = 0; i < num_threads; i++)
		pthread_join(worker_thread[i], NULL);

	item_kill_tree(xa);
}
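
/*
 * Illustrative sketch, not part of the original test: when no other thread
 * is modifying the array, the same sanity walk can be written with the
 * higher-level xa_for_each() macro, which handles locking and retry
 * entries itself.  The explicit rcu_read_lock()/xas_retry() form used by
 * iterator_func() above is what keeps the walk safe while creator_func()
 * is concurrently inserting and deleting entries.
 */
static void __attribute__((__unused__)) example_xa_for_each_walk(struct xarray *xa)
{
	struct item *item;
	unsigned long index;

	xa_for_each(xa, index, item)
		item_sanity(item, index);
}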

static DEFINE_XARRAY(array);

void multiorder_checks(void)
{
	multiorder_iteration(&array);
	multiorder_tagged_iteration(&array);
	multiorder_iteration_race(&array);

	/* Free any nodes preallocated for CPU 0 */
	radix_tree_cpu_dead(0);
}

/*
 * main() is weak so that this file can also be linked into a larger test
 * runner that provides its own main().
 */
int __weak main(void)
{
	rcu_register_thread();
	radix_tree_init();
	multiorder_checks();
	rcu_unregister_thread();
	return 0;
}