0001
0002
0003
0004
0005
0006
0007
0008
0009 #include <linux/debugfs.h>
0010 #include <linux/device.h>
0011 #include <linux/rbtree.h>
0012 #include <linux/seq_file.h>
0013 #include <linux/slab.h>
0014
0015 #include "internal.h"
0016
/* Forward declarations: regcache_rbtree_init() populates the cache through
 * the write path and unwinds through the exit path on error, so both are
 * referenced before their definitions below. */
static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
				 unsigned int value);
static int regcache_rbtree_exit(struct regmap *map);
0020
/*
 * One node of the cache rbtree: a contiguous block of cached register
 * values covering base_reg .. base_reg + (blklen - 1) * reg_stride.
 */
struct regcache_rbtree_node {
	/* block of adjacent register values, blklen words of
	 * map->cache_word_size bytes each */
	void *block;
	/* bitmap: which slots in the block hold a valid cached value */
	long *cache_present;
	/* lowest register number handled by this block */
	unsigned int base_reg;
	/* number of register slots in the block */
	unsigned int blklen;
	/* the actual rbtree node linking this block into the tree */
	struct rb_node node;
};
0033
/* Per-map cache context: the tree of blocks plus a one-entry MRU cache
 * (cached_rbnode) that short-circuits repeated lookups of the same block. */
struct regcache_rbtree_ctx {
	struct rb_root root;
	struct regcache_rbtree_node *cached_rbnode;
};
0038
0039 static inline void regcache_rbtree_get_base_top_reg(
0040 struct regmap *map,
0041 struct regcache_rbtree_node *rbnode,
0042 unsigned int *base, unsigned int *top)
0043 {
0044 *base = rbnode->base_reg;
0045 *top = rbnode->base_reg + ((rbnode->blklen - 1) * map->reg_stride);
0046 }
0047
0048 static unsigned int regcache_rbtree_get_register(struct regmap *map,
0049 struct regcache_rbtree_node *rbnode, unsigned int idx)
0050 {
0051 return regcache_get_val(map, rbnode->block, idx);
0052 }
0053
/* Store @val at word index @idx of @rbnode's block and mark that slot as
 * holding a valid cached value. */
static void regcache_rbtree_set_register(struct regmap *map,
					 struct regcache_rbtree_node *rbnode,
					 unsigned int idx, unsigned int val)
{
	set_bit(idx, rbnode->cache_present);
	regcache_set_val(map, rbnode->block, idx, val);
}
0061
0062 static struct regcache_rbtree_node *regcache_rbtree_lookup(struct regmap *map,
0063 unsigned int reg)
0064 {
0065 struct regcache_rbtree_ctx *rbtree_ctx = map->cache;
0066 struct rb_node *node;
0067 struct regcache_rbtree_node *rbnode;
0068 unsigned int base_reg, top_reg;
0069
0070 rbnode = rbtree_ctx->cached_rbnode;
0071 if (rbnode) {
0072 regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
0073 &top_reg);
0074 if (reg >= base_reg && reg <= top_reg)
0075 return rbnode;
0076 }
0077
0078 node = rbtree_ctx->root.rb_node;
0079 while (node) {
0080 rbnode = rb_entry(node, struct regcache_rbtree_node, node);
0081 regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
0082 &top_reg);
0083 if (reg >= base_reg && reg <= top_reg) {
0084 rbtree_ctx->cached_rbnode = rbnode;
0085 return rbnode;
0086 } else if (reg > top_reg) {
0087 node = node->rb_right;
0088 } else if (reg < base_reg) {
0089 node = node->rb_left;
0090 }
0091 }
0092
0093 return NULL;
0094 }
0095
0096 static int regcache_rbtree_insert(struct regmap *map, struct rb_root *root,
0097 struct regcache_rbtree_node *rbnode)
0098 {
0099 struct rb_node **new, *parent;
0100 struct regcache_rbtree_node *rbnode_tmp;
0101 unsigned int base_reg_tmp, top_reg_tmp;
0102 unsigned int base_reg;
0103
0104 parent = NULL;
0105 new = &root->rb_node;
0106 while (*new) {
0107 rbnode_tmp = rb_entry(*new, struct regcache_rbtree_node, node);
0108
0109 regcache_rbtree_get_base_top_reg(map, rbnode_tmp, &base_reg_tmp,
0110 &top_reg_tmp);
0111
0112 base_reg = rbnode->base_reg;
0113 parent = *new;
0114
0115 if (base_reg >= base_reg_tmp &&
0116 base_reg <= top_reg_tmp)
0117 return 0;
0118 else if (base_reg > top_reg_tmp)
0119 new = &((*new)->rb_right);
0120 else if (base_reg < base_reg_tmp)
0121 new = &((*new)->rb_left);
0122 }
0123
0124
0125 rb_link_node(&rbnode->node, parent, new);
0126 rb_insert_color(&rbnode->node, root);
0127
0128 return 1;
0129 }
0130
0131 #ifdef CONFIG_DEBUG_FS
0132 static int rbtree_show(struct seq_file *s, void *ignored)
0133 {
0134 struct regmap *map = s->private;
0135 struct regcache_rbtree_ctx *rbtree_ctx = map->cache;
0136 struct regcache_rbtree_node *n;
0137 struct rb_node *node;
0138 unsigned int base, top;
0139 size_t mem_size;
0140 int nodes = 0;
0141 int registers = 0;
0142 int this_registers, average;
0143
0144 map->lock(map->lock_arg);
0145
0146 mem_size = sizeof(*rbtree_ctx);
0147
0148 for (node = rb_first(&rbtree_ctx->root); node != NULL;
0149 node = rb_next(node)) {
0150 n = rb_entry(node, struct regcache_rbtree_node, node);
0151 mem_size += sizeof(*n);
0152 mem_size += (n->blklen * map->cache_word_size);
0153 mem_size += BITS_TO_LONGS(n->blklen) * sizeof(long);
0154
0155 regcache_rbtree_get_base_top_reg(map, n, &base, &top);
0156 this_registers = ((top - base) / map->reg_stride) + 1;
0157 seq_printf(s, "%x-%x (%d)\n", base, top, this_registers);
0158
0159 nodes++;
0160 registers += this_registers;
0161 }
0162
0163 if (nodes)
0164 average = registers / nodes;
0165 else
0166 average = 0;
0167
0168 seq_printf(s, "%d nodes, %d registers, average %d registers, used %zu bytes\n",
0169 nodes, registers, average, mem_size);
0170
0171 map->unlock(map->lock_arg);
0172
0173 return 0;
0174 }
0175
/* Generates rbtree_open()/rbtree_fops around rbtree_show() above. */
DEFINE_SHOW_ATTRIBUTE(rbtree);

/* Hook called by the regmap debugfs core to add the read-only "rbtree"
 * dump file under the map's debugfs directory. */
static void rbtree_debugfs_init(struct regmap *map)
{
	debugfs_create_file("rbtree", 0400, map->debugfs, map, &rbtree_fops);
}
0182 #endif
0183
0184 static int regcache_rbtree_init(struct regmap *map)
0185 {
0186 struct regcache_rbtree_ctx *rbtree_ctx;
0187 int i;
0188 int ret;
0189
0190 map->cache = kmalloc(sizeof *rbtree_ctx, GFP_KERNEL);
0191 if (!map->cache)
0192 return -ENOMEM;
0193
0194 rbtree_ctx = map->cache;
0195 rbtree_ctx->root = RB_ROOT;
0196 rbtree_ctx->cached_rbnode = NULL;
0197
0198 for (i = 0; i < map->num_reg_defaults; i++) {
0199 ret = regcache_rbtree_write(map,
0200 map->reg_defaults[i].reg,
0201 map->reg_defaults[i].def);
0202 if (ret)
0203 goto err;
0204 }
0205
0206 return 0;
0207
0208 err:
0209 regcache_rbtree_exit(map);
0210 return ret;
0211 }
0212
0213 static int regcache_rbtree_exit(struct regmap *map)
0214 {
0215 struct rb_node *next;
0216 struct regcache_rbtree_ctx *rbtree_ctx;
0217 struct regcache_rbtree_node *rbtree_node;
0218
0219
0220 rbtree_ctx = map->cache;
0221 if (!rbtree_ctx)
0222 return 0;
0223
0224
0225 next = rb_first(&rbtree_ctx->root);
0226 while (next) {
0227 rbtree_node = rb_entry(next, struct regcache_rbtree_node, node);
0228 next = rb_next(&rbtree_node->node);
0229 rb_erase(&rbtree_node->node, &rbtree_ctx->root);
0230 kfree(rbtree_node->cache_present);
0231 kfree(rbtree_node->block);
0232 kfree(rbtree_node);
0233 }
0234
0235
0236 kfree(map->cache);
0237 map->cache = NULL;
0238
0239 return 0;
0240 }
0241
0242 static int regcache_rbtree_read(struct regmap *map,
0243 unsigned int reg, unsigned int *value)
0244 {
0245 struct regcache_rbtree_node *rbnode;
0246 unsigned int reg_tmp;
0247
0248 rbnode = regcache_rbtree_lookup(map, reg);
0249 if (rbnode) {
0250 reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
0251 if (!test_bit(reg_tmp, rbnode->cache_present))
0252 return -ENOENT;
0253 *value = regcache_rbtree_get_register(map, rbnode, reg_tmp);
0254 } else {
0255 return -ENOENT;
0256 }
0257
0258 return 0;
0259 }
0260
0261
/*
 * Grow @rbnode so it covers [base_reg, top_reg] and store @value in the
 * slot for @reg.
 *
 * The data block is krealloc()ed to the new length, and the presence
 * bitmap is reallocated (with the new words zeroed) only when it needs
 * more long words.  When the block grows downwards (@reg becomes the new
 * lowest register, i.e. pos == 0), the existing data and presence bits
 * are shifted up by @offset slots to make room at the front; growing
 * upwards needs no move.
 *
 * Returns 0 on success or -ENOMEM.  On allocation failure the node is
 * left valid with its previous length and contents.
 */
static int regcache_rbtree_insert_to_block(struct regmap *map,
					   struct regcache_rbtree_node *rbnode,
					   unsigned int base_reg,
					   unsigned int top_reg,
					   unsigned int reg,
					   unsigned int value)
{
	unsigned int blklen;
	unsigned int pos, offset;
	unsigned long *present;
	u8 *blk;

	/* new slot count, @reg's slot, and how far the old data shifts up */
	blklen = (top_reg - base_reg) / map->reg_stride + 1;
	pos = (reg - base_reg) / map->reg_stride;
	offset = (rbnode->base_reg - base_reg) / map->reg_stride;

	blk = krealloc(rbnode->block,
		       blklen * map->cache_word_size,
		       GFP_KERNEL);
	if (!blk)
		return -ENOMEM;

	rbnode->block = blk;

	if (BITS_TO_LONGS(blklen) > BITS_TO_LONGS(rbnode->blklen)) {
		present = krealloc(rbnode->cache_present,
				   BITS_TO_LONGS(blklen) * sizeof(*present),
				   GFP_KERNEL);
		if (!present)
			return -ENOMEM;

		/* zero only the newly appended bitmap words */
		memset(present + BITS_TO_LONGS(rbnode->blklen), 0,
		       (BITS_TO_LONGS(blklen) - BITS_TO_LONGS(rbnode->blklen))
		       * sizeof(*present));
	} else {
		present = rbnode->cache_present;
	}

	/* insert the register value in the correct place in the rbnode block */
	if (pos == 0) {
		memmove(blk + offset * map->cache_word_size,
			blk, rbnode->blklen * map->cache_word_size);
		bitmap_shift_left(present, present, offset, blklen);
	}

	/* update the rbnode block, its size and the base register */
	rbnode->blklen = blklen;
	rbnode->base_reg = base_reg;
	rbnode->cache_present = present;

	regcache_rbtree_set_register(map, rbnode, pos, value);
	return 0;
}
0315
0316 static struct regcache_rbtree_node *
0317 regcache_rbtree_node_alloc(struct regmap *map, unsigned int reg)
0318 {
0319 struct regcache_rbtree_node *rbnode;
0320 const struct regmap_range *range;
0321 int i;
0322
0323 rbnode = kzalloc(sizeof(*rbnode), GFP_KERNEL);
0324 if (!rbnode)
0325 return NULL;
0326
0327
0328 if (map->rd_table) {
0329 for (i = 0; i < map->rd_table->n_yes_ranges; i++) {
0330 if (regmap_reg_in_range(reg,
0331 &map->rd_table->yes_ranges[i]))
0332 break;
0333 }
0334
0335 if (i != map->rd_table->n_yes_ranges) {
0336 range = &map->rd_table->yes_ranges[i];
0337 rbnode->blklen = (range->range_max - range->range_min) /
0338 map->reg_stride + 1;
0339 rbnode->base_reg = range->range_min;
0340 }
0341 }
0342
0343 if (!rbnode->blklen) {
0344 rbnode->blklen = 1;
0345 rbnode->base_reg = reg;
0346 }
0347
0348 rbnode->block = kmalloc_array(rbnode->blklen, map->cache_word_size,
0349 GFP_KERNEL);
0350 if (!rbnode->block)
0351 goto err_free;
0352
0353 rbnode->cache_present = kcalloc(BITS_TO_LONGS(rbnode->blklen),
0354 sizeof(*rbnode->cache_present),
0355 GFP_KERNEL);
0356 if (!rbnode->cache_present)
0357 goto err_free_block;
0358
0359 return rbnode;
0360
0361 err_free_block:
0362 kfree(rbnode->block);
0363 err_free:
0364 kfree(rbnode);
0365 return NULL;
0366 }
0367
0368 static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
0369 unsigned int value)
0370 {
0371 struct regcache_rbtree_ctx *rbtree_ctx;
0372 struct regcache_rbtree_node *rbnode, *rbnode_tmp;
0373 struct rb_node *node;
0374 unsigned int reg_tmp;
0375 int ret;
0376
0377 rbtree_ctx = map->cache;
0378
0379
0380
0381
0382 rbnode = regcache_rbtree_lookup(map, reg);
0383 if (rbnode) {
0384 reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
0385 regcache_rbtree_set_register(map, rbnode, reg_tmp, value);
0386 } else {
0387 unsigned int base_reg, top_reg;
0388 unsigned int new_base_reg, new_top_reg;
0389 unsigned int min, max;
0390 unsigned int max_dist;
0391 unsigned int dist, best_dist = UINT_MAX;
0392
0393 max_dist = map->reg_stride * sizeof(*rbnode_tmp) /
0394 map->cache_word_size;
0395 if (reg < max_dist)
0396 min = 0;
0397 else
0398 min = reg - max_dist;
0399 max = reg + max_dist;
0400
0401
0402 node = rbtree_ctx->root.rb_node;
0403 while (node) {
0404 rbnode_tmp = rb_entry(node, struct regcache_rbtree_node,
0405 node);
0406
0407 regcache_rbtree_get_base_top_reg(map, rbnode_tmp,
0408 &base_reg, &top_reg);
0409
0410 if (base_reg <= max && top_reg >= min) {
0411 if (reg < base_reg)
0412 dist = base_reg - reg;
0413 else if (reg > top_reg)
0414 dist = reg - top_reg;
0415 else
0416 dist = 0;
0417 if (dist < best_dist) {
0418 rbnode = rbnode_tmp;
0419 best_dist = dist;
0420 new_base_reg = min(reg, base_reg);
0421 new_top_reg = max(reg, top_reg);
0422 }
0423 }
0424
0425
0426
0427
0428
0429
0430 if (reg < base_reg)
0431 node = node->rb_left;
0432 else if (reg > top_reg)
0433 node = node->rb_right;
0434 else
0435 break;
0436 }
0437
0438 if (rbnode) {
0439 ret = regcache_rbtree_insert_to_block(map, rbnode,
0440 new_base_reg,
0441 new_top_reg, reg,
0442 value);
0443 if (ret)
0444 return ret;
0445 rbtree_ctx->cached_rbnode = rbnode;
0446 return 0;
0447 }
0448
0449
0450
0451
0452 rbnode = regcache_rbtree_node_alloc(map, reg);
0453 if (!rbnode)
0454 return -ENOMEM;
0455 regcache_rbtree_set_register(map, rbnode,
0456 reg - rbnode->base_reg, value);
0457 regcache_rbtree_insert(map, &rbtree_ctx->root, rbnode);
0458 rbtree_ctx->cached_rbnode = rbnode;
0459 }
0460
0461 return 0;
0462 }
0463
0464 static int regcache_rbtree_sync(struct regmap *map, unsigned int min,
0465 unsigned int max)
0466 {
0467 struct regcache_rbtree_ctx *rbtree_ctx;
0468 struct rb_node *node;
0469 struct regcache_rbtree_node *rbnode;
0470 unsigned int base_reg, top_reg;
0471 unsigned int start, end;
0472 int ret;
0473
0474 rbtree_ctx = map->cache;
0475 for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
0476 rbnode = rb_entry(node, struct regcache_rbtree_node, node);
0477
0478 regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
0479 &top_reg);
0480 if (base_reg > max)
0481 break;
0482 if (top_reg < min)
0483 continue;
0484
0485 if (min > base_reg)
0486 start = (min - base_reg) / map->reg_stride;
0487 else
0488 start = 0;
0489
0490 if (max < top_reg)
0491 end = (max - base_reg) / map->reg_stride + 1;
0492 else
0493 end = rbnode->blklen;
0494
0495 ret = regcache_sync_block(map, rbnode->block,
0496 rbnode->cache_present,
0497 rbnode->base_reg, start, end);
0498 if (ret != 0)
0499 return ret;
0500 }
0501
0502 return regmap_async_complete(map);
0503 }
0504
0505 static int regcache_rbtree_drop(struct regmap *map, unsigned int min,
0506 unsigned int max)
0507 {
0508 struct regcache_rbtree_ctx *rbtree_ctx;
0509 struct regcache_rbtree_node *rbnode;
0510 struct rb_node *node;
0511 unsigned int base_reg, top_reg;
0512 unsigned int start, end;
0513
0514 rbtree_ctx = map->cache;
0515 for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
0516 rbnode = rb_entry(node, struct regcache_rbtree_node, node);
0517
0518 regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
0519 &top_reg);
0520 if (base_reg > max)
0521 break;
0522 if (top_reg < min)
0523 continue;
0524
0525 if (min > base_reg)
0526 start = (min - base_reg) / map->reg_stride;
0527 else
0528 start = 0;
0529
0530 if (max < top_reg)
0531 end = (max - base_reg) / map->reg_stride + 1;
0532 else
0533 end = rbnode->blklen;
0534
0535 bitmap_clear(rbnode->cache_present, start, end - start);
0536 }
0537
0538 return 0;
0539 }
0540
/* rbtree cache backend as registered with the regcache core. */
struct regcache_ops regcache_rbtree_ops = {
	.type = REGCACHE_RBTREE,
	.name = "rbtree",
	.init = regcache_rbtree_init,
	.exit = regcache_rbtree_exit,
#ifdef CONFIG_DEBUG_FS
	.debugfs_init = rbtree_debugfs_init,
#endif
	.read = regcache_rbtree_read,
	.write = regcache_rbtree_write,
	.sync = regcache_rbtree_sync,
	.drop = regcache_rbtree_drop,
};