/* linux/drivers/base/regmap/regcache-rbtree.c */
   1// SPDX-License-Identifier: GPL-2.0
   2//
   3// Register cache access API - rbtree caching support
   4//
   5// Copyright 2011 Wolfson Microelectronics plc
   6//
   7// Author: Dimitris Papastamos <dp@opensource.wolfsonmicro.com>
   8
   9#include <linux/debugfs.h>
  10#include <linux/device.h>
  11#include <linux/rbtree.h>
  12#include <linux/seq_file.h>
  13#include <linux/slab.h>
  14
  15#include "internal.h"
  16
  17static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
  18                                 unsigned int value);
  19static int regcache_rbtree_exit(struct regmap *map);
  20
/*
 * One rbtree node: caches a contiguous run of registers as a flat
 * value array plus a bitmap recording which entries actually hold
 * a cached value.
 */
struct regcache_rbtree_node {
	/* block of adjacent registers */
	void *block;
	/* Which registers are present */
	long *cache_present;
	/* base register handled by this block */
	unsigned int base_reg;
	/* number of registers available in the block */
	unsigned int blklen;
	/* the actual rbtree node holding this block */
	struct rb_node node;
};
  33
/* Per-map cache state for the rbtree cache type. */
struct regcache_rbtree_ctx {
	/* tree of register blocks, ordered by base register */
	struct rb_root root;
	/* most recently used node; checked first on lookups */
	struct regcache_rbtree_node *cached_rbnode;
};
  38
  39static inline void regcache_rbtree_get_base_top_reg(
  40        struct regmap *map,
  41        struct regcache_rbtree_node *rbnode,
  42        unsigned int *base, unsigned int *top)
  43{
  44        *base = rbnode->base_reg;
  45        *top = rbnode->base_reg + ((rbnode->blklen - 1) * map->reg_stride);
  46}
  47
/* Read the cached value at block index @idx of @rbnode. */
static unsigned int regcache_rbtree_get_register(struct regmap *map,
	struct regcache_rbtree_node *rbnode, unsigned int idx)
{
	return regcache_get_val(map, rbnode->block, idx);
}
  53
/*
 * Store @val at block index @idx of @rbnode and mark that slot as
 * present in the node's cache_present bitmap.
 */
static void regcache_rbtree_set_register(struct regmap *map,
					 struct regcache_rbtree_node *rbnode,
					 unsigned int idx, unsigned int val)
{
	set_bit(idx, rbnode->cache_present);
	regcache_set_val(map, rbnode->block, idx, val);
}
  61
  62static struct regcache_rbtree_node *regcache_rbtree_lookup(struct regmap *map,
  63                                                           unsigned int reg)
  64{
  65        struct regcache_rbtree_ctx *rbtree_ctx = map->cache;
  66        struct rb_node *node;
  67        struct regcache_rbtree_node *rbnode;
  68        unsigned int base_reg, top_reg;
  69
  70        rbnode = rbtree_ctx->cached_rbnode;
  71        if (rbnode) {
  72                regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
  73                                                 &top_reg);
  74                if (reg >= base_reg && reg <= top_reg)
  75                        return rbnode;
  76        }
  77
  78        node = rbtree_ctx->root.rb_node;
  79        while (node) {
  80                rbnode = rb_entry(node, struct regcache_rbtree_node, node);
  81                regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
  82                                                 &top_reg);
  83                if (reg >= base_reg && reg <= top_reg) {
  84                        rbtree_ctx->cached_rbnode = rbnode;
  85                        return rbnode;
  86                } else if (reg > top_reg) {
  87                        node = node->rb_right;
  88                } else if (reg < base_reg) {
  89                        node = node->rb_left;
  90                }
  91        }
  92
  93        return NULL;
  94}
  95
  96static int regcache_rbtree_insert(struct regmap *map, struct rb_root *root,
  97                                  struct regcache_rbtree_node *rbnode)
  98{
  99        struct rb_node **new, *parent;
 100        struct regcache_rbtree_node *rbnode_tmp;
 101        unsigned int base_reg_tmp, top_reg_tmp;
 102        unsigned int base_reg;
 103
 104        parent = NULL;
 105        new = &root->rb_node;
 106        while (*new) {
 107                rbnode_tmp = rb_entry(*new, struct regcache_rbtree_node, node);
 108                /* base and top registers of the current rbnode */
 109                regcache_rbtree_get_base_top_reg(map, rbnode_tmp, &base_reg_tmp,
 110                                                 &top_reg_tmp);
 111                /* base register of the rbnode to be added */
 112                base_reg = rbnode->base_reg;
 113                parent = *new;
 114                /* if this register has already been inserted, just return */
 115                if (base_reg >= base_reg_tmp &&
 116                    base_reg <= top_reg_tmp)
 117                        return 0;
 118                else if (base_reg > top_reg_tmp)
 119                        new = &((*new)->rb_right);
 120                else if (base_reg < base_reg_tmp)
 121                        new = &((*new)->rb_left);
 122        }
 123
 124        /* insert the node into the rbtree */
 125        rb_link_node(&rbnode->node, parent, new);
 126        rb_insert_color(&rbnode->node, root);
 127
 128        return 1;
 129}
 130
 131#ifdef CONFIG_DEBUG_FS
 132static int rbtree_show(struct seq_file *s, void *ignored)
 133{
 134        struct regmap *map = s->private;
 135        struct regcache_rbtree_ctx *rbtree_ctx = map->cache;
 136        struct regcache_rbtree_node *n;
 137        struct rb_node *node;
 138        unsigned int base, top;
 139        size_t mem_size;
 140        int nodes = 0;
 141        int registers = 0;
 142        int this_registers, average;
 143
 144        map->lock(map->lock_arg);
 145
 146        mem_size = sizeof(*rbtree_ctx);
 147
 148        for (node = rb_first(&rbtree_ctx->root); node != NULL;
 149             node = rb_next(node)) {
 150                n = rb_entry(node, struct regcache_rbtree_node, node);
 151                mem_size += sizeof(*n);
 152                mem_size += (n->blklen * map->cache_word_size);
 153                mem_size += BITS_TO_LONGS(n->blklen) * sizeof(long);
 154
 155                regcache_rbtree_get_base_top_reg(map, n, &base, &top);
 156                this_registers = ((top - base) / map->reg_stride) + 1;
 157                seq_printf(s, "%x-%x (%d)\n", base, top, this_registers);
 158
 159                nodes++;
 160                registers += this_registers;
 161        }
 162
 163        if (nodes)
 164                average = registers / nodes;
 165        else
 166                average = 0;
 167
 168        seq_printf(s, "%d nodes, %d registers, average %d registers, used %zu bytes\n",
 169                   nodes, registers, average, mem_size);
 170
 171        map->unlock(map->lock_arg);
 172
 173        return 0;
 174}
 175
 176DEFINE_SHOW_ATTRIBUTE(rbtree);
 177
/* Expose the cache layout as a read-only "rbtree" file in debugfs. */
static void rbtree_debugfs_init(struct regmap *map)
{
	debugfs_create_file("rbtree", 0400, map->debugfs, map, &rbtree_fops);
}
 182#endif
 183
 184static int regcache_rbtree_init(struct regmap *map)
 185{
 186        struct regcache_rbtree_ctx *rbtree_ctx;
 187        int i;
 188        int ret;
 189
 190        map->cache = kmalloc(sizeof *rbtree_ctx, GFP_KERNEL);
 191        if (!map->cache)
 192                return -ENOMEM;
 193
 194        rbtree_ctx = map->cache;
 195        rbtree_ctx->root = RB_ROOT;
 196        rbtree_ctx->cached_rbnode = NULL;
 197
 198        for (i = 0; i < map->num_reg_defaults; i++) {
 199                ret = regcache_rbtree_write(map,
 200                                            map->reg_defaults[i].reg,
 201                                            map->reg_defaults[i].def);
 202                if (ret)
 203                        goto err;
 204        }
 205
 206        return 0;
 207
 208err:
 209        regcache_rbtree_exit(map);
 210        return ret;
 211}
 212
 213static int regcache_rbtree_exit(struct regmap *map)
 214{
 215        struct rb_node *next;
 216        struct regcache_rbtree_ctx *rbtree_ctx;
 217        struct regcache_rbtree_node *rbtree_node;
 218
 219        /* if we've already been called then just return */
 220        rbtree_ctx = map->cache;
 221        if (!rbtree_ctx)
 222                return 0;
 223
 224        /* free up the rbtree */
 225        next = rb_first(&rbtree_ctx->root);
 226        while (next) {
 227                rbtree_node = rb_entry(next, struct regcache_rbtree_node, node);
 228                next = rb_next(&rbtree_node->node);
 229                rb_erase(&rbtree_node->node, &rbtree_ctx->root);
 230                kfree(rbtree_node->cache_present);
 231                kfree(rbtree_node->block);
 232                kfree(rbtree_node);
 233        }
 234
 235        /* release the resources */
 236        kfree(map->cache);
 237        map->cache = NULL;
 238
 239        return 0;
 240}
 241
 242static int regcache_rbtree_read(struct regmap *map,
 243                                unsigned int reg, unsigned int *value)
 244{
 245        struct regcache_rbtree_node *rbnode;
 246        unsigned int reg_tmp;
 247
 248        rbnode = regcache_rbtree_lookup(map, reg);
 249        if (rbnode) {
 250                reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
 251                if (!test_bit(reg_tmp, rbnode->cache_present))
 252                        return -ENOENT;
 253                *value = regcache_rbtree_get_register(map, rbnode, reg_tmp);
 254        } else {
 255                return -ENOENT;
 256        }
 257
 258        return 0;
 259}
 260
 261
 262static int regcache_rbtree_insert_to_block(struct regmap *map,
 263                                           struct regcache_rbtree_node *rbnode,
 264                                           unsigned int base_reg,
 265                                           unsigned int top_reg,
 266                                           unsigned int reg,
 267                                           unsigned int value)
 268{
 269        unsigned int blklen;
 270        unsigned int pos, offset;
 271        unsigned long *present;
 272        u8 *blk;
 273
 274        blklen = (top_reg - base_reg) / map->reg_stride + 1;
 275        pos = (reg - base_reg) / map->reg_stride;
 276        offset = (rbnode->base_reg - base_reg) / map->reg_stride;
 277
 278        blk = krealloc(rbnode->block,
 279                       blklen * map->cache_word_size,
 280                       GFP_KERNEL);
 281        if (!blk)
 282                return -ENOMEM;
 283
 284        if (BITS_TO_LONGS(blklen) > BITS_TO_LONGS(rbnode->blklen)) {
 285                present = krealloc(rbnode->cache_present,
 286                                   BITS_TO_LONGS(blklen) * sizeof(*present),
 287                                   GFP_KERNEL);
 288                if (!present) {
 289                        kfree(blk);
 290                        return -ENOMEM;
 291                }
 292
 293                memset(present + BITS_TO_LONGS(rbnode->blklen), 0,
 294                       (BITS_TO_LONGS(blklen) - BITS_TO_LONGS(rbnode->blklen))
 295                       * sizeof(*present));
 296        } else {
 297                present = rbnode->cache_present;
 298        }
 299
 300        /* insert the register value in the correct place in the rbnode block */
 301        if (pos == 0) {
 302                memmove(blk + offset * map->cache_word_size,
 303                        blk, rbnode->blklen * map->cache_word_size);
 304                bitmap_shift_left(present, present, offset, blklen);
 305        }
 306
 307        /* update the rbnode block, its size and the base register */
 308        rbnode->block = blk;
 309        rbnode->blklen = blklen;
 310        rbnode->base_reg = base_reg;
 311        rbnode->cache_present = present;
 312
 313        regcache_rbtree_set_register(map, rbnode, pos, value);
 314        return 0;
 315}
 316
 317static struct regcache_rbtree_node *
 318regcache_rbtree_node_alloc(struct regmap *map, unsigned int reg)
 319{
 320        struct regcache_rbtree_node *rbnode;
 321        const struct regmap_range *range;
 322        int i;
 323
 324        rbnode = kzalloc(sizeof(*rbnode), GFP_KERNEL);
 325        if (!rbnode)
 326                return NULL;
 327
 328        /* If there is a read table then use it to guess at an allocation */
 329        if (map->rd_table) {
 330                for (i = 0; i < map->rd_table->n_yes_ranges; i++) {
 331                        if (regmap_reg_in_range(reg,
 332                                                &map->rd_table->yes_ranges[i]))
 333                                break;
 334                }
 335
 336                if (i != map->rd_table->n_yes_ranges) {
 337                        range = &map->rd_table->yes_ranges[i];
 338                        rbnode->blklen = (range->range_max - range->range_min) /
 339                                map->reg_stride + 1;
 340                        rbnode->base_reg = range->range_min;
 341                }
 342        }
 343
 344        if (!rbnode->blklen) {
 345                rbnode->blklen = 1;
 346                rbnode->base_reg = reg;
 347        }
 348
 349        rbnode->block = kmalloc_array(rbnode->blklen, map->cache_word_size,
 350                                      GFP_KERNEL);
 351        if (!rbnode->block)
 352                goto err_free;
 353
 354        rbnode->cache_present = kcalloc(BITS_TO_LONGS(rbnode->blklen),
 355                                        sizeof(*rbnode->cache_present),
 356                                        GFP_KERNEL);
 357        if (!rbnode->cache_present)
 358                goto err_free_block;
 359
 360        return rbnode;
 361
 362err_free_block:
 363        kfree(rbnode->block);
 364err_free:
 365        kfree(rbnode);
 366        return NULL;
 367}
 368
 369static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
 370                                 unsigned int value)
 371{
 372        struct regcache_rbtree_ctx *rbtree_ctx;
 373        struct regcache_rbtree_node *rbnode, *rbnode_tmp;
 374        struct rb_node *node;
 375        unsigned int reg_tmp;
 376        int ret;
 377
 378        rbtree_ctx = map->cache;
 379
 380        /* if we can't locate it in the cached rbnode we'll have
 381         * to traverse the rbtree looking for it.
 382         */
 383        rbnode = regcache_rbtree_lookup(map, reg);
 384        if (rbnode) {
 385                reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
 386                regcache_rbtree_set_register(map, rbnode, reg_tmp, value);
 387        } else {
 388                unsigned int base_reg, top_reg;
 389                unsigned int new_base_reg, new_top_reg;
 390                unsigned int min, max;
 391                unsigned int max_dist;
 392                unsigned int dist, best_dist = UINT_MAX;
 393
 394                max_dist = map->reg_stride * sizeof(*rbnode_tmp) /
 395                        map->cache_word_size;
 396                if (reg < max_dist)
 397                        min = 0;
 398                else
 399                        min = reg - max_dist;
 400                max = reg + max_dist;
 401
 402                /* look for an adjacent register to the one we are about to add */
 403                node = rbtree_ctx->root.rb_node;
 404                while (node) {
 405                        rbnode_tmp = rb_entry(node, struct regcache_rbtree_node,
 406                                              node);
 407
 408                        regcache_rbtree_get_base_top_reg(map, rbnode_tmp,
 409                                &base_reg, &top_reg);
 410
 411                        if (base_reg <= max && top_reg >= min) {
 412                                if (reg < base_reg)
 413                                        dist = base_reg - reg;
 414                                else if (reg > top_reg)
 415                                        dist = reg - top_reg;
 416                                else
 417                                        dist = 0;
 418                                if (dist < best_dist) {
 419                                        rbnode = rbnode_tmp;
 420                                        best_dist = dist;
 421                                        new_base_reg = min(reg, base_reg);
 422                                        new_top_reg = max(reg, top_reg);
 423                                }
 424                        }
 425
 426                        /*
 427                         * Keep looking, we want to choose the closest block,
 428                         * otherwise we might end up creating overlapping
 429                         * blocks, which breaks the rbtree.
 430                         */
 431                        if (reg < base_reg)
 432                                node = node->rb_left;
 433                        else if (reg > top_reg)
 434                                node = node->rb_right;
 435                        else
 436                                break;
 437                }
 438
 439                if (rbnode) {
 440                        ret = regcache_rbtree_insert_to_block(map, rbnode,
 441                                                              new_base_reg,
 442                                                              new_top_reg, reg,
 443                                                              value);
 444                        if (ret)
 445                                return ret;
 446                        rbtree_ctx->cached_rbnode = rbnode;
 447                        return 0;
 448                }
 449
 450                /* We did not manage to find a place to insert it in
 451                 * an existing block so create a new rbnode.
 452                 */
 453                rbnode = regcache_rbtree_node_alloc(map, reg);
 454                if (!rbnode)
 455                        return -ENOMEM;
 456                regcache_rbtree_set_register(map, rbnode,
 457                                             reg - rbnode->base_reg, value);
 458                regcache_rbtree_insert(map, &rbtree_ctx->root, rbnode);
 459                rbtree_ctx->cached_rbnode = rbnode;
 460        }
 461
 462        return 0;
 463}
 464
 465static int regcache_rbtree_sync(struct regmap *map, unsigned int min,
 466                                unsigned int max)
 467{
 468        struct regcache_rbtree_ctx *rbtree_ctx;
 469        struct rb_node *node;
 470        struct regcache_rbtree_node *rbnode;
 471        unsigned int base_reg, top_reg;
 472        unsigned int start, end;
 473        int ret;
 474
 475        rbtree_ctx = map->cache;
 476        for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
 477                rbnode = rb_entry(node, struct regcache_rbtree_node, node);
 478
 479                regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
 480                        &top_reg);
 481                if (base_reg > max)
 482                        break;
 483                if (top_reg < min)
 484                        continue;
 485
 486                if (min > base_reg)
 487                        start = (min - base_reg) / map->reg_stride;
 488                else
 489                        start = 0;
 490
 491                if (max < top_reg)
 492                        end = (max - base_reg) / map->reg_stride + 1;
 493                else
 494                        end = rbnode->blklen;
 495
 496                ret = regcache_sync_block(map, rbnode->block,
 497                                          rbnode->cache_present,
 498                                          rbnode->base_reg, start, end);
 499                if (ret != 0)
 500                        return ret;
 501        }
 502
 503        return regmap_async_complete(map);
 504}
 505
 506static int regcache_rbtree_drop(struct regmap *map, unsigned int min,
 507                                unsigned int max)
 508{
 509        struct regcache_rbtree_ctx *rbtree_ctx;
 510        struct regcache_rbtree_node *rbnode;
 511        struct rb_node *node;
 512        unsigned int base_reg, top_reg;
 513        unsigned int start, end;
 514
 515        rbtree_ctx = map->cache;
 516        for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
 517                rbnode = rb_entry(node, struct regcache_rbtree_node, node);
 518
 519                regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
 520                        &top_reg);
 521                if (base_reg > max)
 522                        break;
 523                if (top_reg < min)
 524                        continue;
 525
 526                if (min > base_reg)
 527                        start = (min - base_reg) / map->reg_stride;
 528                else
 529                        start = 0;
 530
 531                if (max < top_reg)
 532                        end = (max - base_reg) / map->reg_stride + 1;
 533                else
 534                        end = rbnode->blklen;
 535
 536                bitmap_clear(rbnode->cache_present, start, end - start);
 537        }
 538
 539        return 0;
 540}
 541
/* Operations table registered for the REGCACHE_RBTREE cache type. */
struct regcache_ops regcache_rbtree_ops = {
	.type = REGCACHE_RBTREE,
	.name = "rbtree",
	.init = regcache_rbtree_init,
	.exit = regcache_rbtree_exit,
#ifdef CONFIG_DEBUG_FS
	.debugfs_init = rbtree_debugfs_init,
#endif
	.read = regcache_rbtree_read,
	.write = regcache_rbtree_write,
	.sync = regcache_rbtree_sync,
	.drop = regcache_rbtree_drop,
};
 555