linux/drivers/base/regmap/regcache-rbtree.c
/*
 * Register cache access API - rbtree caching support
 *
 * Copyright 2011 Wolfson Microelectronics plc
 *
 * Author: Dimitris Papastamos <dp@opensource.wolfsonmicro.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/debugfs.h>
#include <linux/device.h>
#include <linux/rbtree.h>
#include <linux/seq_file.h>
#include <linux/slab.h>

#include "internal.h"

static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
				 unsigned int value);
static int regcache_rbtree_exit(struct regmap *map);

struct regcache_rbtree_node {
	/* block of adjacent registers */
	void *block;
	/* Which registers are present */
	long *cache_present;
	/* base register handled by this block */
	unsigned int base_reg;
	/* number of registers available in the block */
	unsigned int blklen;
	/* the actual rbtree node holding this block */
	struct rb_node node;
} __attribute__ ((packed));

struct regcache_rbtree_ctx {
	struct rb_root root;
	struct regcache_rbtree_node *cached_rbnode;
};

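/*
 * Return the first and last register addresses covered by an rbnode
 * block, taking the map's register stride into account.
 */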
static inline void regcache_rbtree_get_base_top_reg(
	struct regmap *map,
	struct regcache_rbtree_node *rbnode,
	unsigned int *base, unsigned int *top)
{
	*base = rbnode->base_reg;
	*top = rbnode->base_reg + ((rbnode->blklen - 1) * map->reg_stride);
}

static unsigned int regcache_rbtree_get_register(struct regmap *map,
	struct regcache_rbtree_node *rbnode, unsigned int idx)
{
	return regcache_get_val(map, rbnode->block, idx);
}

static void regcache_rbtree_set_register(struct regmap *map,
					 struct regcache_rbtree_node *rbnode,
					 unsigned int idx, unsigned int val)
{
	set_bit(idx, rbnode->cache_present);
	regcache_set_val(map, rbnode->block, idx, val);
}

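/*
 * Find the rbnode block that contains @reg.  The most recently used
 * block is checked first as a fast path; otherwise the rbtree is
 * walked and the match, if any, becomes the new cached node.
 */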
static struct regcache_rbtree_node *regcache_rbtree_lookup(struct regmap *map,
							   unsigned int reg)
{
	struct regcache_rbtree_ctx *rbtree_ctx = map->cache;
	struct rb_node *node;
	struct regcache_rbtree_node *rbnode;
	unsigned int base_reg, top_reg;

	rbnode = rbtree_ctx->cached_rbnode;
	if (rbnode) {
		regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
						 &top_reg);
		if (reg >= base_reg && reg <= top_reg)
			return rbnode;
	}

	node = rbtree_ctx->root.rb_node;
	while (node) {
		rbnode = container_of(node, struct regcache_rbtree_node, node);
		regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
						 &top_reg);
		if (reg >= base_reg && reg <= top_reg) {
			rbtree_ctx->cached_rbnode = rbnode;
			return rbnode;
		} else if (reg > top_reg) {
			node = node->rb_right;
		} else if (reg < base_reg) {
			node = node->rb_left;
		}
	}

	return NULL;
}

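/*
 * Link @rbnode into the rbtree, keyed on the register range it covers.
 * Returns 1 once the node has been inserted, or 0 if an existing block
 * already covers the node's base register.
 */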
static int regcache_rbtree_insert(struct regmap *map, struct rb_root *root,
				  struct regcache_rbtree_node *rbnode)
{
	struct rb_node **new, *parent;
	struct regcache_rbtree_node *rbnode_tmp;
	unsigned int base_reg_tmp, top_reg_tmp;
	unsigned int base_reg;

	parent = NULL;
	new = &root->rb_node;
	while (*new) {
		rbnode_tmp = container_of(*new, struct regcache_rbtree_node,
					  node);
		/* base and top registers of the current rbnode */
		regcache_rbtree_get_base_top_reg(map, rbnode_tmp, &base_reg_tmp,
						 &top_reg_tmp);
		/* base register of the rbnode to be added */
		base_reg = rbnode->base_reg;
		parent = *new;
		/* if this register has already been inserted, just return */
		if (base_reg >= base_reg_tmp &&
		    base_reg <= top_reg_tmp)
			return 0;
		else if (base_reg > top_reg_tmp)
			new = &((*new)->rb_right);
		else if (base_reg < base_reg_tmp)
			new = &((*new)->rb_left);
	}

	/* insert the node into the rbtree */
	rb_link_node(&rbnode->node, parent, new);
	rb_insert_color(&rbnode->node, root);

	return 1;
}

#ifdef CONFIG_DEBUG_FS
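/*
 * debugfs dump: one line per rbnode block plus a summary of node count,
 * cached registers and total memory used by the cache.
 */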
static int rbtree_show(struct seq_file *s, void *ignored)
{
	struct regmap *map = s->private;
	struct regcache_rbtree_ctx *rbtree_ctx = map->cache;
	struct regcache_rbtree_node *n;
	struct rb_node *node;
	unsigned int base, top;
	size_t mem_size;
	int nodes = 0;
	int registers = 0;
	int this_registers, average;

	map->lock(map->lock_arg);

	mem_size = sizeof(*rbtree_ctx);

	for (node = rb_first(&rbtree_ctx->root); node != NULL;
	     node = rb_next(node)) {
		n = container_of(node, struct regcache_rbtree_node, node);
		mem_size += sizeof(*n);
		mem_size += (n->blklen * map->cache_word_size);
		mem_size += BITS_TO_LONGS(n->blklen) * sizeof(long);

		regcache_rbtree_get_base_top_reg(map, n, &base, &top);
		this_registers = ((top - base) / map->reg_stride) + 1;
		seq_printf(s, "%x-%x (%d)\n", base, top, this_registers);

		nodes++;
		registers += this_registers;
	}

	if (nodes)
		average = registers / nodes;
	else
		average = 0;

	seq_printf(s, "%d nodes, %d registers, average %d registers, used %zu bytes\n",
		   nodes, registers, average, mem_size);

	map->unlock(map->lock_arg);

	return 0;
}

static int rbtree_open(struct inode *inode, struct file *file)
{
	return single_open(file, rbtree_show, inode->i_private);
}

static const struct file_operations rbtree_fops = {
	.open		= rbtree_open,
	.read		= seq_read,
	.llseek		= seq_lseek,
	.release	= single_release,
};

static void rbtree_debugfs_init(struct regmap *map)
{
	debugfs_create_file("rbtree", 0400, map->debugfs, map, &rbtree_fops);
}
#endif

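/*
 * Allocate the rbtree cache context and seed it with the driver's
 * register defaults, one cache write per default.
 */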
static int regcache_rbtree_init(struct regmap *map)
{
	struct regcache_rbtree_ctx *rbtree_ctx;
	int i;
	int ret;

	map->cache = kmalloc(sizeof *rbtree_ctx, GFP_KERNEL);
	if (!map->cache)
		return -ENOMEM;

	rbtree_ctx = map->cache;
	rbtree_ctx->root = RB_ROOT;
	rbtree_ctx->cached_rbnode = NULL;

	for (i = 0; i < map->num_reg_defaults; i++) {
		ret = regcache_rbtree_write(map,
					    map->reg_defaults[i].reg,
					    map->reg_defaults[i].def);
		if (ret)
			goto err;
	}

	return 0;

err:
	regcache_rbtree_exit(map);
	return ret;
}

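/*
 * Tear down the cache: free every rbnode block, its presence bitmap and
 * the node itself, then the context.  Safe to call more than once.
 */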
static int regcache_rbtree_exit(struct regmap *map)
{
	struct rb_node *next;
	struct regcache_rbtree_ctx *rbtree_ctx;
	struct regcache_rbtree_node *rbtree_node;

	/* if we've already been called then just return */
	rbtree_ctx = map->cache;
	if (!rbtree_ctx)
		return 0;

	/* free up the rbtree */
	next = rb_first(&rbtree_ctx->root);
	while (next) {
		rbtree_node = rb_entry(next, struct regcache_rbtree_node, node);
		next = rb_next(&rbtree_node->node);
		rb_erase(&rbtree_node->node, &rbtree_ctx->root);
		kfree(rbtree_node->cache_present);
		kfree(rbtree_node->block);
		kfree(rbtree_node);
	}

	/* release the resources */
	kfree(map->cache);
	map->cache = NULL;

	return 0;
}

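/*
 * Read a register value from the cache.  Returns -ENOENT if no block
 * covers @reg or the register has never been written to the cache.
 */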
static int regcache_rbtree_read(struct regmap *map,
				unsigned int reg, unsigned int *value)
{
	struct regcache_rbtree_node *rbnode;
	unsigned int reg_tmp;

	rbnode = regcache_rbtree_lookup(map, reg);
	if (rbnode) {
		reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
		if (!test_bit(reg_tmp, rbnode->cache_present))
			return -ENOENT;
		*value = regcache_rbtree_get_register(map, rbnode, reg_tmp);
	} else {
		return -ENOENT;
	}

	return 0;
}

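/*
 * Grow an existing rbnode so that its block spans [base_reg, top_reg],
 * reallocating the data block and presence bitmap as needed, then store
 * @value for @reg.  When the block is extended downwards the existing
 * data and bitmap are shifted up to make room.
 */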
static int regcache_rbtree_insert_to_block(struct regmap *map,
					   struct regcache_rbtree_node *rbnode,
					   unsigned int base_reg,
					   unsigned int top_reg,
					   unsigned int reg,
					   unsigned int value)
{
	unsigned int blklen;
	unsigned int pos, offset;
	unsigned long *present;
	u8 *blk;

	blklen = (top_reg - base_reg) / map->reg_stride + 1;
	pos = (reg - base_reg) / map->reg_stride;
	offset = (rbnode->base_reg - base_reg) / map->reg_stride;

	blk = krealloc(rbnode->block,
		       blklen * map->cache_word_size,
		       GFP_KERNEL);
	if (!blk)
		return -ENOMEM;

	if (BITS_TO_LONGS(blklen) > BITS_TO_LONGS(rbnode->blklen)) {
		present = krealloc(rbnode->cache_present,
				   BITS_TO_LONGS(blklen) * sizeof(*present),
				   GFP_KERNEL);
		if (!present) {
			kfree(blk);
			return -ENOMEM;
		}

		memset(present + BITS_TO_LONGS(rbnode->blklen), 0,
		       (BITS_TO_LONGS(blklen) - BITS_TO_LONGS(rbnode->blklen))
		       * sizeof(*present));
	} else {
		present = rbnode->cache_present;
	}

	/* insert the register value in the correct place in the rbnode block */
	if (pos == 0) {
		memmove(blk + offset * map->cache_word_size,
			blk, rbnode->blklen * map->cache_word_size);
		bitmap_shift_left(present, present, offset, blklen);
	}

	/* update the rbnode block, its size and the base register */
	rbnode->block = blk;
	rbnode->blklen = blklen;
	rbnode->base_reg = base_reg;
	rbnode->cache_present = present;

	regcache_rbtree_set_register(map, rbnode, pos, value);
	return 0;
}

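/*
 * Allocate a new rbnode for @reg.  If a readable-range table is
 * available the block is sized to cover the whole range containing
 * @reg; otherwise it starts as a single-register block.
 */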
static struct regcache_rbtree_node *
regcache_rbtree_node_alloc(struct regmap *map, unsigned int reg)
{
	struct regcache_rbtree_node *rbnode;
	const struct regmap_range *range;
	int i;

	rbnode = kzalloc(sizeof(*rbnode), GFP_KERNEL);
	if (!rbnode)
		return NULL;

	/* If there is a read table then use it to guess at an allocation */
	if (map->rd_table) {
		for (i = 0; i < map->rd_table->n_yes_ranges; i++) {
			if (regmap_reg_in_range(reg,
						&map->rd_table->yes_ranges[i]))
				break;
		}

		if (i != map->rd_table->n_yes_ranges) {
			range = &map->rd_table->yes_ranges[i];
			rbnode->blklen = (range->range_max - range->range_min) /
				map->reg_stride + 1;
			rbnode->base_reg = range->range_min;
		}
	}

	if (!rbnode->blklen) {
		rbnode->blklen = 1;
		rbnode->base_reg = reg;
	}

	rbnode->block = kmalloc_array(rbnode->blklen, map->cache_word_size,
				      GFP_KERNEL);
	if (!rbnode->block)
		goto err_free;

	rbnode->cache_present = kcalloc(BITS_TO_LONGS(rbnode->blklen),
					sizeof(*rbnode->cache_present),
					GFP_KERNEL);
	if (!rbnode->cache_present)
		goto err_free_block;

	return rbnode;

err_free_block:
	kfree(rbnode->block);
err_free:
	kfree(rbnode);
	return NULL;
}

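/*
 * Write a register value into the cache.  The value is stored in the
 * block that already covers @reg if there is one, merged into a nearby
 * block if that is cheaper than a new node, or placed in a freshly
 * allocated block otherwise.
 */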
static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
				 unsigned int value)
{
	struct regcache_rbtree_ctx *rbtree_ctx;
	struct regcache_rbtree_node *rbnode, *rbnode_tmp;
	struct rb_node *node;
	unsigned int reg_tmp;
	int ret;

	rbtree_ctx = map->cache;

	/* if we can't locate it in the cached rbnode we'll have
	 * to traverse the rbtree looking for it.
	 */
	rbnode = regcache_rbtree_lookup(map, reg);
	if (rbnode) {
		reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
		regcache_rbtree_set_register(map, rbnode, reg_tmp, value);
	} else {
		unsigned int base_reg, top_reg;
		unsigned int new_base_reg, new_top_reg;
		unsigned int min, max;
		unsigned int max_dist;

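		/*
		 * Only merge into an existing block if the padding words
		 * needed to bridge the gap cost no more memory than a
		 * separate rbnode structure would.
		 */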
		max_dist = map->reg_stride * sizeof(*rbnode_tmp) /
			map->cache_word_size;
		if (reg < max_dist)
			min = 0;
		else
			min = reg - max_dist;
		max = reg + max_dist;

		/* look for an adjacent register to the one we are about to add */
		node = rbtree_ctx->root.rb_node;
		while (node) {
			rbnode_tmp = rb_entry(node, struct regcache_rbtree_node,
					      node);

			regcache_rbtree_get_base_top_reg(map, rbnode_tmp,
				&base_reg, &top_reg);

			if (base_reg <= max && top_reg >= min) {
				new_base_reg = min(reg, base_reg);
				new_top_reg = max(reg, top_reg);
			} else {
				if (max < base_reg)
					node = node->rb_left;
				else
					node = node->rb_right;

				continue;
			}

			ret = regcache_rbtree_insert_to_block(map, rbnode_tmp,
							      new_base_reg,
							      new_top_reg, reg,
							      value);
			if (ret)
				return ret;
			rbtree_ctx->cached_rbnode = rbnode_tmp;
			return 0;
		}

		/* We did not manage to find a place to insert it in
		 * an existing block so create a new rbnode.
		 */
		rbnode = regcache_rbtree_node_alloc(map, reg);
		if (!rbnode)
			return -ENOMEM;
		regcache_rbtree_set_register(map, rbnode,
					     reg - rbnode->base_reg, value);
		regcache_rbtree_insert(map, &rbtree_ctx->root, rbnode);
		rbtree_ctx->cached_rbnode = rbnode;
	}

	return 0;
}

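/*
 * Write back every cached register in the range [min, max] to the
 * device, one regcache_sync_block() call per overlapping rbnode block.
 */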
static int regcache_rbtree_sync(struct regmap *map, unsigned int min,
				unsigned int max)
{
	struct regcache_rbtree_ctx *rbtree_ctx;
	struct rb_node *node;
	struct regcache_rbtree_node *rbnode;
	unsigned int base_reg, top_reg;
	unsigned int start, end;
	int ret;

	rbtree_ctx = map->cache;
	for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
		rbnode = rb_entry(node, struct regcache_rbtree_node, node);

		regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
			&top_reg);
		if (base_reg > max)
			break;
		if (top_reg < min)
			continue;

		if (min > base_reg)
			start = (min - base_reg) / map->reg_stride;
		else
			start = 0;

		if (max < top_reg)
			end = (max - base_reg) / map->reg_stride + 1;
		else
			end = rbnode->blklen;

		ret = regcache_sync_block(map, rbnode->block,
					  rbnode->cache_present,
					  rbnode->base_reg, start, end);
		if (ret != 0)
			return ret;
	}

	return regmap_async_complete(map);
}

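/*
 * Invalidate the cached values in the range [min, max] by clearing the
 * corresponding bits in each overlapping block's presence bitmap.  The
 * blocks themselves are kept.
 */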
static int regcache_rbtree_drop(struct regmap *map, unsigned int min,
				unsigned int max)
{
	struct regcache_rbtree_ctx *rbtree_ctx;
	struct regcache_rbtree_node *rbnode;
	struct rb_node *node;
	unsigned int base_reg, top_reg;
	unsigned int start, end;

	rbtree_ctx = map->cache;
	for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
		rbnode = rb_entry(node, struct regcache_rbtree_node, node);

		regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
			&top_reg);
		if (base_reg > max)
			break;
		if (top_reg < min)
			continue;

		if (min > base_reg)
			start = (min - base_reg) / map->reg_stride;
		else
			start = 0;

		if (max < top_reg)
			end = (max - base_reg) / map->reg_stride + 1;
		else
			end = rbnode->blklen;

		bitmap_clear(rbnode->cache_present, start, end - start);
	}

	return 0;
}

struct regcache_ops regcache_rbtree_ops = {
	.type = REGCACHE_RBTREE,
	.name = "rbtree",
	.init = regcache_rbtree_init,
	.exit = regcache_rbtree_exit,
#ifdef CONFIG_DEBUG_FS
	.debugfs_init = rbtree_debugfs_init,
#endif
	.read = regcache_rbtree_read,
	.write = regcache_rbtree_write,
	.sync = regcache_rbtree_sync,
	.drop = regcache_rbtree_drop,
};