/* Per-map context for the rbtree register cache. */
struct regcache_rbtree_ctx {
	struct rb_root root;				/* root of the rbnode tree */
	struct regcache_rbtree_node *cached_rbnode;	/* last-accessed node, fast path for repeated lookups */
	unsigned long *reg_present;			/* bitmap: bit N set => register N has been written/cached */
	unsigned int reg_present_nbits;			/* number of valid bits in reg_present (== highest reg + 1) */
};
static inline void regcache_rbtree_get_base_top_reg(
map->lock(map);
mem_size = sizeof(*rbtree_ctx);
+ mem_size += BITS_TO_LONGS(rbtree_ctx->reg_present_nbits) * sizeof(long);
for (node = rb_first(&rbtree_ctx->root); node != NULL;
node = rb_next(node)) {
}
#endif
+static int enlarge_reg_present_bitmap(struct regmap *map, unsigned int reg)
+{
+ struct regcache_rbtree_ctx *rbtree_ctx;
+ unsigned long *reg_present;
+ unsigned int reg_present_size;
+ unsigned int nregs;
+ int i;
+
+ rbtree_ctx = map->cache;
+ nregs = reg + 1;
+ reg_present_size = BITS_TO_LONGS(nregs);
+ reg_present_size *= sizeof(long);
+
+ if (!rbtree_ctx->reg_present) {
+ reg_present = kmalloc(reg_present_size, GFP_KERNEL);
+ if (!reg_present)
+ return -ENOMEM;
+ bitmap_zero(reg_present, nregs);
+ rbtree_ctx->reg_present = reg_present;
+ rbtree_ctx->reg_present_nbits = nregs;
+ return 0;
+ }
+
+ if (nregs > rbtree_ctx->reg_present_nbits) {
+ reg_present = krealloc(rbtree_ctx->reg_present,
+ reg_present_size, GFP_KERNEL);
+ if (!reg_present)
+ return -ENOMEM;
+ for (i = 0; i < nregs; i++)
+ if (i >= rbtree_ctx->reg_present_nbits)
+ clear_bit(i, reg_present);
+ rbtree_ctx->reg_present = reg_present;
+ rbtree_ctx->reg_present_nbits = nregs;
+ }
+
+ return 0;
+}
+
static int regcache_rbtree_init(struct regmap *map)
{
struct regcache_rbtree_ctx *rbtree_ctx;
rbtree_ctx = map->cache;
rbtree_ctx->root = RB_ROOT;
rbtree_ctx->cached_rbnode = NULL;
+ rbtree_ctx->reg_present = NULL;
+ rbtree_ctx->reg_present_nbits = 0;
for (i = 0; i < map->num_reg_defaults; i++) {
ret = regcache_rbtree_write(map,
if (!rbtree_ctx)
return 0;
+ kfree(rbtree_ctx->reg_present);
+
/* free up the rbtree */
next = rb_first(&rbtree_ctx->root);
while (next) {
return 0;
}
+static int regcache_reg_present(struct regmap *map, unsigned int reg)
+{
+ struct regcache_rbtree_ctx *rbtree_ctx;
+
+ rbtree_ctx = map->cache;
+ if (!(rbtree_ctx->reg_present[BIT_WORD(reg)] & BIT_MASK(reg)))
+ return 0;
+ return 1;
+
+}
+
static int regcache_rbtree_read(struct regmap *map,
unsigned int reg, unsigned int *value)
{
rbnode = regcache_rbtree_lookup(map, reg);
if (rbnode) {
reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
+ if (!regcache_reg_present(map, reg))
+ return -ENOENT;
*value = regcache_rbtree_get_register(map, rbnode, reg_tmp);
} else {
return -ENOENT;
int ret;
rbtree_ctx = map->cache;
+ /* update the reg_present bitmap, make space if necessary */
+ ret = enlarge_reg_present_bitmap(map, reg);
+ if (ret < 0)
+ return ret;
+ set_bit(reg, rbtree_ctx->reg_present);
+
/* if we can't locate it in the cached rbnode we'll have
* to traverse the rbtree looking for it.
*/
rbnode = kzalloc(sizeof *rbnode, GFP_KERNEL);
if (!rbnode)
return -ENOMEM;
- rbnode->blklen = 1;
+ rbnode->blklen = sizeof(*rbnode);
rbnode->base_reg = reg;
rbnode->block = kmalloc(rbnode->blklen * map->cache_word_size,
GFP_KERNEL);
for (i = base; i < end; i++) {
regtmp = rbnode->base_reg + (i * map->reg_stride);
+
+ if (!regcache_reg_present(map, regtmp))
+ continue;
+
val = regcache_rbtree_get_register(map, rbnode, i);
/* Is this the hardware default? If so skip. */