xref: /OK3568_Linux_fs/kernel/drivers/base/regmap/regcache-rbtree.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun //
3*4882a593Smuzhiyun // Register cache access API - rbtree caching support
4*4882a593Smuzhiyun //
5*4882a593Smuzhiyun // Copyright 2011 Wolfson Microelectronics plc
6*4882a593Smuzhiyun //
7*4882a593Smuzhiyun // Author: Dimitris Papastamos <dp@opensource.wolfsonmicro.com>
8*4882a593Smuzhiyun 
9*4882a593Smuzhiyun #include <linux/debugfs.h>
10*4882a593Smuzhiyun #include <linux/device.h>
11*4882a593Smuzhiyun #include <linux/rbtree.h>
12*4882a593Smuzhiyun #include <linux/seq_file.h>
13*4882a593Smuzhiyun #include <linux/slab.h>
14*4882a593Smuzhiyun 
15*4882a593Smuzhiyun #include "internal.h"
16*4882a593Smuzhiyun 
/* Forward declarations: init() writes the register defaults via the normal
 * write path, and unwinds with exit() on failure, so both are needed early. */
static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
				 unsigned int value);
static int regcache_rbtree_exit(struct regmap *map);

/*
 * One rbtree node covers a contiguous run of registers.  The register at
 * base_reg + i * reg_stride lives at index i of @block and bit i of
 * @cache_present.
 */
struct regcache_rbtree_node {
	/* block of adjacent registers */
	void *block;
	/* Which registers are present */
	long *cache_present;
	/* base register handled by this block */
	unsigned int base_reg;
	/* number of registers available in the block */
	unsigned int blklen;
	/* the actual rbtree node holding this block */
	struct rb_node node;
};

/* Per-regmap cache context, stored in map->cache. */
struct regcache_rbtree_ctx {
	/* root of the tree of register blocks, ordered by base_reg */
	struct rb_root root;
	/* last node hit by a lookup, used as a fast path for locality */
	struct regcache_rbtree_node *cached_rbnode;
};
38*4882a593Smuzhiyun 
regcache_rbtree_get_base_top_reg(struct regmap * map,struct regcache_rbtree_node * rbnode,unsigned int * base,unsigned int * top)39*4882a593Smuzhiyun static inline void regcache_rbtree_get_base_top_reg(
40*4882a593Smuzhiyun 	struct regmap *map,
41*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode,
42*4882a593Smuzhiyun 	unsigned int *base, unsigned int *top)
43*4882a593Smuzhiyun {
44*4882a593Smuzhiyun 	*base = rbnode->base_reg;
45*4882a593Smuzhiyun 	*top = rbnode->base_reg + ((rbnode->blklen - 1) * map->reg_stride);
46*4882a593Smuzhiyun }
47*4882a593Smuzhiyun 
regcache_rbtree_get_register(struct regmap * map,struct regcache_rbtree_node * rbnode,unsigned int idx)48*4882a593Smuzhiyun static unsigned int regcache_rbtree_get_register(struct regmap *map,
49*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode, unsigned int idx)
50*4882a593Smuzhiyun {
51*4882a593Smuzhiyun 	return regcache_get_val(map, rbnode->block, idx);
52*4882a593Smuzhiyun }
53*4882a593Smuzhiyun 
regcache_rbtree_set_register(struct regmap * map,struct regcache_rbtree_node * rbnode,unsigned int idx,unsigned int val)54*4882a593Smuzhiyun static void regcache_rbtree_set_register(struct regmap *map,
55*4882a593Smuzhiyun 					 struct regcache_rbtree_node *rbnode,
56*4882a593Smuzhiyun 					 unsigned int idx, unsigned int val)
57*4882a593Smuzhiyun {
58*4882a593Smuzhiyun 	set_bit(idx, rbnode->cache_present);
59*4882a593Smuzhiyun 	regcache_set_val(map, rbnode->block, idx, val);
60*4882a593Smuzhiyun }
61*4882a593Smuzhiyun 
regcache_rbtree_lookup(struct regmap * map,unsigned int reg)62*4882a593Smuzhiyun static struct regcache_rbtree_node *regcache_rbtree_lookup(struct regmap *map,
63*4882a593Smuzhiyun 							   unsigned int reg)
64*4882a593Smuzhiyun {
65*4882a593Smuzhiyun 	struct regcache_rbtree_ctx *rbtree_ctx = map->cache;
66*4882a593Smuzhiyun 	struct rb_node *node;
67*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode;
68*4882a593Smuzhiyun 	unsigned int base_reg, top_reg;
69*4882a593Smuzhiyun 
70*4882a593Smuzhiyun 	rbnode = rbtree_ctx->cached_rbnode;
71*4882a593Smuzhiyun 	if (rbnode) {
72*4882a593Smuzhiyun 		regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
73*4882a593Smuzhiyun 						 &top_reg);
74*4882a593Smuzhiyun 		if (reg >= base_reg && reg <= top_reg)
75*4882a593Smuzhiyun 			return rbnode;
76*4882a593Smuzhiyun 	}
77*4882a593Smuzhiyun 
78*4882a593Smuzhiyun 	node = rbtree_ctx->root.rb_node;
79*4882a593Smuzhiyun 	while (node) {
80*4882a593Smuzhiyun 		rbnode = rb_entry(node, struct regcache_rbtree_node, node);
81*4882a593Smuzhiyun 		regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
82*4882a593Smuzhiyun 						 &top_reg);
83*4882a593Smuzhiyun 		if (reg >= base_reg && reg <= top_reg) {
84*4882a593Smuzhiyun 			rbtree_ctx->cached_rbnode = rbnode;
85*4882a593Smuzhiyun 			return rbnode;
86*4882a593Smuzhiyun 		} else if (reg > top_reg) {
87*4882a593Smuzhiyun 			node = node->rb_right;
88*4882a593Smuzhiyun 		} else if (reg < base_reg) {
89*4882a593Smuzhiyun 			node = node->rb_left;
90*4882a593Smuzhiyun 		}
91*4882a593Smuzhiyun 	}
92*4882a593Smuzhiyun 
93*4882a593Smuzhiyun 	return NULL;
94*4882a593Smuzhiyun }
95*4882a593Smuzhiyun 
regcache_rbtree_insert(struct regmap * map,struct rb_root * root,struct regcache_rbtree_node * rbnode)96*4882a593Smuzhiyun static int regcache_rbtree_insert(struct regmap *map, struct rb_root *root,
97*4882a593Smuzhiyun 				  struct regcache_rbtree_node *rbnode)
98*4882a593Smuzhiyun {
99*4882a593Smuzhiyun 	struct rb_node **new, *parent;
100*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode_tmp;
101*4882a593Smuzhiyun 	unsigned int base_reg_tmp, top_reg_tmp;
102*4882a593Smuzhiyun 	unsigned int base_reg;
103*4882a593Smuzhiyun 
104*4882a593Smuzhiyun 	parent = NULL;
105*4882a593Smuzhiyun 	new = &root->rb_node;
106*4882a593Smuzhiyun 	while (*new) {
107*4882a593Smuzhiyun 		rbnode_tmp = rb_entry(*new, struct regcache_rbtree_node, node);
108*4882a593Smuzhiyun 		/* base and top registers of the current rbnode */
109*4882a593Smuzhiyun 		regcache_rbtree_get_base_top_reg(map, rbnode_tmp, &base_reg_tmp,
110*4882a593Smuzhiyun 						 &top_reg_tmp);
111*4882a593Smuzhiyun 		/* base register of the rbnode to be added */
112*4882a593Smuzhiyun 		base_reg = rbnode->base_reg;
113*4882a593Smuzhiyun 		parent = *new;
114*4882a593Smuzhiyun 		/* if this register has already been inserted, just return */
115*4882a593Smuzhiyun 		if (base_reg >= base_reg_tmp &&
116*4882a593Smuzhiyun 		    base_reg <= top_reg_tmp)
117*4882a593Smuzhiyun 			return 0;
118*4882a593Smuzhiyun 		else if (base_reg > top_reg_tmp)
119*4882a593Smuzhiyun 			new = &((*new)->rb_right);
120*4882a593Smuzhiyun 		else if (base_reg < base_reg_tmp)
121*4882a593Smuzhiyun 			new = &((*new)->rb_left);
122*4882a593Smuzhiyun 	}
123*4882a593Smuzhiyun 
124*4882a593Smuzhiyun 	/* insert the node into the rbtree */
125*4882a593Smuzhiyun 	rb_link_node(&rbnode->node, parent, new);
126*4882a593Smuzhiyun 	rb_insert_color(&rbnode->node, root);
127*4882a593Smuzhiyun 
128*4882a593Smuzhiyun 	return 1;
129*4882a593Smuzhiyun }
130*4882a593Smuzhiyun 
#ifdef CONFIG_DEBUG_FS
/*
 * debugfs dump: one line per block ("base-top (count)") followed by a
 * summary of node count, total registers and approximate memory use.
 */
static int rbtree_show(struct seq_file *s, void *ignored)
{
	struct regmap *map = s->private;
	struct regcache_rbtree_ctx *rbtree_ctx = map->cache;
	struct regcache_rbtree_node *n;
	struct rb_node *node;
	unsigned int base, top;
	size_t mem_size;
	int nodes = 0;
	int registers = 0;
	int this_registers, average;

	/* Hold the regmap lock so the tree cannot change while we walk it. */
	map->lock(map->lock_arg);

	mem_size = sizeof(*rbtree_ctx);

	/* In-order walk: visits blocks in ascending register order. */
	for (node = rb_first(&rbtree_ctx->root); node != NULL;
	     node = rb_next(node)) {
		n = rb_entry(node, struct regcache_rbtree_node, node);
		/* Account the node itself, its value block and its bitmap. */
		mem_size += sizeof(*n);
		mem_size += (n->blklen * map->cache_word_size);
		mem_size += BITS_TO_LONGS(n->blklen) * sizeof(long);

		regcache_rbtree_get_base_top_reg(map, n, &base, &top);
		this_registers = ((top - base) / map->reg_stride) + 1;
		seq_printf(s, "%x-%x (%d)\n", base, top, this_registers);

		nodes++;
		registers += this_registers;
	}

	/* Guard against division by zero on an empty cache. */
	if (nodes)
		average = registers / nodes;
	else
		average = 0;

	seq_printf(s, "%d nodes, %d registers, average %d registers, used %zu bytes\n",
		   nodes, registers, average, mem_size);

	map->unlock(map->lock_arg);

	return 0;
}

/* Generates rbtree_fops wrapping rbtree_show() for debugfs. */
DEFINE_SHOW_ATTRIBUTE(rbtree);

/* Expose the dump above as a read-only "rbtree" file in the map's debugfs dir. */
static void rbtree_debugfs_init(struct regmap *map)
{
	debugfs_create_file("rbtree", 0400, map->debugfs, map, &rbtree_fops);
}
#endif
183*4882a593Smuzhiyun 
regcache_rbtree_init(struct regmap * map)184*4882a593Smuzhiyun static int regcache_rbtree_init(struct regmap *map)
185*4882a593Smuzhiyun {
186*4882a593Smuzhiyun 	struct regcache_rbtree_ctx *rbtree_ctx;
187*4882a593Smuzhiyun 	int i;
188*4882a593Smuzhiyun 	int ret;
189*4882a593Smuzhiyun 
190*4882a593Smuzhiyun 	map->cache = kmalloc(sizeof *rbtree_ctx, GFP_KERNEL);
191*4882a593Smuzhiyun 	if (!map->cache)
192*4882a593Smuzhiyun 		return -ENOMEM;
193*4882a593Smuzhiyun 
194*4882a593Smuzhiyun 	rbtree_ctx = map->cache;
195*4882a593Smuzhiyun 	rbtree_ctx->root = RB_ROOT;
196*4882a593Smuzhiyun 	rbtree_ctx->cached_rbnode = NULL;
197*4882a593Smuzhiyun 
198*4882a593Smuzhiyun 	for (i = 0; i < map->num_reg_defaults; i++) {
199*4882a593Smuzhiyun 		ret = regcache_rbtree_write(map,
200*4882a593Smuzhiyun 					    map->reg_defaults[i].reg,
201*4882a593Smuzhiyun 					    map->reg_defaults[i].def);
202*4882a593Smuzhiyun 		if (ret)
203*4882a593Smuzhiyun 			goto err;
204*4882a593Smuzhiyun 	}
205*4882a593Smuzhiyun 
206*4882a593Smuzhiyun 	return 0;
207*4882a593Smuzhiyun 
208*4882a593Smuzhiyun err:
209*4882a593Smuzhiyun 	regcache_rbtree_exit(map);
210*4882a593Smuzhiyun 	return ret;
211*4882a593Smuzhiyun }
212*4882a593Smuzhiyun 
regcache_rbtree_exit(struct regmap * map)213*4882a593Smuzhiyun static int regcache_rbtree_exit(struct regmap *map)
214*4882a593Smuzhiyun {
215*4882a593Smuzhiyun 	struct rb_node *next;
216*4882a593Smuzhiyun 	struct regcache_rbtree_ctx *rbtree_ctx;
217*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbtree_node;
218*4882a593Smuzhiyun 
219*4882a593Smuzhiyun 	/* if we've already been called then just return */
220*4882a593Smuzhiyun 	rbtree_ctx = map->cache;
221*4882a593Smuzhiyun 	if (!rbtree_ctx)
222*4882a593Smuzhiyun 		return 0;
223*4882a593Smuzhiyun 
224*4882a593Smuzhiyun 	/* free up the rbtree */
225*4882a593Smuzhiyun 	next = rb_first(&rbtree_ctx->root);
226*4882a593Smuzhiyun 	while (next) {
227*4882a593Smuzhiyun 		rbtree_node = rb_entry(next, struct regcache_rbtree_node, node);
228*4882a593Smuzhiyun 		next = rb_next(&rbtree_node->node);
229*4882a593Smuzhiyun 		rb_erase(&rbtree_node->node, &rbtree_ctx->root);
230*4882a593Smuzhiyun 		kfree(rbtree_node->cache_present);
231*4882a593Smuzhiyun 		kfree(rbtree_node->block);
232*4882a593Smuzhiyun 		kfree(rbtree_node);
233*4882a593Smuzhiyun 	}
234*4882a593Smuzhiyun 
235*4882a593Smuzhiyun 	/* release the resources */
236*4882a593Smuzhiyun 	kfree(map->cache);
237*4882a593Smuzhiyun 	map->cache = NULL;
238*4882a593Smuzhiyun 
239*4882a593Smuzhiyun 	return 0;
240*4882a593Smuzhiyun }
241*4882a593Smuzhiyun 
regcache_rbtree_read(struct regmap * map,unsigned int reg,unsigned int * value)242*4882a593Smuzhiyun static int regcache_rbtree_read(struct regmap *map,
243*4882a593Smuzhiyun 				unsigned int reg, unsigned int *value)
244*4882a593Smuzhiyun {
245*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode;
246*4882a593Smuzhiyun 	unsigned int reg_tmp;
247*4882a593Smuzhiyun 
248*4882a593Smuzhiyun 	rbnode = regcache_rbtree_lookup(map, reg);
249*4882a593Smuzhiyun 	if (rbnode) {
250*4882a593Smuzhiyun 		reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
251*4882a593Smuzhiyun 		if (!test_bit(reg_tmp, rbnode->cache_present))
252*4882a593Smuzhiyun 			return -ENOENT;
253*4882a593Smuzhiyun 		*value = regcache_rbtree_get_register(map, rbnode, reg_tmp);
254*4882a593Smuzhiyun 	} else {
255*4882a593Smuzhiyun 		return -ENOENT;
256*4882a593Smuzhiyun 	}
257*4882a593Smuzhiyun 
258*4882a593Smuzhiyun 	return 0;
259*4882a593Smuzhiyun }
260*4882a593Smuzhiyun 
261*4882a593Smuzhiyun 
/*
 * Grow @rbnode so that it covers [base_reg, top_reg] and store @value at
 * @reg within the enlarged block.  The caller guarantees the new range is
 * a superset of the node's current range, extended at one end only
 * (base_reg <= rbnode->base_reg and top_reg >= old top).
 *
 * Returns 0 on success or -ENOMEM.  NOTE(review): if the second krealloc
 * fails, rbnode->block has already been replaced by the larger buffer but
 * blklen still describes the old size — the node stays consistent, though
 * the extra space is simply unused.
 */
static int regcache_rbtree_insert_to_block(struct regmap *map,
					   struct regcache_rbtree_node *rbnode,
					   unsigned int base_reg,
					   unsigned int top_reg,
					   unsigned int reg,
					   unsigned int value)
{
	unsigned int blklen;
	unsigned int pos, offset;
	unsigned long *present;
	u8 *blk;

	/* New block length, target slot for @reg, and how far the old data
	 * must shift if the block is being extended downwards. */
	blklen = (top_reg - base_reg) / map->reg_stride + 1;
	pos = (reg - base_reg) / map->reg_stride;
	offset = (rbnode->base_reg - base_reg) / map->reg_stride;

	/* Enlarge the value storage; krealloc preserves existing contents. */
	blk = krealloc(rbnode->block,
		       blklen * map->cache_word_size,
		       GFP_KERNEL);
	if (!blk)
		return -ENOMEM;

	rbnode->block = blk;

	/* Grow the presence bitmap only when it needs more longs, and zero
	 * the newly added tail so no slot is spuriously marked present. */
	if (BITS_TO_LONGS(blklen) > BITS_TO_LONGS(rbnode->blklen)) {
		present = krealloc(rbnode->cache_present,
				   BITS_TO_LONGS(blklen) * sizeof(*present),
				   GFP_KERNEL);
		if (!present)
			return -ENOMEM;

		memset(present + BITS_TO_LONGS(rbnode->blklen), 0,
		       (BITS_TO_LONGS(blklen) - BITS_TO_LONGS(rbnode->blklen))
		       * sizeof(*present));
	} else {
		present = rbnode->cache_present;
	}

	/* insert the register value in the correct place in the rbnode block */
	/* pos == 0 means we are prepending below the old base: shift the
	 * existing values and presence bits up by @offset slots.  Extending
	 * at the top needs no move — old data already sits at the right
	 * indices. */
	if (pos == 0) {
		memmove(blk + offset * map->cache_word_size,
			blk, rbnode->blklen * map->cache_word_size);
		bitmap_shift_left(present, present, offset, blklen);
	}

	/* update the rbnode block, its size and the base register */
	rbnode->blklen = blklen;
	rbnode->base_reg = base_reg;
	rbnode->cache_present = present;

	regcache_rbtree_set_register(map, rbnode, pos, value);
	return 0;
}
315*4882a593Smuzhiyun 
316*4882a593Smuzhiyun static struct regcache_rbtree_node *
regcache_rbtree_node_alloc(struct regmap * map,unsigned int reg)317*4882a593Smuzhiyun regcache_rbtree_node_alloc(struct regmap *map, unsigned int reg)
318*4882a593Smuzhiyun {
319*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode;
320*4882a593Smuzhiyun 	const struct regmap_range *range;
321*4882a593Smuzhiyun 	int i;
322*4882a593Smuzhiyun 
323*4882a593Smuzhiyun 	rbnode = kzalloc(sizeof(*rbnode), GFP_KERNEL);
324*4882a593Smuzhiyun 	if (!rbnode)
325*4882a593Smuzhiyun 		return NULL;
326*4882a593Smuzhiyun 
327*4882a593Smuzhiyun 	/* If there is a read table then use it to guess at an allocation */
328*4882a593Smuzhiyun 	if (map->rd_table) {
329*4882a593Smuzhiyun 		for (i = 0; i < map->rd_table->n_yes_ranges; i++) {
330*4882a593Smuzhiyun 			if (regmap_reg_in_range(reg,
331*4882a593Smuzhiyun 						&map->rd_table->yes_ranges[i]))
332*4882a593Smuzhiyun 				break;
333*4882a593Smuzhiyun 		}
334*4882a593Smuzhiyun 
335*4882a593Smuzhiyun 		if (i != map->rd_table->n_yes_ranges) {
336*4882a593Smuzhiyun 			range = &map->rd_table->yes_ranges[i];
337*4882a593Smuzhiyun 			rbnode->blklen = (range->range_max - range->range_min) /
338*4882a593Smuzhiyun 				map->reg_stride	+ 1;
339*4882a593Smuzhiyun 			rbnode->base_reg = range->range_min;
340*4882a593Smuzhiyun 		}
341*4882a593Smuzhiyun 	}
342*4882a593Smuzhiyun 
343*4882a593Smuzhiyun 	if (!rbnode->blklen) {
344*4882a593Smuzhiyun 		rbnode->blklen = 1;
345*4882a593Smuzhiyun 		rbnode->base_reg = reg;
346*4882a593Smuzhiyun 	}
347*4882a593Smuzhiyun 
348*4882a593Smuzhiyun 	rbnode->block = kmalloc_array(rbnode->blklen, map->cache_word_size,
349*4882a593Smuzhiyun 				      GFP_KERNEL);
350*4882a593Smuzhiyun 	if (!rbnode->block)
351*4882a593Smuzhiyun 		goto err_free;
352*4882a593Smuzhiyun 
353*4882a593Smuzhiyun 	rbnode->cache_present = kcalloc(BITS_TO_LONGS(rbnode->blklen),
354*4882a593Smuzhiyun 					sizeof(*rbnode->cache_present),
355*4882a593Smuzhiyun 					GFP_KERNEL);
356*4882a593Smuzhiyun 	if (!rbnode->cache_present)
357*4882a593Smuzhiyun 		goto err_free_block;
358*4882a593Smuzhiyun 
359*4882a593Smuzhiyun 	return rbnode;
360*4882a593Smuzhiyun 
361*4882a593Smuzhiyun err_free_block:
362*4882a593Smuzhiyun 	kfree(rbnode->block);
363*4882a593Smuzhiyun err_free:
364*4882a593Smuzhiyun 	kfree(rbnode);
365*4882a593Smuzhiyun 	return NULL;
366*4882a593Smuzhiyun }
367*4882a593Smuzhiyun 
regcache_rbtree_write(struct regmap * map,unsigned int reg,unsigned int value)368*4882a593Smuzhiyun static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
369*4882a593Smuzhiyun 				 unsigned int value)
370*4882a593Smuzhiyun {
371*4882a593Smuzhiyun 	struct regcache_rbtree_ctx *rbtree_ctx;
372*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode, *rbnode_tmp;
373*4882a593Smuzhiyun 	struct rb_node *node;
374*4882a593Smuzhiyun 	unsigned int reg_tmp;
375*4882a593Smuzhiyun 	int ret;
376*4882a593Smuzhiyun 
377*4882a593Smuzhiyun 	rbtree_ctx = map->cache;
378*4882a593Smuzhiyun 
379*4882a593Smuzhiyun 	/* if we can't locate it in the cached rbnode we'll have
380*4882a593Smuzhiyun 	 * to traverse the rbtree looking for it.
381*4882a593Smuzhiyun 	 */
382*4882a593Smuzhiyun 	rbnode = regcache_rbtree_lookup(map, reg);
383*4882a593Smuzhiyun 	if (rbnode) {
384*4882a593Smuzhiyun 		reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
385*4882a593Smuzhiyun 		regcache_rbtree_set_register(map, rbnode, reg_tmp, value);
386*4882a593Smuzhiyun 	} else {
387*4882a593Smuzhiyun 		unsigned int base_reg, top_reg;
388*4882a593Smuzhiyun 		unsigned int new_base_reg, new_top_reg;
389*4882a593Smuzhiyun 		unsigned int min, max;
390*4882a593Smuzhiyun 		unsigned int max_dist;
391*4882a593Smuzhiyun 		unsigned int dist, best_dist = UINT_MAX;
392*4882a593Smuzhiyun 
393*4882a593Smuzhiyun 		max_dist = map->reg_stride * sizeof(*rbnode_tmp) /
394*4882a593Smuzhiyun 			map->cache_word_size;
395*4882a593Smuzhiyun 		if (reg < max_dist)
396*4882a593Smuzhiyun 			min = 0;
397*4882a593Smuzhiyun 		else
398*4882a593Smuzhiyun 			min = reg - max_dist;
399*4882a593Smuzhiyun 		max = reg + max_dist;
400*4882a593Smuzhiyun 
401*4882a593Smuzhiyun 		/* look for an adjacent register to the one we are about to add */
402*4882a593Smuzhiyun 		node = rbtree_ctx->root.rb_node;
403*4882a593Smuzhiyun 		while (node) {
404*4882a593Smuzhiyun 			rbnode_tmp = rb_entry(node, struct regcache_rbtree_node,
405*4882a593Smuzhiyun 					      node);
406*4882a593Smuzhiyun 
407*4882a593Smuzhiyun 			regcache_rbtree_get_base_top_reg(map, rbnode_tmp,
408*4882a593Smuzhiyun 				&base_reg, &top_reg);
409*4882a593Smuzhiyun 
410*4882a593Smuzhiyun 			if (base_reg <= max && top_reg >= min) {
411*4882a593Smuzhiyun 				if (reg < base_reg)
412*4882a593Smuzhiyun 					dist = base_reg - reg;
413*4882a593Smuzhiyun 				else if (reg > top_reg)
414*4882a593Smuzhiyun 					dist = reg - top_reg;
415*4882a593Smuzhiyun 				else
416*4882a593Smuzhiyun 					dist = 0;
417*4882a593Smuzhiyun 				if (dist < best_dist) {
418*4882a593Smuzhiyun 					rbnode = rbnode_tmp;
419*4882a593Smuzhiyun 					best_dist = dist;
420*4882a593Smuzhiyun 					new_base_reg = min(reg, base_reg);
421*4882a593Smuzhiyun 					new_top_reg = max(reg, top_reg);
422*4882a593Smuzhiyun 				}
423*4882a593Smuzhiyun 			}
424*4882a593Smuzhiyun 
425*4882a593Smuzhiyun 			/*
426*4882a593Smuzhiyun 			 * Keep looking, we want to choose the closest block,
427*4882a593Smuzhiyun 			 * otherwise we might end up creating overlapping
428*4882a593Smuzhiyun 			 * blocks, which breaks the rbtree.
429*4882a593Smuzhiyun 			 */
430*4882a593Smuzhiyun 			if (reg < base_reg)
431*4882a593Smuzhiyun 				node = node->rb_left;
432*4882a593Smuzhiyun 			else if (reg > top_reg)
433*4882a593Smuzhiyun 				node = node->rb_right;
434*4882a593Smuzhiyun 			else
435*4882a593Smuzhiyun 				break;
436*4882a593Smuzhiyun 		}
437*4882a593Smuzhiyun 
438*4882a593Smuzhiyun 		if (rbnode) {
439*4882a593Smuzhiyun 			ret = regcache_rbtree_insert_to_block(map, rbnode,
440*4882a593Smuzhiyun 							      new_base_reg,
441*4882a593Smuzhiyun 							      new_top_reg, reg,
442*4882a593Smuzhiyun 							      value);
443*4882a593Smuzhiyun 			if (ret)
444*4882a593Smuzhiyun 				return ret;
445*4882a593Smuzhiyun 			rbtree_ctx->cached_rbnode = rbnode;
446*4882a593Smuzhiyun 			return 0;
447*4882a593Smuzhiyun 		}
448*4882a593Smuzhiyun 
449*4882a593Smuzhiyun 		/* We did not manage to find a place to insert it in
450*4882a593Smuzhiyun 		 * an existing block so create a new rbnode.
451*4882a593Smuzhiyun 		 */
452*4882a593Smuzhiyun 		rbnode = regcache_rbtree_node_alloc(map, reg);
453*4882a593Smuzhiyun 		if (!rbnode)
454*4882a593Smuzhiyun 			return -ENOMEM;
455*4882a593Smuzhiyun 		regcache_rbtree_set_register(map, rbnode,
456*4882a593Smuzhiyun 					     reg - rbnode->base_reg, value);
457*4882a593Smuzhiyun 		regcache_rbtree_insert(map, &rbtree_ctx->root, rbnode);
458*4882a593Smuzhiyun 		rbtree_ctx->cached_rbnode = rbnode;
459*4882a593Smuzhiyun 	}
460*4882a593Smuzhiyun 
461*4882a593Smuzhiyun 	return 0;
462*4882a593Smuzhiyun }
463*4882a593Smuzhiyun 
regcache_rbtree_sync(struct regmap * map,unsigned int min,unsigned int max)464*4882a593Smuzhiyun static int regcache_rbtree_sync(struct regmap *map, unsigned int min,
465*4882a593Smuzhiyun 				unsigned int max)
466*4882a593Smuzhiyun {
467*4882a593Smuzhiyun 	struct regcache_rbtree_ctx *rbtree_ctx;
468*4882a593Smuzhiyun 	struct rb_node *node;
469*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode;
470*4882a593Smuzhiyun 	unsigned int base_reg, top_reg;
471*4882a593Smuzhiyun 	unsigned int start, end;
472*4882a593Smuzhiyun 	int ret;
473*4882a593Smuzhiyun 
474*4882a593Smuzhiyun 	rbtree_ctx = map->cache;
475*4882a593Smuzhiyun 	for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
476*4882a593Smuzhiyun 		rbnode = rb_entry(node, struct regcache_rbtree_node, node);
477*4882a593Smuzhiyun 
478*4882a593Smuzhiyun 		regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
479*4882a593Smuzhiyun 			&top_reg);
480*4882a593Smuzhiyun 		if (base_reg > max)
481*4882a593Smuzhiyun 			break;
482*4882a593Smuzhiyun 		if (top_reg < min)
483*4882a593Smuzhiyun 			continue;
484*4882a593Smuzhiyun 
485*4882a593Smuzhiyun 		if (min > base_reg)
486*4882a593Smuzhiyun 			start = (min - base_reg) / map->reg_stride;
487*4882a593Smuzhiyun 		else
488*4882a593Smuzhiyun 			start = 0;
489*4882a593Smuzhiyun 
490*4882a593Smuzhiyun 		if (max < top_reg)
491*4882a593Smuzhiyun 			end = (max - base_reg) / map->reg_stride + 1;
492*4882a593Smuzhiyun 		else
493*4882a593Smuzhiyun 			end = rbnode->blklen;
494*4882a593Smuzhiyun 
495*4882a593Smuzhiyun 		ret = regcache_sync_block(map, rbnode->block,
496*4882a593Smuzhiyun 					  rbnode->cache_present,
497*4882a593Smuzhiyun 					  rbnode->base_reg, start, end);
498*4882a593Smuzhiyun 		if (ret != 0)
499*4882a593Smuzhiyun 			return ret;
500*4882a593Smuzhiyun 	}
501*4882a593Smuzhiyun 
502*4882a593Smuzhiyun 	return regmap_async_complete(map);
503*4882a593Smuzhiyun }
504*4882a593Smuzhiyun 
regcache_rbtree_drop(struct regmap * map,unsigned int min,unsigned int max)505*4882a593Smuzhiyun static int regcache_rbtree_drop(struct regmap *map, unsigned int min,
506*4882a593Smuzhiyun 				unsigned int max)
507*4882a593Smuzhiyun {
508*4882a593Smuzhiyun 	struct regcache_rbtree_ctx *rbtree_ctx;
509*4882a593Smuzhiyun 	struct regcache_rbtree_node *rbnode;
510*4882a593Smuzhiyun 	struct rb_node *node;
511*4882a593Smuzhiyun 	unsigned int base_reg, top_reg;
512*4882a593Smuzhiyun 	unsigned int start, end;
513*4882a593Smuzhiyun 
514*4882a593Smuzhiyun 	rbtree_ctx = map->cache;
515*4882a593Smuzhiyun 	for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
516*4882a593Smuzhiyun 		rbnode = rb_entry(node, struct regcache_rbtree_node, node);
517*4882a593Smuzhiyun 
518*4882a593Smuzhiyun 		regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
519*4882a593Smuzhiyun 			&top_reg);
520*4882a593Smuzhiyun 		if (base_reg > max)
521*4882a593Smuzhiyun 			break;
522*4882a593Smuzhiyun 		if (top_reg < min)
523*4882a593Smuzhiyun 			continue;
524*4882a593Smuzhiyun 
525*4882a593Smuzhiyun 		if (min > base_reg)
526*4882a593Smuzhiyun 			start = (min - base_reg) / map->reg_stride;
527*4882a593Smuzhiyun 		else
528*4882a593Smuzhiyun 			start = 0;
529*4882a593Smuzhiyun 
530*4882a593Smuzhiyun 		if (max < top_reg)
531*4882a593Smuzhiyun 			end = (max - base_reg) / map->reg_stride + 1;
532*4882a593Smuzhiyun 		else
533*4882a593Smuzhiyun 			end = rbnode->blklen;
534*4882a593Smuzhiyun 
535*4882a593Smuzhiyun 		bitmap_clear(rbnode->cache_present, start, end - start);
536*4882a593Smuzhiyun 	}
537*4882a593Smuzhiyun 
538*4882a593Smuzhiyun 	return 0;
539*4882a593Smuzhiyun }
540*4882a593Smuzhiyun 
/* regcache backend descriptor registered with the core (see regcache.c);
 * selected when a driver asks for REGCACHE_RBTREE. */
struct regcache_ops regcache_rbtree_ops = {
	.type = REGCACHE_RBTREE,
	.name = "rbtree",
	.init = regcache_rbtree_init,
	.exit = regcache_rbtree_exit,
#ifdef CONFIG_DEBUG_FS
	.debugfs_init = rbtree_debugfs_init,
#endif
	.read = regcache_rbtree_read,
	.write = regcache_rbtree_write,
	.sync = regcache_rbtree_sync,
	.drop = regcache_rbtree_drop,
};
554