/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 */
#ifndef _LINUX_RADIX_TREE_H
#define _LINUX_RADIX_TREE_H

#include <linux/bitops.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/percpu.h>
#include <linux/preempt.h>
#include <linux/rcupdate.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include <linux/xarray.h>
#include <linux/local_lock.h>

/* Keep unconverted code working */
#define radix_tree_root		xarray
#define radix_tree_node		xa_node

struct radix_tree_preload {
	local_lock_t lock;
	unsigned nr;
	/* nodes->parent points to next preallocated node */
	struct radix_tree_node *nodes;
};
DECLARE_PER_CPU(struct radix_tree_preload, radix_tree_preloads);

/*
 * The bottom two bits of the slot determine how the remaining bits in the
 * slot are interpreted:
 *
 * 00 - data pointer
 * 10 - internal entry
 * x1 - value entry
 *
 * The internal entry may be a pointer to the next level in the tree, a
 * sibling entry, or an indicator that the entry in this slot has been moved
 * to another location in the tree and the lookup should be restarted.  While
 * NULL fits the 'data pointer' pattern, it means that there is no entry in
 * the tree for this index (no matter what level of the tree it is found at).
 * This means that storing a NULL entry in the tree is the same as deleting
 * the entry from the tree.
 */
#define RADIX_TREE_ENTRY_MASK		3UL
#define RADIX_TREE_INTERNAL_NODE	2UL

static inline bool radix_tree_is_internal_node(void *ptr)
{
	return ((unsigned long)ptr & RADIX_TREE_ENTRY_MASK) ==
				RADIX_TREE_INTERNAL_NODE;
}
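
/*
 * Illustrative sketch, not part of this header: given a sufficiently
 * aligned pointer 'node' (hypothetical), setting its low bits to the
 * internal-node pattern makes the test above succeed, while the plain
 * pointer fails it:
 *
 *	void *entry = (void *)((unsigned long)node | RADIX_TREE_INTERNAL_NODE);
 *
 *	radix_tree_is_internal_node(entry);	evaluates to true
 *	radix_tree_is_internal_node(node);	evaluates to false
 */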

/*** radix-tree API starts here ***/

#define RADIX_TREE_MAP_SHIFT	XA_CHUNK_SHIFT
#define RADIX_TREE_MAP_SIZE	(1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK	(RADIX_TREE_MAP_SIZE-1)

#define RADIX_TREE_MAX_TAGS	XA_MAX_MARKS
#define RADIX_TREE_TAG_LONGS	XA_MARK_LONGS

#define RADIX_TREE_INDEX_BITS  (8 /* CHAR_BIT */ * sizeof(unsigned long))
#define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \
					  RADIX_TREE_MAP_SHIFT))

/* The IDR tag is stored in the low bits of xa_flags */
#define ROOT_IS_IDR	((__force gfp_t)4)
/* The top bits of xa_flags are used to store the root tags */
#define ROOT_TAG_SHIFT	(__GFP_BITS_SHIFT)

#define RADIX_TREE_INIT(name, mask)	XARRAY_INIT(name, mask)

#define RADIX_TREE(name, mask) \
	struct radix_tree_root name = RADIX_TREE_INIT(name, mask)

#define INIT_RADIX_TREE(root, mask) xa_init_flags(root, mask)
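
/*
 * Illustrative sketch ('my_tree' and 'dyn_tree' are hypothetical names):
 * a tree can be defined statically or initialised at run time:
 *
 *	static RADIX_TREE(my_tree, GFP_KERNEL);
 *
 *	struct radix_tree_root dyn_tree;
 *	INIT_RADIX_TREE(&dyn_tree, GFP_KERNEL);
 */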
static inline bool radix_tree_empty(const struct radix_tree_root *root)
{
	return root->xa_head == NULL;
}

/**
 * struct radix_tree_iter - radix tree iterator state
 *
 * @index:	index of current slot
 * @next_index:	one beyond the last index for this chunk
 * @tags:	bit-mask for tag-iterating
 * @node:	node that contains current slot
 *
 * This radix tree iterator works in terms of "chunks" of slots.  A chunk is a
 * subinterval of slots contained within one radix tree leaf node.  It is
 * described by a pointer to its first slot and a struct radix_tree_iter
 * which holds the chunk's position in the tree and its size.  For tagged
 * iteration radix_tree_iter also holds the slots' bit-mask for one chosen
 * radix tree tag.
 */
struct radix_tree_iter {
	unsigned long	index;
	unsigned long	next_index;
	unsigned long	tags;
	struct radix_tree_node *node;
};

/**
 * Radix-tree synchronization
 *
 * The radix-tree API requires that users provide all synchronisation (with
 * specific exceptions, noted below).
 *
 * Synchronization of access to the data items being stored in the tree, and
 * management of their lifetimes must be completely managed by API users.
 *
 * For API usage, in general,
 * - any function _modifying_ the tree or tags (inserting or deleting
 *   items, setting or clearing tags) must exclude other modifications, and
 *   exclude any functions reading the tree.
 * - any function _reading_ the tree or tags (looking up items or tags,
 *   gang lookups) must exclude modifications to the tree, but may occur
 *   concurrently with other readers.
 *
 * The notable exceptions to this rule are the following functions:
 * __radix_tree_lookup
 * radix_tree_lookup
 * radix_tree_lookup_slot
 * radix_tree_tag_get
 * radix_tree_gang_lookup
 * radix_tree_gang_lookup_tag
 * radix_tree_gang_lookup_tag_slot
 * radix_tree_tagged
 *
 * The first 7 functions are able to be called locklessly, using RCU. The
 * caller must ensure calls to these functions are made within rcu_read_lock()
 * regions. Other readers (lock-free or otherwise) and modifications may be
 * running concurrently.
 *
 * It is still required that the caller manage the synchronization and
 * lifetimes of the items. So if RCU lock-free lookups are used, typically
 * this would mean that the items have their own locks, or are amenable to
 * lock-free access; and that the items are freed by RCU (or only freed after
 * having been deleted from the radix tree *and* a synchronize_rcu() grace
 * period).
 *
 * (Note, rcu_assign_pointer and rcu_dereference are not needed to control
 * access to data items when inserting into or looking up from the radix tree)
 *
 * Note that the value returned by radix_tree_tag_get() may not be relied upon
 * if only the RCU read lock is held.  Functions to set/clear tags and to
 * delete nodes running concurrently with it may affect its result such that
 * two consecutive reads in the same locked section may return different
 * values.  If reliability is required, modification functions must also be
 * excluded from concurrency.
 *
 * radix_tree_tagged is able to be called without locking or RCU.
 */
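
/*
 * Illustrative sketch of the rules above ('mytree', 'mylock' and 'item'
 * are hypothetical).  Lookups may run locklessly under RCU; modifications
 * take a lock that excludes all other modifications:
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&mytree, index);
 *	... use 'item'; its lifetime must be guaranteed by RCU freeing
 *	    or by its own reference count ...
 *	rcu_read_unlock();
 *
 *	spin_lock(&mylock);
 *	radix_tree_insert(&mytree, index, item);
 *	spin_unlock(&mylock);
 */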

/**
 * radix_tree_deref_slot - dereference a slot
 * @slot: slot pointer, returned by radix_tree_lookup_slot
 *
 * For use with radix_tree_lookup_slot().  Caller must hold tree at least read
 * locked across slot lookup and dereference. Not required if write lock is
 * held (ie. items cannot be concurrently inserted).
 *
 * radix_tree_deref_retry must be used to confirm validity of the pointer if
 * only the read lock is held.
 *
 * Return: entry stored in that slot.
 */
static inline void *radix_tree_deref_slot(void __rcu **slot)
{
	return rcu_dereference(*slot);
}

/**
 * radix_tree_deref_slot_protected - dereference a slot with tree lock held
 * @slot: slot pointer, returned by radix_tree_lookup_slot
 *
 * Similar to radix_tree_deref_slot.  The caller does not hold the RCU read
 * lock but it must hold the tree lock to prevent parallel updates.
 *
 * Return: entry stored in that slot.
 */
static inline void *radix_tree_deref_slot_protected(void __rcu **slot,
							spinlock_t *treelock)
{
	return rcu_dereference_protected(*slot, lockdep_is_held(treelock));
}

/**
 * radix_tree_deref_retry	- check radix_tree_deref_slot
 * @arg:	pointer returned by radix_tree_deref_slot
 * Returns:	0 if retry is not required, otherwise retry is required
 *
 * radix_tree_deref_retry must be used with radix_tree_deref_slot.
 */
static inline int radix_tree_deref_retry(void *arg)
{
	return unlikely(radix_tree_is_internal_node(arg));
}
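
/*
 * Illustrative sketch of the slot lookup/retry dance under RCU only
 * ('mytree' is a hypothetical tree):
 *
 *	rcu_read_lock();
 * restart:
 *	slot = radix_tree_lookup_slot(&mytree, index);
 *	entry = slot ? radix_tree_deref_slot(slot) : NULL;
 *	if (radix_tree_deref_retry(entry))
 *		goto restart;
 *	rcu_read_unlock();
 */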

/**
 * radix_tree_exception	- radix_tree_deref_slot returned either exception?
 * @arg:	value returned by radix_tree_deref_slot
 * Returns:	0 if well-aligned pointer, non-0 if either kind of exception.
 */
static inline int radix_tree_exception(void *arg)
{
	return unlikely((unsigned long)arg & RADIX_TREE_ENTRY_MASK);
}

int radix_tree_insert(struct radix_tree_root *, unsigned long index,
			void *);
void *__radix_tree_lookup(const struct radix_tree_root *, unsigned long index,
			  struct radix_tree_node **nodep, void __rcu ***slotp);
void *radix_tree_lookup(const struct radix_tree_root *, unsigned long);
void __rcu **radix_tree_lookup_slot(const struct radix_tree_root *,
					unsigned long index);
void __radix_tree_replace(struct radix_tree_root *, struct radix_tree_node *,
			  void __rcu **slot, void *entry);
void radix_tree_iter_replace(struct radix_tree_root *,
		const struct radix_tree_iter *, void __rcu **slot, void *entry);
void radix_tree_replace_slot(struct radix_tree_root *,
			     void __rcu **slot, void *entry);
void radix_tree_iter_delete(struct radix_tree_root *,
			struct radix_tree_iter *iter, void __rcu **slot);
void *radix_tree_delete_item(struct radix_tree_root *, unsigned long, void *);
void *radix_tree_delete(struct radix_tree_root *, unsigned long);
unsigned int radix_tree_gang_lookup(const struct radix_tree_root *,
			void **results, unsigned long first_index,
			unsigned int max_items);
int radix_tree_preload(gfp_t gfp_mask);
int radix_tree_maybe_preload(gfp_t gfp_mask);
void radix_tree_init(void);
void *radix_tree_tag_set(struct radix_tree_root *,
			unsigned long index, unsigned int tag);
void *radix_tree_tag_clear(struct radix_tree_root *,
			unsigned long index, unsigned int tag);
int radix_tree_tag_get(const struct radix_tree_root *,
			unsigned long index, unsigned int tag);
void radix_tree_iter_tag_clear(struct radix_tree_root *,
		const struct radix_tree_iter *iter, unsigned int tag);
unsigned int radix_tree_gang_lookup_tag(const struct radix_tree_root *,
		void **results, unsigned long first_index,
		unsigned int max_items, unsigned int tag);
unsigned int radix_tree_gang_lookup_tag_slot(const struct radix_tree_root *,
		void __rcu ***results, unsigned long first_index,
		unsigned int max_items, unsigned int tag);
int radix_tree_tagged(const struct radix_tree_root *, unsigned int tag);

static inline void radix_tree_preload_end(void)
{
	local_unlock(&radix_tree_preloads.lock);
}
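
/*
 * Illustrative sketch of the preload protocol ('mytree' and 'mylock' are
 * hypothetical).  On success radix_tree_preload() returns with the per-CPU
 * preload lock held, so it must be paired with radix_tree_preload_end();
 * on failure it returns -ENOMEM without the lock:
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&mylock);
 *	err = radix_tree_insert(&mytree, index, item);
 *	spin_unlock(&mylock);
 *	radix_tree_preload_end();
 */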

void __rcu **idr_get_free(struct radix_tree_root *root,
			      struct radix_tree_iter *iter, gfp_t gfp,
			      unsigned long max);

enum {
	RADIX_TREE_ITER_TAG_MASK = 0x0f,	/* tag index in lower nybble */
	RADIX_TREE_ITER_TAGGED   = 0x10,	/* lookup tagged slots */
	RADIX_TREE_ITER_CONTIG   = 0x20,	/* stop at first hole */
};

/**
 * radix_tree_iter_init - initialize radix tree iterator
 *
 * @iter:	pointer to iterator state
 * @start:	iteration starting index
 * Returns:	NULL
 */
static __always_inline void __rcu **
radix_tree_iter_init(struct radix_tree_iter *iter, unsigned long start)
{
	/*
	 * Leave iter->tags uninitialized. radix_tree_next_chunk() will fill it
	 * in the case of a successful tagged chunk lookup.  If the lookup was
	 * unsuccessful or non-tagged then nobody cares about ->tags.
	 *
	 * Set index to zero to bypass next_index overflow protection.
	 * See the comment in radix_tree_next_chunk() for details.
	 */
	iter->index = 0;
	iter->next_index = start;
	return NULL;
}

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to the chunk's first slot, or NULL if there are no
 *		more chunks left
 *
 * This function looks up the next chunk in the radix tree starting from
 * @iter->next_index.  It returns a pointer to the chunk's first slot.
 * It also fills @iter with data about the chunk: its position in the tree
 * (index), its end (next_index), and a bit mask for tagged iterating (tags).
 */
void __rcu **radix_tree_next_chunk(const struct radix_tree_root *,
			     struct radix_tree_iter *iter, unsigned flags);

/**
 * radix_tree_iter_lookup - look up an index in the radix tree
 * @root: radix tree root
 * @iter: iterator state
 * @index: key to look up
 *
 * If @index is present in the radix tree, this function returns the slot
 * containing it and updates @iter to describe the entry.  If @index is not
 * present, it returns NULL.
 */
static inline void __rcu **
radix_tree_iter_lookup(const struct radix_tree_root *root,
			struct radix_tree_iter *iter, unsigned long index)
{
	radix_tree_iter_init(iter, index);
	return radix_tree_next_chunk(root, iter, RADIX_TREE_ITER_CONTIG);
}

/**
 * radix_tree_iter_retry - retry this chunk of the iteration
 * @iter:	iterator state
 *
 * If we iterate over a tree protected only by the RCU lock, a race
 * against deletion or creation may result in seeing a slot for which
 * radix_tree_deref_retry() returns true.  If so, call this function
 * and continue the iteration.
 */
static inline __must_check
void __rcu **radix_tree_iter_retry(struct radix_tree_iter *iter)
{
	iter->next_index = iter->index;
	iter->tags = 0;
	return NULL;
}
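
/*
 * Illustrative sketch ('mytree' is hypothetical): restarting the current
 * chunk from within an RCU-only iteration when a moved slot is observed:
 *
 *	radix_tree_for_each_slot(slot, &mytree, &iter, 0) {
 *		void *entry = radix_tree_deref_slot(slot);
 *		if (radix_tree_deref_retry(entry)) {
 *			slot = radix_tree_iter_retry(&iter);
 *			continue;
 *		}
 *		... process 'entry' ...
 *	}
 */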

static inline unsigned long
__radix_tree_iter_add(struct radix_tree_iter *iter, unsigned long slots)
{
	return iter->index + slots;
}

/**
 * radix_tree_iter_resume - resume iterating when the chunk may be invalid
 * @slot: pointer to current slot
 * @iter: iterator state
 * Returns: New slot pointer
 *
 * If the iterator needs to release then reacquire a lock, the chunk may
 * have been invalidated by an insertion or deletion.  Call this function
 * before releasing the lock to continue the iteration from the next index.
 */
void __rcu **__must_check radix_tree_iter_resume(void __rcu **slot,
					struct radix_tree_iter *iter);
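
/*
 * Illustrative sketch ('mytree' and 'mylock' are hypothetical): dropping
 * the tree lock in the middle of an iteration, then continuing from the
 * next index:
 *
 *	radix_tree_for_each_slot(slot, &mytree, &iter, 0) {
 *		if (need_resched()) {
 *			slot = radix_tree_iter_resume(slot, &iter);
 *			spin_unlock(&mylock);
 *			cond_resched();
 *			spin_lock(&mylock);
 *		}
 *		... process the slot ...
 *	}
 */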

/**
 * radix_tree_chunk_size - get current chunk size
 *
 * @iter:	pointer to radix tree iterator
 * Returns:	current chunk size
 */
static __always_inline long
radix_tree_chunk_size(struct radix_tree_iter *iter)
{
	return iter->next_index - iter->index;
}

/**
 * radix_tree_next_slot - find next slot in chunk
 *
 * @slot:	pointer to current slot
 * @iter:	pointer to iterator state
 * @flags:	RADIX_TREE_ITER_*, should be constant
 * Returns:	pointer to next slot, or NULL if there are no more slots left
 *
 * This function updates @iter->index in the case of a successful lookup.
 * For tagged lookup it also eats @iter->tags.
 *
 * There are several cases where 'slot' can be passed in as NULL to this
 * function.  These cases result from the use of radix_tree_iter_resume() or
 * radix_tree_iter_retry().  In these cases we don't end up dereferencing
 * 'slot' because either:
 * a) we are doing tagged iteration and iter->tags has been set to 0, or
 * b) we are doing non-tagged iteration, and iter->index and iter->next_index
 *    have been set up so that radix_tree_chunk_size() returns 1 or 0.
 */
static __always_inline void __rcu **radix_tree_next_slot(void __rcu **slot,
				struct radix_tree_iter *iter, unsigned flags)
{
	if (flags & RADIX_TREE_ITER_TAGGED) {
		iter->tags >>= 1;
		if (unlikely(!iter->tags))
			return NULL;
		if (likely(iter->tags & 1ul)) {
			iter->index = __radix_tree_iter_add(iter, 1);
			slot++;
			goto found;
		}
		if (!(flags & RADIX_TREE_ITER_CONTIG)) {
			unsigned offset = __ffs(iter->tags);

			iter->tags >>= offset++;
			iter->index = __radix_tree_iter_add(iter, offset);
			slot += offset;
			goto found;
		}
	} else {
		long count = radix_tree_chunk_size(iter);

		while (--count > 0) {
			slot++;
			iter->index = __radix_tree_iter_add(iter, 1);

			if (likely(*slot))
				goto found;
			if (flags & RADIX_TREE_ITER_CONTIG) {
				/* forbid switching to the next chunk */
				iter->next_index = 0;
				break;
			}
		}
	}
	return NULL;

 found:
	return slot;
}

/**
 * radix_tree_for_each_slot - iterate over non-empty slots
 *
 * @slot:	the void** variable for pointer to slot
 * @root:	the struct radix_tree_root pointer
 * @iter:	the struct radix_tree_iter pointer
 * @start:	iteration starting index
 *
 * @slot points to radix tree slot, @iter->index contains its index.
 */
#define radix_tree_for_each_slot(slot, root, iter, start)		\
	for (slot = radix_tree_iter_init(iter, start) ;			\
	     slot || (slot = radix_tree_next_chunk(root, iter, 0)) ;	\
	     slot = radix_tree_next_slot(slot, iter, 0))
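
/*
 * Illustrative sketch ('mytree' is hypothetical): walk every present
 * entry starting from index 0:
 *
 *	struct radix_tree_iter iter;
 *	void __rcu **slot;
 *
 *	radix_tree_for_each_slot(slot, &mytree, &iter, 0)
 *		pr_info("index %lu: entry %p\n", iter.index,
 *			radix_tree_deref_slot(slot));
 */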

/**
 * radix_tree_for_each_tagged - iterate over tagged slots
 *
 * @slot:	the void** variable for pointer to slot
 * @root:	the struct radix_tree_root pointer
 * @iter:	the struct radix_tree_iter pointer
 * @start:	iteration starting index
 * @tag:	tag index
 *
 * @slot points to radix tree slot, @iter->index contains its index.
 */
#define radix_tree_for_each_tagged(slot, root, iter, start, tag)	\
	for (slot = radix_tree_iter_init(iter, start) ;			\
	     slot || (slot = radix_tree_next_chunk(root, iter,		\
			      RADIX_TREE_ITER_TAGGED | tag)) ;		\
	     slot = radix_tree_next_slot(slot, iter,			\
				RADIX_TREE_ITER_TAGGED | tag))
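
/*
 * Illustrative sketch ('mytree' is hypothetical, tag 0 is an arbitrary
 * choice): mark one entry, then visit only the marked entries:
 *
 *	radix_tree_tag_set(&mytree, index, 0);
 *
 *	radix_tree_for_each_tagged(slot, &mytree, &iter, 0, 0)
 *		... process radix_tree_deref_slot(slot) ...
 */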

#endif /* _LINUX_RADIX_TREE_H */