#include <linux/export.h>
#include <linux/generic-radix-tree.h>
#include <linux/gfp.h>
#include <linux/kmemleak.h>

#define GENRADIX_ARY		(PAGE_SIZE / sizeof(struct genradix_node *))
#define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)
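
/*
 * Worked example (an illustrative assumption, not required by the code): with
 * 4 KiB pages and 64-bit pointers, GENRADIX_ARY = 4096 / 8 = 512 and
 * GENRADIX_ARY_SHIFT = 9, i.e. each interior node fans out to 512 children.
 */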

struct genradix_node {
	union {
		/* Interior node: */
		struct genradix_node	*children[GENRADIX_ARY];

		/* Leaf: */
		u8			data[PAGE_SIZE];
	};
};

static inline int genradix_depth_shift(unsigned depth)
{
	return PAGE_SHIFT + GENRADIX_ARY_SHIFT * depth;
}

/*
 * Returns size (of data, in bytes) that a tree of a given depth holds:
 */
static inline size_t genradix_depth_size(unsigned depth)
{
	return 1UL << genradix_depth_shift(depth);
}
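
/*
 * Worked example (assuming 4 KiB pages and 64-bit pointers, for illustration
 * only): a depth 0 tree holds one leaf page (4 KiB), depth 1 holds
 * 512 * 4 KiB = 2 MiB, depth 2 holds 512 * 2 MiB = 1 GiB, and so on.
 */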

/* depth that's needed for a genradix that can address up to ULONG_MAX: */
#define GENRADIX_MAX_DEPTH	\
	DIV_ROUND_UP(BITS_PER_LONG - PAGE_SHIFT, GENRADIX_ARY_SHIFT)
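
/*
 * For example (again assuming 64-bit longs, 4 KiB pages and hence a fanout
 * shift of 9): GENRADIX_MAX_DEPTH = DIV_ROUND_UP(64 - 12, 9) = 6.
 */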

#define GENRADIX_DEPTH_MASK				\
	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))
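
/*
 * The root of the tree is a single tagged pointer: genradix_node allocations
 * are page aligned, so the low bits of the node pointer are free to hold the
 * current tree depth, which is what GENRADIX_DEPTH_MASK extracts.
 */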
static inline unsigned genradix_root_to_depth(struct genradix_root *r)
{
	return (unsigned long) r & GENRADIX_DEPTH_MASK;
}

static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
{
	return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
}

/*
 * Returns pointer to the specified byte @offset within @radix, or NULL if not
 * allocated
 */
void *__genradix_ptr(struct __genradix *radix, size_t offset)
{
	struct genradix_root *r = READ_ONCE(radix->root);
	struct genradix_node *n = genradix_root_to_node(r);
	unsigned level = genradix_root_to_depth(r);

	/* Offset beyond what the current depth can address: */
	if (ilog2(offset) >= genradix_depth_shift(level))
		return NULL;

	while (1) {
		if (!n)
			return NULL;
		if (!level)
			break;

		level--;

		n = n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;
	}

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr);
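
/*
 * Illustrative usage sketch, not part of this file: the GENRADIX(),
 * genradix_ptr() and genradix_ptr_alloc() wrappers are assumed to come from
 * <linux/generic-radix-tree.h>; @foos and @idx are hypothetical.
 *
 *	struct foo { int x; };
 *	static GENRADIX(struct foo) foos;
 *
 *	struct foo *f = genradix_ptr_alloc(&foos, idx, GFP_KERNEL);
 *	if (!f)
 *		return -ENOMEM;
 *	f->x = 1;
 *
 *	f = genradix_ptr(&foos, idx);	returns NULL if idx was never allocated
 */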

static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
{
	struct genradix_node *node;

	node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);

	/*
	 * We're using pages (not slab allocations) directly for kernel data
	 * structures, so we need to explicitly inform kmemleak of them in
	 * order to avoid false positive memory leak reports.
	 */
	kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);
	return node;
}

static inline void genradix_free_node(struct genradix_node *node)
{
	kmemleak_free(node);
	free_page((unsigned long)node);
}

/*
 * Returns pointer to the specified byte @offset within @radix, allocating it if
 * necessary - newly allocated slots are always zeroed out:
 */
void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
			   gfp_t gfp_mask)
{
	struct genradix_root *v = READ_ONCE(radix->root);
	struct genradix_node *n, *new_node = NULL;
	unsigned level;

	/* Increase tree depth if necessary: */
	while (1) {
		struct genradix_root *r = v, *new_root;

		n = genradix_root_to_node(r);
		level = genradix_root_to_depth(r);

		if (n && ilog2(offset) < genradix_depth_shift(level))
			break;

		if (!new_node) {
			new_node = genradix_alloc_node(gfp_mask);
			if (!new_node)
				return NULL;
		}

		/* New root points at the old root, with the depth tag bumped: */
		new_node->children[0] = n;
		new_root = ((struct genradix_root *)
			    ((unsigned long) new_node | (n ? level + 1 : 0)));

		if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
			v = new_root;
			new_node = NULL;
		}
	}

	while (level--) {
		struct genradix_node **p =
			&n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;

		n = READ_ONCE(*p);
		if (!n) {
			if (!new_node) {
				new_node = genradix_alloc_node(gfp_mask);
				if (!new_node)
					return NULL;
			}

			/* Install the new child, unless someone beat us to it: */
			if (!(n = cmpxchg_release(p, NULL, new_node)))
				swap(n, new_node);
		}
	}

	if (new_node)
		genradix_free_node(new_node);

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr_alloc);
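
/*
 * Note on concurrency (a reading of the code above, not a new guarantee):
 * both the depth-growing loop and the descent install new nodes with
 * cmpxchg_release(), so concurrent callers may race to populate the same
 * slot; the loser simply uses the winner's node and frees its spare
 * allocation before returning.
 */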

/*
 * Returns a pointer to the data at the iterator's current position, if
 * allocated - advancing @iter past unallocated subtrees as needed - or NULL
 * if the end of the tree is reached:
 */
void *__genradix_iter_peek(struct genradix_iter *iter,
			   struct __genradix *radix,
			   size_t objs_per_page)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;
restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n = genradix_root_to_node(r);
	level = genradix_root_to_depth(r);

	if (ilog2(iter->offset) >= genradix_depth_shift(level))
		return NULL;

	while (level) {
		level--;

		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		/* Skip missing subtrees, advancing the iterator as we go: */
		while (!n->children[i]) {
			i++;
			iter->offset = round_down(iter->offset +
					   genradix_depth_size(level),
					   genradix_depth_size(level));
			iter->pos = (iter->offset >> PAGE_SHIFT) *
				objs_per_page;
			if (i == GENRADIX_ARY)
				goto restart;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (PAGE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek);
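
/*
 * Illustrative iteration sketch, again assuming the genradix_iter and
 * genradix_for_each() wrappers from <linux/generic-radix-tree.h>; @foos and
 * struct foo are the hypothetical names from the earlier sketch:
 *
 *	struct genradix_iter iter;
 *	struct foo *f;
 *
 *	genradix_for_each(&foos, iter, f)
 *		pr_info("slot %zu: x=%d\n", iter.pos, f->x);
 */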

static void genradix_free_recurse(struct genradix_node *n, unsigned level)
{
	if (level) {
		unsigned i;

		for (i = 0; i < GENRADIX_ARY; i++)
			if (n->children[i])
				genradix_free_recurse(n->children[i], level - 1);
	}

	genradix_free_node(n);
}

int __genradix_prealloc(struct __genradix *radix, size_t size,
			gfp_t gfp_mask)
{
	size_t offset;

	for (offset = 0; offset < size; offset += PAGE_SIZE)
		if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
			return -ENOMEM;

	return 0;
}
EXPORT_SYMBOL(__genradix_prealloc);

void __genradix_free(struct __genradix *radix)
{
	struct genradix_root *r = xchg(&radix->root, NULL);

	genradix_free_recurse(genradix_root_to_node(r),
			      genradix_root_to_depth(r));
}
EXPORT_SYMBOL(__genradix_free);