
#include <linux/export.h>
#include <linux/generic-radix-tree.h>
#include <linux/gfp.h>
#include <linux/kmemleak.h>

#define GENRADIX_ARY            (PAGE_SIZE / sizeof(struct genradix_node *))
#define GENRADIX_ARY_SHIFT      ilog2(GENRADIX_ARY)

struct genradix_node {
        union {
                /* Interior node: */
                struct genradix_node    *children[GENRADIX_ARY];

                /* Leaf: */
                u8                      data[PAGE_SIZE];
        };
};

static inline int genradix_depth_shift(unsigned depth)
{
        return PAGE_SHIFT + GENRADIX_ARY_SHIFT * depth;
}

/*
 * Returns size (of data, in bytes) that a tree of a given depth holds:
 */
static inline size_t genradix_depth_size(unsigned depth)
{
        return 1UL << genradix_depth_shift(depth);
}
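
/*
 * For example, with 4 KiB pages and 8-byte node pointers (values assumed here
 * for illustration; the real ones follow PAGE_SIZE and sizeof(void *)),
 * GENRADIX_ARY is 512 and GENRADIX_ARY_SHIFT is 9: a depth-0 tree holds 4 KiB
 * of data, depth 1 holds 2 MiB, and depth 2 holds 1 GiB.
 */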

/* depth that's needed for a genradix that can address up to ULONG_MAX: */
#define GENRADIX_MAX_DEPTH      \
        DIV_ROUND_UP(BITS_PER_LONG - PAGE_SHIFT, GENRADIX_ARY_SHIFT)

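/*
 * The tree depth is packed into the low bits of the root pointer: nodes are
 * page-aligned, so those bits are otherwise zero, and the depth itself is
 * small (with 4 KiB pages on 64-bit, GENRADIX_MAX_DEPTH is 6, so the mask
 * below is 7 and fits comfortably below PAGE_SHIFT).
 */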
#define GENRADIX_DEPTH_MASK                             \
        ((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))

static inline unsigned genradix_root_to_depth(struct genradix_root *r)
{
        return (unsigned long) r & GENRADIX_DEPTH_MASK;
}

static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
{
        return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
}

/*
 * Returns pointer to the specified byte @offset within @radix, or NULL if not
 * allocated
 */
void *__genradix_ptr(struct __genradix *radix, size_t offset)
{
        struct genradix_root *r = READ_ONCE(radix->root);
        struct genradix_node *n = genradix_root_to_node(r);
        unsigned level          = genradix_root_to_depth(r);

        if (ilog2(offset) >= genradix_depth_shift(level))
                return NULL;

        while (1) {
                if (!n)
                        return NULL;
                if (!level)
                        break;

                level--;

                n = n->children[offset >> genradix_depth_shift(level)];
                offset &= genradix_depth_size(level) - 1;
        }

        return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr);
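
/*
 * Callers normally go through the typed wrappers in
 * <linux/generic-radix-tree.h> rather than calling __genradix_ptr() directly.
 * A minimal sketch (struct foo and foo_array are made-up names for
 * illustration):
 *
 *	static DEFINE_GENRADIX(foo_array, struct foo);
 *
 *	struct foo *f = genradix_ptr(&foo_array, idx);
 *
 * f is NULL if entry idx has never been allocated.
 */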

static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
{
        struct genradix_node *node;

        node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);

        /*
         * We're using pages (not slab allocations) directly for kernel data
         * structures, so we need to explicitly inform kmemleak of them in order
         * to avoid false positive memory leak reports.
         */
        kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);
        return node;
}

static inline void genradix_free_node(struct genradix_node *node)
{
        kmemleak_free(node);
        free_page((unsigned long)node);
}

/*
 * Returns pointer to the specified byte @offset within @radix, allocating it if
 * necessary - newly allocated slots are always zeroed out:
 */
void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
                           gfp_t gfp_mask)
{
        struct genradix_root *v = READ_ONCE(radix->root);
        struct genradix_node *n, *new_node = NULL;
        unsigned level;

        /* Increase tree depth if necessary: */
        while (1) {
                struct genradix_root *r = v, *new_root;

                n       = genradix_root_to_node(r);
                level   = genradix_root_to_depth(r);

                if (n && ilog2(offset) < genradix_depth_shift(level))
                        break;

                if (!new_node) {
                        new_node = genradix_alloc_node(gfp_mask);
                        if (!new_node)
                                return NULL;
                }

                new_node->children[0] = n;
                new_root = ((struct genradix_root *)
                            ((unsigned long) new_node | (n ? level + 1 : 0)));

                if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
                        v = new_root;
                        new_node = NULL;
                }
        }

        while (level--) {
                struct genradix_node **p =
                        &n->children[offset >> genradix_depth_shift(level)];
                offset &= genradix_depth_size(level) - 1;

                n = READ_ONCE(*p);
                if (!n) {
                        if (!new_node) {
                                new_node = genradix_alloc_node(gfp_mask);
                                if (!new_node)
                                        return NULL;
                        }

                        if (!(n = cmpxchg_release(p, NULL, new_node)))
                                swap(n, new_node);
                }
        }

        if (new_node)
                genradix_free_node(new_node);

        return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr_alloc);
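
/*
 * As with __genradix_ptr(), this is usually reached via the typed wrapper;
 * a minimal sketch (struct foo and foo_array are made-up names):
 *
 *	struct foo *f = genradix_ptr_alloc(&foo_array, idx, GFP_KERNEL);
 *	if (!f)
 *		return -ENOMEM;
 *
 * The returned entry is zeroed if it was newly allocated.
 */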

void *__genradix_iter_peek(struct genradix_iter *iter,
                           struct __genradix *radix,
                           size_t objs_per_page)
{
        struct genradix_root *r;
        struct genradix_node *n;
        unsigned level, i;
restart:
        r = READ_ONCE(radix->root);
        if (!r)
                return NULL;

        n       = genradix_root_to_node(r);
        level   = genradix_root_to_depth(r);

        if (ilog2(iter->offset) >= genradix_depth_shift(level))
                return NULL;

        while (level) {
                level--;

                i = (iter->offset >> genradix_depth_shift(level)) &
                        (GENRADIX_ARY - 1);

                while (!n->children[i]) {
                        i++;
                        iter->offset = round_down(iter->offset +
                                           genradix_depth_size(level),
                                           genradix_depth_size(level));
                        iter->pos = (iter->offset >> PAGE_SHIFT) *
                                objs_per_page;
                        if (i == GENRADIX_ARY)
                                goto restart;
                }

                n = n->children[i];
        }

        return &n->data[iter->offset & (PAGE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek);
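
/*
 * Iteration is normally driven by genradix_for_each() from
 * <linux/generic-radix-tree.h>, which wraps this with a struct genradix_iter;
 * a minimal sketch (struct foo, foo_array and do_something() are stand-ins):
 *
 *	struct genradix_iter iter;
 *	struct foo *f;
 *
 *	genradix_for_each(&foo_array, iter, f)
 *		do_something(f, iter.pos);
 *
 * Subtrees that were never allocated are skipped over by the peek above.
 */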

static void genradix_free_recurse(struct genradix_node *n, unsigned level)
{
        if (level) {
                unsigned i;

                for (i = 0; i < GENRADIX_ARY; i++)
                        if (n->children[i])
                                genradix_free_recurse(n->children[i], level - 1);
        }

        genradix_free_node(n);
}

int __genradix_prealloc(struct __genradix *radix, size_t size,
                        gfp_t gfp_mask)
{
        size_t offset;

        for (offset = 0; offset < size; offset += PAGE_SIZE)
                if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
                        return -ENOMEM;

        return 0;
}
EXPORT_SYMBOL(__genradix_prealloc);

void __genradix_free(struct __genradix *radix)
{
        struct genradix_root *r = xchg(&radix->root, NULL);

        genradix_free_recurse(genradix_root_to_node(r),
                              genradix_root_to_depth(r));
}
EXPORT_SYMBOL(__genradix_free);
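
/*
 * A typical lifecycle, using the wrappers from <linux/generic-radix-tree.h>
 * (struct foo and foo_array are made-up names):
 *
 *	DEFINE_GENRADIX(foo_array, struct foo);
 *
 *	genradix_init(&foo_array);
 *	if (genradix_prealloc(&foo_array, nr, GFP_KERNEL))
 *		return -ENOMEM;
 *	...
 *	genradix_free(&foo_array);
 *
 * genradix_prealloc() takes a number of elements, which the wrapper converts
 * to a byte size before calling __genradix_prealloc() above.
 */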