#ifndef _LINUX_GENERIC_RADIX_TREE_H
#define _LINUX_GENERIC_RADIX_TREE_H

/**
 * DOC: Generic radix trees/sparse arrays
 *
 * Very simple and minimalistic, supporting arbitrary size entries up to
 * GENRADIX_NODE_SIZE.
 *
 * A genradix is defined with the type it will store, like so:
 *
 *      static GENRADIX(struct foo) foo_genradix;
 *
 * The main operations are:
 *
 * - genradix_init(radix) - initialize an empty genradix
 *
 * - genradix_free(radix) - free all memory owned by the genradix and
 *   reinitialize it
 *
 * - genradix_ptr(radix, idx) - gets a pointer to the entry at idx, returning
 *   NULL if that entry does not exist
 *
 * - genradix_ptr_alloc(radix, idx, gfp) - gets a pointer to an entry,
 *   allocating it if necessary
 *
 * - genradix_for_each(radix, iter, p) - iterate over each entry in a genradix
 *
 * The radix tree allocates one node of entries at a time (GENRADIX_NODE_SIZE
 * bytes), so entries may exist that were never explicitly allocated - they
 * will be initialized to all zeroes.
 *
 * Internally, a genradix is just a radix tree of nodes, and indexing works in
 * terms of byte offsets. The wrappers in this header file use sizeof on the
 * type the radix contains to calculate a byte offset from the index - see
 * __idx_to_offset.
 */
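/*
 * Usage sketch (illustrative only; struct foo, its bar member, foo_genradix
 * and the index i are hypothetical):
 *
 *      static GENRADIX(struct foo) foo_genradix;
 *
 *      struct foo *f = genradix_ptr_alloc(&foo_genradix, i, GFP_KERNEL);
 *      if (!f)
 *              return -ENOMEM;
 *      f->bar = 1;
 *
 *      genradix_free(&foo_genradix);
 */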

#include <asm/page.h>
#include <linux/bug.h>
#include <linux/limits.h>
#include <linux/log2.h>
#include <linux/math.h>
#include <linux/slab.h>
#include <linux/types.h>

struct genradix_root;

#define GENRADIX_NODE_SHIFT     9
#define GENRADIX_NODE_SIZE      (1U << GENRADIX_NODE_SHIFT)

#define GENRADIX_ARY            (GENRADIX_NODE_SIZE / sizeof(struct genradix_node *))
#define GENRADIX_ARY_SHIFT      ilog2(GENRADIX_ARY)

/* depth that's needed for a genradix that can address up to ULONG_MAX: */
#define GENRADIX_MAX_DEPTH                                      \
        DIV_ROUND_UP(BITS_PER_LONG - GENRADIX_NODE_SHIFT, GENRADIX_ARY_SHIFT)

#define GENRADIX_DEPTH_MASK                                     \
        ((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))
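/*
 * Illustration (assuming a 64-bit machine with 8-byte pointers; not used by
 * the code): GENRADIX_ARY = 512 / 8 = 64 and GENRADIX_ARY_SHIFT = 6, so
 * GENRADIX_MAX_DEPTH = DIV_ROUND_UP(64 - 9, 6) = 10 and
 * GENRADIX_DEPTH_MASK = roundup_pow_of_two(10 + 1) - 1 = 15. The depth is
 * packed into those low bits of the root pointer; genradix_root_to_depth()
 * and genradix_root_to_node() below unpack it again.
 */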

static inline int genradix_depth_shift(unsigned depth)
{
        return GENRADIX_NODE_SHIFT + GENRADIX_ARY_SHIFT * depth;
}

/*
 * Returns size (of data, in bytes) that a tree of a given depth holds:
 */
static inline size_t genradix_depth_size(unsigned depth)
{
        return 1UL << genradix_depth_shift(depth);
}

static inline unsigned genradix_root_to_depth(struct genradix_root *r)
{
        return (unsigned long) r & GENRADIX_DEPTH_MASK;
}

static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
{
        return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
}

struct __genradix {
        struct genradix_root *root;
};

struct genradix_node {
        union {
                /* Interior node: */
                struct genradix_node *children[GENRADIX_ARY];

                /* Leaf: */
                u8 data[GENRADIX_NODE_SIZE];
        };
};

static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
{
        return kzalloc(GENRADIX_NODE_SIZE, gfp_mask);
}

static inline void genradix_free_node(struct genradix_node *node)
{
        kfree(node);
}

/*
 * NOTE: currently, sizeof(_type) must not be larger than GENRADIX_NODE_SIZE:
 */

#define __GENRADIX_INITIALIZER                                  \
        {                                                       \
                .tree = {                                       \
                        .root = NULL,                           \
                }                                               \
        }

/*
 * We use a 0 size array to stash the type we're storing without taking any
 * space at runtime - then the various accessor macros can use typeof() to get
 * to it for casts/sizeof - we also force the alignment so that storing a type
 * with a ridiculous alignment doesn't blow up the alignment or size of the
 * genradix.
 */

#define GENRADIX(_type)                                         \
struct {                                                        \
        struct __genradix tree;                                 \
        _type type[0] __aligned(1);                             \
}

#define DEFINE_GENRADIX(_name, _type)                           \
        GENRADIX(_type) _name = __GENRADIX_INITIALIZER

/**
 * genradix_init - initialize a genradix
 * @_radix: genradix to initialize
 *
 * Does not fail
 */
#define genradix_init(_radix)                                   \
do {                                                            \
        *(_radix) = (typeof(*_radix)) __GENRADIX_INITIALIZER;   \
} while (0)

void __genradix_free(struct __genradix *);

/**
 * genradix_free - free all memory owned by a genradix
 * @_radix: the genradix to free
 *
 * After freeing, @_radix will be reinitialized and empty
 */
#define genradix_free(_radix)   __genradix_free(&(_radix)->tree)

static inline size_t __idx_to_offset(size_t idx, size_t obj_size)
{
        if (__builtin_constant_p(obj_size))
                BUILD_BUG_ON(obj_size > GENRADIX_NODE_SIZE);
        else
                BUG_ON(obj_size > GENRADIX_NODE_SIZE);

        if (!is_power_of_2(obj_size)) {
                size_t objs_per_page = GENRADIX_NODE_SIZE / obj_size;

                return (idx / objs_per_page) * GENRADIX_NODE_SIZE +
                        (idx % objs_per_page) * obj_size;
        } else {
                return idx * obj_size;
        }
}
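/*
 * Worked example (illustrative only): with GENRADIX_NODE_SIZE = 512 and a
 * 24-byte object, objs_per_page = 512 / 24 = 21, so objects never straddle a
 * node boundary and the final 8 bytes of each node's data area go unused.
 * For idx = 30: offset = (30 / 21) * 512 + (30 % 21) * 24 = 512 + 216 = 728.
 */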

#define __genradix_cast(_radix)         (typeof((_radix)->type[0]) *)
#define __genradix_obj_size(_radix)     sizeof((_radix)->type[0])
#define __genradix_objs_per_page(_radix)                        \
        (GENRADIX_NODE_SIZE / sizeof((_radix)->type[0]))
#define __genradix_page_remainder(_radix)                       \
        (GENRADIX_NODE_SIZE % sizeof((_radix)->type[0]))

#define __genradix_idx_to_offset(_radix, _idx)                  \
        __idx_to_offset(_idx, __genradix_obj_size(_radix))

static inline void *__genradix_ptr_inlined(struct __genradix *radix, size_t offset)
{
        struct genradix_root *r = READ_ONCE(radix->root);
        struct genradix_node *n = genradix_root_to_node(r);
        unsigned level = genradix_root_to_depth(r);
        unsigned shift = genradix_depth_shift(level);

        if (unlikely(ilog2(offset) >= genradix_depth_shift(level)))
                return NULL;

        while (n && shift > GENRADIX_NODE_SHIFT) {
                shift -= GENRADIX_ARY_SHIFT;
                n = n->children[offset >> shift];
                offset &= (1UL << shift) - 1;
        }

        return n ? &n->data[offset] : NULL;
}

#define genradix_ptr_inlined(_radix, _idx)                      \
        (__genradix_cast(_radix)                                \
         __genradix_ptr_inlined(&(_radix)->tree,                \
                        __genradix_idx_to_offset(_radix, _idx)))

void *__genradix_ptr(struct __genradix *, size_t);

/**
 * genradix_ptr - get a pointer to a genradix entry
 * @_radix: genradix to access
 * @_idx: index to fetch
 *
 * Returns a pointer to entry at @_idx, or NULL if that entry does not exist.
 */
#define genradix_ptr(_radix, _idx)                              \
        (__genradix_cast(_radix)                                \
         __genradix_ptr(&(_radix)->tree,                        \
                        __genradix_idx_to_offset(_radix, _idx)))
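/*
 * Example (illustrative; struct foo, foo_genradix and the index i are
 * hypothetical) - a plain lookup never allocates, so NULL means the entry
 * does not exist yet:
 *
 *      struct foo *f = genradix_ptr(&foo_genradix, i);
 *      if (!f)
 *              return -ENOENT;
 */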

void *__genradix_ptr_alloc(struct __genradix *, size_t,
                           struct genradix_node **, gfp_t);

#define genradix_ptr_alloc_inlined(_radix, _idx, _gfp)          \
        (__genradix_cast(_radix)                                \
         (__genradix_ptr_inlined(&(_radix)->tree,               \
                        __genradix_idx_to_offset(_radix, _idx)) ?: \
          __genradix_ptr_alloc(&(_radix)->tree,                 \
                        __genradix_idx_to_offset(_radix, _idx), \
                        NULL, _gfp)))

#define genradix_ptr_alloc_preallocated_inlined(_radix, _idx, _new_node, _gfp)\
        (__genradix_cast(_radix)                                \
         (__genradix_ptr_inlined(&(_radix)->tree,               \
                        __genradix_idx_to_offset(_radix, _idx)) ?: \
          __genradix_ptr_alloc(&(_radix)->tree,                 \
                        __genradix_idx_to_offset(_radix, _idx), \
                        _new_node, _gfp)))

/**
 * genradix_ptr_alloc - get a pointer to a genradix entry, allocating it
 *                      if necessary
 * @_radix: genradix to access
 * @_idx: index to fetch
 * @_gfp: gfp mask
 *
 * Returns a pointer to entry at @_idx, or NULL on allocation failure
 */
#define genradix_ptr_alloc(_radix, _idx, _gfp)                  \
        (__genradix_cast(_radix)                                \
         __genradix_ptr_alloc(&(_radix)->tree,                  \
                        __genradix_idx_to_offset(_radix, _idx), \
                        NULL, _gfp))

#define genradix_ptr_alloc_preallocated(_radix, _idx, _new_node, _gfp)\
        (__genradix_cast(_radix)                                \
         __genradix_ptr_alloc(&(_radix)->tree,                  \
                        __genradix_idx_to_offset(_radix, _idx), \
                        _new_node, _gfp))

struct genradix_iter {
        size_t offset;
        size_t pos;
};

/**
 * genradix_iter_init - initialize a genradix_iter
 * @_radix: genradix that will be iterated over
 * @_idx: index to start iterating from
 */
#define genradix_iter_init(_radix, _idx)                        \
        ((struct genradix_iter) {                               \
                .pos = (_idx),                                  \
                .offset = __genradix_idx_to_offset((_radix), (_idx)),\
        })

void *__genradix_iter_peek(struct genradix_iter *, struct __genradix *, size_t);

/**
 * genradix_iter_peek - get first entry at or above iterator's current
 *                      position
 * @_iter: a genradix_iter
 * @_radix: genradix being iterated over
 *
 * If no more entries exist at or above @_iter's current position, returns NULL
 */
#define genradix_iter_peek(_iter, _radix)                       \
        (__genradix_cast(_radix)                                \
         __genradix_iter_peek(_iter, &(_radix)->tree,           \
                              __genradix_objs_per_page(_radix)))

void *__genradix_iter_peek_prev(struct genradix_iter *, struct __genradix *,
                                size_t, size_t);

/**
 * genradix_iter_peek_prev - get first entry at or below iterator's current
 *                           position
 * @_iter: a genradix_iter
 * @_radix: genradix being iterated over
 *
 * If no more entries exist at or below @_iter's current position, returns NULL
 */
#define genradix_iter_peek_prev(_iter, _radix)                  \
        (__genradix_cast(_radix)                                \
         __genradix_iter_peek_prev(_iter, &(_radix)->tree,      \
                              __genradix_objs_per_page(_radix), \
                              __genradix_obj_size(_radix) +     \
                              __genradix_page_remainder(_radix)))

static inline void __genradix_iter_advance(struct genradix_iter *iter,
                                           size_t obj_size)
{
        if (iter->offset + obj_size < iter->offset) {
                iter->offset = SIZE_MAX;
                iter->pos    = SIZE_MAX;
                return;
        }

        iter->offset += obj_size;

        if (!is_power_of_2(obj_size) &&
            (iter->offset & (GENRADIX_NODE_SIZE - 1)) + obj_size > GENRADIX_NODE_SIZE)
                iter->offset = round_up(iter->offset, GENRADIX_NODE_SIZE);

        iter->pos++;
}
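/*
 * Illustration (not used by the code): with a 24-byte object, advancing past
 * the object at offset 480 first yields offset 504; since 504 + 24 would
 * cross the 512-byte node boundary, the offset is rounded up to 512, the
 * first object of the next node, skipping the node's unused remainder.
 */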

#define genradix_iter_advance(_iter, _radix)                    \
        __genradix_iter_advance(_iter, __genradix_obj_size(_radix))

static inline void __genradix_iter_rewind(struct genradix_iter *iter,
                                          size_t obj_size)
{
        if (iter->offset == 0 ||
            iter->offset == SIZE_MAX) {
                iter->offset = SIZE_MAX;
                return;
        }

        if ((iter->offset & (GENRADIX_NODE_SIZE - 1)) == 0)
                iter->offset -= GENRADIX_NODE_SIZE % obj_size;

        iter->offset -= obj_size;
        iter->pos--;
}

#define genradix_iter_rewind(_iter, _radix)                     \
        __genradix_iter_rewind(_iter, __genradix_obj_size(_radix))

#define genradix_for_each_from(_radix, _iter, _p, _start)       \
        for (_iter = genradix_iter_init(_radix, _start);        \
             (_p = genradix_iter_peek(&_iter, _radix)) != NULL; \
             genradix_iter_advance(&_iter, _radix))

/**
 * genradix_for_each - iterate over each entry in a genradix
 * @_radix: genradix to iterate over
 * @_iter: a genradix_iter to track current position
 * @_p: pointer to genradix entry type
 *
 * On every iteration, @_p will point to the current entry, and @_iter.pos
 * will be the current entry's index.
 */
#define genradix_for_each(_radix, _iter, _p)                    \
        genradix_for_each_from(_radix, _iter, _p, 0)
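/*
 * Example (illustrative; struct foo and foo_genradix are hypothetical):
 *
 *      struct genradix_iter iter;
 *      struct foo *f;
 *
 *      genradix_for_each(&foo_genradix, iter, f)
 *              pr_info("entry %zu\n", iter.pos);
 */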

#define genradix_last_pos(_radix)                               \
        (SIZE_MAX / GENRADIX_NODE_SIZE * __genradix_objs_per_page(_radix) - 1)

/**
 * genradix_for_each_reverse - iterate over each entry in a genradix, in
 *                             reverse order
 * @_radix: genradix to iterate over
 * @_iter: a genradix_iter to track current position
 * @_p: pointer to genradix entry type
 *
 * On every iteration, @_p will point to the current entry, and @_iter.pos
 * will be the current entry's index.
 */
#define genradix_for_each_reverse(_radix, _iter, _p)            \
        for (_iter = genradix_iter_init(_radix, genradix_last_pos(_radix));\
             (_p = genradix_iter_peek_prev(&_iter, _radix)) != NULL;\
             genradix_iter_rewind(&_iter, _radix))

int __genradix_prealloc(struct __genradix *, size_t, gfp_t);

/**
 * genradix_prealloc - preallocate entries in a generic radix tree
 * @_radix: genradix to preallocate
 * @_nr: number of entries to preallocate
 * @_gfp: gfp mask
 *
 * Returns 0 on success, -ENOMEM on failure
 */
#define genradix_prealloc(_radix, _nr, _gfp)                    \
        __genradix_prealloc(&(_radix)->tree,                    \
                        __genradix_idx_to_offset(_radix, _nr + 1),\
                        _gfp)
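/*
 * Example (illustrative; foo_genradix and nr are hypothetical):
 *
 *      if (genradix_prealloc(&foo_genradix, nr, GFP_KERNEL))
 *              return -ENOMEM;
 */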


#endif /* _LINUX_GENERIC_RADIX_TREE_H */