/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 * Copyright (C) 2016 Intel, Matthew Wilcox
 * Copyright (C) 2016 Intel, Ross Zwisler
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/cpu.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/preempt.h>		/* in_interrupt() */


/* Number of nodes in fully populated tree of given height */
static unsigned long height_to_maxnodes[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
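
/*
 * Illustrative arithmetic (not part of the original file; values assume a
 * typical 64-bit configuration with RADIX_TREE_MAP_SHIFT == 6):
 * RADIX_TREE_MAX_PATH is DIV_ROUND_UP(64, 6) == 11, so the preload size
 * above works out to 11 * 2 - 1 == 21 nodes per CPU.
 */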

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	unsigned nr;
	/* nodes->private_data points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline struct radix_tree_node *entry_to_node(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE);
}

static inline void *node_to_entry(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
}

#define RADIX_TREE_RETRY	node_to_entry(NULL)
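
/*
 * Quick sketch of the pointer tagging above (illustrative, not from the
 * original file): node_to_entry() sets the RADIX_TREE_INTERNAL_NODE bit and
 * entry_to_node() clears it again, so for any node pointer p:
 *
 *	void *entry = node_to_entry(p);
 *	BUG_ON(!radix_tree_is_internal_node(entry));
 *	BUG_ON(entry_to_node(entry) != p);
 *
 * RADIX_TREE_RETRY is therefore the tagged NULL pointer: internal, but
 * pointing at no node, which lookups treat as "restart from the root".
 */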

#ifdef CONFIG_RADIX_TREE_MULTIORDER
/* Sibling slots point directly to another slot in the same node */
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	void **ptr = node;
	return (parent->slots <= ptr) &&
			(ptr < parent->slots + RADIX_TREE_MAP_SIZE);
}
#else
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	return false;
}
#endif

static inline unsigned long get_slot_offset(struct radix_tree_node *parent,
						 void **slot)
{
	return slot - parent->slots;
}

static unsigned int radix_tree_descend(struct radix_tree_node *parent,
			struct radix_tree_node **nodep, unsigned long index)
{
	unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;
	void **entry = rcu_dereference_raw(parent->slots[offset]);

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	if (radix_tree_is_internal_node(entry)) {
		if (is_sibling_entry(parent, entry)) {
			void **sibentry = (void **) entry_to_node(entry);
			offset = get_slot_offset(parent, sibentry);
			entry = rcu_dereference_raw(*sibentry);
		}
	}
#endif

	*nodep = (void *)entry;
	return offset;
}
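
/*
 * Worked example for radix_tree_descend() (illustrative, assuming
 * RADIX_TREE_MAP_SHIFT == 6): for a parent with shift == 12 and
 * index == 0x12345, the offset is (0x12345 >> 12) & 0x3f == 18; within
 * that node's range, slot 18 covers indices 0x12000-0x12fff.
 */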

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force int)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

static inline unsigned root_tags_get(struct radix_tree_root *root)
{
	return (__force unsigned)root->gfp_mask >> __GFP_BITS_SHIFT;
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	unsigned idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @node: the node whose tag bitmap to search
 * @tag: the tag index to search
 * @offset: the bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from RADIX_TREE_MAP_SIZE up to
 * roundup(RADIX_TREE_MAP_SIZE, BITS_PER_LONG) must be zero.
 * Returns the next bit offset, or RADIX_TREE_MAP_SIZE if nothing is found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(struct radix_tree_node *node, unsigned int tag,
			 unsigned long offset)
{
	const unsigned long *addr = node->tags[tag];

	if (offset < RADIX_TREE_MAP_SIZE) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < RADIX_TREE_MAP_SIZE) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return RADIX_TREE_MAP_SIZE;
}

static unsigned int iter_offset(const struct radix_tree_iter *iter)
{
	return (iter->index >> iter_shift(iter)) & RADIX_TREE_MAP_MASK;
}

/*
 * The maximum index which can be stored in a radix tree
 */
static inline unsigned long shift_maxindex(unsigned int shift)
{
	return (RADIX_TREE_MAP_SIZE << shift) - 1;
}

static inline unsigned long node_maxindex(struct radix_tree_node *node)
{
	return shift_maxindex(node->shift);
}

#ifndef __KERNEL__
static void dump_node(struct radix_tree_node *node, unsigned long index)
{
	unsigned long i;

	pr_debug("radix node: %p offset %d indices %lu-%lu parent %p tags %lx %lx %lx shift %d count %d exceptional %d\n",
		node, node->offset, index, index | node_maxindex(node),
		node->parent,
		node->tags[0][0], node->tags[1][0], node->tags[2][0],
		node->shift, node->count, node->exceptional);

	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		unsigned long first = index | (i << node->shift);
		unsigned long last = first | ((1UL << node->shift) - 1);
		void *entry = node->slots[i];
		if (!entry)
			continue;
		if (entry == RADIX_TREE_RETRY) {
			pr_debug("radix retry offset %ld indices %lu-%lu parent %p\n",
					i, first, last, node);
		} else if (!radix_tree_is_internal_node(entry)) {
			pr_debug("radix entry %p offset %ld indices %lu-%lu parent %p\n",
					entry, i, first, last, node);
		} else if (is_sibling_entry(node, entry)) {
			pr_debug("radix sblng %p offset %ld indices %lu-%lu parent %p val %p\n",
					entry, i, first, last, node,
					*(void **)entry_to_node(entry));
		} else {
			dump_node(entry_to_node(entry), first);
		}
	}
}

/* For debug */
static void radix_tree_dump(struct radix_tree_root *root)
{
	pr_debug("radix root: %p rnode %p tags %x\n",
			root, root->rnode,
			root->gfp_mask >> __GFP_BITS_SHIFT);
	if (!radix_tree_is_internal_node(root->rnode))
		return;
	dump_node(entry_to_node(root->rnode), 0);
}
#endif

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root,
			struct radix_tree_node *parent,
			unsigned int shift, unsigned int offset,
			unsigned int count, unsigned int exceptional)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading during an interrupt anyway as all the allocations have
	 * to be atomic. So just do normal allocation when in interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Even if the caller has preloaded, try to allocate from the
		 * cache first for the new node to get accounted to the memory
		 * cgroup.
		 */
		ret = kmem_cache_alloc(radix_tree_node_cachep,
				       gfp_mask | __GFP_NOWARN);
		if (ret)
			goto out;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->private_data;
			ret->private_data = NULL;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
		goto out;
	}
	ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
out:
	BUG_ON(radix_tree_is_internal_node(ret));
	if (ret) {
		ret->parent = parent;
		ret->shift = shift;
		ret->offset = offset;
		ret->count = count;
		ret->exceptional = exceptional;
	}
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);

	/*
	 * Must only free zeroed nodes into the slab.  We can be left with
	 * non-NULL entries by radix_tree_free_nodes, so clear the entries
	 * and tags here.
	 */
	memset(node->slots, 0, sizeof(node->slots));
	memset(node->tags, 0, sizeof(node->tags));
	INIT_LIST_HEAD(&node->private_list);

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask, unsigned nr)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	/*
	 * Nodes preloaded by one cgroup can be used by another cgroup, so
	 * they should never be accounted to any particular memory cgroup.
	 */
	gfp_mask &= ~__GFP_ACCOUNT;
	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < nr) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < nr) {
			node->private_data = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
}
EXPORT_SYMBOL(radix_tree_preload);
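
/*
 * Typical preload usage (an illustrative sketch, not from this file; the
 * lock name is assumed).  Preemption stays disabled from a successful
 * preload until radix_tree_preload_end():
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&mylock);
 *	error = radix_tree_insert(&mytree, index, item);
 *	spin_unlock(&mylock);
 *	radix_tree_preload_end();
 */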

/*
 * The same as radix_tree_preload(), except we don't guarantee preloading
 * happens.  We do it if we decide it helps.  On success, return zero with
 * preemption disabled.  On error, return -ENOMEM with preemption not
 * disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);
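
/*
 * Hedged sketch of where radix_tree_maybe_preload() fits (illustrative):
 * callers whose gfp mask may or may not allow blocking can call it
 * unconditionally, since preemption is disabled on return either way and
 * the epilogue is identical:
 *
 *	err = radix_tree_maybe_preload(gfp);
 *	if (err)
 *		return err;
 *	...insert under the caller's lock...
 *	radix_tree_preload_end();
 */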

#ifdef CONFIG_RADIX_TREE_MULTIORDER
/*
 * Preload with enough objects to ensure that we can split a single entry
 * of order @old_order into many entries of size @new_order
 */
int radix_tree_split_preload(unsigned int old_order, unsigned int new_order,
							gfp_t gfp_mask)
{
	unsigned top = 1 << (old_order % RADIX_TREE_MAP_SHIFT);
	unsigned layers = (old_order / RADIX_TREE_MAP_SHIFT) -
				(new_order / RADIX_TREE_MAP_SHIFT);
	unsigned nr = 0;

	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	BUG_ON(new_order >= old_order);

	while (layers--)
		nr = nr * RADIX_TREE_MAP_SIZE + 1;
	return __radix_tree_preload(gfp_mask, top * nr);
}
#endif

/*
 * The same as radix_tree_maybe_preload(), but preload the number of nodes
 * required to insert (1 << order) contiguous naturally-aligned elements.
 */
int radix_tree_maybe_preload_order(gfp_t gfp_mask, int order)
{
	unsigned long nr_subtrees;
	int nr_nodes, subtree_height;

	/* Preloading doesn't help anything with this gfp mask, skip it */
	if (!gfpflags_allow_blocking(gfp_mask)) {
		preempt_disable();
		return 0;
	}

	/*
	 * Calculate number and height of fully populated subtrees it takes to
	 * store (1 << order) elements.
	 */
	nr_subtrees = 1 << order;
	for (subtree_height = 0; nr_subtrees > RADIX_TREE_MAP_SIZE;
			subtree_height++)
		nr_subtrees >>= RADIX_TREE_MAP_SHIFT;

	/*
	 * The worst case is a zero height tree with a single item at index 0
	 * and then inserting items starting at ULONG_MAX - (1 << order).
	 *
	 * This requires RADIX_TREE_MAX_PATH nodes to build the branch from
	 * the root to the 0-index item.
	 */
	nr_nodes = RADIX_TREE_MAX_PATH;

	/* Plus branch to fully populated subtrees. */
	nr_nodes += RADIX_TREE_MAX_PATH - subtree_height;

	/* Root node is shared. */
	nr_nodes--;

	/* Plus nodes required to build subtrees. */
	nr_nodes += nr_subtrees * height_to_maxnodes[subtree_height];

	return __radix_tree_preload(gfp_mask, nr_nodes);
}
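
/*
 * Worked example (illustrative, assuming 64-bit and RADIX_TREE_MAP_SHIFT
 * == 6): for order == 9, nr_subtrees starts at 512; one division by 64
 * leaves 8 subtrees of height 1, so nr_nodes is
 * 11 + (11 - 1) - 1 + 8 * height_to_maxnodes[1] == 20 + 8 == 28 nodes.
 */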

static unsigned radix_tree_load_root(struct radix_tree_root *root,
		struct radix_tree_node **nodep, unsigned long *maxindex)
{
	struct radix_tree_node *node = rcu_dereference_raw(root->rnode);

	*nodep = node;

	if (likely(radix_tree_is_internal_node(node))) {
		node = entry_to_node(node);
		*maxindex = node_maxindex(node);
		return node->shift + RADIX_TREE_MAP_SHIFT;
	}

	*maxindex = 0;
	return 0;
}

/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root,
				unsigned long index, unsigned int shift)
{
	struct radix_tree_node *slot;
	unsigned int maxshift;
	int tag;

	/* Figure out what the shift should be.  */
	maxshift = shift;
	while (index > shift_maxindex(maxshift))
		maxshift += RADIX_TREE_MAP_SHIFT;

	slot = root->rnode;
	if (!slot)
		goto out;

	do {
		struct radix_tree_node *node = radix_tree_node_alloc(root,
							NULL, shift, 0, 1, 0);
		if (!node)
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		BUG_ON(shift > BITS_PER_LONG);
		if (radix_tree_is_internal_node(slot)) {
			entry_to_node(slot)->parent = node;
		} else if (radix_tree_exceptional_entry(slot)) {
			/* Moving an exceptional root->rnode to a node */
			node->exceptional = 1;
		}
		node->slots[0] = slot;
		slot = node_to_entry(node);
		rcu_assign_pointer(root->rnode, slot);
		shift += RADIX_TREE_MAP_SHIFT;
	} while (shift <= maxshift);
out:
	return maxshift + RADIX_TREE_MAP_SHIFT;
}

/**
 *	radix_tree_shrink    -    shrink radix tree to minimum height
 *	@root:		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root,
				     radix_tree_update_node_t update_node,
				     void *private)
{
	for (;;) {
		struct radix_tree_node *node = root->rnode;
		struct radix_tree_node *child;

		if (!radix_tree_is_internal_node(node))
			break;
		node = entry_to_node(node);

		/*
		 * If the candidate node has more than one child, or its
		 * child is not at the leftmost slot, or the child is a
		 * multiorder entry, we cannot shrink.
		 */
		if (node->count != 1)
			break;
		child = node->slots[0];
		if (!child)
			break;
		if (!radix_tree_is_internal_node(child) && node->shift)
			break;

		if (radix_tree_is_internal_node(child))
			entry_to_node(child)->parent = NULL;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (node->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		root->rnode = child;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If the item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page has 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing a direct root node with an indirect
		 * pointer also results in a stale slot). So tag the slot as
		 * indirect to force callers to retry.
		 */
		node->count = 0;
		if (!radix_tree_is_internal_node(child)) {
			node->slots[0] = RADIX_TREE_RETRY;
			if (update_node)
				update_node(node, private);
		}

		WARN_ON_ONCE(!list_empty(&node->private_list));
		radix_tree_node_free(node);
	}
}

static void delete_node(struct radix_tree_root *root,
			struct radix_tree_node *node,
			radix_tree_update_node_t update_node, void *private)
{
	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == entry_to_node(root->rnode))
				radix_tree_shrink(root, update_node, private);
			return;
		}

		parent = node->parent;
		if (parent) {
			parent->slots[node->offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->rnode = NULL;
		}

		WARN_ON_ONCE(!list_empty(&node->private_list));
		radix_tree_node_free(node);

		node = parent;
	} while (node);
}

/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		index occupies 2^order aligned slots
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			unsigned order, struct radix_tree_node **nodep,
			void ***slotp)
{
	struct radix_tree_node *node = NULL, *child;
	void **slot = (void **)&root->rnode;
	unsigned long maxindex;
	unsigned int shift, offset = 0;
	unsigned long max = index | ((1UL << order) - 1);

	shift = radix_tree_load_root(root, &child, &maxindex);

	/* Make sure the tree is high enough.  */
	if (order > 0 && max == ((1UL << order) - 1))
		max++;
	if (max > maxindex) {
		int error = radix_tree_extend(root, max, shift);
		if (error < 0)
			return error;
		shift = error;
		child = root->rnode;
	}

	while (shift > order) {
		shift -= RADIX_TREE_MAP_SHIFT;
		if (child == NULL) {
			/* Have to add a child node.  */
			child = radix_tree_node_alloc(root, node, shift,
							offset, 0, 0);
			if (!child)
				return -ENOMEM;
			rcu_assign_pointer(*slot, node_to_entry(child));
			if (node)
				node->count++;
		} else if (!radix_tree_is_internal_node(child))
			break;

		/* Go a level down */
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);
		slot = &node->slots[offset];
	}

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = slot;
	return 0;
}
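
/*
 * Sketch of direct slot creation (illustrative; essentially what
 * __radix_tree_insert() below composes out of __radix_tree_create() and
 * insert_entries()):
 *
 *	err = __radix_tree_create(&tree, index, 0, &node, &slot);
 *	if (!err)
 *		__radix_tree_replace(&tree, node, slot, item, NULL, NULL);
 */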

#ifdef CONFIG_RADIX_TREE_MULTIORDER
/*
 * Free any nodes below this node.  The tree is presumed to not need
 * shrinking, and any user data in the tree is presumed to not need a
 * destructor called on it.  If we need to add a destructor, we can
 * add that functionality later.  Note that we may not clear tags or
 * slots from the tree as an RCU walker may still have a pointer into
 * this subtree.  We could replace the entries with RADIX_TREE_RETRY,
 * but we'll still have to clear those in rcu_free.
 */
static void radix_tree_free_nodes(struct radix_tree_node *node)
{
	unsigned offset = 0;
	struct radix_tree_node *child = entry_to_node(node);

	for (;;) {
		void *entry = child->slots[offset];
		if (radix_tree_is_internal_node(entry) &&
					!is_sibling_entry(child, entry)) {
			child = entry_to_node(entry);
			offset = 0;
			continue;
		}
		offset++;
		while (offset == RADIX_TREE_MAP_SIZE) {
			struct radix_tree_node *old = child;
			offset = child->offset + 1;
			child = child->parent;
			WARN_ON_ONCE(!list_empty(&old->private_list));
			radix_tree_node_free(old);
			if (old == entry_to_node(node))
				return;
		}
	}
}

static inline int insert_entries(struct radix_tree_node *node, void **slot,
				void *item, unsigned order, bool replace)
{
	struct radix_tree_node *child;
	unsigned i, n, tag, offset, tags = 0;

	if (node) {
		if (order > node->shift)
			n = 1 << (order - node->shift);
		else
			n = 1;
		offset = get_slot_offset(node, slot);
	} else {
		n = 1;
		offset = 0;
	}

	if (n > 1) {
		offset = offset & ~(n - 1);
		slot = &node->slots[offset];
	}
	child = node_to_entry(slot);

	for (i = 0; i < n; i++) {
		if (slot[i]) {
			if (replace) {
				node->count--;
				for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
					if (tag_get(node, tag, offset + i))
						tags |= 1 << tag;
			} else
				return -EEXIST;
		}
	}

	for (i = 0; i < n; i++) {
		struct radix_tree_node *old = slot[i];
		if (i) {
			rcu_assign_pointer(slot[i], child);
			for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
				if (tags & (1 << tag))
					tag_clear(node, tag, offset + i);
		} else {
			rcu_assign_pointer(slot[i], item);
			for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
				if (tags & (1 << tag))
					tag_set(node, tag, offset);
		}
		if (radix_tree_is_internal_node(old) &&
					!is_sibling_entry(node, old) &&
					(old != RADIX_TREE_RETRY))
			radix_tree_free_nodes(old);
		if (radix_tree_exceptional_entry(old))
			node->exceptional--;
	}
	if (node) {
		node->count += n;
		if (radix_tree_exceptional_entry(item))
			node->exceptional += n;
	}
	return n;
}
#else
static inline int insert_entries(struct radix_tree_node *node, void **slot,
				void *item, unsigned order, bool replace)
{
	if (*slot)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);
	if (node) {
		node->count++;
		if (radix_tree_exceptional_entry(item))
			node->exceptional++;
	}
	return 1;
}
#endif

/**
 *	__radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		key covers the 2^order indices around index
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_internal_node(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (error)
		return error;

	error = insert_entries(node, slot, item, order, false);
	if (error < 0)
		return error;

	if (node) {
		unsigned offset = get_slot_offset(node, slot);
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
		BUG_ON(tag_get(node, 2, offset));
	} else {
		BUG_ON(root_tags_get(root));
	}

	return 0;
}
EXPORT_SYMBOL(__radix_tree_insert);
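
/*
 * Minimal insertion sketch (illustrative, assumed names;
 * radix_tree_insert() is the order-0 wrapper declared in
 * <linux/radix-tree.h>):
 *
 *	RADIX_TREE(tree, GFP_KERNEL);
 *
 *	error = radix_tree_insert(&tree, 42, my_item);
 *	if (error == -EEXIST)
 *		...slot 42 was already occupied...
 */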

/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	void **slot;

 restart:
	parent = NULL;
	slot = (void **)&root->rnode;
	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		if (node == RADIX_TREE_RETRY)
			goto restart;
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		slot = parent->slots + offset;
	}

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}

/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
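
/*
 * RCU lookup sketch (illustrative): a slot returned here must be
 * dereferenced with radix_tree_deref_slot(), and the result checked with
 * radix_tree_deref_retry() in case a concurrent shrink left
 * RADIX_TREE_RETRY behind:
 *
 *	rcu_read_lock();
 *	slot = radix_tree_lookup_slot(&tree, index);
 *	if (slot) {
 *		item = radix_tree_deref_slot(slot);
 *		if (radix_tree_deref_retry(item))
 *			...restart the lookup...
 *	}
 *	rcu_read_unlock();
 */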

/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);

static inline int slot_count(struct radix_tree_node *node,
						void **slot)
{
	int n = 1;
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	void *ptr = node_to_entry(slot);
	unsigned offset = get_slot_offset(node, slot);
	int i;

	for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
		if (node->slots[offset + i] != ptr)
			break;
		n++;
	}
#endif
	return n;
}

static void replace_slot(struct radix_tree_root *root,
			 struct radix_tree_node *node,
			 void **slot, void *item,
			 bool warn_typeswitch)
{
	void *old = rcu_dereference_raw(*slot);
	int count, exceptional;

	WARN_ON_ONCE(radix_tree_is_internal_node(item));

	count = !!item - !!old;
	exceptional = !!radix_tree_exceptional_entry(item) -
		      !!radix_tree_exceptional_entry(old);

	WARN_ON_ONCE(warn_typeswitch && (count || exceptional));

	if (node) {
		node->count += count;
		if (exceptional) {
			exceptional *= slot_count(node, slot);
			node->exceptional += exceptional;
		}
	}

	rcu_assign_pointer(*slot, item);
}

static inline void delete_sibling_entries(struct radix_tree_node *node,
						void **slot)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	bool exceptional = radix_tree_exceptional_entry(*slot);
	void *ptr = node_to_entry(slot);
	unsigned offset = get_slot_offset(node, slot);
	int i;

	for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
		if (node->slots[offset + i] != ptr)
			break;
		node->slots[offset + i] = NULL;
		node->count--;
		if (exceptional)
			node->exceptional--;
	}
#endif
}

/**
 * __radix_tree_replace		- replace item in a slot
 * @root:		radix tree root
 * @node:		pointer to tree node
 * @slot:		pointer to slot in @node
 * @item:		new item to store in the slot.
 * @update_node:	callback for changing leaf nodes
 * @private:		private data to pass to @update_node
 *
 * For use with __radix_tree_lookup().  Caller must hold tree write locked
 * across slot lookup and replacement.
 */
void __radix_tree_replace(struct radix_tree_root *root,
			  struct radix_tree_node *node,
			  void **slot, void *item,
			  radix_tree_update_node_t update_node, void *private)
{
	if (!item)
		delete_sibling_entries(node, slot);
	/*
	 * This function supports replacing exceptional entries and
	 * deleting entries, but that needs accounting against the
	 * node unless the slot is root->rnode.
	 */
	replace_slot(root, node, slot, item,
		     !node && slot != (void **)&root->rnode);

	if (!node)
		return;

	if (update_node)
		update_node(node, private);

	delete_node(root, node, update_node, private);
}

/**
 * radix_tree_replace_slot	- replace item in a slot
 * @root:	radix tree root
 * @slot:	pointer to slot
 * @item:	new item to store in the slot.
 *
 * For use with radix_tree_lookup_slot(), radix_tree_gang_lookup_slot(),
 * radix_tree_gang_lookup_tag_slot().  Caller must hold tree write locked
 * across slot lookup and replacement.
 *
 * NOTE: This cannot be used to switch between non-entries (empty slots),
 * regular entries, and exceptional entries, as that requires accounting
 * inside the radix tree node. When switching from one type of entry or
 * deleting, use __radix_tree_lookup() and __radix_tree_replace() or
 * radix_tree_iter_replace().
 */
void radix_tree_replace_slot(struct radix_tree_root *root,
			     void **slot, void *item)
{
	replace_slot(root, NULL, slot, item, true);
}
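
/*
 * Update-if-exists sketch (illustrative, lock name assumed; both entries
 * must be of the same type, per the NOTE above):
 *
 *	spin_lock(&mylock);
 *	slot = radix_tree_lookup_slot(&tree, index);
 *	if (slot)
 *		radix_tree_replace_slot(&tree, slot, new_item);
 *	spin_unlock(&mylock);
 */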

/**
 * radix_tree_iter_replace - replace item in a slot
 * @root:	radix tree root
 * @slot:	pointer to slot
 * @item:	new item to store in the slot.
 *
 * For use with radix_tree_split() and radix_tree_for_each_slot().
 * Caller must hold tree write locked across split and replacement.
 */
void radix_tree_iter_replace(struct radix_tree_root *root,
		const struct radix_tree_iter *iter, void **slot, void *item)
{
	__radix_tree_replace(root, iter->node, slot, item, NULL, NULL);
}

#ifdef CONFIG_RADIX_TREE_MULTIORDER
/**
 * radix_tree_join - replace multiple entries with one multiorder entry
 * @root: radix tree root
 * @index: an index inside the new entry
 * @order: order of the new entry
 * @item: new entry
 *
 * Call this function to replace several entries with one larger entry.
 * The existing entries are presumed to not need freeing as a result of
 * this call.
 *
 * The replacement entry will have all the tags set on it that were set
 * on any of the entries it is replacing.
 */
int radix_tree_join(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_internal_node(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (!error)
		error = insert_entries(node, slot, item, order, true);
	if (error > 0)
		error = 0;

	return error;
}

/**
 * radix_tree_split - Split an entry into smaller entries
 * @root: radix tree root
 * @index: An index within the large entry
 * @order: Order of new entries
 *
 * Call this function as the first step in replacing a multiorder entry
 * with several entries of lower order.  After this function returns,
 * loop over the relevant portion of the tree using radix_tree_for_each_slot()
 * and call radix_tree_iter_replace() to set up each new entry.
 *
 * The tags from this entry are replicated to all the new entries.
 *
 * The radix tree should be locked against modification during the entire
 * replacement operation.  Lock-free lookups will see RADIX_TREE_RETRY which
 * should prompt RCU walkers to restart the lookup from the root.
 */
int radix_tree_split(struct radix_tree_root *root, unsigned long index,
				unsigned order)
{
	struct radix_tree_node *parent, *node, *child;
	void **slot;
	unsigned int offset, end;
	unsigned n, tag, tags = 0;

	if (!__radix_tree_lookup(root, index, &parent, &slot))
		return -ENOENT;
	if (!parent)
		return -ENOENT;

	offset = get_slot_offset(parent, slot);

	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
		if (tag_get(parent, tag, offset))
			tags |= 1 << tag;

	for (end = offset + 1; end < RADIX_TREE_MAP_SIZE; end++) {
		if (!is_sibling_entry(parent, parent->slots[end]))
			break;
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			if (tags & (1 << tag))
				tag_set(parent, tag, end);
		/* rcu_assign_pointer ensures tags are set before RETRY */
		rcu_assign_pointer(parent->slots[end], RADIX_TREE_RETRY);
	}
	rcu_assign_pointer(parent->slots[offset], RADIX_TREE_RETRY);
	parent->exceptional -= (end - offset);

	if (order == parent->shift)
		return 0;
	if (order > parent->shift) {
		while (offset < end)
			offset += insert_entries(parent, &parent->slots[offset],
					RADIX_TREE_RETRY, order, true);
		return 0;
	}

	node = parent;

	for (;;) {
		if (node->shift > order) {
			child = radix_tree_node_alloc(root, node,
					node->shift - RADIX_TREE_MAP_SHIFT,
					offset, 0, 0);
			if (!child)
				goto nomem;
			if (node != parent) {
				node->count++;
				node->slots[offset] = node_to_entry(child);
				for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
					if (tags & (1 << tag))
						tag_set(node, tag, offset);
			}

			node = child;
			offset = 0;
			continue;
		}

		n = insert_entries(node, &node->slots[offset],
					RADIX_TREE_RETRY, order, false);
		BUG_ON(n > RADIX_TREE_MAP_SIZE);

		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			if (tags & (1 << tag))
				tag_set(node, tag, offset);
		offset += n;

		while (offset == RADIX_TREE_MAP_SIZE) {
			if (node == parent)
				break;
			offset = node->offset;
			child = node;
			node = node->parent;
			rcu_assign_pointer(node->slots[offset],
						node_to_entry(child));
			offset++;
		}
		if ((node == parent) && (offset == end))
			return 0;
	}

 nomem:
	/* Shouldn't happen; did user forget to preload? */
	/* TODO: free all the allocated nodes */
	WARN_ON(1);
	return -ENOMEM;
}
#endif

/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree, from the root
 *	all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	radix_tree_load_root(root, &node, &maxindex);
	BUG_ON(index > maxindex);

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		BUG_ON(!node);

		if (!tag_get(parent, tag, offset))
			tag_set(parent, tag, offset);
	}

	/* set the root's tag bit */
	if (!root_tag_get(root, tag))
		root_tag_set(root, tag);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_set);
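
/*
 * Tagging sketch (illustrative; PAGECACHE_TAG_DIRTY is just an example of
 * a tag index below RADIX_TREE_MAX_TAGS):
 *
 *	radix_tree_tag_set(&tree, index, PAGECACHE_TAG_DIRTY);
 *	...
 *	if (radix_tree_tag_get(&tree, index, PAGECACHE_TAG_DIRTY))
 *		...the item at index is dirty...
 */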

static void node_tag_clear(struct radix_tree_root *root,
				struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	while (node) {
		if (!tag_get(node, tag, offset))
			return;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			return;

		offset = node->offset;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);
}

static void node_tag_set(struct radix_tree_root *root,
				struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	while (node) {
		if (tag_get(node, tag, offset))
			return;
		tag_set(node, tag, offset);
		offset = node->offset;
		node = node->parent;
	}

	if (!root_tag_get(root, tag))
		root_tag_set(root, tag);
}

/**
 * radix_tree_iter_tag_set - set a tag on the current iterator entry
 * @root:	radix tree root
 * @iter:	iterator state
 * @tag:	tag to set
 */
void radix_tree_iter_tag_set(struct radix_tree_root *root,
			const struct radix_tree_iter *iter, unsigned int tag)
{
	node_tag_set(root, iter->node, tag, iter_offset(iter));
}

/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If this causes
 *	the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	int uninitialized_var(offset);

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	parent = NULL;

	while (radix_tree_is_internal_node(node)) {
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
	}

	if (node)
		node_tag_clear(root, parent, tag, offset);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	if (!root_tag_get(root, tag))
		return 0;

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return 0;
	if (node == NULL)
		return 0;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);

		if (!node)
			return 0;
		if (!tag_get(parent, tag, offset))
			return 0;
		if (node == RADIX_TREE_RETRY)
			break;
	}

	return 1;
}
EXPORT_SYMBOL(radix_tree_tag_get);

static inline void __set_iter_shift(struct radix_tree_iter *iter,
					unsigned int shift)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	iter->shift = shift;
#endif
}

/* Construct iter->tags bit-mask from node->tags[tag] array */
static void set_iter_tags(struct radix_tree_iter *iter,
				struct radix_tree_node *node, unsigned offset,
				unsigned tag)
{
	unsigned tag_long = offset / BITS_PER_LONG;
	unsigned tag_bit  = offset % BITS_PER_LONG;

	iter->tags = node->tags[tag][tag_long] >> tag_bit;

	/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
	if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
		/* Pick tags from next element */
		if (tag_bit)
			iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
		/* Clip chunk size, here only BITS_PER_LONG tags */
		iter->next_index = __radix_tree_iter_add(iter, BITS_PER_LONG);
	}
}

#ifdef CONFIG_RADIX_TREE_MULTIORDER
static void **skip_siblings(struct radix_tree_node **nodep,
			void **slot, struct radix_tree_iter *iter)
{
	void *sib = node_to_entry(slot - 1);

	while (iter->index < iter->next_index) {
		*nodep = rcu_dereference_raw(*slot);
		if (*nodep && *nodep != sib)
			return slot;
		slot++;
		iter->index = __radix_tree_iter_add(iter, 1);
		iter->tags >>= 1;
	}

	*nodep = NULL;
	return NULL;
}

void ** __radix_tree_next_slot(void **slot, struct radix_tree_iter *iter,
					unsigned flags)
{
	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *node = rcu_dereference_raw(*slot);

	slot = skip_siblings(&node, slot, iter);

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;
		unsigned long next_index;

		if (node == RADIX_TREE_RETRY)
			return slot;
		node = entry_to_node(node);
		iter->node = node;
		iter->shift = node->shift;

		if (flags & RADIX_TREE_ITER_TAGGED) {
			offset = radix_tree_find_next_bit(node, tag, 0);
			if (offset == RADIX_TREE_MAP_SIZE)
				return NULL;
			slot = &node->slots[offset];
			iter->index = __radix_tree_iter_add(iter, offset);
			set_iter_tags(iter, node, offset, tag);
			node = rcu_dereference_raw(*slot);
		} else {
			offset = 0;
			slot = &node->slots[0];
			for (;;) {
				node = rcu_dereference_raw(*slot);
				if (node)
					break;
				slot++;
				offset++;
				if (offset == RADIX_TREE_MAP_SIZE)
					return NULL;
			}
			iter->index = __radix_tree_iter_add(iter, offset);
		}
		if ((flags & RADIX_TREE_ITER_CONTIG) && (offset > 0))
			goto none;
		next_index = (iter->index | shift_maxindex(iter->shift)) + 1;
		if (next_index < iter->next_index)
			iter->next_index = next_index;
	}

	return slot;
 none:
	iter->next_index = 0;
	return NULL;
}
EXPORT_SYMBOL(__radix_tree_next_slot);
#else
static void **skip_siblings(struct radix_tree_node **nodep,
			void **slot, struct radix_tree_iter *iter)
{
	return slot;
}
#endif

void **radix_tree_iter_resume(void **slot, struct radix_tree_iter *iter)
{
	struct radix_tree_node *node;

	slot++;
	iter->index = __radix_tree_iter_add(iter, 1);
	node = rcu_dereference_raw(*slot);
	skip_siblings(&node, slot, iter);
	iter->next_index = iter->index;
	iter->tags = 0;
	return NULL;
}
EXPORT_SYMBOL(radix_tree_iter_resume);

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *node, *child;
	unsigned long index, offset, maxindex;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

 restart:
	radix_tree_load_root(root, &child, &maxindex);
	if (index > maxindex)
		return NULL;
	if (!child)
		return NULL;

	if (!radix_tree_is_internal_node(child)) {
		/* Single-slot tree */
		iter->index = index;
		iter->next_index = maxindex + 1;
		iter->tags = 1;
		iter->node = NULL;
		__set_iter_shift(iter, 0);
		return (void **)&root->rnode;
	}

	do {
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);

		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!tag_get(node, tag, offset) : !child) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(node, tag,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					void *slot = node->slots[offset];
					if (is_sibling_entry(node, slot))
						continue;
					if (slot)
						break;
				}
			index &= ~node_maxindex(node);
			index += offset << node->shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
			child = rcu_dereference_raw(node->slots[offset]);
		}

		if (!child)
			goto restart;
		if (child == RADIX_TREE_RETRY)
			break;
	} while (radix_tree_is_internal_node(child));

	/* Update the iterator state */
	iter->index = (index & ~node_maxindex(node)) | (offset << node->shift);
	iter->next_index = (index | node_maxindex(node)) + 1;
	iter->node = node;
	__set_iter_shift(iter, node->shift);

	if (flags & RADIX_TREE_ITER_TAGGED)
		set_iter_tags(iter, node, offset, tag);

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
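
/*
 * Iteration sketch: radix_tree_next_chunk() is normally driven by the
 * radix_tree_for_each_slot() helpers from <linux/radix-tree.h> rather than
 * called directly (illustrative, assumed names):
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	rcu_read_lock();
 *	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
 *		struct item *item = radix_tree_deref_slot(slot);
 *		if (radix_tree_deref_retry(item)) {
 *			slot = radix_tree_iter_retry(&iter);
 *			continue;
 *		}
 *		...use iter.index and item...
 *	}
 *	rcu_read_unlock();
 */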

/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the
 *	semantics of an RCU protected gang lookup are as though multiple
 *	radix_tree_lookups have been issued in individual locks, and results
 *	stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
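
/*
 * Gang lookup sketch (illustrative): batched retrieval of up to 16 present
 * items starting at index 0:
 *
 *	void *results[16];
 *	unsigned int nr;
 *
 *	nr = radix_tree_gang_lookup(&tree, results, 0, ARRAY_SIZE(results));
 *	...results[0..nr-1] now hold the first nr present items...
 */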

/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);

/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *	@update_node:	callback for changing leaf nodes
 *	@private:	private data to pass to @update_node
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 */
void __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node,
			      radix_tree_update_node_t update_node,
			      void *private)
{
	delete_node(root, node, update_node, private);
}

/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = get_slot_offset(node, slot);

	/* Clear all tags associated with the item to be deleted.  */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
		node_tag_clear(root, node, tag, offset);

	__radix_tree_replace(root, node, slot, NULL, NULL, NULL);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);
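
/*
 * Conditional deletion sketch (illustrative, lock name assumed): delete only
 * if the slot still holds the expected item, e.g. to resolve a
 * lookup/delete race:
 *
 *	spin_lock(&mylock);
 *	old = radix_tree_delete_item(&tree, index, expected);
 *	spin_unlock(&mylock);
 *	if (!old)
 *		...someone else removed or replaced it first...
 */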

/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);

void radix_tree_clear_tags(struct radix_tree_root *root,
			   struct radix_tree_node *node,
			   void **slot)
{
	if (node) {
		unsigned int tag, offset = get_slot_offset(node, slot);
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			node_tag_clear(root, node, tag, offset);
	} else {
		/* Clear root node tags */
		root->gfp_mask &= __GFP_BITS_MASK;
	}
}

/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}

static __init void radix_tree_init_maxnodes(void)
{
	unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1];
	unsigned int i, j;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
	for (i = 0; i < ARRAY_SIZE(height_to_maxnodes); i++) {
		for (j = i; j > 0; j--)
			height_to_maxnodes[i] += height_to_maxindex[j - 1] + 1;
	}
}

static int radix_tree_cpu_dead(unsigned int cpu)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free per-cpu pool of preloaded nodes */
	rtp = &per_cpu(radix_tree_preloads, cpu);
	while (rtp->nr) {
		node = rtp->nodes;
		rtp->nodes = node->private_data;
		kmem_cache_free(radix_tree_node_cachep, node);
		rtp->nr--;
	}
	return 0;
}

void __init radix_tree_init(void)
{
	int ret;

	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxnodes();
	ret = cpuhp_setup_state_nocalls(CPUHP_RADIX_DEAD, "lib/radix:dead",
					NULL, radix_tree_cpu_dead);
	WARN_ON(ret < 0);
}