/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>


#ifdef __KERNEL__
#define RADIX_TREE_MAP_SHIFT	(CONFIG_BASE_SMALL ? 4 : 6)
#else
#define RADIX_TREE_MAP_SHIFT	3	/* For more stressful testing */
#endif

#define RADIX_TREE_MAP_SIZE	(1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK	(RADIX_TREE_MAP_SIZE-1)

#define RADIX_TREE_TAG_LONGS	\
	((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)

struct radix_tree_node {
	unsigned int	height;		/* Height from the bottom */
	unsigned int	count;
	union {
		struct radix_tree_node *parent;	/* Used when ascending tree */
		struct rcu_head	rcu_head;	/* Used when freeing node */
	};
	void __rcu	*slots[RADIX_TREE_MAP_SIZE];
	unsigned long	tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
};

#define RADIX_TREE_INDEX_BITS  (8 /* CHAR_BIT */ * sizeof(unsigned long))
#define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \
					  RADIX_TREE_MAP_SHIFT))
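
/*
 * Worked example of the constants above (assuming a 64-bit kernel
 * without CONFIG_BASE_SMALL): RADIX_TREE_MAP_SHIFT == 6, so each node
 * has RADIX_TREE_MAP_SIZE == 64 slots, RADIX_TREE_MAP_MASK == 63, and
 * RADIX_TREE_MAX_PATH == DIV_ROUND_UP(64, 6) == 11 levels are enough
 * to cover the whole unsigned long index space.
 */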

/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
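
/*
 * With the 64-bit numbers above this works out to 11 * 2 - 1 == 21
 * preloaded nodes per cpu: two full root-to-leaf paths for the worst
 * case just described, minus the one root node they share.
 */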

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE];
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}
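
/*
 * The root slot can hold either a data item (a direct pointer, bit 0
 * clear) or a pointer to an interior radix_tree_node, tagged with
 * RADIX_TREE_INDIRECT_PTR (bit 0 set) so that lookups can tell the two
 * cases apart.  A minimal sketch of a reader using the helpers above:
 *
 *	void *entry = rcu_dereference(root->rnode);
 *	if (radix_tree_is_indirect_ptr(entry))
 *		node = indirect_to_ptr(entry);
 *	else
 *		item = entry;
 */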

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	if (!(gfp_mask & __GFP_WAIT)) {
		struct radix_tree_preload *rtp;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes[rtp->nr - 1];
			rtp->nodes[rtp->nr - 1] = NULL;
			rtp->nr--;
		}
	}
	if (ret == NULL)
		ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);

	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_WAIT being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = &__get_cpu_var(radix_tree_preloads);
	while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr < ARRAY_SIZE(rtp->nodes))
			rtp->nodes[rtp->nr++] = node;
		else
			kmem_cache_free(radix_tree_node_cachep, node);
	}
	ret = 0;
out:
	return ret;
}
EXPORT_SYMBOL(radix_tree_preload);
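
/*
 * Typical usage (a sketch; the mapping/page names are illustrative):
 * preload while still preemptible, then take the tree lock and insert,
 * relying on the per-cpu pool so the insertion itself cannot fail:
 *
 *	error = radix_tree_preload(GFP_KERNEL);
 *	if (error)
 *		return error;
 *	spin_lock_irq(&mapping->tree_lock);
 *	error = radix_tree_insert(&mapping->page_tree, index, page);
 *	spin_unlock_irq(&mapping->tree_lock);
 *	radix_tree_preload_end();
 */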

/*
 *	Return the maximum key which can be stored in a
 *	radix tree of height @height.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}

/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node;
	struct radix_tree_node *slot;
	unsigned int height;
	int tag;

	/* Figure out what the height should be.  */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if (root->rnode == NULL) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;
		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		/* Increase the height.  */
		newheight = root->height+1;
		node->height = newheight;
		node->count = 1;
		node->parent = NULL;
		slot = root->rnode;
		if (newheight > 1) {
			slot = indirect_to_ptr(slot);
			slot->parent = node;
		}
		node->slots[0] = slot;
		node = ptr_to_indirect(node);
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}

/**
 *	radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int radix_tree_insert(struct radix_tree_root *root,
			unsigned long index, void *item)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift;
	int offset;
	int error;

	BUG_ON(radix_tree_is_indirect_ptr(item));

	/* Make sure the tree is high enough.  */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index);
		if (error)
			return error;
	}

	slot = indirect_to_ptr(root->rnode);

	height = root->height;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (height > 0) {
		if (slot == NULL) {
			/* Have to add a child node.  */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->height = height;
			slot->parent = node;
			if (node) {
				rcu_assign_pointer(node->slots[offset], slot);
				node->count++;
			} else
				rcu_assign_pointer(root->rnode, ptr_to_indirect(slot));
		}

		/* Go a level down */
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = node->slots[offset];
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (slot != NULL)
		return -EEXIST;

	if (node) {
		node->count++;
		rcu_assign_pointer(node->slots[offset], item);
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
	} else {
		rcu_assign_pointer(root->rnode, item);
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(radix_tree_insert);

/*
 * is_slot == 1 : search for the slot.
 * is_slot == 0 : search for the node.
 */
static void *radix_tree_lookup_element(struct radix_tree_root *root,
				unsigned long index, int is_slot)
{
	unsigned int height, shift;
	struct radix_tree_node *node, **slot;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		if (index > 0)
			return NULL;
		return is_slot ? (void *)&root->rnode : node;
	}
	node = indirect_to_ptr(node);

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		slot = (struct radix_tree_node **)
			(node->slots + ((index>>shift) & RADIX_TREE_MAP_MASK));
		node = rcu_dereference_raw(*slot);
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	return is_slot ? (void *)slot : indirect_to_ptr(node);
}

/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	return (void **)radix_tree_lookup_element(root, index, 1);
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
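
/*
 * Example of an update-if-exists (a sketch; the caller must exclude
 * other writers to this slot, e.g. by holding the tree lock):
 *
 *	void **slot = radix_tree_lookup_slot(&tree, index);
 *	if (slot)
 *		radix_tree_replace_slot(slot, new_item);
 */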

/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_lookup_element(root, index, 0);
}
EXPORT_SYMBOL(radix_tree_lookup);
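
/*
 * Example of an RCU-protected lookup (a sketch; it assumes the items
 * are freed via RCU, so the returned pointer stays valid for the
 * duration of the read-side critical section):
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&tree, index);
 *	if (item)
 *		use(item);
 *	rcu_read_unlock();
 */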

/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  From
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.   Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);

/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If
 *	this causes the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = height * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	while (shift) {
		if (slot == NULL)
			goto out;

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	}

	if (slot == NULL)
		goto out;

	while (node) {
		if (!tag_get(node, tag, offset))
			goto out;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			goto out;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);
	node = indirect_to_ptr(node);

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(node, tag, offset))
			return 0;
		if (height == 1)
			return 1;
		node = rcu_dereference_raw(node->slots[offset]);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);
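
/*
 * Tags in a nutshell (a sketch with a single tagged item; in a
 * page-cache tree, tag 0 is PAGECACHE_TAG_DIRTY).  A tag set on a
 * present item propagates up to the root, so radix_tree_tagged() can
 * answer cheaply:
 *
 *	radix_tree_tag_set(&tree, index, 0);
 *	BUG_ON(!radix_tree_tag_get(&tree, index, 0));
 *	radix_tree_tag_clear(&tree, index, 0);
 *	BUG_ON(radix_tree_tagged(&tree, 0));
 */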

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * while iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is a leaf node */
		if (!shift)
			break;

		node = rcu_dereference_raw(node->slots[offset]);
		if (node == NULL)
			goto restart;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
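
/*
 * This function is the engine behind the iteration macros in
 * <linux/radix-tree.h>.  A minimal sketch of walking every present
 * item (process() is illustrative):
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	radix_tree_for_each_slot(slot, &tree, &iter, 0)
 *		process(iter.index, radix_tree_deref_slot(slot));
 */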

/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				   tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number of items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range that has iftag set,
 * the function also sets settag.  The scan stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	unsigned int height = root->height;
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot;
	unsigned int shift;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	last_index = min(last_index, radix_tree_maxindex(height));
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (height == 0) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	for (;;) {
		unsigned long upindex;
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!slot->slots[offset])
			goto next;
		if (!tag_get(slot, iftag, offset))
			goto next;
		if (shift) {
			/* Go down one level */
			shift -= RADIX_TREE_MAP_SHIFT;
			node = slot;
			slot = slot->slots[offset];
			continue;
		}

		/* tag the leaf */
		tagged++;
		tag_set(slot, settag, offset);

		/* walk back up the path tagging interior nodes */
		upindex = index;
		while (node) {
			upindex >>= RADIX_TREE_MAP_SHIFT;
			offset = upindex & RADIX_TREE_MAP_MASK;

			/* stop if we find a node with the tag already set */
			if (tag_get(node, settag, offset))
				break;
			tag_set(node, settag, offset);
			node = node->parent;
		}

		/*
		 * Small optimization: now clear that node pointer.
		 * Since all of this slot's ancestors now have the tag set
		 * from setting it above, we have no further need to walk
		 * back up the tree setting tags, until we update slot to
		 * point to another radix_tree_node.
		 */
		node = NULL;

next:
		/* Go to next item at level determined by 'shift' */
		index = ((index >> shift) + 1) << shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		if (tagged >= nr_to_tag)
			break;
		while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			slot = slot->parent;
			shift += RADIX_TREE_MAP_SHIFT;
		}
	}
	/*
	 * There is no need to set the root tag if no leaf within the range
	 * from *first_indexp to last_index was actually tagged with settag.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
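
/*
 * Example of batched retagging (a sketch modelled on how writeback
 * retags dirty pages; first/last and BATCH are illustrative, and the
 * batch size bounds the work done under the tree lock in one pass):
 *
 *	unsigned long start = first, tagged;
 *
 *	do {
 *		spin_lock_irq(&mapping->tree_lock);
 *		tagged = radix_tree_range_tag_if_tagged(
 *				&mapping->page_tree, &start, last, BATCH,
 *				PAGECACHE_TAG_DIRTY, PAGECACHE_TAG_TOWRITE);
 *		spin_unlock_irq(&mapping->tree_lock);
 *		cond_resched();
 *	} while (tagged >= BATCH && start);
 */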

/**
 *	radix_tree_next_hole    -    find the next hole (not-present entry)
 *	@root:		tree root
 *	@index:		index key
 *	@max_scan:	maximum range to search
 *
 *	Search the set [index, min(index+max_scan-1, MAX_INDEX)] for the lowest
 *	indexed hole.
 *
 *	Returns: the index of the hole if found, otherwise returns an index
 *	outside of the set specified (in which case 'return - index >= max_scan'
 *	will be true). In rare cases of index wrap-around, 0 will be returned.
 *
 *	radix_tree_next_hole may be called under rcu_read_lock. However, like
 *	radix_tree_gang_lookup, this will not atomically search a snapshot of
 *	the tree at a single point in time. For example, if a hole is created
 *	at index 5, then subsequently a hole is created at index 10,
 *	radix_tree_next_hole covering both indexes may return 10 if called
 *	under rcu_read_lock.
 */
unsigned long radix_tree_next_hole(struct radix_tree_root *root,
				unsigned long index, unsigned long max_scan)
{
	unsigned long i;

	for (i = 0; i < max_scan; i++) {
		if (!radix_tree_lookup(root, index))
			break;
		index++;
		if (index == 0)
			break;
	}

	return index;
}
EXPORT_SYMBOL(radix_tree_next_hole);

/**
 *	radix_tree_prev_hole    -    find the prev hole (not-present entry)
 *	@root:		tree root
 *	@index:		index key
 *	@max_scan:	maximum range to search
 *
 *	Search backwards in the range [max(index-max_scan+1, 0), index]
 *	for the first hole.
 *
 *	Returns: the index of the hole if found, otherwise returns an index
 *	outside of the set specified (in which case 'index - return >= max_scan'
 *	will be true). In rare cases of wrap-around, ULONG_MAX will be returned.
 *
 *	radix_tree_prev_hole may be called under rcu_read_lock. However, like
 *	radix_tree_gang_lookup, this will not atomically search a snapshot of
 *	the tree at a single point in time. For example, if a hole is created
 *	at index 10, then subsequently a hole is created at index 5,
 *	radix_tree_prev_hole covering both indexes may return 5 if called under
 *	rcu_read_lock.
 */
unsigned long radix_tree_prev_hole(struct radix_tree_root *root,
				   unsigned long index, unsigned long max_scan)
{
	unsigned long i;

	for (i = 0; i < max_scan; i++) {
		if (!radix_tree_lookup(root, index))
			break;
		index--;
		if (index == ULONG_MAX)
			break;
	}

	return index;
}
EXPORT_SYMBOL(radix_tree_prev_hole);

/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the semantics
 *	of an RCU protected gang lookup are as though multiple radix_tree_lookups
 *	had been issued under individual locks, with the results stored in
 *	'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
		if (!results[ret])
			continue;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
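
/*
 * Example of a batched scan (a sketch; it assumes the stored items,
 * like struct page, record their own index so the scan can resume
 * after the last item returned; process() is illustrative):
 *
 *	struct page *pages[16];
 *	unsigned long index = 0;
 *	unsigned int i, nr;
 *
 *	while ((nr = radix_tree_gang_lookup(&tree, (void **)pages,
 *					    index, 16))) {
 *		for (i = 0; i < nr; i++)
 *			process(pages[i]);
 *		index = pages[nr - 1]->index + 1;
 *	}
 */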

/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
		if (!results[ret])
			continue;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);

#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, unsigned long *found_index)
{
	unsigned int shift, height;
	unsigned long i;

	height = slot->height;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference_raw(slot->slots[i]);
		if (slot == NULL)
			goto out;
	}

	/* Bottom level: check items */
	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		if (slot->slots[i] == item) {
			*found_index = index + i;
			index = 0;
			goto out;
		}
	}
	index += RADIX_TREE_MAP_SIZE;
out:
	return index;
}

/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns index where item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	unsigned long found_index = -1;

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_indirect_ptr(node)) {
			rcu_read_unlock();
			if (node == item)
				found_index = 0;
			break;
		}

		node = indirect_to_ptr(node);
		max_index = radix_tree_maxindex(node->height);
		if (cur_index > max_index) {
			/* must not leave the loop with the RCU lock held */
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &found_index);
		rcu_read_unlock();
		cond_resched();
	} while (cur_index != 0 && cur_index <= max_index);

	return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */

/**
 *	radix_tree_shrink    -    shrink height of a radix tree to minimal
 *	@root:		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		struct radix_tree_node *slot;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = indirect_to_ptr(to_free);

		/*
		 * If the candidate node has more than one child, or its
		 * child is not in the leftmost slot, we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		if (!to_free->slots[0])
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		slot = to_free->slots[0];
		if (root->height > 1) {
			slot->parent = NULL;
			slot = ptr_to_indirect(slot);
		}
		root->rnode = slot;
		root->height--;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page is 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		if (root->height == 0)
			*((unsigned long *)&to_free->slots[0]) |=
						RADIX_TREE_INDIRECT_PTR;

		radix_tree_node_free(to_free);
	}
}

/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	struct radix_tree_node *to_free;
	unsigned int height, shift;
	int tag;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	slot = root->rnode;
	if (height == 0) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		goto out;
	}
	slot = indirect_to_ptr(slot);
	shift = height * RADIX_TREE_MAP_SHIFT;

	do {
		if (slot == NULL)
			goto out;

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	} while (shift);

	if (slot == NULL)
		goto out;

	/*
	 * Clear all tags associated with the item to be deleted.
	 * This is inefficient, but any given tag is seldom set.
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(node, tag, offset))
			radix_tree_tag_clear(root, index, tag);
	}

	to_free = NULL;
	/* Now free the nodes we do not need anymore */
	while (node) {
		node->slots[offset] = NULL;
		node->count--;
		/*
		 * Queue the node for deferred freeing after the
		 * last reference to it disappears (set NULL, above).
		 */
		if (to_free)
			radix_tree_node_free(to_free);

		if (node->count) {
			if (node == indirect_to_ptr(root->rnode))
				radix_tree_shrink(root);
			goto out;
		}

		/* Node with zero slots in use so free it */
		to_free = node;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	root_tag_clear_all(root);
	root->height = 0;
	root->rnode = NULL;
	if (to_free)
		radix_tree_node_free(to_free);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_delete);
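
/*
 * Example round trip (a sketch; the caller serialises all
 * modifications, e.g. with a spinlock):
 *
 *	radix_tree_insert(&tree, index, item);
 *	BUG_ON(radix_tree_lookup(&tree, index) != item);
 *	BUG_ON(radix_tree_delete(&tree, index) != item);
 *	BUG_ON(radix_tree_lookup(&tree, index) != NULL);
 */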

/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *node)
{
	memset(node, 0, sizeof(struct radix_tree_node));
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}
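
/*
 * With RADIX_TREE_MAP_SHIFT == 6 on a 64-bit kernel this yields, for
 * example, __maxindex(0) == 0, __maxindex(1) == 63 and
 * __maxindex(2) == 4095, saturating at ~0UL once the width
 * height * RADIX_TREE_MAP_SHIFT reaches RADIX_TREE_INDEX_BITS.
 */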

static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}

static int radix_tree_callback(struct notifier_block *nfb,
			       unsigned long action,
			       void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			kmem_cache_free(radix_tree_node_cachep,
					rtp->nodes[rtp->nr-1]);
			rtp->nodes[rtp->nr-1] = NULL;
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}