// SPDX-License-Identifier: GPL-2.0-or-later

#include <linux/radix-tree.h>
#include "radix-tree.h"
/* Radix tree node cache. */

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its own item, it may also have to extend the tree.
 * The worst case is a zero height tree with just a single item at index 0,
 * into which an item at the maximum index is then inserted.  Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)

/*
 * The IDR does not have to be as high as the radix tree since it uses
 * signed integers for its IDs.
 */
#define IDR_INDEX_BITS (8 /* CHAR_BIT */ * sizeof(int) - 1)
#define IDR_PRELOAD_SIZE (IDR_MAX_PATH * 2 - 1)
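/*
 * Illustrative arithmetic for the sizing above (an editorial sketch, assuming
 * 64-bit indices and the common RADIX_TREE_MAP_SHIFT of 6): a root-to-leaf
 * path is at most DIV_ROUND_UP(64, 6) = 11 nodes, so the worst-case insert
 * described above needs 2 * 11 - 1 = 21 preloaded nodes, the two branches
 * sharing only the root node.
 */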
/* Per-cpu pool of preloaded nodes */
	return parent ? slot - parent->slots : 0;			/* in get_slot_offset() */

	unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;	/* in radix_tree_descend() */
	void __rcu **entry = rcu_dereference_raw(parent->slots[offset]);

	return root->xa_flags & (__GFP_BITS_MASK & ~GFP_ZONEMASK);	/* in root_gfp_mask() */
	__set_bit(offset, node->tags[tag]);				/* in tag_set() */
	__clear_bit(offset, node->tags[tag]);				/* in tag_clear() */
	return test_bit(offset, node->tags[tag]);			/* in tag_get() */
	root->xa_flags |= (__force gfp_t)(1 << (tag + ROOT_TAG_SHIFT));	/* in root_tag_set() */
	root->xa_flags &= (__force gfp_t)~(1 << (tag + ROOT_TAG_SHIFT));	/* in root_tag_clear() */
	root->xa_flags &= (__force gfp_t)((1 << ROOT_TAG_SHIFT) - 1);	/* in root_tag_clear_all() */
	return (__force int)root->xa_flags & (1 << (tag + ROOT_TAG_SHIFT));	/* in root_tag_get() */
	return (__force unsigned)root->xa_flags >> ROOT_TAG_SHIFT;	/* in root_tags_get() */
	return !!(root->xa_flags & ROOT_IS_IDR);			/* in is_idr() */
/* Returns 1 if any slot in the node has this tag set. */
		if (node->tags[tag][idx])			/* in any_tag_set() */

	bitmap_fill(node->tags[tag], RADIX_TREE_MAP_SIZE);	/* in all_tag_set() */
/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 */
	const unsigned long *addr = node->tags[tag];		/* in radix_tree_find_next_bit() */

		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
	return iter->index & RADIX_TREE_MAP_MASK;		/* in iter_offset() */
/* The maximum index which can be stored in a radix tree */
	return (RADIX_TREE_MAP_SIZE << shift) - 1;		/* in shift_maxindex() */

	return shift_maxindex(node->shift);			/* in node_maxindex() */

	return (index & ~node_maxindex(node)) + (offset << node->shift);	/* in next_index() */
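/*
 * Illustrative arithmetic (an editorial sketch, assuming the common
 * RADIX_TREE_MAP_SHIFT of 6, so RADIX_TREE_MAP_SIZE == 64): shift_maxindex(0)
 * is 63 for a leaf node, shift_maxindex(6) is 4095 one level up, and each
 * further level multiplies the covered index range by 64.
 */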
	/*
	 * Preloading isn't irq safe and doesn't make sense in interrupt
	 * context, where all allocations have to be atomic.  So just do a
	 * normal allocation when in interrupt.  Otherwise, provided the
	 * caller has preloaded, we will always succeed in getting a node
	 * here (and never reach kmem_cache_alloc()).
	 */
	if (rtp->nr) {					/* in radix_tree_node_alloc() */
		ret = rtp->nodes;
		rtp->nodes = ret->parent;
		rtp->nr--;
	}

	ret->shift = shift;
	ret->offset = offset;
	ret->count = count;
	ret->nr_values = nr_values;
	ret->parent = parent;
	ret->array = root;
	/*
	 * We can be left with non-NULL entries by radix_tree_free_nodes, so
	 * clear the entries and tags here before the node goes back to the
	 * slab.  (in radix_tree_node_rcu_free())
	 */
	memset(node->slots, 0, sizeof(node->slots));
	memset(node->tags, 0, sizeof(node->tags));
	INIT_LIST_HEAD(&node->private_list);

	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);	/* in radix_tree_node_free() */
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
	int ret = -ENOMEM;				/* in __radix_tree_preload() */

	while (rtp->nr < nr) {
		/* a node is allocated with preemption briefly re-enabled,
		 * then the pool is re-checked before pushing it: */
		if (rtp->nr < nr) {
			node->parent = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		}
	}
/*
 * radix_tree_preload() has the same contract: ensure that the addition of a
 * single element in the tree cannot fail.  On success, return zero, with
 * preemption disabled.  On error, return -ENOMEM with preemption not
 * disabled.  The radix tree must be initialised without __GFP_DIRECT_RECLAIM.
 */
	/* Warn on non-sensical use... (in radix_tree_preload()) */
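/*
 * Illustrative usage sketch (not part of this file): the preload pattern
 * described above.  "example_tree" and "example_lock" are placeholders;
 * <linux/spinlock.h> and <linux/gfp.h> are assumed to be included.  Note the
 * tree's own gfp mask omits __GFP_DIRECT_RECLAIM, as required for preloading.
 */
static RADIX_TREE(example_tree, GFP_ATOMIC);
static DEFINE_SPINLOCK(example_lock);

static int example_store(unsigned long index, void *item)
{
	int err;

	err = radix_tree_preload(GFP_KERNEL);	/* may sleep; disables preemption on success */
	if (err)
		return err;

	spin_lock(&example_lock);
	err = radix_tree_insert(&example_tree, index, item);
	spin_unlock(&example_lock);

	radix_tree_preload_end();		/* re-enables preemption */
	return err;
}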
/*
 * On success, return zero with preemption disabled.  On error, return
 * -ENOMEM with preemption not disabled.
 */
	struct radix_tree_node *node = rcu_dereference_raw(root->xa_head);	/* in radix_tree_load_root() */

		return node->shift + RADIX_TREE_MAP_SHIFT;
/* Extend a radix tree so it can store key @index. */
	entry = rcu_dereference_raw(root->xa_head);		/* in radix_tree_extend() */

			return -ENOMEM;

			entry_to_node(entry)->parent = node;

			/* Moving a value entry root->xa_head to a node */
			node->nr_values = 1;

		/*
		 * entry was already in the radix tree, so we do not need
		 * rcu_assign_pointer() here.
		 */
		node->slots[0] = (void __rcu *)entry;

		rcu_assign_pointer(root->xa_head, entry);
/**
 * radix_tree_shrink - shrink radix tree to minimum height
 * @root:	radix tree root
 */
	struct radix_tree_node *node = rcu_dereference_raw(root->xa_head);	/* in radix_tree_shrink() */

		if (node->count != 1)
			break;
		child = rcu_dereference_raw(node->slots[0]);

		/*
		 * For an IDR, we must not shrink entry 0 into the root in
		 * case somebody calls idr_replace() on it.
		 */
		if (!node->shift && is_idr(root))
			break;

			entry_to_node(child)->parent = NULL;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (node->slots[0]), it will be safe to dereference the new
		 * one (root->xa_head) as far as dependent read barriers go.
		 */
		root->xa_head = (void __rcu *)child;

		/*
		 * The node's slot[0] must not simply be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item.  However if this was a bottom-level node,
		 * a lookup through the stale slot must be forced
		 * to retry the entire slot lookup -- the indirect pointer
		 * also results in a stale slot.  So tag the slot as indirect
		 * (RADIX_TREE_RETRY) to force callers to retry.
		 */
		node->count = 0;
			node->slots[0] = (void __rcu *)RADIX_TREE_RETRY;

		WARN_ON_ONCE(!list_empty(&node->private_list));
		if (node->count) {				/* in delete_node() */
				rcu_dereference_raw(root->xa_head))

		parent = node->parent;
			parent->slots[node->offset] = NULL;
			parent->count--;

			root->xa_head = NULL;

		WARN_ON_ONCE(!list_empty(&node->private_list));
/**
 * __radix_tree_create - create a slot in a radix tree
 * @root:	radix tree root
 *
 * Create, if necessary, and return the node and slot for an item
 * at position @index in the radix tree @root.
 *
 * Until there is more than one item in the tree, no nodes are
 * allocated and @root->xa_head is used as a direct slot instead of
 * pointing to a node, in which case *@nodep will be NULL.
 *
 * Returns -ENOMEM, or 0 for success.
 */
	void __rcu **slot = (void __rcu **)&root->xa_head;	/* in __radix_tree_create() */

	/* Make sure the tree is high enough. */
		child = rcu_dereference_raw(root->xa_head);

		shift -= RADIX_TREE_MAP_SHIFT;

				return -ENOMEM;

			node->count++;

		slot = &node->slots[offset];
/*
 * Free any nodes below this node.  The tree is presumed to not need
 * shrinking, and any user data in the tree is presumed to not need a
 * destructor called on it.  Note that we may not clear tags or
 * slots from the tree as an RCU walker may still have a pointer into
 * this subtree.  We could replace the entries with RADIX_TREE_RETRY,
 * but we'll still have to clear those in rcu_free.
 */
		void *entry = rcu_dereference_raw(child->slots[offset]);	/* in radix_tree_free_nodes() */
		if (xa_is_node(entry) && child->shift) {

			offset = child->offset + 1;
			child = child->parent;
			WARN_ON_ONCE(!list_empty(&old->private_list));
		return -EEXIST;				/* in insert_entries() */

	node->count++;

		node->nr_values++;
/**
 * radix_tree_insert - insert into a radix tree
 * @root:	radix tree root
 *
 * Insert an item into the radix tree at position @index.
 */
/**
 * __radix_tree_lookup - lookup an item in a radix tree
 * @root:	radix tree root
 *
 * Lookup and return the item at position @index in the radix
 * tree @root.
 *
 * Until there is more than one item in the tree, no nodes are
 * allocated and @root->xa_head is used as a direct slot instead of
 * pointing to a node, in which case *@nodep will be NULL.
 */
	slot = (void __rcu **)&root->xa_head;		/* in __radix_tree_lookup() */

		slot = parent->slots + offset;

		if (parent->shift == 0)
/**
 * radix_tree_lookup_slot - lookup a slot in a radix tree
 * @root:	radix tree root
 *
 * Returns: the slot corresponding to the position @index in the
 * radix tree @root.  This is useful for update-if-exists operations.
 */
/**
 * radix_tree_lookup - perform lookup operation on a radix tree
 * @root:	radix tree root
 *
 * Lookup the item at the position @index in the radix tree @root.
 */
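/*
 * Illustrative usage sketch (not part of this file): a lockless reader of
 * the placeholder "example_tree" from the earlier sketch.  Entries are
 * assumed to be freed only after an RCU grace period.
 */
static bool example_present(unsigned long index)
{
	bool present;

	rcu_read_lock();
	present = radix_tree_lookup(&example_tree, index) != NULL;
	rcu_read_unlock();

	return present;
}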
		node->count += count;			/* in replace_slot() */
		node->nr_values += values;
/*
 * IDR users want to be able to store NULL in the tree, so if the slot isn't
 * free, don't adjust the count, even if it's transitioning between NULL and
 * non-NULL.  For the IDA, we mark slots as being IDR_FREE while they still
 * have empty bits, but it only stores NULL in slots when they're being
 * deleted.
 */
		return !!item - !!old;			/* in calculate_count() */
/**
 * __radix_tree_replace - replace item in a slot
 * @root:	radix tree root
 * @node:	pointer to tree node
 * @slot:	pointer to slot in @node
 * @item:	new item to store in the slot.
 *
 * For use with __radix_tree_lookup().  Caller must hold tree write locked
 * across slot lookup and replacement.
 */
	int values = !!xa_is_value(item) - !!xa_is_value(old);	/* in __radix_tree_replace() */

	/*
	 * This function supports replacing value entries and deleting
	 * entries, but that needs accounting against the
	 * node unless the slot is root->xa_head.
	 */
	WARN_ON_ONCE(!node && (slot != (void __rcu **)&root->xa_head) &&
/**
 * radix_tree_replace_slot - replace item in a slot
 * @root:	radix tree root
 * @item:	new item to store in the slot.
 *
 * For use with radix_tree_lookup_slot() and
 * radix_tree_gang_lookup_tag_slot().  Caller must hold tree write locked
 * across slot lookup and replacement.
 *
 * NOTE: This cannot be used to switch between non-entries (empty slots),
 * regular entries, and value entries, as that requires accounting
 * inside the radix tree node.  When switching from one type of entry or
 * deleting, use __radix_tree_replace() or radix_tree_iter_replace().
 */
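/*
 * Illustrative usage sketch (not part of this file): update-if-exists with
 * radix_tree_lookup_slot() and radix_tree_replace_slot(), under the same
 * placeholder "example_lock" that writers of "example_tree" hold.  The new
 * item is assumed to be a regular (non-NULL, non-value) entry, as the NOTE
 * above requires.
 */
static void *example_replace(unsigned long index, void *new_item)
{
	void __rcu **slot;
	void *old = NULL;

	spin_lock(&example_lock);
	slot = radix_tree_lookup_slot(&example_tree, index);
	if (slot) {
		old = radix_tree_deref_slot_protected(slot, &example_lock);
		radix_tree_replace_slot(&example_tree, slot, new_item);
	}
	spin_unlock(&example_lock);

	return old;	/* previous entry, or NULL if @index was absent */
}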
/**
 * radix_tree_iter_replace - replace item in a slot
 * @root:	radix tree root
 * @item:	new item to store in the slot.
 *
 * Caller must hold tree write locked.
 */
	__radix_tree_replace(root, iter->node, slot, item);	/* in radix_tree_iter_replace() */
		offset = node->offset;			/* in node_tag_set() */
		node = node->parent;
/**
 * radix_tree_tag_set - set a tag on a radix tree node
 * @root:	radix tree root
 *
 * Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 * corresponding to @index in the radix tree, from the root all the way
 * down to the leaf node.
 *
 * Returns the address of the tagged item.  Setting a tag on a not-present
 * item is a bug.
 */
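/*
 * Illustrative usage sketch (not part of this file): tagging present entries
 * of the placeholder "example_tree".  EXAMPLE_TAG_DIRTY is a placeholder for
 * one of the RADIX_TREE_MAX_TAGS tag indices; the lookup guard avoids tagging
 * a not-present item, which the comment above calls a bug.
 */
#define EXAMPLE_TAG_DIRTY	0

static void example_mark_dirty(unsigned long index)
{
	spin_lock(&example_lock);
	if (radix_tree_lookup(&example_tree, index))
		radix_tree_tag_set(&example_tree, index, EXAMPLE_TAG_DIRTY);
	spin_unlock(&example_lock);
}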
		offset = node->offset;			/* in node_tag_clear() */
		node = node->parent;
/**
 * radix_tree_tag_clear - clear a tag on a radix tree node
 * @root:	radix tree root
 *
 * Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 * corresponding to @index in the radix tree.  If this causes
 * the leaf node to have no tags set then clear the tag in the
 * next-to-leaf node, etc.
 */
/**
 * radix_tree_iter_tag_clear - clear a tag on the current iterator entry
 * @root:	radix tree root
 */
	node_tag_clear(root, iter->node, tag, iter_offset(iter));	/* in radix_tree_iter_tag_clear() */
/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:	radix tree root
 */
	/* Construct iter->tags bit-mask from node->tags[tag] array (set_iter_tags()) */
		iter->tags = 1;

		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			iter->next_index = __radix_tree_iter_add(iter, BITS_PER_LONG);
		}
	iter->index = __radix_tree_iter_add(iter, 1);	/* in radix_tree_iter_resume() */
	iter->next_index = iter->index;
	iter->tags = 0;
/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 * @root:	radix tree root
 */
	/*
	 * Catch next_index overflow after ~0UL.  iter->index never overflows
	 * during iteration, though it can legitimately be zero.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 */
	index = iter->next_index;			/* in radix_tree_next_chunk() */
	if (!index && iter->index)
		return NULL;

		/* Single-slot tree */
		iter->index = index;
		iter->next_index = maxindex + 1;
		iter->tags = 1;
		iter->node = NULL;
		return (void __rcu **)&root->xa_head;

				node->slots[offset]);
		index += offset << node->shift;
		child = rcu_dereference_raw(node->slots[offset]);

	} while (node->shift && radix_tree_is_internal_node(child));

	iter->index = (index & ~node_maxindex(node)) | offset;
	iter->next_index = (index | node_maxindex(node)) + 1;
	iter->node = node;

	return node->slots + offset;
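/*
 * Illustrative usage sketch (not part of this file): walking all present
 * entries of the placeholder "example_tree" with radix_tree_for_each_slot(),
 * which pulls slots in chunks via radix_tree_next_chunk() above.  Read-only,
 * so an RCU read section suffices.
 */
static unsigned long example_count_entries(void)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned long count = 0;

	rcu_read_lock();
	radix_tree_for_each_slot(slot, &example_tree, &iter, 0)
		count++;
	rcu_read_unlock();

	return count;
}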
/**
 * radix_tree_gang_lookup - perform multiple lookup on a radix tree
 * @root:	radix tree root
 *
 * Performs an index-ascending scan of the tree for present items.  Places
 * them at *@results and returns the number of items which were placed.
 *
 * Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 * rcu_read_lock.  In this case, rather than the returned results being
 * an atomic snapshot of the tree at a single point in time, the
 * semantics of an RCU protected gang lookup are as though multiple
 * radix_tree_lookups have been issued in individual locks, and results
 * stored in 'results'.
 */
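/*
 * Illustrative usage sketch (not part of this file): a batched lookup of up
 * to 16 present items of "example_tree" starting at index 0, under
 * rcu_read_lock() as described above.
 */
static void example_gang(void)
{
	void *batch[16];
	unsigned int i, nr;

	rcu_read_lock();
	nr = radix_tree_gang_lookup(&example_tree, batch, 0, ARRAY_SIZE(batch));
	for (i = 0; i < nr; i++)
		pr_debug("found entry %p\n", batch[i]);
	rcu_read_unlock();
}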
/**
 * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *			based on a tag
 * @root:	radix tree root
 *
 * Performs an index-ascending scan of the tree for present items which
 * have the tag indexed by @tag set.
 */
/**
 * radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *			radix tree based on a tag
 * @root:	radix tree root
 *
 * Performs an index-ascending scan of the tree for present items which
 * have the tag indexed by @tag set.
 */
	int values = xa_is_value(old) ? -1 : 0;		/* in __radix_tree_delete() */

	replace_slot(slot, NULL, node, -1, values);
/**
 * radix_tree_iter_delete - delete the entry at this iterator position
 * @root:	radix tree root
 *
 * Delete the entry at the position currently pointed to by the iterator.
 * This may result in the current node being freed; if it is, the iterator
 * is advanced so that it will not reference the freed memory.  This
 * function may be called without any locking if there are no other threads
 * which can access this tree.
 */
	if (__radix_tree_delete(root, iter->node, slot))	/* in radix_tree_iter_delete() */
		iter->index = iter->next_index;
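/*
 * Illustrative usage sketch (not part of this file): emptying a tree by
 * deleting every entry at the iterator position, which is permitted without
 * locking when no other threads can access the tree, per the comment above.
 */
static void example_drain(struct radix_tree_root *root)
{
	struct radix_tree_iter iter;
	void __rcu **slot;

	radix_tree_for_each_slot(slot, root, &iter, 0)
		radix_tree_iter_delete(root, &iter, slot);
}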
/**
 * radix_tree_delete_item - delete an item from a radix tree
 * @root:	radix tree root
 *
 * Remove @item at @index from the radix tree rooted at @root.
 */
/**
 * radix_tree_delete - delete an entry from a radix tree
 * @root:	radix tree root
 *
 * Remove the entry at @index from the radix tree rooted at @root.
 */
/**
 * radix_tree_tagged - test whether any items in the tree are tagged
 * @root:	radix tree root
 */

/**
 * idr_preload - preload for idr_alloc()
 */
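/*
 * Illustrative usage sketch (not part of this file): the idr_preload() /
 * idr_alloc() pattern this helper supports.  "example_idr" and
 * "example_idr_lock" are placeholders; <linux/idr.h> provides the API.
 */
static DEFINE_IDR(example_idr);
static DEFINE_SPINLOCK(example_idr_lock);

static int example_idr_add(void *ptr)
{
	int id;

	idr_preload(GFP_KERNEL);		/* may sleep; disables preemption */
	spin_lock(&example_idr_lock);
	id = idr_alloc(&example_idr, ptr, 0, 0, GFP_NOWAIT);
	spin_unlock(&example_idr_lock);
	idr_preload_end();

	return id;				/* new ID, or a negative errno */
}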
	void __rcu **slot = (void __rcu **)&root->xa_head;	/* in idr_get_free() */
	unsigned long maxindex, start = iter->next_index;

		return ERR_PTR(-ENOSPC);

		child = rcu_dereference_raw(root->xa_head);

		shift -= RADIX_TREE_MAP_SHIFT;

				return ERR_PTR(-ENOMEM);

			node->count++;

				return ERR_PTR(-ENOSPC);

			offset = node->offset + 1;
			node = node->parent;

			shift = node->shift;
		child = rcu_dereference_raw(node->slots[offset]);

		slot = &node->slots[offset];

	iter->index = start;
	if (node)
		iter->next_index = 1 + min(max, (start | node_maxindex(node)));
	else
		iter->next_index = 1;
	iter->node = node;
/**
 * idr_destroy - release all internal memory from an IDR
 *
 * A typical clean-up sequence for objects stored in an idr tree will use
 * idr_for_each() to free all objects, if necessary, then idr_destroy() to
 * free the memory used to keep track of those objects.
 */
	struct radix_tree_node *node = rcu_dereference_raw(idr->idr_rt.xa_head);	/* in idr_destroy() */

	idr->idr_rt.xa_head = NULL;
	root_tag_set(&idr->idr_rt, IDR_FREE);
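/*
 * Illustrative usage sketch (not part of this file): the clean-up sequence
 * described above -- idr_for_each() to free the objects, then idr_destroy()
 * to release the tracking structures.  Assumes the stored pointers were
 * allocated with kmalloc() by their owner (<linux/slab.h>).
 */
static int example_free_one(int id, void *ptr, void *data)
{
	kfree(ptr);
	return 0;
}

static void example_idr_teardown(void)
{
	idr_for_each(&example_idr, example_free_one, NULL);
	idr_destroy(&example_idr);
}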
	INIT_LIST_HEAD(&node->private_list);		/* in radix_tree_node_ctor() */
	/* Free per-cpu pool of preloaded nodes (radix_tree_cpu_dead()) */
	while (rtp->nr) {
		node = rtp->nodes;
		rtp->nodes = node->parent;
		rtp->nr--;