Lines matching the full identifier "mt" in lib/maple_tree.c

209 static void mt_set_height(struct maple_tree *mt, unsigned char height)  in mt_set_height()  argument
211 unsigned int new_flags = mt->ma_flags; in mt_set_height()
214 MT_BUG_ON(mt, height > MAPLE_HEIGHT_MAX); in mt_set_height()
216 mt->ma_flags = new_flags; in mt_set_height()
224 static inline unsigned int mt_attr(struct maple_tree *mt) in mt_attr() argument
226 return mt->ma_flags & ~MT_FLAGS_HEIGHT_MASK; in mt_attr()
390 static __always_inline bool mt_is_alloc(struct maple_tree *mt) in mt_is_alloc() argument
392 return (mt->ma_flags & MT_FLAGS_ALLOC_RANGE); in mt_is_alloc()
700 * @mt: The maple node type
704 static inline void __rcu **ma_slots(struct maple_node *mn, enum maple_type mt) in ma_slots() argument
706 switch (mt) { in ma_slots()
719 static inline bool mt_write_locked(const struct maple_tree *mt) in mt_write_locked() argument
721 return mt_external_lock(mt) ? mt_write_lock_is_held(mt) : in mt_write_locked()
722 lockdep_is_held(&mt->ma_lock); in mt_write_locked()
725 static __always_inline bool mt_locked(const struct maple_tree *mt) in mt_locked() argument
727 return mt_external_lock(mt) ? mt_lock_is_held(mt) : in mt_locked()
728 lockdep_is_held(&mt->ma_lock); in mt_locked()
731 static __always_inline void *mt_slot(const struct maple_tree *mt, in mt_slot() argument
734 return rcu_dereference_check(slots[offset], mt_locked(mt)); in mt_slot()
737 static __always_inline void *mt_slot_locked(struct maple_tree *mt, in mt_slot_locked() argument
740 return rcu_dereference_protected(slots[offset], mt_write_locked(mt)); in mt_slot_locked()
781 static inline void *mt_root_locked(struct maple_tree *mt) in mt_root_locked() argument
783 return rcu_dereference_protected(mt->ma_root, mt_write_locked(mt)); in mt_root_locked()
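
The slot and root accessors above encode the tree's locking contract: mt_slot() is checked against mt_locked() (RCU read lock, an external lock, or the internal spinlock), while mt_slot_locked() and mt_root_locked() require mt_write_locked(). A minimal sketch of a caller satisfying both sides, assuming a tree using the internal lock; the example_* names are hypothetical:

    #include <linux/gfp.h>
    #include <linux/maple_tree.h>
    #include <linux/rcupdate.h>

    static DEFINE_MTREE(example_tree);

    /* Read side: the advanced mas_* API leaves locking to the caller, so
     * take the RCU read lock to satisfy the mt_locked() check in mt_slot(). */
    static void *example_read(unsigned long index)
    {
            MA_STATE(mas, &example_tree, index, index);
            void *entry;

            rcu_read_lock();
            entry = mas_walk(&mas);
            rcu_read_unlock();

            return entry;
    }

    /* Write side: hold the tree lock so the mt_write_locked() checks in
     * mt_slot_locked() and mt_root_locked() hold. */
    static int example_write(unsigned long index, void *ptr, gfp_t gfp)
    {
            MA_STATE(mas, &example_tree, index, index);
            int ret;

            mtree_lock(&example_tree);
            ret = mas_store_gfp(&mas, ptr, gfp);
            mtree_unlock(&example_tree);

            return ret;
    }
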
798 enum maple_type mt) in ma_meta() argument
800 switch (mt) { in ma_meta()
811 * @mt: The maple node type
815 static inline void ma_set_meta(struct maple_node *mn, enum maple_type mt, in ma_set_meta() argument
818 struct maple_metadata *meta = ma_meta(mn, mt); in ma_set_meta()
826 * @mt: The maple tree
830 static inline void mt_clear_meta(struct maple_tree *mt, struct maple_node *mn, in mt_clear_meta() argument
843 next = mt_slot_locked(mt, slots, in mt_clear_meta()
864 * @mt: The maple node type
867 enum maple_type mt) in ma_meta_end() argument
869 struct maple_metadata *meta = ma_meta(mn, mt); in ma_meta_end()
886 * @mt: The maple node type
889 static inline void ma_set_meta_gap(struct maple_node *mn, enum maple_type mt, in ma_set_meta_gap() argument
893 struct maple_metadata *meta = ma_meta(mn, mt); in ma_set_meta_gap()
920 static void mt_destroy_walk(struct maple_enode *enode, struct maple_tree *mt,
1307 enum maple_type mt; in mas_leaf_max_gap() local
1315 mt = mte_node_type(mas->node); in mas_leaf_max_gap()
1317 slots = ma_slots(mn, mt); in mas_leaf_max_gap()
1319 if (unlikely(ma_is_dense(mt))) { in mas_leaf_max_gap()
1321 for (i = 0; i < mt_slots[mt]; i++) { in mas_leaf_max_gap()
1339 pivots = ma_pivots(mn, mt); in mas_leaf_max_gap()
1348 max_piv = ma_data_end(mn, mt, pivots, mas->max) - 1; in mas_leaf_max_gap()
1382 * @mt: The maple node type
1390 ma_max_gap(struct maple_node *node, unsigned long *gaps, enum maple_type mt, in ma_max_gap() argument
1396 i = offset = ma_meta_end(node, mt); in ma_max_gap()
1418 enum maple_type mt; in mas_max_gap() local
1421 mt = mte_node_type(mas->node); in mas_max_gap()
1422 if (ma_is_leaf(mt)) in mas_max_gap()
1426 MAS_BUG_ON(mas, mt != maple_arange_64); in mas_max_gap()
1428 gaps = ma_gaps(node, mt); in mas_max_gap()
1590 enum maple_type mt; in mas_find_child() local
1598 mt = mte_node_type(mas->node); in mas_find_child()
1600 slots = ma_slots(node, mt); in mas_find_child()
1601 pivots = ma_pivots(node, mt); in mas_find_child()
1602 end = ma_data_end(node, mt, pivots, mas->max); in mas_find_child()
1739 enum maple_type mt; in mas_mab_cp() local
1747 mt = mte_node_type(mas->node); in mas_mab_cp()
1748 pivots = ma_pivots(node, mt); in mas_mab_cp()
1756 piv_end = min(mas_end, mt_pivots[mt]); in mas_mab_cp()
1766 b_node->pivot[j] = mas_safe_pivot(mas, pivots, i, mt); in mas_mab_cp()
1771 slots = ma_slots(node, mt); in mas_mab_cp()
1773 if (!ma_is_leaf(mt) && mt_is_alloc(mas->tree)) { in mas_mab_cp()
1774 gaps = ma_gaps(node, mt); in mas_mab_cp()
1783 * @mt: The maple type
1787 enum maple_type mt, unsigned char end) in mas_leaf_set_meta() argument
1789 if (end < mt_slots[mt] - 1) in mas_leaf_set_meta()
1790 ma_set_meta(node, mt, 0, end); in mas_leaf_set_meta()
1805 enum maple_type mt = mte_node_type(mas->node); in mab_mas_cp() local
1807 void __rcu **slots = ma_slots(node, mt); in mab_mas_cp()
1808 unsigned long *pivots = ma_pivots(node, mt); in mab_mas_cp()
1812 if (mab_end - mab_start > mt_pivots[mt]) in mab_mas_cp()
1815 if (!pivots[mt_pivots[mt] - 1]) in mab_mas_cp()
1816 slots[mt_pivots[mt]] = NULL; in mab_mas_cp()
1830 if (likely(!ma_is_leaf(mt) && mt_is_alloc(mas->tree))) { in mab_mas_cp()
1834 gaps = ma_gaps(node, mt); in mab_mas_cp()
1843 ma_set_meta(node, mt, offset, end); in mab_mas_cp()
1845 mas_leaf_set_meta(node, mt, end); in mab_mas_cp()
4093 enum maple_type mt; in mas_prev_node() local
4122 mt = mte_node_type(mas->node); in mas_prev_node()
4125 slots = ma_slots(node, mt); in mas_prev_node()
4130 mt = mte_node_type(mas->node); in mas_prev_node()
4132 pivots = ma_pivots(node, mt); in mas_prev_node()
4133 offset = ma_data_end(node, mt, pivots, max); in mas_prev_node()
4138 slots = ma_slots(node, mt); in mas_prev_node()
4140 pivots = ma_pivots(node, mt); in mas_prev_node()
4263 enum maple_type mt; in mas_next_node() local
4281 mt = mte_node_type(mas->node); in mas_next_node()
4282 pivots = ma_pivots(node, mt); in mas_next_node()
4283 node_end = ma_data_end(node, mt, pivots, mas->max); in mas_next_node()
4289 slots = ma_slots(node, mt); in mas_next_node()
4302 mt = mte_node_type(mas->node); in mas_next_node()
4303 slots = ma_slots(node, mt); in mas_next_node()
4310 pivots = ma_pivots(node, mt); in mas_next_node()
4312 mas->max = mas_safe_pivot(mas, pivots, mas->offset, mt); in mas_next_node()
4314 mt = mte_node_type(enode); in mas_next_node()
4315 pivots = ma_pivots(tmp, mt); in mas_next_node()
4316 mas->end = ma_data_end(tmp, mt, pivots, mas->max); in mas_next_node()
4737 enum maple_type mt; in mas_empty_area() local
4767 mt = mte_node_type(mas->node); in mas_empty_area()
4768 pivots = ma_pivots(node, mt); in mas_empty_area()
4773 mas->end = ma_data_end(node, mt, pivots, mas->max); in mas_empty_area()
4842 * @mt: the maple tree
4850 unsigned char mte_dead_leaves(struct maple_enode *enode, struct maple_tree *mt, in mte_dead_leaves() argument
4859 entry = mt_slot(mt, slots, offset); in mte_dead_leaves()
4946 struct maple_tree *mt, struct maple_enode *prev, unsigned char offset) in mte_destroy_descend() argument
4959 next = mt_slot_locked(mt, slots, next_offset); in mte_destroy_descend()
4961 next = mt_slot_locked(mt, slots, ++next_offset); in mte_destroy_descend()
4975 static void mt_destroy_walk(struct maple_enode *enode, struct maple_tree *mt, in mt_destroy_walk() argument
4989 slots = mte_destroy_descend(&enode, mt, start, 0); in mt_destroy_walk()
4996 node->slot_len = mte_dead_leaves(enode, mt, slots); in mt_destroy_walk()
5009 tmp = mt_slot_locked(mt, slots, offset); in mt_destroy_walk()
5013 slots = mte_destroy_descend(&enode, mt, parent, offset); in mt_destroy_walk()
5020 node->slot_len = mte_dead_leaves(enode, mt, slots); in mt_destroy_walk()
5028 mt_clear_meta(mt, node, node->type); in mt_destroy_walk()
5034 * @mt: the tree to free - needed for node types.
5039 struct maple_tree *mt) in mte_destroy_walk() argument
5043 if (mt_in_rcu(mt)) { in mte_destroy_walk()
5044 mt_destroy_walk(enode, mt, false); in mte_destroy_walk()
5047 mt_destroy_walk(enode, mt, true); in mte_destroy_walk()
5341 * @mt: The maple tree
5351 void *mt_next(struct maple_tree *mt, unsigned long index, unsigned long max) in mt_next() argument
5354 MA_STATE(mas, mt, index, index); in mt_next()
5471 * @mt: The maple tree
5481 void *mt_prev(struct maple_tree *mt, unsigned long index, unsigned long min) in mt_prev() argument
5484 MA_STATE(mas, mt, index, index); in mt_prev()
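
mt_next() and mt_prev() return the entry immediately after or before @index, bounded by @max or @min, and take the RCU read lock themselves. A small sketch, assuming a populated tree; the names are hypothetical:

    #include <linux/limits.h>
    #include <linux/maple_tree.h>
    #include <linux/printk.h>

    /* Peek at the neighbours of @index; no caller-side locking is needed
     * because mt_next()/mt_prev() take the RCU read lock internally. */
    static void example_neighbours(struct maple_tree *tree, unsigned long index)
    {
            void *after = mt_next(tree, index, ULONG_MAX);
            void *before = mt_prev(tree, index, 0);

            pr_debug("index %lu: next %s, prev %s\n", index,
                     after ? "present" : "none", before ? "present" : "none");
    }
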
5875 * @mt: The maple tree
5880 void *mtree_load(struct maple_tree *mt, unsigned long index) in mtree_load() argument
5882 MA_STATE(mas, mt, index, index); in mtree_load()
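
mtree_load() is the simple lookup: it handles its own RCU read-side locking and returns the entry covering @index, or NULL when the index is empty. A sketch with a hypothetical tree:

    #include <linux/maple_tree.h>

    static DEFINE_MTREE(addr_map);	/* hypothetical tree */

    static bool example_index_present(unsigned long index)
    {
            /* NULL means no entry spans this index. */
            return mtree_load(&addr_map, index) != NULL;
    }
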
5913 * @mt: The maple tree
5922 int mtree_store_range(struct maple_tree *mt, unsigned long index, in mtree_store_range() argument
5925 MA_STATE(mas, mt, index, last); in mtree_store_range()
5935 mtree_lock(mt); in mtree_store_range()
5937 mtree_unlock(mt); in mtree_store_range()
5945 * @mt: The maple tree
5953 int mtree_store(struct maple_tree *mt, unsigned long index, void *entry, in mtree_store() argument
5956 return mtree_store_range(mt, index, index, entry, gfp); in mtree_store()
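
mtree_store_range() writes a single entry over the whole span [index, last], overwriting whatever was there, and mtree_store() is the single-index case built on it, as the line above shows. A sketch with hypothetical values:

    #include <linux/gfp.h>
    #include <linux/maple_tree.h>

    static int example_store_span(struct maple_tree *tree, void *ptr)
    {
            int ret;

            /* One entry covering indices 0x1000..0x1fff inclusive. */
            ret = mtree_store_range(tree, 0x1000, 0x1fff, ptr, GFP_KERNEL);
            if (ret)
                    return ret;

            /* Single-index store; storing NULL would clear the slot. */
            return mtree_store(tree, 0x2000, ptr, GFP_KERNEL);
    }
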
5962 * @mt: The maple tree
5971 int mtree_insert_range(struct maple_tree *mt, unsigned long first, in mtree_insert_range() argument
5974 MA_STATE(ms, mt, first, last); in mtree_insert_range()
5983 mtree_lock(mt); in mtree_insert_range()
5989 mtree_unlock(mt); in mtree_insert_range()
6000 * @mt: The maple tree
6008 int mtree_insert(struct maple_tree *mt, unsigned long index, void *entry, in mtree_insert() argument
6011 return mtree_insert_range(mt, index, index, entry, gfp); in mtree_insert()
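
Unlike the store variants, mtree_insert() and mtree_insert_range() only succeed when the target range is currently empty and return -EEXIST otherwise. A sketch, with hypothetical names:

    #include <linux/errno.h>
    #include <linux/gfp.h>
    #include <linux/maple_tree.h>
    #include <linux/printk.h>

    static int example_insert_once(struct maple_tree *tree, unsigned long index,
                                   void *ptr)
    {
            int ret = mtree_insert(tree, index, ptr, GFP_KERNEL);

            if (ret == -EEXIST)
                    pr_debug("index %lu already occupied\n", index);

            return ret;
    }
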
6015 int mtree_alloc_range(struct maple_tree *mt, unsigned long *startp, in mtree_alloc_range() argument
6021 MA_STATE(mas, mt, 0, 0); in mtree_alloc_range()
6022 if (!mt_is_alloc(mt)) in mtree_alloc_range()
6028 mtree_lock(mt); in mtree_alloc_range()
6048 mtree_unlock(mt); in mtree_alloc_range()
6056 * @mt: The maple tree.
6064 * Finds an empty entry in @mt after @next, stores the new index into
6067 * @mt must be initialized with the MT_FLAGS_ALLOC_RANGE flag.
6069 * Context: Any context. Takes and releases the mt.lock. May sleep if
6074 * allocated, -EINVAL if @mt cannot be used, or -EBUSY if there are no
6077 int mtree_alloc_cyclic(struct maple_tree *mt, unsigned long *startp, in mtree_alloc_cyclic() argument
6083 MA_STATE(mas, mt, 0, 0); in mtree_alloc_cyclic()
6085 if (!mt_is_alloc(mt)) in mtree_alloc_cyclic()
6089 mtree_lock(mt); in mtree_alloc_cyclic()
6092 mtree_unlock(mt); in mtree_alloc_cyclic()
6097 int mtree_alloc_rrange(struct maple_tree *mt, unsigned long *startp, in mtree_alloc_rrange() argument
6103 MA_STATE(mas, mt, 0, 0); in mtree_alloc_rrange()
6104 if (!mt_is_alloc(mt)) in mtree_alloc_rrange()
6110 mtree_lock(mt); in mtree_alloc_rrange()
6130 mtree_unlock(mt); in mtree_alloc_rrange()
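
The allocating variants (mtree_alloc_range(), mtree_alloc_rrange(), mtree_alloc_cyclic()) search for an empty gap and store @entry there; as the mt_is_alloc() checks above show, they require a tree initialised with MT_FLAGS_ALLOC_RANGE. A sketch of cyclic ID allocation, with hypothetical names and ranges:

    #include <linux/gfp.h>
    #include <linux/maple_tree.h>

    /* Allocation needs an arange tree: MT_FLAGS_ALLOC_RANGE. */
    static struct maple_tree id_space = MTREE_INIT(id_space, MT_FLAGS_ALLOC_RANGE);
    static unsigned long id_next;	/* where the cyclic search resumes */

    static int example_alloc_id(void *ptr, unsigned long *id)
    {
            /* Find a free index in [1, 1023], starting at id_next and
             * wrapping around; the chosen index is written to *id. */
            return mtree_alloc_cyclic(&id_space, id, ptr, 1, 1023, &id_next,
                                      GFP_KERNEL);
    }

    static int example_alloc_low(void *ptr, unsigned long *id)
    {
            /* Lowest free single-index gap in [1, 1023]. */
            return mtree_alloc_range(&id_space, id, ptr, 1, 1, 1023, GFP_KERNEL);
    }
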
6138 * @mt: The maple tree
6146 void *mtree_erase(struct maple_tree *mt, unsigned long index) in mtree_erase() argument
6150 MA_STATE(mas, mt, index, index); in mtree_erase()
6153 mtree_lock(mt); in mtree_erase()
6155 mtree_unlock(mt); in mtree_erase()
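
mtree_erase() removes whatever entry covers @index, handling the locking itself, and returns the old entry so the caller can release the object it pointed to. A sketch assuming the entries were kmalloc()ed by the caller:

    #include <linux/maple_tree.h>
    #include <linux/slab.h>

    static void example_remove(struct maple_tree *tree, unsigned long index)
    {
            /* Returns NULL if the index was already empty; kfree(NULL) is fine. */
            void *old = mtree_erase(tree, index);

            kfree(old);
    }
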
6355 * @mt: The source maple tree
6374 int __mt_dup(struct maple_tree *mt, struct maple_tree *new, gfp_t gfp) in __mt_dup() argument
6377 MA_STATE(mas, mt, 0, 0); in __mt_dup()
6393 * @mt: The source maple tree
6411 int mtree_dup(struct maple_tree *mt, struct maple_tree *new, gfp_t gfp) in mtree_dup() argument
6414 MA_STATE(mas, mt, 0, 0); in mtree_dup()
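
__mt_dup() copies a whole tree under the caller's locking, while mtree_dup() does the same with the locking handled internally. A sketch, assuming @dst is a freshly initialised, empty tree:

    #include <linux/gfp.h>
    #include <linux/maple_tree.h>

    static int example_clone(struct maple_tree *src, struct maple_tree *dst)
    {
            /* Entries are shared, not deep-copied: both trees end up
             * pointing at the same objects. */
            return mtree_dup(src, dst, GFP_KERNEL);
    }
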
6434 * @mt: The maple tree
6438 void __mt_destroy(struct maple_tree *mt) in __mt_destroy() argument
6440 void *root = mt_root_locked(mt); in __mt_destroy()
6442 rcu_assign_pointer(mt->ma_root, NULL); in __mt_destroy()
6444 mte_destroy_walk(root, mt); in __mt_destroy()
6446 mt->ma_flags = mt_attr(mt); in __mt_destroy()
6452 * @mt: The maple tree
6456 void mtree_destroy(struct maple_tree *mt) in mtree_destroy() argument
6458 mtree_lock(mt); in mtree_destroy()
6459 __mt_destroy(mt); in mtree_destroy()
6460 mtree_unlock(mt); in mtree_destroy()
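
As the lines above show, mtree_destroy() is just __mt_destroy() wrapped in the tree lock: it frees every node, clears ma_root and resets ma_flags to the original attributes. The stored entries themselves are not freed. A sketch:

    #include <linux/maple_tree.h>

    static void example_teardown(struct maple_tree *tree)
    {
            /* Free the tree structure; any objects the entries pointed at
             * must be released separately (e.g. via mt_for_each() first). */
            mtree_destroy(tree);
    }
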
6466 * @mt: The maple tree
6480 void *mt_find(struct maple_tree *mt, unsigned long *index, unsigned long max) in mt_find() argument
6482 MA_STATE(mas, mt, *index, *index); in mt_find()
6518 if (MT_WARN_ON(mt, (*index) && ((*index) <= copy))) in mt_find()
6530 * @mt: The maple tree
6540 void *mt_find_after(struct maple_tree *mt, unsigned long *index, in mt_find_after() argument
6546 return mt_find(mt, index, max); in mt_find_after()
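
mt_find() returns the first entry at or above *index (up to @max) and advances *index past it, and mt_find_after() continues from there; together they are what the mt_for_each() helper in maple_tree.h is built from. A sketch of a full iteration:

    #include <linux/limits.h>
    #include <linux/maple_tree.h>

    static unsigned int example_count_entries(struct maple_tree *tree)
    {
            unsigned long index = 0;
            unsigned int count = 0;
            void *entry;

            /* mt_find()/mt_find_after() handle the RCU locking themselves. */
            mt_for_each(tree, entry, index, ULONG_MAX)
                    count++;

            return count;
    }
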
6663 static void mt_dump_node(const struct maple_tree *mt, void *entry,
6702 static void mt_dump_range64(const struct maple_tree *mt, void *entry, in mt_dump_range64() argument
6732 mt_dump_entry(mt_slot(mt, node->slot, i), in mt_dump_range64()
6735 mt_dump_node(mt, mt_slot(mt, node->slot, i), in mt_dump_range64()
6755 static void mt_dump_arange64(const struct maple_tree *mt, void *entry, in mt_dump_arange64() argument
6794 mt_dump_node(mt, mt_slot(mt, node->slot, i), in mt_dump_arange64()
6814 static void mt_dump_node(const struct maple_tree *mt, void *entry, in mt_dump_node() argument
6832 mt_dump_entry(mt_slot(mt, node->slot, i), in mt_dump_node()
6838 mt_dump_range64(mt, entry, min, max, depth, format); in mt_dump_node()
6841 mt_dump_arange64(mt, entry, min, max, depth, format); in mt_dump_node()
6849 void mt_dump(const struct maple_tree *mt, enum mt_dump_format format) in mt_dump() argument
6851 void *entry = rcu_dereference_check(mt->ma_root, mt_locked(mt)); in mt_dump()
6854 mt, mt->ma_flags, mt_height(mt), entry); in mt_dump()
6856 mt_dump_node(mt, entry, 0, mt_node_max(entry), 0, format); in mt_dump()
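
mt_dump() prints the root, flags, height and every node to the kernel log in either decimal or hex range format; it is only available with CONFIG_DEBUG_MAPLE_TREE. A sketch:

    #include <linux/maple_tree.h>

    static void example_dump(const struct maple_tree *tree)
    {
    #ifdef CONFIG_DEBUG_MAPLE_TREE
            /* mt_dump_hex prints ranges in hex; mt_dump_dec in decimal. */
            mt_dump(tree, mt_dump_hex);
    #endif
    }
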
6872 enum maple_type mt = mte_node_type(mas->node); in mas_validate_gaps() local
6877 unsigned long *pivots = ma_pivots(node, mt); in mas_validate_gaps()
6880 if (ma_is_dense(mt)) { in mas_validate_gaps()
6893 gaps = ma_gaps(node, mt); in mas_validate_gaps()
6895 p_end = mas_safe_pivot(mas, pivots, i, mt); in mas_validate_gaps()
6923 if (mt == maple_arange_64) { in mas_validate_gaps()
7106 static void mt_validate_nulls(struct maple_tree *mt) in mt_validate_nulls() argument
7111 MA_STATE(mas, mt, 0, 0); in mt_validate_nulls()
7127 MT_BUG_ON(mt, !last && !entry); in mt_validate_nulls()
7148 void mt_validate(struct maple_tree *mt) in mt_validate() argument
7153 MA_STATE(mas, mt, 0, 0); in mt_validate()
7173 if (mt_is_alloc(mt)) in mt_validate()
7177 mt_validate_nulls(mt); in mt_validate()
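
mt_validate() is the debug-only consistency check behind the self-tests: it walks every node and asserts on bad pivots, mis-tracked gaps and misplaced NULLs (via mas_validate_gaps() and mt_validate_nulls() above). A sketch, assuming the caller holds the tree lock:

    #include <linux/maple_tree.h>

    static void example_check(struct maple_tree *tree)
    {
    #ifdef CONFIG_DEBUG_MAPLE_TREE
            mtree_lock(tree);
            mt_validate(tree);
            mtree_unlock(tree);
    #endif
    }
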