Searched refs:mas_next (Results 1 – 5 of 5) sorted by relevance
/linux/lib/
test_maple_tree.c
   1336  entry = mas_next(&mas, limit);                            in check_next_entry()
   1535  ptr = mas_next(&mas, ULONG_MAX);                          in check_gap_combining()
   1553  ptr = mas_next(&mas, ULONG_MAX);                          in check_gap_combining()
   1634  mas_next(&mas, ULONG_MAX);                                in check_gap_combining()
   1635  entry = mas_next(&mas, ULONG_MAX);                        in check_gap_combining()
   1665  entry = mas_next(&mas, ULONG_MAX);                        in check_gap_combining()
   1667  mas_next(&mas, ULONG_MAX); /* go to the next entry. */    in check_gap_combining()
   2163  mas_next(&mas, 1000);                                     in next_prev_test()
   2193  val = mas_next(&mas, 1000);                               in next_prev_test()
   2203  val = mas_next(                                           in next_prev_test()
   [all...]
maple_tree.c
   5742  void *mas_next(struct ma_state *mas, unsigned long max)   in mas_next() (function definition)
   5752  EXPORT_SYMBOL_GPL(mas_next);
   5795  entry = mas_next(&mas, max);                              in mt_next()
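The hits above show where mas_next() is defined and exported, and that mt_next() (line 5795) consumes it. Roughly, mt_next() is a convenience wrapper that builds a throwaway maple state and calls mas_next() inside an RCU read-side critical section. The sketch below illustrates that wrapper pattern under those assumptions; it is not a verbatim copy of the in-tree body, and mt_next_sketch() is a stand-in name.

	#include <linux/maple_tree.h>
	#include <linux/rcupdate.h>

	/* Sketch of an mt_next()-style wrapper: set up a throwaway maple state
	 * at @index, take the RCU read lock, and let mas_next() return the entry
	 * that follows @index, bounded by @max. */
	void *mt_next_sketch(struct maple_tree *mt, unsigned long index,
			     unsigned long max)
	{
		void *entry;
		MA_STATE(mas, mt, index, index);

		rcu_read_lock();
		entry = mas_next(&mas, max);
		rcu_read_unlock();

		return entry;
	}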
/linux/Documentation/core-api/
maple_tree.rst
    173  Using a maple state allows mas_next() and mas_prev() to function as if the
    175  performance penalty is outweighed by cache optimization. mas_next() will
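The documentation hit describes the iterator use case: keeping one maple state alive lets repeated mas_next() and mas_prev() calls behave like walking a linked list instead of re-descending the tree on every lookup. A minimal reader sketch of that pattern follows; walk_tree() is a hypothetical helper, not something from the files listed here.

	#include <linux/maple_tree.h>
	#include <linux/rcupdate.h>
	#include <linux/printk.h>

	/* Hypothetical reader: reuse one maple state so each mas_next() call
	 * picks up where the previous one left off. */
	static void walk_tree(struct maple_tree *mt)
	{
		void *entry;
		MA_STATE(mas, mt, 0, 0);

		rcu_read_lock();
		/* mas_next() returns the entry after the current position, or
		 * NULL once nothing remains at or below the maximum index. */
		while ((entry = mas_next(&mas, ULONG_MAX)) != NULL)
			pr_info("entry spans [%lu, %lu]\n", mas.index, mas.last);
		rcu_read_unlock();
	}

Note that mas_next() always advances past the current position; when the entry stored at the starting index itself is wanted, mas_find() is the usual first call.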
/linux/mm/
vma.c
   1353  vma_test = mas_next(&test, vms->vma_count - 1);           in vms_gather_munmap_vmas()
/linux/tools/testing/radix-tree/
maple.c
     787  entry = mas_next(&tmp, mas_end->last);                                  in mas_ce2_over_count()
     796  entry = mas_next(&tmp, mas_end->last);                                  in mas_ce2_over_count()
   35237  MT_BUG_ON(mt, mas_next(&mas_reader, ULONG_MAX) != xa_mk_value(val));    in check_rcu_simulated()
   35252  mas_next(&mas_reader, ULONG_MAX);                                       in check_rcu_simulated()