Searched refs:mas_next (Results 1 – 6 of 6) sorted by relevance
/linux-6.6.21/lib/
test_maple_tree.c
    1330  entry = mas_next(&mas, limit);  in check_next_entry()
    1443  ptr = mas_next(&mas, ULONG_MAX);  in check_root_expand()
    1461  ptr = mas_next(&mas, ULONG_MAX);  in check_root_expand()
    1542  mas_next(&mas, ULONG_MAX);  in check_gap_combining()
    1543  entry = mas_next(&mas, ULONG_MAX);  in check_gap_combining()
    1573  entry = mas_next(&mas, ULONG_MAX);  in check_gap_combining()
    1575  mas_next(&mas, ULONG_MAX); /* go to the next entry. */  in check_gap_combining()
    2055  mas_next(&mas, 1000);  in next_prev_test()
    2085  val = mas_next(&mas, 1000);  in next_prev_test()
    2095  val = mas_next(&mas, 1000);  in next_prev_test()
    [all …]
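The lib/test_maple_tree.c hits above exercise mas_next() against trees with known contents. A minimal sketch in the same spirit, not copied from the test file (the stored range, the value and the helper name check_next_sketch are made up for illustration):

#include <linux/maple_tree.h>
#include <linux/xarray.h>	/* xa_mk_value() */

/* Store one range, then check that mas_next() walks onto it. */
static void check_next_sketch(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, 10, 15, xa_mk_value(10), GFP_KERNEL);

	rcu_read_lock();
	MT_BUG_ON(mt, mas_next(&mas, ULONG_MAX) != xa_mk_value(10));
	rcu_read_unlock();
}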
maple_tree.c
    5714  void *mas_next(struct ma_state *mas, unsigned long max)  in mas_next() (function definition)
    5724  EXPORT_SYMBOL_GPL(mas_next);
    5767  entry = mas_next(&mas, max);  in mt_next()
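Line 5714 is the definition itself (state-based iteration bounded by max), and the hit at 5767 shows the tree-level convenience helper mt_next() reusing it. Such a wrapper roughly amounts to declaring a maple state for the tree and calling mas_next() under rcu_read_lock(); a sketch of that shape, not the verbatim kernel source:

void *mt_next_sketch(struct maple_tree *mt, unsigned long index,
		     unsigned long max)
{
	void *entry;
	MA_STATE(mas, mt, index, index);	/* maple state anchored at index */

	rcu_read_lock();			/* readers need RCU (or the tree lock) */
	entry = mas_next(&mas, max);		/* next entry after index, up to max */
	rcu_read_unlock();

	return entry;
}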
/linux-6.6.21/Documentation/core-api/ |
maple_tree.rst
    169  Using a maple state allows mas_next() and mas_prev() to function as if the
    171  performance penalty is outweighed by cache optimization. mas_next() will
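This Documentation excerpt is the key usage note: once a maple state is declared, repeated mas_next() calls walk the occupied ranges as if the tree were a linked list. A minimal reader in that style (demo_tree and the printout are illustrative, not from the kernel):

#include <linux/maple_tree.h>
#include <linux/printk.h>

static DEFINE_MTREE(demo_tree);		/* hypothetical tree for illustration */

static void walk_demo_tree(void)
{
	MA_STATE(mas, &demo_tree, 0, 0);
	void *entry;

	rcu_read_lock();
	/* Each call advances to the next occupied range, linked-list style. */
	while ((entry = mas_next(&mas, ULONG_MAX)) != NULL)
		pr_info("entry %p spans [%lu, %lu]\n", entry, mas.index, mas.last);
	rcu_read_unlock();
}

The mas_for_each() helper declared alongside mas_next() in maple_tree.h expresses a similar loop, built on mas_find() rather than mas_next().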
/linux-6.6.21/include/linux/ |
maple_tree.h
    482  void *mas_next(struct ma_state *mas, unsigned long max);
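The second parameter of the prototype at line 482 caps the walk: mas_next() returns NULL once no further entry lies within that maximum index. A hypothetical bounded helper (count_after is not a kernel function) showing the parameter in use:

/* Count the entries mas_next() yields when walking from 'start' up to 'max'. */
static unsigned long count_after(struct maple_tree *mt, unsigned long start,
				 unsigned long max)
{
	MA_STATE(mas, mt, start, start);
	unsigned long n = 0;

	rcu_read_lock();
	while (mas_next(&mas, max))
		n++;
	rcu_read_unlock();

	return n;
}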
/linux-6.6.21/mm/ |
mmap.c
    1586  tmp = mas_next(&mas, ULONG_MAX);  in unmapped_area()
    1638  tmp = mas_next(&mas, ULONG_MAX);  in unmapped_area_topdown()
    1904  vma = mas_next(&mas, ULONG_MAX);  in find_vma_prev()
    2545  vma_test = mas_next(&test, count - 1);  in do_vmi_align_munmap()
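In the mm/mmap.c hits the stored entries are struct vm_area_struct pointers kept in the mm's maple tree (mm->mm_mt), and mas_next() fetches the VMA that follows the current position. A simplified walk in that style (print_vmas_sketch is illustrative; a real caller would hold mmap_read_lock(mm) so the VMAs stay stable):

#include <linux/mm.h>
#include <linux/printk.h>

/* Print each VMA of @mm in address order; caller holds mmap_read_lock(mm). */
static void print_vmas_sketch(struct mm_struct *mm)
{
	MA_STATE(mas, &mm->mm_mt, 0, 0);
	struct vm_area_struct *vma;

	rcu_read_lock();
	while ((vma = mas_next(&mas, ULONG_MAX)) != NULL)
		pr_info("vma [%#lx, %#lx)\n", vma->vm_start, vma->vm_end);
	rcu_read_unlock();
}

Most newer call sites go through the vma_iterator helpers such as vma_next(), which wrap a maple state in the same way.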
/linux-6.6.21/tools/testing/radix-tree/ |
maple.c
    759    entry = mas_next(&tmp, mas_end->last);  in mas_ce2_over_count()
    768    entry = mas_next(&tmp, mas_end->last);  in mas_ce2_over_count()
    35206  MT_BUG_ON(mt, mas_next(&mas_reader, ULONG_MAX) != xa_mk_value(val));  in check_rcu_simulated()
    35221  mas_next(&mas_reader, ULONG_MAX);  in check_rcu_simulated()