Lines matching "mt" (whole-identifier matches) in the maple tree implementation, lib/maple_tree.c. Each hit shows the source line number, the matching line, the enclosing function, and whether "mt" is bound there as an argument or a local. Note that the file overloads the name: in the mtree_*() entry points mt is a struct maple_tree *, while in node-level helpers such as ma_slots() and ma_meta() it is an enum maple_type.

229 static inline unsigned int mt_attr(struct maple_tree *mt)  in mt_attr()  argument
231 return mt->ma_flags & ~MT_FLAGS_HEIGHT_MASK; in mt_attr()
395 static __always_inline bool mt_is_alloc(struct maple_tree *mt) in mt_is_alloc() argument
397 return (mt->ma_flags & MT_FLAGS_ALLOC_RANGE); in mt_is_alloc()
766 * @mt: The maple node type
770 static inline void __rcu **ma_slots(struct maple_node *mn, enum maple_type mt) in ma_slots() argument
772 switch (mt) { in ma_slots()
785 static inline bool mt_write_locked(const struct maple_tree *mt) in mt_write_locked() argument
787 return mt_external_lock(mt) ? mt_write_lock_is_held(mt) : in mt_write_locked()
788 lockdep_is_held(&mt->ma_lock); in mt_write_locked()
791 static __always_inline bool mt_locked(const struct maple_tree *mt) in mt_locked() argument
793 return mt_external_lock(mt) ? mt_lock_is_held(mt) : in mt_locked()
794 lockdep_is_held(&mt->ma_lock); in mt_locked()
797 static __always_inline void *mt_slot(const struct maple_tree *mt, in mt_slot() argument
800 return rcu_dereference_check(slots[offset], mt_locked(mt)); in mt_slot()
803 static __always_inline void *mt_slot_locked(struct maple_tree *mt, in mt_slot_locked() argument
806 return rcu_dereference_protected(slots[offset], mt_write_locked(mt)); in mt_slot_locked()
847 static inline void *mt_root_locked(struct maple_tree *mt) in mt_root_locked() argument
849 return rcu_dereference_protected(mt->ma_root, mt_write_locked(mt)); in mt_root_locked()
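The three helpers above (mt_slot(), mt_slot_locked(), mt_root_locked()) wrap the standard lockdep-checked RCU dereference pattern: readers may run under rcu_read_lock() alone, while writers must hold the tree lock. Below is a minimal sketch of the same pattern outside the maple tree, covering only the internal-spinlock case (mt_external_lock() adds an externally supplied lock variant on top of this); my_table and the function names are hypothetical.

#include <linux/lockdep.h>
#include <linux/rcupdate.h>
#include <linux/spinlock.h>

struct my_table {
	spinlock_t lock;
	void __rcu *slot;
};

/* Reader: legal under rcu_read_lock() or with t->lock held; lockdep checks. */
static void *my_read_slot(struct my_table *t)
{
	return rcu_dereference_check(t->slot, lockdep_is_held(&t->lock));
}

/* Writer: t->lock must be held, so no RCU read-side section is required. */
static void *my_read_slot_locked(struct my_table *t)
{
	return rcu_dereference_protected(t->slot, lockdep_is_held(&t->lock));
}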
864 enum maple_type mt) in ma_meta() argument
866 switch (mt) { in ma_meta()
877 * @mt: The maple node type
881 static inline void ma_set_meta(struct maple_node *mn, enum maple_type mt, in ma_set_meta() argument
884 struct maple_metadata *meta = ma_meta(mn, mt); in ma_set_meta()
892 * @mt: The maple tree
896 static inline void mt_clear_meta(struct maple_tree *mt, struct maple_node *mn, in mt_clear_meta() argument
909 next = mt_slot_locked(mt, slots, in mt_clear_meta()
930 * @mt: The maple node type
933 enum maple_type mt) in ma_meta_end() argument
935 struct maple_metadata *meta = ma_meta(mn, mt); in ma_meta_end()
952 * @mt: The maple node type
955 static inline void ma_set_meta_gap(struct maple_node *mn, enum maple_type mt, in ma_set_meta_gap() argument
959 struct maple_metadata *meta = ma_meta(mn, mt); in ma_set_meta_gap()
986 static void mt_destroy_walk(struct maple_enode *enode, struct maple_tree *mt,
1480 enum maple_type mt; in mas_leaf_max_gap() local
1488 mt = mte_node_type(mas->node); in mas_leaf_max_gap()
1490 slots = ma_slots(mn, mt); in mas_leaf_max_gap()
1492 if (unlikely(ma_is_dense(mt))) { in mas_leaf_max_gap()
1494 for (i = 0; i < mt_slots[mt]; i++) { in mas_leaf_max_gap()
1512 pivots = ma_pivots(mn, mt); in mas_leaf_max_gap()
1521 max_piv = ma_data_end(mn, mt, pivots, mas->max) - 1; in mas_leaf_max_gap()
1555 * @mt: The maple node type
1563 ma_max_gap(struct maple_node *node, unsigned long *gaps, enum maple_type mt, in ma_max_gap() argument
1569 i = offset = ma_meta_end(node, mt); in ma_max_gap()
1591 enum maple_type mt; in mas_max_gap() local
1594 mt = mte_node_type(mas->node); in mas_max_gap()
1595 if (ma_is_leaf(mt)) in mas_max_gap()
1599 MAS_BUG_ON(mas, mt != maple_arange_64); in mas_max_gap()
1601 gaps = ma_gaps(node, mt); in mas_max_gap()
1761 enum maple_type mt; in mas_find_child() local
1769 mt = mte_node_type(mas->node); in mas_find_child()
1771 slots = ma_slots(node, mt); in mas_find_child()
1772 pivots = ma_pivots(node, mt); in mas_find_child()
1773 end = ma_data_end(node, mt, pivots, mas->max); in mas_find_child()
1924 enum maple_type mt; in mas_mab_cp() local
1932 mt = mte_node_type(mas->node); in mas_mab_cp()
1933 pivots = ma_pivots(node, mt); in mas_mab_cp()
1941 piv_end = min(mas_end, mt_pivots[mt]); in mas_mab_cp()
1951 b_node->pivot[j] = mas_safe_pivot(mas, pivots, i, mt); in mas_mab_cp()
1956 slots = ma_slots(node, mt); in mas_mab_cp()
1958 if (!ma_is_leaf(mt) && mt_is_alloc(mas->tree)) { in mas_mab_cp()
1959 gaps = ma_gaps(node, mt); in mas_mab_cp()
1968 * @mt: The maple type
1972 enum maple_type mt, unsigned char end) in mas_leaf_set_meta() argument
1974 if (end < mt_slots[mt] - 1) in mas_leaf_set_meta()
1975 ma_set_meta(node, mt, 0, end); in mas_leaf_set_meta()
1990 enum maple_type mt = mte_node_type(mas->node); in mab_mas_cp() local
1992 void __rcu **slots = ma_slots(node, mt); in mab_mas_cp()
1993 unsigned long *pivots = ma_pivots(node, mt); in mab_mas_cp()
1997 if (mab_end - mab_start > mt_pivots[mt]) in mab_mas_cp()
2000 if (!pivots[mt_pivots[mt] - 1]) in mab_mas_cp()
2001 slots[mt_pivots[mt]] = NULL; in mab_mas_cp()
2015 if (likely(!ma_is_leaf(mt) && mt_is_alloc(mas->tree))) { in mab_mas_cp()
2019 gaps = ma_gaps(node, mt); in mab_mas_cp()
2028 ma_set_meta(node, mt, offset, end); in mab_mas_cp()
2030 mas_leaf_set_meta(node, mt, end); in mab_mas_cp()
2038 * @mt: The maple node type
2041 enum maple_type mt) in mas_bulk_rebalance() argument
2049 if (end > mt_min_slots[mt]) { in mas_bulk_rebalance()
3005 enum maple_type mt = mte_node_type(mas->node); in mas_destroy_rebalance() local
3008 unsigned char offset, tmp, split = mt_slots[mt] / 2; in mas_destroy_rebalance()
3027 slots = ma_slots(newnode, mt); in mas_destroy_rebalance()
3028 pivs = ma_pivots(newnode, mt); in mas_destroy_rebalance()
3030 l_slots = ma_slots(left, mt); in mas_destroy_rebalance()
3031 l_pivs = ma_pivots(left, mt); in mas_destroy_rebalance()
3039 memcpy(slots + tmp, ma_slots(node, mt), sizeof(void *) * end); in mas_destroy_rebalance()
3040 memcpy(pivs + tmp, ma_pivots(node, mt), sizeof(unsigned long) * end); in mas_destroy_rebalance()
3048 unsigned char max_p = mt_pivots[mt]; in mas_destroy_rebalance()
3049 unsigned char max_s = mt_slots[mt]; in mas_destroy_rebalance()
3055 if (tmp < mt_slots[mt]) in mas_destroy_rebalance()
3059 ma_set_meta(node, mt, 0, tmp - 1); in mas_destroy_rebalance()
3067 ma_set_meta(left, mt, 0, split); in mas_destroy_rebalance()
3074 mas->node = mt_mk_node(newnode, mt); in mas_destroy_rebalance()
3075 ma_set_meta(newnode, mt, 0, tmp); in mas_destroy_rebalance()
3079 mt = mte_node_type(l_mas.node); in mas_destroy_rebalance()
3080 slots = ma_slots(new_left, mt); in mas_destroy_rebalance()
3081 pivs = ma_pivots(new_left, mt); in mas_destroy_rebalance()
3084 ma_set_meta(new_left, mt, 0, split); in mas_destroy_rebalance()
3085 l_mas.node = mt_mk_node(new_left, mt); in mas_destroy_rebalance()
3089 mt = mas_parent_type(&l_mas, l_mas.node); in mas_destroy_rebalance()
3091 slots = ma_slots(parent, mt); in mas_destroy_rebalance()
3092 pivs = ma_pivots(parent, mt); in mas_destroy_rebalance()
3097 eparent = mt_mk_node(parent, mt); in mas_destroy_rebalance()
4398 enum maple_type mt; in mas_prev_node() local
4427 mt = mte_node_type(mas->node); in mas_prev_node()
4430 slots = ma_slots(node, mt); in mas_prev_node()
4435 mt = mte_node_type(mas->node); in mas_prev_node()
4437 pivots = ma_pivots(node, mt); in mas_prev_node()
4438 offset = ma_data_end(node, mt, pivots, max); in mas_prev_node()
4443 slots = ma_slots(node, mt); in mas_prev_node()
4445 pivots = ma_pivots(node, mt); in mas_prev_node()
4571 enum maple_type mt; in mas_next_node() local
4589 mt = mte_node_type(mas->node); in mas_next_node()
4590 pivots = ma_pivots(node, mt); in mas_next_node()
4591 node_end = ma_data_end(node, mt, pivots, mas->max); in mas_next_node()
4597 slots = ma_slots(node, mt); in mas_next_node()
4610 mt = mte_node_type(mas->node); in mas_next_node()
4611 slots = ma_slots(node, mt); in mas_next_node()
4618 pivots = ma_pivots(node, mt); in mas_next_node()
4620 mas->max = mas_safe_pivot(mas, pivots, mas->offset, mt); in mas_next_node()
4622 mt = mte_node_type(enode); in mas_next_node()
4623 pivots = ma_pivots(tmp, mt); in mas_next_node()
4624 mas->end = ma_data_end(tmp, mt, pivots, mas->max); in mas_next_node()
5045 enum maple_type mt; in mas_empty_area() local
5075 mt = mte_node_type(mas->node); in mas_empty_area()
5076 pivots = ma_pivots(node, mt); in mas_empty_area()
5081 mas->end = ma_data_end(node, mt, pivots, mas->max); in mas_empty_area()
5150 * @mt: the maple tree
5158 unsigned char mte_dead_leaves(struct maple_enode *enode, struct maple_tree *mt, in mte_dead_leaves() argument
5167 entry = mt_slot(mt, slots, offset); in mte_dead_leaves()
5254 struct maple_tree *mt, struct maple_enode *prev, unsigned char offset) in mte_destroy_descend() argument
5267 next = mt_slot_locked(mt, slots, next_offset); in mte_destroy_descend()
5269 next = mt_slot_locked(mt, slots, ++next_offset); in mte_destroy_descend()
5283 static void mt_destroy_walk(struct maple_enode *enode, struct maple_tree *mt, in mt_destroy_walk() argument
5296 slots = mte_destroy_descend(&enode, mt, start, 0); in mt_destroy_walk()
5303 node->slot_len = mte_dead_leaves(enode, mt, slots); in mt_destroy_walk()
5316 tmp = mt_slot_locked(mt, slots, offset); in mt_destroy_walk()
5320 slots = mte_destroy_descend(&enode, mt, parent, offset); in mt_destroy_walk()
5327 node->slot_len = mte_dead_leaves(enode, mt, slots); in mt_destroy_walk()
5335 mt_clear_meta(mt, node, node->type); in mt_destroy_walk()
5341 * @mt: the tree to free - needed for node types.
5346 struct maple_tree *mt) in mte_destroy_walk() argument
5350 if (mt_in_rcu(mt)) { in mte_destroy_walk()
5351 mt_destroy_walk(enode, mt, false); in mte_destroy_walk()
5354 mt_destroy_walk(enode, mt, true); in mte_destroy_walk()
5735 * @mt: The maple tree
5745 void *mt_next(struct maple_tree *mt, unsigned long index, unsigned long max) in mt_next() argument
5748 MA_STATE(mas, mt, index, index); in mt_next()
5865 * @mt: The maple tree
5875 void *mt_prev(struct maple_tree *mt, unsigned long index, unsigned long min) in mt_prev() argument
5878 MA_STATE(mas, mt, index, index); in mt_prev()
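The mt_next()/mt_prev() hits above are the lock-free neighbour lookups of the public API; both take rcu_read_lock() internally, so callers need no locking. A small usage sketch (tree and neighbour_demo are illustrative names):

#include <linux/maple_tree.h>

static DEFINE_MTREE(tree);

static void neighbour_demo(void)
{
	/* First entry strictly above index 10, searching up to ULONG_MAX. */
	void *after = mt_next(&tree, 10, ULONG_MAX);
	/* Last entry strictly below index 10, searching down to 0. */
	void *before = mt_prev(&tree, 10, 0);

	(void)after;
	(void)before;
}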
6264 * @mt: The maple tree
6269 void *mtree_load(struct maple_tree *mt, unsigned long index) in mtree_load() argument
6271 MA_STATE(mas, mt, index, index); in mtree_load()
6302 * @mt: The maple tree
6311 int mtree_store_range(struct maple_tree *mt, unsigned long index, in mtree_store_range() argument
6314 MA_STATE(mas, mt, index, last); in mtree_store_range()
6324 mtree_lock(mt); in mtree_store_range()
6326 mtree_unlock(mt); in mtree_store_range()
6334 * @mt: The maple tree
6342 int mtree_store(struct maple_tree *mt, unsigned long index, void *entry, in mtree_store() argument
6345 return mtree_store_range(mt, index, index, entry, gfp); in mtree_store()
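These hits cover the basic read/write entry points: mtree_store() and mtree_store_range() take the tree lock themselves and overwrite whatever the range held, while mtree_load() is a lock-free RCU reader. A minimal sketch (names are illustrative):

#include <linux/maple_tree.h>

static DEFINE_MTREE(tree);
static int data = 42;

static int store_demo(void)
{
	int ret;

	/* Both store calls take and release the tree lock internally. */
	ret = mtree_store(&tree, 5, &data, GFP_KERNEL);
	if (ret)
		return ret;

	/* One entry spanning indices 100-199. */
	ret = mtree_store_range(&tree, 100, 199, &data, GFP_KERNEL);
	if (ret)
		return ret;

	/* Lock-free read; any index within the range returns the entry. */
	return mtree_load(&tree, 150) == &data ? 0 : -ENOENT;
}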
6351 * @mt: The maple tree
6360 int mtree_insert_range(struct maple_tree *mt, unsigned long first, in mtree_insert_range() argument
6363 MA_STATE(ms, mt, first, last); in mtree_insert_range()
6372 mtree_lock(mt); in mtree_insert_range()
6378 mtree_unlock(mt); in mtree_insert_range()
6389 * @mt: The maple tree
6397 int mtree_insert(struct maple_tree *mt, unsigned long index, void *entry, in mtree_insert() argument
6400 return mtree_insert_range(mt, index, index, entry, gfp); in mtree_insert()
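mtree_insert() and mtree_insert_range() are the non-overwriting variants: they fail with -EEXIST when any part of the range is already occupied. A sketch of that contract (names illustrative):

#include <linux/maple_tree.h>

static DEFINE_MTREE(tree);
static int a, b;

static int insert_demo(void)
{
	int ret;

	ret = mtree_insert(&tree, 7, &a, GFP_KERNEL);	/* 0 on success */
	if (ret)
		return ret;

	/* Index 7 is occupied, so the whole range insert is refused. */
	ret = mtree_insert_range(&tree, 0, 10, &b, GFP_KERNEL);
	return ret == -EEXIST ? 0 : ret;
}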
6404 int mtree_alloc_range(struct maple_tree *mt, unsigned long *startp, in mtree_alloc_range() argument
6410 MA_STATE(mas, mt, 0, 0); in mtree_alloc_range()
6411 if (!mt_is_alloc(mt)) in mtree_alloc_range()
6417 mtree_lock(mt); in mtree_alloc_range()
6437 mtree_unlock(mt); in mtree_alloc_range()
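mtree_alloc_range() searches a MT_FLAGS_ALLOC_RANGE tree bottom-up for a free gap of the requested size and stores the entry there; mtree_alloc_rrange(), whose hits follow below, is the top-down counterpart with the same arguments. An illustrative sketch:

#include <linux/maple_tree.h>
#include <linux/printk.h>

static struct maple_tree tree = MTREE_INIT(tree, MT_FLAGS_ALLOC_RANGE);
static int data;

static int gap_demo(void)
{
	unsigned long start;
	int ret;

	/* Find a free gap of 16 consecutive indices within [0, 1000]. */
	ret = mtree_alloc_range(&tree, &start, &data, 16, 0, 1000, GFP_KERNEL);
	if (ret)
		return ret;	/* -EBUSY if no gap is large enough */

	pr_info("allocated [%lu, %lu]\n", start, start + 15);
	return 0;
}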
6445 * @mt: The maple tree.
6453 * Finds an empty entry in @mt after @next, stores the new index into
6456 * @mt must be initialized with the MT_FLAGS_ALLOC_RANGE flag.
6458 * Context: Any context. Takes and releases the mt.lock. May sleep if
6463 * allocated, -EINVAL if @mt cannot be used, or -EBUSY if there are no
6466 int mtree_alloc_cyclic(struct maple_tree *mt, unsigned long *startp, in mtree_alloc_cyclic() argument
6472 MA_STATE(mas, mt, 0, 0); in mtree_alloc_cyclic()
6474 if (!mt_is_alloc(mt)) in mtree_alloc_cyclic()
6478 mtree_lock(mt); in mtree_alloc_cyclic()
6481 mtree_unlock(mt); in mtree_alloc_cyclic()
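mtree_alloc_cyclic() builds ID-allocator behaviour on top of the same gap search: it resumes searching after *next, wraps back to range_lo when the upper part of the range is full, and returns 1 rather than 0 for a success that happened after wrapping. A sketch (next_id and id_alloc_demo are illustrative):

#include <linux/maple_tree.h>

static struct maple_tree tree = MTREE_INIT(tree, MT_FLAGS_ALLOC_RANGE);
static unsigned long next_id;

static int id_alloc_demo(void *object)
{
	unsigned long id;
	int ret;

	/* IDs from [1, 1023], resuming after the previously issued one. */
	ret = mtree_alloc_cyclic(&tree, &id, object, 1, 1023, &next_id,
				 GFP_KERNEL);
	if (ret < 0)
		return ret;	/* ret == 1 means success after wrapping */

	return 0;
}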
6486 int mtree_alloc_rrange(struct maple_tree *mt, unsigned long *startp, in mtree_alloc_rrange() argument
6492 MA_STATE(mas, mt, 0, 0); in mtree_alloc_rrange()
6493 if (!mt_is_alloc(mt)) in mtree_alloc_rrange()
6499 mtree_lock(mt); in mtree_alloc_rrange()
6519 mtree_unlock(mt); in mtree_alloc_rrange()
6527 * @mt: The maple tree
6535 void *mtree_erase(struct maple_tree *mt, unsigned long index) in mtree_erase() argument
6539 MA_STATE(mas, mt, index, index); in mtree_erase()
6542 mtree_lock(mt); in mtree_erase()
6544 mtree_unlock(mt); in mtree_erase()
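mtree_erase() locks the tree, removes whichever range covers the given index, and hands back the old entry. Sketch:

#include <linux/maple_tree.h>

static DEFINE_MTREE(tree);

static void erase_demo(void)
{
	/* Removes the range covering index 150 and returns the old entry,
	 * or NULL if that index was empty. Locks internally. */
	void *old = mtree_erase(&tree, 150);

	(void)old;
}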
6745 * @mt: The source maple tree
6764 int __mt_dup(struct maple_tree *mt, struct maple_tree *new, gfp_t gfp) in __mt_dup() argument
6767 MA_STATE(mas, mt, 0, 0); in __mt_dup()
6783 * @mt: The source maple tree
6801 int mtree_dup(struct maple_tree *mt, struct maple_tree *new, gfp_t gfp) in mtree_dup() argument
6804 MA_STATE(mas, mt, 0, 0); in mtree_dup()
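__mt_dup() and mtree_dup() copy an entire tree by memcpy()ing nodes in DFS pre-order, which is faster than re-inserting every entry; per the kernel-doc, __mt_dup() expects the caller to hold both tree locks, while mtree_dup() locks the trees itself. A sketch of the locking variant:

#include <linux/maple_tree.h>

static DEFINE_MTREE(src);
static DEFINE_MTREE(dst);

static int dup_demo(void)
{
	/* @dst must be empty and share @src's flags, else -EINVAL. */
	return mtree_dup(&src, &dst, GFP_KERNEL);
}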
6824 * @mt: The maple tree
6828 void __mt_destroy(struct maple_tree *mt) in __mt_destroy() argument
6830 void *root = mt_root_locked(mt); in __mt_destroy()
6832 rcu_assign_pointer(mt->ma_root, NULL); in __mt_destroy()
6834 mte_destroy_walk(root, mt); in __mt_destroy()
6836 mt->ma_flags = mt_attr(mt); in __mt_destroy()
6842 * @mt: The maple tree
6846 void mtree_destroy(struct maple_tree *mt) in mtree_destroy() argument
6848 mtree_lock(mt); in mtree_destroy()
6849 __mt_destroy(mt); in mtree_destroy()
6850 mtree_unlock(mt); in mtree_destroy()
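mtree_destroy() takes the lock and calls __mt_destroy(), which detaches the root and walks it with mte_destroy_walk(); as the mt_in_rcu() branch above shows, node freeing is deferred through RCU when MT_FLAGS_USE_RCU is set and immediate otherwise. The entries themselves are never freed. Sketch:

#include <linux/maple_tree.h>

static DEFINE_MTREE(tree);

static void teardown_demo(void)
{
	/* Frees all nodes but not the entries; free those beforehand.
	 * Use __mt_destroy() instead when already holding the tree lock. */
	mtree_destroy(&tree);
}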
6856 * @mt: The maple tree
6870 void *mt_find(struct maple_tree *mt, unsigned long *index, unsigned long max) in mt_find() argument
6872 MA_STATE(mas, mt, *index, *index); in mt_find()
6908 if (MT_WARN_ON(mt, (*index) && ((*index) <= copy))) in mt_find()
6920 * @mt: The maple tree
6930 void *mt_find_after(struct maple_tree *mt, unsigned long *index, in mt_find_after() argument
6936 return mt_find(mt, index, max); in mt_find_after()
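mt_find() returns the first entry at or after *index (up to max) and advances *index one past the range it found; mt_find_after() is the same except it returns NULL once *index has wrapped to 0. Together they are exactly what the mt_for_each() macro in maple_tree.h expands to. Sketch:

#include <linux/maple_tree.h>
#include <linux/printk.h>

static DEFINE_MTREE(tree);

static void visit_all(void)
{
	unsigned long index = 0;
	void *entry;

	for (entry = mt_find(&tree, &index, ULONG_MAX); entry;
	     entry = mt_find_after(&tree, &index, ULONG_MAX))
		pr_info("entry %p occupies indices below %lu\n", entry, index);
}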
7053 static void mt_dump_node(const struct maple_tree *mt, void *entry,
7092 static void mt_dump_range64(const struct maple_tree *mt, void *entry, in mt_dump_range64() argument
7122 mt_dump_entry(mt_slot(mt, node->slot, i), in mt_dump_range64()
7125 mt_dump_node(mt, mt_slot(mt, node->slot, i), in mt_dump_range64()
7145 static void mt_dump_arange64(const struct maple_tree *mt, void *entry, in mt_dump_arange64() argument
7184 mt_dump_node(mt, mt_slot(mt, node->slot, i), in mt_dump_arange64()
7204 static void mt_dump_node(const struct maple_tree *mt, void *entry, in mt_dump_node() argument
7222 mt_dump_entry(mt_slot(mt, node->slot, i), in mt_dump_node()
7228 mt_dump_range64(mt, entry, min, max, depth, format); in mt_dump_node()
7231 mt_dump_arange64(mt, entry, min, max, depth, format); in mt_dump_node()
7239 void mt_dump(const struct maple_tree *mt, enum mt_dump_format format) in mt_dump() argument
7241 void *entry = rcu_dereference_check(mt->ma_root, mt_locked(mt)); in mt_dump()
7244 mt, mt->ma_flags, mt_height(mt), entry); in mt_dump()
7246 mt_dump_node(mt, entry, 0, mt_node_max(entry), 0, format); in mt_dump()
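mt_dump() and the mt_dump_*() helpers above are built only under CONFIG_DEBUG_MAPLE_TREE and print the node structure to the console. Sketch:

#include <linux/maple_tree.h>

static DEFINE_MTREE(tree);

static void dump_demo(void)
{
	/* Print the tree's node structure; mt_dump_dec uses decimal
	 * indices, mt_dump_hex hexadecimal. */
	mt_dump(&tree, mt_dump_hex);
}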
7262 enum maple_type mt = mte_node_type(mas->node); in mas_validate_gaps() local
7267 unsigned long *pivots = ma_pivots(node, mt); in mas_validate_gaps()
7270 if (ma_is_dense(mt)) { in mas_validate_gaps()
7283 gaps = ma_gaps(node, mt); in mas_validate_gaps()
7285 p_end = mas_safe_pivot(mas, pivots, i, mt); in mas_validate_gaps()
7313 if (mt == maple_arange_64) { in mas_validate_gaps()
7496 static void mt_validate_nulls(struct maple_tree *mt) in mt_validate_nulls() argument
7501 MA_STATE(mas, mt, 0, 0); in mt_validate_nulls()
7517 MT_BUG_ON(mt, !last && !entry); in mt_validate_nulls()
7538 void mt_validate(struct maple_tree *mt) in mt_validate() argument
7543 MA_STATE(mas, mt, 0, 0); in mt_validate()
7563 if (mt_is_alloc(mt)) in mt_validate()
7567 mt_validate_nulls(mt); in mt_validate()
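mt_validate() (also CONFIG_DEBUG_MAPLE_TREE only) walks every node checking pivots, gap metadata (mas_validate_gaps() when the tree is an alloc tree, per the mt_is_alloc() hit above) and NULL placement, triggering MT_BUG_ON() on any inconsistency; it is used heavily by lib/test_maple_tree.c. A sketch; taking mtree_lock() here is a conservative assumption to keep writers out while the checker runs:

#include <linux/maple_tree.h>

static DEFINE_MTREE(tree);

static void validate_demo(void)
{
	mtree_lock(&tree);	/* exclude concurrent writers */
	mt_validate(&tree);
	mtree_unlock(&tree);
}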