Cross-reference listing: lines where the identifier `b` (struct btree *) appears in the bcachefs btree cache code (fs/bcachefs/btree_cache.c). Hits are grouped by containing function; the number on each line is the source line number in btree_cache.c, and gaps in the numbering mark elided lines that did not match.
bch2_recalc_btree_reserve():
      29  if (!c->btree_roots_known[0].b)
      35  if (r->b)
      36  reserve += min_t(unsigned, 1, r->b->c.level) * 8;
btree_node_to_freedlist():
      47  static void btree_node_to_freedlist(struct btree_cache *bc, struct btree *b)
      49  if (b->c.lock.readers)
      50  list_move(&b->list, &bc->freed_pcpu);
      52  list_move(&b->list, &bc->freed_nonpcpu);
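Read together, the two branches segregate recycled nodes by lock flavor: a six lock initialized with per-cpu reader counts has a non-NULL `readers` pointer, and such nodes must not be recycled onto the same list as ordinary ones. A minimal reconstruction of the whole function, with the `else` filled in from line 52:

    static void btree_node_to_freedlist(struct btree_cache *bc, struct btree *b)
    {
            /* lock.readers is non-NULL iff the lock uses per-cpu reader counts */
            if (b->c.lock.readers)
                    list_move(&b->list, &bc->freed_pcpu);
            else
                    list_move(&b->list, &bc->freed_nonpcpu);
    }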
btree_node_data_free():
      55  static void btree_node_data_free(struct bch_fs *c, struct btree *b)
      59  EBUG_ON(btree_node_write_in_flight(b));
      61  clear_btree_node_just_written(b);
      63  kvpfree(b->data, btree_buf_bytes(b));
      64  b->data = NULL;
      66  kvfree(b->aux_data);
      68  munmap(b->aux_data, btree_aux_data_bytes(b));
      70  b->aux_data = NULL;
      74  btree_node_to_freedlist(bc, b);
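Lines 66 and 68 are alternate free paths, not sequential code: kernel builds kvmalloc() the aux_data buffer while the userspace tools build mmap()s it (matching the mmap() visible at line 103 below). A fragment sketch of how the two paths presumably fit together, assuming the usual #ifdef __KERNEL__ split:

    /* Sketch; assumes the kernel/userspace split is #ifdef __KERNEL__ */
    kvpfree(b->data, btree_buf_bytes(b));
    b->data = NULL;
    #ifdef __KERNEL__
    kvfree(b->aux_data);
    #else
    munmap(b->aux_data, btree_aux_data_bytes(b));
    #endif
    b->aux_data = NULL;

    btree_node_to_freedlist(bc, b);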
bch2_btree_cache_cmp_fn():
      80  const struct btree *b = obj;
      83  return b->hash_val == *v ? 0 : 1;
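rhashtable compare callbacks return 0 for a match and nonzero otherwise, which is why line 83 inverts the equality test. The full shape is likely the standard obj_cmpfn signature, with `v` pointing at the 64-bit hash key passed into the lookup (the `arg->key` line is an assumption, though it matches the rhashtable API):

    static int bch2_btree_cache_cmp_fn(struct rhashtable_compare_arg *arg,
                                       const void *obj)
    {
            const struct btree *b = obj;
            const u64 *v = arg->key;   /* assumed: lookup key is the u64 hash_val */

            return b->hash_val == *v ? 0 : 1;
    }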
btree_node_data_alloc():
      93  static int btree_node_data_alloc(struct bch_fs *c, struct btree *b, gfp_t gfp)
      95  BUG_ON(b->data || b->aux_data);
      97  b->data = kvpmalloc(btree_buf_bytes(b), gfp);
      98  if (!b->data)
     101  b->aux_data = kvmalloc(btree_aux_data_bytes(b), gfp);
     103  b->aux_data = mmap(NULL, btree_aux_data_bytes(b),
     106  if (b->aux_data == MAP_FAILED)
     107  b->aux_data = NULL;
     109  if (!b->aux_data) {
     110  kvpfree(b->data, btree_buf_bytes(b));
     111  b->data = NULL;
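The allocation mirrors the free path above: if aux_data can't be allocated, the data buffer is released so the node is left in a consistent empty state. A hedged sketch of the body; the mmap flags and the -ENOMEM returns are assumptions (bcachefs uses its own private error codes, and the real flags are elided from the listing):

    b->data = kvpmalloc(btree_buf_bytes(b), gfp);
    if (!b->data)
            return -ENOMEM;            /* assumed errno */
    #ifdef __KERNEL__
    b->aux_data = kvmalloc(btree_aux_data_bytes(b), gfp);
    #else
    b->aux_data = mmap(NULL, btree_aux_data_bytes(b),
                       PROT_READ|PROT_WRITE|PROT_EXEC,   /* assumed flags */
                       MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
    if (b->aux_data == MAP_FAILED)
            b->aux_data = NULL;
    #endif
    if (!b->aux_data) {
            kvpfree(b->data, btree_buf_bytes(b));
            b->data = NULL;
            return -ENOMEM;            /* assumed errno */
    }
    return 0;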
__btree_node_mem_alloc():
     120  struct btree *b;
     122  b = kzalloc(sizeof(struct btree), gfp);
     123  if (!b)
     126  bkey_btree_ptr_init(&b->key);
     127  INIT_LIST_HEAD(&b->list);
     128  INIT_LIST_HEAD(&b->write_blocked);
     129  b->byte_order = ilog2(c->opts.btree_node_size);
     130  return b;
__bch2_btree_node_mem_alloc():
     136  struct btree *b;
     138  b = __btree_node_mem_alloc(c, GFP_KERNEL);
     139  if (!b)
     142  if (btree_node_data_alloc(c, b, GFP_KERNEL)) {
     143  kfree(b);
     147  bch2_btree_lock_init(&b->c, 0);
     150  list_add(&b->list, &bc->freeable);
     151  return b;
bch2_btree_node_hash_remove():
     156  void bch2_btree_node_hash_remove(struct btree_cache *bc, struct btree *b)
     158  int ret = rhashtable_remove_fast(&bc->table, &b->hash, bch_btree_cache_params);
     163  b->hash_val = 0;
__bch2_btree_node_hash_insert():
     166  int __bch2_btree_node_hash_insert(struct btree_cache *bc, struct btree *b)
     168  BUG_ON(b->hash_val);
     169  b->hash_val = btree_ptr_hash_val(&b->key);
     171  return rhashtable_lookup_insert_fast(&bc->table, &b->hash,
bch2_btree_node_hash_insert():
     175  int bch2_btree_node_hash_insert(struct btree_cache *bc, struct btree *b,
     180  b->c.level = level;
     181  b->c.btree_id = id;
     184  ret = __bch2_btree_node_hash_insert(bc, b);
     186  list_add_tail(&b->list, &bc->live);
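The hash table is keyed by a 64-bit hash of the node's key: lines 168-169 compute it once and cache it in `hash_val`, so neither lookups nor the compare callback ever re-hash the full key. The rhashtable params tying the pieces together likely look something like this (a sketch; the exact fields are assumed from the `hash`, `hash_val`, and cmp-fn usage above):

    static const struct rhashtable_params bch_btree_cache_params = {
            .head_offset    = offsetof(struct btree, hash),
            .key_offset     = offsetof(struct btree, hash_val),
            .key_len        = sizeof(u64),
            .obj_cmpfn      = bch2_btree_cache_cmp_fn,
    };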
__btree_node_reclaim():
     205  static int __btree_node_reclaim(struct bch_fs *c, struct btree *b, bool flush)
     212  if (b->flags & ((1U << BTREE_NODE_dirty)|
     219  bch2_btree_node_wait_on_read(b);
     220  bch2_btree_node_wait_on_write(b);
     223  if (!six_trylock_intent(&b->c.lock))
     226  if (!six_trylock_write(&b->c.lock))
     230  if (b->flags & ((1U << BTREE_NODE_read_in_flight)|
     234  six_unlock_write(&b->c.lock);
     235  six_unlock_intent(&b->c.lock);
     239  if (btree_node_noevict(b) ||
     240  btree_node_write_blocked(b) ||
     241  btree_node_will_make_reachable(b))
     244  if (btree_node_dirty(b)) {
     254  bch2_btree_node_write(c, b, SIX_LOCK_intent,
     257  __bch2_btree_node_write(c, b,
     260  six_unlock_write(&b->c.lock);
     261  six_unlock_intent(&b->c.lock);
     265  if (b->hash_val && !ret)
     266  trace_and_count(c, btree_cache_reap, c, b);
     269  six_unlock_write(&b->c.lock);
     271  six_unlock_intent(&b->c.lock);
btree_node_reclaim():
     276  static int btree_node_reclaim(struct bch_fs *c, struct btree *b)
     278  return __btree_node_reclaim(c, b, false);
btree_node_write_and_reclaim():
     281  static int btree_node_write_and_reclaim(struct bch_fs *c, struct btree *b)
     283  return __btree_node_reclaim(c, b, true);
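The two wrappers differ only in the `flush` flag: plain reclaim refuses nodes that would need IO, while write-and-reclaim is willing to wait on and issue writes first. Reclaim itself is shrinker-safe: it only ever trylocks, and it rechecks the IO flags after acquiring the lock, since they can be set between the unlocked test at line 212 and the trylock at line 223. A fragment sketch of that pattern (the unwind labels are assumed):

    /* Sketch of the shrinker-safe locking dance; label names assumed */
    if (!six_trylock_intent(&b->c.lock))
            goto out_nounlock;
    if (!six_trylock_write(&b->c.lock))
            goto out_unlock_intent;

    /* Recheck under the lock: IO can start between the unlocked test
     * at line 212 and the trylock at line 223.
     */
    if (b->flags & ((1U << BTREE_NODE_read_in_flight)|
                    (1U << BTREE_NODE_write_in_flight))) {
            six_unlock_write(&b->c.lock);
            six_unlock_intent(&b->c.lock);
            goto out_nounlock;
    }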
bch2_btree_cache_scan():
     291  struct btree *b, *t;
     318  list_for_each_entry_safe(b, t, &bc->freeable, list) {
     331  if (!btree_node_reclaim(c, b)) {
     332  btree_node_data_free(c, b);
     333  six_unlock_write(&b->c.lock);
     334  six_unlock_intent(&b->c.lock);
     339  list_for_each_entry_safe(b, t, &bc->live, list) {
     342  if (btree_node_accessed(b)) {
     343  clear_btree_node_accessed(b);
     344  } else if (!btree_node_reclaim(c, b)) {
     346  btree_node_data_free(c, b);
     348  bch2_btree_node_hash_remove(bc, b);
     349  six_unlock_write(&b->c.lock);
     350  six_unlock_intent(&b->c.lock);
     355  btree_node_dirty(b) &&
     356  !btree_node_will_make_reachable(b) &&
     357  !btree_node_write_blocked(b) &&
     358  six_trylock_read(&b->c.lock)) {
     359  list_move(&bc->live, &b->list);
     361  __bch2_btree_node_write(c, b, BTREE_WRITE_cache_reclaim);
     362  six_unlock_read(&b->c.lock);
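The live-list pass (lines 339-362) is a second-chance/clock sweep: a node touched since the last scan merely loses its accessed bit; an untouched node that reclaims cleanly is freed and unhashed; an untouched dirty node gets a write kicked off under a read lock so a later pass can reclaim it. The `list_move(&bc->live, &b->list)` at line 359 rotates the list head to just past `b`, remembering the scan position across the lock drop for the write. A condensed sketch; the real loop also does shrinker accounting and more bookkeeping than shown:

    list_for_each_entry_safe(b, t, &bc->live, list) {
            if (btree_node_accessed(b)) {
                    clear_btree_node_accessed(b);       /* second chance */
            } else if (!btree_node_reclaim(c, b)) {
                    btree_node_data_free(c, b);         /* clean + idle: free it */
                    bch2_btree_node_hash_remove(bc, b);
                    six_unlock_write(&b->c.lock);
                    six_unlock_intent(&b->c.lock);
            } else if (btree_node_dirty(b) &&
                       !btree_node_will_make_reachable(b) &&
                       !btree_node_write_blocked(b) &&
                       six_trylock_read(&b->c.lock)) {
                    list_move(&bc->live, &b->list);     /* remember scan position */
                    __bch2_btree_node_write(c, b, BTREE_WRITE_cache_reclaim);
                    six_unlock_read(&b->c.lock);
            }
    }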
bch2_fs_btree_cache_exit():
     399  struct btree *b;
     416  if (r->b)
     417  list_add(&r->b->list, &bc->live);
     423  b = list_first_entry(&bc->live, struct btree, list);
     425  BUG_ON(btree_node_read_in_flight(b) ||
     426  btree_node_write_in_flight(b));
     428  btree_node_data_free(c, b);
     437  b = list_first_entry(&bc->freed_nonpcpu, struct btree, list);
     438  list_del(&b->list);
     439  six_lock_exit(&b->c.lock);
     440  kfree(b);
btree_node_cannibalize():
     551  struct btree *b;
     553  list_for_each_entry_reverse(b, &bc->live, list)
     554  if (!btree_node_reclaim(c, b))
     555  return b;
     558  list_for_each_entry_reverse(b, &bc->live, list)
     559  if (!btree_node_write_and_reclaim(c, b))
     560  return b;
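Cannibalizing makes two passes over the live list in reverse (coldest-first) order: the first takes any node reclaimable without IO; only if none qualifies does the second pass pay for writes. A fragment sketch; the listing doesn't show what happens when both passes fail (presumably retry or error out), so that part is omitted:

    /* Pass 1: prefer a victim we can take without doing IO */
    list_for_each_entry_reverse(b, &bc->live, list)
            if (!btree_node_reclaim(c, b))
                    return b;

    /* Pass 2: accept paying for a write to flush a dirty victim */
    list_for_each_entry_reverse(b, &bc->live, list)
            if (!btree_node_write_and_reclaim(c, b))
                    return b;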
bch2_btree_node_mem_alloc():
     578  struct btree *b, *b2;
     589  list_for_each_entry(b, freed, list)
     590  if (!btree_node_reclaim(c, b)) {
     591  list_del_init(&b->list);
     595  b = __btree_node_mem_alloc(c, GFP_NOWAIT|__GFP_NOWARN);
     596  if (!b) {
     599  b = __btree_node_mem_alloc(c, GFP_KERNEL);
     600  if (!b)
     605  bch2_btree_lock_init(&b->c, pcpu_read_locks ? SIX_LOCK_INIT_PCPU : 0);
     607  BUG_ON(!six_trylock_intent(&b->c.lock));
     608  BUG_ON(!six_trylock_write(&b->c.lock));
     617  swap(b->data, b2->data);
     618  swap(b->aux_data, b2->aux_data);
     627  if (btree_node_data_alloc(c, b, GFP_NOWAIT|__GFP_NOWARN)) {
     629  if (btree_node_data_alloc(c, b, GFP_KERNEL|__GFP_NOWARN))
     638  BUG_ON(btree_node_hashed(b));
     639  BUG_ON(btree_node_dirty(b));
     640  BUG_ON(btree_node_write_in_flight(b));
     642  b->flags = 0;
     643  b->written = 0;
     644  b->nsets = 0;
     645  b->sib_u64s[0] = 0;
     646  b->sib_u64s[1] = 0;
     647  b->whiteout_u64s = 0;
     648  bch2_btree_keys_init(b);
     649  set_btree_node_accessed(b);
     655  return b;
     665  if (b) {
     666  swap(b->data, b2->data);
     667  swap(b->aux_data, b2->aux_data);
     672  b = b2;
     673  list_del_init(&b->list);
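Both the node (lines 595-600) and its buffers (lines 627-629) are allocated with GFP_NOWAIT|__GFP_NOWARN first, so the fast path never sleeps while holding cache locks; only on failure does the code fall back to a blocking GFP_KERNEL attempt. A sketch of the idiom; the lock drop around the blocking allocation is an assumption (the relevant lines are elided from the listing):

    b = __btree_node_mem_alloc(c, GFP_NOWAIT|__GFP_NOWARN);
    if (!b) {
            /* assumed: drop bc->lock before a sleeping allocation */
            mutex_unlock(&bc->lock);
            b = __btree_node_mem_alloc(c, GFP_KERNEL);
            mutex_lock(&bc->lock);
            if (!b)
                    goto err;          /* label assumed */
    }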
bch2_btree_node_fill():
     698  struct btree *b;
     711  b = bch2_btree_node_mem_alloc(trans, level != 0);
     713  if (bch2_err_matches(PTR_ERR_OR_ZERO(b), ENOMEM)) {
     719  if (IS_ERR(b))
     720  return b;
     722  bkey_copy(&b->key, k);
     723  if (bch2_btree_node_hash_insert(bc, b, level, btree_id)) {
     727  b->hash_val = 0;
     730  list_add(&b->list, &bc->freeable);
     733  six_unlock_write(&b->c.lock);
     734  six_unlock_intent(&b->c.lock);
     738  set_btree_node_read_in_flight(b);
     740  six_unlock_write(&b->c.lock);
     741  seq = six_lock_seq(&b->c.lock);
     742  six_unlock_intent(&b->c.lock);
     748  bch2_btree_node_read(trans, b, sync);
     762  if (!six_relock_type(&b->c.lock, lock_type, seq)) {
     768  return b;
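Lines 740-742 and 762 show the lock-sequence-number pattern: before starting the read, the intent lock's sequence number is saved and the lock dropped; after IO, six_relock_type() succeeds only if nobody else took the lock in between, otherwise the caller has to go through the full (restartable) locking path again. In sketch form:

    six_unlock_write(&b->c.lock);
    seq = six_lock_seq(&b->c.lock);    /* remember the lock's generation */
    six_unlock_intent(&b->c.lock);

    bch2_btree_node_read(trans, b, sync);   /* may sleep */

    if (!six_relock_type(&b->c.lock, lock_type, seq)) {
            /* someone else took the lock meanwhile; assumed fallback is
             * a transaction restart / full relock path
             */
    }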
btree_bad_header():
     771  static noinline void btree_bad_header(struct bch_fs *c, struct btree *b)
     782  bch2_btree_id_str(b->c.btree_id), b->c.level);
     783  bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));
     787  bch2_btree_id_str(BTREE_NODE_ID(b->data)),
     788  BTREE_NODE_LEVEL(b->data));
     789  bch2_bpos_to_text(&buf, b->data->min_key);
     792  bch2_bpos_to_text(&buf, b->data->max_key);
btree_check_header():
     798  static inline void btree_check_header(struct bch_fs *c, struct btree *b)
     800  if (b->c.btree_id != BTREE_NODE_ID(b->data) ||
     801  b->c.level != BTREE_NODE_LEVEL(b->data) ||
     802  !bpos_eq(b->data->max_key, b->key.k.p) ||
     803  (b->key.k.type == KEY_TYPE_btree_ptr_v2 &&
     804  !bpos_eq(b->data->min_key,
     805  bkey_i_to_btree_ptr_v2(&b->key)->v.min_key)))
     806  btree_bad_header(c, b);
__bch2_btree_node_get():
     816  struct btree *b;
     823  b = btree_cache_find(bc, k);
     824  if (unlikely(!b)) {
     830  b = bch2_btree_node_fill(trans, path, k, path->btree_id,
     835  if (!b)
     838  if (IS_ERR(b))
     839  return b;
     844  ret = btree_node_lock(trans, path, &b->c, level, lock_type, trace_ip);
     850  if (unlikely(b->hash_val != btree_ptr_hash_val(k) ||
     851  b->c.level != level ||
     853  six_unlock_type(&b->c.lock, lock_type);
     862  if (!btree_node_accessed(b))
     863  set_btree_node_accessed(b);
     866  if (unlikely(btree_node_read_in_flight(b))) {
     867  u32 seq = six_lock_seq(&b->c.lock);
     869  six_unlock_type(&b->c.lock, lock_type);
     873  bch2_btree_node_wait_on_read(b);
     879  if (!six_relock_type(&b->c.lock, lock_type, seq))
     887  six_unlock_type(&b->c.lock, lock_type);
     892  prefetch(b->aux_data);
     894  for_each_bset(b, t) {
     895  void *p = (u64 *) b->aux_data + t->aux_data_offset;
     902  if (unlikely(btree_node_read_error(b))) {
     903  six_unlock_type(&b->c.lock, lock_type);
     907  EBUG_ON(b->c.btree_id != path->btree_id);
     908  EBUG_ON(BTREE_NODE_LEVEL(b->data) != level);
     909  btree_check_header(c, b);
     911  return b;
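Before returning, the node's per-bset lookup tables are prefetched (lines 892-895). The prefetch calls themselves are elided from the listing; a sketch of the presumed body, with the number of cachelines warmed per bset being an assumption:

    prefetch(b->aux_data);

    for_each_bset(b, t) {
            void *p = (u64 *) b->aux_data + t->aux_data_offset;

            /* assumed: warm the first few cachelines of each bset's
             * aux (lookup) data before the search touches it
             */
            prefetch(p + L1_CACHE_BYTES * 0);
            prefetch(p + L1_CACHE_BYTES * 1);
            prefetch(p + L1_CACHE_BYTES * 2);
    }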
bch2_btree_node_get():
     936  struct btree *b;
     942  b = btree_node_mem_ptr(k);
     945  * Check b->hash_val _before_ calling btree_node_lock() - this might not
     950  !b ||
     951  b->hash_val != btree_ptr_hash_val(k)))
     957  ret = btree_node_lock(trans, path, &b->c, level, lock_type, trace_ip);
     963  if (unlikely(b->hash_val != btree_ptr_hash_val(k) ||
     964  b->c.level != level ||
     966  six_unlock_type(&b->c.lock, lock_type);
     974  if (unlikely(btree_node_read_in_flight(b))) {
     975  six_unlock_type(&b->c.lock, lock_type);
     979  prefetch(b->aux_data);
     981  for_each_bset(b, t) {
     982  void *p = (u64 *) b->aux_data + t->aux_data_offset;
     990  if (!btree_node_accessed(b))
     991  set_btree_node_accessed(b);
     993  if (unlikely(btree_node_read_error(b))) {
     994  six_unlock_type(&b->c.lock, lock_type);
     998  EBUG_ON(b->c.btree_id != path->btree_id);
     999  EBUG_ON(BTREE_NODE_LEVEL(b->data) != level);
    1000  btree_check_header(c, b);
    1002  return b;
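This is the fast path: btree_node_mem_ptr(k) dereferences a node pointer cached in the key itself, which may be stale. That is why the comment at line 945 insists `hash_val` be validated before locking, and why it is checked again at line 963 after the lock is taken: the node could have been evicted and the memory reused in between. A sketch of the guard; the fallback to the slow path is an assumption (there is also an additional elided condition around line 949 not shown here):

    b = btree_node_mem_ptr(k);   /* node pointer cached in the key; may be stale */

    if (unlikely(!b ||
                 b->hash_val != btree_ptr_hash_val(k)))
            /* assumed: fall back to the hash-table slow path */
            return __bch2_btree_node_get(trans, path, k, level, lock_type, trace_ip);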
bch2_btree_node_get_noiter():
    1013  struct btree *b;
    1020  b = btree_node_mem_ptr(k);
    1021  if (b)
    1025  b = btree_cache_find(bc, k);
    1026  if (unlikely(!b)) {
    1030  b = bch2_btree_node_fill(trans, NULL, k, btree_id,
    1034  if (!b)
    1037  if (IS_ERR(b) &&
    1041  if (IS_ERR(b))
    1045  ret = btree_node_lock_nopath(trans, &b->c, SIX_LOCK_read, _THIS_IP_);
    1051  if (unlikely(b->hash_val != btree_ptr_hash_val(k) ||
    1052  b->c.btree_id != btree_id ||
    1053  b->c.level != level)) {
    1054  six_unlock_read(&b->c.lock);
    1060  __bch2_btree_node_wait_on_read(b);
    1062  prefetch(b->aux_data);
    1064  for_each_bset(b, t) {
    1065  void *p = (u64 *) b->aux_data + t->aux_data_offset;
    1073  if (!btree_node_accessed(b))
    1074  set_btree_node_accessed(b);
    1076  if (unlikely(btree_node_read_error(b))) {
    1077  six_unlock_read(&b->c.lock);
    1078  b = ERR_PTR(-EIO);
    1082  EBUG_ON(b->c.btree_id != btree_id);
    1083  EBUG_ON(BTREE_NODE_LEVEL(b->data) != level);
    1084  btree_check_header(c, b);
    1087  return b;
bch2_btree_node_prefetch():
    1097  struct btree *b;
    1102  b = btree_cache_find(bc, k);
    1103  if (b)
    1106  b = bch2_btree_node_fill(trans, path, k, btree_id,
    1108  return PTR_ERR_OR_ZERO(b);
bch2_btree_node_evict():
    1115  struct btree *b;
    1117  b = btree_cache_find(bc, k);
    1118  if (!b)
    1126  __bch2_btree_node_wait_on_read(b);
    1127  __bch2_btree_node_wait_on_write(b);
    1129  btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_intent);
    1130  btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_write);
    1132  if (btree_node_dirty(b)) {
    1133  __bch2_btree_node_write(c, b, BTREE_WRITE_cache_reclaim);
    1134  six_unlock_write(&b->c.lock);
    1135  six_unlock_intent(&b->c.lock);
    1139  BUG_ON(btree_node_dirty(b));
    1142  btree_node_data_free(c, b);
    1143  bch2_btree_node_hash_remove(bc, b);
    1146  six_unlock_write(&b->c.lock);
    1147  six_unlock_intent(&b->c.lock);
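Unlike shrinker reclaim, eviction must win: it waits for in-flight reads and writes to drain, takes both locks unconditionally (the _nofail variants), and if the node turns out dirty, writes it out, drops the locks, and goes back to waiting. The unlock pair at lines 1134-1135 right after the write strongly suggests a retry loop; a sketch with the loop structure assumed:

    /* Sketch of the implied evict loop; the label/goto structure is assumed */
    wait_on_io:
            __bch2_btree_node_wait_on_read(b);
            __bch2_btree_node_wait_on_write(b);

            btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_intent);
            btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_write);

            if (btree_node_dirty(b)) {
                    __bch2_btree_node_write(c, b, BTREE_WRITE_cache_reclaim);
                    six_unlock_write(&b->c.lock);
                    six_unlock_intent(&b->c.lock);
                    goto wait_on_io;    /* the write we just issued is in flight */
            }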
bch2_btree_pos_to_text():
    1155  void bch2_btree_pos_to_text(struct printbuf *out, struct bch_fs *c, const struct btree *b)
    1158  bch2_btree_id_str(b->c.btree_id),
    1159  b->c.level,
    1160  bch2_btree_id_root(c, b->c.btree_id)->level);
    1161  bch2_bkey_val_to_text(out, c, bkey_i_to_s_c(&b->key));
bch2_btree_node_to_text():
    1164  void bch2_btree_node_to_text(struct printbuf *out, struct bch_fs *c, const struct btree *b)
    1170  bch2_btree_keys_stats(b, &stats);
    1172  prt_printf(out, "l %u ", b->c.level);
    1173  bch2_bpos_to_text(out, b->data->min_key);
    1175  bch2_bpos_to_text(out, b->data->max_key);
    1178  bch2_val_to_text(out, c, bkey_i_to_s_c(&b->key));
    1183  bch2_bkey_format_to_text(out, &b->format);
    1193  b->unpack_fn_len,
    1194  b->nr.live_u64s * sizeof(u64),
    1195  btree_buf_bytes(b) - sizeof(struct btree_node),
    1196  b->nr.live_u64s * 100 / btree_max_u64s(c),
    1197  b->sib_u64s[0],
    1198  b->sib_u64s[1],
    1200  b->nr.packed_keys,
    1201  b->nr.unpacked_keys,