Lines Matching full:k

14 static unsigned bch2_bkey_nr_alloc_ptrs(struct bkey_s_c k) in bch2_bkey_nr_alloc_ptrs() argument
16 struct bkey_ptrs_c ptrs = bch2_bkey_ptrs_c(k); in bch2_bkey_nr_alloc_ptrs()
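The two hits above fall in bch2_bkey_nr_alloc_ptrs(), which takes the extent key k, grabs its pointer list with bch2_bkey_ptrs_c(), and from it estimates how many allocation-btree updates inserting the extent will imply. Below is a minimal self-contained sketch of that counting idea; every demo_* name is invented for illustration, and the exact weighting is an assumption rather than something visible in these hits.

/* Invented stand-ins for the extent entry types, for illustration only: */
enum demo_entry_type { DEMO_ENTRY_ptr, DEMO_ENTRY_crc, DEMO_ENTRY_stripe_ptr };

struct demo_entry { enum demo_entry_type type; };

struct demo_ptrs {
	unsigned		nr;
	struct demo_entry	entries[8];
};

/*
 * Only the pointer-like entries imply an update to the allocation btree;
 * checksum entries do not (this split is assumed here, not shown in the hits):
 */
static unsigned demo_nr_alloc_ptrs(const struct demo_ptrs *ptrs)
{
	unsigned nr = 0;

	for (unsigned i = 0; i < ptrs->nr; i++)
		switch (ptrs->entries[i].type) {
		case DEMO_ENTRY_ptr:
		case DEMO_ENTRY_stripe_ptr:
			nr++;
			break;
		default:
			break;
		}

	return nr;
}

Judging by the hit on line 57 below, the result is only fed into a running iterator count, so an upper bound is good enough here.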
41 struct bkey_s_c k, in count_iters_for_insert() argument
50 *end = bpos_min(*end, k.k->p); in count_iters_for_insert()
54 switch (k.k->type) { in count_iters_for_insert()
57 *nr_iters += bch2_bkey_nr_alloc_ptrs(k); in count_iters_for_insert()
60 *end = bpos_min(*end, k.k->p); in count_iters_for_insert()
66 struct bkey_s_c_reflink_p p = bkey_s_c_to_reflink_p(k); in count_iters_for_insert()
68 unsigned sectors = bpos_min(*end, p.k->p).offset - in count_iters_for_insert()
69 bkey_start_offset(p.k); in count_iters_for_insert()
76 if (bkey_ge(bkey_start_pos(r_k.k), POS(0, idx + sectors))) in count_iters_for_insert()
85 struct bpos pos = bkey_start_pos(k.k); in count_iters_for_insert()
86 pos.offset += min_t(u64, k.k->size, in count_iters_for_insert()
87 r_k.k->p.offset - idx); in count_iters_for_insert()
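The count_iters_for_insert() hits show what is done with that budget: each key counted adds to *nr_iters, and as soon as the budget is exhausted, *end is pulled back with bpos_min() (so only ever backwards) to the end of the key just counted (lines 50, 60); for a reflink pointer the clamp instead lands partway through it, at the start of the key plus however many sectors of indirect extents have been scanned (lines 85-87). The following is a self-contained sketch of the basic clamp, with simplified stand-in types (struct pos here is not the bcachefs bpos):

#include <stdio.h>

struct pos { unsigned long long inode, offset; };

static int pos_le(struct pos a, struct pos b)
{
	return a.inode < b.inode ||
	       (a.inode == b.inode && a.offset <= b.offset);
}

static struct pos pos_min(struct pos a, struct pos b)
{
	return pos_le(a, b) ? a : b;
}

/*
 * Count one key against the iterator budget; once the budget is hit, pull
 * *end back to the end of that key and tell the caller to stop scanning:
 */
static int count_key(struct pos key_end, unsigned key_iters,
		     struct pos *end, unsigned *nr_iters, unsigned max_iters)
{
	*nr_iters += key_iters;

	if (*nr_iters >= max_iters) {
		*end = pos_min(*end, key_end);
		return 1;
	}

	return 0;
}

int main(void)
{
	struct pos end = { .inode = 7, .offset = 4096 };
	unsigned nr_iters = 0;

	count_key((struct pos){ 7, 128 }, 3, &end, &nr_iters, 8);
	count_key((struct pos){ 7, 640 }, 6, &end, &nr_iters, 8);

	printf("atomic end clamped to %llu:%llu\n", end.inode, end.offset);
	return 0;
}

Built on its own, this prints "atomic end clamped to 7:640": the second key exhausts the budget, so the atomic end stops at that key's end rather than at the original 7:4096.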
111 struct bkey_s_c k; in bch2_extent_atomic_end() local
119 *end = insert->k.p; in bch2_extent_atomic_end()
131 for_each_btree_key_upto_continue_norestart(copy, insert->k.p, 0, k, ret) { in bch2_extent_atomic_end()
134 if (bkey_gt(bkey_start_pos(&insert->k), bkey_start_pos(k.k))) in bch2_extent_atomic_end()
135 offset = bkey_start_offset(&insert->k) - in bch2_extent_atomic_end()
136 bkey_start_offset(k.k); in bch2_extent_atomic_end()
139 switch (bch2_extent_overlap(&insert->k, k.k)) { in bch2_extent_atomic_end()
150 ret = count_iters_for_insert(trans, k, offset, end, in bch2_extent_atomic_end()
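The bch2_extent_atomic_end() hits show the scan that drives those counts: *end is seeded with the insert's own end position (line 119), every existing key up to that position is visited (line 131), the scan works out how far into an existing key the insert starts (lines 134-136), classifies how the insert overlaps it (line 139), and charges each key against the budget via count_iters_for_insert() (line 150). Here is a sketch of the overlap classification assumed in this rewrite, on plain half-open [start, end) ranges rather than the real bkey positions:

/* How a new extent [new_start, new_end) overlaps an existing one: */
enum demo_overlap {
	DEMO_OVERLAP_ALL,	/* new covers all of old */
	DEMO_OVERLAP_FRONT,	/* new covers the front of old */
	DEMO_OVERLAP_BACK,	/* new covers the back of old */
	DEMO_OVERLAP_MIDDLE,	/* new sits strictly inside old: old splits in two */
};

static enum demo_overlap demo_overlap(unsigned long long new_start,
				      unsigned long long new_end,
				      unsigned long long old_start,
				      unsigned long long old_end)
{
	if (new_start <= old_start)
		return new_end >= old_end ? DEMO_OVERLAP_ALL : DEMO_OVERLAP_FRONT;
	else
		return new_end >= old_end ? DEMO_OVERLAP_BACK : DEMO_OVERLAP_MIDDLE;
}

The back and middle cases leave part of the existing key in place (the middle case splits it in two), so they presumably consume more of the iterator budget than the all/front cases.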
162 struct bkey_i *k) in bch2_extent_trim_atomic() argument
167 ret = bch2_extent_atomic_end(trans, iter, k, &end); in bch2_extent_trim_atomic()
171 bch2_cut_back(end, k); in bch2_extent_trim_atomic()
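bch2_extent_trim_atomic() then ties the pieces together: it asks bch2_extent_atomic_end() how far this insert can reach in a single atomic transaction and cuts the key back to that position with bch2_cut_back() (line 171), so the remainder of the extent can be retried later. A simplified stand-alone sketch of that final step follows (demo_* names are invented; error handling omitted):

struct demo_key { unsigned long long start, end; };

/* Stand-in for bch2_cut_back(): shrink the key so it ends no later than new_end. */
static void demo_cut_back(unsigned long long new_end, struct demo_key *k)
{
	if (new_end < k->end)
		k->end = new_end;
}

/* Trim the key to whatever atomic_end the scan above decided on: */
static void demo_trim_atomic(struct demo_key *k, unsigned long long atomic_end)
{
	demo_cut_back(atomic_end, k);
}

The real bch2_cut_back() of course also has to adjust the key's size and value layout; the sketch only captures the position clamp.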