Lines matching full:pick (identifier search hits; all results fall in the bcachefs read path, apparently fs/bcachefs/io_read.c)

208 struct extent_ptr_decoded *pick, in __promote_alloc() argument
292 struct extent_ptr_decoded *pick, in promote_alloc() argument
309 ? max(pick->crc.compressed_size, pick->crc.live_size) in promote_alloc()
325 k, pos, pick, sectors, orig, failed); in promote_alloc()
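The max() at line 309 is the interesting bit: a promote (caching a read into faster storage) bounces the data, and the buffer must hold whichever view of the extent is larger, the on-disk compressed size or the live size still referenced by the key. A minimal standalone sketch of that sizing; struct crc_model is a simplified stand-in, not the real bch_extent_crc_unpacked:

/*
 * Model of the promote sizing at line 309: the bounce buffer must hold
 * whichever view of the extent is larger.  struct crc_model is a
 * simplified stand-in for bch_extent_crc_unpacked.
 */
#include <stdio.h>

struct crc_model {
	unsigned compressed_size;	/* sectors on disk */
	unsigned live_size;		/* sectors still referenced */
};

static unsigned promote_sectors(const struct crc_model *crc)
{
	return crc->compressed_size > crc->live_size
		? crc->compressed_size
		: crc->live_size;
}

int main(void)
{
	struct crc_model crc = { .compressed_size = 64, .live_size = 96 };

	printf("promote needs %u sectors\n", promote_sectors(&crc));
	return 0;
}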
396 struct bch_dev *ca = bch2_dev_have_ref(rbio->c, rbio->pick.ptr.dev); in bch2_rbio_free()
499 bch2_mark_io_failure(&failed, &rbio->pick, in bch2_rbio_retry()
577 struct bch_dev *ca = rbio->have_ioref ? bch2_dev_have_ref(c, rbio->pick.ptr.dev) : NULL; in bch2_read_io_err()
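bch2_mark_io_failure() at line 499 records the picked pointer's device in the retry state, so the next pass over the replicas skips it. A simplified model of that mark-and-repick loop; the types and helpers here are stand-ins, not the real bch2_mark_io_failure()/bch2_bkey_pick_read_device():

/*
 * Model of the retry logic around lines 499 and 1233: a failed device is
 * recorded in a failure set and the next pick skips it.
 */
#include <stdbool.h>
#include <stdio.h>

#define MAX_DEVS 8

struct failed_devs {
	unsigned nr;
	unsigned devs[MAX_DEVS];
};

static void mark_io_failure(struct failed_devs *f, unsigned dev)
{
	if (f->nr < MAX_DEVS)
		f->devs[f->nr++] = dev;
}

static bool dev_failed(const struct failed_devs *f, unsigned dev)
{
	for (unsigned i = 0; i < f->nr; i++)
		if (f->devs[i] == dev)
			return true;
	return false;
}

/* Pick the first replica whose device hasn't failed yet; -1 if none left. */
static int pick_read_device(const unsigned *replicas, unsigned nr,
			    const struct failed_devs *f)
{
	for (unsigned i = 0; i < nr; i++)
		if (!dev_failed(f, replicas[i]))
			return (int) replicas[i];
	return -1;
}

int main(void)
{
	unsigned replicas[] = { 0, 2 };
	struct failed_devs failed = { 0 };

	int dev = pick_read_device(replicas, 2, &failed);
	printf("first pick: dev %d\n", dev);

	mark_io_failure(&failed, (unsigned) dev);	/* read failed: retry elsewhere */
	printf("retry pick: dev %d\n", pick_read_device(replicas, 2, &failed));
	return 0;
}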
596 u64 data_offset = rbio->data_pos.offset - rbio->pick.crc.offset; in __bch2_rbio_narrow_crcs()
603 if (crc_is_compressed(rbio->pick.crc)) in __bch2_rbio_narrow_crcs()
612 !bch2_bkey_matches_ptr(c, k, rbio->pick.ptr, data_offset)) in __bch2_rbio_narrow_crcs()
617 k.k->p.offset > data_offset + rbio->pick.crc.uncompressed_size) in __bch2_rbio_narrow_crcs()
621 rbio->pick.crc, NULL, &new_crc, in __bch2_rbio_narrow_crcs()
623 rbio->pick.crc.csum_type)) { in __bch2_rbio_narrow_crcs()
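__bch2_rbio_narrow_crcs() runs when the stored checksum covers more data than the key still references (crc.offset and live_size describe a sub-range): having just read and verified the whole region, it recomputes a checksum over only the live range via bch2_rechecksum_bio() and stores that, so future reads of this key can skip the dead sectors. It bails out for compressed extents (line 603), where a sub-range cannot be checksummed independently. A toy model of the narrowing; the xor "checksum" and struct are stand-ins for the real machinery:

/*
 * Model of CRC narrowing (__bch2_rbio_narrow_crcs): recompute the
 * checksum over just the live sub-range and store that instead.
 * Toy stand-ins for bch2_rechecksum_bio() / bch_extent_crc_unpacked.
 */
#include <stdio.h>

struct crc_model {
	unsigned offset;		/* live data's offset into region */
	unsigned live_size;		/* sectors the key still references */
	unsigned uncompressed_size;	/* sectors the checksum covers */
	unsigned char csum;
};

static unsigned char toy_csum(const unsigned char *data, unsigned len)
{
	unsigned char c = 0;
	while (len--)
		c ^= *data++;
	return c;
}

/* Recompute the checksum over only the live subrange. */
static struct crc_model narrow(const unsigned char *region,
			       struct crc_model crc)
{
	struct crc_model narrowed = {
		.offset			= 0,
		.live_size		= crc.live_size,
		.uncompressed_size	= crc.live_size,
		.csum = toy_csum(region + crc.offset, crc.live_size),
	};
	return narrowed;
}

int main(void)
{
	unsigned char region[] = "abcdefgh";
	struct crc_model crc = {
		.offset = 2, .live_size = 4, .uncompressed_size = 8,
		.csum = toy_csum(region, 8),
	};

	struct crc_model narrowed = narrow(region, crc);
	printf("old covers %u sectors, new covers %u\n",
	       crc.uncompressed_size, narrowed.uncompressed_size);
	return 0;
}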
661 struct bch_extent_crc_unpacked crc = rbio->pick.crc; in bch2_read_csum_err()
668 bch2_csum_err_msg(&buf, crc.csum_type, rbio->pick.crc.csum, csum); in bch2_read_csum_err()
670 struct bch_dev *ca = rbio->have_ioref ? bch2_dev_have_ref(c, rbio->pick.ptr.dev) : NULL; in bch2_read_csum_err()
690 struct bch_dev *ca = rbio->have_ioref ? bch2_dev_have_ref(c, rbio->pick.ptr.dev) : NULL; in bch2_read_decompress_err()
710 struct bch_dev *ca = rbio->have_ioref ? bch2_dev_have_ref(c, rbio->pick.ptr.dev) : NULL; in bch2_read_decrypt_err()
726 struct bch_dev *ca = rbio->have_ioref ? bch2_dev_have_ref(c, rbio->pick.ptr.dev) : NULL; in __bch2_read_endio()
731 struct bch_extent_crc_unpacked crc = rbio->pick.crc; in __bch2_read_endio()
751 bool csum_good = !bch2_crc_cmp(csum, rbio->pick.crc.csum) || c->opts.no_data_io; in __bch2_read_endio()
813 rbio->parent->pick = rbio->pick; in __bch2_read_endio()
855 struct bch_dev *ca = rbio->have_ioref ? bch2_dev_have_ref(c, rbio->pick.ptr.dev) : NULL; in bch2_read_endio()
871 (ca && dev_ptr_stale(ca, &rbio->pick.ptr))) { in bch2_read_endio()
883 crc_is_compressed(rbio->pick.crc) || in bch2_read_endio()
884 bch2_csum_type_is_encryption(rbio->pick.crc.csum_type)) in bch2_read_endio()
886 else if (rbio->pick.crc.csum_type) in bch2_read_endio()
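Lines 883-886 choose where read completion runs: heavier post-read work (decompressing, decrypting, and similar transforms) is too much for the IO completion path, so those rbios are punted to a workqueue; a plain checksum verification appears to get a high-priority context (line 886), and everything else completes inline. A sketch of that decision with simplified stand-in names:

/*
 * Model of the completion-context choice in bch2_read_endio
 * (lines 883-886).  Enum and predicate names are stand-ins.
 */
#include <stdbool.h>
#include <stdio.h>

enum rbio_context {
	CTX_INLINE,	/* complete in the IO completion path */
	CTX_HIGHPRI,	/* just a checksum to verify */
	CTX_UNBOUND,	/* heavy work: decompress/decrypt */
};

static enum rbio_context pick_context(bool compressed, bool encrypted,
				      bool has_csum)
{
	if (compressed || encrypted)
		return CTX_UNBOUND;
	if (has_csum)
		return CTX_HIGHPRI;
	return CTX_INLINE;
}

int main(void)
{
	printf("compressed read -> %d\n", pick_context(true, false, true));
	printf("checksummed read -> %d\n", pick_context(false, false, true));
	return 0;
}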
946 struct extent_ptr_decoded pick; in __bch2_read_extent() local
967 ret = bch2_bkey_pick_read_device(c, k, failed, &pick, dev); in __bch2_read_extent()
984 if (unlikely(bch2_csum_type_is_encryption(pick.crc.csum_type)) && in __bch2_read_extent()
997 struct bch_dev *ca = bch2_dev_get_ioref(c, pick.ptr.dev, READ); in __bch2_read_extent()
1006 !pick.ptr.cached && in __bch2_read_extent()
1008 unlikely(dev_ptr_stale(ca, &pick.ptr))) { in __bch2_read_extent()
1009 read_from_stale_dirty_pointer(trans, ca, k, pick.ptr); in __bch2_read_extent()
1010 bch2_mark_io_failure(failed, &pick, false); in __bch2_read_extent()
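Lines 1006-1010 catch stale pointers: each pointer embeds the generation of the bucket it points into, and if the bucket has since been reused the generations no longer match. A stale dirty (non-cached) pointer should be impossible, so it is reported via read_from_stale_dirty_pointer() and the pick marked failed before retrying. A simplified model of the generation check behind dev_ptr_stale(), with stand-in types:

/*
 * Model of the staleness check at lines 1006-1010: a pointer is stale
 * when its embedded generation no longer matches the bucket's current
 * generation.  Simplified stand-in types.
 */
#include <stdbool.h>
#include <stdio.h>

struct bucket_model	{ unsigned char gen; };
struct ptr_model	{ unsigned bucket; unsigned char gen; bool cached; };

static bool ptr_stale(const struct bucket_model *buckets,
		      const struct ptr_model *ptr)
{
	return buckets[ptr->bucket].gen != ptr->gen;
}

int main(void)
{
	struct bucket_model buckets[] = { { .gen = 3 } };
	struct ptr_model dirty = { .bucket = 0, .gen = 2, .cached = false };

	if (!dirty.cached && ptr_stale(buckets, &dirty))
		printf("stale dirty pointer: report, mark failed, retry\n");
	return 0;
}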
1021 bch2_can_narrow_extent_crcs(k, pick.crc); in __bch2_read_extent()
1028 if (crc_is_compressed(pick.crc) || in __bch2_read_extent()
1029 (pick.crc.csum_type != BCH_CSUM_none && in __bch2_read_extent()
1030 (bvec_iter_sectors(iter) != pick.crc.uncompressed_size || in __bch2_read_extent()
1031 (bch2_csum_type_is_encryption(pick.crc.csum_type) && in __bch2_read_extent()
1042 if (pick.crc.compressed_size > u->op.wbio.bio.bi_iter.bi_size) { in __bch2_read_extent()
1049 iter.bi_size = pick.crc.compressed_size << 9; in __bch2_read_extent()
1054 rbio = promote_alloc(trans, iter, k, &pick, flags, orig, in __bch2_read_extent()
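Lines 1028-1031 decide whether the read must bounce through a private buffer: bouncing is needed when the data has to be decompressed, or when a checksum covers the whole extent but the caller asked for only part of it (the condition continues past the lines shown, e.g. for encrypted data whose plaintext would otherwise land in caller memory), since verification then has to happen over the full range. A sketch of that predicate under those assumptions:

/*
 * Model of the bounce decision at lines 1028-1031.  Names are
 * simplified stand-ins for the real flags and helpers.
 */
#include <stdbool.h>
#include <stdio.h>

static bool must_bounce(bool compressed, bool has_csum,
			unsigned want_sectors, unsigned extent_sectors,
			bool encrypted_to_user_memory)
{
	return compressed ||
	       (has_csum &&
		(want_sectors != extent_sectors ||
		 encrypted_to_user_memory));
}

int main(void)
{
	/* partial read of a checksummed extent: must read & verify it all */
	printf("%d\n", must_bounce(false, true, 8, 32, false));
	return 0;
}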
1058 EBUG_ON(crc_is_compressed(pick.crc)); in __bch2_read_extent()
1059 EBUG_ON(pick.crc.csum_type && in __bch2_read_extent()
1060 (bvec_iter_sectors(iter) != pick.crc.uncompressed_size || in __bch2_read_extent()
1061 bvec_iter_sectors(iter) != pick.crc.live_size || in __bch2_read_extent()
1062 pick.crc.offset || in __bch2_read_extent()
1066 pick.ptr.offset += pick.crc.offset + in __bch2_read_extent()
1069 pick.crc.compressed_size = bvec_iter_sectors(iter); in __bch2_read_extent()
1070 pick.crc.uncompressed_size = bvec_iter_sectors(iter); in __bch2_read_extent()
1071 pick.crc.offset = 0; in __bch2_read_extent()
1072 pick.crc.live_size = bvec_iter_sectors(iter); in __bch2_read_extent()
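Lines 1066-1072 are the opposite, direct-read case: with no compression and no checksum spanning a wider range, the device pointer can be advanced to exactly the sectors the caller wants and the crc fields rewritten to describe just that slice, so the read transfers no extra data. A model of that normalization with stand-in types:

/*
 * Model of the pick adjustment at lines 1066-1072: advance the pointer
 * to the requested sectors and rewrite the crc to cover only them.
 * Simplified stand-in types, not the real extent_ptr_decoded.
 */
#include <stdio.h>

struct crc_model {
	unsigned offset, live_size;
	unsigned compressed_size, uncompressed_size;
};
struct pick_model {
	unsigned long ptr_offset;	/* device sector of the extent */
	struct crc_model crc;
};

static void narrow_pick_to_request(struct pick_model *pick,
				   unsigned long extent_start_sector,
				   unsigned long want_sector,
				   unsigned want_sectors)
{
	pick->ptr_offset += pick->crc.offset +
		(want_sector - extent_start_sector);
	pick->crc.compressed_size	= want_sectors;
	pick->crc.uncompressed_size	= want_sectors;
	pick->crc.offset		= 0;
	pick->crc.live_size		= want_sectors;
}

int main(void)
{
	struct pick_model p = {
		.ptr_offset = 1024,
		.crc = { .offset = 2, .live_size = 16,
			 .compressed_size = 16, .uncompressed_size = 16 },
	};

	narrow_pick_to_request(&p, 100, 104, 8);
	printf("read 8 sectors at device offset %lu\n", p.ptr_offset);
	return 0;
}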
1083 pick.crc.compressed_size << 9); in __bch2_read_extent()
1085 pick.crc.compressed_size << 9; in __bch2_read_extent()
1087 unsigned sectors = pick.crc.compressed_size; in __bch2_read_extent()
1117 EBUG_ON(bio_sectors(&rbio->bio) != pick.crc.compressed_size); in __bch2_read_extent()
1129 rbio->pick = pick; in __bch2_read_extent()
1138 rbio->bio.bi_iter.bi_sector = pick.ptr.offset; in __bch2_read_extent()
1154 if (ca && pick.ptr.cached && !u) in __bch2_read_extent()
1155 bch2_bucket_io_time_reset(trans, pick.ptr.dev, in __bch2_read_extent()
1156 PTR_BUCKET_NR(ca, &pick.ptr), READ); in __bch2_read_extent()
1172 if (likely(!rbio->pick.do_ec_reconstruct)) { in __bch2_read_extent()
1233 bch2_mark_io_failure(failed, &pick, in __bch2_read_extent()
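Finally, line 1172 dispatches the read: if the pick doesn't need erasure-coding reconstruction the bio goes straight to the picked device; otherwise the data is rebuilt from the stripe, and on failure line 1233 marks the pick so a retry can choose another replica. A stand-in sketch of that dispatch (not the real submit or bch2_ec_read_extent() paths):

/*
 * Model of the final dispatch (line 1172 onward): normal submit vs
 * erasure-coded reconstruction.  Stand-in names throughout.
 */
#include <stdbool.h>
#include <stdio.h>

struct pick_model { bool do_ec_reconstruct; };

static void submit_read(const struct pick_model *p)
{
	if (!p->do_ec_reconstruct)
		printf("normal read: submit bio to picked device\n");
	else
		printf("degraded read: reconstruct from erasure-coded stripe\n");
}

int main(void)
{
	struct pick_model healthy  = { .do_ec_reconstruct = false };
	struct pick_model degraded = { .do_ec_reconstruct = true };

	submit_read(&healthy);
	submit_read(&degraded);
	return 0;
}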