Lines Matching full:bytes

42 	u64 bytes;  member
52 u64 *bytes, bool for_alloc);
57 u64 bytes, bool update_stats);
577 static int io_ctl_add_entry(struct btrfs_io_ctl *io_ctl, u64 offset, u64 bytes, in io_ctl_add_entry() argument
587 put_unaligned_le64(bytes, &entry->bytes); in io_ctl_add_entry()
661 entry->bytes = get_unaligned_le64(&e->bytes); in io_ctl_read_entry()
724 * bytes we can have, or whatever is less than that. in recalculate_thresholds()
825 if (!e->bytes) { in __load_free_space_cache()
909 const u64 bytes = info->bytes; in copy_free_space_cache() local
914 ret = btrfs_add_free_space(block_group, offset, bytes); in copy_free_space_cache()
918 u64 bytes = ctl->unit; in copy_free_space_cache() local
920 ret = search_bitmap(ctl, info, &offset, &bytes, false); in copy_free_space_cache()
922 bitmap_clear_bits(ctl, info, offset, bytes, true); in copy_free_space_cache()
925 bytes); in copy_free_space_cache()
1102 ret = io_ctl_add_entry(io_ctl, e->offset, e->bytes, in write_cache_extent_entries()
1132 trim_entry->bytes, NULL); in write_cache_extent_entries()
1570 static inline unsigned long bytes_to_bits(u64 bytes, u32 unit) in bytes_to_bits() argument
1572 return (unsigned long)(div_u64(bytes, unit)); in bytes_to_bits()
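
bytes_to_bits() above is the whole bitmap granularity story: one bit per ctl->unit bytes. Below is a minimal userspace sketch of the same conversion; the 4 KiB unit and the plain division standing in for div_u64() are assumptions for illustration only.

#include <stdint.h>
#include <stdio.h>

/* Userspace stand-in for bytes_to_bits(): one bitmap bit per 'unit' bytes. */
static unsigned long bytes_to_bits(uint64_t bytes, uint32_t unit)
{
        return (unsigned long)(bytes / unit);   /* div_u64() in the kernel */
}

int main(void)
{
        uint32_t unit = 4096;                   /* assumed sectorsize */
        uint64_t bytes = 1024 * 1024;           /* a 1 MiB free range */

        /* 1 MiB at a 4 KiB granularity occupies 256 bits of the bitmap. */
        printf("%llu bytes -> %lu bits\n",
               (unsigned long long)bytes, bytes_to_bits(bytes, unit));
        return 0;
}
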
1660 * we've found already if it's larger, or we want to use ->bytes.
1662 * This matters because find_free_space() will skip entries whose ->bytes is
1663 * less than the required bytes. So if we didn't search down this bitmap, we
1666 * ->max_extent_size set to 4K and ->bytes set to 1M. A second entry hasn't set
1667 * ->max_extent_size yet, has ->bytes set to 8K and it's contiguous. We will
1683 return entry->bytes; in get_max_extent_size()
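
The comment fragments above (1660-1667) explain why a bitmap's cached ->max_extent_size, not its raw ->bytes total, decides whether the entry is worth searching. Here is a simplified userspace sketch of that rule; the struct free_space layout and helper below are illustrative stand-ins, not the kernel definitions.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified model of a free-space entry: either an extent or a bitmap. */
struct free_space {
        uint64_t bytes;           /* total free bytes tracked by the entry   */
        uint64_t max_extent_size; /* largest contiguous run, 0 if not cached */
        bool     bitmap;
};

/*
 * For a bitmap, ->bytes may be large while the largest *contiguous* run is
 * tiny, so use the cached max_extent_size when we have one.  For plain
 * extent entries ->bytes is already contiguous.
 */
static uint64_t get_max_extent_size(const struct free_space *e)
{
        if (e->bitmap && e->max_extent_size)
                return e->max_extent_size;
        return e->bytes;
}

int main(void)
{
        /* 1 MiB spread over a bitmap, but nothing contiguous above 4 KiB. */
        struct free_space fragmented = { .bytes = 1 << 20,
                                         .max_extent_size = 4096,
                                         .bitmap = true };
        /* 8 KiB fully contiguous extent entry. */
        struct free_space contiguous = { .bytes = 8192 };

        /* An 8 KiB allocation should prefer the second entry. */
        printf("fragmented usable: %llu, contiguous usable: %llu\n",
               (unsigned long long)get_max_extent_size(&fragmented),
               (unsigned long long)get_max_extent_size(&contiguous));
        return 0;
}
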
1703 * want a section that has at least bytes size and comes at or after the given
1760 prev->offset + prev->bytes > offset) in tree_search_offset()
1792 prev->offset + prev->bytes > offset) in tree_search_offset()
1797 } else if (entry->offset + entry->bytes > offset) in tree_search_offset()
1813 if (entry->offset + entry->bytes > offset) in tree_search_offset()
1832 ctl->discardable_bytes[BTRFS_STAT_CURR] -= info->bytes; in unlink_free_space()
1836 ctl->free_space -= info->bytes; in unlink_free_space()
1846 ASSERT(info->bytes || info->bitmap); in link_free_space()
1855 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in link_free_space()
1858 ctl->free_space += info->bytes; in link_free_space()
1870 * want to re-link it into our ctl bytes index. in relink_bitmap_entry()
1883 u64 offset, u64 bytes, bool update_stat) in bitmap_clear_bits() argument
1889 count = bytes_to_bits(bytes, ctl->unit); in bitmap_clear_bits()
1895 info->bytes -= bytes; in bitmap_clear_bits()
1910 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in bitmap_clear_bits()
1914 ctl->free_space -= bytes; in bitmap_clear_bits()
1919 u64 bytes) in btrfs_bitmap_set_bits() argument
1925 count = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_set_bits()
1932 * We set some bytes; we have no idea what the max extent size is in btrfs_bitmap_set_bits()
1936 info->bytes += bytes; in btrfs_bitmap_set_bits()
1937 ctl->free_space += bytes; in btrfs_bitmap_set_bits()
1950 ctl->discardable_bytes[BTRFS_STAT_CURR] += bytes; in btrfs_bitmap_set_bits()
1955 * If we cannot find a suitable extent, we will use bytes to record
1960 u64 *bytes, bool for_alloc) in search_bitmap() argument
1974 bitmap_info->max_extent_size < *bytes) { in search_bitmap()
1975 *bytes = bitmap_info->max_extent_size; in search_bitmap()
1981 bits = bytes_to_bits(*bytes, ctl->unit); in search_bitmap()
2002 *bytes = (u64)(found_bits) * ctl->unit; in search_bitmap()
2006 *bytes = (u64)(max_bits) * ctl->unit; in search_bitmap()
2007 bitmap_info->max_extent_size = *bytes; in search_bitmap()
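
search_bitmap() above either finds a run of set bits long enough for *bytes or records the longest run it saw in ->max_extent_size so later searches can bail out early (lines 1974-2007). Below is a rough userspace sketch of that scan over a plain bool array; the 4 KiB unit, the 16-bit bitmap, and the helper name are assumptions, and the kernel walks the bitmap with find_next_bit()/find_next_zero_bit() rather than a bit-at-a-time loop.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define UNIT    4096u           /* assumed ctl->unit (sectorsize) */
#define NR_BITS 16              /* tiny bitmap for the example    */

/*
 * Look for a run of set bits long enough to cover *bytes.  On success
 * report the run found; on failure report (and cache) the longest run
 * seen so the next caller can skip this bitmap cheaply.
 */
static int search_bitmap(const bool bits[NR_BITS], uint64_t *offset,
                         uint64_t *bytes, uint64_t *max_extent_size)
{
        unsigned long want = *bytes / UNIT;
        unsigned long run = 0, max_run = 0;

        for (unsigned long i = 0; i < NR_BITS; i++) {
                run = bits[i] ? run + 1 : 0;
                if (run > max_run)
                        max_run = run;
                if (want && run >= want) {
                        *offset = (uint64_t)(i + 1 - run) * UNIT;
                        *bytes = (uint64_t)run * UNIT;
                        return 0;
                }
        }
        *bytes = (uint64_t)max_run * UNIT;      /* best we could find  */
        *max_extent_size = *bytes;              /* cache for next time */
        return -1;
}

int main(void)
{
        /* Free space at bits 2-3 and 8-12 (an 8 KiB and a 20 KiB run). */
        bool bits[NR_BITS] = { [2] = 1, [3] = 1,
                               [8] = 1, [9] = 1, [10] = 1, [11] = 1, [12] = 1 };
        uint64_t offset = 0, bytes = 4 * UNIT, max_extent_size = 0;

        if (!search_bitmap(bits, &offset, &bytes, &max_extent_size))
                printf("found %llu bytes at offset %llu\n",
                       (unsigned long long)bytes, (unsigned long long)offset);
        return 0;
}
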
2012 /* Cache the size of the max extent in bytes */
2014 find_free_space(struct btrfs_free_space_ctl *ctl, u64 *offset, u64 *bytes, in find_free_space() argument
2045 * If we are using the bytes index then all subsequent entries in find_free_space()
2046 * in this tree are going to be < bytes, so simply set the max in find_free_space()
2052 if (entry->bytes < *bytes) { in find_free_space()
2063 if (*bytes >= align) { in find_free_space()
2074 * We don't break here if we're using the bytes index because we in find_free_space()
2080 if (entry->bytes < *bytes + align_off) { in find_free_space()
2088 u64 size = *bytes; in find_free_space()
2093 *bytes = size; in find_free_space()
2113 *bytes = entry->bytes - align_off; in find_free_space()
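
find_free_space() above only uses an entry if, after rounding its start up to the requested alignment, enough bytes remain (lines 2063-2113). A small sketch of that bookkeeping follows, assuming a power-of-two alignment; the helper names and the 64 KiB example alignment are hypothetical.

#include <stdint.h>
#include <stdio.h>

/* Round 'x' up to the next multiple of 'align' (align is a power of two). */
static uint64_t align_up(uint64_t x, uint64_t align)
{
        return (x + align - 1) & ~(align - 1);
}

/*
 * An entry only fits if, after pushing its start up to the required
 * alignment, enough bytes remain.  Returns the aligned offset or
 * (uint64_t)-1 if the entry is too small once aligned.
 */
static uint64_t fit_aligned(uint64_t entry_offset, uint64_t entry_bytes,
                            uint64_t want, uint64_t align)
{
        uint64_t aligned = align_up(entry_offset, align);
        uint64_t align_off = aligned - entry_offset;

        if (entry_bytes < want + align_off)
                return (uint64_t)-1;    /* skip this entry */
        return aligned;
}

int main(void)
{
        /* Hypothetical 20 KiB entry starting at 0x11000, 64 KiB alignment. */
        uint64_t off = fit_aligned(0x11000, 20 * 1024, 4096, 0x10000);

        if (off != (uint64_t)-1)
                printf("allocate at 0x%llx\n", (unsigned long long)off);
        else
                printf("entry too small once aligned\n");
        return 0;
}
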
2124 info->bytes = 0; in add_new_bitmap()
2141 if (bitmap_info->bytes && !btrfs_free_space_trimmed(bitmap_info)) { in free_bitmap()
2144 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bitmap_info->bytes; in free_bitmap()
2156 u64 *offset, u64 *bytes) in remove_from_bitmap() argument
2180 search_bytes = min(search_bytes, *bytes); in remove_from_bitmap()
2187 *bytes -= search_bytes; in remove_from_bitmap()
2189 if (*bytes) { in remove_from_bitmap()
2191 if (!bitmap_info->bytes) in remove_from_bitmap()
2195 * no entry after this bitmap, but we still have bytes to in remove_from_bitmap()
2225 } else if (!bitmap_info->bytes) in remove_from_bitmap()
2233 u64 bytes, enum btrfs_trim_state trim_state) in add_bytes_to_bitmap() argument
2246 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in add_bytes_to_bitmap()
2253 bytes_to_set = min(end - offset, bytes); in add_bytes_to_bitmap()
2274 if (!forced && info->bytes >= FORCE_EXTENT_THRESHOLD) in use_bitmap()
2289 if (info->bytes <= fs_info->sectorsize * 8) { in use_bitmap()
2321 u64 bytes, offset, bytes_added; in insert_into_bitmap() local
2325 bytes = info->bytes; in insert_into_bitmap()
2363 bytes, trim_state); in insert_into_bitmap()
2364 bytes -= bytes_added; in insert_into_bitmap()
2368 if (!bytes) { in insert_into_bitmap()
2382 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in insert_into_bitmap()
2384 bytes -= bytes_added; in insert_into_bitmap()
2388 if (!bytes) { in insert_into_bitmap()
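
insert_into_bitmap() above loops, subtracting bytes_added until the whole range is recorded, because each bitmap entry covers only a fixed window and add_bytes_to_bitmap() sets at most min(end - offset, bytes) per pass (lines 2253, 2363-2388). The sketch below shows that windowing; the one-page bitmap (32768 bits) and 4 KiB unit are assumptions.

#include <stdint.h>
#include <stdio.h>

#define UNIT            4096ull
#define BITS_PER_BITMAP 32768ull                   /* assumed: one page of bits */
#define BITMAP_BYTES    (UNIT * BITS_PER_BITMAP)   /* range one bitmap covers   */

/* Start of the bitmap window that 'offset' falls into. */
static uint64_t bitmap_start(uint64_t offset)
{
        return offset - (offset % BITMAP_BYTES);
}

int main(void)
{
        /* Hypothetical range straddling two bitmap windows. */
        uint64_t offset = BITMAP_BYTES - (8ull << 20);  /* 8 MiB before the boundary */
        uint64_t bytes  = 24ull << 20;                  /* 24 MiB to record          */

        /* Same shape as the insert_into_bitmap()/add_bytes_to_bitmap() loop. */
        while (bytes) {
                uint64_t end = bitmap_start(offset) + BITMAP_BYTES;
                uint64_t bytes_added = end - offset < bytes ? end - offset : bytes;

                printf("set %llu bytes in bitmap starting at %llu\n",
                       (unsigned long long)bytes_added,
                       (unsigned long long)bitmap_start(offset));

                offset += bytes_added;
                bytes  -= bytes_added;
        }
        return 0;
}
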
2460 u64 bytes = info->bytes; in try_merge_free_space() local
2469 right_info = tree_search_offset(ctl, offset + bytes, 0, 0); in try_merge_free_space()
2482 info->bytes += right_info->bytes; in try_merge_free_space()
2489 left_info->offset + left_info->bytes == offset && in try_merge_free_space()
2493 info->bytes += left_info->bytes; in try_merge_free_space()
2508 const u64 end = info->offset + info->bytes; in steal_from_bitmap_to_end()
2510 u64 bytes; in steal_from_bitmap_to_end() local
2520 bytes = (j - i) * ctl->unit; in steal_from_bitmap_to_end()
2521 info->bytes += bytes; in steal_from_bitmap_to_end()
2527 bitmap_clear_bits(ctl, bitmap, end, bytes, update_stat); in steal_from_bitmap_to_end()
2529 if (!bitmap->bytes) in steal_from_bitmap_to_end()
2544 u64 bytes; in steal_from_bitmap_to_front() local
2570 bytes = (i + 1) * ctl->unit; in steal_from_bitmap_to_front()
2572 bytes = (i - prev_j) * ctl->unit; in steal_from_bitmap_to_front()
2574 info->offset -= bytes; in steal_from_bitmap_to_front()
2575 info->bytes += bytes; in steal_from_bitmap_to_front()
2581 bitmap_clear_bits(ctl, bitmap, info->offset, bytes, update_stat); in steal_from_bitmap_to_front()
2583 if (!bitmap->bytes) in steal_from_bitmap_to_front()
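
steal_from_bitmap_to_end()/..._to_front() above let a freshly added extent entry absorb set bits that touch it, clearing them from the bitmap and growing the extent (lines 2508-2583). The sketch below covers only the forward direction; the bit-per-4-KiB layout and helper name are assumptions.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define UNIT    4096ull
#define NR_BITS 16

struct free_extent {
        uint64_t offset;
        uint64_t bytes;
};

/*
 * Absorb the run of set bits that starts right where the extent ends,
 * growing the extent and clearing the bits it swallowed (bit i covers
 * [i * UNIT, (i + 1) * UNIT) here).
 */
static void steal_to_end(struct free_extent *info, bool bits[NR_BITS])
{
        uint64_t i = (info->offset + info->bytes) / UNIT;

        while (i < NR_BITS && bits[i]) {
                bits[i] = false;                /* bitmap_clear_bits() */
                info->bytes += UNIT;
                i++;
        }
}

int main(void)
{
        /* Extent [0, 8 KiB) followed by 12 KiB of free space in the bitmap. */
        bool bits[NR_BITS] = { [2] = 1, [3] = 1, [4] = 1 };
        struct free_extent info = { .offset = 0, .bytes = 2 * UNIT };

        steal_to_end(&info, bits);
        printf("extent is now %llu bytes\n", (unsigned long long)info.bytes);
        return 0;
}
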
2626 u64 offset, u64 bytes, in __btrfs_add_free_space() argument
2633 u64 filter_bytes = bytes; in __btrfs_add_free_space()
2642 info->bytes = bytes; in __btrfs_add_free_space()
2673 filter_bytes = max(filter_bytes, info->bytes); in __btrfs_add_free_space()
2803 u64 offset, u64 bytes) in btrfs_remove_free_space() argument
2823 offset + bytes) { in btrfs_remove_free_space()
2825 offset + bytes - block_group->start; in btrfs_remove_free_space()
2834 if (!bytes) in btrfs_remove_free_space()
2860 u64 to_free = min(bytes, info->bytes); in btrfs_remove_free_space()
2862 info->bytes -= to_free; in btrfs_remove_free_space()
2864 if (info->bytes) { in btrfs_remove_free_space()
2872 bytes -= to_free; in btrfs_remove_free_space()
2875 u64 old_end = info->bytes + info->offset; in btrfs_remove_free_space()
2877 info->bytes = offset - info->offset; in btrfs_remove_free_space()
2883 /* Not enough bytes in this entry to satisfy us */ in btrfs_remove_free_space()
2884 if (old_end < offset + bytes) { in btrfs_remove_free_space()
2885 bytes -= old_end - offset; in btrfs_remove_free_space()
2888 } else if (old_end == offset + bytes) { in btrfs_remove_free_space()
2895 offset + bytes, in btrfs_remove_free_space()
2896 old_end - (offset + bytes), in btrfs_remove_free_space()
2903 ret = remove_from_bitmap(ctl, info, &offset, &bytes); in btrfs_remove_free_space()
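
When btrfs_remove_free_space() removes a chunk from the middle of an extent entry, the entry keeps the front piece and the tail past offset + bytes is re-added as new free space (lines 2875-2896). The sketch below shows that split; remove_middle() is a hypothetical helper, and the kernel re-adds the tail via __btrfs_add_free_space() rather than returning it.

#include <stdint.h>
#include <stdio.h>

struct free_extent {
        uint64_t offset;
        uint64_t bytes;
};

/*
 * Carve [offset, offset + bytes) out of an entry that fully contains it.
 * The entry keeps the front piece; the tail piece is reported so the
 * caller can re-add it as a new entry.
 */
static void remove_middle(struct free_extent *info, uint64_t offset,
                          uint64_t bytes, struct free_extent *tail)
{
        uint64_t old_end = info->offset + info->bytes;

        info->bytes = offset - info->offset;      /* front piece stays */
        tail->offset = offset + bytes;
        tail->bytes = old_end - (offset + bytes); /* may be 0 */
}

int main(void)
{
        struct free_extent info = { .offset = 0, .bytes = 1 << 20 };  /* 1 MiB free */
        struct free_extent tail;

        /* Remove 128 KiB starting at 256 KiB. */
        remove_middle(&info, 256 << 10, 128 << 10, &tail);
        printf("front: [%llu, %llu)  tail: [%llu, %llu)\n",
               (unsigned long long)info.offset,
               (unsigned long long)(info.offset + info.bytes),
               (unsigned long long)tail.offset,
               (unsigned long long)(tail.offset + tail.bytes));
        return 0;
}
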
2916 u64 bytes) in btrfs_dump_free_space() argument
2939 if (info->bytes >= bytes && !block_group->ro) in btrfs_dump_free_space()
2941 btrfs_crit(fs_info, "entry offset %llu, bytes %llu, bitmap %s", in btrfs_dump_free_space()
2942 info->offset, info->bytes, str_yes_no(info->bitmap)); in btrfs_dump_free_space()
2948 "%d free space entries at or bigger than %llu bytes", in btrfs_dump_free_space()
2949 count, bytes); in btrfs_dump_free_space()
3013 entry->bytes; in __btrfs_return_cluster_to_free_space()
3023 entry->bytes; in __btrfs_return_cluster_to_free_space()
3087 u64 offset, u64 bytes, u64 empty_size, in btrfs_find_space_for_alloc() argument
3094 u64 bytes_search = bytes + empty_size; in btrfs_find_space_for_alloc()
3112 bitmap_clear_bits(ctl, entry, offset, bytes, true); in btrfs_find_space_for_alloc()
3115 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_find_space_for_alloc()
3117 if (!entry->bytes) in btrfs_find_space_for_alloc()
3126 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_find_space_for_alloc()
3128 entry->offset = offset + bytes; in btrfs_find_space_for_alloc()
3129 WARN_ON(entry->bytes < bytes + align_gap_len); in btrfs_find_space_for_alloc()
3131 entry->bytes -= bytes + align_gap_len; in btrfs_find_space_for_alloc()
3132 if (!entry->bytes) in btrfs_find_space_for_alloc()
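
In btrfs_find_space_for_alloc() above, an extent entry shrinks from the front: any alignment gap before the returned offset is handed back to the pool, and the entry's offset/bytes move past the allocation (lines 3126-3132). The sketch below assumes the caller already chose the aligned offset inside the entry; alloc_from_entry() and the example numbers are hypothetical.

#include <stdint.h>
#include <stdio.h>

struct free_extent {
        uint64_t offset;
        uint64_t bytes;
};

/*
 * The returned offset may sit past the entry start because of alignment,
 * in which case the skipped gap goes back to the free-space pool and the
 * entry shrinks from the front.
 */
static uint64_t alloc_from_entry(struct free_extent *entry, uint64_t offset,
                                 uint64_t bytes, struct free_extent *gap)
{
        uint64_t align_gap_len = offset - entry->offset;

        gap->offset = entry->offset;    /* re-added by __btrfs_add_free_space() */
        gap->bytes = align_gap_len;

        entry->offset = offset + bytes;
        entry->bytes -= bytes + align_gap_len;  /* 0 means: free the entry */
        return offset;
}

int main(void)
{
        struct free_extent entry = { .offset = 0x11000, .bytes = 1 << 20 };
        struct free_extent gap;

        /* Allocate 64 KiB at the 64 KiB aligned offset inside the entry. */
        uint64_t where = alloc_from_entry(&entry, 0x20000, 64 << 10, &gap);

        printf("alloc at 0x%llx, gap %llu bytes, entry now [0x%llx, +%llu)\n",
               (unsigned long long)where, (unsigned long long)gap.bytes,
               (unsigned long long)entry.offset,
               (unsigned long long)entry.bytes);
        return 0;
}
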
3193 u64 bytes, u64 min_start, in btrfs_alloc_from_bitmap() argument
3199 u64 search_bytes = bytes; in btrfs_alloc_from_bitmap()
3203 search_bytes = bytes; in btrfs_alloc_from_bitmap()
3213 bitmap_clear_bits(ctl, entry, ret, bytes, false); in btrfs_alloc_from_bitmap()
3219 * given a cluster, try to allocate 'bytes' from it, returns 0
3224 struct btrfs_free_cluster *cluster, u64 bytes, in btrfs_alloc_from_cluster() argument
3237 if (bytes > cluster->max_size) in btrfs_alloc_from_cluster()
3249 if (entry->bytes < bytes) in btrfs_alloc_from_cluster()
3253 if (entry->bytes < bytes || in btrfs_alloc_from_cluster()
3265 cluster, entry, bytes, in btrfs_alloc_from_cluster()
3276 cluster->window_start += bytes; in btrfs_alloc_from_cluster()
3280 entry->offset += bytes; in btrfs_alloc_from_cluster()
3281 entry->bytes -= bytes; in btrfs_alloc_from_cluster()
3295 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_alloc_from_cluster()
3297 ctl->free_space -= bytes; in btrfs_alloc_from_cluster()
3299 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in btrfs_alloc_from_cluster()
3302 if (entry->bytes == 0) { in btrfs_alloc_from_cluster()
3325 u64 offset, u64 bytes, in btrfs_bitmap_cluster() argument
3343 want_bits = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_cluster()
3412 * Try to find a cluster with at least 'bytes' total bytes, at least one
3418 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_no_bitmap() argument
3440 while (entry->bitmap || entry->bytes < min_bytes) { in setup_cluster_no_bitmap()
3449 window_free = entry->bytes; in setup_cluster_no_bitmap()
3450 max_extent = entry->bytes; in setup_cluster_no_bitmap()
3464 if (entry->bytes < min_bytes) in setup_cluster_no_bitmap()
3468 window_free += entry->bytes; in setup_cluster_no_bitmap()
3469 if (entry->bytes > max_extent) in setup_cluster_no_bitmap()
3470 max_extent = entry->bytes; in setup_cluster_no_bitmap()
3473 if (window_free < bytes || max_extent < cont1_bytes) in setup_cluster_no_bitmap()
3489 if (entry->bitmap || entry->bytes < min_bytes) in setup_cluster_no_bitmap()
3495 total_size += entry->bytes; in setup_cluster_no_bitmap()
3511 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_bitmap() argument
3536 if (entry->bytes < bytes) in setup_cluster_bitmap()
3539 bytes, cont1_bytes, min_bytes); in setup_cluster_bitmap()
3553 * is to find at least bytes+empty_size.
3561 u64 offset, u64 bytes, u64 empty_size) in btrfs_find_space_cluster() argument
3578 cont1_bytes = bytes + empty_size; in btrfs_find_space_cluster()
3581 cont1_bytes = bytes; in btrfs_find_space_cluster()
3584 cont1_bytes = max(bytes, (bytes + empty_size) >> 2); in btrfs_find_space_cluster()
3594 if (ctl->free_space < bytes) { in btrfs_find_space_cluster()
3607 trace_btrfs_find_cluster(block_group, offset, bytes, empty_size, in btrfs_find_space_cluster()
3611 bytes + empty_size, in btrfs_find_space_cluster()
3615 offset, bytes + empty_size, in btrfs_find_space_cluster()
3652 u64 *total_trimmed, u64 start, u64 bytes, in do_trimming() argument
3662 const u64 end = start + bytes; in do_trimming()
3677 ret = btrfs_discard_extent(fs_info, start, bytes, &trimmed); in do_trimming()
3691 __btrfs_add_free_space(block_group, start, bytes, trim_state); in do_trimming()
3725 u64 bytes; in trim_no_bitmap() local
3755 extent_bytes = entry->bytes; in trim_no_bitmap()
3759 bytes = entry->bytes; in trim_no_bitmap()
3760 if (bytes < minlen) { in trim_no_bitmap()
3767 * Let bytes = BTRFS_MAX_DISCARD_SIZE + X. in trim_no_bitmap()
3772 bytes >= (max_discard_size + in trim_no_bitmap()
3774 bytes = max_discard_size; in trim_no_bitmap()
3777 entry->bytes -= max_discard_size; in trim_no_bitmap()
3784 bytes = min(extent_start + extent_bytes, end) - start; in trim_no_bitmap()
3785 if (bytes < minlen) { in trim_no_bitmap()
3797 trim_entry.bytes = extent_bytes; in trim_no_bitmap()
3801 ret = do_trimming(block_group, total_trimmed, start, bytes, in trim_no_bitmap()
3805 block_group->discard_cursor = start + bytes; in trim_no_bitmap()
3809 start += bytes; in trim_no_bitmap()
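
The "Let bytes = BTRFS_MAX_DISCARD_SIZE + X" comment above (line 3767; the same reasoning appears again at 3968 in trim_bitmaps()) describes the async discard cap: split a big extent into max_discard_size pieces, unless doing so would leave a remainder smaller than minlen that a later pass could never trim. The sketch below shows just that decision; the 64 MiB cap and 1 MiB minlen are made-up values.

#include <stdint.h>
#include <stdio.h>

/*
 * When an extent is larger than max_discard_size, trim only
 * max_discard_size now *unless* the leftover would drop below minlen, in
 * which case it would never be trimmed on a later pass, so trim it all.
 */
static uint64_t discard_chunk(uint64_t bytes, uint64_t max_discard_size,
                              uint64_t minlen)
{
        if (max_discard_size && bytes > max_discard_size &&
            bytes >= max_discard_size + minlen)
                return max_discard_size;
        return bytes;
}

int main(void)
{
        const uint64_t max_discard = 64ull << 20;       /* assumed 64 MiB cap    */
        const uint64_t minlen = 1ull << 20;             /* assumed 1 MiB minimum */

        /* 65.5 MiB: capping leaves 1.5 MiB, still >= minlen, so cap. */
        printf("%llu\n", (unsigned long long)
               discard_chunk((64ull << 20) + (3ull << 19), max_discard, minlen));
        /* 64.5 MiB: capping would leave 0.5 MiB < minlen, so trim it all. */
        printf("%llu\n", (unsigned long long)
               discard_chunk((64ull << 20) + (1ull << 19), max_discard, minlen));
        return 0;
}
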
3856 ctl->discardable_bytes[BTRFS_STAT_CURR] += entry->bytes; in reset_trimming_bitmap()
3871 ctl->discardable_bytes[BTRFS_STAT_CURR] -= entry->bytes; in end_trimming_bitmap()
3888 u64 bytes; in trim_bitmaps() local
3933 bytes = minlen; in trim_bitmaps()
3934 ret2 = search_bitmap(ctl, entry, &start, &bytes, false); in trim_bitmaps()
3960 bytes = min(bytes, end - start); in trim_bitmaps()
3961 if (bytes < minlen || (async && maxlen && bytes > maxlen)) { in trim_bitmaps()
3968 * Let bytes = BTRFS_MAX_DISCARD_SIZE + X. in trim_bitmaps()
3975 bytes > (max_discard_size + minlen)) in trim_bitmaps()
3976 bytes = max_discard_size; in trim_bitmaps()
3978 bitmap_clear_bits(ctl, entry, start, bytes, true); in trim_bitmaps()
3979 if (entry->bytes == 0) in trim_bitmaps()
3984 trim_entry.bytes = bytes; in trim_bitmaps()
3988 ret = do_trimming(block_group, total_trimmed, start, bytes, in trim_bitmaps()
3989 start, bytes, 0, &trim_entry); in trim_bitmaps()
4001 start += bytes; in trim_bitmaps()
4192 u64 offset, u64 bytes, bool bitmap) in test_add_free_space_entry() argument
4211 info->bytes = bytes; in test_add_free_space_entry()
4239 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in test_add_free_space_entry()
4242 bytes -= bytes_added; in test_add_free_space_entry()
4246 if (bytes) in test_add_free_space_entry()
4262 u64 offset, u64 bytes) in test_check_exists() argument
4291 offset + bytes > bit_off) { in test_check_exists()
4301 if (tmp->offset + tmp->bytes < offset) in test_check_exists()
4303 if (offset + bytes < tmp->offset) { in test_check_exists()
4315 if (offset + bytes < tmp->offset) in test_check_exists()
4317 if (tmp->offset + tmp->bytes < offset) { in test_check_exists()
4334 if (offset > info->offset && offset < info->offset + info->bytes) in test_check_exists()