Lines matching the full-word identifier "bytes" in the btrfs free space cache code. Each entry shows the source line number, the matching line, and how the identifier is used there (struct member, function argument, local variable, or a use inside the named function).
43 u64 bytes; member
53 u64 *bytes, bool for_alloc);
58 u64 bytes, bool update_stats);
582 static int io_ctl_add_entry(struct btrfs_io_ctl *io_ctl, u64 offset, u64 bytes, in io_ctl_add_entry() argument
592 put_unaligned_le64(bytes, &entry->bytes); in io_ctl_add_entry()
666 entry->bytes = get_unaligned_le64(&e->bytes); in io_ctl_read_entry()
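The io_ctl_add_entry()/io_ctl_read_entry() hits above convert the in-memory bytes field to and from the little-endian on-disk free space cache format with put_unaligned_le64()/get_unaligned_le64(). For reference, a sketch of the on-disk record they fill in, modelled on the btrfs_free_space_entry definition in the btrfs headers (quoted from memory, so treat the exact layout as an assumption):

    struct btrfs_free_space_entry {
            __le64 offset;  /* start of the free range */
            __le64 bytes;   /* length of the range, or of the bitmap's coverage */
            u8 type;        /* BTRFS_FREE_SPACE_EXTENT or BTRFS_FREE_SPACE_BITMAP */
    } __attribute__ ((__packed__));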
729 * bytes we can have, or whatever is less than that. in recalculate_thresholds()
830 if (!e->bytes) { in __load_free_space_cache()
913 const u64 bytes = info->bytes; in copy_free_space_cache() local
918 ret = btrfs_add_free_space(block_group, offset, bytes); in copy_free_space_cache()
922 u64 bytes = ctl->unit; in copy_free_space_cache() local
924 ret = search_bitmap(ctl, info, &offset, &bytes, false); in copy_free_space_cache()
926 bitmap_clear_bits(ctl, info, offset, bytes, true); in copy_free_space_cache()
929 bytes); in copy_free_space_cache()
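The copy_free_space_cache() lines show its two paths: a plain extent entry is re-added to the new block group directly (line 918), while a bitmap entry is drained one contiguous run at a time. A simplified sketch of the bitmap path, assuming the helpers behave as in the calls above (error handling trimmed):

    u64 offset = info->offset;
    u64 bytes = ctl->unit;          /* smallest run worth looking for */

    while (!ret && search_bitmap(ctl, info, &offset, &bytes, false) == 0) {
            /* Found a run of set bits covering [offset, offset + bytes). */
            bitmap_clear_bits(ctl, info, offset, bytes, true);
            ret = btrfs_add_free_space(block_group, offset, bytes);
            bytes = ctl->unit;      /* reset the minimum for the next search */
    }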
1106 ret = io_ctl_add_entry(io_ctl, e->offset, e->bytes, in write_cache_extent_entries()
1136 trim_entry->bytes, NULL); in write_cache_extent_entries()
1569 static inline unsigned long bytes_to_bits(u64 bytes, u32 unit) in bytes_to_bits() argument
1571 return (unsigned long)(div_u64(bytes, unit)); in bytes_to_bits()
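bytes_to_bits() is a straight division by the bitmap granularity, so one bitmap bit represents one allocation unit. A tiny worked example (the 4096-byte unit is only an assumption for illustration):

    /* Assuming ctl->unit == 4096: a 1 MiB free range occupies 256 bits. */
    bits = bytes_to_bits(1 << 20, 4096);    /* (1 << 20) / 4096 == 256 */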
1659 * we've found already if it's larger, or we want to use ->bytes.
1661 * This matters because find_free_space() will skip entries whose ->bytes is in find_free_space()
1662 * less than the required bytes. So if we didn't search down this bitmap, we
1665 * ->max_extent_size set to 4K and ->bytes set to 1M. A second entry hasn't set
1666 * ->max_extent_size yet, has ->bytes set to 8K and it's contiguous. We will
1682 return entry->bytes; in get_max_extent_size()
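The comment block above (lines 1659-1666) describes get_max_extent_size(), whose return statement is the line 1682 hit: report the cached max_extent_size for a bitmap that has already been searched, otherwise fall back to ->bytes. A condensed sketch of that logic:

    static inline u64 get_max_extent_size(const struct btrfs_free_space *entry)
    {
            /* Only trust ->max_extent_size once the bitmap has been searched. */
            if (entry->bitmap && entry->max_extent_size)
                    return entry->max_extent_size;
            return entry->bytes;
    }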
1702 * want a section that has at least bytes size and comes at or after the given
1759 prev->offset + prev->bytes > offset) in tree_search_offset()
1791 prev->offset + prev->bytes > offset) in tree_search_offset()
1796 } else if (entry->offset + entry->bytes > offset) in tree_search_offset()
1812 if (entry->offset + entry->bytes > offset) in tree_search_offset()
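The repeated "offset + bytes > offset" checks in tree_search_offset() decide whether an entry still covers the requested offset; for a bitmap entry the coverage is a fixed window of BITS_PER_BITMAP * ctl->unit bytes rather than ->bytes. The rule can be summed up with a hypothetical helper (entry_covers() below is not a kernel function, just an illustration):

    /* Illustration only: does 'entry' cover 'offset'? */
    static bool entry_covers(struct btrfs_free_space_ctl *ctl,
                             struct btrfs_free_space *entry, u64 offset)
    {
            u64 size = entry->bitmap ?
                       (u64)BITS_PER_BITMAP * ctl->unit : entry->bytes;

            return entry->offset <= offset && offset < entry->offset + size;
    }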
1831 ctl->discardable_bytes[BTRFS_STAT_CURR] -= info->bytes; in unlink_free_space()
1835 ctl->free_space -= info->bytes; in unlink_free_space()
1845 ASSERT(info->bytes || info->bitmap); in link_free_space()
1854 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in link_free_space()
1857 ctl->free_space += info->bytes; in link_free_space()
1869 * want to re-link it into our ctl bytes index. in relink_bitmap_entry()
1882 u64 offset, u64 bytes, bool update_stat) in bitmap_clear_bits() argument
1888 count = bytes_to_bits(bytes, ctl->unit); in bitmap_clear_bits()
1894 info->bytes -= bytes; in bitmap_clear_bits()
1909 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in bitmap_clear_bits()
1913 ctl->free_space -= bytes; in bitmap_clear_bits()
1918 u64 bytes) in bitmap_set_bits() argument
1924 count = bytes_to_bits(bytes, ctl->unit); in bitmap_set_bits()
1931 * We set some bytes, we have no idea what the max extent size is in bitmap_set_bits()
1935 info->bytes += bytes; in bitmap_set_bits()
1936 ctl->free_space += bytes; in bitmap_set_bits()
1949 ctl->discardable_bytes[BTRFS_STAT_CURR] += bytes; in bitmap_set_bits()
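bitmap_clear_bits() and bitmap_set_bits() mirror each other: both convert the byte range to a bit range and keep info->bytes, ctl->free_space and (for untrimmed entries) discardable_bytes in sync. A stripped-down sketch of the set path, assuming offset_to_bit() is the companion helper to bytes_to_bits() above; the discard accounting and the re-linking into the bytes index are left out:

    static void bitmap_set_bits(struct btrfs_free_space_ctl *ctl,
                                struct btrfs_free_space *info,
                                u64 offset, u64 bytes)
    {
            unsigned long start = offset_to_bit(info->offset, ctl->unit, offset);
            unsigned long count = bytes_to_bits(bytes, ctl->unit);

            bitmap_set(info->bitmap, start, count);

            /* "We set some bytes, we have no idea what the max extent size is"
             * (line 1931), so drop the cached value. */
            info->max_extent_size = 0;

            info->bytes += bytes;           /* line 1935 */
            ctl->free_space += bytes;       /* line 1936 */
    }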
1954 * If we cannot find a suitable extent, we will use bytes to record
1959 u64 *bytes, bool for_alloc) in search_bitmap() argument
1973 bitmap_info->max_extent_size < *bytes) { in search_bitmap()
1974 *bytes = bitmap_info->max_extent_size; in search_bitmap()
1980 bits = bytes_to_bits(*bytes, ctl->unit); in search_bitmap()
2001 *bytes = (u64)(found_bits) * ctl->unit; in search_bitmap()
2005 *bytes = (u64)(max_bits) * ctl->unit; in search_bitmap()
2006 bitmap_info->max_extent_size = *bytes; in search_bitmap()
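search_bitmap() scans runs of set bits for the first one of at least *bytes; when nothing is large enough it stores the largest run it saw in *bytes and caches it as ->max_extent_size (lines 2005-2006), which is what the comment at line 1954 and get_max_extent_size() rely on. A simplified sketch of the scan (locking and the for_alloc shortcut omitted; treat it as an approximation of the real loop):

    unsigned long i = offset_to_bit(bitmap_info->offset, ctl->unit, *offset);
    unsigned long bits = bytes_to_bits(*bytes, ctl->unit);
    unsigned long next_zero, run, found_bits = 0, max_bits = 0;

    for_each_set_bit_from(i, bitmap_info->bitmap, BITS_PER_BITMAP) {
            next_zero = find_next_zero_bit(bitmap_info->bitmap,
                                           BITS_PER_BITMAP, i);
            run = next_zero - i;
            if (run >= bits) {              /* first run that is big enough */
                    found_bits = run;
                    break;
            }
            if (run > max_bits)
                    max_bits = run;
            i = next_zero;
    }

    if (found_bits) {
            *offset = bitmap_info->offset + (u64)i * ctl->unit;
            *bytes = (u64)found_bits * ctl->unit;           /* line 2001 */
            return 0;
    }
    *bytes = (u64)max_bits * ctl->unit;                     /* line 2005 */
    bitmap_info->max_extent_size = *bytes;                  /* line 2006 */
    return -1;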
2011 /* Cache the size of the max extent in bytes */
2013 find_free_space(struct btrfs_free_space_ctl *ctl, u64 *offset, u64 *bytes, in find_free_space() argument
2044 * If we are using the bytes index then all subsequent entries in find_free_space()
2045 * in this tree are going to be < bytes, so simply set the max in find_free_space()
2051 if (entry->bytes < *bytes) { in find_free_space()
2062 if (*bytes >= align) { in find_free_space()
2073 * We don't break here if we're using the bytes index because we in find_free_space()
2079 if (entry->bytes < *bytes + align_off) { in find_free_space()
2087 u64 size = *bytes; in find_free_space()
2092 *bytes = size; in find_free_space()
2112 *bytes = entry->bytes - align_off; in find_free_space()
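In find_free_space(), align_off is the number of bytes given up at the front of an extent entry so that the returned offset is aligned; the entry only qualifies if it still holds *bytes after that (line 2079). A sketch of the extent-entry case, simplified to a plain power-of-two round_up() (the real code aligns relative to ctl->start, and the bitmap branch and bytes-index handling are omitted):

    u64 tmp = entry->offset;
    u64 align_off = 0;

    if (*bytes >= align) {                          /* line 2062: worth aligning */
            tmp = round_up(entry->offset, align);
            align_off = tmp - entry->offset;
    }

    if (entry->bytes < *bytes + align_off)          /* line 2079: too small once aligned */
            continue;                               /* try the next entry */

    *offset = tmp;
    *bytes = entry->bytes - align_off;              /* line 2112 */
    return entry;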
2123 info->bytes = 0; in add_new_bitmap()
2140 if (bitmap_info->bytes && !btrfs_free_space_trimmed(bitmap_info)) { in free_bitmap()
2143 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bitmap_info->bytes; in free_bitmap()
2155 u64 *offset, u64 *bytes) in remove_from_bitmap() argument
2179 search_bytes = min(search_bytes, *bytes); in remove_from_bitmap()
2186 *bytes -= search_bytes; in remove_from_bitmap()
2188 if (*bytes) { in remove_from_bitmap()
2190 if (!bitmap_info->bytes) in remove_from_bitmap()
2194 * no entry after this bitmap, but we still have bytes to in remove_from_bitmap()
2224 } else if (!bitmap_info->bytes) in remove_from_bitmap()
2232 u64 bytes, enum btrfs_trim_state trim_state) in add_bytes_to_bitmap() argument
2245 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in add_bytes_to_bitmap()
2252 bytes_to_set = min(end - offset, bytes); in add_bytes_to_bitmap()
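A bitmap entry only represents a window of BITS_PER_BITMAP * ctl->unit bytes starting at info->offset, so add_bytes_to_bitmap() can only absorb the part of the new range that overlaps that window; it returns how much it took and the caller loops over further bitmaps. Sketch, with the trim-state bookkeeping from line 2245 left out:

    static u64 add_bytes_to_bitmap(struct btrfs_free_space_ctl *ctl,
                                   struct btrfs_free_space *info,
                                   u64 offset, u64 bytes,
                                   enum btrfs_trim_state trim_state)
    {
            u64 end = info->offset + (u64)BITS_PER_BITMAP * ctl->unit;
            u64 bytes_to_set = min(end - offset, bytes);    /* line 2252 */

            bitmap_set_bits(ctl, info, offset, bytes_to_set);
            return bytes_to_set;    /* caller subtracts this and moves on */
    }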
2273 if (!forced && info->bytes >= FORCE_EXTENT_THRESHOLD) in use_bitmap()
2288 if (info->bytes <= fs_info->sectorsize * 8) { in use_bitmap()
2320 u64 bytes, offset, bytes_added; in insert_into_bitmap() local
2324 bytes = info->bytes; in insert_into_bitmap()
2362 bytes, trim_state); in insert_into_bitmap()
2363 bytes -= bytes_added; in insert_into_bitmap()
2367 if (!bytes) { in insert_into_bitmap()
2381 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in insert_into_bitmap()
2383 bytes -= bytes_added; in insert_into_bitmap()
2387 if (!bytes) { in insert_into_bitmap()
2459 u64 bytes = info->bytes; in try_merge_free_space() local
2468 right_info = tree_search_offset(ctl, offset + bytes, 0, 0); in try_merge_free_space()
2481 info->bytes += right_info->bytes; in try_merge_free_space()
2488 left_info->offset + left_info->bytes == offset && in try_merge_free_space()
2492 info->bytes += left_info->bytes; in try_merge_free_space()
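try_merge_free_space() coalesces the new entry with physically adjacent extent entries: a right neighbour that starts exactly at offset + bytes (line 2468) and a left neighbour that ends exactly at offset (line 2488). A simplified sketch of the two merges, assuming only plain, trim-compatible extent entries are considered (the real code also checks bitmaps and trim states, and finds the left neighbour via rb_prev() when possible):

    right_info = tree_search_offset(ctl, offset + bytes, 0, 0);
    left_info = tree_search_offset(ctl, offset - 1, 0, 0);

    if (right_info && !right_info->bitmap) {
            unlink_free_space(ctl, right_info, update_stat);
            info->bytes += right_info->bytes;               /* line 2481 */
            kmem_cache_free(btrfs_free_space_cachep, right_info);
    }

    if (left_info && !left_info->bitmap &&
        left_info->offset + left_info->bytes == offset) {   /* line 2488 */
            unlink_free_space(ctl, left_info, update_stat);
            info->offset = left_info->offset;
            info->bytes += left_info->bytes;                /* line 2492 */
            kmem_cache_free(btrfs_free_space_cachep, left_info);
    }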
2507 const u64 end = info->offset + info->bytes; in steal_from_bitmap_to_end()
2509 u64 bytes; in steal_from_bitmap_to_end() local
2519 bytes = (j - i) * ctl->unit; in steal_from_bitmap_to_end()
2520 info->bytes += bytes; in steal_from_bitmap_to_end()
2526 bitmap_clear_bits(ctl, bitmap, end, bytes, update_stat); in steal_from_bitmap_to_end()
2528 if (!bitmap->bytes) in steal_from_bitmap_to_end()
2543 u64 bytes; in steal_from_bitmap_to_front() local
2569 bytes = (i + 1) * ctl->unit; in steal_from_bitmap_to_front()
2571 bytes = (i - prev_j) * ctl->unit; in steal_from_bitmap_to_front()
2573 info->offset -= bytes; in steal_from_bitmap_to_front()
2574 info->bytes += bytes; in steal_from_bitmap_to_front()
2580 bitmap_clear_bits(ctl, bitmap, info->offset, bytes, update_stat); in steal_from_bitmap_to_front()
2582 if (!bitmap->bytes) in steal_from_bitmap_to_front()
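steal_from_bitmap_to_end() grows a freshly merged extent entry by absorbing the run of set bits that begins exactly where the extent ends; steal_from_bitmap_to_front() does the symmetric thing at the entry's start (lines 2543-2580). A condensed sketch of the to-end direction, assuming 'bitmap' is the bitmap entry whose window contains 'end':

    u64 end = info->offset + info->bytes;               /* line 2507 */
    unsigned long i = offset_to_bit(bitmap->offset, ctl->unit, end);
    unsigned long j = find_next_zero_bit(bitmap->bitmap, BITS_PER_BITMAP, i);
    u64 bytes;

    if (j == i)
            return;     /* the bit right after the extent is clear: nothing to steal */

    bytes = (u64)(j - i) * ctl->unit;                   /* line 2519 */
    info->bytes += bytes;                               /* line 2520 */
    bitmap_clear_bits(ctl, bitmap, end, bytes, update_stat);   /* line 2526 */

    if (!bitmap->bytes)                                 /* line 2528 */
            free_bitmap(ctl, bitmap);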
2625 u64 offset, u64 bytes, in __btrfs_add_free_space() argument
2632 u64 filter_bytes = bytes; in __btrfs_add_free_space()
2641 info->bytes = bytes; in __btrfs_add_free_space()
2672 filter_bytes = max(filter_bytes, info->bytes); in __btrfs_add_free_space()
2798 u64 offset, u64 bytes) in btrfs_remove_free_space() argument
2818 offset + bytes) { in btrfs_remove_free_space()
2820 offset + bytes - block_group->start; in btrfs_remove_free_space()
2829 if (!bytes) in btrfs_remove_free_space()
2855 u64 to_free = min(bytes, info->bytes); in btrfs_remove_free_space()
2857 info->bytes -= to_free; in btrfs_remove_free_space()
2859 if (info->bytes) { in btrfs_remove_free_space()
2867 bytes -= to_free; in btrfs_remove_free_space()
2870 u64 old_end = info->bytes + info->offset; in btrfs_remove_free_space()
2872 info->bytes = offset - info->offset; in btrfs_remove_free_space()
2878 /* Not enough bytes in this entry to satisfy us */ in btrfs_remove_free_space()
2879 if (old_end < offset + bytes) { in btrfs_remove_free_space()
2880 bytes -= old_end - offset; in btrfs_remove_free_space()
2883 } else if (old_end == offset + bytes) { in btrfs_remove_free_space()
2890 offset + bytes, in btrfs_remove_free_space()
2891 old_end - (offset + bytes), in btrfs_remove_free_space()
2898 ret = remove_from_bitmap(ctl, info, &offset, &bytes); in btrfs_remove_free_space()
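The branch around lines 2870-2891 handles a removal range that starts inside an extent entry: the entry is truncated to the part before the hole, anything beyond the hole is re-added as fresh free space, and if the entry ends before the hole does, the loop keeps removing from the following entries. A condensed sketch, assuming 'info' is the extent entry containing 'offset':

    u64 old_end = info->offset + info->bytes;           /* line 2870 */

    /* Keep the head of the entry: [info->offset, offset). */
    info->bytes = offset - info->offset;                /* line 2872 */

    if (old_end > offset + bytes) {
            /* Re-add the tail beyond the removed range (lines 2890-2891). */
            ret = btrfs_add_free_space(block_group, offset + bytes,
                                       old_end - (offset + bytes));
    } else if (old_end < offset + bytes) {
            /* Not enough bytes in this entry; keep removing from the next one. */
            bytes -= old_end - offset;                  /* line 2880 */
            offset = old_end;
    }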
2911 u64 bytes) in btrfs_dump_free_space() argument
2934 if (info->bytes >= bytes && !block_group->ro) in btrfs_dump_free_space()
2936 btrfs_crit(fs_info, "entry offset %llu, bytes %llu, bitmap %s", in btrfs_dump_free_space()
2937 info->offset, info->bytes, in btrfs_dump_free_space()
2944 "%d free space entries at or bigger than %llu bytes", in btrfs_dump_free_space()
2945 count, bytes); in btrfs_dump_free_space()
3009 entry->bytes; in __btrfs_return_cluster_to_free_space()
3019 entry->bytes; in __btrfs_return_cluster_to_free_space()
3083 u64 offset, u64 bytes, u64 empty_size, in btrfs_find_space_for_alloc() argument
3090 u64 bytes_search = bytes + empty_size; in btrfs_find_space_for_alloc()
3108 bitmap_clear_bits(ctl, entry, offset, bytes, true); in btrfs_find_space_for_alloc()
3111 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_find_space_for_alloc()
3113 if (!entry->bytes) in btrfs_find_space_for_alloc()
3122 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_find_space_for_alloc()
3124 entry->offset = offset + bytes; in btrfs_find_space_for_alloc()
3125 WARN_ON(entry->bytes < bytes + align_gap_len); in btrfs_find_space_for_alloc()
3127 entry->bytes -= bytes + align_gap_len; in btrfs_find_space_for_alloc()
3128 if (!entry->bytes) in btrfs_find_space_for_alloc()
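For a plain extent entry, btrfs_find_space_for_alloc() carves the allocation off the front: the entry's offset is advanced past the returned range plus any alignment gap, its size shrinks by the same amount (lines 3124-3127), and the skipped alignment gap is handed back as free space. A sketch of that path, assuming find_free_space() produced 'entry', 'offset' and 'align_gap_len' as in the lines above:

    unlink_free_space(ctl, entry, true);
    align_gap = entry->offset;                  /* start of the skipped gap */
    align_gap_len = offset - entry->offset;

    entry->offset = offset + bytes;             /* line 3124 */
    WARN_ON(entry->bytes < bytes + align_gap_len);      /* line 3125 */
    entry->bytes -= bytes + align_gap_len;      /* line 3127 */

    if (!entry->bytes)                          /* line 3128: fully consumed */
            kmem_cache_free(btrfs_free_space_cachep, entry);
    else
            link_free_space(ctl, entry);

    if (align_gap_len)
            __btrfs_add_free_space(block_group, align_gap, align_gap_len,
                                   align_gap_trim_state);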
3189 u64 bytes, u64 min_start, in btrfs_alloc_from_bitmap() argument
3195 u64 search_bytes = bytes; in btrfs_alloc_from_bitmap()
3199 search_bytes = bytes; in btrfs_alloc_from_bitmap()
3209 bitmap_clear_bits(ctl, entry, ret, bytes, false); in btrfs_alloc_from_bitmap()
3215 * given a cluster, try to allocate 'bytes' from it, returns 0
3220 struct btrfs_free_cluster *cluster, u64 bytes, in btrfs_alloc_from_cluster() argument
3233 if (bytes > cluster->max_size) in btrfs_alloc_from_cluster()
3245 if (entry->bytes < bytes) in btrfs_alloc_from_cluster()
3249 if (entry->bytes < bytes || in btrfs_alloc_from_cluster()
3261 cluster, entry, bytes, in btrfs_alloc_from_cluster()
3272 cluster->window_start += bytes; in btrfs_alloc_from_cluster()
3276 entry->offset += bytes; in btrfs_alloc_from_cluster()
3277 entry->bytes -= bytes; in btrfs_alloc_from_cluster()
3291 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_alloc_from_cluster()
3293 ctl->free_space -= bytes; in btrfs_alloc_from_cluster()
3295 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in btrfs_alloc_from_cluster()
3298 if (entry->bytes == 0) { in btrfs_alloc_from_cluster()
3321 u64 offset, u64 bytes, in btrfs_bitmap_cluster() argument
3339 want_bits = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_cluster()
3408 * Try to find a cluster with at least 'bytes' total bytes, at least one
3414 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_no_bitmap() argument
3436 while (entry->bitmap || entry->bytes < min_bytes) { in setup_cluster_no_bitmap()
3445 window_free = entry->bytes; in setup_cluster_no_bitmap()
3446 max_extent = entry->bytes; in setup_cluster_no_bitmap()
3460 if (entry->bytes < min_bytes) in setup_cluster_no_bitmap()
3464 window_free += entry->bytes; in setup_cluster_no_bitmap()
3465 if (entry->bytes > max_extent) in setup_cluster_no_bitmap()
3466 max_extent = entry->bytes; in setup_cluster_no_bitmap()
3469 if (window_free < bytes || max_extent < cont1_bytes) in setup_cluster_no_bitmap()
3485 if (entry->bitmap || entry->bytes < min_bytes) in setup_cluster_no_bitmap()
3491 total_size += entry->bytes; in setup_cluster_no_bitmap()
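setup_cluster_no_bitmap() scans forward from the first usable extent entry, accumulating sizes into window_free and tracking the largest single extent; the candidate window is only accepted if window_free reaches 'bytes' and max_extent reaches cont1_bytes (line 3469), after which the qualifying entries are moved into the cluster (line 3491). Sketch of the scan, using a hypothetical next_extent_entry() in place of the real rbtree iteration:

    window_free = entry->bytes;                         /* line 3445 */
    max_extent = entry->bytes;                          /* line 3446 */
    last = entry;

    for (entry = next_extent_entry(entry); entry;
         entry = next_extent_entry(entry)) {
            if (entry->bitmap || entry->bytes < min_bytes)
                    continue;                           /* bitmaps are handled separately */
            last = entry;
            window_free += entry->bytes;                /* line 3464 */
            if (entry->bytes > max_extent)
                    max_extent = entry->bytes;          /* line 3466 */
    }

    if (window_free < bytes || max_extent < cont1_bytes)        /* line 3469 */
            return -ENOSPC;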
3507 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_bitmap() argument
3532 if (entry->bytes < bytes) in setup_cluster_bitmap()
3535 bytes, cont1_bytes, min_bytes); in setup_cluster_bitmap()
3549 * is to find at least bytes+empty_size.
3557 u64 offset, u64 bytes, u64 empty_size) in btrfs_find_space_cluster() argument
3574 cont1_bytes = bytes + empty_size; in btrfs_find_space_cluster()
3577 cont1_bytes = bytes; in btrfs_find_space_cluster()
3580 cont1_bytes = max(bytes, (bytes + empty_size) >> 2); in btrfs_find_space_cluster()
3590 if (ctl->free_space < bytes) { in btrfs_find_space_cluster()
3603 trace_btrfs_find_cluster(block_group, offset, bytes, empty_size, in btrfs_find_space_cluster()
3607 bytes + empty_size, in btrfs_find_space_cluster()
3611 offset, bytes + empty_size, in btrfs_find_space_cluster()
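Lines 3574-3580 choose cont1_bytes, the largest single extent the new cluster must contain: with ssd_spread the whole request must be contiguous, on ssd the requested size alone is enough, and on rotational media a quarter of the request will do. Sketch of that selection (mount-option names as in the kernel; the related min_bytes handling is omitted):

    if (btrfs_test_opt(fs_info, SSD_SPREAD))
            cont1_bytes = bytes + empty_size;                           /* line 3574 */
    else if (btrfs_test_opt(fs_info, SSD))
            cont1_bytes = bytes;                                        /* line 3577 */
    else
            cont1_bytes = max(bytes, (bytes + empty_size) >> 2);        /* line 3580 */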
3648 u64 *total_trimmed, u64 start, u64 bytes, in do_trimming() argument
3658 const u64 end = start + bytes; in do_trimming()
3673 ret = btrfs_discard_extent(fs_info, start, bytes, &trimmed); in do_trimming()
3687 __btrfs_add_free_space(block_group, start, bytes, trim_state); in do_trimming()
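do_trimming() takes the range out of circulation, issues the discard, and then re-inserts the range with its trim state updated, so only the range actually being discarded is held in a btrfs_trim_range entry while the rest of the block group stays allocatable. A minimal sketch of the central steps (reservation bookkeeping and error paths dropped):

    u64 trimmed = 0;

    ret = btrfs_discard_extent(fs_info, start, bytes, &trimmed);        /* line 3673 */
    if (!ret)
            *total_trimmed += trimmed;

    /* Give the range back, now marked as trimmed. */
    __btrfs_add_free_space(block_group, start, bytes,
                           BTRFS_TRIM_STATE_TRIMMED);                   /* line 3687 */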
3721 u64 bytes; in trim_no_bitmap() local
3751 extent_bytes = entry->bytes; in trim_no_bitmap()
3755 bytes = entry->bytes; in trim_no_bitmap()
3756 if (bytes < minlen) { in trim_no_bitmap()
3763 * Let bytes = BTRFS_MAX_DISCARD_SIZE + X. in trim_no_bitmap()
3768 bytes >= (max_discard_size + in trim_no_bitmap()
3770 bytes = max_discard_size; in trim_no_bitmap()
3773 entry->bytes -= max_discard_size; in trim_no_bitmap()
3780 bytes = min(extent_start + extent_bytes, end) - start; in trim_no_bitmap()
3781 if (bytes < minlen) { in trim_no_bitmap()
3793 trim_entry.bytes = extent_bytes; in trim_no_bitmap()
3797 ret = do_trimming(block_group, total_trimmed, start, bytes, in trim_no_bitmap()
3801 block_group->discard_cursor = start + bytes; in trim_no_bitmap()
3805 start += bytes; in trim_no_bitmap()
3852 ctl->discardable_bytes[BTRFS_STAT_CURR] += entry->bytes; in reset_trimming_bitmap()
3867 ctl->discardable_bytes[BTRFS_STAT_CURR] -= entry->bytes; in end_trimming_bitmap()
3884 u64 bytes; in trim_bitmaps() local
3929 bytes = minlen; in trim_bitmaps()
3930 ret2 = search_bitmap(ctl, entry, &start, &bytes, false); in trim_bitmaps()
3956 bytes = min(bytes, end - start); in trim_bitmaps()
3957 if (bytes < minlen || (async && maxlen && bytes > maxlen)) { in trim_bitmaps()
3964 * Let bytes = BTRFS_MAX_DISCARD_SIZE + X. in trim_bitmaps()
3971 bytes > (max_discard_size + minlen)) in trim_bitmaps()
3972 bytes = max_discard_size; in trim_bitmaps()
3974 bitmap_clear_bits(ctl, entry, start, bytes, true); in trim_bitmaps()
3975 if (entry->bytes == 0) in trim_bitmaps()
3980 trim_entry.bytes = bytes; in trim_bitmaps()
3984 ret = do_trimming(block_group, total_trimmed, start, bytes, in trim_bitmaps()
3985 start, bytes, 0, &trim_entry); in trim_bitmaps()
3997 start += bytes; in trim_bitmaps()
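trim_bitmaps() applies the same BTRFS_MAX_DISCARD_SIZE reasoning spelled out in the comment at line 3964: if discarding the whole run would exceed max_discard_size and the remainder would still be at least minlen, only max_discard_size is trimmed now and the rest of the run stays set in the bitmap for a later pass. Condensed sketch of that step, assuming 'entry' is the bitmap being trimmed:

    bytes = min(bytes, end - start);                    /* line 3956 */
    if (bytes < minlen || (async && maxlen && bytes > maxlen))
            goto next;                                  /* line 3957: skip this run */

    if (async && max_discard_size &&
        bytes > max_discard_size + minlen)              /* line 3971 */
            bytes = max_discard_size;                   /* line 3972: leave the rest for later */

    bitmap_clear_bits(ctl, entry, start, bytes, true);  /* line 3974 */
    if (entry->bytes == 0)                              /* line 3975 */
            free_bitmap(ctl, entry);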
4190 u64 offset, u64 bytes, bool bitmap) in test_add_free_space_entry() argument
4209 info->bytes = bytes; in test_add_free_space_entry()
4237 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in test_add_free_space_entry()
4240 bytes -= bytes_added; in test_add_free_space_entry()
4244 if (bytes) in test_add_free_space_entry()
4260 u64 offset, u64 bytes) in test_check_exists() argument
4289 offset + bytes > bit_off) { in test_check_exists()
4299 if (tmp->offset + tmp->bytes < offset) in test_check_exists()
4301 if (offset + bytes < tmp->offset) { in test_check_exists()
4313 if (offset + bytes < tmp->offset) in test_check_exists()
4315 if (tmp->offset + tmp->bytes < offset) { in test_check_exists()
4332 if (offset > info->offset && offset < info->offset + info->bytes) in test_check_exists()