Lines Matching full:ctl

46 static int link_free_space(struct btrfs_free_space_ctl *ctl,
48 static void unlink_free_space(struct btrfs_free_space_ctl *ctl,
50 static int search_bitmap(struct btrfs_free_space_ctl *ctl,
53 static void free_bitmap(struct btrfs_free_space_ctl *ctl,
55 static void bitmap_clear_bits(struct btrfs_free_space_ctl *ctl,
64 static void __btrfs_remove_free_space_cache(struct btrfs_free_space_ctl *ctl) in __btrfs_remove_free_space_cache() argument
69 while ((node = rb_last(&ctl->free_space_offset)) != NULL) { in __btrfs_remove_free_space_cache()
72 unlink_free_space(ctl, info, true); in __btrfs_remove_free_space_cache()
75 free_bitmap(ctl, info); in __btrfs_remove_free_space_cache()
78 cond_resched_lock(&ctl->tree_lock); in __btrfs_remove_free_space_cache()
689 static void recalculate_thresholds(struct btrfs_free_space_ctl *ctl) in recalculate_thresholds() argument
691 struct btrfs_block_group *block_group = ctl->block_group; in recalculate_thresholds()
696 u64 bytes_per_bg = BITS_PER_BITMAP * ctl->unit; in recalculate_thresholds()
701 if (ctl->total_bitmaps > max_bitmaps) in recalculate_thresholds()
705 ctl->total_bitmaps, ctl->unit, max_bitmaps, in recalculate_thresholds()
707 ASSERT(ctl->total_bitmaps <= max_bitmaps); in recalculate_thresholds()
720 bitmap_bytes = ctl->total_bitmaps * ctl->unit; in recalculate_thresholds()
729 ctl->extents_thresh = in recalculate_thresholds()
734 struct btrfs_free_space_ctl *ctl, in __load_free_space_cache() argument
832 spin_lock(&ctl->tree_lock); in __load_free_space_cache()
833 ret = link_free_space(ctl, e); in __load_free_space_cache()
834 spin_unlock(&ctl->tree_lock); in __load_free_space_cache()
852 spin_lock(&ctl->tree_lock); in __load_free_space_cache()
853 ret = link_free_space(ctl, e); in __load_free_space_cache()
855 spin_unlock(&ctl->tree_lock); in __load_free_space_cache()
862 ctl->total_bitmaps++; in __load_free_space_cache()
863 recalculate_thresholds(ctl); in __load_free_space_cache()
864 spin_unlock(&ctl->tree_lock); in __load_free_space_cache()
892 spin_lock(&ctl->tree_lock); in __load_free_space_cache()
893 __btrfs_remove_free_space_cache(ctl); in __load_free_space_cache()
894 spin_unlock(&ctl->tree_lock); in __load_free_space_cache()
899 struct btrfs_free_space_ctl *ctl) in copy_free_space_cache() argument
905 while (!ret && (n = rb_first(&ctl->free_space_offset)) != NULL) { in copy_free_space_cache()
911 unlink_free_space(ctl, info, true); in copy_free_space_cache()
912 spin_unlock(&ctl->tree_lock); in copy_free_space_cache()
915 spin_lock(&ctl->tree_lock); in copy_free_space_cache()
918 u64 bytes = ctl->unit; in copy_free_space_cache()
920 ret = search_bitmap(ctl, info, &offset, &bytes, false); in copy_free_space_cache()
922 bitmap_clear_bits(ctl, info, offset, bytes, true); in copy_free_space_cache()
923 spin_unlock(&ctl->tree_lock); in copy_free_space_cache()
926 spin_lock(&ctl->tree_lock); in copy_free_space_cache()
928 free_bitmap(ctl, info); in copy_free_space_cache()
932 cond_resched_lock(&ctl->tree_lock); in copy_free_space_cache()
942 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in load_free_space_cache() local
953 * valid copy it all into the actual free space ctl. in load_free_space_cache()
1061 spin_lock(&ctl->tree_lock); in load_free_space_cache()
1063 spin_unlock(&ctl->tree_lock); in load_free_space_cache()
1070 struct btrfs_free_space_ctl *ctl, in write_cache_extent_entries() argument
1078 struct rb_node *node = rb_first(&ctl->free_space_offset); in write_cache_extent_entries()
1130 list_for_each_entry(trim_entry, &ctl->trimming_ranges, list) { in write_cache_extent_entries()
1366 * @ctl: free space cache we are going to write out
1376 struct btrfs_free_space_ctl *ctl, in __btrfs_write_out_cache() argument
1422 mutex_lock(&ctl->cache_writeout_mutex); in __btrfs_write_out_cache()
1424 spin_lock(&ctl->tree_lock); in __btrfs_write_out_cache()
1425 ret = write_cache_extent_entries(io_ctl, ctl, in __btrfs_write_out_cache()
1449 spin_unlock(&ctl->tree_lock); in __btrfs_write_out_cache()
1450 mutex_unlock(&ctl->cache_writeout_mutex); in __btrfs_write_out_cache()
1497 spin_unlock(&ctl->tree_lock); in __btrfs_write_out_cache()
1498 mutex_unlock(&ctl->cache_writeout_mutex); in __btrfs_write_out_cache()
1525 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_write_out_cache() local
1540 ret = __btrfs_write_out_cache(inode, ctl, block_group, in btrfs_write_out_cache()
1575 static inline u64 offset_to_bitmap(struct btrfs_free_space_ctl *ctl, in offset_to_bitmap() argument
1581 bytes_per_bitmap = BITS_PER_BITMAP * ctl->unit; in offset_to_bitmap()
1582 bitmap_start = offset - ctl->start; in offset_to_bitmap()
1585 bitmap_start += ctl->start; in offset_to_bitmap()
1590 static int tree_insert_offset(struct btrfs_free_space_ctl *ctl, in tree_insert_offset() argument
1598 lockdep_assert_held(&ctl->tree_lock); in tree_insert_offset()
1604 root = &ctl->free_space_offset; in tree_insert_offset()
1707 tree_search_offset(struct btrfs_free_space_ctl *ctl, in tree_search_offset() argument
1710 struct rb_node *n = ctl->free_space_offset.rb_node; in tree_search_offset()
1713 lockdep_assert_held(&ctl->tree_lock); in tree_search_offset()
1795 if (entry->offset + BITS_PER_BITMAP * ctl->unit > offset) in tree_search_offset()
1810 ctl->unit > offset) in tree_search_offset()
1820 static inline void unlink_free_space(struct btrfs_free_space_ctl *ctl, in unlink_free_space() argument
1824 lockdep_assert_held(&ctl->tree_lock); in unlink_free_space()
1826 rb_erase(&info->offset_index, &ctl->free_space_offset); in unlink_free_space()
1827 rb_erase_cached(&info->bytes_index, &ctl->free_space_bytes); in unlink_free_space()
1828 ctl->free_extents--; in unlink_free_space()
1831 ctl->discardable_extents[BTRFS_STAT_CURR]--; in unlink_free_space()
1832 ctl->discardable_bytes[BTRFS_STAT_CURR] -= info->bytes; in unlink_free_space()
1836 ctl->free_space -= info->bytes; in unlink_free_space()
1839 static int link_free_space(struct btrfs_free_space_ctl *ctl, in link_free_space() argument
1844 lockdep_assert_held(&ctl->tree_lock); in link_free_space()
1847 ret = tree_insert_offset(ctl, NULL, info); in link_free_space()
1851 rb_add_cached(&info->bytes_index, &ctl->free_space_bytes, entry_less); in link_free_space()
1854 ctl->discardable_extents[BTRFS_STAT_CURR]++; in link_free_space()
1855 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in link_free_space()
1858 ctl->free_space += info->bytes; in link_free_space()
1859 ctl->free_extents++; in link_free_space()
1863 static void relink_bitmap_entry(struct btrfs_free_space_ctl *ctl, in relink_bitmap_entry() argument
1870 * want to re-link it into our ctl bytes index. in relink_bitmap_entry()
1875 lockdep_assert_held(&ctl->tree_lock); in relink_bitmap_entry()
1877 rb_erase_cached(&info->bytes_index, &ctl->free_space_bytes); in relink_bitmap_entry()
1878 rb_add_cached(&info->bytes_index, &ctl->free_space_bytes, entry_less); in relink_bitmap_entry()
1881 static inline void bitmap_clear_bits(struct btrfs_free_space_ctl *ctl, in bitmap_clear_bits() argument
1888 start = offset_to_bit(info->offset, ctl->unit, offset); in bitmap_clear_bits()
1889 count = bytes_to_bits(bytes, ctl->unit); in bitmap_clear_bits()
1896 if (info->max_extent_size > ctl->unit) in bitmap_clear_bits()
1899 relink_bitmap_entry(ctl, info); in bitmap_clear_bits()
1909 ctl->discardable_extents[BTRFS_STAT_CURR] += extent_delta; in bitmap_clear_bits()
1910 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in bitmap_clear_bits()
1914 ctl->free_space -= bytes; in bitmap_clear_bits()
1917 static void btrfs_bitmap_set_bits(struct btrfs_free_space_ctl *ctl, in btrfs_bitmap_set_bits() argument
1924 start = offset_to_bit(info->offset, ctl->unit, offset); in btrfs_bitmap_set_bits()
1925 count = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_set_bits()
1937 ctl->free_space += bytes; in btrfs_bitmap_set_bits()
1939 relink_bitmap_entry(ctl, info); in btrfs_bitmap_set_bits()
1949 ctl->discardable_extents[BTRFS_STAT_CURR] += extent_delta; in btrfs_bitmap_set_bits()
1950 ctl->discardable_bytes[BTRFS_STAT_CURR] += bytes; in btrfs_bitmap_set_bits()
1958 static int search_bitmap(struct btrfs_free_space_ctl *ctl, in search_bitmap() argument
1979 i = offset_to_bit(bitmap_info->offset, ctl->unit, in search_bitmap()
1981 bits = bytes_to_bits(*bytes, ctl->unit); in search_bitmap()
2001 *offset = (u64)(i * ctl->unit) + bitmap_info->offset; in search_bitmap()
2002 *bytes = (u64)(found_bits) * ctl->unit; in search_bitmap()
2006 *bytes = (u64)(max_bits) * ctl->unit; in search_bitmap()
2008 relink_bitmap_entry(ctl, bitmap_info); in search_bitmap()
2014 find_free_space(struct btrfs_free_space_ctl *ctl, u64 *offset, u64 *bytes, in find_free_space() argument
2023 if (!ctl->free_space_offset.rb_node) in find_free_space()
2027 node = rb_first_cached(&ctl->free_space_bytes); in find_free_space()
2029 entry = tree_search_offset(ctl, offset_to_bitmap(ctl, *offset), in find_free_space()
2064 tmp = entry->offset - ctl->start + align - 1; in find_free_space()
2066 tmp = tmp * align + ctl->start; in find_free_space()
2090 ret = search_bitmap(ctl, entry, &tmp, &size, true); in find_free_space()
2120 static void add_new_bitmap(struct btrfs_free_space_ctl *ctl, in add_new_bitmap() argument
2123 info->offset = offset_to_bitmap(ctl, offset); in add_new_bitmap()
2127 link_free_space(ctl, info); in add_new_bitmap()
2128 ctl->total_bitmaps++; in add_new_bitmap()
2129 recalculate_thresholds(ctl); in add_new_bitmap()
2132 static void free_bitmap(struct btrfs_free_space_ctl *ctl, in free_bitmap() argument
2142 ctl->discardable_extents[BTRFS_STAT_CURR] -= in free_bitmap()
2144 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bitmap_info->bytes; in free_bitmap()
2147 unlink_free_space(ctl, bitmap_info, true); in free_bitmap()
2150 ctl->total_bitmaps--; in free_bitmap()
2151 recalculate_thresholds(ctl); in free_bitmap()
2154 static noinline int remove_from_bitmap(struct btrfs_free_space_ctl *ctl, in remove_from_bitmap() argument
2163 end = bitmap_info->offset + (u64)(BITS_PER_BITMAP * ctl->unit) - 1; in remove_from_bitmap()
2172 search_bytes = ctl->unit; in remove_from_bitmap()
2174 ret = search_bitmap(ctl, bitmap_info, &search_start, &search_bytes, in remove_from_bitmap()
2185 bitmap_clear_bits(ctl, bitmap_info, search_start, search_bytes, true); in remove_from_bitmap()
2192 free_bitmap(ctl, bitmap_info); in remove_from_bitmap()
2218 search_bytes = ctl->unit; in remove_from_bitmap()
2219 ret = search_bitmap(ctl, bitmap_info, &search_start, in remove_from_bitmap()
2226 free_bitmap(ctl, bitmap_info); in remove_from_bitmap()
2231 static u64 add_bytes_to_bitmap(struct btrfs_free_space_ctl *ctl, in add_bytes_to_bitmap() argument
2244 ctl->discardable_extents[BTRFS_STAT_CURR] += in add_bytes_to_bitmap()
2246 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in add_bytes_to_bitmap()
2251 end = info->offset + (u64)(BITS_PER_BITMAP * ctl->unit); in add_bytes_to_bitmap()
2255 btrfs_bitmap_set_bits(ctl, info, offset, bytes_to_set); in add_bytes_to_bitmap()
2261 static bool use_bitmap(struct btrfs_free_space_ctl *ctl, in use_bitmap() argument
2264 struct btrfs_block_group *block_group = ctl->block_group; in use_bitmap()
2281 if (!forced && ctl->free_extents < ctl->extents_thresh) { in use_bitmap()
2290 if (ctl->free_extents * 3 <= ctl->extents_thresh) in use_bitmap()
2305 if (((BITS_PER_BITMAP * ctl->unit) >> 1) > block_group->length) in use_bitmap()
2315 static int insert_into_bitmap(struct btrfs_free_space_ctl *ctl, in insert_into_bitmap() argument
2329 if (!ctl->op->use_bitmap(ctl, info)) in insert_into_bitmap()
2332 if (ctl->op == &free_space_op) in insert_into_bitmap()
2333 block_group = ctl->block_group; in insert_into_bitmap()
2361 if (entry->offset == offset_to_bitmap(ctl, offset)) { in insert_into_bitmap()
2362 bytes_added = add_bytes_to_bitmap(ctl, entry, offset, in insert_into_bitmap()
2375 bitmap_info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in insert_into_bitmap()
2382 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in insert_into_bitmap()
2396 add_new_bitmap(ctl, info, offset); in insert_into_bitmap()
2401 spin_unlock(&ctl->tree_lock); in insert_into_bitmap()
2408 spin_lock(&ctl->tree_lock); in insert_into_bitmap()
2418 spin_lock(&ctl->tree_lock); in insert_into_bitmap()
2453 static bool try_merge_free_space(struct btrfs_free_space_ctl *ctl, in try_merge_free_space() argument
2469 right_info = tree_search_offset(ctl, offset + bytes, 0, 0); in try_merge_free_space()
2476 left_info = tree_search_offset(ctl, offset - 1, 0, 0); in try_merge_free_space()
2481 unlink_free_space(ctl, right_info, update_stat); in try_merge_free_space()
2491 unlink_free_space(ctl, left_info, update_stat); in try_merge_free_space()
2501 static bool steal_from_bitmap_to_end(struct btrfs_free_space_ctl *ctl, in steal_from_bitmap_to_end() argument
2509 const u64 bitmap_offset = offset_to_bitmap(ctl, end); in steal_from_bitmap_to_end()
2512 bitmap = tree_search_offset(ctl, bitmap_offset, 1, 0); in steal_from_bitmap_to_end()
2516 i = offset_to_bit(bitmap->offset, ctl->unit, end); in steal_from_bitmap_to_end()
2520 bytes = (j - i) * ctl->unit; in steal_from_bitmap_to_end()
2527 bitmap_clear_bits(ctl, bitmap, end, bytes, update_stat); in steal_from_bitmap_to_end()
2530 free_bitmap(ctl, bitmap); in steal_from_bitmap_to_end()
2535 static bool steal_from_bitmap_to_front(struct btrfs_free_space_ctl *ctl, in steal_from_bitmap_to_front() argument
2546 bitmap_offset = offset_to_bitmap(ctl, info->offset); in steal_from_bitmap_to_front()
2551 bitmap_offset = offset_to_bitmap(ctl, info->offset - 1); in steal_from_bitmap_to_front()
2554 bitmap = tree_search_offset(ctl, bitmap_offset, 1, 0); in steal_from_bitmap_to_front()
2558 i = offset_to_bit(bitmap->offset, ctl->unit, info->offset) - 1; in steal_from_bitmap_to_front()
2570 bytes = (i + 1) * ctl->unit; in steal_from_bitmap_to_front()
2572 bytes = (i - prev_j) * ctl->unit; in steal_from_bitmap_to_front()
2581 bitmap_clear_bits(ctl, bitmap, info->offset, bytes, update_stat); in steal_from_bitmap_to_front()
2584 free_bitmap(ctl, bitmap); in steal_from_bitmap_to_front()
2600 static void steal_from_bitmap(struct btrfs_free_space_ctl *ctl, in steal_from_bitmap() argument
2611 if (ctl->total_bitmaps > 0) { in steal_from_bitmap()
2615 stole_end = steal_from_bitmap_to_end(ctl, info, update_stat); in steal_from_bitmap()
2616 if (ctl->total_bitmaps > 0) in steal_from_bitmap()
2617 stole_front = steal_from_bitmap_to_front(ctl, info, in steal_from_bitmap()
2621 try_merge_free_space(ctl, info, update_stat); in steal_from_bitmap()
2630 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in __btrfs_add_free_space() local
2647 spin_lock(&ctl->tree_lock); in __btrfs_add_free_space()
2649 if (try_merge_free_space(ctl, info, true)) in __btrfs_add_free_space()
2657 ret = insert_into_bitmap(ctl, info); in __btrfs_add_free_space()
2671 steal_from_bitmap(ctl, info, true); in __btrfs_add_free_space()
2675 ret = link_free_space(ctl, info); in __btrfs_add_free_space()
2680 spin_unlock(&ctl->tree_lock); in __btrfs_add_free_space()
2699 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in __btrfs_add_free_space_zoned() local
2725 spin_lock(&ctl->tree_lock); in __btrfs_add_free_space_zoned()
2726 ctl->free_space += to_free; in __btrfs_add_free_space_zoned()
2727 spin_unlock(&ctl->tree_lock); in __btrfs_add_free_space_zoned()
2805 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_remove_free_space() local
2830 spin_lock(&ctl->tree_lock); in btrfs_remove_free_space()
2837 info = tree_search_offset(ctl, offset, 0, 0); in btrfs_remove_free_space()
2843 info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in btrfs_remove_free_space()
2858 unlink_free_space(ctl, info, true); in btrfs_remove_free_space()
2865 ret = link_free_space(ctl, info); in btrfs_remove_free_space()
2878 ret = link_free_space(ctl, info); in btrfs_remove_free_space()
2892 spin_unlock(&ctl->tree_lock); in btrfs_remove_free_space()
2903 ret = remove_from_bitmap(ctl, info, &offset, &bytes); in btrfs_remove_free_space()
2910 spin_unlock(&ctl->tree_lock); in btrfs_remove_free_space()
2919 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_dump_free_space() local
2936 spin_lock(&ctl->tree_lock); in btrfs_dump_free_space()
2937 for (n = rb_first(&ctl->free_space_offset); n; n = rb_next(n)) { in btrfs_dump_free_space()
2944 spin_unlock(&ctl->tree_lock); in btrfs_dump_free_space()
2953 struct btrfs_free_space_ctl *ctl) in btrfs_init_free_space_ctl() argument
2957 spin_lock_init(&ctl->tree_lock); in btrfs_init_free_space_ctl()
2958 ctl->unit = fs_info->sectorsize; in btrfs_init_free_space_ctl()
2959 ctl->start = block_group->start; in btrfs_init_free_space_ctl()
2960 ctl->block_group = block_group; in btrfs_init_free_space_ctl()
2961 ctl->op = &free_space_op; in btrfs_init_free_space_ctl()
2962 ctl->free_space_bytes = RB_ROOT_CACHED; in btrfs_init_free_space_ctl()
2963 INIT_LIST_HEAD(&ctl->trimming_ranges); in btrfs_init_free_space_ctl()
2964 mutex_init(&ctl->cache_writeout_mutex); in btrfs_init_free_space_ctl()
2971 ctl->extents_thresh = (SZ_32K / 2) / sizeof(struct btrfs_free_space); in btrfs_init_free_space_ctl()
2984 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in __btrfs_return_cluster_to_free_space() local
2987 lockdep_assert_held(&ctl->tree_lock); in __btrfs_return_cluster_to_free_space()
3011 ctl->discardable_extents[BTRFS_STAT_CURR]--; in __btrfs_return_cluster_to_free_space()
3012 ctl->discardable_bytes[BTRFS_STAT_CURR] -= in __btrfs_return_cluster_to_free_space()
3016 try_merge_free_space(ctl, entry, false); in __btrfs_return_cluster_to_free_space()
3017 steal_from_bitmap(ctl, entry, false); in __btrfs_return_cluster_to_free_space()
3021 ctl->discardable_extents[BTRFS_STAT_CURR]++; in __btrfs_return_cluster_to_free_space()
3022 ctl->discardable_bytes[BTRFS_STAT_CURR] += in __btrfs_return_cluster_to_free_space()
3026 tree_insert_offset(ctl, NULL, entry); in __btrfs_return_cluster_to_free_space()
3027 rb_add_cached(&entry->bytes_index, &ctl->free_space_bytes, in __btrfs_return_cluster_to_free_space()
3037 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_remove_free_space_cache() local
3041 spin_lock(&ctl->tree_lock); in btrfs_remove_free_space_cache()
3050 cond_resched_lock(&ctl->tree_lock); in btrfs_remove_free_space_cache()
3052 __btrfs_remove_free_space_cache(ctl); in btrfs_remove_free_space_cache()
3054 spin_unlock(&ctl->tree_lock); in btrfs_remove_free_space_cache()
3063 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_is_free_space_trimmed() local
3068 spin_lock(&ctl->tree_lock); in btrfs_is_free_space_trimmed()
3069 node = rb_first(&ctl->free_space_offset); in btrfs_is_free_space_trimmed()
3082 spin_unlock(&ctl->tree_lock); in btrfs_is_free_space_trimmed()
3090 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_find_space_for_alloc() local
3103 spin_lock(&ctl->tree_lock); in btrfs_find_space_for_alloc()
3104 entry = find_free_space(ctl, &offset, &bytes_search, in btrfs_find_space_for_alloc()
3112 bitmap_clear_bits(ctl, entry, offset, bytes, true); in btrfs_find_space_for_alloc()
3118 free_bitmap(ctl, entry); in btrfs_find_space_for_alloc()
3120 unlink_free_space(ctl, entry, true); in btrfs_find_space_for_alloc()
3135 link_free_space(ctl, entry); in btrfs_find_space_for_alloc()
3139 spin_unlock(&ctl->tree_lock); in btrfs_find_space_for_alloc()
3159 struct btrfs_free_space_ctl *ctl; in btrfs_return_cluster_to_free_space() local
3177 ctl = block_group->free_space_ctl; in btrfs_return_cluster_to_free_space()
3180 spin_lock(&ctl->tree_lock); in btrfs_return_cluster_to_free_space()
3182 spin_unlock(&ctl->tree_lock); in btrfs_return_cluster_to_free_space()
3196 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_alloc_from_bitmap() local
3205 err = search_bitmap(ctl, entry, &search_start, &search_bytes, true); in btrfs_alloc_from_bitmap()
3213 bitmap_clear_bits(ctl, entry, ret, bytes, false); in btrfs_alloc_from_bitmap()
3227 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_alloc_from_cluster() local
3292 spin_lock(&ctl->tree_lock); in btrfs_alloc_from_cluster()
3297 ctl->free_space -= bytes; in btrfs_alloc_from_cluster()
3299 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in btrfs_alloc_from_cluster()
3304 ctl->free_extents--; in btrfs_alloc_from_cluster()
3308 ctl->total_bitmaps--; in btrfs_alloc_from_cluster()
3309 recalculate_thresholds(ctl); in btrfs_alloc_from_cluster()
3311 ctl->discardable_extents[BTRFS_STAT_CURR]--; in btrfs_alloc_from_cluster()
3317 spin_unlock(&ctl->tree_lock); in btrfs_alloc_from_cluster()
3328 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_bitmap_cluster() local
3339 lockdep_assert_held(&ctl->tree_lock); in btrfs_bitmap_cluster()
3341 i = offset_to_bit(entry->offset, ctl->unit, in btrfs_bitmap_cluster()
3343 want_bits = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_cluster()
3344 min_bits = bytes_to_bits(min_bytes, ctl->unit); in btrfs_bitmap_cluster()
3370 entry->max_extent_size = (u64)max_bits * ctl->unit; in btrfs_bitmap_cluster()
3381 if (cluster->max_size < found_bits * ctl->unit) in btrfs_bitmap_cluster()
3382 cluster->max_size = found_bits * ctl->unit; in btrfs_bitmap_cluster()
3389 cluster->window_start = start * ctl->unit + entry->offset; in btrfs_bitmap_cluster()
3390 rb_erase(&entry->offset_index, &ctl->free_space_offset); in btrfs_bitmap_cluster()
3391 rb_erase_cached(&entry->bytes_index, &ctl->free_space_bytes); in btrfs_bitmap_cluster()
3402 ret = tree_insert_offset(ctl, cluster, entry); in btrfs_bitmap_cluster()
3406 total_found * ctl->unit, 1); in btrfs_bitmap_cluster()
3421 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in setup_cluster_no_bitmap() local
3430 lockdep_assert_held(&ctl->tree_lock); in setup_cluster_no_bitmap()
3432 entry = tree_search_offset(ctl, offset, 0, 1); in setup_cluster_no_bitmap()
3492 rb_erase(&entry->offset_index, &ctl->free_space_offset); in setup_cluster_no_bitmap()
3493 rb_erase_cached(&entry->bytes_index, &ctl->free_space_bytes); in setup_cluster_no_bitmap()
3494 ret = tree_insert_offset(ctl, cluster, entry); in setup_cluster_no_bitmap()
3514 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in setup_cluster_bitmap() local
3517 u64 bitmap_offset = offset_to_bitmap(ctl, offset); in setup_cluster_bitmap()
3519 if (ctl->total_bitmaps == 0) in setup_cluster_bitmap()
3530 entry = tree_search_offset(ctl, bitmap_offset, 1, 0); in setup_cluster_bitmap()
3564 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_find_space_cluster() local
3588 spin_lock(&ctl->tree_lock); in btrfs_find_space_cluster()
3594 if (ctl->free_space < bytes) { in btrfs_find_space_cluster()
3595 spin_unlock(&ctl->tree_lock); in btrfs_find_space_cluster()
3632 spin_unlock(&ctl->tree_lock); in btrfs_find_space_cluster()
3659 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in do_trimming() local
3683 mutex_lock(&ctl->cache_writeout_mutex); in do_trimming()
3693 mutex_unlock(&ctl->cache_writeout_mutex); in do_trimming()
3718 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in trim_no_bitmap() local
3731 mutex_lock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3732 spin_lock(&ctl->tree_lock); in trim_no_bitmap()
3734 if (ctl->free_space < minlen) in trim_no_bitmap()
3737 entry = tree_search_offset(ctl, start, 0, 1); in trim_no_bitmap()
3761 spin_unlock(&ctl->tree_lock); in trim_no_bitmap()
3762 mutex_unlock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3765 unlink_free_space(ctl, entry, true); in trim_no_bitmap()
3778 link_free_space(ctl, entry); in trim_no_bitmap()
3786 spin_unlock(&ctl->tree_lock); in trim_no_bitmap()
3787 mutex_unlock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3791 unlink_free_space(ctl, entry, true); in trim_no_bitmap()
3795 spin_unlock(&ctl->tree_lock); in trim_no_bitmap()
3798 list_add_tail(&trim_entry.list, &ctl->trimming_ranges); in trim_no_bitmap()
3799 mutex_unlock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3826 spin_unlock(&ctl->tree_lock); in trim_no_bitmap()
3827 mutex_unlock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3846 static void reset_trimming_bitmap(struct btrfs_free_space_ctl *ctl, u64 offset) in reset_trimming_bitmap() argument
3850 spin_lock(&ctl->tree_lock); in reset_trimming_bitmap()
3851 entry = tree_search_offset(ctl, offset, 1, 0); in reset_trimming_bitmap()
3854 ctl->discardable_extents[BTRFS_STAT_CURR] += in reset_trimming_bitmap()
3856 ctl->discardable_bytes[BTRFS_STAT_CURR] += entry->bytes; in reset_trimming_bitmap()
3861 spin_unlock(&ctl->tree_lock); in reset_trimming_bitmap()
3864 static void end_trimming_bitmap(struct btrfs_free_space_ctl *ctl, in end_trimming_bitmap() argument
3869 ctl->discardable_extents[BTRFS_STAT_CURR] -= in end_trimming_bitmap()
3871 ctl->discardable_bytes[BTRFS_STAT_CURR] -= entry->bytes; in end_trimming_bitmap()
3884 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in trim_bitmaps() local
3889 u64 offset = offset_to_bitmap(ctl, start); in trim_bitmaps()
3896 mutex_lock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3897 spin_lock(&ctl->tree_lock); in trim_bitmaps()
3899 if (ctl->free_space < minlen) { in trim_bitmaps()
3902 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3903 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3907 entry = tree_search_offset(ctl, offset, 1, 0); in trim_bitmaps()
3918 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3919 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3934 ret2 = search_bitmap(ctl, entry, &start, &bytes, false); in trim_bitmaps()
3941 end_trimming_bitmap(ctl, entry); in trim_bitmaps()
3944 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3945 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3955 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3956 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3962 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3963 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3978 bitmap_clear_bits(ctl, entry, start, bytes, true); in trim_bitmaps()
3980 free_bitmap(ctl, entry); in trim_bitmaps()
3982 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3985 list_add_tail(&trim_entry.list, &ctl->trimming_ranges); in trim_bitmaps()
3986 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3991 reset_trimming_bitmap(ctl, offset); in trim_bitmaps()
3998 offset += BITS_PER_BITMAP * ctl->unit; in trim_bitmaps()
4007 reset_trimming_bitmap(ctl, offset); in trim_bitmaps()
4025 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_trim_block_group() local
4046 div64_u64_rem(end, BITS_PER_BITMAP * ctl->unit, &rem); in btrfs_trim_block_group()
4049 reset_trimming_bitmap(ctl, offset_to_bitmap(ctl, end)); in btrfs_trim_block_group()
4194 struct btrfs_free_space_ctl *ctl = cache->free_space_ctl; in test_add_free_space_entry() local
4209 spin_lock(&ctl->tree_lock); in test_add_free_space_entry()
4213 ret = link_free_space(ctl, info); in test_add_free_space_entry()
4214 spin_unlock(&ctl->tree_lock); in test_add_free_space_entry()
4228 spin_lock(&ctl->tree_lock); in test_add_free_space_entry()
4229 bitmap_info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in test_add_free_space_entry()
4234 add_new_bitmap(ctl, info, offset); in test_add_free_space_entry()
4239 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in test_add_free_space_entry()
4244 spin_unlock(&ctl->tree_lock); in test_add_free_space_entry()
4264 struct btrfs_free_space_ctl *ctl = cache->free_space_ctl; in test_check_exists() local
4268 spin_lock(&ctl->tree_lock); in test_check_exists()
4269 info = tree_search_offset(ctl, offset, 0, 0); in test_check_exists()
4271 info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in test_check_exists()
4284 bit_bytes = ctl->unit; in test_check_exists()
4285 ret = search_bitmap(ctl, info, &bit_off, &bit_bytes, false); in test_check_exists()
4337 spin_unlock(&ctl->tree_lock); in test_check_exists()
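
The matched lines above appear to come from fs/btrfs/free-space-cache.c in the Linux kernel. One pattern that recurs throughout the matches is that every insertion into or removal from the free-space tree (link_free_space(), unlink_free_space(), the bitmap helpers) happens while holding ctl->tree_lock. The sketch below is a minimal, hypothetical userspace illustration of that lock-then-modify pattern only: the struct layouts, the pthread spinlock, and the sorted list standing in for the kernel's rbtree are simplified stand-ins for illustration, not the kernel's actual definitions.

	/* Illustrative userspace sketch (not kernel code). */
	#include <pthread.h>
	#include <stdint.h>
	#include <stdio.h>
	#include <stdlib.h>

	struct free_space {              /* stand-in for struct btrfs_free_space */
		uint64_t offset;
		uint64_t bytes;
		struct free_space *next; /* simplified: sorted list, not an rbtree */
	};

	struct free_space_ctl {          /* stand-in for struct btrfs_free_space_ctl */
		pthread_spinlock_t tree_lock;
		struct free_space *head;
		uint64_t free_space;
		int free_extents;
	};

	/* Caller must hold ctl->tree_lock, mirroring the kernel's convention. */
	static void link_free_space(struct free_space_ctl *ctl, struct free_space *info)
	{
		struct free_space **p = &ctl->head;

		/* Keep the list sorted by offset, like the offset-indexed tree. */
		while (*p && (*p)->offset < info->offset)
			p = &(*p)->next;
		info->next = *p;
		*p = info;
		ctl->free_space += info->bytes;
		ctl->free_extents++;
	}

	int main(void)
	{
		struct free_space_ctl ctl = { .head = NULL };
		struct free_space *info = calloc(1, sizeof(*info));

		if (!info)
			return 1;
		pthread_spin_init(&ctl.tree_lock, PTHREAD_PROCESS_PRIVATE);
		info->offset = 1 << 20;
		info->bytes  = 64 << 10;

		/* The pattern seen throughout the listing: lock, modify tree, unlock. */
		pthread_spin_lock(&ctl.tree_lock);
		link_free_space(&ctl, info);
		pthread_spin_unlock(&ctl.tree_lock);

		printf("free space: %llu bytes in %d extents\n",
		       (unsigned long long)ctl.free_space, ctl.free_extents);
		pthread_spin_destroy(&ctl.tree_lock);
		free(info);
		return 0;
	}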