Lines matching the identifier "ac" (full match) in fs/ext4/mballoc.c

421 static void ext4_mb_new_preallocation(struct ext4_allocation_context *ac);
423 static bool ext4_mb_good_group(struct ext4_allocation_context *ac,
874 static void ext4_mb_choose_next_group_p2_aligned(struct ext4_allocation_context *ac, in ext4_mb_choose_next_group_p2_aligned() argument
877 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_choose_next_group_p2_aligned()
881 if (ac->ac_status == AC_STATUS_FOUND) in ext4_mb_choose_next_group_p2_aligned()
884 if (unlikely(sbi->s_mb_stats && ac->ac_flags & EXT4_MB_CR_POWER2_ALIGNED_OPTIMIZED)) in ext4_mb_choose_next_group_p2_aligned()
887 for (i = ac->ac_2order; i < MB_NUM_ORDERS(ac->ac_sb); i++) { in ext4_mb_choose_next_group_p2_aligned()
899 if (likely(ext4_mb_good_group(ac, iter->bb_group, CR_POWER2_ALIGNED))) { in ext4_mb_choose_next_group_p2_aligned()
901 ac->ac_flags |= EXT4_MB_CR_POWER2_ALIGNED_OPTIMIZED; in ext4_mb_choose_next_group_p2_aligned()
917 ext4_mb_find_good_group_avg_frag_lists(struct ext4_allocation_context *ac, int order) in ext4_mb_find_good_group_avg_frag_lists() argument
919 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_find_good_group_avg_frag_lists()
923 enum criteria cr = ac->ac_criteria; in ext4_mb_find_good_group_avg_frag_lists()
935 if (likely(ext4_mb_good_group(ac, iter->bb_group, cr))) { in ext4_mb_find_good_group_avg_frag_lists()
948 static void ext4_mb_choose_next_group_goal_fast(struct ext4_allocation_context *ac, in ext4_mb_choose_next_group_goal_fast() argument
951 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_choose_next_group_goal_fast()
955 if (unlikely(ac->ac_flags & EXT4_MB_CR_GOAL_LEN_FAST_OPTIMIZED)) { in ext4_mb_choose_next_group_goal_fast()
960 for (i = mb_avg_fragment_size_order(ac->ac_sb, ac->ac_g_ex.fe_len); in ext4_mb_choose_next_group_goal_fast()
961 i < MB_NUM_ORDERS(ac->ac_sb); i++) { in ext4_mb_choose_next_group_goal_fast()
962 grp = ext4_mb_find_good_group_avg_frag_lists(ac, i); in ext4_mb_choose_next_group_goal_fast()
965 ac->ac_flags |= EXT4_MB_CR_GOAL_LEN_FAST_OPTIMIZED; in ext4_mb_choose_next_group_goal_fast()
978 if (ac->ac_flags & EXT4_MB_HINT_DATA) in ext4_mb_choose_next_group_goal_fast()
993 static void ext4_mb_choose_next_group_best_avail(struct ext4_allocation_context *ac, in ext4_mb_choose_next_group_best_avail() argument
996 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_choose_next_group_best_avail()
1001 if (unlikely(ac->ac_flags & EXT4_MB_CR_BEST_AVAIL_LEN_OPTIMIZED)) { in ext4_mb_choose_next_group_best_avail()
1012 order = fls(ac->ac_g_ex.fe_len) - 1; in ext4_mb_choose_next_group_best_avail()
1013 if (WARN_ON_ONCE(order - 1 > MB_NUM_ORDERS(ac->ac_sb))) in ext4_mb_choose_next_group_best_avail()
1014 order = MB_NUM_ORDERS(ac->ac_sb); in ext4_mb_choose_next_group_best_avail()
1033 if (1 << min_order < ac->ac_o_ex.fe_len) in ext4_mb_choose_next_group_best_avail()
1034 min_order = fls(ac->ac_o_ex.fe_len); in ext4_mb_choose_next_group_best_avail()
1043 ac->ac_g_ex.fe_len = 1 << i; in ext4_mb_choose_next_group_best_avail()
1051 ac->ac_g_ex.fe_len = roundup(ac->ac_g_ex.fe_len, in ext4_mb_choose_next_group_best_avail()
1055 frag_order = mb_avg_fragment_size_order(ac->ac_sb, in ext4_mb_choose_next_group_best_avail()
1056 ac->ac_g_ex.fe_len); in ext4_mb_choose_next_group_best_avail()
1058 grp = ext4_mb_find_good_group_avg_frag_lists(ac, frag_order); in ext4_mb_choose_next_group_best_avail()
1061 ac->ac_flags |= EXT4_MB_CR_BEST_AVAIL_LEN_OPTIMIZED; in ext4_mb_choose_next_group_best_avail()
1067 ac->ac_g_ex.fe_len = ac->ac_orig_goal_len; in ext4_mb_choose_next_group_best_avail()
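
The lines above from ext4_mb_choose_next_group_best_avail() show the goal length being stepped down through powers of two (ac->ac_g_ex.fe_len = 1 << i) and restored to ac->ac_orig_goal_len when nothing qualifies. Below is a minimal user-space sketch of that trimming idea, not the kernel implementation; fls_u32(), best_avail_trim(), try_groups() and the fixed trim limit of three orders are stand-ins (the kernel uses a tunable limit and also rounds the trimmed goal up to a multiple of the original request).

static int fls_u32(unsigned int x)              /* like the kernel's fls() */
{
        int r = 0;

        while (x) {
                r++;
                x >>= 1;
        }
        return r;
}

static unsigned int best_avail_trim(unsigned int goal_len, unsigned int orig_len,
                                    int (*try_groups)(unsigned int len))
{
        int order = fls_u32(goal_len) - 1;
        int min_order = order - 3;              /* assumed trim limit; a tunable in the kernel */

        if (min_order < 0)
                min_order = 0;
        if ((1u << min_order) < orig_len)       /* never trim below the original request */
                min_order = fls_u32(orig_len);

        for (int i = order; i >= min_order; i--) {
                unsigned int len = 1u << i;

                if (try_groups(len))            /* some group can serve this trimmed goal */
                        return len;
        }
        return orig_len;                        /* nothing fit: restore the original goal */
}
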
1071 static inline int should_optimize_scan(struct ext4_allocation_context *ac) in should_optimize_scan() argument
1073 if (unlikely(!test_opt2(ac->ac_sb, MB_OPTIMIZE_SCAN))) in should_optimize_scan()
1075 if (ac->ac_criteria >= CR_GOAL_LEN_SLOW) in should_optimize_scan()
1077 if (!ext4_test_inode_flag(ac->ac_inode, EXT4_INODE_EXTENTS)) in should_optimize_scan()
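
should_optimize_scan() above gates the optimized group scan on three conditions visible in the listing: the MB_OPTIMIZE_SCAN mount option must be set, the current criteria must be cheaper than CR_GOAL_LEN_SLOW, and the inode must be extent-mapped. Here is a self-contained model of those checks, assuming a hypothetical struct scan_ctx in place of the allocation context; later sketches reuse this enum criteria.

#include <stdbool.h>

enum criteria { CR_POWER2_ALIGNED, CR_GOAL_LEN_FAST, CR_BEST_AVAIL_LEN,
                CR_GOAL_LEN_SLOW, CR_ANY_FREE };

struct scan_ctx {
        bool mb_optimize_scan;          /* test_opt2(sb, MB_OPTIMIZE_SCAN) */
        bool inode_has_extents;         /* EXT4_INODE_EXTENTS flag on the inode */
        enum criteria cr;               /* current allocation criteria */
};

static bool should_optimize_scan(const struct scan_ctx *sc)
{
        if (!sc->mb_optimize_scan)      /* optimization disabled by mount option */
                return false;
        if (sc->cr >= CR_GOAL_LEN_SLOW) /* slow criteria walk groups linearly */
                return false;
        if (!sc->inode_has_extents)     /* legacy (non-extent) inodes opt out */
                return false;
        return true;
}
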
1098 * @ac Allocation Context
1108 static void ext4_mb_choose_next_group(struct ext4_allocation_context *ac, in ext4_mb_choose_next_group() argument
1111 *new_cr = ac->ac_criteria; in ext4_mb_choose_next_group()
1113 if (!should_optimize_scan(ac)) { in ext4_mb_choose_next_group()
1123 if (ac->ac_groups_linear_remaining) { in ext4_mb_choose_next_group()
1125 ac->ac_groups_linear_remaining--; in ext4_mb_choose_next_group()
1130 ext4_mb_choose_next_group_p2_aligned(ac, new_cr, group); in ext4_mb_choose_next_group()
1132 ext4_mb_choose_next_group_goal_fast(ac, new_cr, group); in ext4_mb_choose_next_group()
1134 ext4_mb_choose_next_group_best_avail(ac, new_cr, group); in ext4_mb_choose_next_group()
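
ext4_mb_choose_next_group() above first burns through a bounded linear window (ac_groups_linear_remaining) and then hands off to a criteria-specific chooser. A condensed model of that dispatch, reusing enum criteria from the previous sketch; group_t, choose_fn and the wraparound step are illustrative, and the !should_optimize_scan() fallback to plain linear scanning is omitted for brevity.

typedef unsigned int group_t;
typedef void (*choose_fn)(enum criteria *new_cr, group_t *group);

struct next_group_ctx {
        enum criteria cr;
        unsigned int linear_remaining;          /* like ac_groups_linear_remaining */
        choose_fn p2_aligned, goal_fast, best_avail;
};

static void choose_next_group(struct next_group_ctx *c, enum criteria *new_cr,
                              group_t *group, group_t ngroups)
{
        *new_cr = c->cr;

        if (c->linear_remaining) {              /* still inside the cheap linear window */
                *group = (*group + 1) % ngroups;
                c->linear_remaining--;
                return;
        }

        if (*new_cr == CR_POWER2_ALIGNED)
                c->p2_aligned(new_cr, group);   /* pick from buddy-order lists */
        else if (*new_cr == CR_GOAL_LEN_FAST)
                c->goal_fast(new_cr, group);    /* pick from avg-fragment-size lists */
        else if (*new_cr == CR_BEST_AVAIL_LEN)
                c->best_avail(new_cr, group);   /* retry with trimmed goal lengths */
}
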
2136 static void ext4_mb_use_best_found(struct ext4_allocation_context *ac, in ext4_mb_use_best_found() argument
2139 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_use_best_found()
2142 BUG_ON(ac->ac_b_ex.fe_group != e4b->bd_group); in ext4_mb_use_best_found()
2143 BUG_ON(ac->ac_status == AC_STATUS_FOUND); in ext4_mb_use_best_found()
2145 ac->ac_b_ex.fe_len = min(ac->ac_b_ex.fe_len, ac->ac_g_ex.fe_len); in ext4_mb_use_best_found()
2146 ac->ac_b_ex.fe_logical = ac->ac_g_ex.fe_logical; in ext4_mb_use_best_found()
2147 ret = mb_mark_used(e4b, &ac->ac_b_ex); in ext4_mb_use_best_found()
2151 ac->ac_f_ex = ac->ac_b_ex; in ext4_mb_use_best_found()
2153 ac->ac_status = AC_STATUS_FOUND; in ext4_mb_use_best_found()
2154 ac->ac_tail = ret & 0xffff; in ext4_mb_use_best_found()
2155 ac->ac_buddy = ret >> 16; in ext4_mb_use_best_found()
2164 ac->ac_bitmap_folio = e4b->bd_bitmap_folio; in ext4_mb_use_best_found()
2165 folio_get(ac->ac_bitmap_folio); in ext4_mb_use_best_found()
2166 ac->ac_buddy_folio = e4b->bd_buddy_folio; in ext4_mb_use_best_found()
2167 folio_get(ac->ac_buddy_folio); in ext4_mb_use_best_found()
2169 if (ac->ac_flags & EXT4_MB_STREAM_ALLOC) { in ext4_mb_use_best_found()
2171 sbi->s_mb_last_group = ac->ac_f_ex.fe_group; in ext4_mb_use_best_found()
2172 sbi->s_mb_last_start = ac->ac_f_ex.fe_start; in ext4_mb_use_best_found()
2180 if (ac->ac_o_ex.fe_len < ac->ac_b_ex.fe_len) in ext4_mb_use_best_found()
2181 ext4_mb_new_preallocation(ac); in ext4_mb_use_best_found()
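
The lines from ext4_mb_use_best_found() show the winning extent being clipped to the goal length, recorded as ac_f_ex, and any surplus over the original request being turned into a preallocation. A condensed model of that bookkeeping; struct extent, mark_used() and new_preallocation() are stand-ins for the kernel's mb_mark_used() and ext4_mb_new_preallocation(), and the folio pinning and stream-allocation hints are not modeled.

struct extent { unsigned int group, start, len; };

struct found_ctx {
        struct extent best, goal, orig, found;  /* ac_b_ex, ac_g_ex, ac_o_ex, ac_f_ex */
        int status;                             /* 0 = continue, 1 = found */
};

static void use_best_found(struct found_ctx *ac,
                           void (*mark_used)(const struct extent *),
                           void (*new_preallocation)(struct found_ctx *))
{
        if (ac->best.len > ac->goal.len)        /* never take more than the goal */
                ac->best.len = ac->goal.len;
        mark_used(&ac->best);                   /* flip the bits in bitmap and buddy */
        ac->found = ac->best;                   /* remember exactly what was marked */
        ac->status = 1;                         /* AC_STATUS_FOUND */
        if (ac->orig.len < ac->best.len)        /* surplus becomes preallocated space */
                new_preallocation(ac);
}
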
2185 static void ext4_mb_check_limits(struct ext4_allocation_context *ac, in ext4_mb_check_limits() argument
2189 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_check_limits()
2190 struct ext4_free_extent *bex = &ac->ac_b_ex; in ext4_mb_check_limits()
2191 struct ext4_free_extent *gex = &ac->ac_g_ex; in ext4_mb_check_limits()
2193 if (ac->ac_status == AC_STATUS_FOUND) in ext4_mb_check_limits()
2198 if (ac->ac_found > sbi->s_mb_max_to_scan && in ext4_mb_check_limits()
2199 !(ac->ac_flags & EXT4_MB_HINT_FIRST)) { in ext4_mb_check_limits()
2200 ac->ac_status = AC_STATUS_BREAK; in ext4_mb_check_limits()
2210 if (finish_group || ac->ac_found > sbi->s_mb_min_to_scan) in ext4_mb_check_limits()
2211 ext4_mb_use_best_found(ac, e4b); in ext4_mb_check_limits()
2238 static void ext4_mb_measure_extent(struct ext4_allocation_context *ac, in ext4_mb_measure_extent() argument
2242 struct ext4_free_extent *bex = &ac->ac_b_ex; in ext4_mb_measure_extent()
2243 struct ext4_free_extent *gex = &ac->ac_g_ex; in ext4_mb_measure_extent()
2246 BUG_ON(ex->fe_len > EXT4_CLUSTERS_PER_GROUP(ac->ac_sb)); in ext4_mb_measure_extent()
2247 BUG_ON(ex->fe_start >= EXT4_CLUSTERS_PER_GROUP(ac->ac_sb)); in ext4_mb_measure_extent()
2248 BUG_ON(ac->ac_status != AC_STATUS_CONTINUE); in ext4_mb_measure_extent()
2250 ac->ac_found++; in ext4_mb_measure_extent()
2251 ac->ac_cX_found[ac->ac_criteria]++; in ext4_mb_measure_extent()
2256 if (unlikely(ac->ac_flags & EXT4_MB_HINT_FIRST)) { in ext4_mb_measure_extent()
2258 ext4_mb_use_best_found(ac, e4b); in ext4_mb_measure_extent()
2267 ext4_mb_use_best_found(ac, e4b); in ext4_mb_measure_extent()
2295 ext4_mb_check_limits(ac, e4b, 0); in ext4_mb_measure_extent()
2299 void ext4_mb_try_best_found(struct ext4_allocation_context *ac, in ext4_mb_try_best_found() argument
2302 struct ext4_free_extent ex = ac->ac_b_ex; in ext4_mb_try_best_found()
2308 err = ext4_mb_load_buddy(ac->ac_sb, group, e4b); in ext4_mb_try_best_found()
2312 ext4_lock_group(ac->ac_sb, group); in ext4_mb_try_best_found()
2319 ac->ac_b_ex = ex; in ext4_mb_try_best_found()
2320 ext4_mb_use_best_found(ac, e4b); in ext4_mb_try_best_found()
2324 ext4_unlock_group(ac->ac_sb, group); in ext4_mb_try_best_found()
2329 int ext4_mb_find_by_goal(struct ext4_allocation_context *ac, in ext4_mb_find_by_goal() argument
2332 ext4_group_t group = ac->ac_g_ex.fe_group; in ext4_mb_find_by_goal()
2335 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_find_by_goal()
2336 struct ext4_group_info *grp = ext4_get_group_info(ac->ac_sb, group); in ext4_mb_find_by_goal()
2341 if (!(ac->ac_flags & (EXT4_MB_HINT_TRY_GOAL | EXT4_MB_HINT_GOAL_ONLY))) in ext4_mb_find_by_goal()
2346 err = ext4_mb_load_buddy(ac->ac_sb, group, e4b); in ext4_mb_find_by_goal()
2350 ext4_lock_group(ac->ac_sb, group); in ext4_mb_find_by_goal()
2354 max = mb_find_extent(e4b, ac->ac_g_ex.fe_start, in ext4_mb_find_by_goal()
2355 ac->ac_g_ex.fe_len, &ex); in ext4_mb_find_by_goal()
2358 if (max >= ac->ac_g_ex.fe_len && in ext4_mb_find_by_goal()
2359 ac->ac_g_ex.fe_len == EXT4_NUM_B2C(sbi, sbi->s_stripe)) { in ext4_mb_find_by_goal()
2362 start = ext4_grp_offs_to_block(ac->ac_sb, &ex); in ext4_mb_find_by_goal()
2365 ac->ac_found++; in ext4_mb_find_by_goal()
2366 ac->ac_b_ex = ex; in ext4_mb_find_by_goal()
2367 ext4_mb_use_best_found(ac, e4b); in ext4_mb_find_by_goal()
2369 } else if (max >= ac->ac_g_ex.fe_len) { in ext4_mb_find_by_goal()
2371 BUG_ON(ex.fe_group != ac->ac_g_ex.fe_group); in ext4_mb_find_by_goal()
2372 BUG_ON(ex.fe_start != ac->ac_g_ex.fe_start); in ext4_mb_find_by_goal()
2373 ac->ac_found++; in ext4_mb_find_by_goal()
2374 ac->ac_b_ex = ex; in ext4_mb_find_by_goal()
2375 ext4_mb_use_best_found(ac, e4b); in ext4_mb_find_by_goal()
2376 } else if (max > 0 && (ac->ac_flags & EXT4_MB_HINT_MERGE)) { in ext4_mb_find_by_goal()
2380 BUG_ON(ex.fe_group != ac->ac_g_ex.fe_group); in ext4_mb_find_by_goal()
2381 BUG_ON(ex.fe_start != ac->ac_g_ex.fe_start); in ext4_mb_find_by_goal()
2382 ac->ac_found++; in ext4_mb_find_by_goal()
2383 ac->ac_b_ex = ex; in ext4_mb_find_by_goal()
2384 ext4_mb_use_best_found(ac, e4b); in ext4_mb_find_by_goal()
2387 ext4_unlock_group(ac->ac_sb, group); in ext4_mb_find_by_goal()
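
ext4_mb_find_by_goal() above accepts the extent found at the goal offset in three cases: a stripe-sized request that starts stripe-aligned, a hit covering the full goal length, or a partial hit when EXT4_MB_HINT_MERGE is set. A sketch of those acceptance rules, with the alignment check simplified to a single unit (the kernel mixes blocks and clusters here); check_goal_extent() and its parameters are illustrative.

#include <stdbool.h>

enum goal_result { GOAL_MISS, GOAL_HIT };

static enum goal_result check_goal_extent(unsigned int max,         /* free run found at the goal */
                                          unsigned int goal_len,
                                          unsigned int stripe_len,  /* 0 when no stripe is set */
                                          unsigned long long start, /* block number of that run */
                                          bool hint_merge)
{
        if (stripe_len && max >= goal_len && goal_len == stripe_len)
                /* stripe-sized request: only take it if it starts aligned */
                return (start % stripe_len == 0) ? GOAL_HIT : GOAL_MISS;
        if (max >= goal_len)            /* the whole goal extent is free */
                return GOAL_HIT;
        if (max > 0 && hint_merge)      /* a partial hit is fine when merging */
                return GOAL_HIT;
        return GOAL_MISS;
}
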
2398 void ext4_mb_simple_scan_group(struct ext4_allocation_context *ac, in ext4_mb_simple_scan_group() argument
2401 struct super_block *sb = ac->ac_sb; in ext4_mb_simple_scan_group()
2408 BUG_ON(ac->ac_2order <= 0); in ext4_mb_simple_scan_group()
2409 for (i = ac->ac_2order; i < MB_NUM_ORDERS(sb); i++) { in ext4_mb_simple_scan_group()
2420 ext4_mark_group_bitmap_corrupted(ac->ac_sb, in ext4_mb_simple_scan_group()
2423 ext4_grp_locked_error(ac->ac_sb, e4b->bd_group, 0, 0, in ext4_mb_simple_scan_group()
2428 ac->ac_found++; in ext4_mb_simple_scan_group()
2429 ac->ac_cX_found[ac->ac_criteria]++; in ext4_mb_simple_scan_group()
2431 ac->ac_b_ex.fe_len = 1 << i; in ext4_mb_simple_scan_group()
2432 ac->ac_b_ex.fe_start = k << i; in ext4_mb_simple_scan_group()
2433 ac->ac_b_ex.fe_group = e4b->bd_group; in ext4_mb_simple_scan_group()
2435 ext4_mb_use_best_found(ac, e4b); in ext4_mb_simple_scan_group()
2437 BUG_ON(ac->ac_f_ex.fe_len != ac->ac_g_ex.fe_len); in ext4_mb_simple_scan_group()
2452 void ext4_mb_complex_scan_group(struct ext4_allocation_context *ac, in ext4_mb_complex_scan_group() argument
2455 struct super_block *sb = ac->ac_sb; in ext4_mb_complex_scan_group()
2467 while (free && ac->ac_status == AC_STATUS_CONTINUE) { in ext4_mb_complex_scan_group()
2485 if (!ext4_mb_cr_expensive(ac->ac_criteria)) { in ext4_mb_complex_scan_group()
2496 if (freelen < ac->ac_g_ex.fe_len) { in ext4_mb_complex_scan_group()
2503 mb_find_extent(e4b, i, ac->ac_g_ex.fe_len, &ex); in ext4_mb_complex_scan_group()
2521 ext4_mb_measure_extent(ac, &ex, e4b); in ext4_mb_complex_scan_group()
2527 ext4_mb_check_limits(ac, e4b, 1); in ext4_mb_complex_scan_group()
2535 void ext4_mb_scan_aligned(struct ext4_allocation_context *ac, in ext4_mb_scan_aligned() argument
2538 struct super_block *sb = ac->ac_sb; in ext4_mb_scan_aligned()
2562 ac->ac_found++; in ext4_mb_scan_aligned()
2563 ac->ac_cX_found[ac->ac_criteria]++; in ext4_mb_scan_aligned()
2565 ac->ac_b_ex = ex; in ext4_mb_scan_aligned()
2566 ext4_mb_use_best_found(ac, e4b); in ext4_mb_scan_aligned()
2579 static bool ext4_mb_good_group(struct ext4_allocation_context *ac, in ext4_mb_good_group() argument
2583 int flex_size = ext4_flex_bg_size(EXT4_SB(ac->ac_sb)); in ext4_mb_good_group()
2584 struct ext4_group_info *grp = ext4_get_group_info(ac->ac_sb, group); in ext4_mb_good_group()
2601 BUG_ON(ac->ac_2order == 0); in ext4_mb_good_group()
2604 if ((ac->ac_flags & EXT4_MB_HINT_DATA) && in ext4_mb_good_group()
2609 if (free < ac->ac_g_ex.fe_len) in ext4_mb_good_group()
2612 if (ac->ac_2order >= MB_NUM_ORDERS(ac->ac_sb)) in ext4_mb_good_group()
2615 if (grp->bb_largest_free_order < ac->ac_2order) in ext4_mb_good_group()
2621 if ((free / fragments) >= ac->ac_g_ex.fe_len) in ext4_mb_good_group()
2625 if (free >= ac->ac_g_ex.fe_len) in ext4_mb_good_group()
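
ext4_mb_good_group() above applies a different cheap test per criteria: a big-enough buddy order for CR_POWER2_ALIGNED, the average free-extent size for CR_GOAL_LEN_FAST and CR_BEST_AVAIL_LEN, raw free space for CR_GOAL_LEN_SLOW, and anything non-empty otherwise. A model of that switch using the enum criteria from the earlier sketch; struct group_stats mirrors a few ext4_group_info fields, and the flex-bg skip for data allocations is left out.

#include <stdbool.h>

struct group_stats {
        unsigned int free;              /* free clusters in the group */
        unsigned int fragments;         /* number of free extents */
        int largest_free_order;         /* largest buddy order available */
};

static bool good_group(const struct group_stats *g, int cr,
                       unsigned int goal_len, int goal_order)
{
        if (g->free == 0 || g->fragments == 0)
                return false;

        switch (cr) {
        case CR_POWER2_ALIGNED:         /* need a whole buddy of the right order */
                return g->free >= goal_len && g->largest_free_order >= goal_order;
        case CR_GOAL_LEN_FAST:
        case CR_BEST_AVAIL_LEN:         /* average free extent must cover the goal */
                return g->free >= goal_len && g->free / g->fragments >= goal_len;
        case CR_GOAL_LEN_SLOW:          /* enough free space in total is enough */
                return g->free >= goal_len;
        default:                        /* CR_ANY_FREE: any non-empty group works */
                return true;
        }
}
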
2648 static int ext4_mb_good_group_nolock(struct ext4_allocation_context *ac, in ext4_mb_good_group_nolock() argument
2651 struct ext4_group_info *grp = ext4_get_group_info(ac->ac_sb, group); in ext4_mb_good_group_nolock()
2652 struct super_block *sb = ac->ac_sb; in ext4_mb_good_group_nolock()
2654 bool should_lock = ac->ac_flags & EXT4_MB_STRICT_CHECK; in ext4_mb_good_group_nolock()
2661 atomic64_inc(&sbi->s_bal_cX_groups_considered[ac->ac_criteria]); in ext4_mb_good_group_nolock()
2674 if (cr < CR_ANY_FREE && free < ac->ac_g_ex.fe_len) in ext4_mb_good_group_nolock()
2713 ret = ext4_mb_good_group(ac, group, cr); in ext4_mb_good_group_nolock()
2797 ext4_mb_regular_allocator(struct ext4_allocation_context *ac) in ext4_mb_regular_allocator() argument
2808 sb = ac->ac_sb; in ext4_mb_regular_allocator()
2812 if (!(ext4_test_inode_flag(ac->ac_inode, EXT4_INODE_EXTENTS))) in ext4_mb_regular_allocator()
2815 BUG_ON(ac->ac_status == AC_STATUS_FOUND); in ext4_mb_regular_allocator()
2818 err = ext4_mb_find_by_goal(ac, &e4b); in ext4_mb_regular_allocator()
2819 if (err || ac->ac_status == AC_STATUS_FOUND) in ext4_mb_regular_allocator()
2822 if (unlikely(ac->ac_flags & EXT4_MB_HINT_GOAL_ONLY)) in ext4_mb_regular_allocator()
2826 * ac->ac_2order is set only if the fe_len is a power of 2 in ext4_mb_regular_allocator()
2827 * if ac->ac_2order is set we also set criteria to CR_POWER2_ALIGNED in ext4_mb_regular_allocator()
2830 i = fls(ac->ac_g_ex.fe_len); in ext4_mb_regular_allocator()
2831 ac->ac_2order = 0; in ext4_mb_regular_allocator()
2840 if (is_power_of_2(ac->ac_g_ex.fe_len)) in ext4_mb_regular_allocator()
2841 ac->ac_2order = array_index_nospec(i - 1, in ext4_mb_regular_allocator()
2846 if (ac->ac_flags & EXT4_MB_STREAM_ALLOC) { in ext4_mb_regular_allocator()
2849 ac->ac_g_ex.fe_group = sbi->s_mb_last_group; in ext4_mb_regular_allocator()
2850 ac->ac_g_ex.fe_start = sbi->s_mb_last_start; in ext4_mb_regular_allocator()
2859 if (ac->ac_2order) in ext4_mb_regular_allocator()
2862 for (; cr < EXT4_MB_NUM_CRS && ac->ac_status == AC_STATUS_CONTINUE; cr++) { in ext4_mb_regular_allocator()
2863 ac->ac_criteria = cr; in ext4_mb_regular_allocator()
2868 group = ac->ac_g_ex.fe_group; in ext4_mb_regular_allocator()
2869 ac->ac_groups_linear_remaining = sbi->s_mb_max_linear_groups; in ext4_mb_regular_allocator()
2874 ext4_mb_choose_next_group(ac, &new_cr, &group, ngroups)) { in ext4_mb_regular_allocator()
2903 ret = ext4_mb_good_group_nolock(ac, group, cr); in ext4_mb_regular_allocator()
2920 ret = ext4_mb_good_group(ac, group, cr); in ext4_mb_regular_allocator()
2927 ac->ac_groups_scanned++; in ext4_mb_regular_allocator()
2929 ext4_mb_simple_scan_group(ac, &e4b); in ext4_mb_regular_allocator()
2934 !(ac->ac_g_ex.fe_len % in ext4_mb_regular_allocator()
2940 ext4_mb_scan_aligned(ac, &e4b); in ext4_mb_regular_allocator()
2942 if (ac->ac_status == AC_STATUS_CONTINUE) in ext4_mb_regular_allocator()
2943 ext4_mb_complex_scan_group(ac, &e4b); in ext4_mb_regular_allocator()
2949 if (ac->ac_status != AC_STATUS_CONTINUE) in ext4_mb_regular_allocator()
2956 if (i == ngroups && ac->ac_criteria == CR_BEST_AVAIL_LEN) in ext4_mb_regular_allocator()
2959 ac->ac_g_ex.fe_len = ac->ac_orig_goal_len; in ext4_mb_regular_allocator()
2962 if (ac->ac_b_ex.fe_len > 0 && ac->ac_status != AC_STATUS_FOUND && in ext4_mb_regular_allocator()
2963 !(ac->ac_flags & EXT4_MB_HINT_FIRST)) { in ext4_mb_regular_allocator()
2968 ext4_mb_try_best_found(ac, &e4b); in ext4_mb_regular_allocator()
2969 if (ac->ac_status != AC_STATUS_FOUND) { in ext4_mb_regular_allocator()
2977 ac->ac_b_ex.fe_group, ac->ac_b_ex.fe_start, in ext4_mb_regular_allocator()
2978 ac->ac_b_ex.fe_len, lost); in ext4_mb_regular_allocator()
2980 ac->ac_b_ex.fe_group = 0; in ext4_mb_regular_allocator()
2981 ac->ac_b_ex.fe_start = 0; in ext4_mb_regular_allocator()
2982 ac->ac_b_ex.fe_len = 0; in ext4_mb_regular_allocator()
2983 ac->ac_status = AC_STATUS_CONTINUE; in ext4_mb_regular_allocator()
2984 ac->ac_flags |= EXT4_MB_HINT_FIRST; in ext4_mb_regular_allocator()
2990 if (sbi->s_mb_stats && ac->ac_status == AC_STATUS_FOUND) in ext4_mb_regular_allocator()
2991 atomic64_inc(&sbi->s_bal_cX_hits[ac->ac_criteria]); in ext4_mb_regular_allocator()
2993 if (!err && ac->ac_status != AC_STATUS_FOUND && first_err) in ext4_mb_regular_allocator()
2997 ac->ac_b_ex.fe_len, ac->ac_o_ex.fe_len, ac->ac_status, in ext4_mb_regular_allocator()
2998 ac->ac_flags, cr, err); in ext4_mb_regular_allocator()
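
The ext4_mb_regular_allocator() lines above give the overall shape: try the goal extent first, pick a starting criteria (CR_POWER2_ALIGNED when the goal length is a power of two), then escalate criteria while iterating candidate groups and scanning the promising ones. Below is a skeleton of that escalation loop under the same enum criteria as before; all callbacks are stand-ins for ext4_mb_choose_next_group(), ext4_mb_good_group_nolock() and the scan helpers, and the lost-extent retry with EXT4_MB_HINT_FIRST is omitted.

struct alloc_state {
        int status;                     /* 0 = continue, 1 = found, 2 = break */
        int criteria;
};

static int regular_allocator(struct alloc_state *ac, unsigned int ngroups,
                             int start_cr,
                             int (*next_group)(struct alloc_state *, unsigned int *),
                             int (*group_is_good)(unsigned int group, int cr),
                             void (*scan_group)(struct alloc_state *, unsigned int group))
{
        for (int cr = start_cr; cr <= CR_ANY_FREE && ac->status == 0; cr++) {
                unsigned int group = 0;         /* the kernel starts at the goal group */

                ac->criteria = cr;
                /* ask the iterator for candidate groups under this criteria */
                for (unsigned int i = 0; i < ngroups && next_group(ac, &group); i++) {
                        if (!group_is_good(group, cr))  /* cheap, mostly lockless filter */
                                continue;
                        scan_group(ac, group);          /* simple/aligned/complex scan */
                        if (ac->status != 0)            /* found something, or told to stop */
                                break;
                }
        }
        return ac->status == 1 ? 0 : -1;
}

The point of the escalation is that the cheap criteria reject groups from cached per-group statistics, so the expensive bitmap walks in ext4_mb_complex_scan_group() only run once the fast passes have come up empty.
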
4082 * Check quota and mark chosen space (ac->ac_b_ex) non-free in bitmaps
4086 ext4_mb_mark_diskspace_used(struct ext4_allocation_context *ac, in ext4_mb_mark_diskspace_used() argument
4097 BUG_ON(ac->ac_status != AC_STATUS_FOUND); in ext4_mb_mark_diskspace_used()
4098 BUG_ON(ac->ac_b_ex.fe_len <= 0); in ext4_mb_mark_diskspace_used()
4100 sb = ac->ac_sb; in ext4_mb_mark_diskspace_used()
4103 gdp = ext4_get_group_desc(sb, ac->ac_b_ex.fe_group, NULL); in ext4_mb_mark_diskspace_used()
4106 ext4_debug("using block group %u(%d)\n", ac->ac_b_ex.fe_group, in ext4_mb_mark_diskspace_used()
4109 block = ext4_grp_offs_to_block(sb, &ac->ac_b_ex); in ext4_mb_mark_diskspace_used()
4110 len = EXT4_C2B(sbi, ac->ac_b_ex.fe_len); in ext4_mb_mark_diskspace_used()
4111 if (!ext4_inode_block_valid(ac->ac_inode, block, len)) { in ext4_mb_mark_diskspace_used()
4119 ac->ac_b_ex.fe_group, in ext4_mb_mark_diskspace_used()
4120 ac->ac_b_ex.fe_start, in ext4_mb_mark_diskspace_used()
4121 ac->ac_b_ex.fe_len, in ext4_mb_mark_diskspace_used()
4131 err = ext4_mb_mark_context(handle, sb, true, ac->ac_b_ex.fe_group, in ext4_mb_mark_diskspace_used()
4132 ac->ac_b_ex.fe_start, ac->ac_b_ex.fe_len, in ext4_mb_mark_diskspace_used()
4139 BUG_ON(changed != ac->ac_b_ex.fe_len); in ext4_mb_mark_diskspace_used()
4141 percpu_counter_sub(&sbi->s_freeclusters_counter, ac->ac_b_ex.fe_len); in ext4_mb_mark_diskspace_used()
4145 if (!(ac->ac_flags & EXT4_MB_DELALLOC_RESERVED)) in ext4_mb_mark_diskspace_used()
4211 static void ext4_mb_normalize_group_request(struct ext4_allocation_context *ac) in ext4_mb_normalize_group_request() argument
4213 struct super_block *sb = ac->ac_sb; in ext4_mb_normalize_group_request()
4214 struct ext4_locality_group *lg = ac->ac_lg; in ext4_mb_normalize_group_request()
4217 ac->ac_g_ex.fe_len = EXT4_SB(sb)->s_mb_group_prealloc; in ext4_mb_normalize_group_request()
4218 mb_debug(sb, "goal %u blocks for locality group\n", ac->ac_g_ex.fe_len); in ext4_mb_normalize_group_request()
4240 ext4_mb_pa_assert_overlap(struct ext4_allocation_context *ac, in ext4_mb_pa_assert_overlap() argument
4243 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_pa_assert_overlap()
4244 struct ext4_inode_info *ei = EXT4_I(ac->ac_inode); in ext4_mb_pa_assert_overlap()
4267 * Given an allocation context "ac" and a range "start", "end", check
4272 * ac allocation context
4277 ext4_mb_pa_adjust_overlap(struct ext4_allocation_context *ac, in ext4_mb_pa_adjust_overlap() argument
4280 struct ext4_inode_info *ei = EXT4_I(ac->ac_inode); in ext4_mb_pa_adjust_overlap()
4281 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_pa_adjust_overlap()
4299 iter = ext4_mb_pa_rb_next_iter(ac->ac_o_ex.fe_logical, in ext4_mb_pa_adjust_overlap()
4309 BUG_ON(!(ac->ac_o_ex.fe_logical >= tmp_pa_end || in ext4_mb_pa_adjust_overlap()
4310 ac->ac_o_ex.fe_logical < tmp_pa_start)); in ext4_mb_pa_adjust_overlap()
4319 if (tmp_pa->pa_lstart < ac->ac_o_ex.fe_logical) { in ext4_mb_pa_adjust_overlap()
4385 BUG_ON(left_pa_end > ac->ac_o_ex.fe_logical); in ext4_mb_pa_adjust_overlap()
4390 BUG_ON(right_pa_start <= ac->ac_o_ex.fe_logical); in ext4_mb_pa_adjust_overlap()
4406 ext4_mb_pa_assert_overlap(ac, new_start, new_end); in ext4_mb_pa_adjust_overlap()
4417 ext4_mb_normalize_request(struct ext4_allocation_context *ac, in ext4_mb_normalize_request() argument
4420 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_normalize_request()
4429 if (!(ac->ac_flags & EXT4_MB_HINT_DATA)) in ext4_mb_normalize_request()
4433 if (unlikely(ac->ac_flags & EXT4_MB_HINT_GOAL_ONLY)) in ext4_mb_normalize_request()
4438 if (ac->ac_flags & EXT4_MB_HINT_NOPREALLOC) in ext4_mb_normalize_request()
4441 if (ac->ac_flags & EXT4_MB_HINT_GROUP_ALLOC) { in ext4_mb_normalize_request()
4442 ext4_mb_normalize_group_request(ac); in ext4_mb_normalize_request()
4446 bsbits = ac->ac_sb->s_blocksize_bits; in ext4_mb_normalize_request()
4450 size = extent_logical_end(sbi, &ac->ac_o_ex); in ext4_mb_normalize_request()
4452 if (size < i_size_read(ac->ac_inode)) in ext4_mb_normalize_request()
4453 size = i_size_read(ac->ac_inode); in ext4_mb_normalize_request()
4480 start_off = ((loff_t)ac->ac_o_ex.fe_logical >> in ext4_mb_normalize_request()
4484 start_off = ((loff_t)ac->ac_o_ex.fe_logical >> in ext4_mb_normalize_request()
4487 } else if (NRL_CHECK_SIZE(EXT4_C2B(sbi, ac->ac_o_ex.fe_len), in ext4_mb_normalize_request()
4489 start_off = ((loff_t)ac->ac_o_ex.fe_logical >> in ext4_mb_normalize_request()
4493 start_off = (loff_t) ac->ac_o_ex.fe_logical << bsbits; in ext4_mb_normalize_request()
4495 ac->ac_o_ex.fe_len) << bsbits; in ext4_mb_normalize_request()
4506 start = max(start, rounddown(ac->ac_o_ex.fe_logical, in ext4_mb_normalize_request()
4507 (ext4_lblk_t)EXT4_BLOCKS_PER_GROUP(ac->ac_sb))); in ext4_mb_normalize_request()
4525 if (size > EXT4_BLOCKS_PER_GROUP(ac->ac_sb)) in ext4_mb_normalize_request()
4526 size = EXT4_BLOCKS_PER_GROUP(ac->ac_sb); in ext4_mb_normalize_request()
4530 ext4_mb_pa_adjust_overlap(ac, &start, &end); in ext4_mb_normalize_request()
4538 * ac->ac_o_ex.fe_logical & fe_len should always lie within "start" and in ext4_mb_normalize_request()
4549 if (start + size <= ac->ac_o_ex.fe_logical || in ext4_mb_normalize_request()
4550 start > ac->ac_o_ex.fe_logical) { in ext4_mb_normalize_request()
4551 ext4_msg(ac->ac_sb, KERN_ERR, in ext4_mb_normalize_request()
4554 (unsigned long) ac->ac_o_ex.fe_logical); in ext4_mb_normalize_request()
4557 BUG_ON(size <= 0 || size > EXT4_BLOCKS_PER_GROUP(ac->ac_sb)); in ext4_mb_normalize_request()
4563 ac->ac_g_ex.fe_logical = start; in ext4_mb_normalize_request()
4564 ac->ac_g_ex.fe_len = EXT4_NUM_B2C(sbi, size); in ext4_mb_normalize_request()
4565 ac->ac_orig_goal_len = ac->ac_g_ex.fe_len; in ext4_mb_normalize_request()
4572 ext4_get_group_no_and_offset(ac->ac_sb, ar->pright - size, in ext4_mb_normalize_request()
4573 &ac->ac_g_ex.fe_group, in ext4_mb_normalize_request()
4574 &ac->ac_g_ex.fe_start); in ext4_mb_normalize_request()
4575 ac->ac_flags |= EXT4_MB_HINT_TRY_GOAL; in ext4_mb_normalize_request()
4580 ext4_get_group_no_and_offset(ac->ac_sb, ar->pleft + 1, in ext4_mb_normalize_request()
4581 &ac->ac_g_ex.fe_group, in ext4_mb_normalize_request()
4582 &ac->ac_g_ex.fe_start); in ext4_mb_normalize_request()
4583 ac->ac_flags |= EXT4_MB_HINT_TRY_GOAL; in ext4_mb_normalize_request()
4586 mb_debug(ac->ac_sb, "goal: %lld(was %lld) blocks at %u\n", size, in ext4_mb_normalize_request()
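
ext4_mb_normalize_request() above widens the goal to a preallocation window chosen from a table keyed on file size, then clamps it to one block group and trims it against existing preallocations. The sketch below is only a generic stand-in for the sizing step (power-of-two rounding instead of the kernel's actual threshold table); normalize_window() and its parameters are hypothetical.

static unsigned long long normalize_window(unsigned long long request_end,
                                           unsigned long long isize_blocks,
                                           unsigned long long blocks_per_group)
{
        unsigned long long size = request_end > isize_blocks ? request_end
                                                             : isize_blocks;
        unsigned long long win = 1;

        while (win < size && win < blocks_per_group)
                win <<= 1;              /* next power of two that covers the file */
        if (win > blocks_per_group)
                win = blocks_per_group; /* never ask for more than one block group */
        return win;
}
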
4590 static void ext4_mb_collect_stats(struct ext4_allocation_context *ac) in ext4_mb_collect_stats() argument
4592 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_collect_stats()
4594 if (sbi->s_mb_stats && ac->ac_g_ex.fe_len >= 1) { in ext4_mb_collect_stats()
4596 atomic_add(ac->ac_b_ex.fe_len, &sbi->s_bal_allocated); in ext4_mb_collect_stats()
4597 if (ac->ac_b_ex.fe_len >= ac->ac_o_ex.fe_len) in ext4_mb_collect_stats()
4600 atomic_add(ac->ac_found, &sbi->s_bal_ex_scanned); in ext4_mb_collect_stats()
4602 atomic_add(ac->ac_cX_found[i], &sbi->s_bal_cX_ex_scanned[i]); in ext4_mb_collect_stats()
4605 atomic_add(ac->ac_groups_scanned, &sbi->s_bal_groups_scanned); in ext4_mb_collect_stats()
4606 if (ac->ac_g_ex.fe_start == ac->ac_b_ex.fe_start && in ext4_mb_collect_stats()
4607 ac->ac_g_ex.fe_group == ac->ac_b_ex.fe_group) in ext4_mb_collect_stats()
4610 if (ac->ac_f_ex.fe_len == ac->ac_orig_goal_len) in ext4_mb_collect_stats()
4613 if (ac->ac_found > sbi->s_mb_max_to_scan) in ext4_mb_collect_stats()
4617 if (ac->ac_op == EXT4_MB_HISTORY_ALLOC) in ext4_mb_collect_stats()
4618 trace_ext4_mballoc_alloc(ac); in ext4_mb_collect_stats()
4620 trace_ext4_mballoc_prealloc(ac); in ext4_mb_collect_stats()
4627 * zeroed out ac->ac_b_ex.fe_len, so group_pa->pa_free is not changed.
4629 static void ext4_discard_allocated_blocks(struct ext4_allocation_context *ac) in ext4_discard_allocated_blocks() argument
4631 struct ext4_prealloc_space *pa = ac->ac_pa; in ext4_discard_allocated_blocks()
4636 if (ac->ac_f_ex.fe_len == 0) in ext4_discard_allocated_blocks()
4638 err = ext4_mb_load_buddy(ac->ac_sb, ac->ac_f_ex.fe_group, &e4b); in ext4_discard_allocated_blocks()
4647 ext4_lock_group(ac->ac_sb, ac->ac_f_ex.fe_group); in ext4_discard_allocated_blocks()
4648 mb_free_blocks(ac->ac_inode, &e4b, ac->ac_f_ex.fe_start, in ext4_discard_allocated_blocks()
4649 ac->ac_f_ex.fe_len); in ext4_discard_allocated_blocks()
4650 ext4_unlock_group(ac->ac_sb, ac->ac_f_ex.fe_group); in ext4_discard_allocated_blocks()
4656 pa->pa_free += ac->ac_b_ex.fe_len; in ext4_discard_allocated_blocks()
4664 static void ext4_mb_use_inode_pa(struct ext4_allocation_context *ac, in ext4_mb_use_inode_pa() argument
4667 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_use_inode_pa()
4673 start = pa->pa_pstart + (ac->ac_o_ex.fe_logical - pa->pa_lstart); in ext4_mb_use_inode_pa()
4675 start + EXT4_C2B(sbi, ac->ac_o_ex.fe_len)); in ext4_mb_use_inode_pa()
4677 ext4_get_group_no_and_offset(ac->ac_sb, start, &ac->ac_b_ex.fe_group, in ext4_mb_use_inode_pa()
4678 &ac->ac_b_ex.fe_start); in ext4_mb_use_inode_pa()
4679 ac->ac_b_ex.fe_len = len; in ext4_mb_use_inode_pa()
4680 ac->ac_status = AC_STATUS_FOUND; in ext4_mb_use_inode_pa()
4681 ac->ac_pa = pa; in ext4_mb_use_inode_pa()
4686 BUG_ON(ac->ac_b_ex.fe_len <= 0); in ext4_mb_use_inode_pa()
4689 mb_debug(ac->ac_sb, "use %llu/%d from inode pa %p\n", start, len, pa); in ext4_mb_use_inode_pa()
4695 static void ext4_mb_use_group_pa(struct ext4_allocation_context *ac, in ext4_mb_use_group_pa() argument
4698 unsigned int len = ac->ac_o_ex.fe_len; in ext4_mb_use_group_pa()
4700 ext4_get_group_no_and_offset(ac->ac_sb, pa->pa_pstart, in ext4_mb_use_group_pa()
4701 &ac->ac_b_ex.fe_group, in ext4_mb_use_group_pa()
4702 &ac->ac_b_ex.fe_start); in ext4_mb_use_group_pa()
4703 ac->ac_b_ex.fe_len = len; in ext4_mb_use_group_pa()
4704 ac->ac_status = AC_STATUS_FOUND; in ext4_mb_use_group_pa()
4705 ac->ac_pa = pa; in ext4_mb_use_group_pa()
4713 mb_debug(ac->ac_sb, "use %u/%u from group pa %p\n", in ext4_mb_use_group_pa()
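
ext4_mb_use_inode_pa() above maps the request's logical offset into the preallocated physical range (pa_pstart + (logical - pa_lstart)) and clips the length to what the PA still covers; ext4_mb_use_group_pa() simply takes the requested length from the front of the group PA. A model of the inode-PA carve-out, with units kept as plain blocks (the kernel converts between blocks and clusters with EXT4_C2B/EXT4_B2C); struct prealloc and struct carved are illustrative.

struct prealloc { unsigned long long pstart, lstart; unsigned int len; };
struct carved   { unsigned long long pstart; unsigned int len; };

static struct carved use_inode_pa(const struct prealloc *pa,
                                  unsigned long long req_logical,
                                  unsigned int req_len)
{
        /* physical position of the request inside the preallocated range */
        unsigned long long start = pa->pstart + (req_logical - pa->lstart);
        unsigned long long end = pa->pstart + pa->len;  /* end of the PA */
        struct carved c;

        if (end > start + req_len)      /* don't take more than was asked for */
                end = start + req_len;
        c.pstart = start;
        c.len = (unsigned int)(end - start);
        return c;
}
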
4750 ext4_mb_pa_goal_check(struct ext4_allocation_context *ac, in ext4_mb_pa_goal_check() argument
4753 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_pa_goal_check()
4756 if (likely(!(ac->ac_flags & EXT4_MB_HINT_GOAL_ONLY))) in ext4_mb_pa_goal_check()
4766 (ac->ac_g_ex.fe_logical - pa->pa_lstart); in ext4_mb_pa_goal_check()
4767 if (ext4_grp_offs_to_block(ac->ac_sb, &ac->ac_g_ex) != start) in ext4_mb_pa_goal_check()
4770 if (ac->ac_g_ex.fe_len > pa->pa_len - in ext4_mb_pa_goal_check()
4771 EXT4_B2C(sbi, ac->ac_g_ex.fe_logical - pa->pa_lstart)) in ext4_mb_pa_goal_check()
4781 ext4_mb_use_preallocated(struct ext4_allocation_context *ac) in ext4_mb_use_preallocated() argument
4783 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_use_preallocated()
4785 struct ext4_inode_info *ei = EXT4_I(ac->ac_inode); in ext4_mb_use_preallocated()
4792 if (!(ac->ac_flags & EXT4_MB_HINT_DATA)) in ext4_mb_use_preallocated()
4815 iter = ext4_mb_pa_rb_next_iter(ac->ac_o_ex.fe_logical, in ext4_mb_use_preallocated()
4826 if (tmp_pa->pa_lstart > ac->ac_o_ex.fe_logical) { in ext4_mb_use_preallocated()
4843 BUG_ON(!(tmp_pa && tmp_pa->pa_lstart <= ac->ac_o_ex.fe_logical)); in ext4_mb_use_preallocated()
4875 BUG_ON(!(tmp_pa && tmp_pa->pa_lstart <= ac->ac_o_ex.fe_logical)); in ext4_mb_use_preallocated()
4883 if (ac->ac_o_ex.fe_logical >= pa_logical_end(sbi, tmp_pa)) { in ext4_mb_use_preallocated()
4889 if (!(ext4_test_inode_flag(ac->ac_inode, EXT4_INODE_EXTENTS)) && in ext4_mb_use_preallocated()
4900 if (tmp_pa->pa_free && likely(ext4_mb_pa_goal_check(ac, tmp_pa))) { in ext4_mb_use_preallocated()
4902 ext4_mb_use_inode_pa(ac, tmp_pa); in ext4_mb_use_preallocated()
4941 if (!(ac->ac_flags & EXT4_MB_HINT_GROUP_ALLOC)) in ext4_mb_use_preallocated()
4945 lg = ac->ac_lg; in ext4_mb_use_preallocated()
4948 order = fls(ac->ac_o_ex.fe_len) - 1; in ext4_mb_use_preallocated()
4953 goal_block = ext4_grp_offs_to_block(ac->ac_sb, &ac->ac_g_ex); in ext4_mb_use_preallocated()
4964 tmp_pa->pa_free >= ac->ac_o_ex.fe_len) { in ext4_mb_use_preallocated()
4974 ext4_mb_use_group_pa(ac, cpa); in ext4_mb_use_preallocated()
5064 static void ext4_mb_put_pa(struct ext4_allocation_context *ac, in ext4_mb_put_pa() argument
5069 struct ext4_inode_info *ei = EXT4_I(ac->ac_inode); in ext4_mb_put_pa()
5156 ext4_mb_new_inode_pa(struct ext4_allocation_context *ac) in ext4_mb_new_inode_pa() argument
5158 struct super_block *sb = ac->ac_sb; in ext4_mb_new_inode_pa()
5165 BUG_ON(ac->ac_o_ex.fe_len >= ac->ac_b_ex.fe_len); in ext4_mb_new_inode_pa()
5166 BUG_ON(ac->ac_status != AC_STATUS_FOUND); in ext4_mb_new_inode_pa()
5167 BUG_ON(!S_ISREG(ac->ac_inode->i_mode)); in ext4_mb_new_inode_pa()
5168 BUG_ON(ac->ac_pa == NULL); in ext4_mb_new_inode_pa()
5170 pa = ac->ac_pa; in ext4_mb_new_inode_pa()
5172 if (ac->ac_b_ex.fe_len < ac->ac_orig_goal_len) { in ext4_mb_new_inode_pa()
5174 .fe_logical = ac->ac_g_ex.fe_logical, in ext4_mb_new_inode_pa()
5175 .fe_len = ac->ac_orig_goal_len, in ext4_mb_new_inode_pa()
5178 loff_t o_ex_end = extent_logical_end(sbi, &ac->ac_o_ex); in ext4_mb_new_inode_pa()
5188 BUG_ON(ac->ac_g_ex.fe_logical > ac->ac_o_ex.fe_logical); in ext4_mb_new_inode_pa()
5189 BUG_ON(ac->ac_g_ex.fe_len < ac->ac_o_ex.fe_len); in ext4_mb_new_inode_pa()
5202 ex.fe_len = ac->ac_b_ex.fe_len; in ext4_mb_new_inode_pa()
5205 if (ac->ac_o_ex.fe_logical >= ex.fe_logical) in ext4_mb_new_inode_pa()
5208 ex.fe_logical = ac->ac_g_ex.fe_logical; in ext4_mb_new_inode_pa()
5212 ex.fe_logical = ac->ac_o_ex.fe_logical; in ext4_mb_new_inode_pa()
5214 ac->ac_b_ex.fe_logical = ex.fe_logical; in ext4_mb_new_inode_pa()
5216 BUG_ON(ac->ac_o_ex.fe_logical < ac->ac_b_ex.fe_logical); in ext4_mb_new_inode_pa()
5220 pa->pa_lstart = ac->ac_b_ex.fe_logical; in ext4_mb_new_inode_pa()
5221 pa->pa_pstart = ext4_grp_offs_to_block(sb, &ac->ac_b_ex); in ext4_mb_new_inode_pa()
5222 pa->pa_len = ac->ac_b_ex.fe_len; in ext4_mb_new_inode_pa()
5231 trace_ext4_mb_new_inode_pa(ac, pa); in ext4_mb_new_inode_pa()
5234 ext4_mb_use_inode_pa(ac, pa); in ext4_mb_new_inode_pa()
5236 ei = EXT4_I(ac->ac_inode); in ext4_mb_new_inode_pa()
5237 grp = ext4_get_group_info(sb, ac->ac_b_ex.fe_group); in ext4_mb_new_inode_pa()
5242 pa->pa_inode = ac->ac_inode; in ext4_mb_new_inode_pa()
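
ext4_mb_new_inode_pa() above turns the extent that was just marked used into a per-inode preallocation pinned to a logical start, then immediately satisfies the request from it. A minimal model of what gets recorded; struct inode_pa and new_inode_pa() are illustrative, and the logical-window adjustment for short allocations (the ac_b_ex.fe_len < ac_orig_goal_len branch) is not modeled.

struct inode_pa { unsigned long long pstart, lstart; unsigned int len, free; };

static struct inode_pa new_inode_pa(unsigned long long best_pstart,
                                    unsigned long long best_logical,
                                    unsigned int best_len,
                                    unsigned int used_now)
{
        struct inode_pa pa;

        pa.pstart = best_pstart;        /* physical start of the marked extent */
        pa.lstart = best_logical;       /* logical start the PA is pinned to */
        pa.len = best_len;              /* the whole extent is owned by the PA */
        pa.free = best_len - used_now;  /* what remains for later allocations */
        return pa;
}
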
5256 ext4_mb_new_group_pa(struct ext4_allocation_context *ac) in ext4_mb_new_group_pa() argument
5258 struct super_block *sb = ac->ac_sb; in ext4_mb_new_group_pa()
5264 BUG_ON(ac->ac_o_ex.fe_len >= ac->ac_b_ex.fe_len); in ext4_mb_new_group_pa()
5265 BUG_ON(ac->ac_status != AC_STATUS_FOUND); in ext4_mb_new_group_pa()
5266 BUG_ON(!S_ISREG(ac->ac_inode->i_mode)); in ext4_mb_new_group_pa()
5267 BUG_ON(ac->ac_pa == NULL); in ext4_mb_new_group_pa()
5269 pa = ac->ac_pa; in ext4_mb_new_group_pa()
5271 pa->pa_pstart = ext4_grp_offs_to_block(sb, &ac->ac_b_ex); in ext4_mb_new_group_pa()
5273 pa->pa_len = ac->ac_b_ex.fe_len; in ext4_mb_new_group_pa()
5283 trace_ext4_mb_new_group_pa(ac, pa); in ext4_mb_new_group_pa()
5285 ext4_mb_use_group_pa(ac, pa); in ext4_mb_new_group_pa()
5288 grp = ext4_get_group_info(sb, ac->ac_b_ex.fe_group); in ext4_mb_new_group_pa()
5291 lg = ac->ac_lg; in ext4_mb_new_group_pa()
5305 static void ext4_mb_new_preallocation(struct ext4_allocation_context *ac) in ext4_mb_new_preallocation() argument
5307 if (ac->ac_flags & EXT4_MB_HINT_GROUP_ALLOC) in ext4_mb_new_preallocation()
5308 ext4_mb_new_group_pa(ac); in ext4_mb_new_preallocation()
5310 ext4_mb_new_inode_pa(ac); in ext4_mb_new_preallocation()
5620 static int ext4_mb_pa_alloc(struct ext4_allocation_context *ac) in ext4_mb_pa_alloc() argument
5629 ac->ac_pa = pa; in ext4_mb_pa_alloc()
5633 static void ext4_mb_pa_put_free(struct ext4_allocation_context *ac) in ext4_mb_pa_put_free() argument
5635 struct ext4_prealloc_space *pa = ac->ac_pa; in ext4_mb_pa_put_free()
5638 ac->ac_pa = NULL; in ext4_mb_pa_put_free()
5684 static void ext4_mb_show_ac(struct ext4_allocation_context *ac) in ext4_mb_show_ac() argument
5686 struct super_block *sb = ac->ac_sb; in ext4_mb_show_ac()
5694 ac->ac_status, ac->ac_flags); in ext4_mb_show_ac()
5698 (unsigned long)ac->ac_o_ex.fe_group, in ext4_mb_show_ac()
5699 (unsigned long)ac->ac_o_ex.fe_start, in ext4_mb_show_ac()
5700 (unsigned long)ac->ac_o_ex.fe_len, in ext4_mb_show_ac()
5701 (unsigned long)ac->ac_o_ex.fe_logical, in ext4_mb_show_ac()
5702 (unsigned long)ac->ac_g_ex.fe_group, in ext4_mb_show_ac()
5703 (unsigned long)ac->ac_g_ex.fe_start, in ext4_mb_show_ac()
5704 (unsigned long)ac->ac_g_ex.fe_len, in ext4_mb_show_ac()
5705 (unsigned long)ac->ac_g_ex.fe_logical, in ext4_mb_show_ac()
5706 (unsigned long)ac->ac_b_ex.fe_group, in ext4_mb_show_ac()
5707 (unsigned long)ac->ac_b_ex.fe_start, in ext4_mb_show_ac()
5708 (unsigned long)ac->ac_b_ex.fe_len, in ext4_mb_show_ac()
5709 (unsigned long)ac->ac_b_ex.fe_logical, in ext4_mb_show_ac()
5710 (int)ac->ac_criteria); in ext4_mb_show_ac()
5711 mb_debug(sb, "%u found", ac->ac_found); in ext4_mb_show_ac()
5712 mb_debug(sb, "used pa: %s, ", str_yes_no(ac->ac_pa)); in ext4_mb_show_ac()
5713 if (ac->ac_pa) in ext4_mb_show_ac()
5714 mb_debug(sb, "pa_type %s\n", ac->ac_pa->pa_type == MB_GROUP_PA ? in ext4_mb_show_ac()
5722 static inline void ext4_mb_show_ac(struct ext4_allocation_context *ac) in ext4_mb_show_ac() argument
5724 ext4_mb_show_pa(ac->ac_sb); in ext4_mb_show_ac()
5735 static void ext4_mb_group_or_file(struct ext4_allocation_context *ac) in ext4_mb_group_or_file() argument
5737 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_group_or_file()
5738 int bsbits = ac->ac_sb->s_blocksize_bits; in ext4_mb_group_or_file()
5742 if (!(ac->ac_flags & EXT4_MB_HINT_DATA)) in ext4_mb_group_or_file()
5745 if (unlikely(ac->ac_flags & EXT4_MB_HINT_GOAL_ONLY)) in ext4_mb_group_or_file()
5750 size = extent_logical_end(sbi, &ac->ac_o_ex); in ext4_mb_group_or_file()
5751 isize = (i_size_read(ac->ac_inode) + ac->ac_sb->s_blocksize - 1) in ext4_mb_group_or_file()
5756 !inode_is_open_for_write(ac->ac_inode)) in ext4_mb_group_or_file()
5766 ac->ac_flags |= EXT4_MB_STREAM_ALLOC; in ext4_mb_group_or_file()
5768 ac->ac_flags |= EXT4_MB_HINT_NOPREALLOC; in ext4_mb_group_or_file()
5772 BUG_ON(ac->ac_lg != NULL); in ext4_mb_group_or_file()
5778 ac->ac_lg = raw_cpu_ptr(sbi->s_locality_groups); in ext4_mb_group_or_file()
5781 ac->ac_flags |= EXT4_MB_HINT_GROUP_ALLOC; in ext4_mb_group_or_file()
5784 mutex_lock(&ac->ac_lg->lg_mutex); in ext4_mb_group_or_file()
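
ext4_mb_group_or_file() above chooses between stream allocation for large files (EXT4_MB_STREAM_ALLOC, the goal follows the last allocation) and per-CPU locality-group preallocation for small ones (EXT4_MB_HINT_GROUP_ALLOC plus lg_mutex). A sketch of that policy split; stream_request corresponds to the s_mb_stream_request tunable, the other names are illustrative, and the open-for-write / size == isize special case is left out.

#include <stdbool.h>

enum alloc_policy { POLICY_NONE, POLICY_STREAM, POLICY_GROUP };

static enum alloc_policy group_or_file(unsigned long long request_end_blk,
                                       unsigned long long isize_blk,
                                       unsigned long long stream_request,
                                       bool is_data, bool goal_only)
{
        unsigned long long size;

        if (!is_data || goal_only)      /* metadata and pinned-goal requests */
                return POLICY_NONE;     /* are left alone */

        size = request_end_blk > isize_blk ? request_end_blk : isize_blk;
        if (size > stream_request)      /* big file: stream allocation, the goal */
                return POLICY_STREAM;   /* follows the last allocated block */

        return POLICY_GROUP;            /* small file: per-CPU locality-group PA */
}
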
5788 ext4_mb_initialize_context(struct ext4_allocation_context *ac, in ext4_mb_initialize_context() argument
5814 ac->ac_b_ex.fe_logical = EXT4_LBLK_CMASK(sbi, ar->logical); in ext4_mb_initialize_context()
5815 ac->ac_status = AC_STATUS_CONTINUE; in ext4_mb_initialize_context()
5816 ac->ac_sb = sb; in ext4_mb_initialize_context()
5817 ac->ac_inode = ar->inode; in ext4_mb_initialize_context()
5818 ac->ac_o_ex.fe_logical = ac->ac_b_ex.fe_logical; in ext4_mb_initialize_context()
5819 ac->ac_o_ex.fe_group = group; in ext4_mb_initialize_context()
5820 ac->ac_o_ex.fe_start = block; in ext4_mb_initialize_context()
5821 ac->ac_o_ex.fe_len = len; in ext4_mb_initialize_context()
5822 ac->ac_g_ex = ac->ac_o_ex; in ext4_mb_initialize_context()
5823 ac->ac_orig_goal_len = ac->ac_g_ex.fe_len; in ext4_mb_initialize_context()
5824 ac->ac_flags = ar->flags; in ext4_mb_initialize_context()
5828 ext4_mb_group_or_file(ac); in ext4_mb_initialize_context()
5830 mb_debug(sb, "init ac: %u blocks @ %u, goal %u, flags 0x%x, 2^%d, " in ext4_mb_initialize_context()
5833 (unsigned) ar->goal, ac->ac_flags, ac->ac_2order, in ext4_mb_initialize_context()
5923 static void ext4_mb_add_n_trim(struct ext4_allocation_context *ac) in ext4_mb_add_n_trim() argument
5926 struct super_block *sb = ac->ac_sb; in ext4_mb_add_n_trim()
5927 struct ext4_locality_group *lg = ac->ac_lg; in ext4_mb_add_n_trim()
5928 struct ext4_prealloc_space *tmp_pa, *pa = ac->ac_pa; in ext4_mb_add_n_trim()
5971 static void ext4_mb_release_context(struct ext4_allocation_context *ac) in ext4_mb_release_context() argument
5973 struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb); in ext4_mb_release_context()
5974 struct ext4_prealloc_space *pa = ac->ac_pa; in ext4_mb_release_context()
5979 pa->pa_pstart += EXT4_C2B(sbi, ac->ac_b_ex.fe_len); in ext4_mb_release_context()
5980 pa->pa_lstart += EXT4_C2B(sbi, ac->ac_b_ex.fe_len); in ext4_mb_release_context()
5981 pa->pa_free -= ac->ac_b_ex.fe_len; in ext4_mb_release_context()
5982 pa->pa_len -= ac->ac_b_ex.fe_len; in ext4_mb_release_context()
5995 ext4_mb_add_n_trim(ac); in ext4_mb_release_context()
5999 ext4_mb_put_pa(ac, ac->ac_sb, pa); in ext4_mb_release_context()
6001 if (ac->ac_bitmap_folio) in ext4_mb_release_context()
6002 folio_put(ac->ac_bitmap_folio); in ext4_mb_release_context()
6003 if (ac->ac_buddy_folio) in ext4_mb_release_context()
6004 folio_put(ac->ac_buddy_folio); in ext4_mb_release_context()
6005 if (ac->ac_flags & EXT4_MB_HINT_GROUP_ALLOC) in ext4_mb_release_context()
6006 mutex_unlock(&ac->ac_lg->lg_mutex); in ext4_mb_release_context()
6007 ext4_mb_collect_stats(ac); in ext4_mb_release_context()
6038 struct ext4_allocation_context *ac, u64 *seq) in ext4_mb_discard_preallocations_should_retry() argument
6044 freed = ext4_mb_discard_preallocations(sb, ac->ac_o_ex.fe_len); in ext4_mb_discard_preallocations_should_retry()
6050 if (!(ac->ac_flags & EXT4_MB_STRICT_CHECK) || seq_retry != *seq) { in ext4_mb_discard_preallocations_should_retry()
6051 ac->ac_flags |= EXT4_MB_STRICT_CHECK; in ext4_mb_discard_preallocations_should_retry()
6137 struct ext4_allocation_context *ac = NULL; in ext4_mb_new_blocks() local
6195 ac = kmem_cache_zalloc(ext4_ac_cachep, GFP_NOFS); in ext4_mb_new_blocks()
6196 if (!ac) { in ext4_mb_new_blocks()
6202 ext4_mb_initialize_context(ac, ar); in ext4_mb_new_blocks()
6204 ac->ac_op = EXT4_MB_HISTORY_PREALLOC; in ext4_mb_new_blocks()
6206 if (!ext4_mb_use_preallocated(ac)) { in ext4_mb_new_blocks()
6207 ac->ac_op = EXT4_MB_HISTORY_ALLOC; in ext4_mb_new_blocks()
6208 ext4_mb_normalize_request(ac, ar); in ext4_mb_new_blocks()
6210 *errp = ext4_mb_pa_alloc(ac); in ext4_mb_new_blocks()
6215 *errp = ext4_mb_regular_allocator(ac); in ext4_mb_new_blocks()
6219 * ac->ac_status == AC_STATUS_FOUND. in ext4_mb_new_blocks()
6220 * And error from above mean ac->ac_status != AC_STATUS_FOUND in ext4_mb_new_blocks()
6224 ext4_mb_pa_put_free(ac); in ext4_mb_new_blocks()
6225 ext4_discard_allocated_blocks(ac); in ext4_mb_new_blocks()
6228 if (ac->ac_status == AC_STATUS_FOUND && in ext4_mb_new_blocks()
6229 ac->ac_o_ex.fe_len >= ac->ac_f_ex.fe_len) in ext4_mb_new_blocks()
6230 ext4_mb_pa_put_free(ac); in ext4_mb_new_blocks()
6232 if (likely(ac->ac_status == AC_STATUS_FOUND)) { in ext4_mb_new_blocks()
6233 *errp = ext4_mb_mark_diskspace_used(ac, handle, reserv_clstrs); in ext4_mb_new_blocks()
6235 ext4_discard_allocated_blocks(ac); in ext4_mb_new_blocks()
6238 block = ext4_grp_offs_to_block(sb, &ac->ac_b_ex); in ext4_mb_new_blocks()
6239 ar->len = ac->ac_b_ex.fe_len; in ext4_mb_new_blocks()
6243 ext4_mb_discard_preallocations_should_retry(sb, ac, &seq)) in ext4_mb_new_blocks()
6249 ext4_mb_pa_put_free(ac); in ext4_mb_new_blocks()
6255 ac->ac_b_ex.fe_len = 0; in ext4_mb_new_blocks()
6257 ext4_mb_show_ac(ac); in ext4_mb_new_blocks()
6259 ext4_mb_release_context(ac); in ext4_mb_new_blocks()
6260 kmem_cache_free(ext4_ac_cachep, ac); in ext4_mb_new_blocks()
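
Taken together, the ext4_mb_new_blocks() lines above give the top-level flow: build the allocation context, try preallocated space, otherwise normalize the goal and run the regular allocator, commit the result to the on-disk bitmaps, and on ENOSPC discard preallocations and retry. Below is a condensed, compilable model of that control flow; every callback is a stand-in for the corresponding ext4_mb_* helper, and quota, journal credits and context teardown are ignored.

#include <stdbool.h>
#include <errno.h>

struct request { unsigned long long block; unsigned int len; };

static int new_blocks(struct request *out,
                      bool (*use_preallocated)(struct request *),
                      void (*normalize_request)(void),
                      int  (*regular_allocator)(struct request *),
                      int  (*mark_diskspace_used)(const struct request *),
                      bool (*discard_should_retry)(void))
{
        struct request req = { 0, 0 };
        int err;

        do {
                if (!use_preallocated(&req)) {  /* nothing usable in inode/group PAs */
                        normalize_request();    /* widen the goal for preallocation */
                        err = regular_allocator(&req);
                        if (err)
                                return err;
                }
                err = mark_diskspace_used(&req);        /* journal + bitmap update */
                if (err == 0) {
                        *out = req;
                        return 0;
                }
                /* -ENOSPC: release unused preallocations and try once more */
        } while (err == -ENOSPC && discard_should_retry());

        return err;
}
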