Lines matching "end" in fs/btrfs/extent-io-tree.c (Linux kernel)
46 pr_err("BTRFS: state leak: start %llu end %llu state %u in tree %d refs %d\n", in btrfs_extent_state_leak_debug_check()
47 state->start, state->end, state->state, in btrfs_extent_state_leak_debug_check()
55 #define btrfs_debug_check_extent_io_range(tree, start, end) \ argument
56 __btrfs_debug_check_extent_io_range(__func__, (tree), (start), (end))
59 u64 start, u64 end) in __btrfs_debug_check_extent_io_range() argument
69 if (end >= PAGE_SIZE && (end % 2) == 0 && end != isize - 1) { in __btrfs_debug_check_extent_io_range()
72 caller, btrfs_ino(inode), isize, start, end); in __btrfs_debug_check_extent_io_range()
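
The check at source line 69 is subtle enough to merit a model: ranges in this tree are inclusive, so a well-formed end offset is start + length - 1, which for even (sector- or page-aligned) lengths is odd. An even end that isn't isize - 1 therefore suggests a caller passed an exclusive end. A minimal userspace model of that predicate (odd_range is a hypothetical name, not from the kernel):

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096ULL

/* Model of the sanity check: an inclusive 'end' past the first page
 * should be odd, because aligned lengths are even (4096 - 1 = 4095). */
static int odd_range(uint64_t end, uint64_t isize)
{
        return end >= PAGE_SIZE && (end % 2) == 0 && end != isize - 1;
}

int main(void)
{
        printf("%d\n", odd_range(8191, 16384)); /* 0: proper inclusive end */
        printf("%d\n", odd_range(8192, 16384)); /* 1: looks exclusive, flagged */
        return 0;
}
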
214 changeset->bytes_changed += state->end - state->start + 1; in add_extent_changeset()
215 ret = ulist_add(&changeset->range_changed, state->start, state->end, in add_extent_changeset()
242 * entry->start <= offset && entry->end >= offset.
273 else if (offset > entry->end) in tree_search_for_insert()
284 /* Search neighbors until we find the first one past the end */ in tree_search_for_insert()
285 while (entry && offset > entry->end) in tree_search_for_insert()
321 else if (offset > entry->end) in tree_search_prev_next()
328 while (entry && offset > entry->end) in tree_search_prev_next()
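
Both searches resolve a miss the same way: walk forward to the first entry whose inclusive range ends at or after the offset. A runnable userspace model over a sorted array, standing in for the kernel's rbtree (first_ending_at_or_after is a hypothetical name):

#include <stdint.h>
#include <stdio.h>

struct state { uint64_t start, end; };

/* Model of the post-search walk: index of the first state whose
 * inclusive range ends at or after 'offset', or -1 if none. */
static int first_ending_at_or_after(const struct state *s, int n,
                                    uint64_t offset)
{
        for (int i = 0; i < n; i++)
                if (s[i].end >= offset)
                        return i;
        return -1;
}

int main(void)
{
        const struct state s[] = { { 0, 4095 }, { 8192, 12287 } };

        printf("%d\n", first_ending_at_or_after(s, 2, 5000)); /* 1 */
        return 0;
}
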
354 "extent io tree error on %s state start %llu end %llu", in extent_io_tree_panic()
355 opname, state->start, state->end); in extent_io_tree_panic()
363 if (prev && prev->end == state->start - 1 && prev->state == state->state) { in merge_prev_state()
379 if (next && next->start == state->end + 1 && next->state == state->state) { in merge_next_state()
383 state->end = next->end; in merge_next_state()
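
merge_prev_state() and merge_next_state() only coalesce on exact adjacency (prev->end == state->start - 1, or next->start == state->end + 1) with identical state bits. A minimal model of the prev-side merge (try_merge_prev is hypothetical; the kernel version also unlinks and frees the absorbed state):

#include <stdint.h>
#include <stdio.h>

struct state { uint64_t start, end; uint32_t bits; };

/* Model of merge_prev_state(): absorb 'prev' into 'state' when the two
 * inclusive ranges touch exactly and carry identical bits. */
static int try_merge_prev(struct state *state, const struct state *prev)
{
        if (prev->end == state->start - 1 && prev->bits == state->bits) {
                state->start = prev->start; /* caller would free 'prev' */
                return 1;
        }
        return 0;
}

int main(void)
{
        struct state prev = { 0, 4095, 0x1 }, cur = { 4096, 8191, 0x1 };

        if (try_merge_prev(&cur, &prev))
                printf("[%llu,%llu]\n", (unsigned long long)cur.start,
                       (unsigned long long)cur.end); /* [0,8191] */
        return 0;
}
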
446 const u64 end = state->end + 1; in insert_state() local
458 if (state->end < entry->start) { in insert_state()
459 if (try_merge && end == entry->start && in insert_state()
471 } else if (state->end > entry->end) { in insert_state()
472 if (try_merge && entry->end == start && in insert_state()
478 entry->end = state->end; in insert_state()
515 * the tree has 'orig' at [orig->start, orig->end]. After calling, there
518 * orig: [ split, orig->end ]
534 prealloc->end = split - 1; in split_state()
546 if (prealloc->end < entry->start) { in split_state()
548 } else if (prealloc->end > entry->end) { in split_state()
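
The split_state() contract from source lines 515-534: prealloc takes [orig->start, split - 1] and orig shrinks to [split, orig->end]. A runnable sketch of just that arithmetic (the real function also relinks both states in the rbtree and can fail with -EEXIST):

#include <stdint.h>
#include <stdio.h>

struct state { uint64_t start, end; };

/* Model of split_state(): 'prealloc' takes the low half, 'orig' keeps
 * [split, orig->end]. 'split' must satisfy start < split <= end. */
static void split(struct state *orig, struct state *prealloc, uint64_t split)
{
        prealloc->start = orig->start;
        prealloc->end = split - 1;
        orig->start = split;
}

int main(void)
{
        struct state orig = { 0, 8191 }, pre;

        split(&orig, &pre, 4096);
        printf("[%llu,%llu] [%llu,%llu]\n",
               (unsigned long long)pre.start, (unsigned long long)pre.end,
               (unsigned long long)orig.start, (unsigned long long)orig.end);
        return 0;
}
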
621 * The range [start, end] is inclusive.
625 int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __clear_extent_bit() argument
640 btrfs_debug_check_extent_io_range(tree, start, end); in __clear_extent_bit()
641 trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits); in __clear_extent_bit()
655 * Don't care for allocation failure here because we might end in __clear_extent_bit()
659 * If we end up needing a new extent state we allocate it later. in __clear_extent_bit()
674 cached->start <= start && cached->end > start) { in __clear_extent_bit()
684 /* This search will find the extents that end after our range starts. */ in __clear_extent_bit()
689 if (state->start > end) in __clear_extent_bit()
691 WARN_ON(state->end < start); in __clear_extent_bit()
692 last_end = state->end; in __clear_extent_bit()
726 if (state->end <= end) { in __clear_extent_bit()
737 if (state->start <= end && state->end > end) { in __clear_extent_bit()
741 err = split_state(tree, state, prealloc, end + 1); in __clear_extent_bit()
759 if (start <= end && state && !need_resched()) in __clear_extent_bit()
763 if (start > end) in __clear_extent_bit()
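
Source lines 726-741 show the boundary handling: a state fully inside [start, end] is cleared outright, while one straddling end is first split at end + 1 so only the low half loses its bits. A hedged sketch of a caller, reusing the six-argument call shape visible at source line 1830 (kernel context assumed, not standalone; EXTENT_DELALLOC is a real bit in this tree):

/* Hedged sketch: clear one bit over an inclusive range; a state
 * straddling 'end' is split at end + 1 inside __clear_extent_bit(). */
static int clear_range_delalloc(struct extent_io_tree *tree, u64 start, u64 end)
{
        return __clear_extent_bit(tree, start, end, EXTENT_DELALLOC,
                                  NULL, NULL);
}
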
781 * The range [start, end] is inclusive.
784 static void wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in wait_extent_bit() argument
789 btrfs_debug_check_extent_io_range(tree, start, end); in wait_extent_bit()
800 state->start <= start && start < state->end) in wait_extent_bit()
805 * This search will find all the extents that end after our in wait_extent_bit()
812 if (state->start > end) in wait_extent_bit()
828 start = state->end + 1; in wait_extent_bit()
830 if (start > end) in wait_extent_bit()
878 * This search will find all the extents that end after our range in find_first_extent_bit_state()
883 if (state->end >= start && (state->state & bits)) in find_first_extent_bit_state()
908 if (state->end == start - 1 && extent_state_in_tree(state)) { in find_first_extent_bit()
935 *end_ret = state->end; in find_first_extent_bit()
971 *end_ret = state->end; in find_contiguous_extent_bit()
975 *end_ret = state->end; in find_contiguous_extent_bit()
985 * than 'max_bytes'. 'start' and 'end' are used to return the range,
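
find_first_extent_bit() returns the first matching state's inclusive bounds; find_contiguous_extent_bit() additionally extends end_ret while successive states are exactly adjacent. A runnable model of that extension loop (extend_contiguous is a hypothetical name):

#include <stdint.h>
#include <stdio.h>

struct state { uint64_t start, end; };

/* Model of the find_contiguous_extent_bit() tail: from match s[i],
 * extend the end while the next state begins exactly at end + 1. */
static uint64_t extend_contiguous(const struct state *s, int n, int i)
{
        uint64_t end = s[i].end;

        while (++i < n && s[i].start == end + 1)
                end = s[i].end;
        return end;
}

int main(void)
{
        const struct state s[] = { { 0, 4095 }, { 4096, 8191 }, { 16384, 20479 } };

        printf("%llu\n", (unsigned long long)extend_contiguous(s, 3, 0)); /* 8191 */
        return 0;
}
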
990 u64 *end, u64 max_bytes, in btrfs_find_delalloc_range() argument
1001 * This search will find all the extents that end after our range in btrfs_find_delalloc_range()
1006 *end = (u64)-1; in btrfs_find_delalloc_range()
1017 *end = state->end; in btrfs_find_delalloc_range()
1026 *end = state->end; in btrfs_find_delalloc_range()
1027 cur_start = state->end + 1; in btrfs_find_delalloc_range()
1028 total_bytes += state->end - state->start + 1; in btrfs_find_delalloc_range()
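
The delalloc walk accumulates end - start + 1 per state, advances cur_start to state->end + 1, and stops at a gap or once max_bytes is reached. A runnable model of the accumulation (walk_delalloc is hypothetical and omits the found/locking details of the real function):

#include <stdint.h>
#include <stdio.h>

struct state { uint64_t start, end; };

/* Model of the accumulation loop: total contiguous bytes from s[0]
 * onward, stopping at a gap or once 'max_bytes' is reached. */
static uint64_t walk_delalloc(const struct state *s, int n, uint64_t max_bytes)
{
        uint64_t total = 0, next = s[0].start;

        for (int i = 0; i < n && total < max_bytes; i++) {
                if (s[i].start != next)
                        break; /* gap: range is no longer contiguous */
                total += s[i].end - s[i].start + 1;
                next = s[i].end + 1;
        }
        return total;
}

int main(void)
{
        const struct state s[] = { { 0, 4095 }, { 4096, 8191 }, { 16384, 20479 } };

        printf("%llu\n", (unsigned long long)walk_delalloc(s, 3, 1 << 20)); /* 8192 */
        return 0;
}
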
1050 * [start, end] is inclusive. This takes the tree lock.
1052 static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __set_extent_bit() argument
1069 btrfs_debug_check_extent_io_range(tree, start, end); in __set_extent_bit()
1070 trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits); in __set_extent_bit()
1079 * Don't care for allocation failure here because we might end in __set_extent_bit()
1083 * If we end up needing a new extent state we allocate it later. in __set_extent_bit()
1091 if (state->start <= start && state->end > start && in __set_extent_bit()
1096 * This search will find all the extents that end after our range in __set_extent_bit()
1105 prealloc->end = end; in __set_extent_bit()
1113 last_end = state->end; in __set_extent_bit()
1121 if (state->start == start && state->end <= end) { in __set_extent_bit()
1136 if (start < end && state && state->start == start && in __set_extent_bit()
1170 start = state->end + 1; in __set_extent_bit()
1185 if (state->end <= end) { in __set_extent_bit()
1193 if (start < end && state && state->start == start && in __set_extent_bit()
1210 if (end < last_start) in __set_extent_bit()
1211 this_end = end; in __set_extent_bit()
1224 prealloc->end = this_end; in __set_extent_bit()
1243 if (state->start <= end && state->end > end) { in __set_extent_bit()
1254 err = split_state(tree, state, prealloc, end + 1); in __set_extent_bit()
1266 if (start > end) in __set_extent_bit()
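
set_extent_bit() at source line 1282 is the thin public wrapper over this function. A hedged sketch of a caller, assuming the five-argument wrapper signature suggested by the forwarding call at source line 1285 (kernel context assumed, not standalone):

/* Hedged sketch: tag an inclusive range dirty via the public wrapper;
 * the NULL is the optional cached-state pointer. */
static int mark_range_dirty(struct extent_io_tree *tree, u64 start, u64 end)
{
        return set_extent_bit(tree, start, end, EXTENT_DIRTY, NULL);
}
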
1282 int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_bit() argument
1285 return __set_extent_bit(tree, start, end, bits, NULL, NULL, in set_extent_bit()
1294 * @end: the end offset in bytes (inclusive)
1307 int convert_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in convert_extent_bit() argument
1320 btrfs_debug_check_extent_io_range(tree, start, end); in convert_extent_bit()
1321 trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits, in convert_extent_bit()
1341 if (state->start <= start && state->end > start && in convert_extent_bit()
1347 * This search will find all the extents that end after our range in convert_extent_bit()
1358 prealloc->end = end; in convert_extent_bit()
1366 last_end = state->end; in convert_extent_bit()
1374 if (state->start == start && state->end <= end) { in convert_extent_bit()
1381 if (start < end && state && state->start == start && in convert_extent_bit()
1414 if (state->end <= end) { in convert_extent_bit()
1421 if (start < end && state && state->start == start && in convert_extent_bit()
1438 if (end < last_start) in convert_extent_bit()
1439 this_end = end; in convert_extent_bit()
1454 prealloc->end = this_end; in convert_extent_bit()
1472 if (state->start <= end && state->end > end) { in convert_extent_bit()
1479 err = split_state(tree, state, prealloc, end + 1); in convert_extent_bit()
1491 if (start > end) in convert_extent_bit()
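
convert_extent_bit() repeats the three-phase walk of __set_extent_bit() but sets one bit mask and clears another in the same pass, under the tree lock. A hedged sketch of the usual pairing, as the transaction commit path uses for dirty metadata ranges (assumed six-argument signature; kernel context, not standalone):

/* Hedged sketch: set EXTENT_NEED_WAIT and clear EXTENT_DIRTY over
 * [start, end] in a single pass. */
static int dirty_to_need_wait(struct extent_io_tree *tree, u64 start, u64 end,
                              struct extent_state **cached)
{
        return convert_extent_bit(tree, start, end, EXTENT_NEED_WAIT,
                                  EXTENT_DIRTY, cached);
}
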
1513 * @end_ret: records the end of the range (inclusive)
1518 * spans (last_range_end, end of device]. In this case it's up to the caller to
1543 * the end of the last extent. in find_first_clear_extent_bit()
1545 *start_ret = prev->end + 1; in find_first_clear_extent_bit()
1556 if (in_range(start, state->start, state->end - state->start + 1)) { in find_first_clear_extent_bit()
1563 start = state->end + 1; in find_first_clear_extent_bit()
1590 *start_ret = prev->end + 1; in find_first_clear_extent_bit()
1602 if (state->end >= start && !(state->state & bits)) { in find_first_clear_extent_bit()
1603 *end_ret = state->end; in find_first_clear_extent_bit()
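
The hole search works in two steps: if start lands inside a set state (the in_range() test at source line 1556), skip past it to state->end + 1; the returned hole then runs from the previous state's end + 1 to the next state's start - 1, or to the end of the keyspace. A runnable model (first_clear is hypothetical and assumes a sorted, non-overlapping array):

#include <stdint.h>
#include <stdio.h>

struct state { uint64_t start, end; };

/* Model of the hole search: first gap at or after 'start', returned as
 * inclusive bounds; the final gap runs to the end of the keyspace. */
static void first_clear(const struct state *s, int n, uint64_t start,
                        uint64_t *start_ret, uint64_t *end_ret)
{
        for (int i = 0; i < n; i++) {
                if (start >= s[i].start && start <= s[i].end)
                        start = s[i].end + 1; /* inside a set range: skip it */
                if (start < s[i].start) {     /* gap before this range */
                        *start_ret = start;
                        *end_ret = s[i].start - 1;
                        return;
                }
        }
        *start_ret = start;
        *end_ret = (uint64_t)-1;
}

int main(void)
{
        const struct state s[] = { { 0, 4095 }, { 8192, 12287 } };
        uint64_t hs, he;

        first_clear(s, 2, 1000, &hs, &he);
        printf("[%llu,%llu]\n", (unsigned long long)hs,
               (unsigned long long)he); /* [4096,8191] */
        return 0;
}
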
1621 * can end up being bigger than the initial value.
1622 * @search_end: The end offset (inclusive value) of the search range.
1664 if (cached->start <= cur_start && cur_start <= cached->end) { in count_range_bits()
1679 else if (prev->start <= cur_start && cur_start <= prev->end) in count_range_bits()
1684 * This search will find all the extents that end after our range in count_range_bits()
1696 if (state->end >= cur_start && (state->state & bits) == bits) { in count_range_bits()
1697 total_bytes += min(search_end, state->end) + 1 - in count_range_bits()
1705 last = state->end; in count_range_bits()
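
The contribution of each matching state is clamped to the search window: min(search_end, state->end) + 1 - max(cur_start, state->start). A runnable worked example of that inclusive-range arithmetic:

#include <stdint.h>
#include <stdio.h>

static uint64_t min_u64(uint64_t a, uint64_t b) { return a < b ? a : b; }
static uint64_t max_u64(uint64_t a, uint64_t b) { return a > b ? a : b; }

int main(void)
{
        /* State [4096, 16383] intersected with window [8192, 12287]:
         * min(12287, 16383) + 1 - max(8192, 4096) = 12288 - 8192 = 4096. */
        uint64_t state_start = 4096, state_end = 16383;
        uint64_t cur_start = 8192, search_end = 12287;

        printf("%llu\n", (unsigned long long)
               (min_u64(search_end, state_end) + 1 -
                max_u64(cur_start, state_start)));
        return 0;
}
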
1727 bool test_range_bit_exists(struct extent_io_tree *tree, u64 start, u64 end, u32 bit) in test_range_bit_exists() argument
1736 while (state && start <= end) { in test_range_bit_exists()
1737 if (state->start > end) in test_range_bit_exists()
1745 /* If state->end is (u64)-1, start will overflow to 0 */ in test_range_bit_exists()
1746 start = state->end + 1; in test_range_bit_exists()
1747 if (start > end || start == 0) in test_range_bit_exists()
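
The start == 0 test guards u64 wraparound: a state ending at (u64)-1 makes state->end + 1 wrap to zero, which would otherwise restart the walk from the beginning of the keyspace. A runnable demonstration:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint64_t state_end = UINT64_MAX; /* last possible inclusive end */
        uint64_t start = state_end + 1;  /* wraps to 0 */

        if (start == 0)
                printf("wrapped: the walk must stop here\n");
        return 0;
}
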
1756 * Check if the whole range [@start, @end] contains the single @bit set.
1758 bool test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, u32 bit, in test_range_bit() argument
1768 cached->end > start) in test_range_bit()
1772 while (state && start <= end) { in test_range_bit()
1778 if (state->start > end) in test_range_bit()
1786 if (state->end == (u64)-1) in test_range_bit()
1790 * Last entry (if state->end is (u64)-1 and overflow happens), in test_range_bit()
1793 start = state->end + 1; in test_range_bit()
1794 if (start > end || start == 0) in test_range_bit()
1807 int set_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in set_record_extent_bits() argument
1818 return __set_extent_bit(tree, start, end, bits, NULL, NULL, NULL, changeset); in set_record_extent_bits()
1821 int clear_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in clear_record_extent_bits() argument
1830 return __clear_extent_bit(tree, start, end, bits, NULL, changeset); in clear_record_extent_bits()
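
The _record_ variants thread an extent_changeset through __set_extent_bit()/__clear_extent_bit() so that add_extent_changeset() (source lines 214-215) can log every touched range and its end - start + 1 byte count. A hedged sketch of the qgroup-style usage (kernel context assumed; extent_changeset_init/release are taken from this file's header):

/* Hedged sketch: record which bytes newly gained EXTENT_QGROUP_RESERVED;
 * each added range contributes end - start + 1 to bytes_changed. */
struct extent_changeset changeset;
int ret;

extent_changeset_init(&changeset);
ret = set_record_extent_bits(tree, start, end,
                             EXTENT_QGROUP_RESERVED, &changeset);
extent_changeset_release(&changeset);
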
1833 int try_lock_extent(struct extent_io_tree *tree, u64 start, u64 end, in try_lock_extent() argument
1839 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, &failed_start, in try_lock_extent()
1851 * Either insert or lock a state struct between start and end; use the mask to tell
1854 int lock_extent(struct extent_io_tree *tree, u64 start, u64 end, in lock_extent() argument
1861 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, &failed_start, in lock_extent()
1868 wait_extent_bit(tree, failed_start, end, EXTENT_LOCKED, in lock_extent()
1870 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, in lock_extent()
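
On -EEXIST, lock_extent() waits for EXTENT_LOCKED to clear from failed_start onward and retries (source lines 1868-1870), while try_lock_extent() backs out and returns failure instead. A hedged sketch of the usual pairing (kernel context assumed; unlock_extent() is the matching helper declared alongside these):

/* Hedged sketch: lock an inclusive range, operate on it, unlock it. */
struct extent_state *cached = NULL;

lock_extent(tree, start, end, &cached);
/* ... read or write [start, end] while it cannot change under us ... */
unlock_extent(tree, start, end, &cached);
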