Lines Matching refs: end
47 state->start, state->end, state->state, in btrfs_extent_state_leak_debug_check()
55 #define btrfs_debug_check_extent_io_range(tree, start, end) \ argument
56 __btrfs_debug_check_extent_io_range(__func__, (tree), (start), (end))
59 u64 start, u64 end) in __btrfs_debug_check_extent_io_range() argument
68 if (end >= PAGE_SIZE && (end % 2) == 0 && end != isize - 1) { in __btrfs_debug_check_extent_io_range()
71 caller, btrfs_ino(inode), isize, start, end); in __btrfs_debug_check_extent_io_range()
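The __btrfs_debug_check_extent_io_range() entries above flag ranges whose inclusive end looks wrong: an end offset that is even (not the last byte of an aligned block) and is not isize - 1 gets reported. As a hedged, standalone illustration of just that predicate (TOY_PAGE_SIZE and the sample offsets below are made up, this is not the kernel code):

#include <stdint.h>
#include <stdio.h>

#define TOY_PAGE_SIZE 4096u

/* Same predicate as "(end >= PAGE_SIZE && (end % 2) == 0 && end != isize - 1)"
 * in the listing: inclusive range ends are expected to be odd byte offsets
 * (e.g. 4095) unless the range stops exactly at isize - 1. */
static int range_end_looks_odd(uint64_t end, uint64_t isize)
{
	return end >= TOY_PAGE_SIZE && (end % 2) == 0 && end != isize - 1;
}

int main(void)
{
	printf("%d\n", range_end_looks_odd(8192, 1048576));  /* 1: even end, suspicious */
	printf("%d\n", range_end_looks_odd(8191, 1048576));  /* 0: ends on an odd offset */
	return 0;
}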
92 u64 end; member
190 changeset->bytes_changed += state->end - state->start + 1; in add_extent_changeset()
191 ret = ulist_add(&changeset->range_changed, state->start, state->end, in add_extent_changeset()
249 else if (offset > entry->end) in tree_search_for_insert()
261 while (entry && offset > entry->end) in tree_search_for_insert()
297 else if (offset > entry->end) in tree_search_prev_next()
304 while (entry && offset > entry->end) in tree_search_prev_next()
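tree_search_for_insert() and tree_search_prev_next() above descend by comparing the search offset against each entry's inclusive [start, end] and then walk forward while offset > entry->end. A hedged userspace analogue over a sorted array (the kernel uses an rbtree; the types here are toy ones) that returns the first range whose end is >= offset:

#include <stddef.h>
#include <stdint.h>

struct toy_state { uint64_t start, end; };	/* inclusive [start, end] */

/* First entry whose end >= offset, or NULL; mirrors the
 * "while (entry && offset > entry->end)" forward walk in the listing. */
static const struct toy_state *toy_search(const struct toy_state *states,
					  size_t n, uint64_t offset)
{
	for (size_t i = 0; i < n; i++) {
		if (offset <= states[i].end)
			return &states[i];
	}
	return NULL;
}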
347 if (other && other->end == state->start - 1 && in merge_state()
357 if (other && other->start == state->end + 1 && in merge_state()
361 state->end = other->end; in merge_state()
399 const u64 end = state->end; in insert_state() local
410 if (end < entry->start) { in insert_state()
412 } else if (end > entry->end) { in insert_state()
417 entry->start, entry->end, state->start, end); in insert_state()
467 prealloc->end = split - 1; in split_state()
479 if (prealloc->end < entry->start) { in split_state()
481 } else if (prealloc->end > entry->end) { in split_state()
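merge_state() above absorbs a neighbour only when the two ranges touch exactly (other->end == state->start - 1, or other->start == state->end + 1) and carry the same bits, while split_state() cuts a range at `split` so the new record covers [start, split - 1]. A hedged toy version of both pieces of arithmetic (hypothetical types, not the kernel structures):

#include <stdbool.h>
#include <stdint.h>

struct toy_state {
	uint64_t start, end;	/* inclusive */
	uint32_t bits;
};

/* Two ranges may merge only if they carry the same bits and are adjacent. */
static bool toy_can_merge(const struct toy_state *a, const struct toy_state *b)
{
	return a->bits == b->bits && a->end + 1 == b->start;
}

static void toy_merge(struct toy_state *a, const struct toy_state *b)
{
	a->end = b->end;		/* "state->end = other->end" */
}

/* Split [start, end] at `split`: the new record keeps [start, split - 1],
 * the original shrinks to [split, end] ("prealloc->end = split - 1"). */
static void toy_split(struct toy_state *orig, struct toy_state *first, uint64_t split)
{
	*first = *orig;
	first->end = split - 1;
	orig->start = split;
}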
557 int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __clear_extent_bit() argument
572 btrfs_debug_check_extent_io_range(tree, start, end); in __clear_extent_bit()
573 trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits); in __clear_extent_bit()
606 cached->start <= start && cached->end > start) { in __clear_extent_bit()
621 if (state->start > end) in __clear_extent_bit()
623 WARN_ON(state->end < start); in __clear_extent_bit()
624 last_end = state->end; in __clear_extent_bit()
658 if (state->end <= end) { in __clear_extent_bit()
669 if (state->start <= end && state->end > end) { in __clear_extent_bit()
673 err = split_state(tree, state, prealloc, end + 1); in __clear_extent_bit()
691 if (start <= end && state && !need_resched()) in __clear_extent_bit()
695 if (start > end) in __clear_extent_bit()
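In __clear_extent_bit() above, a state that only partially overlaps the request is split first: at `start` when the state begins before the request, and at `end + 1` when it extends past it, so that only the covered part loses its bits. A hedged userspace sketch of that boundary decision (toy types, made-up offsets, not the kernel code):

#include <stdint.h>
#include <stdio.h>

struct toy_state { uint64_t start, end; };	/* inclusive */

static void plan_clear(const struct toy_state *s, uint64_t start, uint64_t end)
{
	if (s->start < start)
		printf("split at %llu, keep [%llu, %llu]\n",
		       (unsigned long long)start,
		       (unsigned long long)s->start,
		       (unsigned long long)(start - 1));
	if (s->end > end)
		printf("split at %llu, keep [%llu, %llu]\n",
		       (unsigned long long)(end + 1),
		       (unsigned long long)(end + 1),
		       (unsigned long long)s->end);
	printf("clear bits in [%llu, %llu]\n",
	       (unsigned long long)(s->start > start ? s->start : start),
	       (unsigned long long)(s->end < end ? s->end : end));
}

int main(void)
{
	struct toy_state s = { 0, 16383 };

	plan_clear(&s, 4096, 8191);	/* splits at 4096 and at 8192 */
	return 0;
}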
729 void wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, u32 bits, in wait_extent_bit() argument
734 btrfs_debug_check_extent_io_range(tree, start, end); in wait_extent_bit()
745 state->start <= start && start < state->end) in wait_extent_bit()
757 if (state->start > end) in wait_extent_bit()
767 start = state->end + 1; in wait_extent_bit()
769 if (start > end) in wait_extent_bit()
822 if (state->end >= start && (state->state & bits)) in find_first_extent_bit_state()
847 if (state->end == start - 1 && extent_state_in_tree(state)) { in find_first_extent_bit()
865 *end_ret = state->end; in find_first_extent_bit()
899 *end_ret = state->end; in find_contiguous_extent_bit()
903 *end_ret = state->end; in find_contiguous_extent_bit()
918 u64 *end, u64 max_bytes, in btrfs_find_delalloc_range() argument
934 *end = (u64)-1; in btrfs_find_delalloc_range()
945 *end = state->end; in btrfs_find_delalloc_range()
954 *end = state->end; in btrfs_find_delalloc_range()
955 cur_start = state->end + 1; in btrfs_find_delalloc_range()
956 total_bytes += state->end - state->start + 1; in btrfs_find_delalloc_range()
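btrfs_find_delalloc_range() above extends *end across consecutive states ("*end = state->end", "cur_start = state->end + 1") and accumulates "state->end - state->start + 1" bytes until it hits a hole or reaches max_bytes. A hedged, self-contained analogue of that walk (toy data, not the kernel code):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct toy_state { uint64_t start, end; };	/* inclusive */

int main(void)
{
	const struct toy_state states[] = {
		{ 0, 4095 }, { 4096, 12287 }, { 20480, 24575 },	/* gap before the last one */
	};
	uint64_t cur_start = 0, found_end = 0, total_bytes = 0;
	const uint64_t max_bytes = 1 << 20;

	for (size_t i = 0; i < sizeof(states) / sizeof(states[0]); i++) {
		const struct toy_state *s = &states[i];

		if (s->start != cur_start)	/* hole: the contiguous run ends */
			break;
		found_end = s->end;		/* mirrors "*end = state->end" */
		total_bytes += s->end - s->start + 1;
		cur_start = s->end + 1;
		if (total_bytes >= max_bytes)
			break;
	}
	printf("contiguous run: [0, %llu], %llu bytes\n",
	       (unsigned long long)found_end, (unsigned long long)total_bytes);
	return 0;
}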
980 static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __set_extent_bit() argument
997 btrfs_debug_check_extent_io_range(tree, start, end); in __set_extent_bit()
998 trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits); in __set_extent_bit()
1019 if (state->start <= start && state->end > start && in __set_extent_bit()
1033 prealloc->end = end; in __set_extent_bit()
1041 last_end = state->end; in __set_extent_bit()
1049 if (state->start == start && state->end <= end) { in __set_extent_bit()
1064 if (start < end && state && state->start == start && in __set_extent_bit()
1098 start = state->end + 1; in __set_extent_bit()
1113 if (state->end <= end) { in __set_extent_bit()
1121 if (start < end && state && state->start == start && in __set_extent_bit()
1136 if (end < last_start) in __set_extent_bit()
1137 this_end = end; in __set_extent_bit()
1150 prealloc->end = this_end; in __set_extent_bit()
1166 if (state->start <= end && state->end > end) { in __set_extent_bit()
1177 err = split_state(tree, state, prealloc, end + 1); in __set_extent_bit()
1189 if (start > end) in __set_extent_bit()
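When __set_extent_bit() above reaches a hole before the next existing state (which starts at last_start), the new record inserted into the hole is capped at the request end or at last_start - 1, whichever is smaller ("if (end < last_start) this_end = end"). A one-function hedged illustration of that cap:

#include <stdint.h>

/* End of a state inserted into a hole that runs up to the next existing
 * state at last_start: never past the request (end), never into that state. */
static uint64_t toy_gap_fill_end(uint64_t end, uint64_t last_start)
{
	return end < last_start ? end : last_start - 1;
}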
1205 int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_bit() argument
1208 return __set_extent_bit(tree, start, end, bits, NULL, NULL, in set_extent_bit()
1230 int convert_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in convert_extent_bit() argument
1243 btrfs_debug_check_extent_io_range(tree, start, end); in convert_extent_bit()
1244 trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits, in convert_extent_bit()
1264 if (state->start <= start && state->end > start && in convert_extent_bit()
1281 prealloc->end = end; in convert_extent_bit()
1289 last_end = state->end; in convert_extent_bit()
1297 if (state->start == start && state->end <= end) { in convert_extent_bit()
1304 if (start < end && state && state->start == start && in convert_extent_bit()
1337 if (state->end <= end) { in convert_extent_bit()
1344 if (start < end && state && state->start == start && in convert_extent_bit()
1359 if (end < last_start) in convert_extent_bit()
1360 this_end = end; in convert_extent_bit()
1375 prealloc->end = this_end; in convert_extent_bit()
1390 if (state->start <= end && state->end > end) { in convert_extent_bit()
1397 err = split_state(tree, state, prealloc, end + 1); in convert_extent_bit()
1409 if (start > end) in convert_extent_bit()
1463 *start_ret = prev->end + 1; in find_first_clear_extent_bit()
1474 if (in_range(start, state->start, state->end - state->start + 1)) { in find_first_clear_extent_bit()
1481 start = state->end + 1; in find_first_clear_extent_bit()
1508 *start_ret = prev->end + 1; in find_first_clear_extent_bit()
1520 if (state->end >= start && !(state->state & bits)) { in find_first_clear_extent_bit()
1521 *end_ret = state->end; in find_first_clear_extent_bit()
1582 if (cached->start <= cur_start && cur_start <= cached->end) { in count_range_bits()
1597 else if (prev->start <= cur_start && cur_start <= prev->end) in count_range_bits()
1614 if (state->end >= cur_start && (state->state & bits) == bits) { in count_range_bits()
1615 total_bytes += min(search_end, state->end) + 1 - in count_range_bits()
1623 last = state->end; in count_range_bits()
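count_range_bits() above adds, for each matching state, only the part that falls inside the search window: min(search_end, state->end) + 1 - max(cur_start, state->start). A hedged standalone version of that inclusive overlap length:

#include <stdint.h>

/* Bytes of [state_start, state_end] inside [cur_start, search_end],
 * both ranges inclusive; the caller must ensure the ranges overlap. */
static uint64_t toy_overlap_bytes(uint64_t state_start, uint64_t state_end,
				  uint64_t cur_start, uint64_t search_end)
{
	uint64_t lo = state_start > cur_start ? state_start : cur_start;
	uint64_t hi = state_end < search_end ? state_end : search_end;

	return hi + 1 - lo;
}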
1647 int test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, in test_range_bit() argument
1655 cached->end > start) in test_range_bit()
1659 while (state && start <= end) { in test_range_bit()
1665 if (state->start > end) in test_range_bit()
1677 if (state->end == (u64)-1) in test_range_bit()
1680 start = state->end + 1; in test_range_bit()
1681 if (start > end) in test_range_bit()
1694 int set_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in set_record_extent_bits() argument
1705 return __set_extent_bit(tree, start, end, bits, NULL, NULL, NULL, changeset); in set_record_extent_bits()
1708 int clear_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in clear_record_extent_bits() argument
1717 return __clear_extent_bit(tree, start, end, bits, NULL, changeset); in clear_record_extent_bits()
1720 int try_lock_extent(struct extent_io_tree *tree, u64 start, u64 end, in try_lock_extent() argument
1726 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, &failed_start, in try_lock_extent()
1741 int lock_extent(struct extent_io_tree *tree, u64 start, u64 end, in lock_extent() argument
1748 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, &failed_start, in lock_extent()
1755 wait_extent_bit(tree, failed_start, end, EXTENT_LOCKED, in lock_extent()
1757 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, in lock_extent()
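lock_extent() above loops: it tries to set EXTENT_LOCKED, and on contention waits on the already-locked sub-range starting at failed_start before retrying, whereas try_lock_extent() backs off instead. A hedged caller-side sketch, assuming the (tree, start, end, &cached_state) parameter lists used around the kernel version this listing reflects (they differ between releases), so treat the exact signatures as an assumption rather than a reference:

/* Hedged sketch of a typical caller pattern; not taken from the kernel tree,
 * and the lock_extent()/unlock_extent() parameter lists are an assumption. */
#include "extent-io-tree.h"

static void toy_locked_region_example(struct extent_io_tree *tree,
				      u64 start, u64 end)
{
	struct extent_state *cached_state = NULL;

	lock_extent(tree, start, end, &cached_state);
	/* ... operate on [start, end] while it is EXTENT_LOCKED ... */
	unlock_extent(tree, start, end, &cached_state);
}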