Lines matching refs:end (references to the identifier end in fs/btrfs/extent-io-tree.c)

46 		       state->start, state->end, state->state,  in btrfs_extent_state_leak_debug_check()
54 #define btrfs_debug_check_extent_io_range(tree, start, end) \ argument
55 __btrfs_debug_check_extent_io_range(__func__, (tree), (start), (end))
58 u64 start, u64 end) in __btrfs_debug_check_extent_io_range() argument
67 if (end >= PAGE_SIZE && (end % 2) == 0 && end != isize - 1) { in __btrfs_debug_check_extent_io_range()
70 caller, btrfs_ino(BTRFS_I(inode)), isize, start, end); in __btrfs_debug_check_extent_io_range()
91 u64 end; member
190 changeset->bytes_changed += state->end - state->start + 1; in add_extent_changeset()
191 ret = ulist_add(&changeset->range_changed, state->start, state->end, in add_extent_changeset()
249 else if (offset > entry->end) in tree_search_for_insert()
261 while (entry && offset > entry->end) in tree_search_for_insert()
297 else if (offset > entry->end) in tree_search_prev_next()
304 while (entry && offset > entry->end) in tree_search_prev_next()
347 if (other && other->end == state->start - 1 && in merge_state()
358 if (other && other->start == state->end + 1 && in merge_state()
363 state->end = other->end; in merge_state()
401 const u64 end = state->end; in insert_state() local
412 if (end < entry->start) { in insert_state()
414 } else if (end > entry->end) { in insert_state()
419 entry->start, entry->end, state->start, end); in insert_state()
469 prealloc->end = split - 1; in split_state()
481 if (prealloc->end < entry->start) { in split_state()
483 } else if (prealloc->end > entry->end) { in split_state()
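
The merge_state(), insert_state() and split_state() hits above (file lines 347-483) capture the tree's core invariant: a record is coalesced with an adjacent neighbour whose bits match (other->end == state->start - 1 on the left, other->start == state->end + 1 on the right), and a record straddling a requested boundary is split, the new left half ending at split - 1. Below is a minimal user-space sketch of the right-merge rule; range_state and try_merge_right are hypothetical simplifications for illustration, not the kernel's types.

	#include <stdio.h>
	#include <stdint.h>

	/* Simplified stand-in for the kernel's struct extent_state. */
	struct range_state {
		uint64_t start;
		uint64_t end;	/* inclusive: the record covers [start, end] */
		unsigned int bits;
	};

	/*
	 * Merge rule modelled on merge_state(): a right-hand neighbour that
	 * starts at state->end + 1 with identical bits is absorbed by
	 * extending state->end (compare file lines 358 and 363 above).
	 */
	static int try_merge_right(struct range_state *state,
				   const struct range_state *other)
	{
		if (other->start == state->end + 1 && other->bits == state->bits) {
			state->end = other->end;
			return 1;
		}
		return 0;
	}

	int main(void)
	{
		struct range_state a = { 0, 4095, 0x1 };
		struct range_state b = { 4096, 8191, 0x1 };

		if (try_merge_right(&a, &b))
			printf("merged into [%llu, %llu]\n",
			       (unsigned long long)a.start,
			       (unsigned long long)a.end);
		return 0;
	}
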
549 int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __clear_extent_bit() argument
562 btrfs_debug_check_extent_io_range(tree, start, end); in __clear_extent_bit()
563 trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits); in __clear_extent_bit()
596 cached->start <= start && cached->end > start) { in __clear_extent_bit()
611 if (state->start > end) in __clear_extent_bit()
613 WARN_ON(state->end < start); in __clear_extent_bit()
614 last_end = state->end; in __clear_extent_bit()
647 if (state->end <= end) { in __clear_extent_bit()
658 if (state->start <= end && state->end > end) { in __clear_extent_bit()
661 err = split_state(tree, state, prealloc, end + 1); in __clear_extent_bit()
679 if (start <= end && state && !need_resched()) in __clear_extent_bit()
683 if (start > end) in __clear_extent_bit()
717 void wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, u32 bits) in wait_extent_bit() argument
721 btrfs_debug_check_extent_io_range(tree, start, end); in wait_extent_bit()
734 if (state->start > end) in wait_extent_bit()
744 start = state->end + 1; in wait_extent_bit()
746 if (start > end) in wait_extent_bit()
793 if (state->end >= start && (state->state & bits)) in find_first_extent_bit_state()
818 if (state->end == start - 1 && extent_state_in_tree(state)) { in find_first_extent_bit()
836 *end_ret = state->end; in find_first_extent_bit()
870 *end_ret = state->end; in find_contiguous_extent_bit()
874 *end_ret = state->end; in find_contiguous_extent_bit()
889 u64 *end, u64 max_bytes, in btrfs_find_delalloc_range() argument
905 *end = (u64)-1; in btrfs_find_delalloc_range()
916 *end = state->end; in btrfs_find_delalloc_range()
925 *end = state->end; in btrfs_find_delalloc_range()
926 cur_start = state->end + 1; in btrfs_find_delalloc_range()
927 total_bytes += state->end - state->start + 1; in btrfs_find_delalloc_range()
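
The btrfs_find_delalloc_range() hits (file lines 889-927) sketch the scan: starting from cur_start, each contiguous delalloc state extends *end, advances cur_start to state->end + 1, and adds its length to total_bytes until max_bytes is reached or the run breaks. Below is a hypothetical user-space model of that accumulation loop, with a simplified sorted array standing in for the extent state tree; none of these names are the kernel API.

	#include <stdio.h>
	#include <stdint.h>

	struct state { uint64_t start, end; int delalloc; };

	/*
	 * Modelled on btrfs_find_delalloc_range(): extend *end across
	 * contiguous delalloc states until max_bytes is accumulated or
	 * the run breaks on a gap or a non-delalloc state.
	 */
	static uint64_t find_delalloc(const struct state *s, int n,
				      uint64_t cur_start, uint64_t max_bytes,
				      uint64_t *end)
	{
		uint64_t total = 0;

		*end = cur_start;
		for (int i = 0; i < n; i++) {
			if (s[i].end < cur_start)
				continue;
			if (s[i].start > cur_start || !s[i].delalloc)
				break;
			*end = s[i].end;
			total += s[i].end - s[i].start + 1;
			cur_start = s[i].end + 1;
			if (total >= max_bytes)
				break;
		}
		return total;
	}

	int main(void)
	{
		const struct state s[] = {
			{ 0, 4095, 1 }, { 4096, 12287, 1 }, { 16384, 20479, 1 },
		};
		uint64_t end, total = find_delalloc(s, 3, 0, ~0ULL, &end);

		printf("delalloc run [0, %llu], %llu bytes\n",
		       (unsigned long long)end, (unsigned long long)total);
		return 0;
	}
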
947 static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __set_extent_bit() argument
961 btrfs_debug_check_extent_io_range(tree, start, end); in __set_extent_bit()
962 trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits); in __set_extent_bit()
983 if (state->start <= start && state->end > start && in __set_extent_bit()
996 prealloc->end = end; in __set_extent_bit()
1004 last_end = state->end; in __set_extent_bit()
1012 if (state->start == start && state->end <= end) { in __set_extent_bit()
1026 if (start < end && state && state->start == start && in __set_extent_bit()
1059 start = state->end + 1; in __set_extent_bit()
1073 if (state->end <= end) { in __set_extent_bit()
1081 if (start < end && state && state->start == start && in __set_extent_bit()
1096 if (end < last_start) in __set_extent_bit()
1097 this_end = end; in __set_extent_bit()
1109 prealloc->end = this_end; in __set_extent_bit()
1125 if (state->start <= end && state->end > end) { in __set_extent_bit()
1134 err = split_state(tree, state, prealloc, end + 1); in __set_extent_bit()
1146 if (start > end) in __set_extent_bit()
1162 int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_bit() argument
1165 return __set_extent_bit(tree, start, end, bits, NULL, cached_state, in set_extent_bit()
1187 int convert_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in convert_extent_bit() argument
1200 btrfs_debug_check_extent_io_range(tree, start, end); in convert_extent_bit()
1201 trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits, in convert_extent_bit()
1221 if (state->start <= start && state->end > start && in convert_extent_bit()
1238 prealloc->end = end; in convert_extent_bit()
1246 last_end = state->end; in convert_extent_bit()
1254 if (state->start == start && state->end <= end) { in convert_extent_bit()
1261 if (start < end && state && state->start == start && in convert_extent_bit()
1294 if (state->end <= end) { in convert_extent_bit()
1301 if (start < end && state && state->start == start && in convert_extent_bit()
1316 if (end < last_start) in convert_extent_bit()
1317 this_end = end; in convert_extent_bit()
1332 prealloc->end = this_end; in convert_extent_bit()
1347 if (state->start <= end && state->end > end) { in convert_extent_bit()
1354 err = split_state(tree, state, prealloc, end + 1); in convert_extent_bit()
1366 if (start > end) in convert_extent_bit()
1420 *start_ret = prev->end + 1; in find_first_clear_extent_bit()
1431 if (in_range(start, state->start, state->end - state->start + 1)) { in find_first_clear_extent_bit()
1438 start = state->end + 1; in find_first_clear_extent_bit()
1465 *start_ret = prev->end + 1; in find_first_clear_extent_bit()
1477 if (state->end >= start && !(state->state & bits)) { in find_first_clear_extent_bit()
1478 *end_ret = state->end; in find_first_clear_extent_bit()
1519 if (state->end >= cur_start && (state->state & bits) == bits) { in count_range_bits()
1520 total_bytes += min(search_end, state->end) + 1 - in count_range_bits()
1528 last = state->end; in count_range_bits()
1543 int test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, in test_range_bit() argument
1551 cached->end > start) in test_range_bit()
1555 while (state && start <= end) { in test_range_bit()
1561 if (state->start > end) in test_range_bit()
1573 if (state->end == (u64)-1) in test_range_bit()
1576 start = state->end + 1; in test_range_bit()
1577 if (start > end) in test_range_bit()
1590 int set_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in set_record_extent_bits() argument
1601 return __set_extent_bit(tree, start, end, bits, NULL, NULL, changeset, in set_record_extent_bits()
1605 int clear_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in clear_record_extent_bits() argument
1614 return __clear_extent_bit(tree, start, end, bits, NULL, GFP_NOFS, in clear_record_extent_bits()
1618 int try_lock_extent(struct extent_io_tree *tree, u64 start, u64 end) in try_lock_extent() argument
1623 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, &failed_start, in try_lock_extent()
1638 int lock_extent(struct extent_io_tree *tree, u64 start, u64 end, in lock_extent() argument
1644 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, &failed_start, in lock_extent()
1651 wait_extent_bit(tree, failed_start, end, EXTENT_LOCKED); in lock_extent()
1652 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, in lock_extent()
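
The try_lock_extent() and lock_extent() hits (file lines 1618-1652) show locking implemented as setting EXTENT_LOCKED over the range: when __set_extent_bit() returns -EEXIST, failed_start names the first already-locked offset, lock_extent() waits for EXTENT_LOCKED to clear over [failed_start, end], and retries. Below is a hypothetical user-space model of that wait-and-retry shape; none of these helpers are the kernel API, and the kernel additionally backs out any partially set prefix before sleeping, which the all-or-nothing helper here makes unnecessary.

	#include <stdio.h>
	#include <stdint.h>
	#include <errno.h>

	/* Hypothetical model: one bit per fixed-size unit of the range. */
	static uint32_t locked_mask;

	/*
	 * Modelled on __set_extent_bit() with EXTENT_LOCKED: fail with
	 * -EEXIST and report the first already-locked unit, as
	 * lock_extent() expects. Unlike the kernel path, this helper is
	 * all-or-nothing, so there is no partial prefix to back out.
	 */
	static int try_set_locked(unsigned start, unsigned end,
				  unsigned *failed_start)
	{
		for (unsigned i = start; i <= end; i++) {
			if (locked_mask & (1u << i)) {
				*failed_start = i;
				return -EEXIST;
			}
		}
		for (unsigned i = start; i <= end; i++)
			locked_mask |= 1u << i;
		return 0;
	}

	/*
	 * Stand-in for wait_extent_bit(): here the conflicting range is
	 * simply unlocked; in the kernel the caller sleeps until the
	 * holder clears EXTENT_LOCKED.
	 */
	static void wait_unlocked(unsigned start, unsigned end)
	{
		for (unsigned i = start; i <= end; i++)
			locked_mask &= ~(1u << i);
	}

	int main(void)
	{
		unsigned failed_start = 0;

		locked_mask = 1u << 5;	/* unit 5 is already locked */

		/* Same shape as lock_extent(): try, wait on conflict, retry. */
		while (try_set_locked(2, 9, &failed_start) == -EEXIST)
			wait_unlocked(failed_start, 9);

		printf("locked units 2-9, mask=0x%04x\n", locked_mask);
		return 0;
	}
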