Lines matching "state" in fs/btrfs/extent-io-tree.c

   1  // SPDX-License-Identifier: GPL-2.0
   6  #include "extent-io-tree.h"
  12  static inline bool extent_state_in_tree(const struct extent_state *state)
  14          return !RB_EMPTY_NODE(&state->rb_node);
  21  static inline void btrfs_leak_debug_add_state(struct extent_state *state)
  26          list_add(&state->leak_list, &states);
  30  static inline void btrfs_leak_debug_del_state(struct extent_state *state)
  35          list_del(&state->leak_list);
  41          struct extent_state *state;
  44                  state = list_entry(states.next, struct extent_state, leak_list);
  45                  pr_err("BTRFS: state leak: start %llu end %llu state %u in tree %d refs %d\n",
  46                         state->start, state->end, state->state,
  47                         extent_state_in_tree(state),
  48                         refcount_read(&state->refs));
  49                  list_del(&state->leak_list);
  50                  kmem_cache_free(extent_state_cache, state);
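
The leak-debug helpers above follow a simple pattern: every allocated state is linked onto a global list, removed again on free, and anything still on the list at shutdown is reported as a leak. A minimal userspace sketch of the same idea (struct obj, live_list and leak_check are invented for this example):

#include <stdio.h>
#include <stdlib.h>

struct obj {
        struct obj *next;
        int id;
};

static struct obj *live_list;   /* every allocated, not-yet-freed object */

static struct obj *obj_alloc(int id)
{
        struct obj *o = malloc(sizeof(*o));

        if (!o)
                return NULL;
        o->id = id;
        o->next = live_list;    /* counterpart of btrfs_leak_debug_add_state() */
        live_list = o;
        return o;
}

static void leak_check(void)
{
        /* counterpart of btrfs_extent_state_leak_debug_check() */
        for (struct obj *o = live_list; o; o = o->next)
                fprintf(stderr, "leak: obj %d\n", o->id);
}
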
  60          struct inode *inode = tree->private_data;
  67          if (end >= PAGE_SIZE && (end % 2) == 0 && end != isize - 1) {
  68                  btrfs_debug_rl(BTRFS_I(inode)->root->fs_info,
  74  #define btrfs_leak_debug_add_state(state)       do {} while (0)
  75  #define btrfs_leak_debug_del_state(state)       do {} while (0)
  99          tree->fs_info = fs_info;
 100          tree->state = RB_ROOT;
 101          spin_lock_init(&tree->lock);
 102          tree->private_data = private_data;
 103          tree->owner = owner;
 105                  lockdep_set_class(&tree->lock, &file_extent_tree_class);
 110          spin_lock(&tree->lock);
 112           * Do a single barrier for the waitqueue_active check here, the state
 117          while (!RB_EMPTY_ROOT(&tree->state)) {
 119                  struct extent_state *state;
 121                  node = rb_first(&tree->state);
 122                  state = rb_entry(node, struct extent_state, rb_node);
 123                  rb_erase(&state->rb_node, &tree->state);
 124                  RB_CLEAR_NODE(&state->rb_node);
 129                  ASSERT(!waitqueue_active(&state->wq));
 130                  free_extent_state(state);
 132                  cond_resched_lock(&tree->lock);
 134          spin_unlock(&tree->lock);
 139          struct extent_state *state;
 146          state = kmem_cache_alloc(extent_state_cache, mask);
 147          if (!state)
 148                  return state;
 149          state->state = 0;
 150          RB_CLEAR_NODE(&state->rb_node);
 151          btrfs_leak_debug_add_state(state);
 152          refcount_set(&state->refs, 1);
 153          init_waitqueue_head(&state->wq);
 154          trace_alloc_extent_state(state, mask, _RET_IP_);
 155          return state;
 166  void free_extent_state(struct extent_state *state)
 168          if (!state)
 170          if (refcount_dec_and_test(&state->refs)) {
 171                  WARN_ON(extent_state_in_tree(state));
 172                  btrfs_leak_debug_del_state(state);
 173                  trace_free_extent_state(state, _RET_IP_);
 174                  kmem_cache_free(extent_state_cache, state);
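
alloc_extent_state()/free_extent_state() implement a plain refcount protocol: a new state starts with one reference, each cached pointer takes another, and the object goes back to the slab only when the count drops to zero. A single-threaded userspace sketch of that lifecycle (the kernel uses atomic refcount_t; the names below are invented):

#include <stdlib.h>

struct refobj {
        int refs;
};

static struct refobj *refobj_alloc(void)
{
        struct refobj *o = calloc(1, sizeof(*o));

        if (o)
                o->refs = 1;            /* the caller owns the first reference */
        return o;
}

static void refobj_get(struct refobj *o)
{
        o->refs++;                      /* e.g. when stored in *cached_ptr */
}

static void refobj_put(struct refobj *o)
{
        if (o && --o->refs == 0)        /* the last reference frees it */
                free(o);
}
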
 178  static int add_extent_changeset(struct extent_state *state, u32 bits,
 186          if (set && (state->state & bits) == bits)
 188          if (!set && (state->state & bits) == 0)
 190          changeset->bytes_changed += state->end - state->start + 1;
 191          ret = ulist_add(&changeset->range_changed, state->start, state->end,
 196  static inline struct extent_state *next_state(struct extent_state *state)
 198          struct rb_node *next = rb_next(&state->rb_node);
 206  static inline struct extent_state *prev_state(struct extent_state *state)
 208          struct rb_node *next = rb_prev(&state->rb_node);
 218   * entry->start <= offset && entry->end >= offset.
 238          struct rb_root *root = &tree->state;
 239          struct rb_node **node = &root->rb_node;
 247                  if (offset < entry->start)
 248                          node = &(*node)->rb_left;
 249                  else if (offset > entry->end)
 250                          node = &(*node)->rb_right;
 261          while (entry && offset > entry->end)
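
The contract of tree_search_for_insert() is therefore: return the entry containing @offset or, failing that, the first entry beginning after it. The same contract over a sorted array instead of an rbtree (an illustrative sketch, not the kernel's implementation):

#include <stddef.h>
#include <stdint.h>

struct range {
        uint64_t start;
        uint64_t end;   /* inclusive; ranges sorted and non-overlapping */
};

static const struct range *search(const struct range *r, size_t n,
                                  uint64_t offset)
{
        for (size_t i = 0; i < n; i++) {
                if (offset < r[i].start)
                        return &r[i];   /* first range past offset */
                if (offset <= r[i].end)
                        return &r[i];   /* range containing offset */
        }
        return NULL;                    /* offset is beyond the last range */
}
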
 284          struct rb_root *root = &tree->state;
 285          struct rb_node **node = &root->rb_node;
 295                  if (offset < entry->start)
 296                          node = &(*node)->rb_left;
 297                  else if (offset > entry->end)
 298                          node = &(*node)->rb_right;
 304          while (entry && offset > entry->end)
 309          while (entry && offset < entry->start)
 317   * Inexact rb-tree search, return the next entry if @offset is not found
 326          btrfs_panic(tree->fs_info, err,
 332   * extents with matching state are merged together into a single extent in the
 333   * tree. Extents with EXTENT_IO in their state field are not merged because
 339  static void merge_state(struct extent_io_tree *tree, struct extent_state *state)
 343          if (state->state & (EXTENT_LOCKED | EXTENT_BOUNDARY))
 346          other = prev_state(state);
 347          if (other && other->end == state->start - 1 &&
 348              other->state == state->state) {
 349                  if (tree->private_data)
 350                          btrfs_merge_delalloc_extent(tree->private_data,
 351                                                      state, other);
 352                  state->start = other->start;
 353                  rb_erase(&other->rb_node, &tree->state);
 354                  RB_CLEAR_NODE(&other->rb_node);
 357          other = next_state(state);
 358          if (other && other->start == state->end + 1 &&
 359              other->state == state->state) {
 360                  if (tree->private_data)
 361                          btrfs_merge_delalloc_extent(tree->private_data, state,
 363                  state->end = other->end;
 364                  rb_erase(&other->rb_node, &tree->state);
 365                  RB_CLEAR_NODE(&other->rb_node);
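
Concretely: with identical state bits, [0, 4095] and [4096, 8191] satisfy other->end == state->start - 1 and collapse into a single record [0, 8191]. A minimal sketch of the adjacency test (struct rng is invented for the example):

#include <stdint.h>

struct rng {
        uint64_t start, end;    /* inclusive */
        unsigned int bits;
};

static int can_merge(const struct rng *prev, const struct rng *next)
{
        /* adjacent, and carrying exactly the same bits */
        return prev->end + 1 == next->start && prev->bits == next->bits;
}

static void absorb(struct rng *prev, struct rng *next)
{
        next->start = prev->start;      /* next absorbs prev, as in merge_state() */
}
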
 371                             struct extent_state *state,
 377          if (tree->private_data)
 378                  btrfs_set_delalloc_extent(tree->private_data, state, bits);
 380          ret = add_extent_changeset(state, bits_to_set, changeset, 1);
 382          state->state |= bits_to_set;
 389   * This may return -EEXIST if the extent is already there, in which case the
 390   * state struct is freed.
 396                          struct extent_state *state,
 401          const u64 end = state->end;
 403          set_state_bits(tree, state, bits, changeset);
 405          node = &tree->state.rb_node;
 412                  if (end < entry->start) {
 413                          node = &(*node)->rb_left;
 414                  } else if (end > entry->end) {
 415                          node = &(*node)->rb_right;
 417                          btrfs_err(tree->fs_info,
 419                                    entry->start, entry->end, state->start, end);
 420                          return -EEXIST;
 424          rb_link_node(&state->rb_node, parent, node);
 425          rb_insert_color(&state->rb_node, &tree->state);
 427          merge_state(tree, state);
 432   * Insert state into @tree at the location given by @node and @parent.
 435                               struct extent_state *state, struct rb_node **node,
 439          set_state_bits(tree, state, bits, changeset);
 440          rb_link_node(&state->rb_node, parent, node);
 441          rb_insert_color(&state->rb_node, &tree->state);
 442          merge_state(tree, state);
 446   * Split a given extent state struct in two, inserting the preallocated
 451   * the tree has 'orig' at [orig->start, orig->end]. After calling, there
 452   * are two extent state structs in the tree:
 453   *     prealloc: [orig->start, split - 1]
 454   *     orig:     [split, orig->end]
 465          if (tree->private_data)
 466                  btrfs_split_delalloc_extent(tree->private_data, orig, split);
 468          prealloc->start = orig->start;
 469          prealloc->end = split - 1;
 470          prealloc->state = orig->state;
 471          orig->start = split;
 473          parent = &orig->rb_node;
 481                  if (prealloc->end < entry->start) {
 482                          node = &(*node)->rb_left;
 483                  } else if (prealloc->end > entry->end) {
 484                          node = &(*node)->rb_right;
 487                          return -EEXIST;
 491          rb_link_node(&prealloc->rb_node, parent, node);
 492          rb_insert_color(&prealloc->rb_node, &tree->state);
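
For example, splitting [0, 8191] at 4096 leaves prealloc = [0, 4095] and orig = [4096, 8191], matching the diagram above. A minimal sketch (assumes orig->start < at <= orig->end):

#include <stdint.h>

struct rng {
        uint64_t start, end;    /* inclusive */
};

static void split(struct rng *orig, struct rng *prealloc, uint64_t at)
{
        prealloc->start = orig->start;
        prealloc->end = at - 1;         /* left half: [orig->start, at - 1] */
        orig->start = at;               /* right half: [at, orig->end] */
}
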
 498   * Utility function to clear some bits in an extent state struct. It will
 499   * optionally wake up anyone waiting on this state (wake == 1).
 501   * If no bits are set on the state struct after clearing things, the
 505                                              struct extent_state *state,
 513          if (tree->private_data)
 514                  btrfs_clear_delalloc_extent(tree->private_data, state, bits);
 516          ret = add_extent_changeset(state, bits_to_clear, changeset, 0);
 518          state->state &= ~bits_to_clear;
 520                  wake_up(&state->wq);
 521          if (state->state == 0) {
 522                  next = next_state(state);
 523                  if (extent_state_in_tree(state)) {
 524                          rb_erase(&state->rb_node, &tree->state);
 525                          RB_CLEAR_NODE(&state->rb_node);
 526                          free_extent_state(state);
 531                  merge_state(tree, state);
 532                  next = next_state(state);
 543   * range from the tree regardless of state (i.e. for truncate).
 553          struct extent_state *state;
 563          trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits);
 578           * up not needing the pre-allocated extent state at all, which
 581           * If we end up needing a new extent state we allocate it later.
 586          spin_lock(&tree->lock);
 596              cached->start <= start && cached->end > start) {
 598                          refcount_dec(&cached->refs);
 599                  state = cached;
 607          state = tree_search(tree, start);
 608          if (!state)
 611          if (state->start > end)
 613          WARN_ON(state->end < start);
 614          last_end = state->end;
 616          /* The state doesn't have the wanted bits, go ahead. */
 617          if (!(state->state & bits)) {
 618                  state = next_state(state);
 623           * | ---- desired range ---- |
 624           * |  state  | or
 625           * | ------------- state -------------- |
 637          if (state->start < start) {
 640                  err = split_state(tree, state, prealloc, start);
 647                  if (state->end <= end) {
 648                          state = clear_state_bit(tree, state, bits, wake, changeset);
 654           * | ---- desired range ---- |
 655           *                     | state |
 658          if (state->start <= end && state->end > end) {
 661                  err = split_state(tree, state, prealloc, end + 1);
 666                          wake_up(&state->wq);
 674          state = clear_state_bit(tree, state, bits, wake, changeset);
 676          if (last_end == (u64)-1)
 679          if (start <= end && state && !need_resched())
 685          spin_unlock(&tree->lock);
 691          spin_unlock(&tree->lock);
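
Putting the two split cases together: clearing a bit over [2048, 6143] when the tree holds a single state [0, 8191] takes one split at 2048 and another at 6144, so only the middle piece loses the bit. A sketch of the resulting pieces (hard-coded bounds, illustrative only; assumes 0 < lo <= hi < 8191):

#include <stdint.h>

struct rng {
        uint64_t start, end;    /* inclusive */
};

static void carve(struct rng out[3], uint64_t lo, uint64_t hi)
{
        out[0] = (struct rng){ 0, lo - 1 };     /* keeps the bit */
        out[1] = (struct rng){ lo, hi };        /* bit cleared here */
        out[2] = (struct rng){ hi + 1, 8191 };  /* keeps the bit */
}
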
 700                            struct extent_state *state)
 701                  __releases(tree->lock)
 702                  __acquires(tree->lock)
 705          prepare_to_wait(&state->wq, &wait, TASK_UNINTERRUPTIBLE);
 706          spin_unlock(&tree->lock);
 708          spin_lock(&tree->lock);
 709          finish_wait(&state->wq, &wait);
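
The ordering matters: the task registers on the waitqueue before tree->lock is dropped, so a wake_up() issued in that window is not lost but simply makes the sleep return immediately. A sketch of the full pattern (the listing elides some lines here, so the schedule() call is an assumption about the surrounding code):

        DEFINE_WAIT(wait);

        prepare_to_wait(&state->wq, &wait, TASK_UNINTERRUPTIBLE);
        spin_unlock(&tree->lock);       /* safe: already queued on state->wq */
        schedule();                     /* returns once wake_up(&state->wq) runs */
        spin_lock(&tree->lock);
        finish_wait(&state->wq, &wait);
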
 713   * Wait for one or more bits to clear on a range in the state tree.
 719          struct extent_state *state;
 723          spin_lock(&tree->lock);
 730                  state = tree_search(tree, start);
 732                  if (!state)
 734                  if (state->start > end)
 737                  if (state->state & bits) {
 738                          start = state->start;
 739                          refcount_inc(&state->refs);
 740                          wait_on_state(tree, state);
 741                          free_extent_state(state);
 744                  start = state->end + 1;
 749                  if (!cond_resched_lock(&tree->lock)) {
 750                          state = next_state(state);
 755          spin_unlock(&tree->lock);
 758  static void cache_state_if_flags(struct extent_state *state,
 763          if (!flags || (state->state & flags)) {
 764                  *cached_ptr = state;
 765                  refcount_inc(&state->refs);
 770  static void cache_state(struct extent_state *state,
 773          return cache_state_if_flags(state, cached_ptr,
 778   * Find the first state struct with 'bits' set after 'start', and return it.
 779   * tree->lock must be held. NULL will be returned if nothing was found after
 785          struct extent_state *state;
 791          state = tree_search(tree, start);
 792          while (state) {
 793                  if (state->end >= start && (state->state & bits))
 794                          return state;
 795                  state = next_state(state);
 812          struct extent_state *state;
 815          spin_lock(&tree->lock);
 817                  state = *cached_state;
 818                  if (state->end == start - 1 && extent_state_in_tree(state)) {
 819                          while ((state = next_state(state)) != NULL) {
 820                                  if (state->state & bits)
 831          state = find_first_extent_bit_state(tree, start, bits);
 833          if (state) {
 834                  cache_state_if_flags(state, cached_state, 0);
 835                  *start_ret = state->start;
 836                  *end_ret = state->end;
 840          spin_unlock(&tree->lock);
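
A typical caller walks every range carrying a bit by restarting the search just past each hit. A sketch against the signature visible here (EXTENT_DIRTY is only an example bit, and the function is assumed to return 0 when a range was found):

        u64 cur = 0, found_start, found_end;

        while (find_first_extent_bit(tree, cur, &found_start, &found_end,
                                     EXTENT_DIRTY, NULL) == 0) {
                /* ... process [found_start, found_end] ... */
                cur = found_end + 1;    /* resume after the returned range */
        }
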
 855   * will drop the tree->lock, so use this helper if you want to find the actual
 857   * then walk down the tree until we find a non-contiguous area. The area
 863          struct extent_state *state;
 866          spin_lock(&tree->lock);
 867          state = find_first_extent_bit_state(tree, start, bits);
 868          if (state) {
 869                  *start_ret = state->start;
 870                  *end_ret = state->end;
 871                  while ((state = next_state(state)) != NULL) {
 872                          if (state->start > (*end_ret + 1))
 874                          *end_ret = state->end;
 878          spin_unlock(&tree->lock);
 892          struct extent_state *state;
 897          spin_lock(&tree->lock);
 903          state = tree_search(tree, cur_start);
 904          if (!state) {
 905                  *end = (u64)-1;
 909          while (state) {
 910                  if (found && (state->start != cur_start ||
 911                                (state->state & EXTENT_BOUNDARY))) {
 914                  if (!(state->state & EXTENT_DELALLOC)) {
 916                                  *end = state->end;
 920                          *start = state->start;
 921                          *cached_state = state;
 922                          refcount_inc(&state->refs);
 925                  *end = state->end;
 926                  cur_start = state->end + 1;
 927                  total_bytes += state->end - state->start + 1;
 930                  state = next_state(state);
 933          spin_unlock(&tree->lock);
 941   * If any of the exclusive bits are set, this will fail with -EEXIST if some
 952          struct extent_state *state;
 962          trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits);
 972           * up not needing the pre-allocated extent state at all, which
 975           * If we end up needing a new extent state we allocate it later.
 980          spin_lock(&tree->lock);
 982                  state = *cached_state;
 983                  if (state->start <= start && state->end > start &&
 984                      extent_state_in_tree(state))
 991          state = tree_search_for_insert(tree, start, &p, &parent);
 992          if (!state) {
 995                  prealloc->start = start;
 996                  prealloc->end = end;
1003          last_start = state->start;
1004          last_end = state->end;
1007           * | ---- desired range ---- |
1008           * | state |
1012          if (state->start == start && state->end <= end) {
1013                  if (state->state & exclusive_bits) {
1014                          *failed_start = state->start;
1015                          err = -EEXIST;
1019                  set_state_bits(tree, state, bits, changeset);
1020                  cache_state(state, cached_state);
1021                  merge_state(tree, state);
1022                  if (last_end == (u64)-1)
1025                  state = next_state(state);
1026                  if (start < end && state && state->start == start &&
1033           * | ---- desired range ---- |
1034           * |  state  |
1036           * | ------------- state -------------- |
1047          if (state->start < start) {
1048                  if (state->state & exclusive_bits) {
1050                          err = -EEXIST;
1058                  if ((state->state & bits) == bits) {
1059                          start = state->end + 1;
1060                          cache_state(state, cached_state);
1066                  err = split_state(tree, state, prealloc, start);
1073                  if (state->end <= end) {
1074                          set_state_bits(tree, state, bits, changeset);
1075                          cache_state(state, cached_state);
1076                          merge_state(tree, state);
1077                          if (last_end == (u64)-1)
1080                          state = next_state(state);
1081                          if (start < end && state && state->start == start &&
1088           * | ---- desired range ---- |
1089           *     | state | or               | state |
1094          if (state->start > start) {
1099                          this_end = last_start - 1;
1108                  prealloc->start = start;
1109                  prealloc->end = this_end;
1120           * | ---- desired range ---- |
1121           *                        | state |
1125          if (state->start <= end && state->end > end) {
1126                  if (state->state & exclusive_bits) {
1128                          err = -EEXIST;
1134                  err = split_state(tree, state, prealloc, end + 1);
1148          spin_unlock(&tree->lock);
1154          spin_unlock(&tree->lock);
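
The exclusive-bits handling is what makes EXTENT_LOCKED act as a range lock: if any overlapping state already carries an exclusive bit, the call fails with -EEXIST and reports the first conflicting offset through @failed_start so the caller can wait there. A self-contained sketch of that check (struct rng and the linear scan are invented for the example):

#include <errno.h>
#include <stddef.h>
#include <stdint.h>

struct rng {
        uint64_t start, end;    /* inclusive */
        unsigned int bits;
};

static int check_exclusive(const struct rng *r, size_t n,
                           uint64_t start, uint64_t end,
                           unsigned int excl, uint64_t *failed_start)
{
        for (size_t i = 0; i < n; i++) {
                if (r[i].end < start || r[i].start > end)
                        continue;               /* no overlap */
                if (r[i].bits & excl) {
                        *failed_start = r[i].start;
                        return -EEXIST;         /* caller may wait and retry */
                }
        }
        return 0;
}
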
1177   * @cached_state: state that we're going to cache
1191          struct extent_state *state;
1201          trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits,
1207           * Best effort, don't worry if extent state allocation fails
1208           * here for the first iteration. We might have a cached state
1210           * extent state allocations are needed. We'll only know this
1215                  return -ENOMEM;
1218          spin_lock(&tree->lock);
1220                  state = *cached_state;
1221                  if (state->start <= start && state->end > start &&
1222                      extent_state_in_tree(state))
1230          state = tree_search_for_insert(tree, start, &p, &parent);
1231          if (!state) {
1234                          err = -ENOMEM;
1237                  prealloc->start = start;
1238                  prealloc->end = end;
1245          last_start = state->start;
1246          last_end = state->end;
1249           * | ---- desired range ---- |
1250           * | state |
1254          if (state->start == start && state->end <= end) {
1255                  set_state_bits(tree, state, bits, NULL);
1256                  cache_state(state, cached_state);
1257                  state = clear_state_bit(tree, state, clear_bits, 0, NULL);
1258                  if (last_end == (u64)-1)
1261                  if (start < end && state && state->start == start &&
1268           * | ---- desired range ---- |
1269           * |  state  |
1271           * | ------------- state -------------- |
1282          if (state->start < start) {
1285                          err = -ENOMEM;
1288                  err = split_state(tree, state, prealloc, start);
1294                  if (state->end <= end) {
1295                          set_state_bits(tree, state, bits, NULL);
1296                          cache_state(state, cached_state);
1297                          state = clear_state_bit(tree, state, clear_bits, 0, NULL);
1298                          if (last_end == (u64)-1)
1301                          if (start < end && state && state->start == start &&
1308           * | ---- desired range ---- |
1309           *     | state | or               | state |
1314          if (state->start > start) {
1319                          this_end = last_start - 1;
1323                          err = -ENOMEM;
1331                  prealloc->start = start;
1332                  prealloc->end = this_end;
1342           * | ---- desired range ---- |
1343           *                        | state |
1347          if (state->start <= end && state->end > end) {
1350                          err = -ENOMEM;
1354                  err = split_state(tree, state, prealloc, end + 1);
1368          spin_unlock(&tree->lock);
1374          spin_unlock(&tree->lock);
1392   * set, it's possible that @end_ret contains -1; this happens in case the range
1399          struct extent_state *state;
1402          spin_lock(&tree->lock);
1406                  state = tree_search_prev_next(tree, start, &prev, &next);
1407                  if (!state && !next && !prev) {
1413                          *end_ret = -1;
1415                  } else if (!state && !next) {
1420                          *start_ret = prev->end + 1;
1421                          *end_ret = -1;
1423                  } else if (!state) {
1424                          state = next;
1428                   * At this point 'state' either contains 'start' or start is
1429                   * before 'state'
1431                  if (in_range(start, state->start, state->end - state->start + 1)) {
1432                          if (state->state & bits) {
1434                                   * |--range with bits set--|
1438                                  start = state->end + 1;
1445                                   * |--range with bits cleared----|
1449                                  *start_ret = state->start;
1454                           * |---prev range---|---hole/unset---|---node range---|
1460                           * |---hole/unset--||--first node--|
1465                                  *start_ret = prev->end + 1;
1476          while (state) {
1477                  if (state->end >= start && !(state->state & bits)) {
1478                          *end_ret = state->end;
1480                          *end_ret = state->start - 1;
1483                  state = next_state(state);
1486          spin_unlock(&tree->lock);
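
Worked example: with the searched bits set on [0, 4095] and [8192, 12287], a query at start = 100 lands inside the first range, so the search skips to its end and reports the hole between the two ranges. Illustrative values only:

#include <assert.h>
#include <stdint.h>

static void example(void)
{
        const uint64_t first_end = 4095, second_start = 8192;
        uint64_t start_ret, end_ret;

        start_ret = first_end + 1;      /* 4096: first offset with bits clear */
        end_ret = second_start - 1;     /* 8191: last offset before the next set range */
        assert(start_ret == 4096 && end_ret == 8191);
}
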
1498          struct extent_state *state;
1507          spin_lock(&tree->lock);
1513          state = tree_search(tree, cur_start);
1514          while (state) {
1515                  if (state->start > search_end)
1517                  if (contig && found && state->start > last + 1)
1519                  if (state->end >= cur_start && (state->state & bits) == bits) {
1520                          total_bytes += min(search_end, state->end) + 1 -
1521                                         max(cur_start, state->start);
1525                                  *start = max(cur_start, state->start);
1528                          last = state->end;
1532                  state = next_state(state);
1534          spin_unlock(&tree->lock);
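
The accounting at lines 1520-1521 clamps each matching state to the search window: for a state [0, 8191] and a search over [4096, 16383] it contributes min(16383, 8191) + 1 - max(4096, 0) = 4096 bytes. The same clamp as a standalone helper (invented name, illustrative only):

#include <stdint.h>

static uint64_t overlap_bytes(uint64_t s_start, uint64_t s_end,
                              uint64_t q_start, uint64_t q_end)
{
        uint64_t lo = s_start > q_start ? s_start : q_start;
        uint64_t hi = s_end < q_end ? s_end : q_end;

        return hi >= lo ? hi + 1 - lo : 0;      /* length of an inclusive range */
}
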
1539   * Search a range in the state tree for a given mask. If 'filled' == 1, this
1546          struct extent_state *state = NULL;
1549          spin_lock(&tree->lock);
1550          if (cached && extent_state_in_tree(cached) && cached->start <= start &&
1551              cached->end > start)
1552                  state = cached;
1554                  state = tree_search(tree, start);
1555          while (state && start <= end) {
1556                  if (filled && state->start > start) {
1561                  if (state->start > end)
1564                  if (state->state & bits) {
1573                  if (state->end == (u64)-1)
1576                  start = state->end + 1;
1579                  state = next_state(state);
1583          if (filled && !state)
1585          spin_unlock(&tree->lock);
1596           * either fail with -EEXIST or changeset will record the whole
1625          if (err == -EEXIST) {
1627                  clear_extent_bit(tree, start, failed_start - 1,
1635   * Either insert or lock state struct between start and end, use mask to tell
1646          while (err == -EEXIST) {
1648                          clear_extent_bit(tree, start, failed_start - 1,
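
A typical caller pairs these with unlock_extent() and threads a cached state through both calls so the unlock can skip the second tree search. A sketch assuming the signatures in this version of the file:

        struct extent_state *cached = NULL;

        lock_extent(tree, start, end, &cached);         /* sleeps until [start, end] is ours */
        /* ... operate on the locked range ... */
        unlock_extent(tree, start, end, &cached);       /* clears EXTENT_LOCKED, drops the cached ref */
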
1671                  return -ENOMEM;