Lines matching full:state

13 static inline bool extent_state_in_tree(const struct extent_state *state)  in extent_state_in_tree()  argument
15 return !RB_EMPTY_NODE(&state->rb_node); in extent_state_in_tree()
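
The membership test above relies on the rbtree sentinel convention: RB_CLEAR_NODE() points a node's parent link at itself, so RB_EMPTY_NODE() can answer "is this node linked into a tree?" without consulting the tree at all. A minimal userspace model of that idiom, simplified to a bare parent pointer instead of the kernel's packed __rb_parent_color word:

```c
#include <stdbool.h>
#include <stdio.h>

struct rb_node_model {
	struct rb_node_model *parent;
};

/* Mark a node detached by pointing its parent link at itself,
 * mirroring what RB_CLEAR_NODE() does. */
static void clear_node(struct rb_node_model *n)
{
	n->parent = n;
}

static bool node_in_tree(const struct rb_node_model *n)
{
	return n->parent != n;
}

int main(void)
{
	struct rb_node_model n;

	clear_node(&n);
	printf("in tree: %d\n", node_in_tree(&n));	/* 0 */
	n.parent = NULL;	/* pretend it was linked as the root */
	printf("in tree: %d\n", node_in_tree(&n));	/* 1 */
	return 0;
}
```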
22 static inline void btrfs_leak_debug_add_state(struct extent_state *state) in btrfs_leak_debug_add_state() argument
27 list_add(&state->leak_list, &states); in btrfs_leak_debug_add_state()
31 static inline void btrfs_leak_debug_del_state(struct extent_state *state) in btrfs_leak_debug_del_state() argument
36 list_del(&state->leak_list); in btrfs_leak_debug_del_state()
42 struct extent_state *state; in btrfs_extent_state_leak_debug_check() local
45 state = list_entry(states.next, struct extent_state, leak_list); in btrfs_extent_state_leak_debug_check()
46 pr_err("BTRFS: state leak: start %llu end %llu state %u in tree %d refs %d\n", in btrfs_extent_state_leak_debug_check()
47 state->start, state->end, state->state, in btrfs_extent_state_leak_debug_check()
48 extent_state_in_tree(state), in btrfs_extent_state_leak_debug_check()
49 refcount_read(&state->refs)); in btrfs_extent_state_leak_debug_check()
50 list_del(&state->leak_list); in btrfs_extent_state_leak_debug_check()
51 kmem_cache_free(extent_state_cache, state); in btrfs_extent_state_leak_debug_check()
75 #define btrfs_leak_debug_add_state(state) do {} while (0) argument
76 #define btrfs_leak_debug_del_state(state) do {} while (0) argument
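
Taken together, the debug helpers above implement a simple leak tracker: every extent_state is linked onto a global list at allocation, unlinked on free, and anything still listed at teardown is reported and reclaimed (lines 42-51). With the debug config off, the add/del calls compile to the empty statements at lines 75-76. A hedged userspace sketch of the pattern, assuming a single thread (the kernel protects the list with a spinlock):

```c
#include <stdio.h>
#include <stdlib.h>

struct obj {
	int id;
	struct obj *next, *prev;	/* intrusive leak-list links */
};

static struct obj leak_head = { .next = &leak_head, .prev = &leak_head };

static void leak_add(struct obj *o)
{
	o->next = leak_head.next;
	o->prev = &leak_head;
	leak_head.next->prev = o;
	leak_head.next = o;
}

static void leak_del(struct obj *o)
{
	o->prev->next = o->next;
	o->next->prev = o->prev;
}

/* At shutdown, anything still on the list was never freed. */
static void leak_check(void)
{
	while (leak_head.next != &leak_head) {
		struct obj *o = leak_head.next;

		fprintf(stderr, "leak: obj id %d\n", o->id);
		leak_del(o);
		free(o);
	}
}

int main(void)
{
	struct obj *a = malloc(sizeof(*a));

	if (!a)
		return 1;
	a->id = 42;
	leak_add(a);
	/* "forgot" to free a */
	leak_check();	/* prints: leak: obj id 42 */
	return 0;
}
```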
100 tree->state = RB_ROOT; in extent_io_tree_init()
112 * Do a single barrier for the waitqueue_active check here, the state in extent_io_tree_release()
117 while (!RB_EMPTY_ROOT(&tree->state)) { in extent_io_tree_release()
119 struct extent_state *state; in extent_io_tree_release() local
121 node = rb_first(&tree->state); in extent_io_tree_release()
122 state = rb_entry(node, struct extent_state, rb_node); in extent_io_tree_release()
123 rb_erase(&state->rb_node, &tree->state); in extent_io_tree_release()
124 RB_CLEAR_NODE(&state->rb_node); in extent_io_tree_release()
129 ASSERT(!waitqueue_active(&state->wq)); in extent_io_tree_release()
130 free_extent_state(state); in extent_io_tree_release()
139 struct extent_state *state; in alloc_extent_state() local
146 state = kmem_cache_alloc(extent_state_cache, mask); in alloc_extent_state()
147 if (!state) in alloc_extent_state()
148 return state; in alloc_extent_state()
149 state->state = 0; in alloc_extent_state()
150 RB_CLEAR_NODE(&state->rb_node); in alloc_extent_state()
151 btrfs_leak_debug_add_state(state); in alloc_extent_state()
152 refcount_set(&state->refs, 1); in alloc_extent_state()
153 init_waitqueue_head(&state->wq); in alloc_extent_state()
154 trace_alloc_extent_state(state, mask, _RET_IP_); in alloc_extent_state()
155 return state; in alloc_extent_state()
166 void free_extent_state(struct extent_state *state) in free_extent_state() argument
168 if (!state) in free_extent_state()
170 if (refcount_dec_and_test(&state->refs)) { in free_extent_state()
171 WARN_ON(extent_state_in_tree(state)); in free_extent_state()
172 btrfs_leak_debug_del_state(state); in free_extent_state()
173 trace_free_extent_state(state, _RET_IP_); in free_extent_state()
174 kmem_cache_free(extent_state_cache, state); in free_extent_state()
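
alloc_extent_state() hands back a record with refs initialized to 1, and free_extent_state() is really a "put": refcount_dec_and_test() only releases the memory once the last holder drops its reference. A rough C11-atomics analogue of that lifecycle (the names here are illustrative, not kernel API):

```c
#include <stdatomic.h>
#include <stdlib.h>

struct ref_obj {
	atomic_int refs;
};

/* Allocator returns the caller's first reference. */
static struct ref_obj *obj_alloc(void)
{
	struct ref_obj *o = calloc(1, sizeof(*o));

	if (o)
		atomic_init(&o->refs, 1);
	return o;
}

static void obj_get(struct ref_obj *o)
{
	atomic_fetch_add_explicit(&o->refs, 1, memory_order_relaxed);
}

/* Mirrors free_extent_state(): tolerate NULL, free on last put. */
static void obj_put(struct ref_obj *o)
{
	if (!o)
		return;
	if (atomic_fetch_sub(&o->refs, 1) == 1)
		free(o);
}
```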
178 static int add_extent_changeset(struct extent_state *state, u32 bits, in add_extent_changeset() argument
186 if (set && (state->state & bits) == bits) in add_extent_changeset()
188 if (!set && (state->state & bits) == 0) in add_extent_changeset()
190 changeset->bytes_changed += state->end - state->start + 1; in add_extent_changeset()
191 ret = ulist_add(&changeset->range_changed, state->start, state->end, in add_extent_changeset()
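
add_extent_changeset() deliberately records nothing when the operation is a no-op: setting bits that are already set, or clearing bits that are already clear. Only real transitions grow bytes_changed (note the inclusive end - start + 1 at line 190) and get their range logged. A small sketch of that early-out logic, with the ulist bookkeeping elided:

```c
#include <stdint.h>

struct range { uint64_t start, end; uint32_t bits; };	/* inclusive */
struct changeset { uint64_t bytes_changed; /* range list elided */ };

/* Only transitions that actually flip bits are accounted. */
static void changeset_add(const struct range *r, uint32_t bits, int set,
			  struct changeset *cs)
{
	if (set && (r->bits & bits) == bits)
		return;		/* every requested bit already set */
	if (!set && (r->bits & bits) == 0)
		return;		/* every requested bit already clear */
	cs->bytes_changed += r->end - r->start + 1;
	/* the kernel also logs [start, end] into a ulist here */
}
```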
196 static inline struct extent_state *next_state(struct extent_state *state) in next_state() argument
198 struct rb_node *next = rb_next(&state->rb_node); in next_state()
206 static inline struct extent_state *prev_state(struct extent_state *state) in prev_state() argument
208 struct rb_node *next = rb_prev(&state->rb_node); in prev_state()
238 struct rb_root *root = &tree->state; in tree_search_for_insert()
284 struct rb_root *root = &tree->state; in tree_search_prev_next()
332 * extents with matching state are merged together into a single extent in the
333 * tree. Extents with EXTENT_LOCKED or EXTENT_BOUNDARY in their state field are not merged because
339 static void merge_state(struct extent_io_tree *tree, struct extent_state *state) in merge_state() argument
343 if (state->state & (EXTENT_LOCKED | EXTENT_BOUNDARY)) in merge_state()
346 other = prev_state(state); in merge_state()
347 if (other && other->end == state->start - 1 && in merge_state()
348 other->state == state->state) { in merge_state()
350 btrfs_merge_delalloc_extent(tree->inode, state, other); in merge_state()
351 state->start = other->start; in merge_state()
352 rb_erase(&other->rb_node, &tree->state); in merge_state()
356 other = next_state(state); in merge_state()
357 if (other && other->start == state->end + 1 && in merge_state()
358 other->state == state->state) { in merge_state()
360 btrfs_merge_delalloc_extent(tree->inode, state, other); in merge_state()
361 state->end = other->end; in merge_state()
362 rb_erase(&other->rb_node, &tree->state); in merge_state()
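
merge_state() keeps the tree canonical: a record absorbs a predecessor that ends exactly one byte before it, or a successor that starts exactly one byte after it, provided the state bits match exactly; locked or boundary extents are left distinct (line 343). A simplified sketch over a sorted doubly linked list instead of an rbtree (the bit names are placeholders):

```c
#include <stdint.h>
#include <stdlib.h>

#define BIT_LOCKED	(1u << 0)
#define BIT_BOUNDARY	(1u << 1)

struct ext {
	uint64_t start, end;		/* inclusive byte range */
	uint32_t bits;
	struct ext *prev, *next;	/* sorted, non-overlapping */
};

static void unlink_free(struct ext *e)
{
	if (e->prev)
		e->prev->next = e->next;
	if (e->next)
		e->next->prev = e->prev;
	free(e);
}

/* Absorb touching neighbours whose bits match exactly, skipping
 * extents that must stay distinct (locked/boundary). */
static void merge(struct ext *e)
{
	struct ext *o;

	if (e->bits & (BIT_LOCKED | BIT_BOUNDARY))
		return;

	o = e->prev;
	if (o && o->end == e->start - 1 && o->bits == e->bits) {
		e->start = o->start;	/* grow left, drop neighbour */
		unlink_free(o);
	}
	o = e->next;
	if (o && o->start == e->end + 1 && o->bits == e->bits) {
		e->end = o->end;	/* grow right, drop neighbour */
		unlink_free(o);
	}
}
```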
369 struct extent_state *state, in set_state_bits() argument
376 btrfs_set_delalloc_extent(tree->inode, state, bits); in set_state_bits()
378 ret = add_extent_changeset(state, bits_to_set, changeset, 1); in set_state_bits()
380 state->state |= bits_to_set; in set_state_bits()
388 * state struct is freed.
394 struct extent_state *state, in insert_state() argument
399 const u64 end = state->end; in insert_state()
401 set_state_bits(tree, state, bits, changeset); in insert_state()
403 node = &tree->state.rb_node; in insert_state()
417 entry->start, entry->end, state->start, end); in insert_state()
422 rb_link_node(&state->rb_node, parent, node); in insert_state()
423 rb_insert_color(&state->rb_node, &tree->state); in insert_state()
425 merge_state(tree, state); in insert_state()
430 * Insert state to @tree to the location given by @node and @parent.
433 struct extent_state *state, struct rb_node **node, in insert_state_fast() argument
437 set_state_bits(tree, state, bits, changeset); in insert_state_fast()
438 rb_link_node(&state->rb_node, parent, node); in insert_state_fast()
439 rb_insert_color(&state->rb_node, &tree->state); in insert_state_fast()
440 merge_state(tree, state); in insert_state_fast()
444 * Split a given extent state struct in two, inserting the preallocated
450 * are two extent state structs in the tree:
468 prealloc->state = orig->state; in split_state()
490 rb_insert_color(&prealloc->rb_node, &tree->state); in split_state()
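
split_state() is allocation-free by design: the caller passes in a preallocated record, the low half [orig->start, split - 1] moves into it with the bits copied (line 468), and the original shrinks to [split, orig->end]. A minimal sketch, with the rbtree insertion at line 490 elided:

```c
#include <stdint.h>

struct extent { uint64_t start, end; uint32_t bits; };

/* Caller guarantees orig->start < split_point <= orig->end and
 * supplies prealloc, so no allocation can fail in here. */
static void split(struct extent *orig, struct extent *prealloc,
		  uint64_t split_point)
{
	prealloc->start = orig->start;
	prealloc->end = split_point - 1;
	prealloc->bits = orig->bits;	/* both halves keep the bits */
	orig->start = split_point;
	/* the kernel then links prealloc into the rbtree next to orig */
}
```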
496 * Utility function to clear some bits in an extent state struct. It will
497 * optionally wake up anyone waiting on this state (wake == 1).
499 * If no bits are set on the state struct after clearing things, the
503 struct extent_state *state, in clear_state_bit() argument
512 btrfs_clear_delalloc_extent(tree->inode, state, bits); in clear_state_bit()
514 ret = add_extent_changeset(state, bits_to_clear, changeset, 0); in clear_state_bit()
516 state->state &= ~bits_to_clear; in clear_state_bit()
518 wake_up(&state->wq); in clear_state_bit()
519 if (state->state == 0) { in clear_state_bit()
520 next = next_state(state); in clear_state_bit()
521 if (extent_state_in_tree(state)) { in clear_state_bit()
522 rb_erase(&state->rb_node, &tree->state); in clear_state_bit()
523 RB_CLEAR_NODE(&state->rb_node); in clear_state_bit()
524 free_extent_state(state); in clear_state_bit()
529 merge_state(tree, state); in clear_state_bit()
530 next = next_state(state); in clear_state_bit()
551 * range from the tree regardless of state (i.e. for truncate). in __clear_extent_bit()
561 struct extent_state *state; in __clear_extent_bit() local
588 * up not needing the pre-allocated extent state at all, which in __clear_extent_bit()
591 * If we end up needing a new extent state we allocate it later. in __clear_extent_bit()
609 state = cached; in __clear_extent_bit()
617 state = tree_search(tree, start); in __clear_extent_bit()
618 if (!state) in __clear_extent_bit()
621 if (state->start > end) in __clear_extent_bit()
623 WARN_ON(state->end < start); in __clear_extent_bit()
624 last_end = state->end; in __clear_extent_bit()
626 /* The state doesn't have the wanted bits, go ahead. */ in __clear_extent_bit()
627 if (!(state->state & bits)) { in __clear_extent_bit()
628 state = next_state(state); in __clear_extent_bit()
634 * | state | or in __clear_extent_bit()
635 * | ------------- state -------------- | in __clear_extent_bit()
647 if (state->start < start) { in __clear_extent_bit()
651 err = split_state(tree, state, prealloc, start); in __clear_extent_bit()
658 if (state->end <= end) { in __clear_extent_bit()
659 state = clear_state_bit(tree, state, bits, wake, changeset); in __clear_extent_bit()
666 * | state | in __clear_extent_bit()
669 if (state->start <= end && state->end > end) { in __clear_extent_bit()
673 err = split_state(tree, state, prealloc, end + 1); in __clear_extent_bit()
678 wake_up(&state->wq); in __clear_extent_bit()
686 state = clear_state_bit(tree, state, bits, wake, changeset); in __clear_extent_bit()
691 if (start <= end && state && !need_resched()) in __clear_extent_bit()
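
__clear_extent_bit() follows the prealloc-then-retry discipline the comments at lines 588-591 describe: a spare extent_state is allocated before the tree lock is taken, because a clear that lands mid-record needs a split (lines 647-678), and allocating under the spinlock could block or fail at an awkward time. If the spare turns out to be missing when needed, the code unlocks and retries. A hedged sketch of that pattern, with a pthread mutex standing in for the tree spinlock and need_split_hint a stand-in for the real boundary checks:

```c
#include <pthread.h>
#include <stdlib.h>

struct record { int payload; };

static pthread_mutex_t tree_lock = PTHREAD_MUTEX_INITIALIZER;

/* Allocate with no lock held; retry from the top if the operation
 * needs a record we failed to preallocate. */
static void clear_range(int need_split_hint)
{
	struct record *prealloc = NULL;

again:
	if (!prealloc)
		prealloc = malloc(sizeof(*prealloc));	/* may go unused */

	pthread_mutex_lock(&tree_lock);
	if (need_split_hint && !prealloc) {
		/* a split needs a record we don't have: retry */
		pthread_mutex_unlock(&tree_lock);
		goto again;
	}
	/* ... search/clear work; a split would consume prealloc ... */
	if (need_split_hint)
		prealloc = NULL;	/* consumed by the (elided) split */
	pthread_mutex_unlock(&tree_lock);

	free(prealloc);		/* drop an unused preallocation */
}
```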
712 struct extent_state *state) in wait_on_state() argument
717 prepare_to_wait(&state->wq, &wait, TASK_UNINTERRUPTIBLE); in wait_on_state()
721 finish_wait(&state->wq, &wait); in wait_on_state()
725 * Wait for one or more bits to clear on a range in the state tree.
732 struct extent_state *state; in wait_extent_bit() local
740 * are more bits than the bits we're waiting on set on this state. in wait_extent_bit()
743 state = *cached_state; in wait_extent_bit()
744 if (extent_state_in_tree(state) && in wait_extent_bit()
745 state->start <= start && start < state->end) in wait_extent_bit()
753 state = tree_search(tree, start); in wait_extent_bit()
755 if (!state) in wait_extent_bit()
757 if (state->start > end) in wait_extent_bit()
760 if (state->state & bits) { in wait_extent_bit()
761 start = state->start; in wait_extent_bit()
762 refcount_inc(&state->refs); in wait_extent_bit()
763 wait_on_state(tree, state); in wait_extent_bit()
764 free_extent_state(state); in wait_extent_bit()
767 start = state->end + 1; in wait_extent_bit()
773 state = next_state(state); in wait_extent_bit()
778 /* This state is no longer useful, clear it and free it up. */ in wait_extent_bit()
780 state = *cached_state; in wait_extent_bit()
782 free_extent_state(state); in wait_extent_bit()
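
wait_extent_bit() must not sleep on a record that could be freed underneath it, so it takes a reference (refcount_inc, line 762) before sleeping and drops it afterwards, resuming the search from state->end + 1 (line 767). A pthread analogue of that pin-then-wait shape, using a condition variable where the kernel uses a waitqueue and the tree spinlock:

```c
#include <pthread.h>
#include <stdatomic.h>
#include <stdint.h>
#include <stdlib.h>

struct wrec {
	atomic_int refs;
	uint32_t bits;
	pthread_mutex_t lock;
	pthread_cond_t wq;	/* broadcast whenever bits change */
};

static void wrec_put(struct wrec *r)
{
	if (atomic_fetch_sub(&r->refs, 1) == 1)
		free(r);	/* last reference: record goes away */
}

/* Pin the record, sleep until the watched bits clear, unpin. */
static void wait_bits_clear(struct wrec *r, uint32_t bits)
{
	atomic_fetch_add(&r->refs, 1);	/* pin across the sleep */
	pthread_mutex_lock(&r->lock);
	while (r->bits & bits)
		pthread_cond_wait(&r->wq, &r->lock);
	pthread_mutex_unlock(&r->lock);
	wrec_put(r);	/* may free if all other holders let go */
}
```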
787 static void cache_state_if_flags(struct extent_state *state, in cache_state_if_flags() argument
792 if (!flags || (state->state & flags)) { in cache_state_if_flags()
793 *cached_ptr = state; in cache_state_if_flags()
794 refcount_inc(&state->refs); in cache_state_if_flags()
799 static void cache_state(struct extent_state *state, in cache_state() argument
802 return cache_state_if_flags(state, cached_ptr, in cache_state()
807 * Find the first state struct with 'bits' set after 'start', and return it.
814 struct extent_state *state; in find_first_extent_bit_state() local
820 state = tree_search(tree, start); in find_first_extent_bit_state()
821 while (state) { in find_first_extent_bit_state()
822 if (state->end >= start && (state->state & bits)) in find_first_extent_bit_state()
823 return state; in find_first_extent_bit_state()
824 state = next_state(state); in find_first_extent_bit_state()
841 struct extent_state *state; in find_first_extent_bit() local
846 state = *cached_state; in find_first_extent_bit()
847 if (state->end == start - 1 && extent_state_in_tree(state)) { in find_first_extent_bit()
848 while ((state = next_state(state)) != NULL) { in find_first_extent_bit()
849 if (state->state & bits) in find_first_extent_bit()
860 state = find_first_extent_bit_state(tree, start, bits); in find_first_extent_bit()
862 if (state) { in find_first_extent_bit()
863 cache_state_if_flags(state, cached_state, 0); in find_first_extent_bit()
864 *start_ret = state->start; in find_first_extent_bit()
865 *end_ret = state->end; in find_first_extent_bit()
892 struct extent_state *state; in find_contiguous_extent_bit() local
896 state = find_first_extent_bit_state(tree, start, bits); in find_contiguous_extent_bit()
897 if (state) { in find_contiguous_extent_bit()
898 *start_ret = state->start; in find_contiguous_extent_bit()
899 *end_ret = state->end; in find_contiguous_extent_bit()
900 while ((state = next_state(state)) != NULL) { in find_contiguous_extent_bit()
901 if (state->start > (*end_ret + 1)) in find_contiguous_extent_bit()
903 *end_ret = state->end; in find_contiguous_extent_bit()
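
find_contiguous_extent_bit() grows the result while each successive record begins exactly at *end_ret + 1, stopping at the first gap (line 901). The same walk over a sorted array, as a sketch:

```c
#include <stddef.h>
#include <stdint.h>

struct seg { uint64_t start, end; };	/* inclusive, sorted by start */

/* Grow [*start_ret, *end_ret] while each next segment starts exactly
 * one byte after the current end; the first gap ends the run. */
static void grow_contiguous(const struct seg *segs, size_t n,
			    uint64_t *start_ret, uint64_t *end_ret)
{
	if (n == 0)
		return;
	*start_ret = segs[0].start;
	*end_ret = segs[0].end;
	for (size_t i = 1; i < n; i++) {
		if (segs[i].start > *end_ret + 1)
			break;		/* gap: stop extending */
		*end_ret = segs[i].end;
	}
}
```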
921 struct extent_state *state; in btrfs_find_delalloc_range() local
932 state = tree_search(tree, cur_start); in btrfs_find_delalloc_range()
933 if (!state) { in btrfs_find_delalloc_range()
938 while (state) { in btrfs_find_delalloc_range()
939 if (found && (state->start != cur_start || in btrfs_find_delalloc_range()
940 (state->state & EXTENT_BOUNDARY))) { in btrfs_find_delalloc_range()
943 if (!(state->state & EXTENT_DELALLOC)) { in btrfs_find_delalloc_range()
945 *end = state->end; in btrfs_find_delalloc_range()
949 *start = state->start; in btrfs_find_delalloc_range()
950 *cached_state = state; in btrfs_find_delalloc_range()
951 refcount_inc(&state->refs); in btrfs_find_delalloc_range()
954 *end = state->end; in btrfs_find_delalloc_range()
955 cur_start = state->end + 1; in btrfs_find_delalloc_range()
956 total_bytes += state->end - state->start + 1; in btrfs_find_delalloc_range()
959 state = next_state(state); in btrfs_find_delalloc_range()
986 struct extent_state *state; in __set_extent_bit() local
1008 * up not needing the pre-allocated extent state at all, which in __set_extent_bit()
1011 * If we end up needing a new extent state we allocate it later. in __set_extent_bit()
1018 state = *cached_state; in __set_extent_bit()
1019 if (state->start <= start && state->end > start && in __set_extent_bit()
1020 extent_state_in_tree(state)) in __set_extent_bit()
1027 state = tree_search_for_insert(tree, start, &p, &parent); in __set_extent_bit()
1028 if (!state) { in __set_extent_bit()
1040 last_start = state->start; in __set_extent_bit()
1041 last_end = state->end; in __set_extent_bit()
1045 * | state | in __set_extent_bit()
1049 if (state->start == start && state->end <= end) { in __set_extent_bit()
1050 if (state->state & exclusive_bits) { in __set_extent_bit()
1051 *failed_start = state->start; in __set_extent_bit()
1052 cache_state(state, failed_state); in __set_extent_bit()
1057 set_state_bits(tree, state, bits, changeset); in __set_extent_bit()
1058 cache_state(state, cached_state); in __set_extent_bit()
1059 merge_state(tree, state); in __set_extent_bit()
1063 state = next_state(state); in __set_extent_bit()
1064 if (start < end && state && state->start == start && in __set_extent_bit()
1072 * | state | in __set_extent_bit()
1074 * | ------------- state -------------- | in __set_extent_bit()
1085 if (state->start < start) { in __set_extent_bit()
1086 if (state->state & exclusive_bits) { in __set_extent_bit()
1088 cache_state(state, failed_state); in __set_extent_bit()
1097 if ((state->state & bits) == bits) { in __set_extent_bit()
1098 start = state->end + 1; in __set_extent_bit()
1099 cache_state(state, cached_state); in __set_extent_bit()
1106 err = split_state(tree, state, prealloc, start); in __set_extent_bit()
1113 if (state->end <= end) { in __set_extent_bit()
1114 set_state_bits(tree, state, bits, changeset); in __set_extent_bit()
1115 cache_state(state, cached_state); in __set_extent_bit()
1116 merge_state(tree, state); in __set_extent_bit()
1120 state = next_state(state); in __set_extent_bit()
1121 if (start < end && state && state->start == start && in __set_extent_bit()
1129 * | state | or | state | in __set_extent_bit()
1134 if (state->start > start) { in __set_extent_bit()
1162 * | state | in __set_extent_bit()
1166 if (state->start <= end && state->end > end) { in __set_extent_bit()
1167 if (state->state & exclusive_bits) { in __set_extent_bit()
1169 cache_state(state, failed_state); in __set_extent_bit()
1177 err = split_state(tree, state, prealloc, end + 1); in __set_extent_bit()
1220 * @cached_state: state that we're going to cache
1234 struct extent_state *state; in convert_extent_bit() local
1250 * Best effort, don't worry if extent state allocation fails in convert_extent_bit()
1251 * here for the first iteration. We might have a cached state in convert_extent_bit()
1253 * extent state allocations are needed. We'll only know this in convert_extent_bit()
1263 state = *cached_state; in convert_extent_bit()
1264 if (state->start <= start && state->end > start && in convert_extent_bit()
1265 extent_state_in_tree(state)) in convert_extent_bit()
1273 state = tree_search_for_insert(tree, start, &p, &parent); in convert_extent_bit()
1274 if (!state) { in convert_extent_bit()
1288 last_start = state->start; in convert_extent_bit()
1289 last_end = state->end; in convert_extent_bit()
1293 * | state | in convert_extent_bit()
1297 if (state->start == start && state->end <= end) { in convert_extent_bit()
1298 set_state_bits(tree, state, bits, NULL); in convert_extent_bit()
1299 cache_state(state, cached_state); in convert_extent_bit()
1300 state = clear_state_bit(tree, state, clear_bits, 0, NULL); in convert_extent_bit()
1304 if (start < end && state && state->start == start && in convert_extent_bit()
1312 * | state | in convert_extent_bit()
1314 * | ------------- state -------------- | in convert_extent_bit()
1325 if (state->start < start) { in convert_extent_bit()
1331 err = split_state(tree, state, prealloc, start); in convert_extent_bit()
1337 if (state->end <= end) { in convert_extent_bit()
1338 set_state_bits(tree, state, bits, NULL); in convert_extent_bit()
1339 cache_state(state, cached_state); in convert_extent_bit()
1340 state = clear_state_bit(tree, state, clear_bits, 0, NULL); in convert_extent_bit()
1344 if (start < end && state && state->start == start && in convert_extent_bit()
1352 * | state | or | state | in convert_extent_bit()
1357 if (state->start > start) { in convert_extent_bit()
1386 * | state | in convert_extent_bit()
1390 if (state->start <= end && state->end > end) { in convert_extent_bit()
1397 err = split_state(tree, state, prealloc, end + 1); in convert_extent_bit()
1442 struct extent_state *state; in find_first_clear_extent_bit() local
1449 state = tree_search_prev_next(tree, start, &prev, &next); in find_first_clear_extent_bit()
1450 if (!state && !next && !prev) { in find_first_clear_extent_bit()
1458 } else if (!state && !next) { in find_first_clear_extent_bit()
1466 } else if (!state) { in find_first_clear_extent_bit()
1467 state = next; in find_first_clear_extent_bit()
1471 * At this point 'state' either contains 'start' or start is in find_first_clear_extent_bit()
1472 * before 'state' in find_first_clear_extent_bit()
1474 if (in_range(start, state->start, state->end - state->start + 1)) { in find_first_clear_extent_bit()
1475 if (state->state & bits) { in find_first_clear_extent_bit()
1481 start = state->end + 1; in find_first_clear_extent_bit()
1492 *start_ret = state->start; in find_first_clear_extent_bit()
1519 while (state) { in find_first_clear_extent_bit()
1520 if (state->end >= start && !(state->state & bits)) { in find_first_clear_extent_bit()
1521 *end_ret = state->end; in find_first_clear_extent_bit()
1523 *end_ret = state->start - 1; in find_first_clear_extent_bit()
1526 state = next_state(state); in find_first_clear_extent_bit()
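
The tail loop of find_first_clear_extent_bit() (lines 1519-1526) extends *end_ret across records that lack the bits and stops one byte short of the first record that has them. Equivalent logic over a sorted array:

```c
#include <stddef.h>
#include <stdint.h>

struct rec { uint64_t start, end; uint32_t bits; };

/* Extend end_ret across clear records; a record carrying any of the
 * bits bounds the clear range just before its start. */
static void extend_clear_end(const struct rec *recs, size_t n,
			     uint64_t start, uint32_t bits,
			     uint64_t *end_ret)
{
	for (size_t i = 0; i < n; i++) {
		if (recs[i].end >= start && !(recs[i].bits & bits)) {
			*end_ret = recs[i].end;		/* still clear */
		} else {
			*end_ret = recs[i].start - 1;	/* bounded here */
			break;
		}
	}
}
```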
1548 * @cached_state: A cached state to be used across multiple calls to this
1562 struct extent_state *state = NULL; in count_range_bits() local
1583 state = cached; in count_range_bits()
1588 * The cached state starts after our search range's start. Check in count_range_bits()
1589 * if the previous state record starts at or before the range we in count_range_bits()
1592 * no previous state record, we can start from our cached state. in count_range_bits()
1596 state = cached; in count_range_bits()
1598 state = prev; in count_range_bits()
1606 if (!state) in count_range_bits()
1607 state = tree_search(tree, cur_start); in count_range_bits()
1609 while (state) { in count_range_bits()
1610 if (state->start > search_end) in count_range_bits()
1612 if (contig && found && state->start > last + 1) in count_range_bits()
1614 if (state->end >= cur_start && (state->state & bits) == bits) { in count_range_bits()
1615 total_bytes += min(search_end, state->end) + 1 - in count_range_bits()
1616 max(cur_start, state->start); in count_range_bits()
1620 *start = max(cur_start, state->start); in count_range_bits()
1623 last = state->end; in count_range_bits()
1627 state = next_state(state); in count_range_bits()
1632 *cached_state = state; in count_range_bits()
1633 if (state) in count_range_bits()
1634 refcount_inc(&state->refs); in count_range_bits()
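
The byte accounting in count_range_bits() (lines 1615-1616) is plain interval intersection: each matching record is clipped to the search window, contributing min(search_end, end) + 1 - max(cur_start, start) bytes. A tiny worked example; the helper assumes the two inclusive ranges actually overlap, as the kernel's surrounding checks guarantee:

```c
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

static uint64_t min_u64(uint64_t a, uint64_t b) { return a < b ? a : b; }
static uint64_t max_u64(uint64_t a, uint64_t b) { return a > b ? a : b; }

/* Bytes of the record [rec_start, rec_end] that fall inside the
 * search window [cur_start, search_end]; both ranges inclusive. */
static uint64_t overlap_bytes(uint64_t cur_start, uint64_t search_end,
			      uint64_t rec_start, uint64_t rec_end)
{
	return min_u64(search_end, rec_end) + 1 -
	       max_u64(cur_start, rec_start);
}

int main(void)
{
	/* record [100, 199] clipped to window [150, 400] -> 50 bytes */
	printf("%" PRIu64 "\n", overlap_bytes(150, 400, 100, 199));
	return 0;
}
```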
1643 * Search a range in the state tree for a given mask. If 'filled' == 1, this
1650 struct extent_state *state = NULL; in test_range_bit() local
1656 state = cached; in test_range_bit()
1658 state = tree_search(tree, start); in test_range_bit()
1659 while (state && start <= end) { in test_range_bit()
1660 if (filled && state->start > start) { in test_range_bit()
1665 if (state->start > end) in test_range_bit()
1668 if (state->state & bits) { in test_range_bit()
1677 if (state->end == (u64)-1) in test_range_bit()
1680 start = state->end + 1; in test_range_bit()
1683 state = next_state(state); in test_range_bit()
1687 if (filled && !state) in test_range_bit()
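
With filled == 1, test_range_bit() demands that every byte of [start, end] is covered by records carrying the bits: a record starting past the cursor means a hole (line 1660) and fails, as does a covered record missing the bits; running out of records before reaching 'end' fails too (line 1687). A sketch of just that filled mode over sorted spans:

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct span { uint64_t start, end; uint32_t bits; };

/* Every byte of [start, end] must be covered by spans carrying
 * 'bits'; any hole or wrong-bits span fails the test. */
static bool range_fully_set(const struct span *s, size_t n,
			    uint64_t start, uint64_t end, uint32_t bits)
{
	for (size_t i = 0; i < n && start <= end; i++) {
		if (s[i].end < start)
			continue;	/* entirely before the window */
		if (s[i].start > start)
			return false;	/* hole at 'start' */
		if (!(s[i].bits & bits))
			return false;	/* covered, but bits missing */
		if (s[i].end == UINT64_MAX)
			return true;	/* span runs to end of space */
		start = s[i].end + 1;	/* advance past this span */
	}
	return start > end;	/* did coverage reach past 'end'? */
}
```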
1738 * Either insert or lock state struct between start and end; use mask to tell