Lines matching refs:state in fs/btrfs/extent_io.c
28 static inline bool extent_state_in_tree(const struct extent_state *state) in extent_state_in_tree() argument
30 return !RB_EMPTY_NODE(&state->rb_node); in extent_state_in_tree()
62 struct extent_state *state; in btrfs_leak_debug_check() local
66 state = list_entry(states.next, struct extent_state, leak_list); in btrfs_leak_debug_check()
68 state->start, state->end, state->state, in btrfs_leak_debug_check()
69 extent_state_in_tree(state), in btrfs_leak_debug_check()
70 atomic_read(&state->refs)); in btrfs_leak_debug_check()
71 list_del(&state->leak_list); in btrfs_leak_debug_check()
72 kmem_cache_free(extent_state_cache, state); in btrfs_leak_debug_check()
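The btrfs_leak_debug_check() lines above drain a global `states` list at teardown: anything still linked there was allocated but never freed. A minimal userspace sketch of the same pattern, with hypothetical names (obj, leak_head) standing in for extent_state and the kernel list helpers (unlinking on free is omitted for brevity):

    #include <stdio.h>
    #include <stdlib.h>

    struct obj {
        unsigned long long start, end;
        struct obj *next;            /* stand-in for the leak_list linkage */
    };

    static struct obj *leak_head;    /* every live allocation sits on this list */

    static struct obj *obj_alloc(unsigned long long start, unsigned long long end)
    {
        struct obj *o = malloc(sizeof(*o));
        if (!o)
            return NULL;
        o->start = start;
        o->end = end;
        o->next = leak_head;         /* btrfs_leak_debug_add() */
        leak_head = o;
        return o;
    }

    static void leak_check(void)
    {
        while (leak_head) {          /* whatever is still listed has leaked */
            struct obj *o = leak_head;
            fprintf(stderr, "leaked state [%llu, %llu]\n", o->start, o->end);
            leak_head = o->next;     /* list_del() + kmem_cache_free() */
            free(o);
        }
    }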
134 static void add_extent_changeset(struct extent_state *state, unsigned bits, in add_extent_changeset() argument
142 if (set && (state->state & bits) == bits) in add_extent_changeset()
144 if (!set && (state->state & bits) == 0) in add_extent_changeset()
146 changeset->bytes_changed += state->end - state->start + 1; in add_extent_changeset()
147 ret = ulist_add(changeset->range_changed, state->start, state->end, in add_extent_changeset()
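add_extent_changeset() only records a change when bits actually flip: on set it skips states that already carry all the requested bits, on clear it skips states that carry none of them. A rough sketch of that guard and the inclusive-length arithmetic, with the ulist replaced by a plain byte counter and hypothetical names:

    struct changeset {
        unsigned long long bytes_changed;   /* ulist of changed ranges omitted */
    };

    static void record_change(unsigned state_bits, unsigned bits, int set,
                              unsigned long long start, unsigned long long end,
                              struct changeset *cs)
    {
        if (set && (state_bits & bits) == bits)
            return;                 /* every requested bit was already set */
        if (!set && (state_bits & bits) == 0)
            return;                 /* none of the requested bits was set */
        cs->bytes_changed += end - start + 1;   /* [start, end] is inclusive */
    }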
220 tree->state = RB_ROOT; in extent_io_tree_init()
229 struct extent_state *state; in alloc_extent_state() local
231 state = kmem_cache_alloc(extent_state_cache, mask); in alloc_extent_state()
232 if (!state) in alloc_extent_state()
233 return state; in alloc_extent_state()
234 state->state = 0; in alloc_extent_state()
235 state->private = 0; in alloc_extent_state()
236 RB_CLEAR_NODE(&state->rb_node); in alloc_extent_state()
237 btrfs_leak_debug_add(&state->leak_list, &states); in alloc_extent_state()
238 atomic_set(&state->refs, 1); in alloc_extent_state()
239 init_waitqueue_head(&state->wq); in alloc_extent_state()
240 trace_alloc_extent_state(state, mask, _RET_IP_); in alloc_extent_state()
241 return state; in alloc_extent_state()
244 void free_extent_state(struct extent_state *state) in free_extent_state() argument
246 if (!state) in free_extent_state()
248 if (atomic_dec_and_test(&state->refs)) { in free_extent_state()
249 WARN_ON(extent_state_in_tree(state)); in free_extent_state()
250 btrfs_leak_debug_del(&state->leak_list); in free_extent_state()
251 trace_free_extent_state(state, _RET_IP_); in free_extent_state()
252 kmem_cache_free(extent_state_cache, state); in free_extent_state()
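alloc_extent_state() hands back a state holding one reference, and free_extent_state() is really a "put" that only frees on the last drop (the atomic_dec_and_test() at line 248). A userspace sketch of the same lifetime rule, assuming C11 atomics in place of the kernel's atomic_t:

    #include <stdatomic.h>
    #include <stdlib.h>

    struct state {
        atomic_int refs;
        /* ... start, end, bits ... */
    };

    static struct state *state_alloc(void)
    {
        struct state *s = calloc(1, sizeof(*s));
        if (!s)
            return NULL;
        atomic_init(&s->refs, 1);       /* caller owns the first reference */
        return s;
    }

    static void state_put(struct state *s)
    {
        if (!s)
            return;                     /* free_extent_state() also tolerates NULL */
        if (atomic_fetch_sub(&s->refs, 1) == 1)
            free(s);                    /* last reference dropped */
    }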
298 struct rb_root *root = &tree->state; in __etree_search()
383 struct extent_state *state) in merge_state() argument
388 if (state->state & (EXTENT_IOBITS | EXTENT_BOUNDARY)) in merge_state()
391 other_node = rb_prev(&state->rb_node); in merge_state()
394 if (other->end == state->start - 1 && in merge_state()
395 other->state == state->state) { in merge_state()
396 merge_cb(tree, state, other); in merge_state()
397 state->start = other->start; in merge_state()
398 rb_erase(&other->rb_node, &tree->state); in merge_state()
403 other_node = rb_next(&state->rb_node); in merge_state()
406 if (other->start == state->end + 1 && in merge_state()
407 other->state == state->state) { in merge_state()
408 merge_cb(tree, state, other); in merge_state()
409 state->end = other->end; in merge_state()
410 rb_erase(&other->rb_node, &tree->state); in merge_state()
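merge_state() coalesces a state with a tree neighbor when the ranges touch (other->end == state->start - 1) and the bit masks are identical; states carrying EXTENT_IOBITS or EXTENT_BOUNDARY are deliberately never merged (line 388). A sketch of the two merge checks over a sorted doubly linked list instead of the rbtree, with hypothetical names:

    #include <stdlib.h>

    struct st {
        unsigned long long start, end;
        unsigned bits;
        struct st *prev, *next;         /* sorted, non-overlapping ranges */
    };

    static void merge_st(struct st *s)
    {
        struct st *o = s->prev;
        if (o && o->end + 1 == s->start && o->bits == s->bits) {
            s->start = o->start;        /* absorb the left neighbor */
            s->prev = o->prev;
            if (o->prev)
                o->prev->next = s;
            free(o);
        }
        o = s->next;
        if (o && s->end + 1 == o->start && o->bits == s->bits) {
            s->end = o->end;            /* absorb the right neighbor */
            s->next = o->next;
            if (o->next)
                o->next->prev = s;
            free(o);
        }
    }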
418 struct extent_state *state, unsigned *bits) in set_state_cb() argument
421 tree->ops->set_bit_hook(tree->mapping->host, state, bits); in set_state_cb()
425 struct extent_state *state, unsigned *bits) in clear_state_cb() argument
428 tree->ops->clear_bit_hook(tree->mapping->host, state, bits); in clear_state_cb()
432 struct extent_state *state, unsigned *bits,
446 struct extent_state *state, u64 start, u64 end, in insert_state() argument
456 state->start = start; in insert_state()
457 state->end = end; in insert_state()
459 set_state_bits(tree, state, bits, changeset); in insert_state()
461 node = tree_insert(&tree->state, NULL, end, &state->rb_node, p, parent); in insert_state()
470 merge_state(tree, state); in insert_state()
504 prealloc->state = orig->state; in split_state()
507 node = tree_insert(&tree->state, &orig->rb_node, prealloc->end, in split_state()
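split_state() gives the preallocated state the head of the range and shrinks the original to the tail, so a partial set or clear never touches bytes outside the request. The arithmetic in isolation, on a hypothetical range struct:

    struct range { unsigned long long start, end; };

    /* After the split: left = [start, split - 1], orig = [split, end].
     * In the kernel, 'left' is the preallocated state and also inherits
     * orig->state (line 504 above). */
    static void split_range(struct range *orig, struct range *left,
                            unsigned long long split)
    {
        left->start = orig->start;
        left->end = split - 1;
        orig->start = split;
    }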
516 static struct extent_state *next_state(struct extent_state *state) in next_state() argument
518 struct rb_node *next = rb_next(&state->rb_node); in next_state()
533 struct extent_state *state, in clear_state_bit() argument
540 if ((bits_to_clear & EXTENT_DIRTY) && (state->state & EXTENT_DIRTY)) { in clear_state_bit()
541 u64 range = state->end - state->start + 1; in clear_state_bit()
545 clear_state_cb(tree, state, bits); in clear_state_bit()
546 add_extent_changeset(state, bits_to_clear, changeset, 0); in clear_state_bit()
547 state->state &= ~bits_to_clear; in clear_state_bit()
549 wake_up(&state->wq); in clear_state_bit()
550 if (state->state == 0) { in clear_state_bit()
551 next = next_state(state); in clear_state_bit()
552 if (extent_state_in_tree(state)) { in clear_state_bit()
553 rb_erase(&state->rb_node, &tree->state); in clear_state_bit()
554 RB_CLEAR_NODE(&state->rb_node); in clear_state_bit()
555 free_extent_state(state); in clear_state_bit()
560 merge_state(tree, state); in clear_state_bit()
561 next = next_state(state); in clear_state_bit()
599 struct extent_state *state; in __clear_extent_bit() local
643 state = cached; in __clear_extent_bit()
656 state = rb_entry(node, struct extent_state, rb_node); in __clear_extent_bit()
658 if (state->start > end) in __clear_extent_bit()
660 WARN_ON(state->end < start); in __clear_extent_bit()
661 last_end = state->end; in __clear_extent_bit()
664 if (!(state->state & bits)) { in __clear_extent_bit()
665 state = next_state(state); in __clear_extent_bit()
685 if (state->start < start) { in __clear_extent_bit()
688 err = split_state(tree, state, prealloc, start); in __clear_extent_bit()
695 if (state->end <= end) { in __clear_extent_bit()
696 state = clear_state_bit(tree, state, &bits, wake, in __clear_extent_bit()
708 if (state->start <= end && state->end > end) { in __clear_extent_bit()
711 err = split_state(tree, state, prealloc, end + 1); in __clear_extent_bit()
716 wake_up(&state->wq); in __clear_extent_bit()
724 state = clear_state_bit(tree, state, &bits, wake, changeset); in __clear_extent_bit()
729 if (start <= end && state && !need_resched()) in __clear_extent_bit()
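The loop body of __clear_extent_bit() distinguishes three overlap shapes between a state and the requested range (compare lines 685, 695 and 708 above); fully cleared states are then erased and freed, partially cleared ones re-merged, per clear_state_bit(). A small classifier capturing those checks, with hypothetical names:

    enum overlap { MISS, HEAD_SPLIT, FULL, TAIL_SPLIT };

    /* First action to take for a state [s_start, s_end] against a
     * request [start, end]; __set_extent_bit() makes the same case split. */
    static enum overlap classify(unsigned long long s_start, unsigned long long s_end,
                                 unsigned long long start, unsigned long long end)
    {
        if (s_start > end || s_end < start)
            return MISS;
        if (s_start < start)
            return HEAD_SPLIT;   /* split at 'start', keep the untouched head */
        if (s_end <= end)
            return FULL;         /* state fully inside the request */
        return TAIL_SPLIT;       /* split at 'end + 1', clear only the head */
    }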
750 struct extent_state *state) in wait_on_state() argument
755 prepare_to_wait(&state->wq, &wait, TASK_UNINTERRUPTIBLE); in wait_on_state()
759 finish_wait(&state->wq, &wait); in wait_on_state()
770 struct extent_state *state; in wait_extent_bit() local
787 state = rb_entry(node, struct extent_state, rb_node); in wait_extent_bit()
789 if (state->start > end) in wait_extent_bit()
792 if (state->state & bits) { in wait_extent_bit()
793 start = state->start; in wait_extent_bit()
794 atomic_inc(&state->refs); in wait_extent_bit()
795 wait_on_state(tree, state); in wait_extent_bit()
796 free_extent_state(state); in wait_extent_bit()
799 start = state->end + 1; in wait_extent_bit()
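wait_on_state()/wait_extent_bit() sleep until the watched bits clear, and pin the state with atomic_inc(&state->refs) before sleeping (line 794) so it cannot be freed out from under the waiter. A condition-variable sketch of the waiting side, assuming pthreads in place of the kernel waitqueue:

    #include <pthread.h>

    struct wstate {
        pthread_mutex_t lock;    /* the kernel uses tree->lock + state->wq */
        pthread_cond_t wq;
        unsigned bits;
    };

    static void wait_bits_clear(struct wstate *s, unsigned bits)
    {
        pthread_mutex_lock(&s->lock);
        while (s->bits & bits)
            pthread_cond_wait(&s->wq, &s->lock);
        pthread_mutex_unlock(&s->lock);
    }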
814 struct extent_state *state, in set_state_bits() argument
819 set_state_cb(tree, state, bits); in set_state_bits()
820 if ((bits_to_set & EXTENT_DIRTY) && !(state->state & EXTENT_DIRTY)) { in set_state_bits()
821 u64 range = state->end - state->start + 1; in set_state_bits()
824 add_extent_changeset(state, bits_to_set, changeset, 1); in set_state_bits()
825 state->state |= bits_to_set; in set_state_bits()
828 static void cache_state_if_flags(struct extent_state *state, in cache_state_if_flags() argument
833 if (!flags || (state->state & flags)) { in cache_state_if_flags()
834 *cached_ptr = state; in cache_state_if_flags()
835 atomic_inc(&state->refs); in cache_state_if_flags()
840 static void cache_state(struct extent_state *state, in cache_state() argument
843 return cache_state_if_flags(state, cached_ptr, in cache_state()
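cache_state_if_flags() fills the caller's cache slot at most once, only for states matching the given flags (flags == 0 caches unconditionally), and takes an extra reference so the cached pointer stays valid after the tree lock is dropped. In miniature, with hypothetical names:

    #include <stdatomic.h>

    struct cstate {
        atomic_int refs;
        unsigned bits;
    };

    static void cache_if(struct cstate *s, struct cstate **cached, unsigned flags)
    {
        if (cached && !*cached && (!flags || (s->bits & flags))) {
            *cached = s;
            atomic_fetch_add(&s->refs, 1);  /* keep the cached state alive */
        }
    }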
864 struct extent_state *state; in __set_extent_bit() local
884 state = *cached_state; in __set_extent_bit()
885 if (state->start <= start && state->end > start && in __set_extent_bit()
886 extent_state_in_tree(state)) { in __set_extent_bit()
887 node = &state->rb_node; in __set_extent_bit()
908 state = rb_entry(node, struct extent_state, rb_node); in __set_extent_bit()
910 last_start = state->start; in __set_extent_bit()
911 last_end = state->end; in __set_extent_bit()
919 if (state->start == start && state->end <= end) { in __set_extent_bit()
920 if (state->state & exclusive_bits) { in __set_extent_bit()
921 *failed_start = state->start; in __set_extent_bit()
926 set_state_bits(tree, state, &bits, changeset); in __set_extent_bit()
927 cache_state(state, cached_state); in __set_extent_bit()
928 merge_state(tree, state); in __set_extent_bit()
932 state = next_state(state); in __set_extent_bit()
933 if (start < end && state && state->start == start && in __set_extent_bit()
955 if (state->start < start) { in __set_extent_bit()
956 if (state->state & exclusive_bits) { in __set_extent_bit()
964 err = split_state(tree, state, prealloc, start); in __set_extent_bit()
971 if (state->end <= end) { in __set_extent_bit()
972 set_state_bits(tree, state, &bits, changeset); in __set_extent_bit()
973 cache_state(state, cached_state); in __set_extent_bit()
974 merge_state(tree, state); in __set_extent_bit()
978 state = next_state(state); in __set_extent_bit()
979 if (start < end && state && state->start == start && in __set_extent_bit()
992 if (state->start > start) { in __set_extent_bit()
1022 if (state->start <= end && state->end > end) { in __set_extent_bit()
1023 if (state->state & exclusive_bits) { in __set_extent_bit()
1031 err = split_state(tree, state, prealloc, end + 1); in __set_extent_bit()
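Each case in __set_extent_bit() first checks the exclusive bits (lines 920, 956 and 1023 above): if any requested-exclusive bit is already present, the function reports where the conflict starts and bails out with -EEXIST. In btrfs, locking a range is built on exactly this, with EXTENT_LOCKED passed as the exclusive bit. The check in isolation (sketch returns -1 where the kernel returns -EEXIST):

    static int try_set_bits(unsigned *state_bits, unsigned long long state_start,
                            unsigned bits, unsigned exclusive,
                            unsigned long long *failed_start)
    {
        if (*state_bits & exclusive) {
            *failed_start = state_start;    /* tell the caller where it clashed */
            return -1;
        }
        *state_bits |= bits;
        return 0;
    }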
1090 struct extent_state *state; in convert_extent_bit() local
1118 state = *cached_state; in convert_extent_bit()
1119 if (state->start <= start && state->end > start && in convert_extent_bit()
1120 extent_state_in_tree(state)) { in convert_extent_bit()
1121 node = &state->rb_node; in convert_extent_bit()
1145 state = rb_entry(node, struct extent_state, rb_node); in convert_extent_bit()
1147 last_start = state->start; in convert_extent_bit()
1148 last_end = state->end; in convert_extent_bit()
1156 if (state->start == start && state->end <= end) { in convert_extent_bit()
1157 set_state_bits(tree, state, &bits, NULL); in convert_extent_bit()
1158 cache_state(state, cached_state); in convert_extent_bit()
1159 state = clear_state_bit(tree, state, &clear_bits, 0, NULL); in convert_extent_bit()
1163 if (start < end && state && state->start == start && in convert_extent_bit()
1185 if (state->start < start) { in convert_extent_bit()
1191 err = split_state(tree, state, prealloc, start); in convert_extent_bit()
1197 if (state->end <= end) { in convert_extent_bit()
1198 set_state_bits(tree, state, &bits, NULL); in convert_extent_bit()
1199 cache_state(state, cached_state); in convert_extent_bit()
1200 state = clear_state_bit(tree, state, &clear_bits, 0, in convert_extent_bit()
1205 if (start < end && state && state->start == start && in convert_extent_bit()
1218 if (state->start > start) { in convert_extent_bit()
1250 if (state->start <= end && state->end > end) { in convert_extent_bit()
1257 err = split_state(tree, state, prealloc, end + 1); in convert_extent_bit()
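convert_extent_bit() reuses the same split machinery but performs a set and a clear on each state in one pass under the tree lock, so no observer sees the range holding neither bit set. The per-state step in isolation:

    /* New bits on, old bits off; if a bit appears in both masks the clear
     * wins, matching set_state_bits() followed by clear_state_bit(). */
    static void convert_bits(unsigned *state_bits, unsigned set_mask,
                             unsigned clear_mask)
    {
        *state_bits |= set_mask;
        *state_bits &= ~clear_mask;
    }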
1515 struct extent_state *state; in find_first_extent_bit_state() local
1526 state = rb_entry(node, struct extent_state, rb_node); in find_first_extent_bit_state()
1527 if (state->end >= start && (state->state & bits)) in find_first_extent_bit_state()
1528 return state; in find_first_extent_bit_state()
1549 struct extent_state *state; in find_first_extent_bit() local
1555 state = *cached_state; in find_first_extent_bit()
1556 if (state->end == start - 1 && extent_state_in_tree(state)) { in find_first_extent_bit()
1557 n = rb_next(&state->rb_node); in find_first_extent_bit()
1559 state = rb_entry(n, struct extent_state, in find_first_extent_bit()
1561 if (state->state & bits) in find_first_extent_bit()
1573 state = find_first_extent_bit_state(tree, start, bits); in find_first_extent_bit()
1575 if (state) { in find_first_extent_bit()
1576 cache_state_if_flags(state, cached_state, 0); in find_first_extent_bit()
1577 *start_ret = state->start; in find_first_extent_bit()
1578 *end_ret = state->end; in find_first_extent_bit()
1597 struct extent_state *state; in find_delalloc_range() local
1616 state = rb_entry(node, struct extent_state, rb_node); in find_delalloc_range()
1617 if (found && (state->start != cur_start || in find_delalloc_range()
1618 (state->state & EXTENT_BOUNDARY))) { in find_delalloc_range()
1621 if (!(state->state & EXTENT_DELALLOC)) { in find_delalloc_range()
1623 *end = state->end; in find_delalloc_range()
1627 *start = state->start; in find_delalloc_range()
1628 *cached_state = state; in find_delalloc_range()
1629 atomic_inc(&state->refs); in find_delalloc_range()
1632 *end = state->end; in find_delalloc_range()
1633 cur_start = state->end + 1; in find_delalloc_range()
1635 total_bytes += state->end - state->start + 1; in find_delalloc_range()
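find_delalloc_range() walks forward from the search point, extending the result while states stay byte-contiguous and keep EXTENT_DELALLOC, and stops at the first gap, non-delalloc state, or EXTENT_BOUNDARY marker. A sketch of that walk over a sorted array of states, with hypothetical names and the refcount/cached_state handling omitted:

    #define DELALLOC 0x1
    #define BOUNDARY 0x2

    struct dstate { unsigned long long start, end; unsigned bits; };

    static int find_delalloc(const struct dstate *st, int n, unsigned long long cur,
                             unsigned long long *out_start, unsigned long long *out_end)
    {
        int found = 0;
        for (int i = 0; i < n; i++) {
            if (st[i].end < cur)
                continue;                    /* before the search point */
            if (found && (st[i].start != cur || (st[i].bits & BOUNDARY)))
                break;                       /* gap or explicit boundary */
            if (!(st[i].bits & DELALLOC)) {
                if (!found)
                    *out_end = st[i].end;    /* the kernel leaves this as a hint */
                break;
            }
            if (!found)
                *out_start = st[i].start;
            found = 1;
            *out_end = st[i].end;
            cur = st[i].end + 1;             /* next state must start here */
        }
        return found;                        /* *out_start is valid only if found */
    }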
1885 struct extent_state *state; in count_range_bits() local
1908 state = rb_entry(node, struct extent_state, rb_node); in count_range_bits()
1909 if (state->start > search_end) in count_range_bits()
1911 if (contig && found && state->start > last + 1) in count_range_bits()
1913 if (state->end >= cur_start && (state->state & bits) == bits) { in count_range_bits()
1914 total_bytes += min(search_end, state->end) + 1 - in count_range_bits()
1915 max(cur_start, state->start); in count_range_bits()
1919 *start = max(cur_start, state->start); in count_range_bits()
1922 last = state->end; in count_range_bits()
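The accumulation at lines 1914-1915 is plain inclusive-interval intersection: a state contributes min(search_end, state->end) + 1 - max(cur_start, state->start) bytes. As a standalone helper (precondition: the two ranges actually overlap, which the surrounding checks guarantee):

    /* Bytes that state [s_start, s_end] contributes inside the query
     * [cur, search_end]; both ranges are inclusive. */
    static unsigned long long overlap_bytes(unsigned long long s_start,
                                            unsigned long long s_end,
                                            unsigned long long cur,
                                            unsigned long long search_end)
    {
        unsigned long long lo = s_start > cur ? s_start : cur;           /* max */
        unsigned long long hi = s_end < search_end ? s_end : search_end; /* min */
        return hi + 1 - lo;          /* inclusive-range length */
    }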
1942 struct extent_state *state; in set_state_private() local
1955 state = rb_entry(node, struct extent_state, rb_node); in set_state_private()
1956 if (state->start != start) { in set_state_private()
1960 state->private = private; in set_state_private()
1969 struct extent_state *state; in get_state_private() local
1982 state = rb_entry(node, struct extent_state, rb_node); in get_state_private()
1983 if (state->start != start) { in get_state_private()
1987 *private = state->private; in get_state_private()
2002 struct extent_state *state = NULL; in test_range_bit() local
2013 state = rb_entry(node, struct extent_state, rb_node); in test_range_bit()
2015 if (filled && state->start > start) { in test_range_bit()
2020 if (state->start > end) in test_range_bit()
2023 if (state->state & bits) { in test_range_bit()
2032 if (state->end == (u64)-1) in test_range_bit()
2035 start = state->end + 1; in test_range_bit()
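test_range_bit() has two modes: filled == 1 asks "do ALL bytes of [start, end] carry the bits?" (any hole or bitless state fails), while filled == 0 asks "does ANY byte carry them?". A sketch of those semantics over a sorted array of states, mirroring the checks at lines 2015-2035 above:

    struct tstate { unsigned long long start, end; unsigned bits; };

    static int test_range(const struct tstate *st, int n,
                          unsigned long long start, unsigned long long end,
                          unsigned bits, int filled)
    {
        int bitset = 0;
        for (int i = 0; i < n && start <= end; i++) {
            if (st[i].end < start)
                continue;                  /* state ends before the range */
            if (filled && st[i].start > start)
                return 0;                  /* hole: range not fully covered */
            if (st[i].start > end)
                break;
            if (st[i].bits & bits) {
                bitset = 1;
                if (!filled)
                    break;                 /* any overlap satisfies filled == 0 */
            } else if (filled) {
                return 0;
            }
            if (st[i].end == (unsigned long long)-1)
                break;                     /* the kernel's (u64)-1 guard */
            start = st[i].end + 1;
        }
        return bitset;
    }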
2187 struct extent_state *state; in clean_io_failure() local
2215 state = find_first_extent_bit_state(&BTRFS_I(inode)->io_tree, in clean_io_failure()
2220 if (state && state->start <= failrec->start && in clean_io_failure()
2221 state->end >= failrec->start + failrec->len - 1) { in clean_io_failure()
2247 struct extent_state *state, *next; in btrfs_free_io_failure_record() local
2249 if (RB_EMPTY_ROOT(&failure_tree->state)) in btrfs_free_io_failure_record()
2253 state = find_first_extent_bit_state(failure_tree, start, EXTENT_DIRTY); in btrfs_free_io_failure_record()
2254 while (state) { in btrfs_free_io_failure_record()
2255 if (state->start > end) in btrfs_free_io_failure_record()
2258 ASSERT(state->end <= end); in btrfs_free_io_failure_record()
2260 next = next_state(state); in btrfs_free_io_failure_record()
2262 failrec = (struct io_failure_record *)(unsigned long)state->private; in btrfs_free_io_failure_record()
2263 free_extent_state(state); in btrfs_free_io_failure_record()
2266 state = next; in btrfs_free_io_failure_record()
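Note the iteration idiom at lines 2260-2266: btrfs_free_io_failure_record() captures the successor with next_state() before releasing the current state, since releasing unlinks and frees it. The same idiom on a generic singly linked list:

    #include <stdlib.h>

    struct node { struct node *next; };

    static void free_all(struct node *head)
    {
        while (head) {
            struct node *next = head->next;   /* grab the successor first */
            free(head);                       /* now 'head' may be torn down */
            head = next;
        }
    }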