• Home
  • Raw
  • Download

Lines matching references to the identifier "tree"

108 #define btrfs_debug_check_extent_io_range(tree, start, end)		\  argument
109 __btrfs_debug_check_extent_io_range(__func__, (tree), (start), (end))
111 struct extent_io_tree *tree, u64 start, u64 end) in __btrfs_debug_check_extent_io_range() argument
113 struct inode *inode = tree->private_data; in __btrfs_debug_check_extent_io_range()
172 struct extent_io_tree *tree = bio->bi_private; in submit_one_bio() local
178 if (is_data_inode(tree->private_data)) in submit_one_bio()
179 ret = btrfs_submit_data_bio(tree->private_data, bio, mirror_num, in submit_one_bio()
182 ret = btrfs_submit_metadata_bio(tree->private_data, bio, in submit_one_bio()
289 struct extent_io_tree *tree, unsigned int owner, in extent_io_tree_init() argument
292 tree->fs_info = fs_info; in extent_io_tree_init()
293 tree->state = RB_ROOT; in extent_io_tree_init()
294 tree->dirty_bytes = 0; in extent_io_tree_init()
295 spin_lock_init(&tree->lock); in extent_io_tree_init()
296 tree->private_data = private_data; in extent_io_tree_init()
297 tree->owner = owner; in extent_io_tree_init()
299 lockdep_set_class(&tree->lock, &file_extent_tree_class); in extent_io_tree_init()
302 void extent_io_tree_release(struct extent_io_tree *tree) in extent_io_tree_release() argument
304 spin_lock(&tree->lock); in extent_io_tree_release()
311 while (!RB_EMPTY_ROOT(&tree->state)) { in extent_io_tree_release()
315 node = rb_first(&tree->state); in extent_io_tree_release()
317 rb_erase(&state->rb_node, &tree->state); in extent_io_tree_release()
326 cond_resched_lock(&tree->lock); in extent_io_tree_release()
328 spin_unlock(&tree->lock); in extent_io_tree_release()
419 static struct rb_node *__etree_search(struct extent_io_tree *tree, u64 offset, in __etree_search() argument
425 struct rb_root *root = &tree->state; in __etree_search()
472 tree_search_for_insert(struct extent_io_tree *tree, in tree_search_for_insert() argument
480 ret = __etree_search(tree, offset, &next, NULL, p_ret, parent_ret); in tree_search_for_insert()
486 static inline struct rb_node *tree_search(struct extent_io_tree *tree, in tree_search() argument
489 return tree_search_for_insert(tree, offset, NULL, NULL); in tree_search()
501 static void merge_state(struct extent_io_tree *tree, in merge_state() argument
515 if (tree->private_data && in merge_state()
516 is_data_inode(tree->private_data)) in merge_state()
517 btrfs_merge_delalloc_extent(tree->private_data, in merge_state()
520 rb_erase(&other->rb_node, &tree->state); in merge_state()
530 if (tree->private_data && in merge_state()
531 is_data_inode(tree->private_data)) in merge_state()
532 btrfs_merge_delalloc_extent(tree->private_data, in merge_state()
535 rb_erase(&other->rb_node, &tree->state); in merge_state()
542 static void set_state_bits(struct extent_io_tree *tree,
556 static int insert_state(struct extent_io_tree *tree, in insert_state() argument
565 btrfs_err(tree->fs_info, in insert_state()
572 set_state_bits(tree, state, bits, changeset); in insert_state()
574 node = tree_insert(&tree->state, NULL, end, &state->rb_node, p, parent); in insert_state()
578 btrfs_err(tree->fs_info, in insert_state()
583 merge_state(tree, state); in insert_state()
601 static int split_state(struct extent_io_tree *tree, struct extent_state *orig, in split_state() argument
606 if (tree->private_data && is_data_inode(tree->private_data)) in split_state()
607 btrfs_split_delalloc_extent(tree->private_data, orig, split); in split_state()
614 node = tree_insert(&tree->state, &orig->rb_node, prealloc->end, in split_state()
639 static struct extent_state *clear_state_bit(struct extent_io_tree *tree, in clear_state_bit() argument
650 WARN_ON(range > tree->dirty_bytes); in clear_state_bit()
651 tree->dirty_bytes -= range; in clear_state_bit()
654 if (tree->private_data && is_data_inode(tree->private_data)) in clear_state_bit()
655 btrfs_clear_delalloc_extent(tree->private_data, state, bits); in clear_state_bit()
665 rb_erase(&state->rb_node, &tree->state); in clear_state_bit()
672 merge_state(tree, state); in clear_state_bit()
687 static void extent_io_tree_panic(struct extent_io_tree *tree, int err) in extent_io_tree_panic() argument
689 btrfs_panic(tree->fs_info, err, in extent_io_tree_panic()
705 int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __clear_extent_bit() argument
718 btrfs_debug_check_extent_io_range(tree, start, end); in __clear_extent_bit()
719 trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits); in __clear_extent_bit()
741 spin_lock(&tree->lock); in __clear_extent_bit()
764 node = tree_search(tree, start); in __clear_extent_bit()
799 err = split_state(tree, state, prealloc, start); in __clear_extent_bit()
801 extent_io_tree_panic(tree, err); in __clear_extent_bit()
807 state = clear_state_bit(tree, state, &bits, wake, in __clear_extent_bit()
822 err = split_state(tree, state, prealloc, end + 1); in __clear_extent_bit()
824 extent_io_tree_panic(tree, err); in __clear_extent_bit()
829 clear_state_bit(tree, prealloc, &bits, wake, changeset); in __clear_extent_bit()
835 state = clear_state_bit(tree, state, &bits, wake, changeset); in __clear_extent_bit()
846 spin_unlock(&tree->lock); in __clear_extent_bit()
852 spin_unlock(&tree->lock); in __clear_extent_bit()
860 static void wait_on_state(struct extent_io_tree *tree, in wait_on_state() argument
862 __releases(tree->lock) in wait_on_state()
863 __acquires(tree->lock) in wait_on_state()
867 spin_unlock(&tree->lock); in wait_on_state()
869 spin_lock(&tree->lock); in wait_on_state()
878 static void wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in wait_extent_bit() argument
884 btrfs_debug_check_extent_io_range(tree, start, end); in wait_extent_bit()
886 spin_lock(&tree->lock); in wait_extent_bit()
893 node = tree_search(tree, start); in wait_extent_bit()
906 wait_on_state(tree, state); in wait_extent_bit()
915 if (!cond_resched_lock(&tree->lock)) { in wait_extent_bit()
921 spin_unlock(&tree->lock); in wait_extent_bit()
924 static void set_state_bits(struct extent_io_tree *tree, in set_state_bits() argument
931 if (tree->private_data && is_data_inode(tree->private_data)) in set_state_bits()
932 btrfs_set_delalloc_extent(tree->private_data, state, bits); in set_state_bits()
936 tree->dirty_bytes += range; in set_state_bits()
972 int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, u32 bits, in set_extent_bit() argument
986 btrfs_debug_check_extent_io_range(tree, start, end); in set_extent_bit()
987 trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits); in set_extent_bit()
1005 spin_lock(&tree->lock); in set_extent_bit()
1018 node = tree_search_for_insert(tree, start, &p, &parent); in set_extent_bit()
1022 err = insert_state(tree, prealloc, start, end, in set_extent_bit()
1025 extent_io_tree_panic(tree, err); in set_extent_bit()
1049 set_state_bits(tree, state, &bits, changeset); in set_extent_bit()
1051 merge_state(tree, state); in set_extent_bit()
1097 err = split_state(tree, state, prealloc, start); in set_extent_bit()
1099 extent_io_tree_panic(tree, err); in set_extent_bit()
1105 set_state_bits(tree, state, &bits, changeset); in set_extent_bit()
1107 merge_state(tree, state); in set_extent_bit()
1139 err = insert_state(tree, prealloc, start, this_end, in set_extent_bit()
1142 extent_io_tree_panic(tree, err); in set_extent_bit()
1164 err = split_state(tree, state, prealloc, end + 1); in set_extent_bit()
1166 extent_io_tree_panic(tree, err); in set_extent_bit()
1168 set_state_bits(tree, prealloc, &bits, changeset); in set_extent_bit()
1170 merge_state(tree, prealloc); in set_extent_bit()
1178 spin_unlock(&tree->lock); in set_extent_bit()
1184 spin_unlock(&tree->lock); in set_extent_bit()
1210 int convert_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in convert_extent_bit() argument
1224 btrfs_debug_check_extent_io_range(tree, start, end); in convert_extent_bit()
1225 trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits, in convert_extent_bit()
1242 spin_lock(&tree->lock); in convert_extent_bit()
1256 node = tree_search_for_insert(tree, start, &p, &parent); in convert_extent_bit()
1263 err = insert_state(tree, prealloc, start, end, in convert_extent_bit()
1266 extent_io_tree_panic(tree, err); in convert_extent_bit()
1283 set_state_bits(tree, state, &bits, NULL); in convert_extent_bit()
1285 state = clear_state_bit(tree, state, &clear_bits, 0, NULL); in convert_extent_bit()
1317 err = split_state(tree, state, prealloc, start); in convert_extent_bit()
1319 extent_io_tree_panic(tree, err); in convert_extent_bit()
1324 set_state_bits(tree, state, &bits, NULL); in convert_extent_bit()
1326 state = clear_state_bit(tree, state, &clear_bits, 0, in convert_extent_bit()
1361 err = insert_state(tree, prealloc, start, this_end, in convert_extent_bit()
1364 extent_io_tree_panic(tree, err); in convert_extent_bit()
1383 err = split_state(tree, state, prealloc, end + 1); in convert_extent_bit()
1385 extent_io_tree_panic(tree, err); in convert_extent_bit()
1387 set_state_bits(tree, prealloc, &bits, NULL); in convert_extent_bit()
1389 clear_state_bit(tree, prealloc, &clear_bits, 0, NULL); in convert_extent_bit()
1397 spin_unlock(&tree->lock); in convert_extent_bit()
1403 spin_unlock(&tree->lock); in convert_extent_bit()
1411 int set_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in set_record_extent_bits() argument
1422 return set_extent_bit(tree, start, end, bits, 0, NULL, NULL, GFP_NOFS, in set_record_extent_bits()
1426 int set_extent_bits_nowait(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_bits_nowait() argument
1429 return set_extent_bit(tree, start, end, bits, 0, NULL, NULL, in set_extent_bits_nowait()
1433 int clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in clear_extent_bit() argument
1437 return __clear_extent_bit(tree, start, end, bits, wake, delete, in clear_extent_bit()
1441 int clear_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in clear_record_extent_bits() argument
1450 return __clear_extent_bit(tree, start, end, bits, 0, 0, NULL, GFP_NOFS, in clear_record_extent_bits()
1458 int lock_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in lock_extent_bits() argument
1465 err = set_extent_bit(tree, start, end, EXTENT_LOCKED, in lock_extent_bits()
1469 wait_extent_bit(tree, failed_start, end, EXTENT_LOCKED); in lock_extent_bits()
1478 int try_lock_extent(struct extent_io_tree *tree, u64 start, u64 end) in try_lock_extent() argument
1483 err = set_extent_bit(tree, start, end, EXTENT_LOCKED, EXTENT_LOCKED, in try_lock_extent()
1487 clear_extent_bit(tree, start, failed_start - 1, in try_lock_extent()
1530 find_first_extent_bit_state(struct extent_io_tree *tree, u64 start, u32 bits) in find_first_extent_bit_state() argument
1539 node = tree_search(tree, start); in find_first_extent_bit_state()
1564 int find_first_extent_bit(struct extent_io_tree *tree, u64 start, in find_first_extent_bit() argument
1571 spin_lock(&tree->lock); in find_first_extent_bit()
1587 state = find_first_extent_bit_state(tree, start, bits); in find_first_extent_bit()
1596 spin_unlock(&tree->lock); in find_first_extent_bit()
1616 int find_contiguous_extent_bit(struct extent_io_tree *tree, u64 start, in find_contiguous_extent_bit() argument
1622 spin_lock(&tree->lock); in find_contiguous_extent_bit()
1623 state = find_first_extent_bit_state(tree, start, bits); in find_contiguous_extent_bit()
1634 spin_unlock(&tree->lock); in find_contiguous_extent_bit()
1653 void find_first_clear_extent_bit(struct extent_io_tree *tree, u64 start, in find_first_clear_extent_bit() argument
1659 spin_lock(&tree->lock); in find_first_clear_extent_bit()
1663 node = __etree_search(tree, start, &next, &prev, NULL, NULL); in find_first_clear_extent_bit()
1752 spin_unlock(&tree->lock); in find_first_clear_extent_bit()
1761 bool btrfs_find_delalloc_range(struct extent_io_tree *tree, u64 *start, in btrfs_find_delalloc_range() argument
1771 spin_lock(&tree->lock); in btrfs_find_delalloc_range()
1777 node = tree_search(tree, cur_start); in btrfs_find_delalloc_range()
1810 spin_unlock(&tree->lock); in btrfs_find_delalloc_range()
1989 struct extent_io_tree *tree = &BTRFS_I(inode)->io_tree; in find_lock_delalloc_range() local
2003 found = btrfs_find_delalloc_range(tree, &delalloc_start, &delalloc_end, in find_lock_delalloc_range()
2047 lock_extent_bits(tree, delalloc_start, delalloc_end, &cached_state); in find_lock_delalloc_range()
2050 ret = test_range_bit(tree, delalloc_start, delalloc_end, in find_lock_delalloc_range()
2053 unlock_extent_cached(tree, delalloc_start, delalloc_end, in find_lock_delalloc_range()
2082 u64 count_range_bits(struct extent_io_tree *tree, in count_range_bits() argument
2096 spin_lock(&tree->lock); in count_range_bits()
2098 total_bytes = tree->dirty_bytes; in count_range_bits()
2105 node = tree_search(tree, cur_start); in count_range_bits()
2133 spin_unlock(&tree->lock); in count_range_bits()
2141 int set_state_failrec(struct extent_io_tree *tree, u64 start, in set_state_failrec() argument
2148 spin_lock(&tree->lock); in set_state_failrec()
2153 node = tree_search(tree, start); in set_state_failrec()
2165 spin_unlock(&tree->lock); in set_state_failrec()
2169 struct io_failure_record *get_state_failrec(struct extent_io_tree *tree, u64 start) in get_state_failrec() argument
2175 spin_lock(&tree->lock); in get_state_failrec()
2180 node = tree_search(tree, start); in get_state_failrec()
2193 spin_unlock(&tree->lock); in get_state_failrec()
2203 int test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, in test_range_bit() argument
2210 spin_lock(&tree->lock); in test_range_bit()
2215 node = tree_search(tree, start); in test_range_bit()
2249 spin_unlock(&tree->lock); in test_range_bit()
2488 struct extent_io_tree *tree = &BTRFS_I(inode)->io_tree; in btrfs_get_io_failure_record() local
2556 ret = set_extent_bits(tree, start, start + sectorsize - 1, in btrfs_get_io_failure_record()
2621 struct extent_io_tree *tree = &BTRFS_I(inode)->io_tree; in btrfs_repair_one_sector() local
2640 free_io_failure(failure_tree, tree, failrec); in btrfs_repair_one_sector()
2670 free_io_failure(failure_tree, tree, failrec); in btrfs_repair_one_sector()
2886 struct extent_io_tree *tree; in endio_readpage_release_extent() local
2909 tree = &processed->inode->io_tree; in endio_readpage_release_extent()
2914 if (processed->uptodate && tree->track_uptodate) in endio_readpage_release_extent()
2915 set_extent_uptodate(tree, processed->start, processed->end, in endio_readpage_release_extent()
2917 unlock_extent_cached_atomic(tree, processed->start, processed->end, in endio_readpage_release_extent()
2982 struct extent_io_tree *tree, *failure_tree; in end_bio_extent_readpage() local
3009 tree = &BTRFS_I(inode)->io_tree; in end_bio_extent_readpage()
3047 failure_tree, tree, start, in end_bio_extent_readpage()
3569 struct extent_io_tree *tree = &BTRFS_I(inode)->io_tree; in btrfs_do_readpage() local
3573 unlock_extent(tree, start, end); in btrfs_do_readpage()
3582 unlock_extent(tree, start, end); in btrfs_do_readpage()
3609 set_extent_uptodate(tree, cur, cur + iosize - 1, in btrfs_do_readpage()
3611 unlock_extent_cached(tree, cur, in btrfs_do_readpage()
3619 unlock_extent(tree, cur, end); in btrfs_do_readpage()
3696 set_extent_uptodate(tree, cur, cur + iosize - 1, in btrfs_do_readpage()
3698 unlock_extent_cached(tree, cur, in btrfs_do_readpage()
3706 if (test_range_bit(tree, cur, cur_end, in btrfs_do_readpage()
3708 unlock_extent(tree, cur, cur + iosize - 1); in btrfs_do_readpage()
3718 unlock_extent(tree, cur, cur + iosize - 1); in btrfs_do_readpage()
3736 unlock_extent(tree, cur, end); in btrfs_do_readpage()
5199 int extent_invalidatepage(struct extent_io_tree *tree, in extent_invalidatepage() argument
5208 ASSERT(tree->owner == IO_TREE_BTREE_INODE_IO); in extent_invalidatepage()
5214 lock_extent_bits(tree, start, end, &cached_state); in extent_invalidatepage()
5222 unlock_extent_cached(tree, start, end, &cached_state); in extent_invalidatepage()
5231 static int try_release_extent_state(struct extent_io_tree *tree, in try_release_extent_state() argument
5238 if (test_range_bit(tree, start, end, EXTENT_LOCKED, 0, NULL)) { in try_release_extent_state()
5247 ret = __clear_extent_bit(tree, start, end, in try_release_extent_state()
5273 struct extent_io_tree *tree = &btrfs_inode->io_tree; in try_release_extent_mapping() local
5296 if (test_range_bit(tree, em->start, in try_release_extent_mapping()
5344 return try_release_extent_state(tree, page, mask); in try_release_extent_mapping()