Lines matching refs: tree
(each entry shows the source line number, the matched code, and the enclosing function; trailing "argument", "member", or "local" tags mark where tree is declared)
47 struct extent_io_tree *tree; member
87 state->state, state->tree, atomic_read(&state->refs)); in extent_io_exit()
107 void extent_io_tree_init(struct extent_io_tree *tree, in extent_io_tree_init() argument
110 tree->state.rb_node = NULL; in extent_io_tree_init()
111 tree->buffer.rb_node = NULL; in extent_io_tree_init()
112 tree->ops = NULL; in extent_io_tree_init()
113 tree->dirty_bytes = 0; in extent_io_tree_init()
114 spin_lock_init(&tree->lock); in extent_io_tree_init()
115 spin_lock_init(&tree->buffer_lock); in extent_io_tree_init()
116 tree->mapping = mapping; in extent_io_tree_init()
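The init path above zeroes both rb-tree roots, initializes the two spinlocks, and ties the tree to a page-cache mapping. A minimal usage sketch; only the tree and mapping parameters are visible in the matched lines, so the trailing gfp_t argument is an assumption based on this era's signature:

        /* Sketch: wire a per-inode io_tree to its mapping, the way
         * btrfs sets up BTRFS_I(inode)->io_tree. The GFP_NOFS mask
         * argument is assumed, not shown in the listing. */
        extent_io_tree_init(&BTRFS_I(inode)->io_tree,
                            inode->i_mapping, GFP_NOFS);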
131 state->tree = NULL; in alloc_extent_state()
150 WARN_ON(state->tree); in free_extent_state()
185 static struct rb_node *__etree_search(struct extent_io_tree *tree, u64 offset, in __etree_search() argument
189 struct rb_root *root = &tree->state; in __etree_search()
230 static inline struct rb_node *tree_search(struct extent_io_tree *tree, in tree_search() argument
236 ret = __etree_search(tree, offset, &prev, NULL); in tree_search()
242 static struct extent_buffer *buffer_tree_insert(struct extent_io_tree *tree, in buffer_tree_insert() argument
245 struct rb_root *root = &tree->buffer; in buffer_tree_insert()
267 static struct extent_buffer *buffer_search(struct extent_io_tree *tree, in buffer_search() argument
270 struct rb_root *root = &tree->buffer; in buffer_search()
295 static int merge_state(struct extent_io_tree *tree, in merge_state() argument
310 other->tree = NULL; in merge_state()
311 rb_erase(&other->rb_node, &tree->state); in merge_state()
321 state->tree = NULL; in merge_state()
322 rb_erase(&state->rb_node, &tree->state); in merge_state()
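merge_state() absorbs a state into an adjacent rb-tree neighbour when the bits match exactly; the two rb_erase() calls above handle swallowing the left and the right neighbour. A conceptual sketch, not taken from the source:

        /*
         * before:  [0 .. 4095 | DIRTY]  [4096 .. 8191 | DIRTY]
         * after:   [0 .. 8191 | DIRTY]
         *
         * States separated by a gap, or carrying different bits,
         * stay as separate nodes.
         */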
329 static void set_state_cb(struct extent_io_tree *tree, in set_state_cb() argument
333 if (tree->ops && tree->ops->set_bit_hook) { in set_state_cb()
334 tree->ops->set_bit_hook(tree->mapping->host, state->start, in set_state_cb()
339 static void clear_state_cb(struct extent_io_tree *tree, in clear_state_cb() argument
343 if (tree->ops && tree->ops->clear_bit_hook) { in clear_state_cb()
344 tree->ops->clear_bit_hook(tree->mapping->host, state->start, in clear_state_cb()
359 static int insert_state(struct extent_io_tree *tree, in insert_state() argument
372 tree->dirty_bytes += end - start + 1; in insert_state()
373 set_state_cb(tree, state, bits); in insert_state()
377 node = tree_insert(&tree->state, end, &state->rb_node); in insert_state()
388 state->tree = tree; in insert_state()
389 merge_state(tree, state); in insert_state()
407 static int split_state(struct extent_io_tree *tree, struct extent_state *orig, in split_state() argument
416 node = tree_insert(&tree->state, prealloc->end, &prealloc->rb_node); in split_state()
421 prealloc->tree = tree; in split_state()
433 static int clear_state_bit(struct extent_io_tree *tree, in clear_state_bit() argument
441 WARN_ON(range > tree->dirty_bytes); in clear_state_bit()
442 tree->dirty_bytes -= range; in clear_state_bit()
444 clear_state_cb(tree, state, bits); in clear_state_bit()
449 if (state->tree) { in clear_state_bit()
450 clear_state_cb(tree, state, state->state); in clear_state_bit()
451 rb_erase(&state->rb_node, &tree->state); in clear_state_bit()
452 state->tree = NULL; in clear_state_bit()
458 merge_state(tree, state); in clear_state_bit()
476 int clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in clear_extent_bit() argument
492 spin_lock(&tree->lock); in clear_extent_bit()
497 node = tree_search(tree, start); in clear_extent_bit()
524 err = split_state(tree, state, prealloc, start); in clear_extent_bit()
531 set |= clear_state_bit(tree, state, bits, in clear_extent_bit()
547 err = split_state(tree, state, prealloc, end + 1); in clear_extent_bit()
552 set |= clear_state_bit(tree, prealloc, bits, in clear_extent_bit()
559 set |= clear_state_bit(tree, state, bits, wake, delete); in clear_extent_bit()
563 spin_unlock(&tree->lock); in clear_extent_bit()
572 spin_unlock(&tree->lock); in clear_extent_bit()
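clear_extent_bit() is the generic clear path: search from start, split any state that straddles a range boundary, then clear the bits, optionally waking waiters and deleting emptied states. A usage sketch with the (tree, start, end, bits, wake, delete, mask) argument order taken from the calls above:

        /* Sketch: clear EXTENT_DIRTY over [start, end], waking
         * waiters (wake = 1) but keeping empty states (delete = 0). */
        clear_extent_bit(tree, start, end, EXTENT_DIRTY, 1, 0, GFP_NOFS);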
578 static int wait_on_state(struct extent_io_tree *tree, in wait_on_state() argument
580 __releases(tree->lock) in wait_on_state()
581 __acquires(tree->lock) in wait_on_state()
585 spin_unlock(&tree->lock); in wait_on_state()
587 spin_lock(&tree->lock); in wait_on_state()
597 int wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, int bits) in wait_extent_bit() argument
602 spin_lock(&tree->lock); in wait_extent_bit()
609 node = tree_search(tree, start); in wait_extent_bit()
621 wait_on_state(tree, state); in wait_extent_bit()
631 spin_unlock(&tree->lock); in wait_extent_bit()
633 spin_lock(&tree->lock); in wait_extent_bit()
637 spin_unlock(&tree->lock); in wait_extent_bit()
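Note that wait_on_state() drops tree->lock around the actual sleep (hence the __releases/__acquires annotations) and retakes it before the walk continues, so the tree can change while a caller waits. Typical use, mirroring the retry in lock_extent() further down:

        /* Sketch: block until EXTENT_LOCKED clears over the range. */
        wait_extent_bit(tree, start, end, EXTENT_LOCKED);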
641 static void set_state_bits(struct extent_io_tree *tree, in set_state_bits() argument
647 tree->dirty_bytes += range; in set_state_bits()
649 set_state_cb(tree, state, bits); in set_state_bits()
664 static int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_bit() argument
682 spin_lock(&tree->lock); in set_extent_bit()
687 node = tree_search(tree, start); in set_extent_bit()
689 err = insert_state(tree, prealloc, start, end, bits); in set_extent_bit()
712 set_state_bits(tree, state, bits); in set_extent_bit()
714 merge_state(tree, state); in set_extent_bit()
741 err = split_state(tree, state, prealloc, start); in set_extent_bit()
747 set_state_bits(tree, state, bits); in set_extent_bit()
749 merge_state(tree, state); in set_extent_bit()
768 err = insert_state(tree, prealloc, start, this_end, in set_extent_bit()
790 err = split_state(tree, state, prealloc, end + 1); in set_extent_bit()
793 set_state_bits(tree, prealloc, bits); in set_extent_bit()
794 merge_state(tree, prealloc); in set_extent_bit()
802 spin_unlock(&tree->lock); in set_extent_bit()
811 spin_unlock(&tree->lock); in set_extent_bit()
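set_extent_bit() is the workhorse behind every setter below: search, split at both range boundaries, insert preallocated states into holes, then merge. Its full parameter list is truncated in this listing; from the wrappers it appears to be (tree, start, end, bits, exclusive, failed_start, mask), so treat this sketch as an assumption:

        int err;
        u64 failed_start;

        /* Non-exclusive set, as set_extent_dirty() does. */
        set_extent_bit(tree, start, end, EXTENT_DIRTY, 0, NULL, GFP_NOFS);

        /* Exclusive set, as lock_extent() does: on -EEXIST,
         * failed_start reports where an already-set state was hit. */
        err = set_extent_bit(tree, start, end, EXTENT_LOCKED, 1,
                             &failed_start, GFP_NOFS);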
818 int set_extent_dirty(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_dirty() argument
821 return set_extent_bit(tree, start, end, EXTENT_DIRTY, 0, NULL, in set_extent_dirty()
825 int set_extent_ordered(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_ordered() argument
828 return set_extent_bit(tree, start, end, EXTENT_ORDERED, 0, NULL, mask); in set_extent_ordered()
831 int set_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_bits() argument
834 return set_extent_bit(tree, start, end, bits, 0, NULL, in set_extent_bits()
838 int clear_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in clear_extent_bits() argument
841 return clear_extent_bit(tree, start, end, bits, 0, 0, mask); in clear_extent_bits()
844 int set_extent_delalloc(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_delalloc() argument
847 return set_extent_bit(tree, start, end, in set_extent_delalloc()
852 int clear_extent_dirty(struct extent_io_tree *tree, u64 start, u64 end, in clear_extent_dirty() argument
855 return clear_extent_bit(tree, start, end, in clear_extent_dirty()
859 int clear_extent_ordered(struct extent_io_tree *tree, u64 start, u64 end, in clear_extent_ordered() argument
862 return clear_extent_bit(tree, start, end, EXTENT_ORDERED, 1, 0, mask); in clear_extent_ordered()
865 int set_extent_new(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_new() argument
868 return set_extent_bit(tree, start, end, EXTENT_NEW, 0, NULL, in set_extent_new()
872 static int clear_extent_new(struct extent_io_tree *tree, u64 start, u64 end, in clear_extent_new() argument
875 return clear_extent_bit(tree, start, end, EXTENT_NEW, 0, 0, mask); in clear_extent_new()
878 int set_extent_uptodate(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_uptodate() argument
881 return set_extent_bit(tree, start, end, EXTENT_UPTODATE, 0, NULL, in set_extent_uptodate()
885 static int clear_extent_uptodate(struct extent_io_tree *tree, u64 start, in clear_extent_uptodate() argument
888 return clear_extent_bit(tree, start, end, EXTENT_UPTODATE, 0, 0, mask); in clear_extent_uptodate()
891 static int set_extent_writeback(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_writeback() argument
894 return set_extent_bit(tree, start, end, EXTENT_WRITEBACK, in set_extent_writeback()
898 static int clear_extent_writeback(struct extent_io_tree *tree, u64 start, in clear_extent_writeback() argument
901 return clear_extent_bit(tree, start, end, EXTENT_WRITEBACK, 1, 0, mask); in clear_extent_writeback()
904 int wait_on_extent_writeback(struct extent_io_tree *tree, u64 start, u64 end) in wait_on_extent_writeback() argument
906 return wait_extent_bit(tree, start, end, EXTENT_WRITEBACK); in wait_on_extent_writeback()
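Each setter/clearer above is a thin wrapper that fixes a bit mask and the wake/delete flags, so callers deal only in byte ranges. A hedged sketch of the usual delalloc round-trip (variable names are illustrative):

        /* Mark freshly written, not-yet-allocated bytes ... */
        set_extent_delalloc(tree, start, end, GFP_NOFS);

        /* ... and drop the dirty accounting once writeout starts. */
        clear_extent_dirty(tree, start, end, GFP_NOFS);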
913 int lock_extent(struct extent_io_tree *tree, u64 start, u64 end, gfp_t mask) in lock_extent() argument
918 err = set_extent_bit(tree, start, end, EXTENT_LOCKED, 1, in lock_extent()
921 wait_extent_bit(tree, failed_start, end, EXTENT_LOCKED); in lock_extent()
931 int try_lock_extent(struct extent_io_tree *tree, u64 start, u64 end, in try_lock_extent() argument
937 err = set_extent_bit(tree, start, end, EXTENT_LOCKED, 1, in try_lock_extent()
941 clear_extent_bit(tree, start, failed_start - 1, in try_lock_extent()
948 int unlock_extent(struct extent_io_tree *tree, u64 start, u64 end, in unlock_extent() argument
951 return clear_extent_bit(tree, start, end, EXTENT_LOCKED, 1, 0, mask); in unlock_extent()
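lock_extent() sets EXTENT_LOCKED exclusively and sleeps in wait_extent_bit() until it wins; try_lock_extent() instead unwinds any partial lock on contention. The usual bracket looks like:

        /* Sketch: range-lock around work on [start, end]. */
        lock_extent(tree, start, end, GFP_NOFS);
        /* ... read or modify pages covering the range ... */
        unlock_extent(tree, start, end, GFP_NOFS);

        /* Non-blocking variant; the 1-on-success, 0-on-contention
         * convention is inferred from the unwind path above. */
        if (!try_lock_extent(tree, start, end, GFP_NOFS))
                return -EAGAIN;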
957 int set_range_dirty(struct extent_io_tree *tree, u64 start, u64 end) in set_range_dirty() argument
964 page = find_get_page(tree->mapping, index); in set_range_dirty()
970 set_extent_dirty(tree, start, end, GFP_NOFS); in set_range_dirty()
977 static int set_range_writeback(struct extent_io_tree *tree, u64 start, u64 end) in set_range_writeback() argument
984 page = find_get_page(tree->mapping, index); in set_range_writeback()
990 set_extent_writeback(tree, start, end, GFP_NOFS); in set_range_writeback()
1001 int find_first_extent_bit(struct extent_io_tree *tree, u64 start, in find_first_extent_bit() argument
1008 spin_lock(&tree->lock); in find_first_extent_bit()
1013 node = tree_search(tree, start); in find_first_extent_bit()
1030 spin_unlock(&tree->lock); in find_first_extent_bit()
1038 struct extent_state *find_first_extent_bit_state(struct extent_io_tree *tree, in find_first_extent_bit_state() argument
1048 node = tree_search(tree, start); in find_first_extent_bit_state()
1071 static noinline u64 find_delalloc_range(struct extent_io_tree *tree, in find_delalloc_range() argument
1080 spin_lock(&tree->lock); in find_delalloc_range()
1086 node = tree_search(tree, cur_start); in find_delalloc_range()
1117 spin_unlock(&tree->lock); in find_delalloc_range()
1220 struct extent_io_tree *tree, in find_lock_delalloc_range() argument
1235 found = find_delalloc_range(tree, &delalloc_start, &delalloc_end, in find_lock_delalloc_range()
1278 lock_extent(tree, delalloc_start, delalloc_end, GFP_NOFS); in find_lock_delalloc_range()
1281 ret = test_range_bit(tree, delalloc_start, delalloc_end, in find_lock_delalloc_range()
1284 unlock_extent(tree, delalloc_start, delalloc_end, GFP_NOFS); in find_lock_delalloc_range()
1297 struct extent_io_tree *tree, in extent_clear_unlock_delalloc() argument
1321 clear_extent_bit(tree, start, end, clear_bits, 1, 0, GFP_NOFS); in extent_clear_unlock_delalloc()
1356 u64 count_range_bits(struct extent_io_tree *tree, in count_range_bits() argument
1371 spin_lock(&tree->lock); in count_range_bits()
1373 total_bytes = tree->dirty_bytes; in count_range_bits()
1380 node = tree_search(tree, cur_start); in count_range_bits()
1403 spin_unlock(&tree->lock); in count_range_bits()
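count_range_bits() can short-circuit through tree->dirty_bytes when asked for the whole-tree dirty total; otherwise it walks states from cur_start. The remaining parameters are truncated in the listing, so this sketch assumes the historical (tree, *start, search_end, max_bytes, bits) prototype:

        /* Sketch: count dirty bytes from offset 0 onward. */
        u64 found_start = 0;
        u64 dirty = count_range_bits(tree, &found_start, (u64)-1,
                                     (u64)-1, EXTENT_DIRTY);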
1412 static int lock_range(struct extent_io_tree *tree, u64 start, u64 end)
1420 page = grab_cache_page(tree->mapping, index);
1431 lock_extent(tree, start, end, GFP_NOFS);
1442 page = find_get_page(tree->mapping, index);
1453 static int unlock_range(struct extent_io_tree *tree, u64 start, u64 end)
1460 page = find_get_page(tree->mapping, index);
1465 unlock_extent(tree, start, end, GFP_NOFS);
1474 int set_state_private(struct extent_io_tree *tree, u64 start, u64 private) in set_state_private() argument
1480 spin_lock(&tree->lock); in set_state_private()
1485 node = tree_search(tree, start); in set_state_private()
1497 spin_unlock(&tree->lock); in set_state_private()
1501 int get_state_private(struct extent_io_tree *tree, u64 start, u64 *private) in get_state_private() argument
1507 spin_lock(&tree->lock); in get_state_private()
1512 node = tree_search(tree, start); in get_state_private()
1524 spin_unlock(&tree->lock); in get_state_private()
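set_state_private()/get_state_private() stash one u64 in the state that covers start; btrfs uses this slot to carry the expected checksum of a block across read completion. Both signatures are fully visible above:

        u64 private;

        /* Store a 32-bit csum against the state covering 'start'. */
        set_state_private(tree, start, (u64)csum);

        /* Fetch it back, e.g. in the read end_io path; a return of 0
         * means a state was found at 'start'. */
        if (get_state_private(tree, start, &private) == 0)
                csum = (u32)private;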
1534 int test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, in test_range_bit() argument
1541 spin_lock(&tree->lock); in test_range_bit()
1542 node = tree_search(tree, start); in test_range_bit()
1572 spin_unlock(&tree->lock); in test_range_bit()
1580 static int check_page_uptodate(struct extent_io_tree *tree, in check_page_uptodate() argument
1585 if (test_range_bit(tree, start, end, EXTENT_UPTODATE, 1)) in check_page_uptodate()
1594 static int check_page_locked(struct extent_io_tree *tree, in check_page_locked() argument
1599 if (!test_range_bit(tree, start, end, EXTENT_LOCKED, 0)) in check_page_locked()
1608 static int check_page_writeback(struct extent_io_tree *tree, in check_page_writeback() argument
1613 if (!test_range_bit(tree, start, end, EXTENT_WRITEBACK, 0)) in check_page_writeback()
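The last argument of test_range_bit() ("filled") selects between "every byte in the range has the bit" (1) and "any overlapping state has it" (0); the three check_page_*() helpers above use both modes. check_page_uptodate() is essentially:

        /* Mirrors check_page_uptodate() above: promote the struct page
         * flag only when the whole byte range is uptodate. */
        if (test_range_bit(tree, start, end, EXTENT_UPTODATE, 1))
                SetPageUptodate(page);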
1633 struct extent_io_tree *tree; in end_bio_extent_writepage() local
1641 tree = &BTRFS_I(page->mapping->host)->io_tree; in end_bio_extent_writepage()
1654 if (tree->ops && tree->ops->writepage_end_io_hook) { in end_bio_extent_writepage()
1655 ret = tree->ops->writepage_end_io_hook(page, start, in end_bio_extent_writepage()
1661 if (!uptodate && tree->ops && in end_bio_extent_writepage()
1662 tree->ops->writepage_io_failed_hook) { in end_bio_extent_writepage()
1663 ret = tree->ops->writepage_io_failed_hook(bio, page, in end_bio_extent_writepage()
1672 clear_extent_uptodate(tree, start, end, GFP_ATOMIC); in end_bio_extent_writepage()
1677 clear_extent_writeback(tree, start, end, GFP_ATOMIC); in end_bio_extent_writepage()
1682 check_page_writeback(tree, page); in end_bio_extent_writepage()
1703 struct extent_io_tree *tree; in end_bio_extent_readpage() local
1714 tree = &BTRFS_I(page->mapping->host)->io_tree; in end_bio_extent_readpage()
1728 if (uptodate && tree->ops && tree->ops->readpage_end_io_hook) { in end_bio_extent_readpage()
1729 ret = tree->ops->readpage_end_io_hook(page, start, end, in end_bio_extent_readpage()
1734 if (!uptodate && tree->ops && in end_bio_extent_readpage()
1735 tree->ops->readpage_io_failed_hook) { in end_bio_extent_readpage()
1736 ret = tree->ops->readpage_io_failed_hook(bio, page, in end_bio_extent_readpage()
1748 set_extent_uptodate(tree, start, end, in end_bio_extent_readpage()
1751 unlock_extent(tree, start, end, GFP_ATOMIC); in end_bio_extent_readpage()
1763 check_page_uptodate(tree, page); in end_bio_extent_readpage()
1768 check_page_locked(tree, page); in end_bio_extent_readpage()
1784 struct extent_io_tree *tree; in end_bio_extent_preparewrite() local
1790 tree = &BTRFS_I(page->mapping->host)->io_tree; in end_bio_extent_preparewrite()
1800 set_extent_uptodate(tree, start, end, GFP_ATOMIC); in end_bio_extent_preparewrite()
1806 unlock_extent(tree, start, end, GFP_ATOMIC); in end_bio_extent_preparewrite()
1840 struct extent_io_tree *tree = bio->bi_private; in submit_one_bio() local
1851 if (tree->ops && tree->ops->submit_bio_hook) in submit_one_bio()
1852 tree->ops->submit_bio_hook(page->mapping->host, rw, bio, in submit_one_bio()
1862 static int submit_extent_page(int rw, struct extent_io_tree *tree, in submit_extent_page() argument
1890 (tree->ops && tree->ops->merge_bio_hook && in submit_extent_page()
1891 tree->ops->merge_bio_hook(page, offset, page_size, bio, in submit_extent_page()
1910 bio->bi_private = tree; in submit_extent_page()
1939 static int __extent_read_full_page(struct extent_io_tree *tree, in __extent_read_full_page() argument
1968 lock_extent(tree, start, end, GFP_NOFS); in __extent_read_full_page()
1990 set_extent_uptodate(tree, cur, cur + iosize - 1, in __extent_read_full_page()
1992 unlock_extent(tree, cur, cur + iosize - 1, GFP_NOFS); in __extent_read_full_page()
1999 unlock_extent(tree, cur, end, GFP_NOFS); in __extent_read_full_page()
2034 set_extent_uptodate(tree, cur, cur + iosize - 1, in __extent_read_full_page()
2036 unlock_extent(tree, cur, cur + iosize - 1, GFP_NOFS); in __extent_read_full_page()
2042 if (test_range_bit(tree, cur, cur_end, EXTENT_UPTODATE, 1)) { in __extent_read_full_page()
2043 check_page_uptodate(tree, page); in __extent_read_full_page()
2044 unlock_extent(tree, cur, cur + iosize - 1, GFP_NOFS); in __extent_read_full_page()
2054 unlock_extent(tree, cur, cur + iosize - 1, GFP_NOFS); in __extent_read_full_page()
2061 if (tree->ops && tree->ops->readpage_io_hook) { in __extent_read_full_page()
2062 ret = tree->ops->readpage_io_hook(page, cur, in __extent_read_full_page()
2068 ret = submit_extent_page(READ, tree, page, in __extent_read_full_page()
2090 int extent_read_full_page(struct extent_io_tree *tree, struct page *page, in extent_read_full_page() argument
2097 ret = __extent_read_full_page(tree, page, get_extent, &bio, 0, in extent_read_full_page()
2115 struct extent_io_tree *tree = epd->tree; in __extent_writepage() local
2168 nr_delalloc = find_lock_delalloc_range(inode, tree, in __extent_writepage()
2177 tree->ops->fill_delalloc(inode, page, delalloc_start, in __extent_writepage()
2191 lock_extent(tree, start, page_end, GFP_NOFS); in __extent_writepage()
2195 if (tree->ops && tree->ops->writepage_start_hook) { in __extent_writepage()
2196 ret = tree->ops->writepage_start_hook(page, start, in __extent_writepage()
2199 unlock_extent(tree, start, page_end, GFP_NOFS); in __extent_writepage()
2210 if (test_range_bit(tree, start, page_end, EXTENT_DELALLOC, 0)) in __extent_writepage()
2214 clear_extent_dirty(tree, start, page_end, GFP_NOFS); in __extent_writepage()
2215 unlock_extent(tree, start, page_end, GFP_NOFS); in __extent_writepage()
2216 if (tree->ops && tree->ops->writepage_end_io_hook) in __extent_writepage()
2217 tree->ops->writepage_end_io_hook(page, start, in __extent_writepage()
2223 set_extent_uptodate(tree, start, page_end, GFP_NOFS); in __extent_writepage()
2228 clear_extent_dirty(tree, cur, page_end, GFP_NOFS); in __extent_writepage()
2229 unlock_extent(tree, unlock_start, page_end, GFP_NOFS); in __extent_writepage()
2230 if (tree->ops && tree->ops->writepage_end_io_hook) in __extent_writepage()
2231 tree->ops->writepage_end_io_hook(page, cur, in __extent_writepage()
2261 clear_extent_dirty(tree, cur, in __extent_writepage()
2264 unlock_extent(tree, unlock_start, cur + iosize - 1, in __extent_writepage()
2271 if (!compressed && tree->ops && in __extent_writepage()
2272 tree->ops->writepage_end_io_hook) in __extent_writepage()
2273 tree->ops->writepage_end_io_hook(page, cur, in __extent_writepage()
2290 if (0 && !test_range_bit(tree, cur, cur + iosize - 1, in __extent_writepage()
2297 clear_extent_dirty(tree, cur, cur + iosize - 1, GFP_NOFS); in __extent_writepage()
2298 if (tree->ops && tree->ops->writepage_io_hook) { in __extent_writepage()
2299 ret = tree->ops->writepage_io_hook(page, cur, in __extent_writepage()
2309 set_range_writeback(tree, cur, cur + iosize - 1); in __extent_writepage()
2317 ret = submit_extent_page(WRITE, tree, page, sector, in __extent_writepage()
2336 unlock_extent(tree, unlock_start, page_end, GFP_NOFS); in __extent_writepage()
2362 static int extent_write_cache_pages(struct extent_io_tree *tree, in extent_write_cache_pages() argument
2407 if (tree->ops && tree->ops->write_cache_pages_lock_hook) in extent_write_cache_pages()
2408 tree->ops->write_cache_pages_lock_hook(page); in extent_write_cache_pages()
2472 int extent_write_full_page(struct extent_io_tree *tree, struct page *page, in extent_write_full_page() argument
2480 .tree = tree, in extent_write_full_page()
2496 extent_write_cache_pages(tree, mapping, &wbc_writepages, in extent_write_full_page()
2503 int extent_write_locked_range(struct extent_io_tree *tree, struct inode *inode, in extent_write_locked_range() argument
2515 .tree = tree, in extent_write_locked_range()
2533 if (tree->ops && tree->ops->writepage_end_io_hook) in extent_write_locked_range()
2534 tree->ops->writepage_end_io_hook(page, start, in extent_write_locked_range()
2548 int extent_writepages(struct extent_io_tree *tree, in extent_writepages() argument
2556 .tree = tree, in extent_writepages()
2561 ret = extent_write_cache_pages(tree, mapping, wbc, in extent_writepages()
2569 int extent_readpages(struct extent_io_tree *tree, in extent_readpages() argument
2596 __extent_read_full_page(tree, page, get_extent, in extent_readpages()
2614 int extent_invalidatepage(struct extent_io_tree *tree, in extent_invalidatepage() argument
2625 lock_extent(tree, start, end, GFP_NOFS); in extent_invalidatepage()
2626 wait_on_extent_writeback(tree, start, end); in extent_invalidatepage()
2627 clear_extent_bit(tree, start, end, in extent_invalidatepage()
2637 int extent_commit_write(struct extent_io_tree *tree, in extent_commit_write() argument
2653 int extent_prepare_write(struct extent_io_tree *tree, in extent_prepare_write() argument
2679 lock_extent(tree, page_start, page_end, GFP_NOFS); in extent_prepare_write()
2689 isnew = clear_extent_new(tree, block_start, cur_end, GFP_NOFS); in extent_prepare_write()
2708 !test_range_bit(tree, block_start, cur_end, in extent_prepare_write()
2721 set_extent_bit(tree, block_start, in extent_prepare_write()
2724 ret = submit_extent_page(READ, tree, page, in extent_prepare_write()
2732 set_extent_uptodate(tree, block_start, cur_end, in extent_prepare_write()
2734 unlock_extent(tree, block_start, cur_end, GFP_NOFS); in extent_prepare_write()
2741 wait_extent_bit(tree, orig_block_start, in extent_prepare_write()
2744 check_page_uptodate(tree, page); in extent_prepare_write()
2756 struct extent_io_tree *tree, struct page *page, in try_release_extent_state() argument
2763 if (test_range_bit(tree, start, end, in try_release_extent_state()
2769 clear_extent_bit(tree, start, end, EXTENT_UPTODATE, in try_release_extent_state()
2781 struct extent_io_tree *tree, struct page *page, in try_release_extent_mapping() argument
2805 if (!test_range_bit(tree, em->start, in try_release_extent_mapping()
2821 return try_release_extent_state(map, tree, page, mask); in try_release_extent_mapping()
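These two helpers form the releasepage path: try_release_extent_mapping() refuses to drop extent_map entries whose range test_range_bit() still reports as busy, then hands the page to try_release_extent_state() to clear EXTENT_UPTODATE. A sketch of the call shape; the return convention is not visible in the listing, so "nonzero means releasable" is an assumption:

        int releasable;

        /* map, tree, page, mask as in the signatures above. */
        releasable = try_release_extent_mapping(map, tree, page, mask);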
2973 static struct extent_buffer *__alloc_extent_buffer(struct extent_io_tree *tree, in __alloc_extent_buffer() argument
3010 struct extent_buffer *alloc_extent_buffer(struct extent_io_tree *tree, in alloc_extent_buffer() argument
3021 struct address_space *mapping = tree->mapping; in alloc_extent_buffer()
3024 spin_lock(&tree->buffer_lock); in alloc_extent_buffer()
3025 eb = buffer_search(tree, start); in alloc_extent_buffer()
3028 spin_unlock(&tree->buffer_lock); in alloc_extent_buffer()
3032 spin_unlock(&tree->buffer_lock); in alloc_extent_buffer()
3034 eb = __alloc_extent_buffer(tree, start, len, mask); in alloc_extent_buffer()
3071 spin_lock(&tree->buffer_lock); in alloc_extent_buffer()
3072 exists = buffer_tree_insert(tree, start, &eb->rb_node); in alloc_extent_buffer()
3076 spin_unlock(&tree->buffer_lock); in alloc_extent_buffer()
3079 spin_unlock(&tree->buffer_lock); in alloc_extent_buffer()
3095 struct extent_buffer *find_extent_buffer(struct extent_io_tree *tree, in find_extent_buffer() argument
3101 spin_lock(&tree->buffer_lock); in find_extent_buffer()
3102 eb = buffer_search(tree, start); in find_extent_buffer()
3105 spin_unlock(&tree->buffer_lock); in find_extent_buffer()
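Extent buffers live in the second rb-tree (tree->buffer) under tree->buffer_lock; alloc_extent_buffer() re-checks for a racing insert via buffer_tree_insert() before committing. Both prototypes are truncated in the listing, so the (tree, start, len, ...) forms below are assumptions from this era of the code:

        struct extent_buffer *eb;

        /* Sketch: look up a cached buffer first, allocate on miss.
         * The page0/mask tail arguments are assumed. */
        eb = find_extent_buffer(tree, start, len, GFP_NOFS);
        if (!eb)
                eb = alloc_extent_buffer(tree, start, len, NULL, GFP_NOFS);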
3124 int clear_extent_buffer_dirty(struct extent_io_tree *tree, in clear_extent_buffer_dirty() argument
3135 set = clear_extent_dirty(tree, start, end, GFP_NOFS); in clear_extent_buffer_dirty()
3159 if (test_range_bit(tree, start, end, in clear_extent_buffer_dirty()
3178 int wait_on_extent_buffer_writeback(struct extent_io_tree *tree, in wait_on_extent_buffer_writeback() argument
3181 return wait_on_extent_writeback(tree, eb->start, in wait_on_extent_buffer_writeback()
3185 int set_extent_buffer_dirty(struct extent_io_tree *tree, in set_extent_buffer_dirty() argument
3207 set_extent_dirty(tree, page_offset(page), in set_extent_buffer_dirty()
3215 int clear_extent_buffer_uptodate(struct extent_io_tree *tree, in clear_extent_buffer_uptodate() argument
3225 clear_extent_uptodate(tree, eb->start, eb->start + eb->len - 1, in clear_extent_buffer_uptodate()
3235 int set_extent_buffer_uptodate(struct extent_io_tree *tree, in set_extent_buffer_uptodate() argument
3244 set_extent_uptodate(tree, eb->start, eb->start + eb->len - 1, in set_extent_buffer_uptodate()
3251 check_page_uptodate(tree, page); in set_extent_buffer_uptodate()
3259 int extent_range_uptodate(struct extent_io_tree *tree, in extent_range_uptodate() argument
3268 ret = test_range_bit(tree, start, end, EXTENT_UPTODATE, 1); in extent_range_uptodate()
3273 page = find_get_page(tree->mapping, index); in extent_range_uptodate()
3285 int extent_buffer_uptodate(struct extent_io_tree *tree, in extent_buffer_uptodate() argument
3297 ret = test_range_bit(tree, eb->start, eb->start + eb->len - 1, in extent_buffer_uptodate()
3313 int read_extent_buffer_pages(struct extent_io_tree *tree, in read_extent_buffer_pages() argument
3333 if (test_range_bit(tree, eb->start, eb->start + eb->len - 1, in read_extent_buffer_pages()
3373 err = __extent_read_full_page(tree, page, in read_extent_buffer_pages()
3775 int try_release_extent_buffer(struct extent_io_tree *tree, struct page *page) in try_release_extent_buffer() argument
3783 spin_lock(&tree->buffer_lock); in try_release_extent_buffer()
3784 eb = buffer_search(tree, start); in try_release_extent_buffer()
3796 rb_erase(&eb->rb_node, &tree->buffer); in try_release_extent_buffer()
3799 spin_unlock(&tree->buffer_lock); in try_release_extent_buffer()