Lines Matching +full:root +full:- +full:node
1 // SPDX-License-Identifier: GPL-2.0
15 #include "disk-io.h"
17 #include "delalloc-space.h"
24 if (entry->file_offset + entry->num_bytes < entry->file_offset) in entry_end()
25 return (u64)-1; in entry_end()
26 return entry->file_offset + entry->num_bytes; in entry_end()
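entry_end() guards the sum file_offset + num_bytes against unsigned wrap-around and clamps to (u64)-1. A minimal user-space sketch of the same guard, with illustrative names (range_end is not a btrfs helper):

#include <stdint.h>
#include <stdio.h>

/* Clamp to the largest u64 when offset + len wraps, as entry_end() does
 * with (u64)-1. */
static uint64_t range_end(uint64_t offset, uint64_t len)
{
        if (offset + len < offset)      /* unsigned addition wrapped around */
                return UINT64_MAX;
        return offset + len;
}

int main(void)
{
        printf("%llu\n", (unsigned long long)range_end(4096, 8192));
        printf("%llu\n", (unsigned long long)range_end(UINT64_MAX - 10, 100));
        return 0;
}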
29 /* returns NULL if the insertion worked, or it returns the node it did find
32 static struct rb_node *tree_insert(struct rb_root *root, u64 file_offset, in tree_insert() argument
33 struct rb_node *node) in tree_insert() argument
35 struct rb_node **p = &root->rb_node; in tree_insert()
43 if (file_offset < entry->file_offset) in tree_insert()
44 p = &(*p)->rb_left; in tree_insert()
46 p = &(*p)->rb_right; in tree_insert()
51 rb_link_node(node, parent, p); in tree_insert()
52 rb_insert_color(node, root); in tree_insert()
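tree_insert() above is the standard Linux rbtree descent: walk down from rb_node, remember the parent and the child link to fill, then rb_link_node()/rb_insert_color() splice the new node in, returning any node already occupying that offset. A simplified, self-contained sketch of the same descent over a plain binary search tree; the red-black rebalancing done by rb_insert_color() is omitted, and all names are illustrative:

#include <stddef.h>
#include <stdint.h>

struct ins_node {
        uint64_t file_offset;
        struct ins_node *left, *right;
};

/* Walk down, remembering which child pointer to fill; on a collision
 * return the node already there, otherwise link the new node and
 * return NULL, matching the tree_insert() contract above. */
static struct ins_node *ins_tree_insert(struct ins_node **root,
                                        struct ins_node *node)
{
        struct ins_node **p = root;

        while (*p) {
                struct ins_node *cur = *p;

                if (node->file_offset < cur->file_offset)
                        p = &cur->left;
                else if (node->file_offset > cur->file_offset)
                        p = &cur->right;
                else
                        return cur;     /* an entry already covers this offset */
        }
        node->left = node->right = NULL;
        *p = node;                      /* fill the empty child slot */
        return NULL;                    /* NULL means the insertion worked */
}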
60 static struct rb_node *__tree_search(struct rb_root *root, u64 file_offset, in __tree_search() argument
63 struct rb_node *n = root->rb_node; in __tree_search()
74 if (file_offset < entry->file_offset) in __tree_search()
75 n = n->rb_left; in __tree_search()
77 n = n->rb_right; in __tree_search()
115 if (file_offset < entry->file_offset || in offset_in_entry()
116 entry->file_offset + entry->num_bytes <= file_offset) in offset_in_entry()
124 if (file_offset + len <= entry->file_offset || in range_overlaps()
125 entry->file_offset + entry->num_bytes <= file_offset) in range_overlaps()
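offset_in_entry() and range_overlaps() treat an ordered extent as the half-open byte range [file_offset, file_offset + num_bytes). A standalone restatement of both predicates, assuming the additions do not overflow (the real code relies on entry_end() for that):

#include <stdbool.h>
#include <stdint.h>

/* A byte offset lies inside the half-open range [start, start + len). */
static bool offset_in_range(uint64_t start, uint64_t len, uint64_t offset)
{
        return offset >= start && offset < start + len;
}

/* Two half-open ranges overlap unless one ends at or before the point
 * where the other begins, the negation of the test range_overlaps() uses. */
static bool ranges_overlap(uint64_t a_start, uint64_t a_len,
                           uint64_t b_start, uint64_t b_len)
{
        if (a_start + a_len <= b_start || b_start + b_len <= a_start)
                return false;
        return true;
}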
137 struct rb_root *root = &tree->tree; in tree_search() local
142 if (tree->last) { in tree_search()
143 entry = rb_entry(tree->last, struct btrfs_ordered_extent, in tree_search()
146 return tree->last; in tree_search()
148 ret = __tree_search(root, file_offset, &prev); in tree_search()
152 tree->last = ret; in tree_search()
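tree_search() consults tree->last, the node returned by the previous lookup, before walking the tree, which keeps repeated lookups inside the same extent cheap. A hypothetical user-space sketch of that caching idea; lookup_tree/lookup_node are illustrative, and the descent is a simplified walk rather than the prev-node handling __tree_search() does:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct lookup_node {
        uint64_t start;
        uint64_t len;
        struct lookup_node *left, *right;
};

struct lookup_tree {
        struct lookup_node *root;
        struct lookup_node *last;       /* node returned by the previous lookup */
};

static bool lookup_node_contains(const struct lookup_node *n, uint64_t offset)
{
        return offset >= n->start && offset < n->start + n->len;
}

/* Try the cached node first; refresh the cache whenever the walk finds a
 * containing node, so back-to-back lookups in the same extent stay O(1). */
static struct lookup_node *lookup_search(struct lookup_tree *t, uint64_t offset)
{
        struct lookup_node *n;

        if (t->last && lookup_node_contains(t->last, offset))
                return t->last;

        n = t->root;
        while (n && !lookup_node_contains(n, offset))
                n = offset < n->start ? n->left : n->right;
        if (n)
                t->last = n;
        return n;
}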
157 * Allocate and add a new ordered_extent into the per-inode tree.
167 struct btrfs_root *root = inode->root; in __btrfs_add_ordered_extent() local
168 struct btrfs_fs_info *fs_info = root->fs_info; in __btrfs_add_ordered_extent()
169 struct btrfs_ordered_inode_tree *tree = &inode->ordered_tree; in __btrfs_add_ordered_extent()
170 struct rb_node *node; in __btrfs_add_ordered_extent() local
191 return -ENOMEM; in __btrfs_add_ordered_extent()
193 entry->file_offset = file_offset; in __btrfs_add_ordered_extent()
194 entry->disk_bytenr = disk_bytenr; in __btrfs_add_ordered_extent()
195 entry->num_bytes = num_bytes; in __btrfs_add_ordered_extent()
196 entry->disk_num_bytes = disk_num_bytes; in __btrfs_add_ordered_extent()
197 entry->bytes_left = num_bytes; in __btrfs_add_ordered_extent()
198 entry->inode = igrab(&inode->vfs_inode); in __btrfs_add_ordered_extent()
199 entry->compress_type = compress_type; in __btrfs_add_ordered_extent()
200 entry->truncated_len = (u64)-1; in __btrfs_add_ordered_extent()
201 entry->qgroup_rsv = ret; in __btrfs_add_ordered_extent()
203 set_bit(type, &entry->flags); in __btrfs_add_ordered_extent()
206 percpu_counter_add_batch(&fs_info->dio_bytes, num_bytes, in __btrfs_add_ordered_extent()
207 fs_info->delalloc_batch); in __btrfs_add_ordered_extent()
208 set_bit(BTRFS_ORDERED_DIRECT, &entry->flags); in __btrfs_add_ordered_extent()
212 refcount_set(&entry->refs, 1); in __btrfs_add_ordered_extent()
213 init_waitqueue_head(&entry->wait); in __btrfs_add_ordered_extent()
214 INIT_LIST_HEAD(&entry->list); in __btrfs_add_ordered_extent()
215 INIT_LIST_HEAD(&entry->log_list); in __btrfs_add_ordered_extent()
216 INIT_LIST_HEAD(&entry->root_extent_list); in __btrfs_add_ordered_extent()
217 INIT_LIST_HEAD(&entry->work_list); in __btrfs_add_ordered_extent()
218 init_completion(&entry->completion); in __btrfs_add_ordered_extent()
222 spin_lock_irq(&tree->lock); in __btrfs_add_ordered_extent()
223 node = tree_insert(&tree->tree, file_offset, in __btrfs_add_ordered_extent()
224 &entry->rb_node); in __btrfs_add_ordered_extent()
225 if (node) in __btrfs_add_ordered_extent()
226 btrfs_panic(fs_info, -EEXIST, in __btrfs_add_ordered_extent()
229 spin_unlock_irq(&tree->lock); in __btrfs_add_ordered_extent()
231 spin_lock(&root->ordered_extent_lock); in __btrfs_add_ordered_extent()
232 list_add_tail(&entry->root_extent_list, in __btrfs_add_ordered_extent()
233 &root->ordered_extents); in __btrfs_add_ordered_extent()
234 root->nr_ordered_extents++; in __btrfs_add_ordered_extent()
235 if (root->nr_ordered_extents == 1) { in __btrfs_add_ordered_extent()
236 spin_lock(&fs_info->ordered_root_lock); in __btrfs_add_ordered_extent()
237 BUG_ON(!list_empty(&root->ordered_root)); in __btrfs_add_ordered_extent()
238 list_add_tail(&root->ordered_root, &fs_info->ordered_roots); in __btrfs_add_ordered_extent()
239 spin_unlock(&fs_info->ordered_root_lock); in __btrfs_add_ordered_extent()
241 spin_unlock(&root->ordered_extent_lock); in __btrfs_add_ordered_extent()
248 spin_lock(&inode->lock); in __btrfs_add_ordered_extent()
250 spin_unlock(&inode->lock); in __btrfs_add_ordered_extent()
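__btrfs_add_ordered_extent() does two pieces of bookkeeping under different locks: the extent goes into the per-inode tree under tree->lock, and it is appended to root->ordered_extents under root->ordered_extent_lock, with the 0 -> 1 transition also publishing the root on fs_info->ordered_roots. A hedged sketch of that second, hierarchical step using pthread mutexes and hypothetical oe_* types, not the kernel's list API:

#include <pthread.h>

struct oe_item {
        struct oe_item *next;
};

struct oe_root {
        pthread_mutex_t lock;           /* like root->ordered_extent_lock */
        struct oe_item *ordered;        /* like root->ordered_extents */
        unsigned long nr_ordered;
        struct oe_root *next_active;    /* linkage on the fs-wide list */
};

struct oe_fs {
        pthread_mutex_t lock;           /* like fs_info->ordered_root_lock */
        struct oe_root *active_roots;   /* like fs_info->ordered_roots */
};

/* Adding the first in-flight item to a root also publishes the root on
 * the fs-wide list, so global scans only visit roots that actually have
 * ordered work pending. */
static void oe_add(struct oe_fs *fs, struct oe_root *root, struct oe_item *it)
{
        pthread_mutex_lock(&root->lock);
        it->next = root->ordered;
        root->ordered = it;
        if (++root->nr_ordered == 1) {
                pthread_mutex_lock(&fs->lock);
                root->next_active = fs->active_roots;
                fs->active_roots = root;
                pthread_mutex_unlock(&fs->lock);
        }
        pthread_mutex_unlock(&root->lock);
}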
293 tree = &BTRFS_I(entry->inode)->ordered_tree; in btrfs_add_ordered_sum()
294 spin_lock_irq(&tree->lock); in btrfs_add_ordered_sum()
295 list_add_tail(&sum->list, &entry->list); in btrfs_add_ordered_sum()
296 spin_unlock_irq(&tree->lock); in btrfs_add_ordered_sum()
315 struct btrfs_fs_info *fs_info = inode->root->fs_info; in btrfs_dec_test_first_ordered_pending()
316 struct btrfs_ordered_inode_tree *tree = &inode->ordered_tree; in btrfs_dec_test_first_ordered_pending()
317 struct rb_node *node; in btrfs_dec_test_first_ordered_pending() local
325 spin_lock_irqsave(&tree->lock, flags); in btrfs_dec_test_first_ordered_pending()
326 node = tree_search(tree, *file_offset); in btrfs_dec_test_first_ordered_pending()
327 if (!node) { in btrfs_dec_test_first_ordered_pending()
332 entry = rb_entry(node, struct btrfs_ordered_extent, rb_node); in btrfs_dec_test_first_ordered_pending()
338 dec_start = max(*file_offset, entry->file_offset); in btrfs_dec_test_first_ordered_pending()
340 entry->file_offset + entry->num_bytes); in btrfs_dec_test_first_ordered_pending()
346 to_dec = dec_end - dec_start; in btrfs_dec_test_first_ordered_pending()
347 if (to_dec > entry->bytes_left) { in btrfs_dec_test_first_ordered_pending()
350 entry->bytes_left, to_dec); in btrfs_dec_test_first_ordered_pending()
352 entry->bytes_left -= to_dec; in btrfs_dec_test_first_ordered_pending()
354 set_bit(BTRFS_ORDERED_IOERR, &entry->flags); in btrfs_dec_test_first_ordered_pending()
356 if (entry->bytes_left == 0) { in btrfs_dec_test_first_ordered_pending()
357 ret = test_and_set_bit(BTRFS_ORDERED_IO_DONE, &entry->flags); in btrfs_dec_test_first_ordered_pending()
359 cond_wake_up_nomb(&entry->wait); in btrfs_dec_test_first_ordered_pending()
366 refcount_inc(&entry->refs); in btrfs_dec_test_first_ordered_pending()
368 spin_unlock_irqrestore(&tree->lock, flags); in btrfs_dec_test_first_ordered_pending()
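btrfs_dec_test_first_ordered_pending() clamps the completed IO range to the ordered extent it found, subtracts the intersection from bytes_left, and reports completion when bytes_left reaches zero; btrfs_dec_test_ordered_pending() below applies the same decrement-and-test idea to a single known offset. A standalone sketch of the arithmetic, assuming the caller's range and the extent overlap (acct_* names are illustrative):

#include <stdbool.h>
#include <stdint.h>

struct acct_ordered {
        uint64_t file_offset;
        uint64_t num_bytes;
        uint64_t bytes_left;
};

static uint64_t max_u64(uint64_t a, uint64_t b) { return a > b ? a : b; }
static uint64_t min_u64(uint64_t a, uint64_t b) { return a < b ? a : b; }

/* Clamp the finished IO range to the extent, subtract the intersection
 * from bytes_left, advance the caller's cursor, and report whether the
 * whole extent has now been written. */
static bool acct_io(struct acct_ordered *o, uint64_t *io_offset,
                    uint64_t io_size)
{
        uint64_t start = max_u64(*io_offset, o->file_offset);
        uint64_t end = min_u64(*io_offset + io_size,
                               o->file_offset + o->num_bytes);
        uint64_t to_dec;

        if (start >= end)
                return false;           /* ranges do not intersect */
        to_dec = end - start;
        if (to_dec > o->bytes_left)
                to_dec = o->bytes_left; /* the real code logs this as a bug */
        o->bytes_left -= to_dec;
        *io_offset = end;               /* caller continues from here */
        return o->bytes_left == 0;
}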
385 struct btrfs_ordered_inode_tree *tree = &inode->ordered_tree; in btrfs_dec_test_ordered_pending()
386 struct rb_node *node; in btrfs_dec_test_ordered_pending() local
391 spin_lock_irqsave(&tree->lock, flags); in btrfs_dec_test_ordered_pending()
397 node = tree_search(tree, file_offset); in btrfs_dec_test_ordered_pending()
398 if (!node) { in btrfs_dec_test_ordered_pending()
403 entry = rb_entry(node, struct btrfs_ordered_extent, rb_node); in btrfs_dec_test_ordered_pending()
410 if (io_size > entry->bytes_left) { in btrfs_dec_test_ordered_pending()
411 btrfs_crit(inode->root->fs_info, in btrfs_dec_test_ordered_pending()
413 entry->bytes_left, io_size); in btrfs_dec_test_ordered_pending()
415 entry->bytes_left -= io_size; in btrfs_dec_test_ordered_pending()
417 set_bit(BTRFS_ORDERED_IOERR, &entry->flags); in btrfs_dec_test_ordered_pending()
419 if (entry->bytes_left == 0) { in btrfs_dec_test_ordered_pending()
420 ret = test_and_set_bit(BTRFS_ORDERED_IO_DONE, &entry->flags); in btrfs_dec_test_ordered_pending()
422 cond_wake_up_nomb(&entry->wait); in btrfs_dec_test_ordered_pending()
429 refcount_inc(&entry->refs); in btrfs_dec_test_ordered_pending()
431 spin_unlock_irqrestore(&tree->lock, flags); in btrfs_dec_test_ordered_pending()
444 trace_btrfs_ordered_extent_put(BTRFS_I(entry->inode), entry); in btrfs_put_ordered_extent()
446 if (refcount_dec_and_test(&entry->refs)) { in btrfs_put_ordered_extent()
447 ASSERT(list_empty(&entry->root_extent_list)); in btrfs_put_ordered_extent()
448 ASSERT(list_empty(&entry->log_list)); in btrfs_put_ordered_extent()
449 ASSERT(RB_EMPTY_NODE(&entry->rb_node)); in btrfs_put_ordered_extent()
450 if (entry->inode) in btrfs_put_ordered_extent()
451 btrfs_add_delayed_iput(entry->inode); in btrfs_put_ordered_extent()
452 while (!list_empty(&entry->list)) { in btrfs_put_ordered_extent()
453 cur = entry->list.next; in btrfs_put_ordered_extent()
455 list_del(&sum->list); in btrfs_put_ordered_extent()
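btrfs_put_ordered_extent() lets whichever caller drops the final reference tear the entry down, including freeing any checksum records still chained on entry->list. A small C11 sketch of that drop-to-zero ownership rule with hypothetical put_* types:

#include <stdatomic.h>
#include <stdlib.h>

struct put_sum {
        struct put_sum *next;
};

struct put_extent {
        atomic_int refs;
        struct put_sum *sums;           /* pending checksum records */
};

/* Whoever takes the reference count to zero owns the teardown and frees
 * every checksum record still attached, mirroring how the put above
 * drains entry->list before freeing the ordered extent. */
static void put_extent_drop(struct put_extent *e)
{
        if (atomic_fetch_sub(&e->refs, 1) == 1) {
                while (e->sums) {
                        struct put_sum *s = e->sums;

                        e->sums = s->next;
                        free(s);
                }
                free(e);
        }
}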
470 struct btrfs_root *root = btrfs_inode->root; in btrfs_remove_ordered_extent() local
471 struct btrfs_fs_info *fs_info = root->fs_info; in btrfs_remove_ordered_extent()
472 struct rb_node *node; in btrfs_remove_ordered_extent() local
476 spin_lock(&btrfs_inode->lock); in btrfs_remove_ordered_extent()
477 btrfs_mod_outstanding_extents(btrfs_inode, -1); in btrfs_remove_ordered_extent()
478 spin_unlock(&btrfs_inode->lock); in btrfs_remove_ordered_extent()
479 if (root != fs_info->tree_root) in btrfs_remove_ordered_extent()
480 btrfs_delalloc_release_metadata(btrfs_inode, entry->num_bytes, in btrfs_remove_ordered_extent()
483 if (test_bit(BTRFS_ORDERED_DIRECT, &entry->flags)) in btrfs_remove_ordered_extent()
484 percpu_counter_add_batch(&fs_info->dio_bytes, -entry->num_bytes, in btrfs_remove_ordered_extent()
485 fs_info->delalloc_batch); in btrfs_remove_ordered_extent()
487 tree = &btrfs_inode->ordered_tree; in btrfs_remove_ordered_extent()
488 spin_lock_irq(&tree->lock); in btrfs_remove_ordered_extent()
489 node = &entry->rb_node; in btrfs_remove_ordered_extent()
490 rb_erase(node, &tree->tree); in btrfs_remove_ordered_extent()
491 RB_CLEAR_NODE(node); in btrfs_remove_ordered_extent()
492 if (tree->last == node) in btrfs_remove_ordered_extent()
493 tree->last = NULL; in btrfs_remove_ordered_extent()
494 set_bit(BTRFS_ORDERED_COMPLETE, &entry->flags); in btrfs_remove_ordered_extent()
495 pending = test_and_clear_bit(BTRFS_ORDERED_PENDING, &entry->flags); in btrfs_remove_ordered_extent()
496 spin_unlock_irq(&tree->lock); in btrfs_remove_ordered_extent()
511 spin_lock(&fs_info->trans_lock); in btrfs_remove_ordered_extent()
512 trans = fs_info->running_transaction; in btrfs_remove_ordered_extent()
514 refcount_inc(&trans->use_count); in btrfs_remove_ordered_extent()
515 spin_unlock(&fs_info->trans_lock); in btrfs_remove_ordered_extent()
519 if (atomic_dec_and_test(&trans->pending_ordered)) in btrfs_remove_ordered_extent()
520 wake_up(&trans->pending_wait); in btrfs_remove_ordered_extent()
525 spin_lock(&root->ordered_extent_lock); in btrfs_remove_ordered_extent()
526 list_del_init(&entry->root_extent_list); in btrfs_remove_ordered_extent()
527 root->nr_ordered_extents--; in btrfs_remove_ordered_extent()
531 if (!root->nr_ordered_extents) { in btrfs_remove_ordered_extent()
532 spin_lock(&fs_info->ordered_root_lock); in btrfs_remove_ordered_extent()
533 BUG_ON(list_empty(&root->ordered_root)); in btrfs_remove_ordered_extent()
534 list_del_init(&root->ordered_root); in btrfs_remove_ordered_extent()
535 spin_unlock(&fs_info->ordered_root_lock); in btrfs_remove_ordered_extent()
537 spin_unlock(&root->ordered_extent_lock); in btrfs_remove_ordered_extent()
538 wake_up(&entry->wait); in btrfs_remove_ordered_extent()
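The removal path is the mirror image of the insertion bookkeeping: the extent leaves the per-inode tree, the cached tree->last pointer is dropped if it pointed at it, and when root->nr_ordered_extents reaches zero the root comes off fs_info->ordered_roots. A short sketch of that last 1 -> 0 transition, again with hypothetical types and the global-list unlink itself elided:

#include <pthread.h>

struct rm_item {
        struct rm_item *next;
};

struct rm_root {
        pthread_mutex_t lock;           /* like root->ordered_extent_lock */
        struct rm_item *ordered;        /* like root->ordered_extents */
        unsigned long nr_ordered;
        int on_fs_list;                 /* stands in for the ordered_roots linkage */
};

/* Unlink one in-flight item; when the count drops to zero the root no
 * longer needs fs-wide tracking. */
static void rm_remove(struct rm_root *root, struct rm_item *item)
{
        struct rm_item **pp;

        pthread_mutex_lock(&root->lock);
        for (pp = &root->ordered; *pp; pp = &(*pp)->next) {
                if (*pp == item) {
                        *pp = item->next;
                        root->nr_ordered--;
                        break;
                }
        }
        if (root->nr_ordered == 0)
                root->on_fs_list = 0;   /* global list unlink elided */
        pthread_mutex_unlock(&root->lock);
}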
547 complete(&ordered->completion); in btrfs_run_ordered_extent_work()
551 * wait for all the ordered extents in a root. This is done when balancing
554 u64 btrfs_wait_ordered_extents(struct btrfs_root *root, u64 nr, in btrfs_wait_ordered_extents() argument
557 struct btrfs_fs_info *fs_info = root->fs_info; in btrfs_wait_ordered_extents()
565 mutex_lock(&root->ordered_extent_mutex); in btrfs_wait_ordered_extents()
566 spin_lock(&root->ordered_extent_lock); in btrfs_wait_ordered_extents()
567 list_splice_init(&root->ordered_extents, &splice); in btrfs_wait_ordered_extents()
572 if (range_end <= ordered->disk_bytenr || in btrfs_wait_ordered_extents()
573 ordered->disk_bytenr + ordered->disk_num_bytes <= range_start) { in btrfs_wait_ordered_extents()
574 list_move_tail(&ordered->root_extent_list, &skipped); in btrfs_wait_ordered_extents()
575 cond_resched_lock(&root->ordered_extent_lock); in btrfs_wait_ordered_extents()
579 list_move_tail(&ordered->root_extent_list, in btrfs_wait_ordered_extents()
580 &root->ordered_extents); in btrfs_wait_ordered_extents()
581 refcount_inc(&ordered->refs); in btrfs_wait_ordered_extents()
582 spin_unlock(&root->ordered_extent_lock); in btrfs_wait_ordered_extents()
584 btrfs_init_work(&ordered->flush_work, in btrfs_wait_ordered_extents()
586 list_add_tail(&ordered->work_list, &works); in btrfs_wait_ordered_extents()
587 btrfs_queue_work(fs_info->flush_workers, &ordered->flush_work); in btrfs_wait_ordered_extents()
590 spin_lock(&root->ordered_extent_lock); in btrfs_wait_ordered_extents()
592 nr--; in btrfs_wait_ordered_extents()
595 list_splice_tail(&skipped, &root->ordered_extents); in btrfs_wait_ordered_extents()
596 list_splice_tail(&splice, &root->ordered_extents); in btrfs_wait_ordered_extents()
597 spin_unlock(&root->ordered_extent_lock); in btrfs_wait_ordered_extents()
600 list_del_init(&ordered->work_list); in btrfs_wait_ordered_extents()
601 wait_for_completion(&ordered->completion); in btrfs_wait_ordered_extents()
605 mutex_unlock(&root->ordered_extent_mutex); in btrfs_wait_ordered_extents()
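btrfs_wait_ordered_extents() uses the splice-and-drain idiom: the whole root->ordered_extents list is moved to a private head under the lock, entries are handled with the lock dropped (queued to flush_workers and waited on), and anything skipped or unprocessed is spliced back; btrfs_wait_ordered_roots() below repeats the same shape one level up over fs_info->ordered_roots. A compact user-space sketch of the idiom, with a caller-supplied handle() standing in for the flush work:

#include <pthread.h>
#include <stddef.h>

struct drain_entry {
        struct drain_entry *next;
};

struct drain_queue {
        pthread_mutex_t lock;
        struct drain_entry *head;
};

/* Move the whole list to a private head under the lock, handle up to
 * `budget` entries with the lock dropped around the slow work, then put
 * anything unprocessed back for a later pass. */
static unsigned long drain_queue_run(struct drain_queue *q,
                                     unsigned long budget,
                                     void (*handle)(struct drain_entry *))
{
        struct drain_entry *splice, *entry;
        unsigned long done = 0;

        pthread_mutex_lock(&q->lock);
        splice = q->head;
        q->head = NULL;
        while (splice && budget--) {
                entry = splice;
                splice = entry->next;
                pthread_mutex_unlock(&q->lock); /* slow work runs unlocked */
                handle(entry);
                done++;
                pthread_mutex_lock(&q->lock);
        }
        while (splice) {                        /* splice the leftovers back */
                entry = splice;
                splice = entry->next;
                entry->next = q->head;
                q->head = entry;
        }
        pthread_mutex_unlock(&q->lock);
        return done;
}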
613 struct btrfs_root *root; in btrfs_wait_ordered_roots() local
619 mutex_lock(&fs_info->ordered_operations_mutex); in btrfs_wait_ordered_roots()
620 spin_lock(&fs_info->ordered_root_lock); in btrfs_wait_ordered_roots()
621 list_splice_init(&fs_info->ordered_roots, &splice); in btrfs_wait_ordered_roots()
623 root = list_first_entry(&splice, struct btrfs_root, in btrfs_wait_ordered_roots()
625 root = btrfs_grab_root(root); in btrfs_wait_ordered_roots()
626 BUG_ON(!root); in btrfs_wait_ordered_roots()
627 list_move_tail(&root->ordered_root, in btrfs_wait_ordered_roots()
628 &fs_info->ordered_roots); in btrfs_wait_ordered_roots()
629 spin_unlock(&fs_info->ordered_root_lock); in btrfs_wait_ordered_roots()
631 done = btrfs_wait_ordered_extents(root, nr, in btrfs_wait_ordered_roots()
633 btrfs_put_root(root); in btrfs_wait_ordered_roots()
635 spin_lock(&fs_info->ordered_root_lock); in btrfs_wait_ordered_roots()
637 nr -= done; in btrfs_wait_ordered_roots()
640 list_splice_tail(&splice, &fs_info->ordered_roots); in btrfs_wait_ordered_roots()
641 spin_unlock(&fs_info->ordered_root_lock); in btrfs_wait_ordered_roots()
642 mutex_unlock(&fs_info->ordered_operations_mutex); in btrfs_wait_ordered_roots()
654 u64 start = entry->file_offset; in btrfs_start_ordered_extent()
655 u64 end = start + entry->num_bytes - 1; in btrfs_start_ordered_extent()
656 struct btrfs_inode *inode = BTRFS_I(entry->inode); in btrfs_start_ordered_extent()
665 if (!test_bit(BTRFS_ORDERED_DIRECT, &entry->flags)) in btrfs_start_ordered_extent()
666 filemap_fdatawrite_range(inode->vfs_inode.i_mapping, start, end); in btrfs_start_ordered_extent()
668 wait_event(entry->wait, test_bit(BTRFS_ORDERED_COMPLETE, in btrfs_start_ordered_extent()
669 &entry->flags)); in btrfs_start_ordered_extent()
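btrfs_start_ordered_extent() kicks writeback for the extent's page range and then sleeps in wait_event() until the completion path sets BTRFS_ORDERED_COMPLETE and wakes entry->wait. A rough pthread condition-variable analogue of that wait/wake pairing, with all names hypothetical:

#include <pthread.h>
#include <stdbool.h>

struct wait_ordered {
        pthread_mutex_t lock;
        pthread_cond_t done;
        bool complete;                  /* set by the completion path */
};

/* Waiting side: sleep until the flag is set, like the wait_event() on
 * BTRFS_ORDERED_COMPLETE above. */
static void ordered_wait_complete(struct wait_ordered *o)
{
        pthread_mutex_lock(&o->lock);
        while (!o->complete)
                pthread_cond_wait(&o->done, &o->lock);
        pthread_mutex_unlock(&o->lock);
}

/* Completion side: set the flag, then wake every waiter. */
static void ordered_mark_complete(struct wait_ordered *o)
{
        pthread_mutex_lock(&o->lock);
        o->complete = true;
        pthread_cond_broadcast(&o->done);
        pthread_mutex_unlock(&o->lock);
}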
687 orig_end = start + len - 1; in btrfs_wait_ordered_range()
703 * before the ordered extents complete - to avoid failures (-EEXIST) in btrfs_wait_ordered_range()
706 ret_wb = filemap_fdatawait_range(inode->i_mapping, start, orig_end); in btrfs_wait_ordered_range()
713 if (ordered->file_offset > orig_end) { in btrfs_wait_ordered_range()
717 if (ordered->file_offset + ordered->num_bytes <= start) { in btrfs_wait_ordered_range()
722 end = ordered->file_offset; in btrfs_wait_ordered_range()
728 if (test_bit(BTRFS_ORDERED_IOERR, &ordered->flags)) in btrfs_wait_ordered_range()
729 ret = -EIO; in btrfs_wait_ordered_range()
733 end--; in btrfs_wait_ordered_range()
746 struct rb_node *node; in btrfs_lookup_ordered_extent() local
749 tree = &inode->ordered_tree; in btrfs_lookup_ordered_extent()
750 spin_lock_irq(&tree->lock); in btrfs_lookup_ordered_extent()
751 node = tree_search(tree, file_offset); in btrfs_lookup_ordered_extent()
752 if (!node) in btrfs_lookup_ordered_extent()
755 entry = rb_entry(node, struct btrfs_ordered_extent, rb_node); in btrfs_lookup_ordered_extent()
759 refcount_inc(&entry->refs); in btrfs_lookup_ordered_extent()
761 spin_unlock_irq(&tree->lock); in btrfs_lookup_ordered_extent()
772 struct rb_node *node; in btrfs_lookup_ordered_range() local
775 tree = &inode->ordered_tree; in btrfs_lookup_ordered_range()
776 spin_lock_irq(&tree->lock); in btrfs_lookup_ordered_range()
777 node = tree_search(tree, file_offset); in btrfs_lookup_ordered_range()
778 if (!node) { in btrfs_lookup_ordered_range()
779 node = tree_search(tree, file_offset + len); in btrfs_lookup_ordered_range()
780 if (!node) in btrfs_lookup_ordered_range()
785 entry = rb_entry(node, struct btrfs_ordered_extent, rb_node); in btrfs_lookup_ordered_range()
789 if (entry->file_offset >= file_offset + len) { in btrfs_lookup_ordered_range()
794 node = rb_next(node); in btrfs_lookup_ordered_range()
795 if (!node) in btrfs_lookup_ordered_range()
800 refcount_inc(&entry->refs); in btrfs_lookup_ordered_range()
801 spin_unlock_irq(&tree->lock); in btrfs_lookup_ordered_range()
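btrfs_lookup_ordered_range() probes the tree at both ends of the query range and then walks forward with rb_next() until it either finds an overlapping extent or runs past file_offset + len. The same first-overlap search expressed over a flat array sorted by start offset, as a simplified stand-in for the rbtree walk:

#include <stddef.h>
#include <stdint.h>

struct ovl_extent {
        uint64_t start;
        uint64_t len;
};

/* Return the first extent overlapping the half-open range
 * [offset, offset + len), or NULL when the range sits entirely in a hole.
 * The array is sorted by start offset, standing in for the rbtree order. */
static const struct ovl_extent *first_overlap(const struct ovl_extent *v,
                                              size_t n, uint64_t offset,
                                              uint64_t len)
{
        for (size_t i = 0; i < n; i++) {
                if (v[i].start >= offset + len)
                        break;          /* sorted: nothing later can overlap */
                if (v[i].start + v[i].len > offset)
                        return &v[i];
        }
        return NULL;
}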
812 struct btrfs_ordered_inode_tree *tree = &inode->ordered_tree; in btrfs_get_ordered_extents_for_logging()
815 ASSERT(inode_is_locked(&inode->vfs_inode)); in btrfs_get_ordered_extents_for_logging()
817 spin_lock_irq(&tree->lock); in btrfs_get_ordered_extents_for_logging()
818 for (n = rb_first(&tree->tree); n; n = rb_next(n)) { in btrfs_get_ordered_extents_for_logging()
823 if (test_bit(BTRFS_ORDERED_LOGGED, &ordered->flags)) in btrfs_get_ordered_extents_for_logging()
826 ASSERT(list_empty(&ordered->log_list)); in btrfs_get_ordered_extents_for_logging()
827 list_add_tail(&ordered->log_list, list); in btrfs_get_ordered_extents_for_logging()
828 refcount_inc(&ordered->refs); in btrfs_get_ordered_extents_for_logging()
830 spin_unlock_irq(&tree->lock); in btrfs_get_ordered_extents_for_logging()
841 struct rb_node *node; in btrfs_lookup_first_ordered_extent() local
844 tree = &inode->ordered_tree; in btrfs_lookup_first_ordered_extent()
845 spin_lock_irq(&tree->lock); in btrfs_lookup_first_ordered_extent()
846 node = tree_search(tree, file_offset); in btrfs_lookup_first_ordered_extent()
847 if (!node) in btrfs_lookup_first_ordered_extent()
850 entry = rb_entry(node, struct btrfs_ordered_extent, rb_node); in btrfs_lookup_first_ordered_extent()
851 refcount_inc(&entry->refs); in btrfs_lookup_first_ordered_extent()
853 spin_unlock_irq(&tree->lock); in btrfs_lookup_first_ordered_extent()
865 struct btrfs_fs_info *fs_info = inode->root->fs_info; in btrfs_find_ordered_sum()
868 struct btrfs_ordered_inode_tree *tree = &inode->ordered_tree; in btrfs_find_ordered_sum()
872 const u8 blocksize_bits = inode->vfs_inode.i_sb->s_blocksize_bits; in btrfs_find_ordered_sum()
873 const u16 csum_size = btrfs_super_csum_size(fs_info->super_copy); in btrfs_find_ordered_sum()
880 spin_lock_irq(&tree->lock); in btrfs_find_ordered_sum()
881 list_for_each_entry_reverse(ordered_sum, &ordered->list, list) { in btrfs_find_ordered_sum()
882 if (disk_bytenr >= ordered_sum->bytenr && in btrfs_find_ordered_sum()
883 disk_bytenr < ordered_sum->bytenr + ordered_sum->len) { in btrfs_find_ordered_sum()
884 i = (disk_bytenr - ordered_sum->bytenr) >> blocksize_bits; in btrfs_find_ordered_sum()
885 num_sectors = ordered_sum->len >> blocksize_bits; in btrfs_find_ordered_sum()
886 num_sectors = min_t(int, len - index, num_sectors - i); in btrfs_find_ordered_sum()
887 memcpy(sum + index, ordered_sum->sums + i * csum_size, in btrfs_find_ordered_sum()
897 spin_unlock_irq(&tree->lock); in btrfs_find_ordered_sum()
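btrfs_find_ordered_sum() converts a disk bytenr into an index inside an ordered_sum record: the sector number is the byte distance from ordered_sum->bytenr shifted down by the block-size bits, and each sector contributes csum_size bytes of digest. A standalone sketch of that index-and-copy arithmetic, with all parameters passed in explicitly and names that are illustrative only:

#include <stdint.h>
#include <string.h>

/* The sector index is the byte distance from the record's start shifted
 * down by the block-size bits; each sector owns csum_size bytes of the
 * digest array.  Returns how many checksums were copied. */
static int copy_csums(const uint8_t *sums, uint64_t record_bytenr,
                      uint64_t record_len, uint64_t disk_bytenr,
                      int blocksize_bits, uint16_t csum_size,
                      uint8_t *out, int want)
{
        uint64_t sector, total;
        int avail, n;

        if (disk_bytenr < record_bytenr ||
            disk_bytenr >= record_bytenr + record_len)
                return 0;               /* bytenr not covered by this record */

        sector = (disk_bytenr - record_bytenr) >> blocksize_bits;
        total = record_len >> blocksize_bits;
        avail = (int)(total - sector);
        n = want < avail ? want : avail;
        memcpy(out, sums + sector * csum_size, (size_t)n * csum_size);
        return n;
}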
903  * btrfs_lock_and_flush_ordered_range - Lock the passed range and ensure all pending
927 lock_extent_bits(&inode->io_tree, start, end, cachedp); in btrfs_lock_and_flush_ordered_range()
929 end - start + 1); in btrfs_lock_and_flush_ordered_range()
937 refcount_dec(&cache->refs); in btrfs_lock_and_flush_ordered_range()
940 unlock_extent_cached(&inode->io_tree, start, end, cachedp); in btrfs_lock_and_flush_ordered_range()
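btrfs_lock_and_flush_ordered_range() loops: lock the extent range, look for a pending ordered extent, and if one exists unlock, wait for it, and try again, so the caller always returns holding the lock over a quiescent range. A minimal sketch of that optimistic lock/probe/retry shape, with the probe and wait supplied as caller hooks:

#include <pthread.h>
#include <stdbool.h>

struct flush_lock {
        pthread_mutex_t lock;
};

/* Lock, probe for pending work, and if any exists drop the lock, wait it
 * out, and retry, so the caller always returns with the range locked and
 * nothing left in flight. */
static void lock_and_flush(struct flush_lock *fl,
                           bool (*has_pending)(void),
                           void (*wait_pending)(void))
{
        for (;;) {
                pthread_mutex_lock(&fl->lock);
                if (!has_pending())
                        return;         /* locked and quiescent */
                pthread_mutex_unlock(&fl->lock);
                wait_pending();         /* sleep until the pending IO finishes */
        }
}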
953 return -ENOMEM; in ordered_data_init()