Lines Matching refs:head (uses of the delayed ref head pointer, struct btrfs_delayed_ref_head *head, in fs/btrfs/extent-tree.c)
121 struct btrfs_delayed_ref_head *head; in btrfs_lookup_extent_info() local
206 head = btrfs_find_delayed_ref_head(delayed_refs, bytenr); in btrfs_lookup_extent_info()
207 if (head) { in btrfs_lookup_extent_info()
208 if (!mutex_trylock(&head->mutex)) { in btrfs_lookup_extent_info()
209 refcount_inc(&head->refs); in btrfs_lookup_extent_info()
218 mutex_lock(&head->mutex); in btrfs_lookup_extent_info()
219 mutex_unlock(&head->mutex); in btrfs_lookup_extent_info()
220 btrfs_put_delayed_ref_head(head); in btrfs_lookup_extent_info()
223 spin_lock(&head->lock); in btrfs_lookup_extent_info()
224 if (head->extent_op && head->extent_op->update_flags) in btrfs_lookup_extent_info()
225 extent_flags |= head->extent_op->flags_to_set; in btrfs_lookup_extent_info()
229 num_refs += head->ref_mod; in btrfs_lookup_extent_info()
230 spin_unlock(&head->lock); in btrfs_lookup_extent_info()
231 mutex_unlock(&head->mutex); in btrfs_lookup_extent_info()
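
The matches above (lines 206-231) are the locked-lookup idiom for a delayed ref head: find the head while holding the delayed_refs spinlock, try head->mutex with mutex_trylock(), and on contention pin the head with refcount_inc(), drop the spinlock, block on mutex_lock()/mutex_unlock(), put the reference and retry; once the mutex is held, the pending extent_op flags and ref_mod are read under head->lock. The same wait-and-retry shape appears again at lines 2163-2172 (btrfs_run_delayed_refs) and 2231-2249 (check_delayed_ref). Below is a minimal user-space sketch of that idiom using pthreads; ref_head, head_table_lock and the rest are illustrative names, not btrfs API, and the error handling of the real function is omitted.

/* Illustrative user-space analog of the pin/trylock/wait/retry pattern.
 * Not btrfs code: the names here are made up for the demo.
 */
#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct ref_head {
	pthread_mutex_t mutex;   /* like head->mutex */
	atomic_int refs;         /* like head->refs */
	int ref_mod;             /* like head->ref_mod */
};

static pthread_mutex_t head_table_lock = PTHREAD_MUTEX_INITIALIZER; /* delayed_refs->lock analog */
static struct ref_head *the_head;  /* one-entry "table" for the demo */

static void put_head(struct ref_head *head)
{
	if (atomic_fetch_sub(&head->refs, 1) == 1)
		free(head);
}

static int read_ref_mod(void)
{
	struct ref_head *head;
	int ref_mod;

again:
	pthread_mutex_lock(&head_table_lock);
	head = the_head;                        /* "find" the head */
	if (!head) {
		pthread_mutex_unlock(&head_table_lock);
		return 0;
	}
	if (pthread_mutex_trylock(&head->mutex) != 0) {
		/* Contended: pin the head, drop the table lock, wait, retry. */
		atomic_fetch_add(&head->refs, 1);
		pthread_mutex_unlock(&head_table_lock);
		pthread_mutex_lock(&head->mutex);
		pthread_mutex_unlock(&head->mutex);
		put_head(head);
		goto again;
	}
	ref_mod = head->ref_mod;                /* read while the head is stable */
	pthread_mutex_unlock(&head->mutex);
	pthread_mutex_unlock(&head_table_lock);
	return ref_mod;
}

int main(void)
{
	struct ref_head *h = calloc(1, sizeof(*h));

	pthread_mutex_init(&h->mutex, NULL);
	atomic_init(&h->refs, 1);
	h->ref_mod = 3;
	the_head = h;

	printf("ref_mod = %d\n", read_ref_mod());
	put_head(h);
	return 0;
}
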
1564 struct btrfs_delayed_ref_head *head, in run_delayed_extent_op() argument
1588 key.objectid = head->bytenr; in run_delayed_extent_op()
1595 key.offset = head->num_bytes; in run_delayed_extent_op()
1611 if (key.objectid == head->bytenr && in run_delayed_extent_op()
1613 key.offset == head->num_bytes) in run_delayed_extent_op()
1620 key.objectid = head->bytenr; in run_delayed_extent_op()
1621 key.offset = head->num_bytes; in run_delayed_extent_op()
1724 select_delayed_ref(struct btrfs_delayed_ref_head *head) in select_delayed_ref() argument
1728 if (RB_EMPTY_ROOT(&head->ref_tree.rb_root)) in select_delayed_ref()
1737 if (!list_empty(&head->ref_add_list)) in select_delayed_ref()
1738 return list_first_entry(&head->ref_add_list, in select_delayed_ref()
1741 ref = rb_entry(rb_first_cached(&head->ref_tree), in select_delayed_ref()
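
select_delayed_ref() (lines 1724-1741) shows the ordering policy for the refs queued on one head: if the head's ref_tree is empty there is nothing to run; otherwise an entry on head->ref_add_list, if any, is returned before the first entry of the rb-tree. Preferring queued adds keeps the running ref count from transiently dropping to zero (and deleting the extent item) while additions for the same extent are still pending. A small stand-alone sketch of the same selection rule, with plain pointers standing in for the rb-tree and list_head; none of this is btrfs API.

#include <stdio.h>

struct ref {
	int seq;        /* rb-tree sort key stand-in */
	int is_add;     /* "BTRFS_ADD_DELAYED_REF"-style entry? */
};

struct head {
	struct ref *tree_first;   /* rb_first_cached(&head->ref_tree) analog */
	struct ref *add_first;    /* list_first_entry(&head->ref_add_list) analog */
};

static struct ref *select_ref(struct head *h)
{
	if (!h->tree_first)
		return NULL;          /* RB_EMPTY_ROOT(): nothing queued */
	if (h->add_first)
		return h->add_first;  /* run pending adds before anything else */
	return h->tree_first;         /* otherwise the lowest-keyed ref */
}

int main(void)
{
	struct ref drop = { .seq = 1, .is_add = 0 };
	struct ref add  = { .seq = 2, .is_add = 1 };
	struct head h   = { .tree_first = &drop, .add_first = &add };
	struct ref *sel = select_ref(&h);

	printf("selected seq=%d is_add=%d\n", sel->seq, sel->is_add);
	return 0;
}
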
1748 struct btrfs_delayed_ref_head *head) in unselect_delayed_ref_head() argument
1751 head->processing = 0; in unselect_delayed_ref_head()
1754 btrfs_delayed_ref_unlock(head); in unselect_delayed_ref_head()
1758 struct btrfs_delayed_ref_head *head) in cleanup_extent_op() argument
1760 struct btrfs_delayed_extent_op *extent_op = head->extent_op; in cleanup_extent_op()
1765 if (head->must_insert_reserved) { in cleanup_extent_op()
1766 head->extent_op = NULL; in cleanup_extent_op()
1774 struct btrfs_delayed_ref_head *head) in run_and_cleanup_extent_op() argument
1779 extent_op = cleanup_extent_op(head); in run_and_cleanup_extent_op()
1782 head->extent_op = NULL; in run_and_cleanup_extent_op()
1783 spin_unlock(&head->lock); in run_and_cleanup_extent_op()
1784 ret = run_delayed_extent_op(trans, head, extent_op); in run_and_cleanup_extent_op()
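
cleanup_extent_op() and run_and_cleanup_extent_op() (lines 1758-1784) are about doing blocking work without holding head->lock: the pending extent_op is detached from the head while the spinlock is still held (line 1782), the spinlock is dropped (1783), and only then is run_delayed_extent_op() called, since it performs a btree lookup (see the key setup at lines 1588-1621) and can sleep. For a must_insert_reserved head the op is dropped rather than run (lines 1765-1766). The sketch below shows the detach-under-lock, run-unlocked shape with a pthread mutex standing in for the spinlock; all names are made up.

/* Illustrative analog: detach pending work from the object while its
 * lock is held, drop the lock, then do the potentially blocking work
 * unlocked.  Not btrfs code.
 */
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

struct work { int flags_to_set; };

struct head {
	pthread_mutex_t lock;     /* head->lock analog */
	struct work *pending_op;  /* head->extent_op analog */
	int flags;
};

/* Caller holds h->lock; returns 1 (lock released) if work was run. */
static int run_and_cleanup_op(struct head *h)
{
	struct work *op = h->pending_op;

	if (!op)
		return 0;                    /* nothing to do, lock still held */
	h->pending_op = NULL;                /* detach under the lock */
	pthread_mutex_unlock(&h->lock);      /* never block while holding it */
	h->flags |= op->flags_to_set;        /* the "slow" update, done unlocked */
	free(op);
	return 1;                            /* tell the caller the lock is gone */
}

int main(void)
{
	struct head h = { .lock = PTHREAD_MUTEX_INITIALIZER };

	h.pending_op = calloc(1, sizeof(struct work));
	h.pending_op->flags_to_set = 0x4;

	pthread_mutex_lock(&h.lock);
	if (!run_and_cleanup_op(&h))
		pthread_mutex_unlock(&h.lock);
	printf("flags = 0x%x\n", h.flags);
	return 0;
}
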
1791 struct btrfs_delayed_ref_head *head) in btrfs_cleanup_ref_head_accounting() argument
1799 if (head->total_ref_mod < 0 && head->is_data) { in btrfs_cleanup_ref_head_accounting()
1801 delayed_refs->pending_csums -= head->num_bytes; in btrfs_cleanup_ref_head_accounting()
1803 nr_items += btrfs_csum_bytes_to_leaves(fs_info, head->num_bytes); in btrfs_cleanup_ref_head_accounting()
1810 struct btrfs_delayed_ref_head *head) in cleanup_ref_head() argument
1819 ret = run_and_cleanup_extent_op(trans, head); in cleanup_ref_head()
1821 unselect_delayed_ref_head(delayed_refs, head); in cleanup_ref_head()
1832 spin_unlock(&head->lock); in cleanup_ref_head()
1834 spin_lock(&head->lock); in cleanup_ref_head()
1835 if (!RB_EMPTY_ROOT(&head->ref_tree.rb_root) || head->extent_op) { in cleanup_ref_head()
1836 spin_unlock(&head->lock); in cleanup_ref_head()
1840 btrfs_delete_ref_head(delayed_refs, head); in cleanup_ref_head()
1841 spin_unlock(&head->lock); in cleanup_ref_head()
1844 if (head->must_insert_reserved) { in cleanup_ref_head()
1845 btrfs_pin_extent(trans, head->bytenr, head->num_bytes, 1); in cleanup_ref_head()
1846 if (head->is_data) { in cleanup_ref_head()
1849 csum_root = btrfs_csum_root(fs_info, head->bytenr); in cleanup_ref_head()
1850 ret = btrfs_del_csums(trans, csum_root, head->bytenr, in cleanup_ref_head()
1851 head->num_bytes); in cleanup_ref_head()
1855 btrfs_cleanup_ref_head_accounting(fs_info, delayed_refs, head); in cleanup_ref_head()
1857 trace_run_delayed_ref_head(fs_info, head, 0); in cleanup_ref_head()
1858 btrfs_delayed_ref_unlock(head); in cleanup_ref_head()
1859 btrfs_put_delayed_ref_head(head); in cleanup_ref_head()
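
cleanup_ref_head() (lines 1810-1859) retires a head once all of its refs have been run. After run_and_cleanup_extent_op() returns, head->lock is released (1832) so the outer delayed_refs lock can be taken first, then reacquired (1834), and the head is re-validated (1835): if refs or a new extent_op were attached during that window the head stays put and the caller moves on. Only then is it unlinked (1840); a must_insert_reserved head pins its extent and, for data, deletes its checksums (1844-1851), followed by the accounting, tracepoint and final unlock/put (1855-1859). Below is a compact sketch of the drop-relock-revalidate step, with pthread mutexes for both locks; the names are invented.

/* Illustrative analog: to take the outer (table) lock, the inner
 * (head) lock is dropped first, and the "still idle?" check is
 * repeated once both locks are held.  Not btrfs code.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct head {
	pthread_mutex_t lock;
	int pending;            /* stands in for ref_tree entries + extent_op */
	bool on_table;
};

static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;

/* Called with h->lock held; returns 1 if the head still has work. */
static int try_remove_head(struct head *h)
{
	/* Outer lock ranks above the inner one, so drop and reacquire. */
	pthread_mutex_unlock(&h->lock);
	pthread_mutex_lock(&table_lock);
	pthread_mutex_lock(&h->lock);

	if (h->pending) {               /* revalidate: work arrived meanwhile */
		pthread_mutex_unlock(&h->lock);
		pthread_mutex_unlock(&table_lock);
		return 1;
	}
	h->on_table = false;            /* btrfs_delete_ref_head() analog */
	pthread_mutex_unlock(&h->lock);
	pthread_mutex_unlock(&table_lock);
	return 0;
}

int main(void)
{
	struct head h = { .lock = PTHREAD_MUTEX_INITIALIZER, .on_table = true };

	pthread_mutex_lock(&h.lock);
	printf("removed: %s\n", try_remove_head(&h) ? "no" : "yes");
	return 0;
}
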
1868 struct btrfs_delayed_ref_head *head = NULL; in btrfs_obtain_ref_head() local
1872 head = btrfs_select_ref_head(delayed_refs); in btrfs_obtain_ref_head()
1873 if (!head) { in btrfs_obtain_ref_head()
1875 return head; in btrfs_obtain_ref_head()
1882 ret = btrfs_delayed_ref_lock(delayed_refs, head); in btrfs_obtain_ref_head()
1891 head = ERR_PTR(-EAGAIN); in btrfs_obtain_ref_head()
1893 return head; in btrfs_obtain_ref_head()
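
btrfs_obtain_ref_head() (lines 1868-1893) feeds the run loop: btrfs_select_ref_head() picks the next head under the delayed_refs spinlock, then btrfs_delayed_ref_lock() takes head->mutex. Taking that mutex can mean dropping the spinlock and sleeping, and in that window the head may be freed by whoever held it, so a -EAGAIN from the lock helper is turned into ERR_PTR(-EAGAIN) (1891) and the caller simply selects again. The sketch below mimics the three-way contract (a head, NULL for "no work", or an encoded error meaning "retry"); the tiny ERR_PTR()/IS_ERR() helpers are local re-implementations for the demo, the real ones live in <linux/err.h>, and obtain_head() is an invented stand-in.

#include <errno.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_ERRNO 4095

static inline void *ERR_PTR(long err)       { return (void *)err; }
static inline long PTR_ERR(const void *p)   { return (long)p; }
static inline int IS_ERR(const void *p)
{
	return (uintptr_t)p >= (uintptr_t)-MAX_ERRNO;
}

struct head { int id; };

/* contended stands in for "someone else holds head->mutex". */
static struct head *obtain_head(struct head *next, int contended)
{
	if (!next)
		return NULL;               /* no delayed refs left to run */
	if (contended)
		return ERR_PTR(-EAGAIN);   /* caller loops and selects again */
	return next;                       /* head is now locked by the caller */
}

int main(void)
{
	struct head h = { .id = 42 };
	struct head *got = obtain_head(&h, 1);

	if (IS_ERR(got))
		printf("retry, err=%ld\n", PTR_ERR(got));

	got = obtain_head(&h, 0);
	if (got && !IS_ERR(got))
		printf("got head %d\n", got->id);
	return 0;
}
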
2129 struct btrfs_delayed_ref_head *head; in btrfs_run_delayed_refs() local
2163 head = rb_entry(node, struct btrfs_delayed_ref_head, in btrfs_run_delayed_refs()
2165 refcount_inc(&head->refs); in btrfs_run_delayed_refs()
2169 mutex_lock(&head->mutex); in btrfs_run_delayed_refs()
2170 mutex_unlock(&head->mutex); in btrfs_run_delayed_refs()
2172 btrfs_put_delayed_ref_head(head); in btrfs_run_delayed_refs()
2206 struct btrfs_delayed_ref_head *head; in check_delayed_ref() local
2224 head = btrfs_find_delayed_ref_head(delayed_refs, bytenr); in check_delayed_ref()
2225 if (!head) { in check_delayed_ref()
2231 if (!mutex_trylock(&head->mutex)) { in check_delayed_ref()
2238 refcount_inc(&head->refs); in check_delayed_ref()
2247 mutex_lock(&head->mutex); in check_delayed_ref()
2248 mutex_unlock(&head->mutex); in check_delayed_ref()
2249 btrfs_put_delayed_ref_head(head); in check_delayed_ref()
2255 spin_lock(&head->lock); in check_delayed_ref()
2260 for (node = rb_first_cached(&head->ref_tree); node; in check_delayed_ref()
2282 spin_unlock(&head->lock); in check_delayed_ref()
2283 mutex_unlock(&head->mutex); in check_delayed_ref()
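
check_delayed_ref() (lines 2206-2283) answers whether anything other than the caller's root/inode/offset currently references the extent, counting refs that are still only delayed. It reuses the pin/trylock/wait idiom from above to obtain head->mutex, then walks every ref on head->ref_tree under head->lock (2255-2282); roughly, the walk reports the extent as shared as soon as it meets a ref that is not a plain data ref matching the caller's root, inode and offset. A stand-alone sketch of that scan-under-lock, with an array in place of the rb-tree and invented names:

#include <pthread.h>
#include <stdio.h>

struct ref { int root; long inum; long offset; };

struct head {
	pthread_mutex_t lock;
	struct ref refs[4];
	int nr_refs;
};

static int head_is_shared(struct head *h, int root, long inum, long offset)
{
	int shared = 0;

	pthread_mutex_lock(&h->lock);
	for (int i = 0; i < h->nr_refs; i++) {
		struct ref *r = &h->refs[i];

		if (r->root != root || r->inum != inum || r->offset != offset) {
			shared = 1;     /* someone else also references it */
			break;
		}
	}
	pthread_mutex_unlock(&h->lock);
	return shared;
}

int main(void)
{
	struct head h = {
		.lock = PTHREAD_MUTEX_INITIALIZER,
		.refs = { { 5, 257, 0 }, { 6, 300, 0 } },
		.nr_refs = 2,
	};

	printf("shared: %d\n", head_is_shared(&h, 5, 257, 0));
	return 0;
}
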
3220 struct btrfs_delayed_ref_head *head; in check_ref_cleanup() local
3226 head = btrfs_find_delayed_ref_head(delayed_refs, bytenr); in check_ref_cleanup()
3227 if (!head) in check_ref_cleanup()
3230 spin_lock(&head->lock); in check_ref_cleanup()
3231 if (!RB_EMPTY_ROOT(&head->ref_tree.rb_root)) in check_ref_cleanup()
3234 if (cleanup_extent_op(head) != NULL) in check_ref_cleanup()
3241 if (!mutex_trylock(&head->mutex)) in check_ref_cleanup()
3244 btrfs_delete_ref_head(delayed_refs, head); in check_ref_cleanup()
3245 head->processing = 0; in check_ref_cleanup()
3247 spin_unlock(&head->lock); in check_ref_cleanup()
3250 BUG_ON(head->extent_op); in check_ref_cleanup()
3251 if (head->must_insert_reserved) in check_ref_cleanup()
3254 btrfs_cleanup_ref_head_accounting(trans->fs_info, delayed_refs, head); in check_ref_cleanup()
3255 mutex_unlock(&head->mutex); in check_ref_cleanup()
3256 btrfs_put_delayed_ref_head(head); in check_ref_cleanup()
3259 spin_unlock(&head->lock); in check_ref_cleanup()
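
check_ref_cleanup() (lines 3220-3259) is the opportunistic cancel path: when a head turns out to have nothing left to do, it is dropped without ever touching the extent tree (the caller typically reaches this when a block was allocated and freed within the same transaction). Every check can fail safely: queued refs, a pending extent op, or a contended mutex just means back out (3231-3241) and let normal processing handle the head. mutex_trylock() rather than mutex_lock() matters there, since anyone holding the mutex is already processing the head and blocking on it could deadlock. Only a completely idle head is unlinked (3244-3245) and its accounting unwound (3254). A sketch of that "cancel only if every precondition holds, never block" shape, with invented names:

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct head {
	pthread_mutex_t mutex;
	pthread_mutex_t lock;
	int nr_queued;          /* ref_tree analog */
	bool has_op;            /* extent_op analog */
	bool on_table;
};

static bool try_cancel(struct head *h)
{
	bool cancelled = false;

	pthread_mutex_lock(&h->lock);
	if (h->nr_queued || h->has_op)
		goto out;                         /* real work queued: keep the head */
	if (pthread_mutex_trylock(&h->mutex) != 0)
		goto out;                         /* a runner owns it: never block here */

	h->on_table = false;                      /* drop the head outright */
	cancelled = true;
	pthread_mutex_unlock(&h->mutex);
out:
	pthread_mutex_unlock(&h->lock);
	return cancelled;
}

int main(void)
{
	struct head h = {
		.mutex = PTHREAD_MUTEX_INITIALIZER,
		.lock = PTHREAD_MUTEX_INITIALIZER,
		.on_table = true,
	};

	printf("cancelled: %s\n", try_cancel(&h) ? "yes" : "no");
	return 0;
}
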