Lines matching refs:head_ref (all matches below are from fs/btrfs/delayed-ref.c)
749 static void init_delayed_ref_head(struct btrfs_delayed_ref_head *head_ref, in init_delayed_ref_head() argument
785 refcount_set(&head_ref->refs, 1); in init_delayed_ref_head()
786 head_ref->bytenr = bytenr; in init_delayed_ref_head()
787 head_ref->num_bytes = num_bytes; in init_delayed_ref_head()
788 head_ref->ref_mod = count_mod; in init_delayed_ref_head()
789 head_ref->must_insert_reserved = must_insert_reserved; in init_delayed_ref_head()
790 head_ref->is_data = is_data; in init_delayed_ref_head()
791 head_ref->is_system = is_system; in init_delayed_ref_head()
792 head_ref->ref_tree = RB_ROOT_CACHED; in init_delayed_ref_head()
793 INIT_LIST_HEAD(&head_ref->ref_add_list); in init_delayed_ref_head()
794 RB_CLEAR_NODE(&head_ref->href_node); in init_delayed_ref_head()
795 head_ref->processing = 0; in init_delayed_ref_head()
796 head_ref->total_ref_mod = count_mod; in init_delayed_ref_head()
797 spin_lock_init(&head_ref->lock); in init_delayed_ref_head()
798 mutex_init(&head_ref->mutex); in init_delayed_ref_head()
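These matches show init_delayed_ref_head() filling in every field of a freshly allocated head before it is published: the lifetime refcount starts at 1 (line 785), bytenr/num_bytes and the ref_mod plus the reservation and type flags are copied in (lines 786-791), the per-head tree of individual refs and the ref_add_list start empty (lines 792-793), the rbtree node is cleared and the head marked as not yet being processed (lines 794-795), and the spinlock and mutex that later serialize access are initialized (lines 797-798). As a hedged illustration of that initialize-before-publish pattern, here is a minimal user-space sketch; struct demo_ref_head, demo_init_ref_head() and the pthread stand-ins for the kernel spinlock/mutex are hypothetical, not btrfs code.

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdint.h>

    /* Hypothetical, much-simplified stand-in for btrfs_delayed_ref_head. */
    struct demo_ref_head {
        atomic_int refs;           /* lifetime refcount, starts at 1 */
        uint64_t bytenr;           /* start of the extent this head covers */
        uint64_t num_bytes;        /* length of the extent */
        int ref_mod;               /* net reference-count change */
        int total_ref_mod;
        pthread_spinlock_t lock;   /* stands in for head_ref->lock */
        pthread_mutex_t mutex;     /* stands in for head_ref->mutex */
    };

    /* Every field is written before the head is handed to any shared
     * structure, so no other thread can observe a half-initialized head. */
    static void demo_init_ref_head(struct demo_ref_head *head, uint64_t bytenr,
                                   uint64_t num_bytes, int count_mod)
    {
        atomic_store(&head->refs, 1);
        head->bytenr = bytenr;
        head->num_bytes = num_bytes;
        head->ref_mod = count_mod;
        head->total_ref_mod = count_mod;
        pthread_spin_init(&head->lock, PTHREAD_PROCESS_PRIVATE);
        pthread_mutex_init(&head->mutex, NULL);
    }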
818 struct btrfs_delayed_ref_head *head_ref, in add_delayed_ref_head() argument
837 trace_add_delayed_ref_head(trans->fs_info, head_ref, action); in add_delayed_ref_head()
840 &head_ref->href_node); in add_delayed_ref_head()
842 update_existing_head_ref(trans, existing, head_ref); in add_delayed_ref_head()
847 kmem_cache_free(btrfs_delayed_ref_head_cachep, head_ref); in add_delayed_ref_head()
848 head_ref = existing; in add_delayed_ref_head()
850 u64 flags = btrfs_ref_head_to_space_flags(head_ref); in add_delayed_ref_head()
852 if (head_ref->is_data && head_ref->ref_mod < 0) { in add_delayed_ref_head()
853 delayed_refs->pending_csums += head_ref->num_bytes; in add_delayed_ref_head()
856 head_ref->num_bytes); in add_delayed_ref_head()
858 if (head_ref->ref_mod < 0) in add_delayed_ref_head()
860 head_ref->num_bytes); in add_delayed_ref_head()
869 return head_ref; in add_delayed_ref_head()
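The add_delayed_ref_head() matches show the publish step. The new head is linked into the transaction's delayed-ref rbtree via its href_node (line 840); if a head for the same bytenr already exists, update_existing_head_ref() folds the new counts into it (line 842), the freshly built head is returned to btrfs_delayed_ref_head_cachep (line 847), and the caller continues with the surviving head (lines 848 and 869). Once the live head is known, the accounting at lines 852-860 runs, including adding num_bytes to pending_csums for data heads whose ref_mod went negative. Below is a hedged user-space sketch of the insert-or-merge idea, using a sorted singly linked list in place of the rbtree; struct demo_head and demo_add_head() are hypothetical names.

    #include <stdint.h>
    #include <stdlib.h>

    /* Hypothetical analog of a delayed ref head keyed by bytenr. */
    struct demo_head {
        uint64_t bytenr;
        int ref_mod;
        struct demo_head *next;
    };

    /* Insert-or-merge: if a head for this bytenr already exists, fold the
     * new head's ref_mod into it, free the new head, and return the
     * existing one, mirroring how add_delayed_ref_head() may hand back a
     * different head than the one the caller allocated. */
    static struct demo_head *demo_add_head(struct demo_head **list,
                                           struct demo_head *new_head)
    {
        struct demo_head **p = list;

        while (*p && (*p)->bytenr < new_head->bytenr)
            p = &(*p)->next;

        if (*p && (*p)->bytenr == new_head->bytenr) {
            (*p)->ref_mod += new_head->ref_mod;  /* merge counts */
            free(new_head);                      /* like kmem_cache_free() */
            return *p;
        }

        new_head->next = *p;                     /* insert new head */
        *p = new_head;
        return new_head;
    }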
934 struct btrfs_delayed_ref_head *head_ref; in btrfs_add_delayed_tree_ref() local
955 head_ref = kmem_cache_alloc(btrfs_delayed_ref_head_cachep, GFP_NOFS); in btrfs_add_delayed_tree_ref()
956 if (!head_ref) { in btrfs_add_delayed_tree_ref()
968 kmem_cache_free(btrfs_delayed_ref_head_cachep, head_ref); in btrfs_add_delayed_tree_ref()
984 init_delayed_ref_head(head_ref, record, bytenr, num_bytes, in btrfs_add_delayed_tree_ref()
987 head_ref->extent_op = extent_op; in btrfs_add_delayed_tree_ref()
996 head_ref = add_delayed_ref_head(trans, head_ref, record, in btrfs_add_delayed_tree_ref()
999 ret = insert_delayed_ref(trans, delayed_refs, head_ref, &ref->node); in btrfs_add_delayed_tree_ref()
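In btrfs_add_delayed_tree_ref() the head is allocated from btrfs_delayed_ref_head_cachep up front (lines 955-956); if a later allocation on the same path fails, the unused head is returned to the cache (line 968). Only after init_delayed_ref_head() (line 984) and add_delayed_ref_head() (line 996) is the individual tree ref inserted under whichever head is actually live (line 999), since the publish step may have handed back a pre-existing head. A hedged sketch of that allocate / init / publish / unwind shape, with hypothetical names and plain malloc in place of the kmem caches:

    #include <errno.h>
    #include <stdint.h>
    #include <stdlib.h>

    struct demo_head { uint64_t bytenr; int ref_mod; };
    struct demo_ref  { uint64_t bytenr; int level; };

    /* Stubs standing in for add_delayed_ref_head()/insert_delayed_ref();
     * demo_publish_head() may return a different, already-existing head,
     * and demo_insert_ref() takes ownership of ref either way. */
    static struct demo_head *demo_publish_head(struct demo_head *head) { return head; }
    static int demo_insert_ref(struct demo_head *head, struct demo_ref *ref) { return 0; }

    static int demo_add_tree_ref(uint64_t bytenr, int level)
    {
        struct demo_head *head;
        struct demo_ref *ref;
        int ret;

        head = malloc(sizeof(*head));   /* kmem_cache_alloc(..., GFP_NOFS) */
        if (!head)
            return -ENOMEM;

        ref = malloc(sizeof(*ref));     /* a later allocation on the path */
        if (!ref) {
            ret = -ENOMEM;
            goto free_head;             /* unwind, cf. line 968 */
        }

        head->bytenr = bytenr;          /* init_delayed_ref_head() */
        head->ref_mod = 1;
        ref->bytenr = bytenr;
        ref->level = level;

        head = demo_publish_head(head); /* may merge into an existing head */
        return demo_insert_ref(head, ref);

    free_head:
        free(head);                     /* kmem_cache_free() */
        return ret;
    }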
1029 struct btrfs_delayed_ref_head *head_ref; in btrfs_add_delayed_data_ref() local
1060 head_ref = kmem_cache_alloc(btrfs_delayed_ref_head_cachep, GFP_NOFS); in btrfs_add_delayed_data_ref()
1061 if (!head_ref) { in btrfs_add_delayed_data_ref()
1074 head_ref); in btrfs_add_delayed_data_ref()
1079 init_delayed_ref_head(head_ref, record, bytenr, num_bytes, ref_root, in btrfs_add_delayed_data_ref()
1081 head_ref->extent_op = NULL; in btrfs_add_delayed_data_ref()
1090 head_ref = add_delayed_ref_head(trans, head_ref, record, in btrfs_add_delayed_data_ref()
1093 ret = insert_delayed_ref(trans, delayed_refs, head_ref, &ref->node); in btrfs_add_delayed_data_ref()
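The btrfs_add_delayed_data_ref() matches follow the same allocate / init / publish / insert shape as the tree-ref path above. The visible differences are that the head is initialized with the data extent's ref_root (and, in the full call, the reserved byte count) at line 1079, that no extent_op is attached (line 1081), and that the truncated match at line 1074 is most likely the wrapped continuation of the corresponding kmem_cache_free() call on this function's error path. It is these data heads, once their ref_mod goes negative, that feed the pending_csums accounting seen at lines 852-853.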
1118 struct btrfs_delayed_ref_head *head_ref; in btrfs_add_delayed_extent_op() local
1121 head_ref = kmem_cache_alloc(btrfs_delayed_ref_head_cachep, GFP_NOFS); in btrfs_add_delayed_extent_op()
1122 if (!head_ref) in btrfs_add_delayed_extent_op()
1125 init_delayed_ref_head(head_ref, NULL, bytenr, num_bytes, 0, 0, in btrfs_add_delayed_extent_op()
1128 head_ref->extent_op = extent_op; in btrfs_add_delayed_extent_op()
1133 add_delayed_ref_head(trans, head_ref, NULL, BTRFS_UPDATE_DELAYED_HEAD, in btrfs_add_delayed_extent_op()
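btrfs_add_delayed_extent_op() is the head-only variant: it allocates just a ref head (lines 1121-1122), initializes it with no qgroup record and zeroed root/reserved values (line 1125), attaches the extent_op carrying the pending flags/key update for the extent item (line 1128), and publishes it with BTRFS_UPDATE_DELAYED_HEAD (line 1133). No per-reference node is created, which is why, unlike the two functions above, there is no insert_delayed_ref() match for it.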