Lines matching locked_ref (definitions and uses)
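
These matches span the two btrfs delayed-reference runners in fs/btrfs/extent-tree.c: btrfs_run_delayed_refs_for_head() (kernel lines 1913-1989), which drains every queued ref from one locked head, and its caller __btrfs_run_delayed_refs() (kernel lines 2004-2079), which selects the heads to drain. Hedged sketches of both follow their match groups below.
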
1913 struct btrfs_delayed_ref_head *locked_ref,
1925 lockdep_assert_held(&locked_ref->mutex);
1926 lockdep_assert_held(&locked_ref->lock);
1928 while ((ref = btrfs_select_delayed_ref(locked_ref))) {
1931 spin_unlock(&locked_ref->lock);
1932 btrfs_unselect_ref_head(delayed_refs, locked_ref);
1936 rb_erase_cached(&ref->ref_node, &locked_ref->ref_tree);
1947 locked_ref->ref_mod -= ref->ref_mod;
1950 locked_ref->ref_mod += ref->ref_mod;
1960 must_insert_reserved = locked_ref->must_insert_reserved;
1967 locked_ref->must_insert_reserved = false;
1969 extent_op = locked_ref->extent_op;
1970 locked_ref->extent_op = NULL;
1971 spin_unlock(&locked_ref->lock);
1973 ret = run_one_delayed_ref(trans, locked_ref, ref, extent_op,
1980 btrfs_unselect_ref_head(delayed_refs, locked_ref);
1988 spin_lock(&locked_ref->lock);
1989 btrfs_merge_delayed_refs(fs_info, delayed_refs, locked_ref);
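
The matches from kernel lines 1925-1989 all sit inside btrfs_run_delayed_refs_for_head(). The sketch below stitches them into the surrounding control flow; the connective code (the seq-conflict check, the switch labels on ref->action, the condensed error handling) is reconstructed from context rather than copied from this listing, and cleanup details (ref puts, reserve release, *bytes_released accounting) are elided.

/*
 * Sketch of btrfs_run_delayed_refs_for_head(), reconstructed from the
 * matched lines above.  Comments flag what is assumed rather than shown.
 */
static int btrfs_run_delayed_refs_for_head(struct btrfs_trans_handle *trans,
					   struct btrfs_delayed_ref_head *locked_ref,
					   u64 *bytes_released)
{
	struct btrfs_delayed_ref_root *delayed_refs =
		&trans->transaction->delayed_refs;
	struct btrfs_delayed_ref_node *ref;
	struct btrfs_delayed_extent_op *extent_op;
	bool must_insert_reserved;
	int ret;

	/* The caller hands us the head with both of its locks held. */
	lockdep_assert_held(&locked_ref->mutex);
	lockdep_assert_held(&locked_ref->lock);

	while ((ref = btrfs_select_delayed_ref(locked_ref))) {
		/* Assumed: a tree-mod-seq conflict backs the head out. */
		if (ref->seq && btrfs_check_delayed_seq(trans->fs_info, ref->seq)) {
			spin_unlock(&locked_ref->lock);
			btrfs_unselect_ref_head(delayed_refs, locked_ref);
			return -EAGAIN;
		}

		/* Detach the ref from the head before running it. */
		rb_erase_cached(&ref->ref_node, &locked_ref->ref_tree);

		/*
		 * Keep the head's aggregate ref_mod in step with the ref
		 * being played (the switch labels are assumed): adds are
		 * subtracted, drops added back, so ref_mod reaches zero
		 * once every queued ref has run.
		 */
		switch (ref->action) {
		case BTRFS_ADD_DELAYED_REF:
		case BTRFS_ADD_DELAYED_EXTENT:
			locked_ref->ref_mod -= ref->ref_mod;
			break;
		case BTRFS_DROP_DELAYED_REF:
			locked_ref->ref_mod += ref->ref_mod;
			break;
		}

		/*
		 * Snapshot per-head state under the spinlock, then clear it:
		 * unsetting must_insert_reserved relinquishes ownership of
		 * the reserved bytes, so every path from here must free them.
		 */
		must_insert_reserved = locked_ref->must_insert_reserved;
		locked_ref->must_insert_reserved = false;
		extent_op = locked_ref->extent_op;
		locked_ref->extent_op = NULL;
		spin_unlock(&locked_ref->lock);

		ret = run_one_delayed_ref(trans, locked_ref, ref, extent_op,
					  must_insert_reserved);
		if (ret) {
			btrfs_unselect_ref_head(delayed_refs, locked_ref);
			return ret;
		}

		/*
		 * Retake the spinlock and merge again: new refs may have
		 * been queued on this head while the lock was dropped.
		 */
		spin_lock(&locked_ref->lock);
		btrfs_merge_delayed_refs(trans->fs_info, delayed_refs, locked_ref);
	}
	return 0;
}

The split between the two asserted locks is the point of the dance above: the spinlock covers the head's ref_tree and bookkeeping fields and is dropped around run_one_delayed_ref(), which modifies the extent tree and can block, while the mutex keeps the head owned by this task for the whole drain.
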
2004 struct btrfs_delayed_ref_head *locked_ref = NULL;
2022 if (!locked_ref) {
2023 locked_ref = btrfs_select_ref_head(fs_info, delayed_refs);
2024 if (IS_ERR_OR_NULL(locked_ref)) {
2025 if (PTR_ERR(locked_ref) == -EAGAIN) {
2045 spin_lock(&locked_ref->lock);
2046 btrfs_merge_delayed_refs(fs_info, delayed_refs, locked_ref);
2048 ret = btrfs_run_delayed_refs_for_head(trans, locked_ref, &bytes_processed);
2060 ret = cleanup_ref_head(trans, locked_ref, &bytes_processed);
2075 locked_ref = NULL;
2079 locked_ref);
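
The matches from kernel line 2004 onward belong to the caller, __btrfs_run_delayed_refs(). A condensed sketch of its selection loop follows; the min_bytes parameter, the termination condition, and the -EAGAIN bookkeeping are simplifications, not verbatim source.

/*
 * Sketch of the head-selection loop in __btrfs_run_delayed_refs(),
 * reconstructed from the matches above.  Error and requeue handling is
 * condensed; the real loop also counts heads and bytes for its caller.
 */
static int __btrfs_run_delayed_refs(struct btrfs_trans_handle *trans,
				    u64 min_bytes)	/* signature assumed */
{
	struct btrfs_fs_info *fs_info = trans->fs_info;
	struct btrfs_delayed_ref_root *delayed_refs =
		&trans->transaction->delayed_refs;
	struct btrfs_delayed_ref_head *locked_ref = NULL;
	u64 bytes_processed = 0;
	int ret;

	do {
		if (!locked_ref) {
			/*
			 * NULL means no head is ready; ERR_PTR(-EAGAIN)
			 * means selection raced and should be retried.
			 */
			locked_ref = btrfs_select_ref_head(fs_info, delayed_refs);
			if (IS_ERR_OR_NULL(locked_ref)) {
				if (PTR_ERR(locked_ref) == -EAGAIN) {
					/* Reset before retrying (condensed). */
					locked_ref = NULL;
					continue;
				}
				break;
			}
		}

		/* Merge mergeable add/drop refs under the head's spinlock. */
		spin_lock(&locked_ref->lock);
		btrfs_merge_delayed_refs(fs_info, delayed_refs, locked_ref);

		/* Drain the head; this drops locked_ref->lock on all paths. */
		ret = btrfs_run_delayed_refs_for_head(trans, locked_ref,
						      &bytes_processed);
		if (ret < 0 && ret != -EAGAIN)
			return ret;	/* head already unlocked and unselected */

		if (!ret) {
			/* Every ref ran: try to retire the empty head. */
			ret = cleanup_ref_head(trans, locked_ref, &bytes_processed);
			if (ret > 0)
				continue;	/* new refs arrived; keep this head */
			if (ret < 0)
				return ret;
		}

		/* Done with this head (or got -EAGAIN): pick a fresh one. */
		locked_ref = NULL;
		cond_resched();
	} while (bytes_processed < min_bytes || locked_ref);

	return 0;
}

Note that bytes_processed is threaded through both btrfs_run_delayed_refs_for_head() and cleanup_ref_head(), which is why kernel lines 2048 and 2060 pass the same &bytes_processed: presumably the loop stops once enough of the delayed-refs reserve has been flushed or nothing is left to run.
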