Lines matching references to locked_ref (in __btrfs_run_delayed_refs(), fs/btrfs/extent-tree.c)

2515 	struct btrfs_delayed_ref_head *locked_ref = NULL;  in __btrfs_run_delayed_refs()  local
2525 if (!locked_ref) { in __btrfs_run_delayed_refs()
2530 locked_ref = btrfs_select_ref_head(trans); in __btrfs_run_delayed_refs()
2531 if (!locked_ref) { in __btrfs_run_delayed_refs()
2538 ret = btrfs_delayed_ref_lock(trans, locked_ref); in __btrfs_run_delayed_refs()
2547 locked_ref = NULL; in __btrfs_run_delayed_refs()
2565 spin_lock(&locked_ref->lock); in __btrfs_run_delayed_refs()
2566 btrfs_merge_delayed_refs(trans, delayed_refs, locked_ref); in __btrfs_run_delayed_refs()
2568 ref = select_delayed_ref(locked_ref); in __btrfs_run_delayed_refs()
2572 spin_unlock(&locked_ref->lock); in __btrfs_run_delayed_refs()
2573 unselect_delayed_ref_head(delayed_refs, locked_ref); in __btrfs_run_delayed_refs()
2574 locked_ref = NULL; in __btrfs_run_delayed_refs()
2585 ret = cleanup_ref_head(trans, locked_ref); in __btrfs_run_delayed_refs()
2593 locked_ref = NULL; in __btrfs_run_delayed_refs()
2600 rb_erase(&ref->ref_node, &locked_ref->ref_tree); in __btrfs_run_delayed_refs()
2611 locked_ref->ref_mod -= ref->ref_mod; in __btrfs_run_delayed_refs()
2614 locked_ref->ref_mod += ref->ref_mod; in __btrfs_run_delayed_refs()
2625 must_insert_reserved = locked_ref->must_insert_reserved; in __btrfs_run_delayed_refs()
2626 locked_ref->must_insert_reserved = 0; in __btrfs_run_delayed_refs()
2628 extent_op = locked_ref->extent_op; in __btrfs_run_delayed_refs()
2629 locked_ref->extent_op = NULL; in __btrfs_run_delayed_refs()
2630 spin_unlock(&locked_ref->lock); in __btrfs_run_delayed_refs()
2637 unselect_delayed_ref_head(delayed_refs, locked_ref); in __btrfs_run_delayed_refs()