Lines matching refs: trans (uses of the struct btree_trans pointer throughout the bcachefs btree locking code)

19 struct six_lock_count bch2_btree_node_lock_counts(struct btree_trans *trans,  in bch2_btree_node_lock_counts()  argument
33 trans_for_each_path(trans, path, i) in bch2_btree_node_lock_counts()
46 void bch2_btree_node_unlock_write(struct btree_trans *trans, in bch2_btree_node_unlock_write() argument
49 bch2_btree_node_unlock_write_inlined(trans, path, b); in bch2_btree_node_unlock_write()
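
bch2_btree_node_lock_counts() walks every path in the transaction (trans_for_each_path, line 33) and tallies how many of them hold the given node, per lock type; the result is consumed by __bch2_btree_node_lock_write() (line 421) and the relock-failure report (line 746). Below is a minimal standalone sketch of that counting shape; struct fake_path, node_lock_counts() and their fields are invented for illustration, and only struct six_lock_count and the SIX_LOCK_* indices come from the listing itself.

enum six_lock_type { SIX_LOCK_read, SIX_LOCK_intent, SIX_LOCK_write };

struct six_lock_count { unsigned n[3]; };

/* Invented, simplified stand-in for struct btree_path. */
struct fake_path {
	const void		*node;		/* btree node this path has locked */
	enum six_lock_type	lock_type;
	int			locked;
};

/*
 * Count how many of the transaction's paths hold @node, broken out by
 * lock type: the same shape of result bch2_btree_node_lock_counts()
 * returns (e.g. .n[SIX_LOCK_read] at line 421 above).
 */
static struct six_lock_count
node_lock_counts(const struct fake_path *paths, unsigned nr, const void *node)
{
	struct six_lock_count ret = { .n = { 0 } };

	for (unsigned i = 0; i < nr; i++)
		if (paths[i].locked && paths[i].node == node)
			ret.n[paths[i].lock_type]++;
	return ret;
}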
58 struct btree_trans *trans; member
80 struct task_struct *task = READ_ONCE(i->trans->locking_wait.task); in print_cycle()
84 bch2_btree_trans_to_text(out, i->trans); in print_cycle()
94 struct task_struct *task = i->trans->locking_wait.task; in print_chain()
104 closure_put(&g->g[--g->nr].trans->ref); in lock_graph_up()
119 static void __lock_graph_down(struct lock_graph *g, struct btree_trans *trans) in __lock_graph_down() argument
122 .trans = trans, in __lock_graph_down()
123 .node_want = trans->locking, in __lock_graph_down()
124 .lock_want = trans->locking_wait.lock_want, in __lock_graph_down()
128 static void lock_graph_down(struct lock_graph *g, struct btree_trans *trans) in lock_graph_down() argument
130 closure_get(&trans->ref); in lock_graph_down()
131 __lock_graph_down(g, trans); in lock_graph_down()
139 if (from->trans->locking != from->node_want) { in lock_graph_remove_non_waiters()
145 if (i->trans->locking != i->node_want || in lock_graph_remove_non_waiters()
146 i->trans->locking_wait.start_time != i[-1].lock_start_time) { in lock_graph_remove_non_waiters()
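
The lock_graph used by the deadlock checker is a small stack of entries pairing a transaction with the node and lock type it is waiting for (lines 122-124); lock_graph_down() pins the transaction with closure_get() before pushing (line 130) and lock_graph_up() drops that reference when popping (line 104). The following is a rough standalone analogue of that push/pop discipline, with an invented fake_trans and a plain integer refcount standing in for the closure.

#include <assert.h>

/* Invented stand-in for struct btree_trans and its closure refcount. */
struct fake_trans {
	int	refs;		/* analogue of trans->ref */
	void	*locking;	/* node this transaction is blocked on, if any */
	int	lock_want;	/* analogue of locking_wait.lock_want */
};

struct graph_entry {
	struct fake_trans	*trans;
	void			*node_want;
	int			lock_want;
};

struct lock_graph_sketch {
	struct graph_entry	g[8];	/* small fixed-size stack (size chosen for the sketch) */
	unsigned		nr;
};

/* Push: pin the transaction, then record what it is waiting for. */
static void graph_down(struct lock_graph_sketch *g, struct fake_trans *trans)
{
	assert(g->nr < 8);
	trans->refs++;				/* closure_get() analogue */
	g->g[g->nr++] = (struct graph_entry) {
		.trans		= trans,
		.node_want	= trans->locking,
		.lock_want	= trans->lock_want,
	};
}

/* Pop: drop the reference taken on the way down. */
static void graph_up(struct lock_graph_sketch *g)
{
	assert(g->nr);
	g->g[--g->nr].trans->refs--;		/* closure_put() analogue */
}

In the real code, lock_graph_remove_non_waiters() (lines 139-146) additionally prunes entries whose transaction is no longer blocked on the node recorded for it, which is why each entry also carries the waiter's lock_start_time.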
154 static void trace_would_deadlock(struct lock_graph *g, struct btree_trans *trans) in trace_would_deadlock() argument
156 struct bch_fs *c = trans->c; in trace_would_deadlock()
166 trace_trans_restart_would_deadlock(trans, buf.buf); in trace_would_deadlock()
174 trace_would_deadlock(g, i->trans); in abort_lock()
175 return btree_trans_restart(i->trans, BCH_ERR_transaction_restart_would_deadlock); in abort_lock()
177 i->trans->lock_must_abort = true; in abort_lock()
178 wake_up_process(i->trans->locking_wait.task); in abort_lock()
183 static int btree_trans_abort_preference(struct btree_trans *trans) in btree_trans_abort_preference() argument
185 if (trans->lock_may_not_fail) in btree_trans_abort_preference()
187 if (trans->locking_wait.lock_want == SIX_LOCK_write) in btree_trans_abort_preference()
189 if (!trans->in_traverse_all) in btree_trans_abort_preference()
212 pref = btree_trans_abort_preference(i->trans); in break_cycle()
223 prt_printf(&buf, bch2_fmt(g->g->trans->c, "cycle of nofail locks")); in break_cycle()
226 struct btree_trans *trans = i->trans; in break_cycle() local
228 bch2_btree_trans_to_text(&buf, trans); in break_cycle()
232 bch2_prt_task_backtrace(&buf, trans->locking_wait.task, 2, GFP_NOWAIT); in break_cycle()
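
break_cycle() has to pick a victim once a cycle is found, and it does so by comparing btree_trans_abort_preference() across the transactions involved (line 212). The listing shows which fields are consulted, and in what order (lock_may_not_fail, a pending SIX_LOCK_write want, in_traverse_all), but not the values returned; the numbers in the sketch below are assumptions chosen only to preserve that ordering. If every transaction in the cycle is unabortable, the "cycle of nofail locks" message at line 223 is emitted instead.

#include <stdbool.h>

/*
 * Simplified view of the fields btree_trans_abort_preference() checks.
 * Field meanings follow the listing; the exact return values of the
 * real function are not shown there, so these are illustrative.
 */
struct trans_state {
	bool	lock_may_not_fail;	/* line 185: must not be aborted */
	bool	wants_write_lock;	/* line 187: locking_wait.lock_want == SIX_LOCK_write */
	bool	in_traverse_all;	/* line 189 */
};

/* Assumed convention: 0 means "never abort", larger means "better victim". */
static int abort_preference(const struct trans_state *t)
{
	if (t->lock_may_not_fail)
		return 0;
	if (t->wants_write_lock)
		return 1;
	if (!t->in_traverse_all)
		return 2;
	return 3;
}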
251 static int lock_graph_descend(struct lock_graph *g, struct btree_trans *trans, in lock_graph_descend() argument
254 struct btree_trans *orig_trans = g->g->trans; in lock_graph_descend()
258 if (i->trans == trans) { in lock_graph_descend()
259 closure_put(&trans->ref); in lock_graph_descend()
264 closure_put(&trans->ref); in lock_graph_descend()
274 trace_and_count(trans->c, trans_restart_would_deadlock_recursion_limit, trans, _RET_IP_); in lock_graph_descend()
278 __lock_graph_down(g, trans); in lock_graph_descend()
287 int bch2_check_for_deadlock(struct btree_trans *trans, struct printbuf *cycle) in bch2_check_for_deadlock() argument
297 if (trans->lock_must_abort && !trans->lock_may_not_fail) { in bch2_check_for_deadlock()
301 trace_would_deadlock(&g, trans); in bch2_check_for_deadlock()
302 return btree_trans_restart(trans, BCH_ERR_transaction_restart_would_deadlock); in bch2_check_for_deadlock()
305 lock_graph_down(&g, trans); in bch2_check_for_deadlock()
317 struct btree_path *paths = rcu_dereference(top->trans->paths); in bch2_check_for_deadlock()
372 list_for_each_entry(trans, &b->lock.wait_list, locking_wait.list) { in bch2_check_for_deadlock()
373 BUG_ON(b != trans->locking); in bch2_check_for_deadlock()
376 time_after_eq64(top->lock_start_time, trans->locking_wait.start_time)) in bch2_check_for_deadlock()
379 top->lock_start_time = trans->locking_wait.start_time; in bch2_check_for_deadlock()
382 if (trans == top->trans || in bch2_check_for_deadlock()
383 !lock_type_conflicts(lock_held, trans->locking_wait.lock_want)) in bch2_check_for_deadlock()
386 closure_get(&trans->ref); in bch2_check_for_deadlock()
389 ret = lock_graph_descend(&g, trans, cycle); in bch2_check_for_deadlock()
412 struct btree_trans *trans = p; in bch2_six_check_for_deadlock() local
414 return bch2_check_for_deadlock(trans, NULL); in bch2_six_check_for_deadlock()
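
Taken together, bch2_check_for_deadlock() is a cycle check over the wait-for graph of transactions: starting from the current transaction it follows each lock it holds to the transactions queued on that lock's wait list (the list_for_each_entry over b->lock.wait_list at line 372), descends with lock_graph_descend(), and detects a cycle when it meets a transaction already on the stack (line 258). Stripped of the bcachefs specifics, that is ordinary cycle detection in a wait-for graph; here is a minimal, self-contained sketch of the idea (not the bcachefs implementation: edges are given up front rather than discovered from six-lock wait lists).

#include <stdbool.h>
#include <stdio.h>

#define MAX_TRANS 8

/*
 * waits_for[a][b]: transaction a is blocked on a lock held in a
 * conflicting mode by transaction b (cf. lock_type_conflicts(), line 383).
 */
static bool waits_for[MAX_TRANS][MAX_TRANS];
static bool on_stack[MAX_TRANS];

/* Depth-first walk: a back-edge to something on the stack is a deadlock. */
static bool cycle_from(int t, int nr)
{
	if (on_stack[t])
		return true;

	on_stack[t] = true;
	for (int other = 0; other < nr; other++)
		if (waits_for[t][other] && cycle_from(other, nr))
			return true;
	on_stack[t] = false;
	return false;
}

int main(void)
{
	/* 0 waits on 1, 1 waits on 2, 2 waits on 0: a three-way deadlock. */
	waits_for[0][1] = waits_for[1][2] = waits_for[2][0] = true;
	printf("deadlock: %s\n", cycle_from(0, 3) ? "yes" : "no");
	return 0;
}

In the real code the walk is bounded (the recursion-limit trace at line 274), each transaction on the stack stays pinned via closure_get()/closure_put(), and a detected cycle is either printed through the cycle printbuf argument or broken by break_cycle() restarting one of the transactions.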
417 int __bch2_btree_node_lock_write(struct btree_trans *trans, struct btree_path *path, in __bch2_btree_node_lock_write() argument
421 int readers = bch2_btree_node_lock_counts(trans, NULL, b, b->level).n[SIX_LOCK_read]; in __bch2_btree_node_lock_write()
431 ret = __btree_node_lock_nopath(trans, b, SIX_LOCK_write, in __bch2_btree_node_lock_write()
441 void bch2_btree_node_lock_write_nofail(struct btree_trans *trans, in bch2_btree_node_lock_write_nofail() argument
445 int ret = __btree_node_lock_write(trans, path, b, true); in bch2_btree_node_lock_write_nofail()
451 static inline bool btree_path_get_locks(struct btree_trans *trans, in btree_path_get_locks() argument
464 ? bch2_btree_node_upgrade(trans, path, l) in btree_path_get_locks()
465 : bch2_btree_node_relock(trans, path, l))) { in btree_path_get_locks()
483 __bch2_btree_path_unlock(trans, path); in btree_path_get_locks()
500 bool __bch2_btree_node_relock(struct btree_trans *trans, in __bch2_btree_node_relock() argument
512 btree_node_lock_increment(trans, &b->c, level, want))) { in __bch2_btree_node_relock()
513 mark_btree_node_locked(trans, path, level, want); in __bch2_btree_node_relock()
517 if (trace && !trans->notrace_relock_fail) in __bch2_btree_node_relock()
518 trace_and_count(trans->c, btree_path_relock_fail, trans, _RET_IP_, path, level); in __bch2_btree_node_relock()
524 bool bch2_btree_node_upgrade(struct btree_trans *trans, in bch2_btree_node_upgrade() argument
538 return bch2_btree_node_relock(trans, path, level); in bch2_btree_node_upgrade()
557 btree_node_lock_increment(trans, &b->c, level, BTREE_NODE_INTENT_LOCKED)) { in bch2_btree_node_upgrade()
558 btree_node_unlock(trans, path, level); in bch2_btree_node_upgrade()
562 trace_and_count(trans->c, btree_path_upgrade_fail, trans, _RET_IP_, path, level); in bch2_btree_node_upgrade()
574 int bch2_btree_path_relock_intent(struct btree_trans *trans, in bch2_btree_path_relock_intent() argument
582 if (!bch2_btree_node_relock(trans, path, l)) { in bch2_btree_path_relock_intent()
583 __bch2_btree_path_unlock(trans, path); in bch2_btree_path_relock_intent()
585 trace_and_count(trans->c, trans_restart_relock_path_intent, trans, _RET_IP_, path); in bch2_btree_path_relock_intent()
586 return btree_trans_restart(trans, BCH_ERR_transaction_restart_relock_path_intent); in bch2_btree_path_relock_intent()
594 bool bch2_btree_path_relock_norestart(struct btree_trans *trans, struct btree_path *path) in bch2_btree_path_relock_norestart() argument
598 bool ret = btree_path_get_locks(trans, path, false, &f); in bch2_btree_path_relock_norestart()
599 bch2_trans_verify_locks(trans); in bch2_btree_path_relock_norestart()
603 int __bch2_btree_path_relock(struct btree_trans *trans, in __bch2_btree_path_relock() argument
606 if (!bch2_btree_path_relock_norestart(trans, path)) { in __bch2_btree_path_relock()
607 trace_and_count(trans->c, trans_restart_relock_path, trans, trace_ip, path); in __bch2_btree_path_relock()
608 return btree_trans_restart(trans, BCH_ERR_transaction_restart_relock_path); in __bch2_btree_path_relock()
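
The relock helpers are all built on btree_path_get_locks(): each level of the path is either upgraded or relocked (lines 464-465), and the first level that cannot be re-taken causes the whole path to be unlocked (line 483), with the failure details handed back through the get_locks_fail pointer. A hedged sketch of that all-or-nothing shape follows, with invented fake_path/fake_level types and a relockable flag standing in for whatever makes bch2_btree_node_relock() succeed or fail.

#include <stdbool.h>

#define MAX_DEPTH 4

/* Invented per-level state for illustration only. */
struct fake_level {
	bool	locked;
	bool	relockable;	/* would bch2_btree_node_relock() succeed here? */
};

struct fake_path {
	struct fake_level	l[MAX_DEPTH];
	unsigned		level;	/* lowest level this path uses */
};

/*
 * Try to re-take the lock at every level from path->level upward; on
 * the first failure drop everything and report which level failed,
 * roughly what btree_path_get_locks() does with its get_locks_fail
 * argument.
 */
static bool path_get_locks(struct fake_path *path, unsigned *fail_level)
{
	for (unsigned l = path->level; l < MAX_DEPTH; l++) {
		if (!path->l[l].relockable) {
			/* __bch2_btree_path_unlock() analogue: drop the whole path */
			for (unsigned j = path->level; j < MAX_DEPTH; j++)
				path->l[j].locked = false;
			if (fail_level)
				*fail_level = l;
			return false;
		}
		path->l[l].locked = true;
	}
	return true;
}

The callers differ only in what they do with a failure: bch2_btree_path_relock_norestart() returns the result as-is, while bch2_btree_path_relock_intent() and __bch2_btree_path_relock() turn it into a transaction_restart error (lines 586, 608).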
614 bool bch2_btree_path_upgrade_noupgrade_sibs(struct btree_trans *trans, in bch2_btree_path_upgrade_noupgrade_sibs() argument
623 bool ret = btree_path_get_locks(trans, path, true, f); in bch2_btree_path_upgrade_noupgrade_sibs()
624 bch2_trans_verify_locks(trans); in bch2_btree_path_upgrade_noupgrade_sibs()
628 bool __bch2_btree_path_upgrade(struct btree_trans *trans, in __bch2_btree_path_upgrade() argument
633 bool ret = bch2_btree_path_upgrade_noupgrade_sibs(trans, path, new_locks_want, f); in __bch2_btree_path_upgrade()
656 if (!path->cached && !trans->in_traverse_all) { in __bch2_btree_path_upgrade()
660 trans_for_each_path(trans, linked, i) in __bch2_btree_path_upgrade()
666 btree_path_get_locks(trans, linked, true, NULL); in __bch2_btree_path_upgrade()
670 bch2_trans_verify_locks(trans); in __bch2_btree_path_upgrade()
674 void __bch2_btree_path_downgrade(struct btree_trans *trans, in __bch2_btree_path_downgrade() argument
680 if (trans->restarted) in __bch2_btree_path_downgrade()
690 btree_node_unlock(trans, path, l); in __bch2_btree_path_downgrade()
702 trace_path_downgrade(trans, _RET_IP_, path, old_locks_want); in __bch2_btree_path_downgrade()
707 void bch2_trans_downgrade(struct btree_trans *trans) in bch2_trans_downgrade() argument
712 if (trans->restarted) in bch2_trans_downgrade()
715 trans_for_each_path(trans, path, i) in bch2_trans_downgrade()
717 bch2_btree_path_downgrade(trans, path); in bch2_trans_downgrade()
720 static inline void __bch2_trans_unlock(struct btree_trans *trans) in __bch2_trans_unlock() argument
725 trans_for_each_path(trans, path, i) in __bch2_trans_unlock()
726 __bch2_btree_path_unlock(trans, path); in __bch2_trans_unlock()
729 static noinline __cold int bch2_trans_relock_fail(struct btree_trans *trans, struct btree_path *pat… in bch2_trans_relock_fail() argument
746 bch2_btree_node_lock_counts(trans, NULL, &f->b->c, f->l); in bch2_trans_relock_fail()
753 trace_trans_restart_relock(trans, _RET_IP_, buf.buf); in bch2_trans_relock_fail()
757 count_event(trans->c, trans_restart_relock); in bch2_trans_relock_fail()
759 __bch2_trans_unlock(trans); in bch2_trans_relock_fail()
760 bch2_trans_verify_locks(trans); in bch2_trans_relock_fail()
761 return btree_trans_restart(trans, BCH_ERR_transaction_restart_relock); in bch2_trans_relock_fail()
764 static inline int __bch2_trans_relock(struct btree_trans *trans, bool trace) in __bch2_trans_relock() argument
766 bch2_trans_verify_locks(trans); in __bch2_trans_relock()
768 if (unlikely(trans->restarted)) in __bch2_trans_relock()
769 return -((int) trans->restarted); in __bch2_trans_relock()
770 if (unlikely(trans->locked)) in __bch2_trans_relock()
776 trans_for_each_path(trans, path, i) { in __bch2_trans_relock()
780 !btree_path_get_locks(trans, path, false, &f)) in __bch2_trans_relock()
781 return bch2_trans_relock_fail(trans, path, &f, trace); in __bch2_trans_relock()
784 trans_set_locked(trans, true); in __bch2_trans_relock()
786 bch2_trans_verify_locks(trans); in __bch2_trans_relock()
790 int bch2_trans_relock(struct btree_trans *trans) in bch2_trans_relock() argument
792 return __bch2_trans_relock(trans, true); in bch2_trans_relock()
795 int bch2_trans_relock_notrace(struct btree_trans *trans) in bch2_trans_relock_notrace() argument
797 return __bch2_trans_relock(trans, false); in bch2_trans_relock_notrace()
800 void bch2_trans_unlock_noassert(struct btree_trans *trans) in bch2_trans_unlock_noassert() argument
802 __bch2_trans_unlock(trans); in bch2_trans_unlock_noassert()
804 trans_set_unlocked(trans); in bch2_trans_unlock_noassert()
807 void bch2_trans_unlock(struct btree_trans *trans) in bch2_trans_unlock() argument
809 __bch2_trans_unlock(trans); in bch2_trans_unlock()
811 trans_set_unlocked(trans); in bch2_trans_unlock()
814 void bch2_trans_unlock_long(struct btree_trans *trans) in bch2_trans_unlock_long() argument
816 bch2_trans_unlock(trans); in bch2_trans_unlock_long()
817 bch2_trans_srcu_unlock(trans); in bch2_trans_unlock_long()
820 void bch2_trans_unlock_write(struct btree_trans *trans) in bch2_trans_unlock_write() argument
825 trans_for_each_path(trans, path, i) in bch2_trans_unlock_write()
828 bch2_btree_node_unlock_write(trans, path, path->l[l].b); in bch2_trans_unlock_write()
831 int __bch2_trans_mutex_lock(struct btree_trans *trans, in __bch2_trans_mutex_lock() argument
834 int ret = drop_locks_do(trans, (mutex_lock(lock), 0)); in __bch2_trans_mutex_lock()
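
__bch2_trans_mutex_lock() (line 834) shows the standard pattern for any operation that may block while btree node locks are held: drop every btree lock, do the blocking operation, then relock and propagate a transaction-restart error if relocking fails, which is what the drop_locks_do() wrapper expresses. Below is a standalone sketch of that unlock/do/relock shape, with an invented fake_trans and an assumed ERR_TRANSACTION_RESTART standing in for the BCH_ERR_transaction_restart_* codes.

#include <errno.h>
#include <pthread.h>
#include <stdbool.h>

#define ERR_TRANSACTION_RESTART (-EAGAIN)	/* assumed stand-in */

/* Invented: a "transaction" that only tracks lock state and restarts. */
struct fake_trans {
	bool	locked;
	bool	must_restart;	/* set elsewhere, e.g. by a deadlock check */
};

static void trans_unlock(struct fake_trans *trans)
{
	trans->locked = false;			/* cf. bch2_trans_unlock(), line 809 */
}

static int trans_relock(struct fake_trans *trans)
{
	if (trans->must_restart)
		return ERR_TRANSACTION_RESTART;	/* cf. line 761 */
	trans->locked = true;
	return 0;
}

/* Never block on an ordinary mutex while holding btree node locks. */
static int trans_mutex_lock(struct fake_trans *trans, pthread_mutex_t *lock)
{
	trans_unlock(trans);
	pthread_mutex_lock(lock);		/* the potentially-blocking step */

	int ret = trans_relock(trans);
	if (ret)
		pthread_mutex_unlock(lock);	/* don't return holding the mutex on restart */
	return ret;
}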
875 static bool bch2_trans_locked(struct btree_trans *trans) in bch2_trans_locked() argument
880 trans_for_each_path(trans, path, i) in bch2_trans_locked()
886 void bch2_trans_verify_locks(struct btree_trans *trans) in bch2_trans_verify_locks() argument
888 if (!trans->locked) { in bch2_trans_verify_locks()
889 BUG_ON(bch2_trans_locked(trans)); in bch2_trans_verify_locks()
896 trans_for_each_path(trans, path, i) in bch2_trans_verify_locks()
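
Finally, bch2_trans_locked() reports whether any path still holds node locks (line 880), and bch2_trans_verify_locks() asserts that a transaction which believes it is unlocked really has no locked paths (lines 888-889). The same invariant in a tiny standalone form, with invented fake_path/fake_trans types:

#include <assert.h>
#include <stdbool.h>

struct fake_path {
	int	nodes_locked;	/* count/bitmask of locked levels */
};

struct fake_trans {
	bool			locked;		/* what the transaction believes */
	struct fake_path	paths[4];
	unsigned		nr_paths;
};

/* Does any path actually hold node locks? (shape of bch2_trans_locked()) */
static bool trans_paths_locked(const struct fake_trans *trans)
{
	for (unsigned i = 0; i < trans->nr_paths; i++)
		if (trans->paths[i].nodes_locked)
			return true;
	return false;
}

/*
 * Invariant from bch2_trans_verify_locks(): a transaction that thinks
 * it is unlocked must not have any path still holding node locks.
 */
static void verify_locks(const struct fake_trans *trans)
{
	if (!trans->locked)
		assert(!trans_paths_locked(trans));
}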