Searched refs:queued_spin_trylock (Results 1 – 6 of 6) sorted by relevance

/linux/arch/powerpc/include/asm/
  qspinlock.h
    138  static __always_inline int queued_spin_trylock(struct qspinlock *lock)   [definition of queued_spin_trylock()]
    150  if (!queued_spin_trylock(lock))   [in queued_spin_lock()]
    165  #define arch_spin_trylock(l) queued_spin_trylock(l)
/linux/include/asm-generic/
  qspinlock.h
     90  static __always_inline int queued_spin_trylock(struct qspinlock *lock)   [definition of queued_spin_trylock()]
    148  #define arch_spin_trylock(l) queued_spin_trylock(l)
/linux/kernel/locking/
  qspinlock.c
    232  while (!queued_spin_trylock(lock))   [in xchg_tail()]
    260  if (queued_spin_trylock(lock))   [in set_locked()]
  qspinlock_paravirt.h
     80  #define queued_spin_trylock(l) pv_hybrid_queued_unfair_trylock(l)   [macro definition]
/linux/kernel/bpf/
  rqspinlock.c
    474  if (!queued_spin_trylock(lock)) {   [in resilient_queued_spin_lock_slowpath()]
    503  if (queued_spin_trylock(lock))   [in resilient_queued_spin_lock_slowpath()]
/linux/arch/powerpc/lib/
  qspinlock.c
    543  while (!queued_spin_trylock(lock))   [in queued_spin_lock_mcs_queue()]
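
For orientation, below is a minimal userspace C11 sketch of the fast path that the generic queued_spin_trylock() in asm-generic/qspinlock.h implements: read the lock word, and only if it is zero attempt an acquire compare-and-swap to the locked value. The struct layout, the _Q_LOCKED_VAL value, and the my_queued_spin_trylock name are simplified assumptions for illustration, not the kernel's definitions.

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Simplified stand-in for struct qspinlock: one 32-bit lock word. */
    struct qspinlock { _Atomic unsigned int val; };

    /* Assumed locked-byte value, loosely mirroring the kernel constant. */
    #define _Q_LOCKED_VAL 1U

    /* Trylock fast path: succeed only if the lock word is currently 0. */
    static bool my_queued_spin_trylock(struct qspinlock *lock)
    {
        unsigned int old = atomic_load_explicit(&lock->val, memory_order_relaxed);

        if (old)        /* already locked or contended: give up immediately */
            return false;

        /* Acquire ordering on success so the critical section cannot be
         * reordered before the lock acquisition. */
        return atomic_compare_exchange_strong_explicit(&lock->val, &old,
                                                       _Q_LOCKED_VAL,
                                                       memory_order_acquire,
                                                       memory_order_relaxed);
    }

    int main(void)
    {
        struct qspinlock lock = { 0 };

        printf("first trylock:  %d\n", my_queued_spin_trylock(&lock)); /* 1: acquired */
        printf("second trylock: %d\n", my_queued_spin_trylock(&lock)); /* 0: already held */
        return 0;
    }

The callers in the listing (queued_spin_lock(), xchg_tail(), the rqspinlock slowpath) rely on exactly this property: the trylock either takes an uncontended lock with acquire semantics or fails without spinning.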