
Searched refs: _Q_LOCKED_VAL (Results 1 – 12 of 12), sorted by relevance

/linux/kernel/locking/
qspinlock_paravirt.h
92 try_cmpxchg_acquire(&lock->locked, &old, _Q_LOCKED_VAL)) { in pv_hybrid_queued_unfair_trylock()
125 try_cmpxchg_acquire(&lock->locked_pending, &old, _Q_LOCKED_VAL); in trylock_clear_pending()
144 new = (old & ~_Q_PENDING_MASK) | _Q_LOCKED_VAL; in trylock_clear_pending()
458 WRITE_ONCE(lock->locked, _Q_LOCKED_VAL); in pv_wait_head_or_lock()
481 return (u32)(atomic_read(&lock->val) | _Q_LOCKED_VAL); in pv_wait_head_or_lock()
545 u8 locked = _Q_LOCKED_VAL; in __pv_queued_spin_unlock()
qspinlock.c
343 * In the PV case we might already have _Q_LOCKED_VAL set, because in queued_spin_lock_slowpath()
353 if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL)) in queued_spin_lock_slowpath()
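The two qspinlock.c hits above are the slowpath's idle-word exit: once neither the locked byte nor the pending bit is set, the waiter claims the lock with a single cmpxchg of 0 to _Q_LOCKED_VAL. A minimal sketch of that step, assuming the generic struct qspinlock and the masks from qspinlock_types.h (the helper name is made up for illustration):

/* Sketch of the acquire step at qspinlock.c:353: claim an idle lock word
 * with one relaxed cmpxchg of 0 -> _Q_LOCKED_VAL. Illustrative only; the
 * real slowpath also manages the pending bit and the MCS tail.
 */
static inline bool claim_idle_lock(struct qspinlock *lock)
{
	u32 val = atomic_read(&lock->val);

	if (val & _Q_LOCKED_PENDING_MASK)
		return false;	/* an owner or a pending waiter is still there */

	return atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL);
}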
/linux/tools/testing/selftests/bpf/progs/
bpf_arena_spin_lock.h
107 #define _Q_LOCKED_VAL (1U << _Q_LOCKED_OFFSET) macro
193 WRITE_ONCE(lock->locked_pending, _Q_LOCKED_VAL); in clear_pending_set_locked()
204 WRITE_ONCE(lock->locked, _Q_LOCKED_VAL); in set_locked()
240 return likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)); in arena_spin_trylock()
429 if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL)) in arena_spin_lock_slowpath()
501 if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL))) in arena_spin_lock()
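The arena hits at lines 193 and 204 mirror the kernel's byte-granular stores: a waiter that already owns the pending bit, or that heads the queue, may set the locked byte with a plain WRITE_ONCE instead of a full cmpxchg. A hedged sketch of those two transitions, written against the generic little-endian struct qspinlock union for brevity rather than the arena-specific type:

/* Sketch of the two store-based transitions seen in bpf_arena_spin_lock.h.
 * Safe only because the caller already holds the pending bit (first case)
 * or is the sole queue head (second case), so nobody else can touch the
 * low bytes concurrently.
 */
static inline void my_clear_pending_set_locked(struct qspinlock *lock)
{
	/* 0,1,* -> 0,0,1: drop pending and set locked in one 16-bit store */
	WRITE_ONCE(lock->locked_pending, _Q_LOCKED_VAL);
}

static inline void my_set_locked(struct qspinlock *lock)
{
	/* *,*,0 -> *,*,1: only the queue head may perform this store */
	WRITE_ONCE(lock->locked, _Q_LOCKED_VAL);
}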
/linux/include/asm-generic/
qspinlock.h
97 return likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)); in queued_spin_trylock()
111 if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL))) in queued_spin_lock()
qspinlock_types.h
92 #define _Q_LOCKED_VAL (1U << _Q_LOCKED_OFFSET) macro
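qspinlock_types.h:92 is the canonical definition: with _Q_LOCKED_OFFSET at 0, _Q_LOCKED_VAL is simply bit 0 of the 32-bit lock word, and the asm-generic fast paths at lines 97 and 111 CAS the whole word from 0 to that value. A sketch of the layout and the uncontended lock path, assuming the NR_CPUS < 16K configuration (function name changed to mark it as illustrative):

/* Generic lock word layout (paraphrased from qspinlock_types.h, NR_CPUS < 16K):
 *  bits  0- 7: locked byte     -> _Q_LOCKED_VAL == (1U << 0)
 *  bit      8: pending
 *  bits  9-15: unused
 *  bits 16-17: tail index
 *  bits 18-31: tail CPU (+1)
 */
static __always_inline void my_queued_spin_lock(struct qspinlock *lock)
{
	u32 val = 0;

	/* Uncontended: 0 -> _Q_LOCKED_VAL with acquire ordering. */
	if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)))
		return;

	/* Any non-zero word means contention: fall into the MCS slowpath. */
	queued_spin_lock_slowpath(lock, val);
}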
/linux/arch/powerpc/lib/
qspinlock.c
147 "i" (_Q_LOCKED_VAL), in trylock_clean_tail()
219 BUG_ON(!(old & _Q_LOCKED_VAL)); in try_set_sleepy()
291 BUG_ON(!(val & _Q_LOCKED_VAL)); in __yield_to_locked_owner()
395 if (val & _Q_LOCKED_VAL) { in yield_to_prev()
479 if (unlikely(!(val & _Q_LOCKED_VAL))) { in try_to_steal_lock()
614 if (!(val & _Q_LOCKED_VAL)) in queued_spin_lock_mcs_queue()
662 if (unlikely(old & _Q_LOCKED_VAL)) { in queued_spin_lock_mcs_queue()
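The powerpc lib hits all orbit the same test: read the lock word, and if _Q_LOCKED_VAL is still set, decide whether to steal, queue, or donate cycles toward the current owner. A heavily hedged sketch of that shape; the owner-yield helper below is a placeholder, not the actual powerpc primitive:

/* Sketch of the recurring pattern in arch/powerpc/lib/qspinlock.c: check the
 * locked bit, find the owner CPU encoded beside it, and (under a hypervisor)
 * yield toward that owner instead of spinning blindly.
 */
extern void hypothetical_yield_to_cpu(int cpu);	/* placeholder, not a kernel API */

static void yield_if_still_locked(struct qspinlock *lock, u32 val)
{
	int owner;

	BUG_ON(!(val & _Q_LOCKED_VAL));		/* caller observed the lock held */

	owner = (val & _Q_OWNER_CPU_MASK) >> _Q_OWNER_CPU_OFFSET;

	/* Re-check the word before paying for a directed yield. */
	if (atomic_read(&lock->val) == val)
		hypothetical_yield_to_cpu(owner);
	else
		cpu_relax();
}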
/linux/arch/loongarch/include/asm/
qspinlock.h
29 if (val || !atomic_try_cmpxchg(&lock->val, &val, _Q_LOCKED_VAL)) { in virt_spin_lock()
/linux/arch/x86/include/asm/
qspinlock_paravirt.h
43 "mov $" __stringify(_Q_LOCKED_VAL) ",%eax\n\t" \
qspinlock.h
104 if (val || !atomic_try_cmpxchg(&lock->val, &val, _Q_LOCKED_VAL)) { in virt_spin_lock()
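The loongarch hit at line 29 and the x86 hit at line 104 are the same fallback: under a hypervisor without paravirt spinlock support, fair queueing loses badly to vCPU preemption, so virt_spin_lock() degrades to a test-and-set loop built from the same 0 to _Q_LOCKED_VAL CAS. A sketch, assuming the x86-style virt_spin_lock_key static branch:

/* Sketch of the virt_spin_lock() pattern shared by the x86 and loongarch
 * headers: no pending bit, no MCS queue, just spin until the word reads 0
 * and then CAS in _Q_LOCKED_VAL.
 */
static inline bool my_virt_spin_lock(struct qspinlock *lock)
{
	int val;

	if (!static_branch_likely(&virt_spin_lock_key))
		return false;		/* bare metal: let the normal slowpath run */

retry:
	val = atomic_read(&lock->val);

	if (val || !atomic_try_cmpxchg(&lock->val, &val, _Q_LOCKED_VAL)) {
		cpu_relax();
		goto retry;
	}

	return true;
}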
/linux/arch/powerpc/include/asm/
qspinlock_types.h
42 #define _Q_LOCKED_VAL (1U << _Q_LOCKED_OFFSET) macro
qspinlock.h
89 return _Q_LOCKED_VAL | (smp_processor_id() << _Q_OWNER_CPU_OFFSET); in queued_spin_encode_locked_val()
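powerpc replaces the plain locked byte with a richer value: queued_spin_encode_locked_val() at qspinlock.h:89 folds the owner's CPU id in next to _Q_LOCKED_VAL, which is what makes the directed yields in the lib/ hits possible. A sketch of the encode and a matching decode; the decode helper name and the _Q_OWNER_CPU_MASK usage are assumptions inferred from the offset macro, not verified against the tree:

/* Sketch of the powerpc lock-word encoding: locked bit plus owner CPU id.
 * decode_owner_cpu() is an illustrative name, not necessarily the in-tree one.
 */
static __always_inline u32 encode_locked_val(void)
{
	return _Q_LOCKED_VAL | (smp_processor_id() << _Q_OWNER_CPU_OFFSET);
}

static __always_inline int decode_owner_cpu(u32 val)
{
	return (val & _Q_OWNER_CPU_MASK) >> _Q_OWNER_CPU_OFFSET;
}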
/linux/kernel/bpf/
rqspinlock.c
631 if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL)) in resilient_queued_spin_lock_slowpath()
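rqspinlock.c reuses the same idle-word CAS in its slowpath; what makes it "resilient" is that waiting is bounded, so the lock call can fail with an error instead of hanging on deadlock. The sketch below only illustrates that idea with a crude timeout; the helper name, deadline handling, and return code are assumptions, not the rqspinlock.c internals:

/* Illustrative only: a bounded wait for the 0 -> _Q_LOCKED_VAL transition.
 * The real resilient slowpath layers deadlock detection and per-stage
 * timeouts on top of the full MCS queueing machinery.
 */
static int bounded_lock_acquire(struct qspinlock *lock, u64 timeout_ns)
{
	u64 deadline = ktime_get_mono_fast_ns() + timeout_ns;
	u32 val;

	for (;;) {
		val = atomic_read(&lock->val);
		if (!val && atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL))
			return 0;
		if (ktime_get_mono_fast_ns() > deadline)
			return -ETIMEDOUT;
		cpu_relax();
	}
}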