Searched refs:atomic_try_cmpxchg_relaxed (Results 1 – 9 of 9) sorted by relevance
| /linux/kernel/locking/ |
| H A D | qspinlock.c | 353 if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL)) in queued_spin_lock_slowpath()
|
| /linux/tools/testing/selftests/bpf/progs/ |
| H A D | bpf_arena_spin_lock.h | 164 } while (!atomic_try_cmpxchg_relaxed(&lock->val, &old, new)); in xchg_tail() 429 if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL)) in arena_spin_lock_slowpath()
|
| /linux/tools/testing/selftests/bpf/ |
| H A D | bpf_atomic.h | 134 #define atomic_try_cmpxchg_relaxed(p, pold, new) \ macro
|
| /linux/rust/kernel/sync/ |
| H A D | atomic.rs | 474 T::Repr::atomic_try_cmpxchg_relaxed(&self.0, &mut tmp, new) in try_cmpxchg()
|
| /linux/kernel/bpf/ |
| H A D | rqspinlock.c | 632 if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL)) in resilient_queued_spin_lock_slowpath()
|
| /linux/rust/helpers/ |
| H A D | atomic.c | 443 return atomic_try_cmpxchg_relaxed(v, old, new); in rust_helper_atomic_try_cmpxchg_release()
|
| /linux/mm/ |
| H A D | page_owner.c | 224 if (atomic_try_cmpxchg_relaxed(&stack_record->count.refs, &old, 1)) in inc_stack_record_count()
|
| /linux/drivers/misc/keba/ |
| H A D | cp500.c | 715 if (!atomic_try_cmpxchg_relaxed(&cp500->nvmem_notified, &notified, 1)) { in cp500_nvmem()
|
| /linux/kernel/ |
| H A D | workqueue.c | 1713 } while (!atomic_try_cmpxchg_relaxed(&nna->nr, &old, old + 1)); in tryinc_node_nr_active()
|