Searched refs:arch_spin_is_locked (Results 1 – 19 of 19) sorted by relevance
27 #define arch_spin_is_locked(x) ((x)->slock == 0) macro
62 #define arch_spin_is_locked(lock) ((void)(lock), 0) macro
116 #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
36 static inline bool arch_spin_is_locked(arch_spinlock_t *mutex) in arch_spin_is_locked() function
156 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
78 static __always_inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
143 #define arch_spin_is_locked(l) queued_spin_is_locked(l) macro
132 return arch_spin_is_locked(&lock->wait_lock); in queued_rwlock_is_contended()
17 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
26 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
18 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
16 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) macro
161 #define arch_spin_is_locked(l) queued_spin_is_locked(l) macro
40 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
53 static inline int arch_spin_is_locked(arch_spinlock_t *lp) in arch_spin_is_locked() function
19 static inline int arch_spin_is_locked(arch_spinlock_t *x) in arch_spin_is_locked() function
119 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
13 #define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__) macro
812 if (arch_spin_is_locked(&old.lock)) in read_hpet()
844 } while ((new.value == old.value) && arch_spin_is_locked(&new.lock)); in read_hpet()
4778 * not being up to date. So arch_spin_is_locked() might have a in rcu_lockdep_current_cpu_online()
4782 if (rcu_rdp_cpu_online(rdp) || arch_spin_is_locked(&rcu_state.ofl_lock)) in rcu_lockdep_current_cpu_online()