/linux/arch/powerpc/include/asm/
  simple_spinlock.h
     35  static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)   [in arch_spin_value_unlocked()]
     40  static inline int arch_spin_is_locked(arch_spinlock_t *lock)   [in arch_spin_is_locked()]
     49  static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock)   [in __arch_spin_trylock()]
     70  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
     91  void splpar_spin_yield(arch_spinlock_t *lock);
     94  static inline void splpar_spin_yield(arch_spinlock_t *lock) {}   [in splpar_spin_yield()]
     98  static inline void spin_yield(arch_spinlock_t *lock)   [in spin_yield()]
    114  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
    128  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
  simple_spinlock_types.h
     11  } arch_spinlock_t;   [typedef]
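The powerpc "simple spinlock" listed above is a test-and-set style lock (built on lwarx/stwcx. reservations, with splpar_spin_yield() hooks for shared-processor LPARs). As a rough, portable illustration of the same arch_spin_* API surface, not the powerpc implementation itself, a test-and-set lock can be sketched with C11 atomics; the my_ prefixed names below are made up for the sketch:

    /* Minimal test-and-set spinlock sketch with the arch_spin_* API shape.
     * Portable C11 stand-in, not the powerpc lwarx/stwcx. implementation. */
    #include <stdatomic.h>
    #include <stdbool.h>

    typedef struct {
        atomic_uint slock;          /* 0 = unlocked, non-zero = locked */
    } my_arch_spinlock_t;

    static inline bool my_arch_spin_trylock(my_arch_spinlock_t *lock)
    {
        unsigned int expected = 0;
        /* Succeeds only if the lock word was 0; acquire orders the critical section. */
        return atomic_compare_exchange_strong_explicit(&lock->slock, &expected, 1,
                                                       memory_order_acquire,
                                                       memory_order_relaxed);
    }

    static inline void my_arch_spin_lock(my_arch_spinlock_t *lock)
    {
        while (!my_arch_spin_trylock(lock))
            ;                       /* spin; the real code can yield to the hypervisor here */
    }

    static inline void my_arch_spin_unlock(my_arch_spinlock_t *lock)
    {
        /* Release pairs with the acquire in trylock. */
        atomic_store_explicit(&lock->slock, 0, memory_order_release);
    }

    static inline int my_arch_spin_is_locked(my_arch_spinlock_t *lock)
    {
        return atomic_load_explicit(&lock->slock, memory_order_relaxed) != 0;
    }

A plain test-and-set lock like this is unfair under contention, which is one reason other entries in this listing use ticket or queued locks instead.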
/linux/arch/riscv/include/asm/
  spinlock.h
     31  SPINLOCK_BASE_DECLARE(lock, void, arch_spinlock_t *)
     32  SPINLOCK_BASE_DECLARE(unlock, void, arch_spinlock_t *)
     33  SPINLOCK_BASE_DECLARE(is_locked, int, arch_spinlock_t *)
     34  SPINLOCK_BASE_DECLARE(is_contended, int, arch_spinlock_t *)
     35  SPINLOCK_BASE_DECLARE(trylock, bool, arch_spinlock_t *)
     36  SPINLOCK_BASE_DECLARE(value_unlocked, int, arch_spinlock_t)
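The riscv header stamps out each arch_spin_* entry point with a SPINLOCK_BASE_DECLARE() macro so the same API can be routed to either a ticket or a queued spinlock backend. The kernel macro body is not shown in the listing; the following is a hedged sketch of that declare-and-dispatch pattern, with toy backends and a plain runtime flag standing in for whatever switch the real header uses:

    /* Hypothetical sketch of a declare-and-dispatch macro in the spirit of
     * SPINLOCK_BASE_DECLARE(); the backends and the use_queued flag are
     * stand-ins, not the riscv kernel code. */
    #include <stdatomic.h>
    #include <stdbool.h>

    typedef struct { atomic_uint val; } arch_spinlock_t;

    /* Toy backends, just enough to compile and show the forwarding. */
    static inline bool ticket_spin_trylock(arch_spinlock_t *l)
    {
        unsigned int zero = 0;
        return atomic_compare_exchange_strong(&l->val, &zero, 1);
    }
    static inline bool queued_spin_trylock(arch_spinlock_t *l)
    {
        return ticket_spin_trylock(l);          /* placeholder backend */
    }
    static inline int ticket_spin_is_locked(arch_spinlock_t *l) { return atomic_load(&l->val) != 0; }
    static inline int queued_spin_is_locked(arch_spinlock_t *l) { return atomic_load(&l->val) != 0; }

    static bool use_queued;                      /* stand-in for a static branch */

    #define SPINLOCK_DECLARE(op, type, type_lock)                 \
    static inline type arch_spin_##op(type_lock lock)             \
    {                                                             \
        return use_queued ? queued_spin_##op(lock)                \
                          : ticket_spin_##op(lock);               \
    }

    SPINLOCK_DECLARE(trylock,   bool, arch_spinlock_t *)
    SPINLOCK_DECLARE(is_locked, int,  arch_spinlock_t *)

The sketch only instantiates the value-returning operations; the real header generates lock/unlock and the other entries from the list above the same way.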
/linux/lib/
  atomic64.c
     28  arch_spinlock_t lock;
     36  static inline arch_spinlock_t *lock_addr(const atomic64_t *v)   [in lock_addr()]
     48  arch_spinlock_t *lock = lock_addr(v);   [in generic_atomic64_read()]
     63  arch_spinlock_t *lock = lock_addr(v);   [in generic_atomic64_set()]
     77  arch_spinlock_t *lock = lock_addr(v); \
     91  arch_spinlock_t *lock = lock_addr(v); \
    107  arch_spinlock_t *lock = lock_addr(v); \
    144  arch_spinlock_t *lock = lock_addr(v);   [in generic_atomic64_dec_if_positive()]
    161  arch_spinlock_t *lock = lock_addr(v);   [in generic_atomic64_cmpxchg()]
    178  arch_spinlock_t *lock = lock_addr(v);   [in generic_atomic64_xchg()]
    [all …]
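lib/atomic64.c provides 64-bit atomics for machines without native 64-bit atomic instructions: each atomic64_t is protected by one spinlock chosen from a small array, with lock_addr() hashing the variable's address to pick the lock. A user-space sketch of that address-hashed locking technique, assuming pthread mutexes in place of raw arch spinlocks and made-up my_ names:

    /* Sketch of address-hashed locking for emulated 64-bit atomics. */
    #include <pthread.h>
    #include <stdint.h>

    #define NR_LOCKS 16                          /* small, power of two */

    typedef struct { int64_t counter; } my_atomic64_t;

    static pthread_mutex_t atomic64_locks[NR_LOCKS] = {
        [0 ... NR_LOCKS - 1] = PTHREAD_MUTEX_INITIALIZER   /* GCC range initializer */
    };

    /* Pick a lock based on the variable's address, in the spirit of lock_addr(). */
    static pthread_mutex_t *my_lock_addr(const my_atomic64_t *v)
    {
        uintptr_t addr = (uintptr_t)v;
        return &atomic64_locks[(addr >> 6) % NR_LOCKS];    /* skip cacheline-offset bits */
    }

    static int64_t my_atomic64_add_return(int64_t a, my_atomic64_t *v)
    {
        pthread_mutex_t *lock = my_lock_addr(v);
        int64_t val;

        pthread_mutex_lock(lock);
        val = v->counter += a;
        pthread_mutex_unlock(lock);
        return val;
    }

    static int64_t my_atomic64_read(my_atomic64_t *v)
    {
        pthread_mutex_t *lock = my_lock_addr(v);
        int64_t val;

        pthread_mutex_lock(lock);
        val = v->counter;
        pthread_mutex_unlock(lock);
        return val;
    }

Hashing by address keeps the lock array small while still letting unrelated atomic64_t variables proceed in parallel most of the time.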
/linux/include/asm-generic/
  ticket_spinlock.h
     33  static __always_inline void ticket_spin_lock(arch_spinlock_t *lock)   [in ticket_spin_lock()]
     53  static __always_inline bool ticket_spin_trylock(arch_spinlock_t *lock)   [in ticket_spin_trylock()]
     63  static __always_inline void ticket_spin_unlock(arch_spinlock_t *lock)   [in ticket_spin_unlock()]
     71  static __always_inline int ticket_spin_value_unlocked(arch_spinlock_t lock)   [in ticket_spin_value_unlocked()]
     78  static __always_inline int ticket_spin_is_locked(arch_spinlock_t *lock)   [in ticket_spin_is_locked()]
     80  arch_spinlock_t val = READ_ONCE(*lock);   [in ticket_spin_is_locked()]
     85  static __always_inline int ticket_spin_is_contended(arch_spinlock_t *lock)   [in ticket_spin_is_contended()]
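The asm-generic ticket spinlock is a fair FIFO lock: each arriving CPU takes the next ticket and spins until the "now serving" counter reaches it. A rough C11 sketch of the algorithm follows; the kernel header packs both halves into a single 32-bit word, and the exact layout used below (owner in the low 16 bits, next ticket in the high 16 bits) is an assumption for illustration, as are the my_ names:

    /* Ticket spinlock sketch: low 16 bits = owner ("now serving"),
     * high 16 bits = next ticket to hand out. C11 stand-in, not the kernel header. */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    typedef struct { _Atomic uint32_t val; } my_ticket_lock_t;

    static inline void my_ticket_lock(my_ticket_lock_t *lock)
    {
        /* Grab a ticket by bumping the high half. */
        uint32_t old = atomic_fetch_add_explicit(&lock->val, 1u << 16,
                                                 memory_order_acquire);
        uint16_t ticket = (uint16_t)(old >> 16);

        /* Spin until the owner half reaches our ticket. */
        while ((uint16_t)atomic_load_explicit(&lock->val, memory_order_acquire) != ticket)
            ;
    }

    static inline bool my_ticket_trylock(my_ticket_lock_t *lock)
    {
        uint32_t old = atomic_load_explicit(&lock->val, memory_order_relaxed);

        /* Only succeed when nobody is queued: owner == next. */
        if ((uint16_t)(old >> 16) != (uint16_t)old)
            return false;
        return atomic_compare_exchange_strong_explicit(&lock->val, &old,
                                                       old + (1u << 16),
                                                       memory_order_acquire,
                                                       memory_order_relaxed);
    }

    static inline void my_ticket_unlock(my_ticket_lock_t *lock)
    {
        uint32_t old = atomic_load_explicit(&lock->val, memory_order_relaxed);
        uint32_t new;

        /* Bump only the 16-bit owner half; a CAS loop stands in for the kernel's
         * half-word store-release. Concurrent lockers may bump the high half,
         * so retry if the word changed underneath us. */
        do {
            new = (old & 0xffff0000u) | ((old + 1) & 0xffffu);
        } while (!atomic_compare_exchange_weak_explicit(&lock->val, &old, new,
                                                        memory_order_release,
                                                        memory_order_relaxed));
    }

Because waiters are served strictly in ticket order, a ticket lock cannot starve a CPU the way the test-and-set lock sketched earlier can.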
/linux/arch/arm/include/asm/
  spinlock.h
     56  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
     60  arch_spinlock_t lockval;   [in arch_spin_lock()]
     81  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
    107  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
    114  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)   [in arch_spin_value_unlocked()]
    119  static inline int arch_spin_is_locked(arch_spinlock_t *lock)   [in arch_spin_is_locked()]
    124  static inline int arch_spin_is_contended(arch_spinlock_t *lock)   [in arch_spin_is_contended()]
/linux/arch/parisc/include/asm/
  futex.h
     21  _futex_spin_lock_irqsave(arch_spinlock_t *s, unsigned long *flags)   [in _futex_spin_lock_irqsave()]
     28  _futex_spin_unlock_irqrestore(arch_spinlock_t *s, unsigned long *flags)   [in _futex_spin_unlock_irqrestore()]
     39  arch_spinlock_t *s;   [in arch_futex_atomic_op_inuser()]
     44  s = (arch_spinlock_t *)&lws_lock_start[_futex_hash_index(ua)];   [in arch_futex_atomic_op_inuser()]
     95  arch_spinlock_t *s;   [in futex_atomic_cmpxchg_inatomic()]
    109  s = (arch_spinlock_t *)&lws_lock_start[_futex_hash_index(ua)];   [in futex_atomic_cmpxchg_inatomic()]
  spinlock.h
     19  static inline int arch_spin_is_locked(arch_spinlock_t *x)   [in arch_spin_is_locked()]
     30  static inline void arch_spin_lock(arch_spinlock_t *x)   [in arch_spin_lock()]
     49  static inline void arch_spin_unlock(arch_spinlock_t *x)   [in arch_spin_unlock()]
     59  static inline int arch_spin_trylock(arch_spinlock_t *x)   [in arch_spin_trylock()]
  spinlock_types.h
     16  } arch_spinlock_t;   [typedef]
     25  arch_spinlock_t lock_mutex;
  atomic.h
     32  extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
     37  arch_spinlock_t *s = ATOMIC_HASH(l); \
     43  arch_spinlock_t *s = ATOMIC_HASH(l); \
/linux/tools/include/linux/
  spinlock.h
     23  #define arch_spinlock_t pthread_mutex_t   [macro]
     26  static inline void arch_spin_lock(arch_spinlock_t *mutex)   [in arch_spin_lock()]
     31  static inline void arch_spin_unlock(arch_spinlock_t *mutex)   [in arch_spin_unlock()]
     36  static inline bool arch_spin_is_locked(arch_spinlock_t *mutex)   [in arch_spin_is_locked()]
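The tools/ copy of spinlock.h lets kernel code be compiled in user space: line 23 above shows arch_spinlock_t being defined to pthread_mutex_t, with the arch_spin_* calls mapped onto pthread operations. A hedged sketch of that kind of shim (the is_locked probe and the my_ names are assumptions, not the header's actual bodies):

    /* User-space shim in the spirit of tools/include/linux/spinlock.h:
     * the "arch spinlock" becomes a pthread mutex. */
    #include <pthread.h>
    #include <stdbool.h>

    #define my_arch_spinlock_t         pthread_mutex_t
    #define MY_ARCH_SPIN_LOCK_UNLOCKED PTHREAD_MUTEX_INITIALIZER

    static inline void my_arch_spin_lock(my_arch_spinlock_t *mutex)
    {
        pthread_mutex_lock(mutex);
    }

    static inline void my_arch_spin_unlock(my_arch_spinlock_t *mutex)
    {
        pthread_mutex_unlock(mutex);
    }

    static inline bool my_arch_spin_is_locked(my_arch_spinlock_t *mutex)
    {
        /* Probe by trying to take the mutex: if trylock fails, someone holds it. */
        if (pthread_mutex_trylock(mutex) != 0)
            return true;
        pthread_mutex_unlock(mutex);
        return false;
    }

Sleeping on a mutex instead of spinning is fine here because the tools build runs as an ordinary process, not in atomic kernel context.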
/linux/arch/arc/include/asm/
  spinlock.h
     17  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
     44  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
     67  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
    222  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
    244  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
    261  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
  spinlock_types.h
     11  } arch_spinlock_t;   [typedef]
     27  arch_spinlock_t lock_mutex;
/linux/arch/alpha/include/asm/
  spinlock.h
     19  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)   [in arch_spin_value_unlocked()]
     24  static inline void arch_spin_unlock(arch_spinlock_t * lock)   [in arch_spin_unlock()]
     30  static inline void arch_spin_lock(arch_spinlock_t * lock)   [in arch_spin_lock()]
     50  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
  spinlock_types.h
     11  } arch_spinlock_t;   [typedef]
/linux/include/linux/
  spinlock_up.h
     29  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
     35  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
     45  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
  spinlock_types_up.h
     19  } arch_spinlock_t;   [typedef]
     25  typedef struct { } arch_spinlock_t;   [typedef]
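The *_up.h variants back spinlocks on uniprocessor (!CONFIG_SMP) builds. With only one CPU there is nothing to spin against, so arch_spinlock_t shrinks to (almost) nothing, the operations reduce to compiler barriers, and the real exclusion comes from disabling preemption or interrupts in the spin_lock()/spin_lock_irqsave() layer above. A minimal sketch of the idea, assuming the non-debug (empty struct) variant and using made-up my_ names:

    /* Uniprocessor stand-ins: "locking" only has to keep the compiler from
     * reordering across the critical section. Sketch only; the empty struct
     * relies on GNU C, as the kernel's own definition does. */
    typedef struct { } my_arch_spinlock_t;        /* occupies no storage (GNU C) */

    #define my_barrier() __asm__ __volatile__("" ::: "memory")   /* compiler barrier */

    static inline void my_arch_spin_lock(my_arch_spinlock_t *lock)
    {
        (void)lock;
        my_barrier();
    }

    static inline int my_arch_spin_trylock(my_arch_spinlock_t *lock)
    {
        (void)lock;
        my_barrier();
        return 1;                                  /* always succeeds on UP */
    }

    static inline void my_arch_spin_unlock(my_arch_spinlock_t *lock)
    {
        (void)lock;
        my_barrier();
    }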
/linux/arch/hexagon/include/asm/
  spinlock.h
    115  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
    130  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
    136  static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
  spinlock_types.h
     17  } arch_spinlock_t;   [typedef]
/linux/arch/sh/include/asm/
  spinlock-cas.h
     28  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
     33  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
     38  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
  spinlock-llsc.h
     26  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
     46  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
     61  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
/linux/arch/sparc/include/asm/
  spinlock_32.h
     18  static inline void arch_spin_lock(arch_spinlock_t *lock)   [in arch_spin_lock()]
     38  static inline int arch_spin_trylock(arch_spinlock_t *lock)   [in arch_spin_trylock()]
     48  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [in arch_spin_unlock()]
  spinlock_types.h
     11  } arch_spinlock_t;   [typedef]
/linux/arch/s390/include/asm/
  spinlock_types.h
     11  } arch_spinlock_t;   [typedef]
     17  arch_spinlock_t wait;
/linux/kernel/trace/
  trace_clock.c
     88  arch_spinlock_t lock;
     91  .lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED,
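trace_clock.c uses a raw arch_spinlock_t, statically initialized with __ARCH_SPIN_LOCK_UNLOCKED, rather than a normal spinlock, because the tracing clock can be called from contexts where the higher-level spinlock machinery must not be re-entered; the lock serializes updates to a global "last timestamp" so the clock never appears to run backwards across CPUs. A user-space sketch of that serialization pattern, with a pthread spinlock standing in for arch_spinlock_t and my_ names marking everything invented for the sketch:

    /* Sketch of a trace_clock_global()-style monotonic counter: individual
     * threads' clocks may drift, so a lock-protected global value clamps every
     * reading to be >= the last one handed out. Pthread stand-in, not kernel code. */
    #include <pthread.h>
    #include <stdint.h>
    #include <time.h>

    static struct {
        pthread_spinlock_t lock;
        uint64_t prev_ns;          /* last timestamp returned */
    } trace_clock_struct;

    __attribute__((constructor))
    static void my_trace_clock_init(void)
    {
        pthread_spin_init(&trace_clock_struct.lock, PTHREAD_PROCESS_PRIVATE);
    }

    static uint64_t local_clock_ns(void)
    {
        struct timespec ts;
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000000000ull + (uint64_t)ts.tv_nsec;
    }

    uint64_t my_trace_clock_global(void)
    {
        uint64_t now = local_clock_ns();

        pthread_spin_lock(&trace_clock_struct.lock);
        /* Never let the global clock go backwards, even if this thread's view
         * of time is slightly behind another's. */
        if (now < trace_clock_struct.prev_ns)
            now = trace_clock_struct.prev_ns;
        trace_clock_struct.prev_ns = now;
        pthread_spin_unlock(&trace_clock_struct.lock);

        return now;
    }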