/linux/include/linux/

  spinlock_api_up.h
      19: #define assert_raw_spin_locked(lock) do { (void)(lock); } while (0)
      27: #define ___LOCK(lock) \
      30: #define __LOCK(lock) \
      33: #define __LOCK_BH(lock) \
      36: #define __LOCK_IRQ(lock) \
      39: #define __LOCK_IRQSAVE(lock, flags) \
      42: #define ___UNLOCK(lock) \
      45: #define __UNLOCK(lock) \
      48: #define __UNLOCK_BH(lock) \
      52: #define __UNLOCK_IRQ(lock) \
      [all …]

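On uniprocessor (UP) builds there is nothing to spin on, so these macros only manage preemption, interrupts, and sparse annotations. A from-memory sketch of how the core ones expand (see the header itself for the authoritative definitions):

    /* UP: no spinning -- annotate for sparse, silence "unused" warnings,
     * and control preemption/IRQs around the (empty) critical section. */
    #define ___LOCK(lock) \
            do { __acquire(lock); (void)(lock); } while (0)

    #define __LOCK(lock) \
            do { preempt_disable(); ___LOCK(lock); } while (0)

    #define __LOCK_IRQ(lock) \
            do { local_irq_disable(); __LOCK(lock); } while (0)

    #define __LOCK_IRQSAVE(lock, flags) \
            do { local_irq_save(flags); __LOCK(lock); } while (0)
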
  rwlock_api_smp.h
      45: #define _raw_read_lock(lock) __raw_read_lock(lock)
      49: #define _raw_write_lock(lock) __raw_write_lock(lock)
      53: #define _raw_read_lock_bh(lock) __raw_read_lock_bh(lock)
      57: #define _raw_write_lock_bh(lock) __raw_write_lock_bh(lock)
      61: #define _raw_read_lock_irq(lock) __raw_read_lock_irq(lock)
      65: #define _raw_write_lock_irq(lock) __raw_write_lock_irq(lock)
      69: #define _raw_read_lock_irqsave(lock) __raw_read_lock_irqsave(lock)
      73: #define _raw_write_lock_irqsave(lock) __raw_write_lock_irqsave(lock)
      77: #define _raw_read_trylock(lock) __raw_read_trylock(lock)
      81: #define _raw_write_trylock(lock) __raw_write_trylock(lock)
      [all …]

  spinlock.h
      104: # define raw_spin_lock_init(lock) \
      112: # define raw_spin_lock_init(lock) \
      116: #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
      119: #define raw_spin_is_contended(lock) arch_spin_is_contended(&(lock)->raw_lock)
      121: #define raw_spin_is_contended(lock) (((void)(lock), 0))
      184: static inline void do_raw_spin_lock(raw_spinlock_t *lock) __acquires(lock)
      191: static inline int do_raw_spin_trylock(raw_spinlock_t *lock)
      201: static inline void do_raw_spin_unlock(raw_spinlock_t *lock) __releases(lock)
      215: #define raw_spin_trylock(lock) __cond_lock(lock, _raw_spin_trylock(lock))
      217: #define raw_spin_lock(lock) _raw_spin_lock(lock)
      [all …]

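The raw_spin_* layer above underlies the spin_lock_*() wrappers that most kernel code calls. A minimal usage sketch, assuming an illustrative lock and counter (my_lock and my_counter are not from the source):

    #include <linux/spinlock.h>

    static DEFINE_SPINLOCK(my_lock);        /* statically initialized spinlock */
    static unsigned long my_counter;

    static void bump_counter(void)
    {
            unsigned long flags;

            /* disable local interrupts and take the lock, so the critical
             * section is safe against both other CPUs and local IRQs */
            spin_lock_irqsave(&my_lock, flags);
            my_counter++;
            spin_unlock_irqrestore(&my_lock, flags);
    }
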
  rwlock.h
      20: # define rwlock_init(lock) \
      27: # define rwlock_init(lock) \
      52: #define read_trylock(lock) __cond_lock(lock, _raw_read_trylock(lock))
      53: #define write_trylock(lock) __cond_lock(lock, _raw_write_trylock(lock))
      55: #define write_lock(lock) _raw_write_lock(lock)
      56: #define read_lock(lock) _raw_read_lock(lock)
      59: #define write_lock_nested(lock, subclass) _raw_write_lock_nested(lock, subclass)
      61: #define write_lock_nested(lock, subclass) _raw_write_lock(lock)
      66: #define read_lock_irqsave(lock, flags) \
      71: #define write_lock_irqsave(lock, flags) \
      [all …]

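A short caller-side sketch of the reader/writer API listed above (lock and data names are illustrative, not from the source):

    #include <linux/spinlock.h>

    static DEFINE_RWLOCK(my_rwlock);        /* illustrative rwlock */
    static int shared_value;

    static int read_value(void)
    {
            int v;

            read_lock(&my_rwlock);          /* multiple readers may hold this */
            v = shared_value;
            read_unlock(&my_rwlock);
            return v;
    }

    static void write_value(int v)
    {
            write_lock(&my_rwlock);         /* writers are exclusive */
            shared_value = v;
            write_unlock(&my_rwlock);
    }
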
  spinlock_api_smp.h
      47: #define _raw_spin_lock(lock) __raw_spin_lock(lock)
      51: #define _raw_spin_lock_bh(lock) __raw_spin_lock_bh(lock)
      55: #define _raw_spin_lock_irq(lock) __raw_spin_lock_irq(lock)
      59: #define _raw_spin_lock_irqsave(lock) __raw_spin_lock_irqsave(lock)
      63: #define _raw_spin_trylock(lock) __raw_spin_trylock(lock)
      67: #define _raw_spin_trylock_bh(lock) __raw_spin_trylock_bh(lock)
      71: #define _raw_spin_unlock(lock) __raw_spin_unlock(lock)
      75: #define _raw_spin_unlock_bh(lock) __raw_spin_unlock_bh(lock)
      79: #define _raw_spin_unlock_irq(lock) __raw_spin_unlock_irq(lock)
      83: #define _raw_spin_unlock_irqrestore(lock, flags) __raw_spin_unlock_irqrestore(lock, flags)
      [all …]

  spinlock_up.h
      29: static inline void arch_spin_lock(arch_spinlock_t *lock)
      35: static inline int arch_spin_trylock(arch_spinlock_t *lock)
      45: static inline void arch_spin_unlock(arch_spinlock_t *lock)
      54: #define arch_read_lock(lock) do { barrier(); (void)(lock); } while (0)
      55: #define arch_write_lock(lock) do { barrier(); (void)(lock); } while (0)
      56: #define arch_read_trylock(lock) ({ barrier(); (void)(lock); 1; })
      57: #define arch_write_trylock(lock) ({ barrier(); (void)(lock); 1; })
      58: #define arch_read_unlock(lock) do { barrier(); (void)(lock); } while (0)
      59: #define arch_write_unlock(lock) do { barrier(); (void)(lock); } while (0)
      62: #define arch_spin_is_locked(lock) ((void)(lock), 0)
      [all …]

  local_lock_internal.h
      54: #define __local_lock_init(lock) \
      65: #define __spinlock_nested_bh_init(lock) \
      76: #define __local_lock(lock) \
      82: #define __local_lock_irq(lock) \
      88: #define __local_lock_irqsave(lock, flags) \
      94: #define __local_unlock(lock) \
      100: #define __local_unlock_irq(lock) \
      106: #define __local_unlock_irqrestore(lock, flags) \
      112: #define __local_lock_nested_bh(lock) \
      118: #define __local_unlock_nested_bh(lock) \
      [all …]

  mutex.h
      48: static inline void mutex_destroy(struct mutex *lock) {}
      133: static inline int __devm_mutex_init(struct device *dev, struct mutex *lock)
      166: #define mutex_lock(lock) mutex_lock_nested(lock, 0)
      167: #define mutex_lock_interruptible(lock) mutex_lock_interruptible_nested(lock, 0)
      168: #define mutex_lock_killable(lock) mutex_lock_killable_nested(lock, 0)
      169: #define mutex_lock_io(lock) mutex_lock_io_nested(lock, 0)
      171: #define mutex_lock_nest_lock(lock, nest_lock) \
      183: # define mutex_lock_nested(lock, subclass) mutex_lock(lock)
      184: # define mutex_lock_interruptible_nested(lock, subclass) mutex_lock_interruptible(lock)
      185: # define mutex_lock_killable_nested(lock, subclass) mutex_lock_killable(lock)
      [all …]

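The mutex is a sleeping lock, so it may only be taken from process context. A hedged usage sketch of the API listed above (my_mutex, my_list, add_item are illustrative names):

    #include <linux/mutex.h>
    #include <linux/list.h>

    static DEFINE_MUTEX(my_mutex);          /* illustrative */
    static LIST_HEAD(my_list);

    static int add_item(struct list_head *item)
    {
            int ret;

            /* may sleep; returns nonzero if interrupted by a signal */
            ret = mutex_lock_interruptible(&my_mutex);
            if (ret)
                    return ret;

            list_add_tail(item, &my_list);
            mutex_unlock(&my_mutex);
            return 0;
    }
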
  local_lock.h
      10: #define local_lock_init(lock) __local_lock_init(lock)
      16: #define local_lock(lock) __local_lock(lock)
      22: #define local_lock_irq(lock) __local_lock_irq(lock)
      30: #define local_lock_irqsave(lock, flags) \
      37: #define local_unlock(lock) __local_unlock(lock)
      43: #define local_unlock_irq(lock) __local_unlock_irq(lock)
      51: #define local_unlock_irqrestore(lock, flags) \

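local_lock protects per-CPU data: on non-PREEMPT_RT kernels it compiles down to preemption (or IRQ) control, while on PREEMPT_RT it becomes a real per-CPU sleeping lock. A minimal sketch following the pattern in the kernel's locking documentation (struct and field names here are illustrative):

    #include <linux/local_lock.h>
    #include <linux/percpu.h>

    struct my_pcpu_data {
            local_lock_t    lock;           /* guards the fields below */
            unsigned long   count;
    };

    static DEFINE_PER_CPU(struct my_pcpu_data, my_pcpu_data) = {
            .lock = INIT_LOCAL_LOCK(lock),
    };

    static void bump_this_cpu(void)
    {
            /* !PREEMPT_RT: disables preemption; PREEMPT_RT: per-CPU spinlock */
            local_lock(&my_pcpu_data.lock);
            this_cpu_inc(my_pcpu_data.count);
            local_unlock(&my_pcpu_data.lock);
    }
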
/linux/kernel/locking/

  spinlock_debug.c
      17: void __raw_spin_lock_init(raw_spinlock_t *lock, const char *name,
      36: void __rwlock_init(rwlock_t *lock, const char *name,
      55: static void spin_dump(raw_spinlock_t *lock, const char *msg)
      73: static void spin_bug(raw_spinlock_t *lock, const char *msg)
      81: #define SPIN_BUG_ON(cond, lock, msg) if (unlikely(cond)) spin_bug(lock, msg)
      84: debug_spin_lock_before(raw_spinlock_t *lock)
      92: static inline void debug_spin_lock_after(raw_spinlock_t *lock)
      98: static inline void debug_spin_unlock(raw_spinlock_t *lock)
      113: void do_raw_spin_lock(raw_spinlock_t *lock)
      121: int do_raw_spin_unlock... [all …]

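With CONFIG_DEBUG_SPINLOCK, these hooks sanity-check the lock before and after each raw operation. The pre-acquire check looks roughly like this (a from-memory sketch, not a verbatim copy of spinlock_debug.c):

    static inline void debug_spin_lock_before(raw_spinlock_t *lock)
    {
            /* magic catches uninitialized or corrupted locks */
            SPIN_BUG_ON(READ_ONCE(lock->magic) != SPINLOCK_MAGIC, lock, "bad magic");
            /* owner/owner_cpu catch self-recursion on the same lock */
            SPIN_BUG_ON(READ_ONCE(lock->owner) == current, lock, "recursion");
            SPIN_BUG_ON(READ_ONCE(lock->owner_cpu) == raw_smp_processor_id(),
                        lock, "cpu recursion");
    }
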
  rtmutex_api.c
      22: static __always_inline int __rt_mutex_lock_common(struct rt_mutex *lock,
      50: void __sched rt_mutex_lock_nested(struct rt_mutex *lock, unsigned int subclass)
      56: void __sched _rt_mutex_lock_nest_lock(struct rt_mutex *lock, struct lockdep_map *nest_lock)
      69: void __sched rt_mutex_lock(struct rt_mutex *lock)
      85: int __sched rt_mutex_lock_interruptible(struct rt_mutex *lock)
      100: int __sched rt_mutex_lock_killable(struct rt_mutex *lock)
      118: int __sched rt_mutex_trylock(struct rt_mutex *lock)
      138: void __sched rt_mutex_unlock(struct rt_mutex *lock)
      148: int __sched rt_mutex_futex_trylock(struct rt_mutex_base *lock)
      153: int __sched __rt_mutex_futex_trylock(struct rt_mutex_base *lock)
      [all …]

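These entry points behave much like mutex_lock()/mutex_unlock() but add priority inheritance. A hedged caller-side sketch (my_rtm and do_work_killable are illustrative, not from the source):

    #include <linux/rtmutex.h>

    static DEFINE_RT_MUTEX(my_rtm);         /* illustrative rt_mutex */

    static int do_work_killable(void)
    {
            int ret;

            /* sleeps; only a fatal signal (SIGKILL) aborts the wait */
            ret = rt_mutex_lock_killable(&my_rtm);
            if (ret)
                    return ret;

            /* ... critical section; owner may be priority-boosted ... */

            rt_mutex_unlock(&my_rtm);
            return 0;
    }
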
  ww_mutex.h
      9: __ww_waiter_first(struct mutex *lock)
      21: __ww_waiter_next(struct mutex *lock, struct mutex_waiter *w)
      31: __ww_waiter_prev(struct mutex *lock, struct mutex_waiter *w)
      41: __ww_waiter_last(struct mutex *lock)
      53: __ww_waiter_add(struct mutex *lock, struct mutex_waiter *waiter, struct mutex_waiter *pos)
      62: __ww_mutex_owner(struct mutex *lock)
      68: __ww_mutex_has_waiters(struct mutex *lock)
      73: static inline void lock_wait_lock(struct mutex *lock, unsigned long *flags)
      78: static inline void unlock_wait_lock(struct mutex *lock, unsigned long *flags)
      83: static inline void lockdep_assert_wait_lock_held(struct mutex *lock)
      [all …]

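These are internal waiter-list helpers behind the wound/wait mutex implementation; callers use the public ww_mutex API instead. A hedged sketch of the usual two-object acquire pattern with -EDEADLK backoff (class, struct, and function names are illustrative):

    #include <linux/ww_mutex.h>

    static DEFINE_WW_CLASS(my_ww_class);

    struct my_obj {
            struct ww_mutex lock;
            /* ... payload ... */
    };

    /* take both objects' locks in either order, backing off on deadlock */
    static int lock_pair(struct my_obj *first, struct my_obj *second)
    {
            struct ww_acquire_ctx ctx;
            int ret;

            ww_acquire_init(&ctx, &my_ww_class);

            ret = ww_mutex_lock(&first->lock, &ctx);
            if (ret)
                    goto out_fini;

            while ((ret = ww_mutex_lock(&second->lock, &ctx)) == -EDEADLK) {
                    /* back off: drop what we hold, sleep until the contended
                     * lock is free and take it, then retry the other one */
                    ww_mutex_unlock(&first->lock);
                    ww_mutex_lock_slow(&second->lock, &ctx);
                    swap(first, second);
            }
            if (ret) {
                    ww_mutex_unlock(&first->lock);
                    goto out_fini;
            }

            ww_acquire_done(&ctx);
            return 0;       /* caller unlocks both, then ww_acquire_fini(&ctx) */

    out_fini:
            ww_acquire_fini(&ctx);
            return ret;
    }
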
  rtmutex.c
      36: struct rt_mutex *lock, (in __ww_mutex_add_waiter)
      43: static inline void __ww_mutex_check_waiters(struct rt_mutex *lock,
      49: static inline void ww_mutex_lock_acquired(struct ww_mutex *lock,
      54: static inline int __ww_mutex_check_kill(struct rt_mutex *lock,
      95: rt_mutex_owner_encode(struct rt_mutex_base *lock, struct task_struct *owner)
      106: rt_mutex_set_owner(struct rt_mutex_base *lock, struct task_struct *owner)
      115: static __always_inline void rt_mutex_clear_owner(struct rt_mutex_base *lock)
      121: static __always_inline void clear_rt_mutex_waiters(struct rt_mutex_base *lock)
      128: fixup_rt_mutex_waiters(struct rt_mutex_base *lock, bool acquire_lock)
      216: static __always_inline bool rt_mutex_cmpxchg_acquire(struct rt_mutex_base *lock,
      [all …]

  qspinlock.c
      149: static __always_inline void clear_pending(struct qspinlock *lock)
      162: static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
      177: static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
      195: static __always_inline void clear_pending(struct qspinlock *lock)
      206: static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
      221: static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
      247: static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
      259: static __always_inline void set_locked(struct qspinlock *lock)
      273: static __always_inline void __pv_kick_node(struct qspinlock *lock,
      275: static __always_inline u32 __pv_wait_head_or_lock(struct qspinlock *lock,
      [all …]

  mutex-debug.c
      28: debug_mutex_lock_common(struct mutex *lock, struct mutex_waiter *waiter)
      36: debug_mutex_wake_waiter(struct mutex *lock, struct mutex_waiter *waiter)
      50: debug_mutex_add_waiter(struct mutex *lock, struct mutex_waiter *waiter, struct task_struct *task)
      59: debug_mutex_remove_waiter(struct mutex *lock, struct mutex_waiter *waiter, struct task_struct *task)
      71: debug_mutex_unlock(struct mutex *lock)
      79: debug_mutex_init(struct mutex *lock, const char *name, struct lock_class_key *key)
      100: mutex_destroy(struct mutex *lock)
      [all …]

  qspinlock_paravirt.h
      81: static inline bool pv_hybrid_queued_unfair_trylock(struct qspinlock *lock)
      110: static __always_inline void set_pending(struct qspinlock *lock)
      120: static __always_inline bool trylock_clear_pending(struct qspinlock *lock)
      128: static __always_inline void set_pending(struct qspinlock *lock)
      133: static __always_inline bool trylock_clear_pending(struct qspinlock *lock)
      168: struct qspinlock *lock; (struct member)
      208: static struct qspinlock **pv_hash(struct qspinlock *lock, struct pv_node *node)
      236: static struct pv_node *pv_unhash(struct qspinlock *lock)
      357: static void pv_kick_node(struct qspinlock *lock, struct mcs_spinlock *node)
      399: pv_wait_head_or_lock(struct qspinlock *lock, struct mcs_spinlock *node)
      [all …]

/linux/fs/bcachefs/

  six.c
      72: static inline void six_set_bitmask(struct six_lock *lock, u32 mask)
      78: static inline void six_clear_bitmask(struct six_lock *lock, u32 mask)
      84: static inline void six_set_owner(struct six_lock *lock, enum six_lock_type type,
      98: static inline unsigned pcpu_read_count(struct six_lock *lock)
      117: static int __do_six_trylock(struct six_lock *lock, enum six_lock_type type,
      212: static void __six_lock_wakeup(struct six_lock *lock, enum six_lock_type lock_type)
      265: static void six_lock_wakeup(struct six_lock *lock, u32 state,
      278: static bool do_six_trylock(struct six_lock *lock, enum six_lock_type type, bool try)
      297: bool six_trylock_ip(struct six_lock *lock, enum six_lock_type type, unsigned long ip)
      318: bool six_relock_ip(struct six_lock *lock, enum six_lock_type type,
      [all …]

/linux/arch/powerpc/include/asm/

  simple_spinlock.h
      35: static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)
      40: static inline int arch_spin_is_locked(arch_spinlock_t *lock)
      49: static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock)
      70: static inline int arch_spin_trylock(arch_spinlock_t *lock)
      94: static inline void splpar_spin_yield(arch_spinlock_t *lock) {}
      95: static inline void splpar_rw_yield(arch_rwlock_t *lock) {}
      98: static inline void spin_yield(arch_spinlock_t *lock)
      106: static inline void rw_yield(arch_rwlock_t *lock)
      114: static inline void arch_spin_lock(arch_spinlock_t *lock)
      128: static inline void arch_spin_unlock(arch_spinlock_t *lock)
      [all …]

/linux/arch/alpha/include/asm/

  spinlock.h
      19: static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
      24: static inline void arch_spin_unlock(arch_spinlock_t *lock)
      30: static inline void arch_spin_lock(arch_spinlock_t *lock)
      50: static inline int arch_spin_trylock(arch_spinlock_t *lock)
      57: static inline void arch_read_lock(arch_rwlock_t *lock)
      77: static inline void arch_write_lock(arch_rwlock_t *lock)
      97: static inline int arch_read_trylock(arch_rwlock_t *lock)
      119: static inline int arch_write_trylock(arch_rwlock_t *lock)
      141: static inline void arch_read_unlock(arch_rwlock_t *lock)
      157: static inline void arch_write_unlock(arch_rwlock_t *lock)

/linux/arch/hexagon/include/asm/

  spinlock.h
      28: static inline void arch_read_lock(arch_rwlock_t *lock)
      43: static inline void arch_read_unlock(arch_rwlock_t *lock)
      58: static inline int arch_read_trylock(arch_rwlock_t *lock)
      76: static inline void arch_write_lock(arch_rwlock_t *lock)
      91: static inline int arch_write_trylock(arch_rwlock_t *lock)
      109: static inline void arch_write_unlock(arch_rwlock_t *lock)
      115: static inline void arch_spin_lock(arch_spinlock_t *lock)
      130: static inline void arch_spin_unlock(arch_spinlock_t *lock)
      136: static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)

/linux/tools/virtio/linux/

  spinlock.h
      8: static inline void spin_lock_init(spinlock_t *lock)
      14: static inline void spin_lock(spinlock_t *lock)
      20: static inline void spin_unlock(spinlock_t *lock)
      26: static inline void spin_lock_bh(spinlock_t *lock)
      31: static inline void spin_unlock_bh(spinlock_t *lock)
      36: static inline void spin_lock_irq(spinlock_t *lock)
      41: static inline void spin_unlock_irq(spinlock_t *lock)
      46: static inline void spin_lock_irqsave(spinlock_t *lock, unsigned long f)
      51: static inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long f)

/linux/arch/sparc/include/asm/

  spinlock_32.h
      16: #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)
      18: static inline void arch_spin_lock(arch_spinlock_t *lock)
      38: static inline int arch_spin_trylock(arch_spinlock_t *lock)
      48: static inline void arch_spin_unlock(arch_spinlock_t *lock)
      92: #define arch_read_lock(lock) \
      112: #define arch_read_unlock(lock) \
      133: static inline void arch_write_unlock(arch_rwlock_t *lock)
      177: #define arch_read_trylock(lock) \

/linux/fs/ocfs2/dlm/

  dlmast.c
      47: static int dlm_should_cancel_bast(struct dlm_ctxt *dlm, struct dlm_lock *lock)
      74: void __dlm_queue_ast(struct dlm_ctxt *dlm, struct dlm_lock *lock)
      129: void dlm_queue_ast(struct dlm_ctxt *dlm, struct dlm_lock *lock)
      140: void __dlm_queue_bast(struct dlm_ctxt *dlm, struct dlm_lock *lock)
      167: struct dlm_lock *lock) (in dlm_update_lvb)
      197: struct dlm_lock *lock) (in dlm_do_local_ast)
      215: struct dlm_lock *lock) (in dlm_do_remote_ast)
      239: struct dlm_lock *lock, int blocked_type) (in dlm_do_local_bast)
      263: struct dlm_lock *lock = NULL; (in dlm_proxy_ast_handler)
      422: struct dlm_lock *lock, int msg_type, (in dlm_send_proxy_ast_msg)

/linux/include/asm-generic/

  qspinlock.h
      51: static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
      71: static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
      81: static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
      90: static __always_inline int queued_spin_trylock(struct qspinlock *lock)
      107: static __always_inline void queued_spin_lock(struct qspinlock *lock)
      123: static __always_inline void queued_spin_unlock(struct qspinlock *lock)
      133: static __always_inline bool virt_spin_lock(struct qspinlock *lock)

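This generic header supplies the lock-word fast paths; only contended acquisitions fall through to the MCS queue in kernel/locking/qspinlock.c. Roughly, the uncontended paths look like this (a from-memory sketch; the header is authoritative):

    static __always_inline int queued_spin_trylock(struct qspinlock *lock)
    {
            int val = atomic_read(&lock->val);

            if (unlikely(val))              /* locked, pending, or queued */
                    return 0;

            /* 0 -> _Q_LOCKED_VAL: uncontended acquire */
            return likely(atomic_try_cmpxchg_acquire(&lock->val, &val,
                                                     _Q_LOCKED_VAL));
    }

    static __always_inline void queued_spin_lock(struct qspinlock *lock)
    {
            int val = 0;

            if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)))
                    return;

            queued_spin_lock_slowpath(lock, val);   /* pending bit / MCS queue */
    }
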
/linux/drivers/acpi/acpica/

  utlock.c
      28: acpi_status acpi_ut_create_rw_lock(struct acpi_rw_lock *lock)
      42: void acpi_ut_delete_rw_lock(struct acpi_rw_lock *lock)
      71: acpi_status acpi_ut_acquire_read_lock(struct acpi_rw_lock *lock)
      93: acpi_status acpi_ut_release_read_lock(struct acpi_rw_lock *lock)
      129: acpi_status acpi_ut_acquire_write_lock(struct acpi_rw_lock *lock)
      137: void acpi_ut_release_write_lock(struct acpi_rw_lock *lock)

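ACPICA builds its reader/writer lock from two OS mutexes plus a reader count: the first reader takes the writer mutex and the last reader releases it. An outline of the read-acquire side in that classic pattern (a hedged sketch with simplified error handling and assumed field names, not the verbatim ACPICA source):

    acpi_status acpi_ut_acquire_read_lock(struct acpi_rw_lock *lock)
    {
            acpi_status status;

            /* serialize updates to the reader count */
            status = acpi_os_acquire_mutex(lock->reader_mutex, ACPI_WAIT_FOREVER);
            if (ACPI_FAILURE(status))
                    return (status);

            /* the first reader locks out writers for the whole read side */
            lock->num_readers++;
            if (lock->num_readers == 1) {
                    status = acpi_os_acquire_mutex(lock->writer_mutex,
                                                   ACPI_WAIT_FOREVER);
            }

            acpi_os_release_mutex(lock->reader_mutex);
            return (status);
    }
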