/linux/lib/atomic64.c
     54  arch_spin_unlock(lock);   in generic_atomic64_read()
     68  arch_spin_unlock(lock);   in generic_atomic64_set()
     82  arch_spin_unlock(lock); \
     97  arch_spin_unlock(lock); \
    114  arch_spin_unlock(lock); \
    152  arch_spin_unlock(lock);   in generic_atomic64_dec_if_positive()
    169  arch_spin_unlock(lock);   in generic_atomic64_cmpxchg()
    185  arch_spin_unlock(lock);   in generic_atomic64_xchg()
    202  arch_spin_unlock(lock);   in generic_atomic64_fetch_add_unless()

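lib/atomic64.c provides 64-bit atomics for 32-bit architectures with no native 64-bit atomic instructions: each operation takes a spinlock from a small pool hashed on the atomic's address, and every hit above is the unlock ending one such critical section. A minimal userspace model of the technique, assuming an illustrative pool size and hash rather than the kernel's exact choices:

    #include <pthread.h>
    #include <stdint.h>

    #define NR_LOCKS 16  /* illustrative; not necessarily the kernel's value */

    typedef struct { int64_t counter; } atomic64_t;

    static pthread_mutex_t pool[NR_LOCKS] = {
        [0 ... NR_LOCKS - 1] = PTHREAD_MUTEX_INITIALIZER  /* GNU range init */
    };

    /* Hash the atomic's address to one pool lock so unrelated
     * atomics rarely contend on the same entry. */
    static pthread_mutex_t *lock_addr(atomic64_t *v)
    {
        return &pool[((uintptr_t)v / sizeof(*v)) % NR_LOCKS];
    }

    int64_t generic_atomic64_read(atomic64_t *v)
    {
        pthread_mutex_t *lock = lock_addr(v);
        int64_t val;

        pthread_mutex_lock(lock);
        val = v->counter;           /* a plain 64-bit load is not atomic */
        pthread_mutex_unlock(lock); /* on 32-bit CPUs, hence the lock    */
        return val;
    }

    int64_t generic_atomic64_add_return(int64_t a, atomic64_t *v)
    {
        pthread_mutex_t *lock = lock_addr(v);
        int64_t val;

        pthread_mutex_lock(lock);
        val = v->counter += a;
        pthread_mutex_unlock(lock);
        return val;
    }
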
/linux/arch/parisc/include/asm/spinlock.h
     49  static inline void arch_spin_unlock(arch_spinlock_t *x)   (function definition)
     96  arch_spin_unlock(&(rw->lock_mutex));   in arch_read_trylock()
    121  arch_spin_unlock(&(rw->lock_mutex));   in arch_write_trylock()
    146  arch_spin_unlock(&(rw->lock_mutex));   in arch_read_unlock()
    157  arch_spin_unlock(&(rw->lock_mutex));   in arch_write_unlock()

/linux/arch/arc/include/asm/spinlock.h
     67  static inline void arch_spin_unlock(arch_spinlock_t *lock)   (function definition)
    261  static inline void arch_spin_unlock(arch_spinlock_t *lock)   (function definition)
    315  arch_spin_unlock(&(rw->lock_mutex));   in arch_read_trylock()
    340  arch_spin_unlock(&(rw->lock_mutex));   in arch_write_trylock()
    365  arch_spin_unlock(&(rw->lock_mutex));   in arch_read_unlock()
    376  arch_spin_unlock(&(rw->lock_mutex));   in arch_write_unlock()

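Both the parisc and ARC headers above build their reader-writer locks on top of a plain spinlock: an inner lock_mutex guards a counter recording the readers/writer state, and the two arch_spin_unlock() definitions in the ARC header are alternative lock implementations selected at build time. A sketch of the trylock shape, using pthread spinlocks and illustrative names in place of the arch primitives:

    #include <pthread.h>

    /* 'counter' tracks lock state: 0 free, >0 reader count, -1 writer.
     * Names mirror the lock_mutex pattern above but are illustrative. */
    typedef struct {
        pthread_spinlock_t lock_mutex;   /* guards 'counter' */
        int counter;
    } sketch_rwlock_t;

    static void sketch_rwlock_init(sketch_rwlock_t *rw)
    {
        pthread_spin_init(&rw->lock_mutex, PTHREAD_PROCESS_PRIVATE);
        rw->counter = 0;
    }

    static int sketch_read_trylock(sketch_rwlock_t *rw)
    {
        int got = 0;

        pthread_spin_lock(&rw->lock_mutex);
        if (rw->counter >= 0) {          /* no writer holds the lock */
            rw->counter++;
            got = 1;
        }
        pthread_spin_unlock(&rw->lock_mutex);
        return got;
    }

    static int sketch_write_trylock(sketch_rwlock_t *rw)
    {
        int got = 0;

        pthread_spin_lock(&rw->lock_mutex);
        if (rw->counter == 0) {          /* no readers and no writer */
            rw->counter = -1;
            got = 1;
        }
        pthread_spin_unlock(&rw->lock_mutex);
        return got;
    }
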
/linux/arch/arc/include/asm/smp.h
    117  arch_spin_unlock(&smp_atomic_ops_lock); \

/linux/arch/arm/common/mcpm_entry.c
    232  arch_spin_unlock(&mcpm_lock);   in mcpm_cpu_power_up()
    268  arch_spin_unlock(&mcpm_lock);   in mcpm_cpu_power_down()
    274  arch_spin_unlock(&mcpm_lock);   in mcpm_cpu_power_down()
    335  arch_spin_unlock(&mcpm_lock);   in mcpm_cpu_suspend()
    365  arch_spin_unlock(&mcpm_lock);   in mcpm_cpu_powered_up()

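The MCPM power-management paths take mcpm_lock as a raw arch_spinlock_t because they run while a CPU's cache coherency is being switched on or off, where the ordinary spinlock wrappers (preemption accounting, lockdep) cannot be relied on. Since the raw primitives touch neither preemption nor IRQ state, the caller masks interrupts itself; a kernel-context sketch of that recurring shape, not a drop-in replacement:

    /* mcpm_power_op_sketch() is a hypothetical name illustrating the
     * locking pattern around the mcpm_lock hits above. */
    static arch_spinlock_t mcpm_lock = __ARCH_SPIN_LOCK_UNLOCKED;

    void mcpm_power_op_sketch(unsigned int cpu, unsigned int cluster)
    {
        unsigned long flags;

        local_irq_save(flags);        /* no IRQs while coherency changes */
        arch_spin_lock(&mcpm_lock);

        /* ... manipulate per-CPU/cluster power state here ... */

        arch_spin_unlock(&mcpm_lock);
        local_irq_restore(flags);
    }
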
/linux/include/linux/spinlock_up.h
     45  static inline void arch_spin_unlock(arch_spinlock_t *lock)   (function definition)
     65  # define arch_spin_unlock(lock) do { barrier(); (void)(lock); } while (0)   (macro definition)

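On uniprocessor builds there is no other CPU to exclude, so spinlock_up.h reduces arch_spin_unlock() to a compiler barrier; the header picks the function form (which still tracks the lock word, under CONFIG_DEBUG_SPINLOCK) or the no-op macro form seen at line 65. A plain-C model of both, with the macro variant renamed since the real header defines only one of them:

    #define barrier() __asm__ __volatile__("" ::: "memory")

    typedef struct { volatile unsigned int slock; } arch_spinlock_t;

    /* Debug-build variant: actually marks the lock released. */
    static inline void arch_spin_unlock(arch_spinlock_t *lock)
    {
        barrier();
        lock->slock = 1;                 /* 1 == unlocked */
    }

    /* Non-debug variant (the line-65 macro): evaluates its argument to
     * silence unused-variable warnings, emits only the barrier. */
    #define arch_spin_unlock_nodebug(lock) \
        do { barrier(); (void)(lock); } while (0)
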
/linux/kernel/locking/qrwlock.c
     56  arch_spin_unlock(&lock->wait_lock);   in queued_read_lock_slowpath()
     88  arch_spin_unlock(&lock->wait_lock);   in queued_write_lock_slowpath()

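The queued rwlock slowpaths serialize contending CPUs on an internal wait_lock, itself a raw arch spinlock: a waiter queues on it, and once at the head of the queue acquires the shared count word before releasing wait_lock to the next contender. A simplified sketch of the read side; the real code also has an interrupt-context fast path and uses atomic_cond_read_acquire():

    /* Kernel-context pseudocode built on the real qrwlock fields and
     * constants (wait_lock, cnts, _QR_BIAS, _QW_LOCKED). */
    void queued_read_lock_slowpath_sketch(struct qrwlock *lock)
    {
        /* Join the queue of contenders, strictly in order. */
        arch_spin_lock(&lock->wait_lock);

        /* At the head of the queue: announce one more reader, then
         * wait until no writer holds the lock. */
        atomic_add(_QR_BIAS, &lock->cnts);
        while (atomic_read(&lock->cnts) & _QW_LOCKED)
            cpu_relax();

        /* Line 56 above: hand the queue head to the next contender. */
        arch_spin_unlock(&lock->wait_lock);
    }
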
/linux/kernel/trace/trace_sched_switch.c
    277  arch_spin_unlock(&trace_cmdline_lock);   in trace_save_cmdline()
    316  arch_spin_unlock(&trace_cmdline_lock);   in trace_find_cmdline()
    558  arch_spin_unlock(&trace_cmdline_lock);   in saved_cmdlines_stop()
    607  arch_spin_unlock(&trace_cmdline_lock);   in tracing_saved_cmdlines_size_read()
    630  arch_spin_unlock(&trace_cmdline_lock);   in tracing_resize_saved_cmdlines()

/linux/kernel/trace/trace_stack.c
    282  arch_spin_unlock(&stack_trace_max_lock);   in check_stack()
    360  arch_spin_unlock(&stack_trace_max_lock);   in stack_max_size_write()
    410  arch_spin_unlock(&stack_trace_max_lock);   in t_stop()

/linux/kernel/trace/trace_clock.c
    139  arch_spin_unlock(&trace_clock_struct.lock);   in trace_clock_global()

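The tracing code uses raw arch spinlocks because it can be invoked from contexts where the ordinary lockdep-instrumented locks would recurse. trace_clock_global() in particular must hand out timestamps that never go backwards across CPUs even when per-CPU clocks drift, so it clamps each reading against the last value issued under trace_clock_struct.lock. A userspace model with C11 atomics standing in for the arch spinlock; the kernel version also special-cases NMI context:

    #include <stdatomic.h>
    #include <stdint.h>

    static struct {
        atomic_flag lock;      /* stands in for the arch spinlock */
        uint64_t prev_time;
    } trace_clock_struct = { .lock = ATOMIC_FLAG_INIT };

    uint64_t trace_clock_global_sketch(uint64_t local_clock_now)
    {
        uint64_t now = local_clock_now;

        while (atomic_flag_test_and_set_explicit(&trace_clock_struct.lock,
                                                 memory_order_acquire))
            ;                                  /* spin: arch_spin_lock() */

        /* Never hand out a timestamp older than the last one issued. */
        if (now < trace_clock_struct.prev_time)
            now = trace_clock_struct.prev_time;
        trace_clock_struct.prev_time = now;

        atomic_flag_clear_explicit(&trace_clock_struct.lock,
                                   memory_order_release); /* unlock */
        return now;
    }
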
/linux/arch/mips/kernel/sync-r4k.c
     66  arch_spin_unlock(&sync_lock);   in check_counter_warp()
     96  arch_spin_unlock(&sync_lock);   in check_counter_warp()

/linux/kernel/kcsan/selftest.c
    166  KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));   in test_barrier()
    195  KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));   in test_barrier()
    227  KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));   in test_barrier()

/linux/arch/x86/mm/kmmio.c
    356  arch_spin_unlock(&kmmio_lock);   in post_kmmio_handler()
    467  arch_spin_unlock(&kmmio_lock);   in register_kmmio_probe()
    516  arch_spin_unlock(&kmmio_lock);   in remove_kmmio_fault_pages()
    559  arch_spin_unlock(&kmmio_lock);   in unregister_kmmio_probe()

/linux/arch/powerpc/kvm/book3s_xive.c
    575  arch_spin_unlock(&sb->lock);   in xive_vm_h_eoi()
    583  arch_spin_unlock(&sb->lock);   in xive_vm_h_eoi()
   1105  arch_spin_unlock(&sb->lock);   in xive_lock_and_mask()
   1139  arch_spin_unlock(&sb->lock);   in xive_lock_for_unmask()
   1365  arch_spin_unlock(&sb->lock);   in kvmppc_xive_set_xive()
   1387  arch_spin_unlock(&sb->lock);   in kvmppc_xive_get_xive()
   1426  arch_spin_unlock(&sb->lock);   in kvmppc_xive_int_on()
   1452  arch_spin_unlock(&sb->lock);   in kvmppc_xive_int_off()
   1643  arch_spin_unlock(&sb->lock);   in kvmppc_xive_set_mapped()
   1723  arch_spin_unlock(&sb->lock);   in kvmppc_xive_clr_mapped()
    [all …]

/linux/arch/powerpc/kvm/book3s_xics.c
    160  arch_spin_unlock(&ics->lock);   in write_xive()
    216  arch_spin_unlock(&ics->lock);   in kvmppc_xics_get_xive()
    473  arch_spin_unlock(&ics->lock);   in icp_deliver_irq()
    502  arch_spin_unlock(&ics->lock);   in icp_deliver_irq()
    509  arch_spin_unlock(&ics->lock);   in icp_deliver_irq()
   1009  arch_spin_unlock(&ics->lock);   in xics_debug_show()
   1217  arch_spin_unlock(&ics->lock);   in xics_get_source()
   1275  arch_spin_unlock(&ics->lock);   in xics_set_source()

/linux/arch/x86/kernel/tsc_sync.c
    286  arch_spin_unlock(&sync_lock);   in check_tsc_warp()
    316  arch_spin_unlock(&sync_lock);   in check_tsc_warp()

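tsc_sync.c and the MIPS sync-r4k.c entry above share the same idea: two CPUs ping-pong a raw spinlock so that their counter reads are totally ordered, and if a read on one CPU comes out smaller than the last value recorded by the other, the counters have "warped" and are not usable as a global clock. A kernel-context sketch of that loop, with the loop count and helper names abbreviated:

    /* check_warp_sketch() is illustrative; the real functions are
     * check_tsc_warp() / check_counter_warp(). */
    static arch_spinlock_t sync_lock = __ARCH_SPIN_LOCK_UNLOCKED;
    static u64 last_counter;
    static s64 max_warp;

    static void check_warp_sketch(void)
    {
        for (int i = 0; i < 100000; i++) {
            arch_spin_lock(&sync_lock);       /* total order across CPUs */
            u64 now = rdtsc_ordered();        /* serialized counter read */
            s64 delta = now - last_counter;   /* vs. other CPU's read    */
            last_counter = now;
            if (delta < 0)                    /* time went backwards */
                max_warp = max(max_warp, -delta);
            arch_spin_unlock(&sync_lock);
        }
    }
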
/linux/tools/include/linux/spinlock.h
     31  static inline void arch_spin_unlock(arch_spinlock_t *mutex)   (function definition)

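The tools/ copy is a userspace shim so kernel code can be compiled and unit-tested outside the kernel: arch_spinlock_t is backed by a pthread mutex (the parameter is even named mutex in the hit above) and arch_spin_unlock() simply unlocks it. A sketch of that shape, with error handling hedged as an assumption:

    #include <pthread.h>
    #include <assert.h>

    typedef pthread_mutex_t arch_spinlock_t;

    #define __ARCH_SPIN_LOCK_UNLOCKED PTHREAD_MUTEX_INITIALIZER

    static inline void arch_spin_lock(arch_spinlock_t *mutex)
    {
        assert(pthread_mutex_lock(mutex) == 0);
    }

    static inline void arch_spin_unlock(arch_spinlock_t *mutex)
    {
        assert(pthread_mutex_unlock(mutex) == 0);
    }
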
/linux/arch/hexagon/include/asm/spinlock.h
    130  static inline void arch_spin_unlock(arch_spinlock_t *lock)   (function definition)

/linux/arch/alpha/include/asm/spinlock.h
     24  static inline void arch_spin_unlock(arch_spinlock_t * lock)   (function definition)

/linux/arch/sh/include/asm/spinlock-cas.h
     33  static inline void arch_spin_unlock(arch_spinlock_t *lock)   (function definition)

/linux/arch/sh/include/asm/spinlock-llsc.h
     46  static inline void arch_spin_unlock(arch_spinlock_t *lock)   (function definition)

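The per-architecture headers in this stretch (hexagon, alpha, sh, and sparc32 at the end of the listing) implement simple locks in assembly, and in each of them the unlock amounts to a single release store of the "free" value to the lock word. A C11 model of that classic test-and-set shape:

    #include <stdatomic.h>

    typedef struct { atomic_uint lock; } arch_spinlock_t;  /* 0 == free */

    static inline void arch_spin_lock(arch_spinlock_t *x)
    {
        /* Spin until we are the one to flip 0 -> 1 (acquire). */
        while (atomic_exchange_explicit(&x->lock, 1, memory_order_acquire))
            ;
    }

    static inline void arch_spin_unlock(arch_spinlock_t *x)
    {
        /* Release store: prior critical-section writes become visible
         * to the next acquirer before the lock reads as free. */
        atomic_store_explicit(&x->lock, 0, memory_order_release);
    }
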
/linux/include/asm-generic/qspinlock.h
    149  #define arch_spin_unlock(l) queued_spin_unlock(l)   (macro definition)

/linux/include/asm-generic/ticket_spinlock.h
    102  #define arch_spin_unlock(l) ticket_spin_unlock(l)   (macro definition)

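The asm-generic headers let an architecture adopt a generic lock instead of writing its own: arch_spin_unlock() is simply aliased to queued_spin_unlock() or ticket_spin_unlock(). For the ticket case, unlock means bumping the "now serving" half of the lock word with a release store; a C11 sketch, noting that the kernel packs both halves into one 32-bit word whose layout depends on endianness:

    #include <stdatomic.h>
    #include <stdint.h>

    typedef struct {
        atomic_ushort next;    /* ticket dispenser */
        atomic_ushort owner;   /* now serving */
    } arch_spinlock_t;

    static inline void arch_spin_lock(arch_spinlock_t *l)
    {
        uint16_t t = atomic_fetch_add_explicit(&l->next, 1,
                                               memory_order_relaxed);
        while (atomic_load_explicit(&l->owner, memory_order_acquire) != t)
            ;                              /* wait for our ticket */
    }

    static inline void arch_spin_unlock(arch_spinlock_t *l)
    {
        /* Hand the lock to the next ticket holder. */
        uint16_t o = atomic_load_explicit(&l->owner, memory_order_relaxed);
        atomic_store_explicit(&l->owner, o + 1, memory_order_release);
    }
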
/linux/arch/powerpc/platforms/pasemi/setup.c
    114  arch_spin_unlock(&timebase_lock);   in pas_give_timebase()
    130  arch_spin_unlock(&timebase_lock);   in pas_take_timebase()

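When pasemi brings up a secondary CPU, the boot CPU freezes its timebase and publishes the value; the secondary copies it and signals back, with timebase_lock ordering the two critical sections. A kernel-context sketch of the handshake, in which freeze_timebase()/unfreeze_timebase() are hypothetical stand-ins for the platform's register pokes:

    static arch_spinlock_t timebase_lock = __ARCH_SPIN_LOCK_UNLOCKED;
    static unsigned long timebase;

    static void pas_give_timebase_sketch(void)
    {
        arch_spin_lock(&timebase_lock);
        freeze_timebase();            /* hypothetical helper */
        timebase = get_tb();          /* publish the frozen value */
        arch_spin_unlock(&timebase_lock);

        while (timebase)              /* wait for the taker to consume it */
            barrier();
        unfreeze_timebase();          /* hypothetical helper */
    }

    static void pas_take_timebase_sketch(void)
    {
        while (!timebase)             /* wait for the giver to publish */
            barrier();

        arch_spin_lock(&timebase_lock);
        set_tb(timebase);             /* adopt the giver's timebase */
        timebase = 0;                 /* signal the giver we're done */
        arch_spin_unlock(&timebase_lock);
    }
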
/linux/arch/sparc/include/asm/spinlock_32.h
     48  static inline void arch_spin_unlock(arch_spinlock_t *lock)   (function definition)