xref: /linux/arch/riscv/include/asm/spinlock.h (revision 7f71507851fc7764b36a3221839607d3a45c2025)
/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_RISCV_SPINLOCK_H
#define __ASM_RISCV_SPINLOCK_H

#ifdef CONFIG_RISCV_COMBO_SPINLOCKS
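/*
 * Bound the number of lock-word reads the qspinlock slowpath spends waiting
 * for a pending->locked hand-over before queueing; same value as x86.
 */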
#define _Q_PENDING_LOOPS	(1 << 9)

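/*
 * Pull in both implementations. __no_arch_spinlock_redefine keeps
 * ticket_spinlock.h and qspinlock.h from mapping the arch_spin_*() names to
 * their own helpers, so the combo wrappers below can do the dispatch instead;
 * asm/jump_label.h provides the static key used for that dispatch.
 */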
#define __no_arch_spinlock_redefine
#include <asm/ticket_spinlock.h>
#include <asm/qspinlock.h>
#include <asm/jump_label.h>

/*
 * TODO: Use an alternative instead of a static key when we are able to parse
 * the extensions string earlier in the boot process.
 */
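/*
 * Defaults to queued spinlocks; boot code is expected to disable the key,
 * falling back to ticket spinlocks, when the platform lacks the ISA
 * extensions the qspinlock paths rely on (e.g. Zabha/Zacas or Ziccrse).
 */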
DECLARE_STATIC_KEY_TRUE(qspinlock_key);

#define SPINLOCK_BASE_DECLARE(op, type, type_lock)			\
static __always_inline type arch_spin_##op(type_lock lock)		\
{									\
	if (static_branch_unlikely(&qspinlock_key))			\
		return queued_spin_##op(lock);				\
	return ticket_spin_##op(lock);					\
}

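/*
 * The invocations below generate the arch_spin_*() helpers the generic
 * locking code expects. For illustration, the first one expands to:
 *
 *	static __always_inline void arch_spin_lock(arch_spinlock_t *lock)
 *	{
 *		if (static_branch_unlikely(&qspinlock_key))
 *			return queued_spin_lock(lock);
 *		return ticket_spin_lock(lock);
 *	}
 */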
SPINLOCK_BASE_DECLARE(lock, void, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(unlock, void, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(is_locked, int, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(is_contended, int, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(trylock, bool, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(value_unlocked, int, arch_spinlock_t)

#elif defined(CONFIG_RISCV_QUEUED_SPINLOCKS)

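/* Queued spinlocks selected unconditionally at build time. */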
#include <asm/qspinlock.h>

#else

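/* Neither combo nor queued: ticket spinlocks only. */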
#include <asm/ticket_spinlock.h>

#endif

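/*
 * Read/write locks always use the generic queued rwlock implementation,
 * regardless of which spinlock flavour is selected above.
 */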
#include <asm/qrwlock.h>

#endif /* __ASM_RISCV_SPINLOCK_H */