xref: /linux/arch/riscv/include/asm/spinlock.h (revision 52a5a22d8afe3bd195f7b470c7535c63717f5ff7)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 
3 #ifndef __ASM_RISCV_SPINLOCK_H
4 #define __ASM_RISCV_SPINLOCK_H
5 
#ifdef CONFIG_QUEUED_SPINLOCKS
/*
 * Upper bound on iterations of the qspinlock pending-bit spin loop
 * before the slowpath gives up and queues the waiter.  NOTE(review):
 * 1 << 9 matches the tuning used by other architectures; confirm it
 * was actually evaluated for RISC-V rather than inherited.
 */
#define _Q_PENDING_LOOPS	(1 << 9)
#endif
9 
10 #ifdef CONFIG_RISCV_COMBO_SPINLOCKS
11 
12 #define __no_arch_spinlock_redefine
13 #include <asm/ticket_spinlock.h>
14 #include <asm/qspinlock.h>
15 #include <asm/jump_label.h>
16 
17 /*
18  * TODO: Use an alternative instead of a static key when we are able to parse
19  * the extensions string earlier in the boot process.
20  */
21 DECLARE_STATIC_KEY_TRUE(qspinlock_key);
22 
/*
 * Generate one arch_spin_##op() wrapper that dispatches at runtime to
 * either the queued or the ticket implementation of the same operation.
 *
 * qspinlock_key is declared _TRUE above, so the queued path is the
 * default until the key is disabled during boot (presumably when the
 * hardware lacks what qspinlock needs — the decision logic lives
 * elsewhere; see the TODO above about replacing the key with an
 * alternative).
 *
 * Note: "return queued_spin_##op(lock);" is also expanded for ops whose
 * return type is void (e.g. lock/unlock); returning a void expression
 * from a void function is a GNU C extension the kernel relies on.
 */
#define SPINLOCK_BASE_DECLARE(op, type, type_lock)			\
static __always_inline type arch_spin_##op(type_lock lock)		\
{									\
	if (static_branch_unlikely(&qspinlock_key))			\
		return queued_spin_##op(lock);				\
	return ticket_spin_##op(lock);					\
}
30 
/*
 * Instantiate the full arch_spin_*() API.  All operations take a
 * pointer to the lock except value_unlocked(), which — per the generic
 * spinlock contract — receives the lock by value.
 */
SPINLOCK_BASE_DECLARE(lock, void, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(unlock, void, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(is_locked, int, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(is_contended, int, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(trylock, bool, arch_spinlock_t *)
SPINLOCK_BASE_DECLARE(value_unlocked, int, arch_spinlock_t)
37 
38 #elif defined(CONFIG_RISCV_QUEUED_SPINLOCKS)
39 
40 #include <asm/qspinlock.h>
41 
42 #else
43 
44 #include <asm/ticket_spinlock.h>
45 
46 #endif
47 
48 #include <asm/qrwlock.h>
49 
50 #endif /* __ASM_RISCV_SPINLOCK_H */
51