/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/cmpxchg.h>
#include <asm/loongarch.h>

/*
 * The "address" (in fact, the offset from $r21) of a per-CPU variable is
 * close to the loading address of the main kernel image, but far from
 * where modules are loaded. Tell the compiler this fact when using
 * explicit relocs.
 */
#if defined(MODULE) && defined(CONFIG_AS_HAS_EXPLICIT_RELOCS)
# if __has_attribute(model)
#  define PER_CPU_ATTRIBUTES __attribute__((model("extreme")))
# else
#  error compiler support for the model attribute is necessary when a recent assembler is used
# endif
#endif
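
/*
 * Editorial sketch (not part of the upstream header): PER_CPU_ATTRIBUTES
 * is applied by the generic DEFINE_PER_CPU() machinery, so a module doing
 *
 *	static DEFINE_PER_CPU(unsigned long, example_counter);
 *
 * (example_counter is a hypothetical name) gets the variable tagged with
 * model("extreme"), and the compiler then materializes a full 64-bit
 * address instead of assuming the symbol is reachable with PC-relative
 * relocations from the module's load address.
 */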

/* Use r21 for fast access */
register unsigned long __my_cpu_offset __asm__("$r21");

static inline void set_my_cpu_offset(unsigned long off)
{
	__my_cpu_offset = off;
	csr_write64(off, PERCPU_BASE_KS);
}

#define __my_cpu_offset					\
({							\
	__asm__ __volatile__("":"+r"(__my_cpu_offset));	\
	__my_cpu_offset;				\
})
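
/*
 * Editorial note: the empty asm above, with its "+r"(__my_cpu_offset)
 * constraint, marks the register as potentially modified, so every
 * expansion of __my_cpu_offset re-reads $r21 rather than reusing a value
 * the compiler cached before a possible preemption point.
 */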

#define PERCPU_OP(op, asm_op, c_op)					\
static __always_inline unsigned long __percpu_##op(void *ptr,		\
			unsigned long val, int size)			\
{									\
	unsigned long ret;						\
									\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__(					\
		"am"#asm_op".w"	" %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u32 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"am"#asm_op".d" " %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u64 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	default:							\
		ret = 0;						\
		BUILD_BUG();						\
	}								\
									\
	return ret c_op val;						\
}

PERCPU_OP(add, add, +)
PERCPU_OP(and, and, &)
PERCPU_OP(or, or, |)
#undef PERCPU_OP
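
/*
 * Editorial sketch: PERCPU_OP(add, add, +) above expands to
 *
 *	static __always_inline unsigned long
 *	__percpu_add(void *ptr, unsigned long val, int size)
 *
 * built around a single amadd.w/amadd.d. The AM* instruction deposits the
 * old memory value in %[ret], so "ret c_op val" yields the updated value,
 * which is why _percpu_add_return() below can simply alias _percpu_add().
 */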

static __always_inline unsigned long __percpu_xchg(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small((volatile void *)ptr, val, size);

	case 4:
		return __xchg_asm("amswap.w", (volatile u32 *)ptr, (u32)val);

	case 8:
		return __xchg_asm("amswap.d", (volatile u64 *)ptr, (u64)val);

	default:
		BUILD_BUG();
	}

	return 0;
}
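
/*
 * Editorial note: 1- and 2-byte exchanges have no native AM* encoding, so
 * they go through __xchg_small() from <asm/cmpxchg.h>, which emulates the
 * access with a masked ll.w/sc.w loop on the containing aligned word;
 * 4- and 8-byte exchanges map directly onto amswap.w/amswap.d.
 */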

#define __pcpu_op_1(op)		op ".b "
#define __pcpu_op_2(op)		op ".h "
#define __pcpu_op_4(op)		op ".w "
#define __pcpu_op_8(op)		op ".d "

#define _percpu_read(size, _pcp)					\
({									\
	typeof(_pcp) __pcp_ret;						\
									\
	__asm__ __volatile__(						\
		__pcpu_op_##size("ldx") "%[ret], $r21, %[ptr]	\n"	\
		: [ret] "=&r"(__pcp_ret)				\
		: [ptr] "r"(&(_pcp))					\
		: "memory");						\
									\
	__pcp_ret;							\
})

#define _percpu_write(size, _pcp, _val)					\
do {									\
	__asm__ __volatile__(						\
		__pcpu_op_##size("stx") "%[val], $r21, %[ptr]	\n"	\
		:							\
		: [val] "r"(_val), [ptr] "r"(&(_pcp))			\
		: "memory");						\
} while (0)
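
/*
 * Editorial sketch: since $r21 already holds this CPU's offset, a read or
 * write is a single indexed memory access; for a 4-byte variable,
 * _percpu_read(4, var) emits roughly
 *
 *	ldx.w	%[ret], $r21, %[ptr]
 *
 * A one-instruction access cannot be torn by preemption, so unlike the
 * read-modify-write helpers these need no preempt_disable() window.
 */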

/* this_cpu_cmpxchg */
#define _protect_cmpxchg_local(pcp, o, n)			\
({								\
	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
	preempt_disable_notrace();				\
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);	\
	preempt_enable_notrace();				\
	__ret;							\
})

#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable_notrace();				\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable_notrace();				\
	__retval;						\
})
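
/*
 * Editorial note: both wrappers above disable preemption around the whole
 * read-modify-write, because raw_cpu_ptr() must resolve the address and
 * the atomic operation must execute on the same CPU; a migration between
 * the two steps would silently update another CPU's copy of the variable.
 */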

#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) ((typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val)))

#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(1, pcp)
#define this_cpu_read_2(pcp) _percpu_read(2, pcp)
#define this_cpu_read_4(pcp) _percpu_read(4, pcp)
#define this_cpu_read_8(pcp) _percpu_read(8, pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(1, pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(2, pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(4, pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(8, pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
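
/*
 * Editorial sketch of how these hooks are consumed: the size-suffixed
 * macros above are the arch overrides dispatched to by the generic
 * this_cpu_*() machinery (<linux/percpu-defs.h>, with fallbacks from
 * <asm-generic/percpu.h>), so generic code such as
 *
 *	this_cpu_add(counter, 1);
 *
 * on a hypothetical 4-byte "counter" resolves to this_cpu_add_4() and
 * hence to a single amadd.w inside the preemption-safe _pcp_protect().
 */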

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */