/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/cmpxchg.h>
#include <asm/loongarch.h>

/*
 * The "address" (in fact, offset from $r21) of a per-CPU variable is close to
 * the loading address of the main kernel image, but far from where the
 * modules are loaded. Tell the compiler this fact when using explicit relocs.
 */
#if defined(MODULE) && defined(CONFIG_AS_HAS_EXPLICIT_RELOCS) && defined(CONFIG_64BIT)
# if __has_attribute(model)
#  define PER_CPU_ATTRIBUTES __attribute__((model("extreme")))
# else
#  error compiler support for the model attribute is necessary when a recent assembler is used
# endif
#endif
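
/*
 * Note (explanatory, not from the original header): the "extreme" code model
 * makes the compiler materialize a symbol's full 64-bit address instead of
 * assuming it is reachable by a short PC-relative sequence, so per-CPU
 * accesses from far-away module text still resolve correctly.
 */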

/* Use r21 for fast access */
register unsigned long __my_cpu_offset __asm__("$r21");

static inline void set_my_cpu_offset(unsigned long off)
{
	__my_cpu_offset = off;
	csr_write64(off, PERCPU_BASE_KS);
}
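
/*
 * Usage sketch (hypothetical call site, not part of this header): each CPU
 * installs its own offset early on the boot path, before any this_cpu_*()
 * accessor runs, e.g.:
 *
 *	set_my_cpu_offset(per_cpu_offset(smp_processor_id()));
 */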

#define __my_cpu_offset						\
({								\
	__asm__ __volatile__("":"+r"(__my_cpu_offset));		\
	__my_cpu_offset;					\
})
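
/*
 * Note (explanatory, not from the original header): the empty asm with the
 * "+r" constraint appears intended as an optimization barrier on $r21, so the
 * compiler re-reads the offset at each use rather than caching a stale copy
 * in another register across scheduling points.
 */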

#ifdef CONFIG_CPU_HAS_AMO

#define PERCPU_OP(op, asm_op, c_op)					\
static __always_inline unsigned long __percpu_##op(void *ptr,		\
			unsigned long val, int size)			\
{									\
	unsigned long ret;						\
									\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__(					\
		"am"#asm_op".w" " %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u32 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"am"#asm_op".d" " %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u64 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	default:							\
		ret = 0;						\
		BUILD_BUG();						\
	}								\
									\
	return ret c_op val;						\
}

PERCPU_OP(add, add, +)
PERCPU_OP(and, and, &)
PERCPU_OP(or, or, |)
#undef PERCPU_OP
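
/*
 * Expansion sketch (illustrative, not generated code): PERCPU_OP(add, add, +)
 * defines __percpu_add(), so that for a 4-byte object
 *
 *	__percpu_add(ptr, val, 4)
 *
 * emits a single "amadd.w" that atomically adds val to *ptr and returns the
 * old value in ret; the trailing "ret c_op val" then yields the new value.
 */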

#endif /* CONFIG_CPU_HAS_AMO */

#ifdef CONFIG_64BIT

#define __pcpu_op_1(op)		op ".b "
#define __pcpu_op_2(op)		op ".h "
#define __pcpu_op_4(op)		op ".w "
#define __pcpu_op_8(op)		op ".d "

#define _percpu_read(size, _pcp)					\
({									\
	typeof(_pcp) __pcp_ret;						\
									\
	__asm__ __volatile__(						\
		__pcpu_op_##size("ldx") "%[ret], $r21, %[ptr]	\n"	\
		: [ret] "=&r"(__pcp_ret)				\
		: [ptr] "r"(&(_pcp))					\
		: "memory");						\
									\
	__pcp_ret;							\
})
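
/*
 * Illustrative expansion (not generated code): for a 4-byte per-CPU variable,
 * _percpu_read(4, var) pastes __pcpu_op_4("ldx") into "ldx.w ", i.e. a single
 *
 *	ldx.w	%[ret], $r21, %[ptr]
 *
 * that indexes the variable's offset off the per-CPU base kept in $r21.
 */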

#define _percpu_write(size, _pcp, _val)					\
do {									\
	__asm__ __volatile__(						\
		__pcpu_op_##size("stx") "%[val], $r21, %[ptr]	\n"	\
		:							\
		: [val] "r"(_val), [ptr] "r"(&(_pcp))			\
		: "memory");						\
} while (0)
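
/*
 * Likewise (illustrative), _percpu_write(8, var, v) becomes a single
 * "stx.d %[val], $r21, %[ptr]". The likely rationale for skipping any
 * preempt_disable() here: the base register is part of the one instruction,
 * so a migration before or after it still touches some CPU's valid slot.
 */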

#endif /* CONFIG_64BIT */

#define __percpu_xchg __arch_xchg

/* this_cpu_cmpxchg */
#define _protect_cmpxchg_local(pcp, o, n)			\
({								\
	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
	preempt_disable_notrace();				\
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);	\
	preempt_enable_notrace();				\
	__ret;							\
})
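
/*
 * Note (explanatory, not from the original header): unlike the single-
 * instruction read/write helpers above, cmpxchg_local() dereferences
 * raw_cpu_ptr() as a plain pointer, so preemption must stay off across the
 * whole sequence or the task could migrate and hit another CPU's slot.
 */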

#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable_notrace();				\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					(val), sizeof(pcp));	\
	preempt_enable_notrace();				\
	__retval;						\
})
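
/*
 * Expansion sketch (illustrative): _pcp_protect(__percpu_add, pcp, val)
 * becomes, with preemption disabled around it,
 *
 *	__percpu_add(raw_cpu_ptr(&pcp), val, sizeof(pcp))
 *
 * i.e. one AM* instruction on this CPU's copy, returning the new value.
 */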

#ifdef CONFIG_CPU_HAS_AMO

#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)
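
/*
 * Usage note (illustrative): the generic this_cpu_add() in
 * <linux/percpu-defs.h> dispatches on sizeof(), so e.g.
 *
 *	this_cpu_add(counter, 1);
 *
 * picks this_cpu_add_4 or this_cpu_add_8 from here; 1- and 2-byte cases fall
 * back to the asm-generic implementations included at the end of this file.
 */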

#endif /* CONFIG_CPU_HAS_AMO */

#ifdef CONFIG_64BIT

#define this_cpu_read_1(pcp) _percpu_read(1, pcp)
#define this_cpu_read_2(pcp) _percpu_read(2, pcp)
#define this_cpu_read_4(pcp) _percpu_read(4, pcp)
#define this_cpu_read_8(pcp) _percpu_read(8, pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(1, pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(2, pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(4, pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(8, pcp, val)

#endif /* CONFIG_64BIT */

#define _percpu_xchg(pcp, val) ((typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val)))
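
/*
 * Note (explanatory, not from the original header): the (unsigned long) cast
 * funnels every size through __arch_xchg()'s unsigned long value parameter,
 * and the outer (typeof(pcp)) cast narrows the result back, so e.g.
 * this_cpu_xchg_2() round-trips a u16 through the small-xchg handling in
 * <asm/cmpxchg.h>.
 */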

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */