xref: /linux/arch/s390/include/asm/percpu.h (revision 7a5f1cd22d47f8ca4b760b6334378ae42c1bd24b)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef __ARCH_S390_PERCPU__
3 #define __ARCH_S390_PERCPU__
4 
5 #include <linux/preempt.h>
6 #include <asm/cmpxchg.h>
7 #include <asm/march.h>
8 
/*
 * s390 uses its own implementation for per cpu data, the offset of
 * the cpu local data area is cached in the cpu's lowcore memory.
 * Overriding __my_cpu_offset makes the generic per cpu code use this
 * cached value instead of the generic per cpu offset lookup.
 */
#define __my_cpu_offset get_lowcore()->percpu_offset
14 
/*
 * Calculate the address of the current cpu's copy of the per cpu
 * variable behind _ptr. Instead of reading __my_cpu_offset and adding
 * it in C, a single "ag" (add from memory) adds the percpu_offset
 * field of the lowcore to the variable's base address.
 *
 * The ALTERNATIVE selects the second variant, which addresses the
 * lowcore at LOWCORE_ALT_ADDRESS instead of address zero, when the
 * MFEATURE_LOWCORE machine feature is present.
 *
 * The dummy "m" input makes the read of percpu_offset visible to the
 * compiler; "cc" is clobbered since "ag" changes the condition code.
 */
#define arch_raw_cpu_ptr(_ptr)						\
({									\
	unsigned long lc_percpu, tcp_ptr__;				\
									\
	tcp_ptr__ = (__force unsigned long)(_ptr);			\
	lc_percpu = offsetof(struct lowcore, percpu_offset);		\
	asm_inline volatile(						\
	ALTERNATIVE("ag		%[__ptr__],%[offzero](%%r0)\n",		\
		    "ag		%[__ptr__],%[offalt](%%r0)\n",		\
		    ALT_FEATURE(MFEATURE_LOWCORE))			\
	: [__ptr__] "+d" (tcp_ptr__)					\
	: [offzero] "i" (lc_percpu),					\
	  [offalt] "i" (lc_percpu + LOWCORE_ALT_ADDRESS),		\
	  "m" (((struct lowcore *)0)->percpu_offset)			\
	: "cc");							\
	(TYPEOF_UNQUAL(*(_ptr)) __force __kernel *)tcp_ptr__;		\
})
32 
/*
 * We use a compare-and-swap loop since that uses less cpu cycles than
 * disabling and enabling interrupts like the generic variant would do.
 *
 * Generic read-modify-write helper: applies "op" with "val" to the
 * current cpu's copy of "pcp" and yields the new value as the result
 * of the statement expression. The cmpxchg() retry loop retries until
 * no concurrent update intervened; preempt_disable_notrace() pins the
 * task to this cpu so the per cpu pointer stays valid for the whole
 * loop.
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = READ_ONCE(*ptr__);					\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable_notrace();					\
	new__;								\
})
53 
/*
 * 1 and 2 byte operations always use the cmpxchg based fallback:
 * the instruction based variants further down only exist for 4 and
 * 8 byte operands.
 */
#define this_cpu_add_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
62 
#ifndef MARCH_HAS_Z196_FEATURES

/*
 * Without z196 features the load-and-add/and/or instructions used by
 * the asm variants below are not available, so 4 and 8 byte operations
 * also fall back to the cmpxchg based implementation.
 */
#define this_cpu_add_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
73 
74 #else /* MARCH_HAS_Z196_FEATURES */
75 
/*
 * Add "val" to the current cpu's copy of "pcp" with a single
 * interlocked instruction: "op2" (add signed immediate, e.g. asi/agsi)
 * when the value is a compile time constant that fits into a signed
 * byte, "op1" (load and add, e.g. laa/laag) otherwise. "szcast" is the
 * operand-sized integer type used for the range check and the
 * immediate.
 *
 * Wrapped in do { } while (0) so that the macro expands to exactly one
 * statement and stays safe inside unbraced if/else bodies; the
 * previous bare { } block would break "if (x) this_cpu_add_4(...);
 * else ...".
 */
#define arch_this_cpu_add(pcp, val, op1, op2, szcast)			\
do {									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	if (__builtin_constant_p(val__) &&				\
	    ((szcast)val__ > -129) && ((szcast)val__ < 128)) {		\
		asm volatile(						\
			op2 "   %[ptr__],%[val__]"			\
			: [ptr__] "+Q" (*ptr__)			\
			: [val__] "i" ((szcast)val__)			\
			: "cc");					\
	} else {							\
		/* old__ receives the previous value; it is unused	\
		 * but required as destination register by op1. */	\
		asm volatile(						\
			op1 "   %[old__],%[val__],%[ptr__]"		\
			: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)	\
			: [val__] "d" (val__)				\
			: "cc");					\
	}								\
	preempt_enable_notrace();					\
} while (0)
99 
/* 32-bit add uses laa/asi, 64-bit add uses laag/agsi. */
#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)
102 
/*
 * Add "val" to the current cpu's copy of "pcp" and yield the new
 * value. The load-and-add instruction "op" (laa/laag) leaves the
 * previous value in old__, so the result is computed as
 * old__ + val__.
 */
#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "    %[old__],%[val__],%[ptr__]"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
	old__ + val__;							\
})

#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")
121 
/*
 * Apply the interlocked load-and-op instruction "op" (lan/lang,
 * lao/laog) with "val" to the current cpu's copy of "pcp". The
 * previous value delivered in old__ is not used, but the instruction
 * requires a destination register.
 *
 * Wrapped in do { } while (0) so that the macro expands to exactly one
 * statement and stays safe inside unbraced if/else bodies; the
 * previous bare { } block would break "if (x) this_cpu_or_4(...);
 * else ...".
 */
#define arch_this_cpu_to_op(pcp, val, op)				\
do {									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "    %[old__],%[val__],%[ptr__]"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
} while (0)
136 
/* Interlocked load-and-AND / load-and-OR, 32 and 64 bit variants. */
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op(pcp, val, "laog")
141 
142 #endif /* MARCH_HAS_Z196_FEATURES */
143 
/*
 * Compare-and-exchange on the current cpu's copy of "pcp": if it
 * equals "oval" replace it with "nval"; yields the value found before
 * the exchange. The cmpxchg() itself is atomic; preemption is only
 * disabled so the task cannot migrate while the per cpu pointer is
 * in use.
 */
#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable_notrace();					\
	ret__;								\
})
155 
/* All sizes map onto the generic kernel cmpxchg(). */
#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)

#define this_cpu_cmpxchg64(pcp, o, n)	this_cpu_cmpxchg_8(pcp, o, n)
162 
/*
 * 128 bit compare-and-exchange on the current cpu's copy of "pcp".
 * oval/nval are evaluated into u128 temporaries exactly once, before
 * preemption is disabled; yields the previous 128 bit value.
 */
#define this_cpu_cmpxchg128(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	u128 old__, new__, ret__;					\
	pcp_op_T__ *ptr__;						\
	old__ = oval;							\
	new__ = nval;							\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg128((void *)ptr__, old__, new__);		\
	preempt_enable_notrace();					\
	ret__;								\
})
176 
/*
 * Exchange the current cpu's copy of "pcp" with "nval"; yields the
 * previous value. Preemption stays disabled while the per cpu pointer
 * is computed and used, so the task cannot migrate in between.
 */
#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) old__, *slot__;					\
									\
	preempt_disable_notrace();					\
	slot__ = raw_cpu_ptr(&(pcp));					\
	old__ = xchg(slot__, nval);					\
	preempt_enable_notrace();					\
	old__;								\
})
187 
/* All sizes map onto the generic kernel xchg(). */
#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)
192 
193 #include <asm-generic/percpu.h>
194 
195 #endif /* __ARCH_S390_PERCPU__ */
196