/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/preempt.h>
#include <asm/cmpxchg.h>
#include <asm/march.h>

/*
 * s390 uses its own implementation for per cpu data: the offset of
 * the cpu's local data area is cached in the cpu's lowcore memory.
 */
#define __my_cpu_offset get_lowcore()->percpu_offset
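
/*
 * Rough illustration only (the real plumbing lives in
 * include/linux/percpu-defs.h and asm-generic/percpu.h): a per cpu
 * accessor such as raw_cpu_ptr(&var) effectively computes
 *
 *	(typeof(var) *)((unsigned long)&var + __my_cpu_offset)
 *
 * so keeping the offset in the cpu's lowcore turns the address
 * calculation into a single load instead of a per cpu offset array
 * lookup.
 */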

/*
 * We use a compare-and-swap loop since that uses fewer cpu cycles than
 * disabling and enabling interrupts as the generic variant would do.
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = READ_ONCE(*ptr__);					\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable_notrace();					\
	new__;								\
})
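
/*
 * Illustrative expansion only: this_cpu_add_return_1(x, 1) behaves
 * roughly like
 *
 *	preempt_disable_notrace();
 *	do {
 *		old = *ptr;
 *		new = old + 1;
 *	} while (cmpxchg(ptr, old, new) != old);
 *	preempt_enable_notrace();
 *	return new;
 *
 * The macro yields the new value, which is what the add_return
 * variants need; the plain add/and/or variants simply ignore it.
 * The 1- and 2-byte operations below always take this path, since the
 * interlocked-access instructions used further down only exist for
 * 4- and 8-byte operands.
 */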

#define this_cpu_add_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#ifndef MARCH_HAS_Z196_FEATURES

#define this_cpu_add_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#else /* MARCH_HAS_Z196_FEATURES */

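/*
 * On machines with the z196 interlocked-access facility the per cpu
 * add/and/or operations can be done with a single interlocked
 * instruction (laa/laag, lan/lang, lao/laog, or asi/agsi for small
 * constant addends) instead of a compare-and-swap loop.
 */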
#define arch_this_cpu_add(pcp, val, op1, op2, szcast)			\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	if (__builtin_constant_p(val__) &&				\
	    ((szcast)val__ > -129) && ((szcast)val__ < 128)) {		\
		asm volatile(						\
			op2 "   %[ptr__],%[val__]"			\
			: [ptr__] "+Q" (*ptr__)				\
			: [val__] "i" ((szcast)val__)			\
			: "cc");					\
	} else {							\
		asm volatile(						\
			op1 "   %[old__],%[val__],%[ptr__]"		\
			: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)	\
			: [val__] "d" (val__)				\
			: "cc");					\
	}								\
	preempt_enable_notrace();					\
}

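/*
 * When the addend is a compile-time constant that fits into a signed
 * byte, the add-immediate forms (asi/agsi) are used; otherwise the
 * value is passed in a register to laa/laag.
 */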
#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)

#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "    %[old__],%[val__],%[ptr__]"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
	old__ + val__;							\
})

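/*
 * laa/laag place the previous value of the memory operand in the
 * first register operand, so old__ + val__ above yields the new
 * value that this_cpu_add_return() must return.
 */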
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")

#define arch_this_cpu_to_op(pcp, val, op)				\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "    %[old__],%[val__],%[ptr__]"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
}

#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op(pcp, val, "laog")

#endif /* MARCH_HAS_Z196_FEATURES */

#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable_notrace();					\
	ret__;								\
})

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)

#define this_cpu_cmpxchg64(pcp, o, n)	this_cpu_cmpxchg_8(pcp, o, n)

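/*
 * The 128-bit variant relies on cmpxchg128() from asm/cmpxchg.h,
 * which on s390 is expected to use the 16-byte compare-and-swap
 * (cdsg) instruction. Old and new values are passed as u128 and the
 * previous contents of the location are returned.
 */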
#define this_cpu_cmpxchg128(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	u128 old__, new__, ret__;					\
	pcp_op_T__ *ptr__;						\
	old__ = oval;							\
	new__ = nval;							\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg128((void *)ptr__, old__, new__);		\
	preempt_enable_notrace();					\
	ret__;								\
})

#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) *ptr__;						\
	typeof(pcp) ret__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = xchg(ptr__, nval);					\
	preempt_enable_notrace();					\
	ret__;								\
})

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)

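/* Anything not overridden above falls back to the generic per cpu code. */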
#include <asm-generic/percpu.h>

#endif /* __ARCH_S390_PERCPU__ */