xref: /linux/include/asm-generic/percpu.h (revision 6d12c8d308e68b9b0fa98ca2df4f83db4b4c965d)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_GENERIC_PERCPU_H_
3 #define _ASM_GENERIC_PERCPU_H_
4 
5 #include <linux/compiler.h>
6 #include <linux/threads.h>
7 #include <linux/percpu-defs.h>
8 
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/*
 * Under CONFIG_DEBUG_PREEMPT the checked smp_processor_id() is used so
 * that use from a context where the task could migrate CPUs can be
 * diagnosed; otherwise my_cpu_offset is just the raw lookup.
 */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
/* Arch hook invoked during early boot to set up the per-cpu areas. */
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */
52 
/*
 * Linker section into which per-cpu variables are placed.  With SMP a
 * dedicated ".data..percpu" section is used; without SMP a single
 * instance in plain ".data" suffices.  An arch may override both the
 * section name and the attributes applied to per-cpu variables.
 */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif
64 
/*
 * Generic "raw" percpu operations.  These provide no protection against
 * preemption or interrupts themselves; the caller must ensure the task
 * cannot migrate and (where needed) cannot race with local interrupts.
 *
 * raw_cpu_generic_read() - read this CPU's instance of @pcp.
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})
69 
/*
 * raw_cpu_generic_to_op() - apply read-modify-write operator @op
 * (e.g. "+=", "&=", "=") with right-hand side @val to this CPU's
 * instance of @pcp.
 */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)
74 
/*
 * raw_cpu_generic_add_return() - add @val to this CPU's instance of
 * @pcp and return the resulting (new) value.
 */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) *__ptr = raw_cpu_ptr(&(pcp));			\
									\
	*__ptr += val;							\
	*__ptr;								\
})
82 
/*
 * raw_cpu_generic_xchg() - store @nval into this CPU's instance of
 * @pcp and return the value that was there before.
 */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) *__ptr = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __prev = *__ptr;					\
	*__ptr = nval;							\
	__prev;								\
})
91 
/*
 * __cpu_fallback_try_cmpxchg() - build a try_cmpxchg (return success as
 * bool, update *@ovalp on failure) on top of a plain cmpxchg operation
 * @_cmpxchg that returns the previously observed value.  Used when an
 * arch supplies cmpxchg but not try_cmpxchg for a given size.
 */
#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)		\
({									\
	typeof(pcp) __val, __old = *(ovalp);				\
	__val = _cmpxchg(pcp, __old, nval);				\
	if (__val != __old)						\
		*(ovalp) = __val;					\
	__val == __old;							\
})
100 
/*
 * raw_cpu_generic_try_cmpxchg() - if this CPU's instance of @pcp equals
 * *@ovalp, store @nval and return true; otherwise copy the observed
 * value into *@ovalp and return false.
 *
 * Macro hygiene: @ovalp is expanded textually, and raw_cpu_generic_cmpxchg()
 * passes "&__old".  A local here named __old would therefore shadow the
 * caller's variable inside this statement expression, turning the
 * initializer into a read of the (uninitialized) local itself.  The
 * locals are deliberately named __cur/__prev to avoid any such capture.
 */
#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __cur = *__p, __prev = *(ovalp);			\
	bool __ret;							\
	if (__cur == __prev) {						\
		*__p = nval;						\
		__ret = true;						\
	} else {							\
		*(ovalp) = __cur;					\
		__ret = false;						\
	}								\
	__ret;								\
})
115 
/*
 * raw_cpu_generic_cmpxchg() - classic cmpxchg on this CPU's instance of
 * @pcp: if it equals @oval, store @nval; either way return the value
 * observed before the operation.
 *
 * The local is named ___old (three underscores) on purpose: its address
 * is passed into raw_cpu_generic_try_cmpxchg() and expanded textually
 * there, so it must not collide with any local declared inside that
 * macro (a clash would make the inner initializer read its own
 * uninitialized variable).  Same convention as ___ret in
 * __this_cpu_generic_read_nopreempt().
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) ___old = (oval);					\
	raw_cpu_generic_try_cmpxchg(pcp, &___old, nval);		\
	___old;								\
})
122 
/*
 * raw_cpu_generic_cmpxchg_double() - compare-and-swap two adjacent
 * percpu words as a unit: only if *both* current values match
 * @oval1/@oval2 are @nval1/@nval2 stored.  Returns 1 on success,
 * 0 otherwise.
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(pcp1) *__a = raw_cpu_ptr(&(pcp1));			\
	typeof(pcp2) *__b = raw_cpu_ptr(&(pcp2));			\
	int __ok = (*__a == (oval1) && *__b == (oval2));		\
	if (__ok) {							\
		*__a = nval1;						\
		*__b = nval2;						\
	}								\
	(__ok);								\
})
135 
/*
 * __this_cpu_generic_read_nopreempt() - read a native-word @pcp with
 * preemption disabled, so the task cannot migrate between selecting the
 * CPU and performing the READ_ONCE() load.  The local is named ___ret
 * (three underscores) to stay distinct from the __ret local declared by
 * this_cpu_generic_read(), which expands this macro.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})
144 
/*
 * __this_cpu_generic_read_noirq() - read a @pcp that is not a native
 * machine word.  The whole read is done under raw_local_irq_save() so a
 * (possibly multi-access) load cannot be interleaved with local
 * interrupt handlers.  ___ret/___flags use the triple-underscore
 * convention to avoid clashing with locals of the expanding macro.
 */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})
154 
/*
 * this_cpu_generic_read() - preemption/interrupt-safe read of @pcp.
 * Native machine-word sizes only need preemption disabled around a
 * single load; other sizes are read with interrupts disabled.
 * __native_word() is a compile-time test, so the unused branch is
 * discarded by the compiler.
 */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
164 
/*
 * this_cpu_generic_to_op() - interrupt-safe form of
 * raw_cpu_generic_to_op(): applies @op with @val to this CPU's instance
 * of @pcp under raw_local_irq_save()/restore().
 */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __fl;						\
	raw_local_irq_save(__fl);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__fl);					\
} while (0)
172 
173 
/*
 * this_cpu_generic_add_return() - interrupt-safe wrapper around
 * raw_cpu_generic_add_return(); returns the new value of @pcp.
 */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
183 
/*
 * this_cpu_generic_xchg() - interrupt-safe wrapper around
 * raw_cpu_generic_xchg(); returns the previous value of @pcp.
 */
#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
193 
/*
 * this_cpu_generic_try_cmpxchg() - interrupt-safe wrapper around
 * raw_cpu_generic_try_cmpxchg(); returns true on a successful swap.
 */
#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	bool __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
203 
/*
 * this_cpu_generic_cmpxchg() - interrupt-safe wrapper around
 * raw_cpu_generic_cmpxchg(); returns the value observed before the
 * operation.
 */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
213 
/*
 * this_cpu_generic_cmpxchg_double() - interrupt-safe wrapper around
 * raw_cpu_generic_cmpxchg_double(); returns 1 if both words were
 * swapped, 0 otherwise.
 */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
224 
/*
 * Size-specific raw_cpu_* fallbacks.  An arch defines raw_cpu_<op>_<size>
 * where it has a more efficient implementation; every op/size it leaves
 * undefined defaults to the generic versions above.  None of these
 * protect against preemption or interrupts.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
315 
/*
 * raw_cpu_try_cmpxchg fallbacks.  Two tiers: if the arch supplies a
 * plain raw_cpu_cmpxchg for the size, wrap it with
 * __cpu_fallback_try_cmpxchg() to get the try_cmpxchg calling
 * convention; otherwise use the fully generic
 * raw_cpu_generic_try_cmpxchg().
 */
#ifndef raw_cpu_try_cmpxchg_1
#ifdef raw_cpu_cmpxchg_1
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
#else
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_2
#ifdef raw_cpu_cmpxchg_2
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
#else
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_4
#ifdef raw_cpu_cmpxchg_4
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
#else
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_8
#ifdef raw_cpu_cmpxchg_8
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
#else
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

/* Same two-tier scheme for the fixed 64- and 128-bit variants. */
#ifndef raw_cpu_try_cmpxchg64
#ifdef raw_cpu_cmpxchg64
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg64)
#else
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg128
#ifdef raw_cpu_cmpxchg128
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg128)
#else
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
371 
/*
 * Plain raw_cpu_cmpxchg fallbacks (per-size plus fixed 64/128-bit), all
 * defaulting to raw_cpu_generic_cmpxchg(), and the double-word variants
 * defaulting to raw_cpu_generic_cmpxchg_double().
 */
#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg64
#define raw_cpu_cmpxchg64(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg128
#define raw_cpu_cmpxchg128(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
414 
/*
 * Size-specific this_cpu_* fallbacks: same per-arch override scheme as
 * the raw_cpu_* ops above, but defaulting to the this_cpu_generic_*
 * versions, which are safe against preemption and local interrupts.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
505 
/*
 * this_cpu_try_cmpxchg fallbacks: same two-tier scheme as the raw_cpu
 * variants — wrap an arch-provided this_cpu_cmpxchg with
 * __cpu_fallback_try_cmpxchg(), else use the irq-safe generic version.
 */
#ifndef this_cpu_try_cmpxchg_1
#ifdef this_cpu_cmpxchg_1
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
#else
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_2
#ifdef this_cpu_cmpxchg_2
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
#else
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_4
#ifdef this_cpu_cmpxchg_4
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
#else
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_8
#ifdef this_cpu_cmpxchg_8
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
#else
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_try_cmpxchg64
#ifdef this_cpu_cmpxchg64
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg64)
#else
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg128
#ifdef this_cpu_cmpxchg128
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg128)
#else
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
561 
/*
 * Plain this_cpu_cmpxchg fallbacks (per-size plus fixed 64/128-bit) and
 * the double-word variants, all defaulting to the irq-safe
 * this_cpu_generic_cmpxchg()/this_cpu_generic_cmpxchg_double().
 */
#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg64
#define this_cpu_cmpxchg64(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg128
#define this_cpu_cmpxchg128(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
604 
605 #endif /* _ASM_GENERIC_PERCPU_H_ */
606