/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#ifndef __ASSEMBLER__

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

/*
 * __percpu_qual is the qualifier for the percpu named address space.
 *
 * Most arches use the generic address space for percpu variables, but
 * some arches place percpu variables in a different named address space
 * (on x86, percpu variables may be declared as relative to the %fs or
 * %gs segment using the __seg_fs or __seg_gs named address space
 * qualifiers).
 */
#ifndef __percpu_qual
# define __percpu_qual
#endif
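
/*
 * Illustrative sketch only: an arch override amounts to something
 * roughly like the following (the real x86 definition lives in
 * arch/x86/include/asm/percpu.h and depends on compiler support for
 * named address spaces):
 *
 *	#define __percpu_qual	__seg_gs
 *
 * so that dereferencing a percpu pointer compiles to a %gs-relative
 * access instead of an explicit pointer addition.
 */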

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to the address of
 * a percpu variable to get to that variable's instance for a certain
 * processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets, but
 * some arches (x86_64, s390) have their own way of determining the
 * offset.
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
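
/*
 * For example, a sketch of the generic path (the real expansion goes
 * through per_cpu_ptr()/SHIFT_PERCPU_PTR() in linux/percpu-defs.h):
 *
 *	per_cpu(var, cpu)
 *		~= *SHIFT_PERCPU_PTR(&var, per_cpu_offset(cpu))
 *
 * i.e. the per-CPU instance is found by offsetting the address of the
 * original variable in .data..percpu by that CPU's offset.
 */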

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/* smp_processor_id() also checks that preemption is disabled. */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * An arch may define arch_raw_cpu_ptr() to provide more efficient
 * address translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
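
/*
 * Illustrative sketch: with the generic definition above,
 *
 *	int *p = raw_cpu_ptr(&foo);	// "foo" is a made-up percpu var
 *
 * boils down to SHIFT_PERCPU_PTR(&foo, __my_cpu_offset), i.e. an
 * explicit add of the current CPU's offset. An arch that keeps the
 * percpu base in a fixed register or segment can define
 * arch_raw_cpu_ptr() to avoid reloading that offset from memory.
 */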

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* CONFIG_SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif
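
/*
 * Rough picture (the generic SMP setup, not a definitive description
 * of every arch): percpu variables are emitted once into .data..percpu;
 * at boot, setup_per_cpu_areas() allocates one copy of that section per
 * possible CPU and records, for each CPU, the delta between the
 * original section and that CPU's copy in __per_cpu_offset[].
 */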

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	TYPEOF_UNQUAL(pcp) *__p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += val;							\
	*__p;								\
})

#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	TYPEOF_UNQUAL(pcp) *__p = raw_cpu_ptr(&(pcp));			\
	TYPEOF_UNQUAL(pcp) __ret;					\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})

#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)		\
({									\
	TYPEOF_UNQUAL(pcp) __val, __old = *(ovalp);			\
	__val = _cmpxchg(pcp, __old, nval);				\
	if (__val != __old)						\
		*(ovalp) = __val;					\
	__val == __old;							\
})
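
/*
 * Semantics sketch: the above builds try_cmpxchg() on top of an
 * arch-provided cmpxchg(). On success true is returned; on failure the
 * value actually observed is written back through ovalp and false is
 * returned, so a typical retry loop looks roughly like (names here are
 * illustrative only):
 *
 *	unsigned int old = raw_cpu_read(cnt);
 *	do {
 *		// recompute the desired new value from "old" here
 *	} while (!raw_cpu_try_cmpxchg(cnt, &old, old + 1));
 */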

#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	TYPEOF_UNQUAL(pcp) *__p = raw_cpu_ptr(&(pcp));			\
	TYPEOF_UNQUAL(pcp) __val = *__p, ___old = *(ovalp);		\
	bool __ret;							\
	if (__val == ___old) {						\
		*__p = nval;						\
		__ret = true;						\
	} else {							\
		*(ovalp) = __val;					\
		__ret = false;						\
	}								\
	__ret;								\
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	TYPEOF_UNQUAL(pcp) __old = (oval);				\
	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval);			\
	__old;								\
})

#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	TYPEOF_UNQUAL(pcp) ___ret;					\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})

#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	TYPEOF_UNQUAL(pcp) ___ret;					\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	TYPEOF_UNQUAL(pcp) __ret;					\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
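
/*
 * Why the split above (a sketch of the reasoning, not arch-specific
 * law): a __native_word() value can be loaded with a single,
 * single-copy-atomic access, so pinning the task to a CPU by disabling
 * preemption is enough for a consistent read. A wider value may take
 * several loads, and an interrupt between them could leave a torn
 * result, so interrupts are disabled instead.
 */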

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)

#define this_cpu_generic_add_return(pcp, val)				\
({									\
	TYPEOF_UNQUAL(pcp) __ret;					\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	TYPEOF_UNQUAL(pcp) __ret;					\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	bool __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	TYPEOF_UNQUAL(pcp) __ret;					\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

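/*
 * Everything below wires the sized operations to the generic fallbacks
 * unless the arch already provided its own. A rough usage sketch
 * (dispatch happens via __pcpu_size_call() and friends in
 * linux/percpu-defs.h; "hits" is an illustrative name):
 *
 *	DEFINE_PER_CPU(u32, hits);
 *	this_cpu_add(hits, 1);		// -> this_cpu_add_4(hits, 1)
 *	u32 h = raw_cpu_read(hits);	// -> raw_cpu_read_4(hits)
 */
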
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_try_cmpxchg_1
#ifdef raw_cpu_cmpxchg_1
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
#else
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_2
#ifdef raw_cpu_cmpxchg_2
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
#else
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_4
#ifdef raw_cpu_cmpxchg_4
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
#else
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_8
#ifdef raw_cpu_cmpxchg_8
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
#else
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef raw_cpu_try_cmpxchg64
#ifdef raw_cpu_cmpxchg64
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg64)
#else
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg128
#ifdef raw_cpu_cmpxchg128
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg128)
#else
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg64
#define raw_cpu_cmpxchg64(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg128
#define raw_cpu_cmpxchg128(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_try_cmpxchg_1
#ifdef this_cpu_cmpxchg_1
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
#else
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_2
#ifdef this_cpu_cmpxchg_2
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
#else
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_4
#ifdef this_cpu_cmpxchg_4
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
#else
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_8
#ifdef this_cpu_cmpxchg_8
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
#else
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_try_cmpxchg64
#ifdef this_cpu_cmpxchg64
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg64)
#else
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg128
#ifdef this_cpu_cmpxchg128
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg128)
#else
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg64
#define this_cpu_cmpxchg64(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg128
#define this_cpu_cmpxchg128(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#endif /* __ASSEMBLER__ */
#endif /* _ASM_GENERIC_PERCPU_H_ */