/* linux/include/asm-generic/percpu.h */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to the address of
 * a percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets, but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
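
/*
 * Illustrative sketch (not part of this header): on arches that use the
 * array, boot code records, for each CPU, the delta between the linked
 * .data..percpu section and that CPU's private copy.  x86's
 * setup_per_cpu_areas() does roughly:
 *
 *	for_each_possible_cpu(cpu)
 *		__per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];
 *
 * per_cpu_offset(cpu) then simply indexes that array.
 */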

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per-cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
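
/*
 * An arch override is typically a cheap load from a per-cpu register or
 * structure.  For example (paraphrased from the s390 header), s390 keeps
 * the offset in its always-mapped lowcore:
 *
 *	#define __my_cpu_offset S390_lowcore.percpu_offset
 *
 * With CONFIG_DEBUG_PREEMPT, my_cpu_offset resolves the CPU via
 * smp_processor_id(), which warns when used from preemptible context;
 * the raw variant skips that check.
 */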

/*
 * Add an offset to a pointer but keep the pointer as is.
 *
 * Only S390 provides its own means of moving the pointer.
 */
#ifndef SHIFT_PERCPU_PTR
/* Weird cast keeps both GCC and sparse happy. */
#define SHIFT_PERCPU_PTR(__p, __offset)	({				\
	__verify_pcpu_ptr((__p));					\
	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset)); \
})
#endif
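
/*
 * In effect (a simplification; RELOC_HIDE also launders the pointer
 * through an asm so the compiler cannot make aliasing or overflow
 * assumptions about the result), the macro boils down to:
 *
 *	SHIFT_PERCPU_PTR(p, off) ~= (typeof(p))((unsigned long)(p) + (off))
 *
 * The __kernel __force cast strips the sparse address space that percpu
 * pointers carry, so the relocated pointer can be dereferenced normally.
 */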

/*
 * A percpu variable may point to a discarded region. The following are
 * established ways to produce a usable pointer from the percpu variable
 * offset.
 */
#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(&(var), per_cpu_offset(cpu)))
#define __get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(&(var), my_cpu_offset))
#define __raw_get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(&(var), __my_cpu_offset))

#define this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, my_cpu_offset)
#define __this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
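
/*
 * Typical usage (an illustrative sketch; the variable name is made up):
 *
 *	DEFINE_PER_CPU(int, hits);
 *
 * With preemption disabled, operate on this CPU's instance:
 *
 *	get_cpu();
 *	__get_cpu_var(hits)++;
 *	put_cpu();
 *
 * And sum all instances from any context:
 *
 *	int cpu, sum = 0;
 *
 *	for_each_possible_cpu(cpu)
 *		sum += per_cpu(hits, cpu);
 *
 * __raw_get_cpu_var() and __this_cpu_ptr() skip the DEBUG_PREEMPT check
 * and are for callers that have already made the access safe.
 */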

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#else /* ! SMP */

#define VERIFY_PERCPU_PTR(__p) ({			\
	__verify_pcpu_ptr((__p));			\
	(typeof(*(__p)) __kernel __force *)(__p);	\
})

#define per_cpu(var, cpu)	(*((void)(cpu), VERIFY_PERCPU_PTR(&(var))))
#define __get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define __raw_get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define this_cpu_ptr(ptr)	per_cpu_ptr(ptr, 0)
#define __this_cpu_ptr(ptr)	this_cpu_ptr(ptr)
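
/*
 * On UP there is exactly one instance, so every accessor collapses to a
 * (type-checked) dereference of that instance.  The "(void)(cpu)" comma
 * expression in per_cpu() still evaluates the cpu argument, in case it
 * has side effects, then discards it; e.g. (names made up):
 *
 *	per_cpu(hits, pick_cpu())
 *
 * calls pick_cpu() but always yields the single 'hits'.
 */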

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif
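
/*
 * Rough illustration (the real definitions live in linux/percpu-defs.h):
 * on SMP, DEFINE_PER_CPU(int, x) ends up approximately as
 *
 *	__attribute__((section(".data..percpu"))) int x;
 *
 * The linked .data..percpu section is only a template; early boot
 * allocates one copy of it per CPU, and the offsets above translate the
 * template address into each CPU's copy.  On UP the one and only
 * instance lives directly in .data.
 */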

#ifdef CONFIG_SMP

#ifdef MODULE
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION "..first"

#else

#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_FIRST_SECTION ""

#endif
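
/*
 * These suffixes are appended to PER_CPU_BASE_SECTION, so for instance
 * DEFINE_PER_CPU_SHARED_ALIGNED() lands its variable in
 * ".data..percpu..shared_aligned" in the SMP kernel image, keeping
 * cacheline-aligned percpu data packed together.  Modules get empty
 * suffixes, as a module's percpu data is loaded from a single section.
 * Note that PER_CPU_ALIGNED_SECTION keeps its suffix even on UP:
 * DEFINE_PER_CPU_ALIGNED preserves cacheline alignment unconditionally,
 * while the SHARED_ALIGNED variant only aligns where false sharing can
 * occur, i.e. on SMP.
 */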

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */