/* SPDX-License-Identifier: GPL-2.0-or-later */
#ifndef __ASM_POWERPC_FEATURE_FIXUPS_H
#define __ASM_POWERPC_FEATURE_FIXUPS_H

#include <asm/asm-const.h>

/*
 * Feature section common macros
 *
 * Note that the entries now contain offsets between the table entry
 * and the code rather than absolute code pointers, in order to be
 * usable with the vdso shared library. The offsets are also assumed
 * to be negative, that is, the fixup table has to be located after
 * the code it fixes up.
 */
#if defined(CONFIG_PPC64) && !defined(__powerpc64__)
/* 64-bit kernel, 32-bit code (i.e. vdso32) */
#define FTR_ENTRY_LONG		.8byte
#define FTR_ENTRY_OFFSET	.long 0xffffffff; .long
#elif defined(CONFIG_PPC64)
#define FTR_ENTRY_LONG		.8byte
#define FTR_ENTRY_OFFSET	.8byte
#else
#define FTR_ENTRY_LONG		.long
#define FTR_ENTRY_OFFSET	.long
#endif

#define START_FTR_SECTION(label)	label##1:

#define FTR_SECTION_ELSE_NESTED(label)			\
label##2:						\
	.pushsection __ftr_alt_##label,"a";		\
	.align 2;					\
label##3:

#ifndef CONFIG_CC_IS_CLANG
#define CHECK_ALT_SIZE(else_size, body_size)			\
	.ifgt (else_size) - (body_size);			\
	.error "Feature section else case larger than body";	\
	.endif;
#else
/*
 * If we use the .ifgt syntax above, clang's assembler complains about the
 * expression being non-absolute when the code appears in an inline assembly
 * statement.
 * As a workaround, use an .org directive that has no effect if the else case
 * instructions are no larger than the body, but fails otherwise (a true
 * comparison evaluates to -1, so the .org would try to move the location
 * counter backwards and raise an error).
 */
#define CHECK_ALT_SIZE(else_size, body_size)			\
	.org . + ((else_size) > (body_size));
#endif

#define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect)		\
label##4:							\
	.popsection;						\
	.pushsection sect,"a";					\
	.align 3;						\
label##5:							\
	FTR_ENTRY_LONG msk;					\
	FTR_ENTRY_LONG val;					\
	FTR_ENTRY_OFFSET label##1b-label##5b;			\
	FTR_ENTRY_OFFSET label##2b-label##5b;			\
	FTR_ENTRY_OFFSET label##3b-label##5b;			\
	FTR_ENTRY_OFFSET label##4b-label##5b;			\
	CHECK_ALT_SIZE((label##4b-label##3b), (label##2b-label##1b)); \
	.popsection;

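/*
 * Illustrative only: each MAKE_FTR_SECTION_ENTRY() invocation emits one
 * fixup table record laid out roughly as sketched below (descriptive field
 * names, not necessarily the structure the kernel uses to walk the table;
 * the offsets are relative to the entry itself and therefore negative):
 *
 *	struct ftr_fixup_entry {
 *		unsigned long	mask;		feature bits to test
 *		unsigned long	value;		required value under the mask
 *		long		start_off;	label##1 (body start) - entry
 *		long		end_off;	label##2 (body end) - entry
 *		long		alt_start_off;	label##3 (else start) - entry
 *		long		alt_end_off;	label##4 (else end) - entry
 *	};
 */
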
/* CPU feature dependent sections */
#define BEGIN_FTR_SECTION_NESTED(label)	START_FTR_SECTION(label)
#define BEGIN_FTR_SECTION		START_FTR_SECTION(97)

#define END_FTR_SECTION_NESTED(msk, val, label)			\
	FTR_SECTION_ELSE_NESTED(label)				\
	MAKE_FTR_SECTION_ENTRY(msk, val, label, __ftr_fixup)

#define END_FTR_SECTION(msk, val)		\
	END_FTR_SECTION_NESTED(msk, val, 97)

#define END_FTR_SECTION_NESTED_IFSET(msk, label)	\
	END_FTR_SECTION_NESTED((msk), (msk), label)

#define END_FTR_SECTION_IFSET(msk)	END_FTR_SECTION((msk), (msk))
#define END_FTR_SECTION_IFCLR(msk)	END_FTR_SECTION((msk), 0)

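/*
 * Illustrative usage from assembly; CPU_FTR_ALTIVEC and the mfspr are used
 * purely as an example. When the running CPU's features do not match the
 * (mask, value) pair, the boot-time fixup code (see apply_feature_fixups()
 * below) patches the guarded instructions out:
 *
 *	BEGIN_FTR_SECTION
 *		mfspr	r4,SPRN_VRSAVE
 *	END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 */
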
/* CPU feature sections with alternatives, use BEGIN_FTR_SECTION to start */
#define FTR_SECTION_ELSE	FTR_SECTION_ELSE_NESTED(97)
#define ALT_FTR_SECTION_END_NESTED(msk, val, label)	\
	MAKE_FTR_SECTION_ENTRY(msk, val, label, __ftr_fixup)
#define ALT_FTR_SECTION_END_NESTED_IFSET(msk, label)	\
	ALT_FTR_SECTION_END_NESTED(msk, msk, label)
#define ALT_FTR_SECTION_END_NESTED_IFCLR(msk, label)	\
	ALT_FTR_SECTION_END_NESTED(msk, 0, label)
#define ALT_FTR_SECTION_END(msk, val)	\
	ALT_FTR_SECTION_END_NESTED(msk, val, 97)
#define ALT_FTR_SECTION_END_IFSET(msk)	\
	ALT_FTR_SECTION_END_NESTED_IFSET(msk, 97)
#define ALT_FTR_SECTION_END_IFCLR(msk)	\
	ALT_FTR_SECTION_END_NESTED_IFCLR(msk, 97)

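/*
 * Illustrative usage with an alternative (else) body; again the feature bit
 * and instructions are only examples. When the feature test fails, the else
 * body is copied over the start of the if body and the remainder is patched
 * out, which is why CHECK_ALT_SIZE() requires the else body to be no larger
 * than the if body:
 *
 *	BEGIN_FTR_SECTION
 *		mfspr	r4,SPRN_VRSAVE
 *	FTR_SECTION_ELSE
 *		li	r4,0
 *	ALT_FTR_SECTION_END_IFSET(CPU_FTR_ALTIVEC)
 */
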
/* MMU feature dependent sections */
#define BEGIN_MMU_FTR_SECTION_NESTED(label)	START_FTR_SECTION(label)
#define BEGIN_MMU_FTR_SECTION			START_FTR_SECTION(97)

#define END_MMU_FTR_SECTION_NESTED(msk, val, label)		\
	FTR_SECTION_ELSE_NESTED(label)				\
	MAKE_FTR_SECTION_ENTRY(msk, val, label, __mmu_ftr_fixup)

#define END_MMU_FTR_SECTION(msk, val)		\
	END_MMU_FTR_SECTION_NESTED(msk, val, 97)

#define END_MMU_FTR_SECTION_NESTED_IFSET(msk, label)	\
	END_MMU_FTR_SECTION_NESTED((msk), (msk), label)

#define END_MMU_FTR_SECTION_NESTED_IFCLR(msk, label)	\
	END_MMU_FTR_SECTION_NESTED((msk), 0, label)

#define END_MMU_FTR_SECTION_IFSET(msk)	END_MMU_FTR_SECTION((msk), (msk))
#define END_MMU_FTR_SECTION_IFCLR(msk)	END_MMU_FTR_SECTION((msk), 0)

/* MMU feature sections with alternatives, use BEGIN_MMU_FTR_SECTION to start */
#define MMU_FTR_SECTION_ELSE_NESTED(label)	FTR_SECTION_ELSE_NESTED(label)
#define MMU_FTR_SECTION_ELSE	MMU_FTR_SECTION_ELSE_NESTED(97)
#define ALT_MMU_FTR_SECTION_END_NESTED(msk, val, label)	\
	MAKE_FTR_SECTION_ENTRY(msk, val, label, __mmu_ftr_fixup)
#define ALT_MMU_FTR_SECTION_END_NESTED_IFSET(msk, label)	\
	ALT_MMU_FTR_SECTION_END_NESTED(msk, msk, label)
#define ALT_MMU_FTR_SECTION_END_NESTED_IFCLR(msk, label)	\
	ALT_MMU_FTR_SECTION_END_NESTED(msk, 0, label)
#define ALT_MMU_FTR_SECTION_END(msk, val)	\
	ALT_MMU_FTR_SECTION_END_NESTED(msk, val, 97)
#define ALT_MMU_FTR_SECTION_END_IFSET(msk)	\
	ALT_MMU_FTR_SECTION_END_NESTED_IFSET(msk, 97)
#define ALT_MMU_FTR_SECTION_END_IFCLR(msk)	\
	ALT_MMU_FTR_SECTION_END_NESTED_IFCLR(msk, 97)

/* Firmware feature dependent sections */
#define BEGIN_FW_FTR_SECTION_NESTED(label)	START_FTR_SECTION(label)
#define BEGIN_FW_FTR_SECTION			START_FTR_SECTION(97)

#define END_FW_FTR_SECTION_NESTED(msk, val, label)		\
	FTR_SECTION_ELSE_NESTED(label)				\
	MAKE_FTR_SECTION_ENTRY(msk, val, label, __fw_ftr_fixup)

#define END_FW_FTR_SECTION(msk, val)		\
	END_FW_FTR_SECTION_NESTED(msk, val, 97)

#define END_FW_FTR_SECTION_IFSET(msk)	END_FW_FTR_SECTION((msk), (msk))
#define END_FW_FTR_SECTION_IFCLR(msk)	END_FW_FTR_SECTION((msk), 0)

/* Firmware feature sections with alternatives */
#define FW_FTR_SECTION_ELSE_NESTED(label)	FTR_SECTION_ELSE_NESTED(label)
#define FW_FTR_SECTION_ELSE	FTR_SECTION_ELSE_NESTED(97)
#define ALT_FW_FTR_SECTION_END_NESTED(msk, val, label)	\
	MAKE_FTR_SECTION_ENTRY(msk, val, label, __fw_ftr_fixup)
#define ALT_FW_FTR_SECTION_END_NESTED_IFSET(msk, label)	\
	ALT_FW_FTR_SECTION_END_NESTED(msk, msk, label)
#define ALT_FW_FTR_SECTION_END_NESTED_IFCLR(msk, label)	\
	ALT_FW_FTR_SECTION_END_NESTED(msk, 0, label)
#define ALT_FW_FTR_SECTION_END(msk, val)	\
	ALT_FW_FTR_SECTION_END_NESTED(msk, val, 97)
#define ALT_FW_FTR_SECTION_END_IFSET(msk)	\
	ALT_FW_FTR_SECTION_END_NESTED_IFSET(msk, 97)
#define ALT_FW_FTR_SECTION_END_IFCLR(msk)	\
	ALT_FW_FTR_SECTION_END_NESTED_IFCLR(msk, 97)

#ifndef __ASSEMBLY__

#define ASM_FTR_IF(section_if, section_else, msk, val)	\
	stringify_in_c(BEGIN_FTR_SECTION)			\
	section_if "; "						\
	stringify_in_c(FTR_SECTION_ELSE)			\
	section_else "; "					\
	stringify_in_c(ALT_FTR_SECTION_END((msk), (val)))

#define ASM_FTR_IFSET(section_if, section_else, msk)	\
	ASM_FTR_IF(section_if, section_else, (msk), (msk))

#define ASM_FTR_IFCLR(section_if, section_else, msk)	\
	ASM_FTR_IF(section_if, section_else, (msk), 0)

#define ASM_MMU_FTR_IF(section_if, section_else, msk, val)	\
	stringify_in_c(BEGIN_MMU_FTR_SECTION)			\
	section_if "; "						\
	stringify_in_c(MMU_FTR_SECTION_ELSE)			\
	section_else "; "					\
	stringify_in_c(ALT_MMU_FTR_SECTION_END((msk), (val)))

#define ASM_MMU_FTR_IFSET(section_if, section_else, msk)	\
	ASM_MMU_FTR_IF(section_if, section_else, (msk), (msk))

#define ASM_MMU_FTR_IFCLR(section_if, section_else, msk)	\
	ASM_MMU_FTR_IF(section_if, section_else, (msk), 0)

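/*
 * Illustrative use from C (hypothetical operands): the feature mask must be
 * an assemble-time constant, so it is typically passed via an "i" constraint
 * and referenced as a numbered operand inside the instruction strings:
 *
 *	asm volatile(ASM_FTR_IFSET("mfspr %0,%1", "li %0,0", %2)
 *		     : "=r" (val)
 *		     : "i" (SPRN_PVR), "i" (CPU_FTR_ARCH_300));
 */
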
#endif /* __ASSEMBLY__ */

/* LWSYNC feature sections */
#define START_LWSYNC_SECTION(label)	label##1:
#define MAKE_LWSYNC_SECTION_ENTRY(label, sect)		\
label##2:						\
	.pushsection sect,"a";				\
	.align 2;					\
label##3:						\
	FTR_ENTRY_OFFSET label##1b-label##3b;		\
	.popsection;

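/*
 * Illustrative only: these macros record the location of a single barrier
 * instruction so that the boot-time fixup code can patch in a different
 * barrier (e.g. lwsync) where the CPU supports it, roughly:
 *
 *	START_LWSYNC_SECTION(96)
 *		isync
 *	MAKE_LWSYNC_SECTION_ENTRY(96, __lwsync_fixup)
 */
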
#define STF_ENTRY_BARRIER_FIXUP_SECTION			\
953:							\
	.pushsection __stf_entry_barrier_fixup,"a";	\
	.align 2;					\
954:							\
	FTR_ENTRY_OFFSET 953b-954b;			\
	.popsection;

#define STF_EXIT_BARRIER_FIXUP_SECTION			\
955:							\
	.pushsection __stf_exit_barrier_fixup,"a";	\
	.align 2;					\
956:							\
	FTR_ENTRY_OFFSET 955b-956b;			\
	.popsection;

#define UACCESS_FLUSH_FIXUP_SECTION			\
959:							\
	.pushsection __uaccess_flush_fixup,"a";		\
	.align 2;					\
960:							\
	FTR_ENTRY_OFFSET 959b-960b;			\
	.popsection;

#define ENTRY_FLUSH_FIXUP_SECTION			\
957:							\
	.pushsection __entry_flush_fixup,"a";		\
	.align 2;					\
958:							\
	FTR_ENTRY_OFFSET 957b-958b;			\
	.popsection;

#define SCV_ENTRY_FLUSH_FIXUP_SECTION			\
957:							\
	.pushsection __scv_entry_flush_fixup,"a";	\
	.align 2;					\
958:							\
	FTR_ENTRY_OFFSET 957b-958b;			\
	.popsection;

#define RFI_FLUSH_FIXUP_SECTION				\
951:							\
	.pushsection __rfi_flush_fixup,"a";		\
	.align 2;					\
952:							\
	FTR_ENTRY_OFFSET 951b-952b;			\
	.popsection;

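/*
 * Illustrative only: a flush/barrier patch site typically consists of one of
 * the section markers above followed by placeholder instructions (e.g. nops)
 * that the boot-time fixup code may overwrite with the required sequence,
 * along the lines of:
 *
 *	RFI_FLUSH_FIXUP_SECTION;
 *	nop;
 *	nop;
 *	nop
 */
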
#define NOSPEC_BARRIER_FIXUP_SECTION			\
953:							\
	.pushsection __barrier_nospec_fixup,"a";	\
	.align 2;					\
954:							\
	FTR_ENTRY_OFFSET 953b-954b;			\
	.popsection;

#define START_BTB_FLUSH_SECTION				\
955:

#define END_BTB_FLUSH_SECTION				\
956:							\
	.pushsection __btb_flush_fixup,"a";		\
	.align 2;					\
957:							\
	FTR_ENTRY_OFFSET 955b-957b;			\
	FTR_ENTRY_OFFSET 956b-957b;			\
	.popsection;

#ifndef __ASSEMBLY__
#include <linux/types.h>

extern long stf_barrier_fallback;
extern long entry_flush_fallback;
extern long scv_entry_flush_fallback;
extern long __start___stf_entry_barrier_fixup, __stop___stf_entry_barrier_fixup;
extern long __start___stf_exit_barrier_fixup, __stop___stf_exit_barrier_fixup;
extern long __start___uaccess_flush_fixup, __stop___uaccess_flush_fixup;
extern long __start___entry_flush_fixup, __stop___entry_flush_fixup;
extern long __start___scv_entry_flush_fixup, __stop___scv_entry_flush_fixup;
extern long __start___rfi_flush_fixup, __stop___rfi_flush_fixup;
extern long __start___barrier_nospec_fixup, __stop___barrier_nospec_fixup;
extern long __start__btb_flush_fixup, __stop__btb_flush_fixup;

void apply_feature_fixups(void);
void setup_feature_keys(void);
#endif /* __ASSEMBLY__ */

#endif /* __ASM_POWERPC_FEATURE_FIXUPS_H */