xref: /linux/arch/x86/include/asm/alternative.h (revision f694f30e81c4ade358eb8c75273bac1a48f0cb8f)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_X86_ALTERNATIVE_H
3 #define _ASM_X86_ALTERNATIVE_H
4 
5 #include <linux/types.h>
6 #include <linux/stringify.h>
7 #include <linux/objtool.h>
8 #include <asm/asm.h>
9 
10 #define ALT_FLAGS_SHIFT		16
11 
12 #define ALT_FLAG_NOT		(1 << 0)
13 #define ALT_NOT(feature)	((ALT_FLAG_NOT << ALT_FLAGS_SHIFT) | (feature))
14 #define ALT_FLAG_DIRECT_CALL	(1 << 1)
15 #define ALT_DIRECT_CALL(feature) ((ALT_FLAG_DIRECT_CALL << ALT_FLAGS_SHIFT) | (feature))
16 #define ALT_CALL_ALWAYS		ALT_DIRECT_CALL(X86_FEATURE_ALWAYS)
17 
18 #ifndef __ASSEMBLER__
19 
20 #include <linux/stddef.h>
21 
22 /*
23  * Alternative inline assembly for SMP.
24  *
25  * The LOCK_PREFIX macro defined here replaces the LOCK and
26  * LOCK_PREFIX macros used everywhere in the source tree.
27  *
28  * SMP alternatives use the same data structures as the other
29  * alternatives and the X86_FEATURE_UP flag to indicate the case of a
30  * UP system running an SMP kernel.  The existing apply_alternatives()
31  * works fine for patching an SMP kernel for UP.
32  *
33  * The SMP alternative tables can be kept after boot and contain both
34  * UP and SMP versions of the instructions to allow switching back to
35  * SMP at runtime, when hotplugging in a new CPU, which is especially
36  * useful in virtualized environments.
37  *
38  * The very common lock prefix is handled as a special case in a
39  * separate table which is a pure address list without replacement ptr
40  * and size information.  That keeps the table sizes small.
41  */
42 
43 #ifdef CONFIG_SMP
44 #define LOCK_PREFIX_HERE \
45 		".pushsection .smp_locks,\"a\"\n"	\
46 		".balign 4\n"				\
47 		".long 671f - .\n" /* offset */		\
48 		".popsection\n"				\
49 		"671:"
50 
51 #define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock "
52 
53 #else /* ! CONFIG_SMP */
54 #define LOCK_PREFIX_HERE ""
55 #define LOCK_PREFIX ""
56 #endif
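/*
 * Illustrative sketch (not part of this header): LOCK_PREFIX is pasted in
 * front of an instruction inside inline assembly, roughly the way the
 * arch/x86 atomics use it; the helper below is hypothetical:
 *
 *	static inline void example_locked_inc(int *counter)
 *	{
 *		asm volatile(LOCK_PREFIX "incl %0"
 *			     : "+m" (*counter) : : "memory");
 *	}
 *
 * On CONFIG_SMP the "lock" byte is emitted and its address recorded in
 * .smp_locks so it can be patched out on a UP system; on !CONFIG_SMP the
 * prefix expands to nothing.
 */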
57 
58 /*
59  * The patching flags occupy the upper bits of the @ft_flags parameter.
60  * The split is currently like this:
61  *
62  * [31... flags ...16][15... CPUID feature bit ...0]
63  *
64  * but since the split is hidden inside the macro argument, those fields can be
65  * extended in the future to fit in a u64 or however the need arises.
66  */
67 struct alt_instr {
68 	s32 instr_offset;	/* original instruction */
69 	s32 repl_offset;	/* offset to replacement instruction */
70 
71 	union {
72 		struct {
73 			u32 cpuid: 16;	/* CPUID bit set for replacement */
74 			u32 flags: 16;	/* patching control flags */
75 		};
76 		u32 ft_flags;
77 	};
78 
79 	u8  instrlen;		/* length of original instruction */
80 	u8  replacementlen;	/* length of new instruction */
81 } __packed;
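/*
 * Worked example of the @ft_flags split, derived purely from the definitions
 * above (the feature bit is chosen only for illustration):
 *
 *	ALT_NOT(X86_FEATURE_XENPV)
 *		== (ALT_FLAG_NOT << ALT_FLAGS_SHIFT) | X86_FEATURE_XENPV
 *
 * which lands in struct alt_instr as .flags == ALT_FLAG_NOT and
 * .cpuid == X86_FEATURE_XENPV, i.e. "use the replacement when the feature
 * is NOT set".
 */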
82 
83 extern struct alt_instr __alt_instructions[], __alt_instructions_end[];
84 
85 /*
86  * Debug flag that can be tested to see whether alternative
87  * instructions were patched in already:
88  */
89 extern int alternatives_patched;
90 
91 extern void alternative_instructions(void);
92 extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
93 extern void apply_retpolines(s32 *start, s32 *end);
94 extern void apply_returns(s32 *start, s32 *end);
95 extern void apply_seal_endbr(s32 *start, s32 *end);
96 extern void apply_fineibt(s32 *start_retpoline, s32 *end_retpoline,
97 			  s32 *start_cfi, s32 *end_cfi);
98 
99 struct module;
100 
101 struct callthunk_sites {
102 	s32				*call_start, *call_end;
103 };
104 
105 #ifdef CONFIG_CALL_THUNKS
106 extern void callthunks_patch_builtin_calls(void);
107 extern void callthunks_patch_module_calls(struct callthunk_sites *sites,
108 					  struct module *mod);
109 extern void *callthunks_translate_call_dest(void *dest);
110 extern int x86_call_depth_emit_accounting(u8 **pprog, void *func, void *ip);
111 #else
112 static __always_inline void callthunks_patch_builtin_calls(void) {}
113 static __always_inline void
114 callthunks_patch_module_calls(struct callthunk_sites *sites,
115 			      struct module *mod) {}
116 static __always_inline void *callthunks_translate_call_dest(void *dest)
117 {
118 	return dest;
119 }
120 static __always_inline int x86_call_depth_emit_accounting(u8 **pprog,
121 							  void *func, void *ip)
122 {
123 	return 0;
124 }
125 #endif
126 
127 #ifdef CONFIG_SMP
128 extern void alternatives_smp_module_add(struct module *mod, char *name,
129 					void *locks, void *locks_end,
130 					void *text, void *text_end);
131 extern void alternatives_smp_module_del(struct module *mod);
132 extern void alternatives_enable_smp(void);
133 extern int alternatives_text_reserved(void *start, void *end);
134 extern bool skip_smp_alternatives;
135 #else
136 static inline void alternatives_smp_module_add(struct module *mod, char *name,
137 					       void *locks, void *locks_end,
138 					       void *text, void *text_end) {}
139 static inline void alternatives_smp_module_del(struct module *mod) {}
140 static inline void alternatives_enable_smp(void) {}
141 static inline int alternatives_text_reserved(void *start, void *end)
142 {
143 	return 0;
144 }
145 #endif	/* CONFIG_SMP */
146 
147 #define ALT_CALL_INSTR		"call BUG_func"
148 
149 #define alt_slen		"772b-771b"
150 #define alt_total_slen		"773b-771b"
151 #define alt_rlen		"775f-774f"
152 
153 #define OLDINSTR(oldinstr)						\
154 	"# ALT: oldinstr\n"						\
155 	"771:\n\t" oldinstr "\n772:\n"					\
156 	"# ALT: padding\n"						\
157 	".skip -(((" alt_rlen ")-(" alt_slen ")) > 0) * "		\
158 		"((" alt_rlen ")-(" alt_slen ")),0x90\n"		\
159 	"773:\n"
160 
161 #define ALTINSTR_ENTRY(ft_flags)					      \
162 	".pushsection .altinstructions,\"a\"\n"				      \
163 	" .long 771b - .\n"				/* label           */ \
164 	" .long 774f - .\n"				/* new instruction */ \
165 	" .4byte " __stringify(ft_flags) "\n"		/* feature + flags */ \
166 	" .byte " alt_total_slen "\n"			/* source len      */ \
167 	" .byte " alt_rlen "\n"				/* replacement len */ \
168 	".popsection\n"
169 
170 #define ALTINSTR_REPLACEMENT(newinstr)		/* replacement */	\
171 	".pushsection .altinstr_replacement, \"ax\"\n"			\
172 	"# ALT: replacement\n"						\
173 	"774:\n\t" newinstr "\n775:\n"					\
174 	".popsection\n"
175 
176 /* alternative assembly primitive: */
177 #define ALTERNATIVE(oldinstr, newinstr, ft_flags)			\
178 	OLDINSTR(oldinstr)						\
179 	ALTINSTR_ENTRY(ft_flags)					\
180 	ALTINSTR_REPLACEMENT(newinstr)
181 
182 #define ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
183 	ALTERNATIVE(ALTERNATIVE(oldinstr, newinstr1, ft_flags1), newinstr2, ft_flags2)
184 
185 /* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
186 #define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
187 	ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, newinstr_yes, ft_flags)
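/*
 * Illustrative sketch (instructions and feature bit chosen only for the
 * example): because @newinstr_no is tied to X86_FEATURE_ALWAYS and later
 * alternative entries win, one of the two replacements is always patched in:
 *
 *	ALTERNATIVE_TERNARY("nop", X86_FEATURE_XMM2, "lfence", "mfence")
 *
 * ends up as "lfence" on CPUs with X86_FEATURE_XMM2 and "mfence" otherwise;
 * @oldinstr only reserves space.
 */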
188 
189 #define ALTERNATIVE_3(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, \
190 			newinstr3, ft_flags3)				\
191 	ALTERNATIVE(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2), \
192 		      newinstr3, ft_flags3)
193 
194 /*
195  * Alternative instructions for different CPU types or capabilities.
196  *
197  * This allows the use of optimized instructions even on generic binary
198  * kernels.
199  *
200  * The length of oldinstr must be longer than or equal to the length of
201  * newinstr; it can be padded with NOPs as needed.
202  *
203  * For non-barrier-like inlines please define new variants
204  * without volatile and memory clobber.
205  */
206 #define alternative(oldinstr, newinstr, ft_flags)			\
207 	asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
208 
209 #define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
210 	asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
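/*
 * Illustrative usage sketch, loosely modeled on the 32-bit memory barrier
 * definitions (treat the exact instructions as an example only): the
 * compiler emits @oldinstr and the patcher swaps in @newinstr once the CPU
 * is known to have the feature:
 *
 *	alternative("lock; addl $0,-4(%%esp)", "mfence", X86_FEATURE_XMM2);
 */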
211 
212 /*
213  * Alternative inline assembly with input.
214  *
215  * Peculiarities:
216  * No memory clobber here.
217  * Argument numbers start with 1.
218  * An unused argument 0 is left in place to keep API compatibility.
219  */
220 #define alternative_input(oldinstr, newinstr, ft_flags, input...)	\
221 	asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
222 		: : "i" (0), ## input)
223 
224 /* Like alternative_input, but with a single output argument */
225 #define alternative_io(oldinstr, newinstr, ft_flags, output, input...)	\
226 	asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags)	\
227 		: output : "i" (0), ## input)
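/*
 * Illustrative sketch, patterned after the prefetch helpers in
 * <asm/processor.h> (details from memory, treat as an example only):
 *
 *	static inline void example_prefetch(const void *x)
 *	{
 *		alternative_input("prefetcht0 %1", "prefetchnta %1",
 *				  X86_FEATURE_XMM,
 *				  "m" (*(const char *)x));
 *	}
 *
 * The memory operand is %1 because operand 0 is the dummy "i" (0) input.
 */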
228 
229 /*
230  * Like alternative_io, but for replacing a direct call with another one.
231  *
232  * Use the %c operand modifier which is the generic way to print a bare
233  * constant expression with all syntax-specific punctuation omitted. %P
234  * is the x86-specific variant which can handle constants too, for
235  * historical reasons, but it should be used primarily for PIC
236  * references: i.e., if used for a function, it would add the PLT
237  * suffix.
238  */
239 #define alternative_call(oldfunc, newfunc, ft_flags, output, input, clobbers...)	\
240 	asm_inline volatile(ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags)	\
241 		: ALT_OUTPUT_SP(output)							\
242 		: [old] "i" (oldfunc), [new] "i" (newfunc)				\
243 		  COMMA(input)								\
244 		: clobbers)
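/*
 * Illustrative sketch (old_impl/new_impl and the constraint lists are
 * hypothetical; a real caller must also name every register the called
 * function may clobber):
 *
 *	long ret;
 *
 *	alternative_call(old_impl, new_impl, X86_FEATURE_XMM2,
 *			 "=a" (ret),				// output
 *			 "D" (arg),				// input
 *			 "memory", "rcx", "rdx", "rsi", "r8", "r9", "r10", "r11");
 */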
245 
246 /*
247  * Like alternative_call, but there are two features and respective functions.
248  * If CPU has feature2, function2 is used.
249  * Otherwise, if CPU has feature1, function1 is used.
250  * Otherwise, old function is used.
251  */
252 #define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2,		\
253 			   output, input, clobbers...)					\
254 	asm_inline volatile(ALTERNATIVE_2("call %c[old]", "call %c[new1]", ft_flags1,	\
255 		"call %c[new2]", ft_flags2)						\
256 		: ALT_OUTPUT_SP(output)							\
257 		: [old] "i" (oldfunc), [new1] "i" (newfunc1),				\
258 		  [new2] "i" (newfunc2)							\
259 		  COMMA(input)								\
260 		: clobbers)
261 
262 #define ALT_OUTPUT_SP(...) ASM_CALL_CONSTRAINT, ## __VA_ARGS__
263 
264 /* Macro for creating assembler functions avoiding any C magic. */
265 #define DEFINE_ASM_FUNC(func, instr, sec)		\
266 	asm (".pushsection " #sec ", \"ax\"\n"		\
267 	     ".global " #func "\n\t"			\
268 	     ".type " #func ", @function\n\t"		\
269 	     ASM_FUNC_ALIGN "\n"			\
270 	     #func ":\n\t"				\
271 	     ASM_ENDBR					\
272 	     instr "\n\t"				\
273 	     ASM_RET					\
274 	     ".size " #func ", . - " #func "\n\t"	\
275 	     ".popsection")
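/*
 * Illustrative sketch: the patching code defines its dummy call targets this
 * way (from memory of arch/x86/kernel/alternative.c; treat the section name
 * as an assumption):
 *
 *	DEFINE_ASM_FUNC(nop_func, "", .entry.text);
 *	DEFINE_ASM_FUNC(BUG_func, "ud2", .entry.text);
 *
 * Each expands to a global, aligned, properly sized assembler function made
 * of an optional ENDBR, @instr and a RET.
 */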
276 
277 void BUG_func(void);
278 void nop_func(void);
279 
280 #else /* __ASSEMBLER__ */
281 
282 #ifdef CONFIG_SMP
283 	.macro LOCK_PREFIX
284 672:	lock
285 	.pushsection .smp_locks,"a"
286 	.balign 4
287 	.long 672b - .
288 	.popsection
289 	.endm
290 #else
291 	.macro LOCK_PREFIX
292 	.endm
293 #endif
294 
295 /*
296  * Issue one struct alt_instr descriptor entry (it needs to be placed in
297  * the .altinstructions section, see below). This entry contains
298  * enough information for the alternatives patching code to patch an
299  * instruction. See apply_alternatives().
300  */
301 .macro altinstr_entry orig alt ft_flags orig_len alt_len
302 	.long \orig - .
303 	.long \alt - .
304 	.4byte \ft_flags
305 	.byte \orig_len
306 	.byte \alt_len
307 .endm
308 
309 .macro ALT_CALL_INSTR
310 	call BUG_func
311 .endm
312 
313 /*
314  * Define an alternative between two instructions. If @feature is
315  * present, early code in apply_alternatives() replaces @oldinstr with
316  * @newinstr. The ".skip" directive takes care of proper instruction padding
317  * in case @newinstr is longer than @oldinstr.
318  */
319 #define __ALTERNATIVE(oldinst, newinst, flag)				\
320 740:									\
321 	oldinst	;							\
322 741:									\
323 	.skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90	;\
324 742:									\
325 	.pushsection .altinstructions,"a" ;				\
326 	altinstr_entry 740b,743f,flag,742b-740b,744f-743f ;		\
327 	.popsection ;							\
328 	.pushsection .altinstr_replacement,"ax"	;			\
329 743:									\
330 	newinst	;							\
331 744:									\
332 	.popsection ;
333 
334 .macro ALTERNATIVE oldinstr, newinstr, ft_flags
335 	__ALTERNATIVE(\oldinstr, \newinstr, \ft_flags)
336 .endm
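/*
 * Illustrative sketch of how .S files typically use the macro above
 * (label name and feature bit follow the common "jump over the block by
 * default, NOP the jump out when the feature is set" pattern; treat the
 * details as an example only):
 *
 *	ALTERNATIVE "jmp .Lskip_pti", "", X86_FEATURE_PTI
 *		... code that only runs when PTI is enabled ...
 * .Lskip_pti:
 */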
337 
338 #define old_len			141b-140b
339 #define new_len1		144f-143f
340 #define new_len2		145f-144f
341 #define new_len3		146f-145f
342 
343 /*
344  * Same as the ALTERNATIVE macro above but for two alternatives. If the CPU
345  * has @feature1, it replaces @oldinstr with @newinstr1. If the CPU has
346  * @feature2, it replaces @oldinstr with @newinstr2.
347  */
348 .macro ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
349 	__ALTERNATIVE(__ALTERNATIVE(\oldinstr, \newinstr1, \ft_flags1),
350 		      \newinstr2, \ft_flags2)
351 .endm
352 
353 .macro ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
354 	__ALTERNATIVE(ALTERNATIVE_2(\oldinstr, \newinstr1, \ft_flags1, \newinstr2, \ft_flags2),
355 		      \newinstr3, \ft_flags3)
356 .endm
357 
358 /* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
359 #define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
360 	ALTERNATIVE_2 oldinstr, newinstr_no, X86_FEATURE_ALWAYS,	\
361 	newinstr_yes, ft_flags
362 
363 #endif /* __ASSEMBLER__ */
364 
365 #endif /* _ASM_X86_ALTERNATIVE_H */
366