xref: /linux/arch/x86/include/asm/alternative.h (revision e3610441d1fb47b1f00e4c38bdf333176e824729)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ALTERNATIVE_H
#define _ASM_X86_ALTERNATIVE_H

#include <linux/types.h>
#include <linux/stringify.h>
#include <linux/objtool.h>
#include <asm/asm.h>

#define ALT_FLAGS_SHIFT		16

#define ALT_FLAG_NOT		(1 << 0)
#define ALT_NOT(feature)	((ALT_FLAG_NOT << ALT_FLAGS_SHIFT) | (feature))
#define ALT_FLAG_DIRECT_CALL	(1 << 1)
#define ALT_DIRECT_CALL(feature) ((ALT_FLAG_DIRECT_CALL << ALT_FLAGS_SHIFT) | (feature))
#define ALT_CALL_ALWAYS		ALT_DIRECT_CALL(X86_FEATURE_ALWAYS)

#ifndef __ASSEMBLY__

#include <linux/stddef.h>

/*
 * Alternative inline assembly for SMP.
 *
 * The LOCK_PREFIX macro defined here replaces the LOCK and
 * LOCK_PREFIX macros used everywhere in the source tree.
 *
 * SMP alternatives use the same data structures as the other
 * alternatives and the X86_FEATURE_UP flag to indicate the case of a
 * UP system running an SMP kernel.  The existing apply_alternatives()
 * works fine for patching an SMP kernel for UP.
 *
 * The SMP alternative tables can be kept after boot and contain both
 * UP and SMP versions of the instructions to allow switching back to
 * SMP at runtime, when hotplugging in a new CPU, which is especially
 * useful in virtualized environments.
 *
 * The very common lock prefix is handled as a special case in a
 * separate table which is a pure address list without replacement
 * pointer and size information.  That keeps the table sizes small.
 */

#ifdef CONFIG_SMP
#define LOCK_PREFIX_HERE \
		".pushsection .smp_locks,\"a\"\n"	\
		".balign 4\n"				\
		".long 671f - .\n" /* offset */		\
		".popsection\n"				\
		"671:"

#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "

#else /* ! CONFIG_SMP */
#define LOCK_PREFIX_HERE ""
#define LOCK_PREFIX ""
#endif
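
/*
 * Usage sketch (illustrative, not part of this header): LOCK_PREFIX is
 * prepended to an instruction inside inline asm, the way the x86 atomic
 * ops use it, e.g.:
 *
 *	asm volatile(LOCK_PREFIX "addl %1, %0"
 *		     : "+m" (v->counter) : "ir" (i) : "memory");
 *
 * On SMP this emits "lock; addl" and records the location of the lock
 * prefix in .smp_locks so it can be patched out when running UP; on
 * !CONFIG_SMP builds the prefix expands to nothing at compile time.
 */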

/*
 * The patching flags are part of the upper bits of the @ft_flags parameter when
 * specifying them. The split is currently like this:
 *
 * [31... flags ...16][15... CPUID feature bit ...0]
 *
 * but since this is all hidden behind the macros' argument splitting, those
 * fields can be extended in the future to fit in a u64 or however the need
 * arises.
 */
struct alt_instr {
	s32 instr_offset;	/* original instruction */
	s32 repl_offset;	/* offset to replacement instruction */

	union {
		struct {
			u32 cpuid: 16;	/* CPUID bit set for replacement */
			u32 flags: 16;	/* patching control flags */
		};
		u32 ft_flags;
	};

	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction */
} __packed;
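
/*
 * Encoding example (illustrative): ALT_NOT(X86_FEATURE_XENPV) evaluates
 * to (ALT_FLAG_NOT << 16) | X86_FEATURE_XENPV, i.e. .cpuid =
 * X86_FEATURE_XENPV and .flags = ALT_FLAG_NOT in the layout above, so
 * the replacement is applied only when the feature bit is NOT set.
 */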

extern struct alt_instr __alt_instructions[], __alt_instructions_end[];

/*
 * Debug flag that can be tested to see whether alternative
 * instructions were patched in already:
 */
extern int alternatives_patched;
struct module;

extern void alternative_instructions(void);
extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end,
			       struct module *mod);
extern void apply_retpolines(s32 *start, s32 *end, struct module *mod);
extern void apply_returns(s32 *start, s32 *end, struct module *mod);
extern void apply_seal_endbr(s32 *start, s32 *end, struct module *mod);
extern void apply_fineibt(s32 *start_retpoline, s32 *end_retpoline,
			  s32 *start_cfi, s32 *end_cfi, struct module *mod);

struct callthunk_sites {
	s32				*call_start, *call_end;
	struct alt_instr		*alt_start, *alt_end;
};

#ifdef CONFIG_CALL_THUNKS
extern void callthunks_patch_builtin_calls(void);
extern void callthunks_patch_module_calls(struct callthunk_sites *sites,
					  struct module *mod);
extern void *callthunks_translate_call_dest(void *dest);
extern int x86_call_depth_emit_accounting(u8 **pprog, void *func, void *ip);
#else
static __always_inline void callthunks_patch_builtin_calls(void) {}
static __always_inline void
callthunks_patch_module_calls(struct callthunk_sites *sites,
			      struct module *mod) {}
static __always_inline void *callthunks_translate_call_dest(void *dest)
{
	return dest;
}
static __always_inline int x86_call_depth_emit_accounting(u8 **pprog,
							  void *func, void *ip)
{
	return 0;
}
#endif

#ifdef CONFIG_SMP
extern void alternatives_smp_module_add(struct module *mod, char *name,
					void *locks, void *locks_end,
					void *text, void *text_end);
extern void alternatives_smp_module_del(struct module *mod);
extern void alternatives_enable_smp(void);
extern int alternatives_text_reserved(void *start, void *end);
extern bool skip_smp_alternatives;
#else
static inline void alternatives_smp_module_add(struct module *mod, char *name,
					       void *locks, void *locks_end,
					       void *text, void *text_end) {}
static inline void alternatives_smp_module_del(struct module *mod) {}
static inline void alternatives_enable_smp(void) {}
static inline int alternatives_text_reserved(void *start, void *end)
{
	return 0;
}
#endif	/* CONFIG_SMP */

#define ALT_CALL_INSTR		"call BUG_func"

#define alt_slen		"772b-771b"
#define alt_total_slen		"773b-771b"
#define alt_rlen		"775f-774f"

#define OLDINSTR(oldinstr)						\
	"# ALT: oldinstr\n"						\
	"771:\n\t" oldinstr "\n772:\n"					\
	"# ALT: padding\n"						\
	".skip -(((" alt_rlen ")-(" alt_slen ")) > 0) * "		\
		"((" alt_rlen ")-(" alt_slen ")),0x90\n"		\
	"773:\n"

#define ALTINSTR_ENTRY(ft_flags)					      \
	".pushsection .altinstructions,\"a\"\n"				      \
	" .long 771b - .\n"				/* label           */ \
	" .long 774f - .\n"				/* new instruction */ \
	" .4byte " __stringify(ft_flags) "\n"		/* feature + flags */ \
	" .byte " alt_total_slen "\n"			/* source len      */ \
	" .byte " alt_rlen "\n"				/* replacement len */ \
	".popsection\n"

#define ALTINSTR_REPLACEMENT(newinstr)		/* replacement */	\
	".pushsection .altinstr_replacement, \"ax\"\n"			\
	"# ALT: replacement\n"						\
	"774:\n\t" newinstr "\n775:\n"					\
	".popsection\n"

/* alternative assembly primitive: */
#define ALTERNATIVE(oldinstr, newinstr, ft_flags)			\
	OLDINSTR(oldinstr)						\
	ALTINSTR_ENTRY(ft_flags)					\
	ALTINSTR_REPLACEMENT(newinstr)
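
/*
 * Expansion sketch (illustrative): ALTERNATIVE("old", "new", feature)
 * emits, in the text section:
 *
 *	771:	old			# original instruction(s)
 *	772:				# end of "old" proper
 *		.skip ..., 0x90		# NOP padding, see below
 *	773:				# end of the padded original
 *
 * plus a struct alt_instr entry in .altinstructions and the replacement
 * between labels 774 and 775 in .altinstr_replacement.  The .skip
 * expression pads only when the replacement is longer than the original:
 * in GAS, (r - s) > 0 evaluates to -1 when true, so
 * -((r - s) > 0) * (r - s) is (r - s) bytes of 0x90 in that case and
 * 0 otherwise.
 */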

#define ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
	ALTERNATIVE(ALTERNATIVE(oldinstr, newinstr1, ft_flags1), newinstr2, ft_flags2)

/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
	ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, newinstr_yes, ft_flags)
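
/*
 * Usage sketch (hypothetical feature and instruction strings):
 *
 *	asm_inline volatile(ALTERNATIVE_TERNARY("# placeholder",
 *						X86_FEATURE_FOO,
 *						"instr_if_set",
 *						"instr_if_clear"));
 *
 * Since X86_FEATURE_ALWAYS is always set, one of the two replacements
 * is always patched in over @oldinstr.
 */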

#define ALTERNATIVE_3(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, \
			newinstr3, ft_flags3)				\
	ALTERNATIVE(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2), \
		      newinstr3, ft_flags3)

/*
 * Alternative instructions for different CPU types or capabilities.
 *
 * This allows the use of optimized instructions even on generic binary
 * kernels.
 *
 * The length of oldinstr must be greater than or equal to the length of
 * newinstr; it can be padded with NOPs as needed.
 *
 * For non-barrier-like inlines please define new variants
 * without volatile and memory clobber.
 */
#define alternative(oldinstr, newinstr, ft_flags)			\
	asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")

#define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
	asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
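
/*
 * Usage sketch (illustrative), in the spirit of the classic 32-bit mb()
 * definition: patch the LOCK-prefixed dummy add into an MFENCE once SSE2
 * is known to be available:
 *
 *	alternative("lock; addl $0,-4(%%esp)", "mfence", X86_FEATURE_XMM2);
 */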

/*
 * Alternative inline assembly with input.
 *
 * Peculiarities:
 * No memory clobber here.
 * Argument numbers start with 1.
 * An unused argument 0 is kept for API compatibility.
 */
#define alternative_input(oldinstr, newinstr, ft_flags, input...)	\
	asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
		: : "i" (0), ## input)

/* Like alternative_input, but with a single output argument */
#define alternative_io(oldinstr, newinstr, ft_flags, output, input...)	\
	asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags)	\
		: output : "i" (0), ## input)
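
/*
 * Usage sketch (hypothetical operands): load a byte-swapped value into
 * %0 with a single MOVBE when the CPU supports it, a MOV plus BSWAP
 * otherwise; both sequences are behaviorally equivalent:
 *
 *	alternative_io("movl %1, %0; bswapl %0", "movbe %1, %0",
 *		       X86_FEATURE_MOVBE, "=r" (val), "m" (src));
 */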

/*
 * Like alternative_io, but for replacing a direct call with another one.
 *
 * Use the %c operand modifier which is the generic way to print a bare
 * constant expression with all syntax-specific punctuation omitted. %P
 * is the x86-specific variant which can handle constants too, for
 * historical reasons, but it should be used primarily for PIC
 * references: i.e., if used for a function, it would add the PLT
 * suffix.
 */
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...)			\
	asm_inline volatile(ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags)	\
		: ALT_OUTPUT_SP(output)							\
		: [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
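
/*
 * Usage sketch (hypothetical functions): call new_fn() on CPUs with the
 * feature bit set, old_fn() otherwise; both must share one calling
 * convention, and real callers must also describe any registers the
 * callee clobbers:
 *
 *	alternative_call(old_fn, new_fn, X86_FEATURE_FOO,
 *			 "=a" (ret), "D" (arg));
 */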

/*
 * Like alternative_call, but there are two features and respective functions.
 * If the CPU has @feature2, @function2 is used.
 * Otherwise, if the CPU has @feature1, @function1 is used.
 * Otherwise, the old function is used.
 */
#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2,		\
			   output, input...)						\
	asm_inline volatile(ALTERNATIVE_2("call %c[old]", "call %c[new1]", ft_flags1,	\
		"call %c[new2]", ft_flags2)						\
		: ALT_OUTPUT_SP(output)							\
		: [old] "i" (oldfunc), [new1] "i" (newfunc1),				\
		  [new2] "i" (newfunc2), ## input)

/*
 * Use this macro if you need more than one output parameter in
 * alternative_io().
 */
#define ASM_OUTPUT2(a...) a

/*
 * Use this macro if you need clobbers but no inputs in
 * alternative_{input,io,call}().
 */
#define ASM_NO_INPUT_CLOBBER(clbr...) "i" (0) : clbr
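
/*
 * Usage sketch (hypothetical operands): pass a clobber list where
 * alternative_input() expects its inputs:
 *
 *	alternative_input("", "sfence", X86_FEATURE_XMM,
 *			  ASM_NO_INPUT_CLOBBER("memory"));
 */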

#define ALT_OUTPUT_SP(...) ASM_CALL_CONSTRAINT, ## __VA_ARGS__

/* Macro for creating assembler functions avoiding any C magic. */
#define DEFINE_ASM_FUNC(func, instr, sec)		\
	asm (".pushsection " #sec ", \"ax\"\n"		\
	     ".global " #func "\n\t"			\
	     ".type " #func ", @function\n\t"		\
	     ASM_FUNC_ALIGN "\n"			\
	     #func ":\n\t"				\
	     ASM_ENDBR					\
	     instr "\n\t"				\
	     ASM_RET					\
	     ".size " #func ", . - " #func "\n\t"	\
	     ".popsection")

void BUG_func(void);
void nop_func(void);
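
/*
 * Definition sketch: arch/x86/kernel/alternative.c instantiates these
 * two with DEFINE_ASM_FUNC(), along the lines of:
 *
 *	DEFINE_ASM_FUNC(BUG_func, "ud2", .text);
 *	DEFINE_ASM_FUNC(nop_func, "", .entry.text);
 */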

#else /* __ASSEMBLY__ */

#ifdef CONFIG_SMP
	.macro LOCK_PREFIX
672:	lock
	.pushsection .smp_locks,"a"
	.balign 4
	.long 672b - .
	.popsection
	.endm
#else
	.macro LOCK_PREFIX
	.endm
#endif

/*
 * Issue one struct alt_instr descriptor entry (need to put it into
 * the section .altinstructions, see below). This entry contains
 * enough information for the alternatives patching code to patch an
 * instruction. See apply_alternatives().
 */
.macro altinstr_entry orig alt ft_flags orig_len alt_len
	.long \orig - .
	.long \alt - .
	.4byte \ft_flags
	.byte \orig_len
	.byte \alt_len
.endm

.macro ALT_CALL_INSTR
	call BUG_func
.endm

/*
 * Define an alternative between two instructions. If @feature is
 * present, early code in apply_alternatives() replaces @oldinstr with
 * @newinstr. The ".skip" directive takes care of proper instruction
 * padding in case @newinstr is longer than @oldinstr.
 */
#define __ALTERNATIVE(oldinst, newinst, flag)				\
740:									\
	oldinst	;							\
741:									\
	.skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90	;\
742:									\
	.pushsection .altinstructions,"a" ;				\
	altinstr_entry 740b,743f,flag,742b-740b,744f-743f ;		\
	.popsection ;							\
	.pushsection .altinstr_replacement,"ax"	;			\
743:									\
	newinst	;							\
744:									\
	.popsection ;

.macro ALTERNATIVE oldinstr, newinstr, ft_flags
	__ALTERNATIVE(\oldinstr, \newinstr, \ft_flags)
.endm
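
/*
 * Usage sketch in assembly (illustrative), as in the entry code's PTI
 * handling: NOP out a jump once the feature is known to be set, so the
 * code that follows is executed:
 *
 *	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
 */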

#define old_len			141b-140b
#define new_len1		144f-143f
#define new_len2		145f-144f
#define new_len3		146f-145f

/*
 * Same as the ALTERNATIVE macro above, but for two alternatives. If the
 * CPU has @feature1, it replaces @oldinstr with @newinstr1. If the CPU
 * has @feature2, it replaces @oldinstr with @newinstr2.
 */
.macro ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
	__ALTERNATIVE(__ALTERNATIVE(\oldinstr, \newinstr1, \ft_flags1),
		      \newinstr2, \ft_flags2)
.endm

.macro ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
	__ALTERNATIVE(ALTERNATIVE_2(\oldinstr, \newinstr1, \ft_flags1, \newinstr2, \ft_flags2),
		      \newinstr3, \ft_flags3)
.endm

/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
	ALTERNATIVE_2 oldinstr, newinstr_no, X86_FEATURE_ALWAYS,	\
	newinstr_yes, ft_flags

#endif /* __ASSEMBLY__ */

#endif /* _ASM_X86_ALTERNATIVE_H */