/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_ALTERNATIVE_MACROS_H
#define __ASM_ALTERNATIVE_MACROS_H

#include <linux/const.h>
#include <vdso/bits.h>

#include <asm/cpucaps.h>
#include <asm/insn-def.h>

/*
 * Binutils 2.27.0 can't handle a 'UL' suffix on constants, so for the assembly
 * macros below we must use `(1 << ARM64_CB_SHIFT)`.
 */
#define ARM64_CB_SHIFT	15
#define ARM64_CB_BIT	BIT(ARM64_CB_SHIFT)

#if ARM64_NCAPS >= ARM64_CB_BIT
#error "cpucaps have overflown ARM64_CB_BIT"
#endif

#ifndef __ASSEMBLY__

#include <linux/stringify.h>

#define ALTINSTR_ENTRY(cpucap)					              \
	" .word 661b - .\n"				/* label           */ \
	" .word 663f - .\n"				/* new instruction */ \
	" .hword " __stringify(cpucap) "\n"		/* cpucap          */ \
	" .byte 662b-661b\n"				/* source len      */ \
	" .byte 664f-663f\n"				/* replacement len */

#define ALTINSTR_ENTRY_CB(cpucap, cb)					      \
	" .word 661b - .\n"				/* label           */ \
	" .word " __stringify(cb) "- .\n"		/* callback        */ \
	" .hword " __stringify(cpucap) "\n"		/* cpucap          */ \
	" .byte 662b-661b\n"				/* source len      */ \
	" .byte 664f-663f\n"				/* replacement len */
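
/*
 * Each entry emitted into .altinstructions above is consumed at boot as a
 * struct alt_instr. For reference, its layout (defined in
 * <asm/alternative.h>) looks roughly like:
 *
 *	struct alt_instr {
 *		s32 orig_offset;	// offset to original instruction
 *		s32 alt_offset;		// offset to replacement instruction
 *		u16 cpucap;		// cpucap bit set for replacement
 *		u8  orig_len;		// size of original instruction(s)
 *		u8  alt_len;		// size of new instruction(s), <= orig_len
 *	};
 */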

/*
 * alternative assembly primitive:
 *
 * If any of these .org directives fail, it means that insn1 and insn2
 * don't have the same length. This used to be written as
 *
 * .if ((664b-663b) != (662b-661b))
 * 	.error "Alternatives instruction length mismatch"
 * .endif
 *
 * but most assemblers die if insn1 or insn2 contain a .inst. This should
 * be fixed in any binutils release newer than 2.25.51.0.2 (anything
 * containing commit 4e4d08cf7399b606 or c1baaddf8861).
 *
 * Alternatives with callbacks do not generate replacement instructions.
 */
#define __ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, cfg_enabled)	\
	".if "__stringify(cfg_enabled)" == 1\n"				\
	"661:\n\t"							\
	oldinstr "\n"							\
	"662:\n"							\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY(cpucap)						\
	".popsection\n"							\
	".subsection 1\n"						\
	"663:\n\t"							\
	newinstr "\n"							\
	"664:\n\t"							\
	".org	. - (664b-663b) + (662b-661b)\n\t"			\
	".org	. - (662b-661b) + (664b-663b)\n\t"			\
	".previous\n"							\
	".endif\n"
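
/*
 * The paired .org directives encode the length check: the first moves the
 * location counter by (source len - replacement len), the second by the
 * negation of that. For example, with an 8-byte original and a 4-byte
 * replacement, the second .org evaluates to ". - 8 + 4", which would move
 * the location counter backwards, and the assembler errors out. Only
 * equal lengths satisfy both directives.
 */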

#define __ALTERNATIVE_CFG_CB(oldinstr, cpucap, cfg_enabled, cb)	\
	".if "__stringify(cfg_enabled)" == 1\n"				\
	"661:\n\t"							\
	oldinstr "\n"							\
	"662:\n"							\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY_CB(cpucap, cb)					\
	".popsection\n"							\
	"663:\n\t"							\
	"664:\n\t"							\
	".endif\n"

#define _ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, cfg, ...)	\
	__ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, IS_ENABLED(cfg))

#define ALTERNATIVE_CB(oldinstr, cpucap, cb) \
	__ALTERNATIVE_CFG_CB(oldinstr, (1 << ARM64_CB_SHIFT) | (cpucap), 1, cb)
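
/*
 * Callback alternatives record a patching function instead of a
 * replacement sequence; at boot the callback rewrites the original
 * instructions in place. A minimal sketch, modelled on
 * alternative_has_cap_likely() below:
 *
 *	asm goto(ALTERNATIVE_CB("b	%l[l_no]", %[cpucap], alt_cb_patch_nops)
 *		 : : [cpucap] "i" (cpucap) : : l_no);
 */
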
#else

#include <asm/assembler.h>

.macro altinstruction_entry orig_offset alt_offset cpucap orig_len alt_len
	.word \orig_offset - .
	.word \alt_offset - .
	.hword (\cpucap)
	.byte \orig_len
	.byte \alt_len
.endm

.macro alternative_insn insn1, insn2, cap, enable = 1
	.if \enable
661:	\insn1
662:	.pushsection .altinstructions, "a"
	altinstruction_entry 661b, 663f, \cap, 662b-661b, 664f-663f
	.popsection
	.subsection 1
663:	\insn2
664:	.org	. - (664b-663b) + (662b-661b)
	.org	. - (662b-661b) + (664b-663b)
	.previous
	.endif
.endm
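
/*
 * A minimal usage sketch in an assembly file (the cpucap name is
 * hypothetical); both instructions are 4 bytes, as the .org pair
 * requires:
 *
 *	alternative_insn "nop", "sevl", ARM64_HAS_FOO
 */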

/*
 * Alternative sequences
 *
 * The code for the case where the capability is not present will be
 * assembled and linked as normal. There are no restrictions on this
 * code.
 *
 * The code for the case where the capability is present will be
 * assembled into a special section to be used for dynamic patching.
 * Code for that case must:
 *
 * 1. Be exactly the same length (in bytes) as the default code
 *    sequence.
 *
 * 2. Not contain a branch target that is used outside of the
 *    alternative sequence it is defined in (branches into an
 *    alternative sequence are not fixed up).
 */
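
/*
 * For example, using the macros defined below (the cpucap name is
 * hypothetical); both sequences are a single 4-byte instruction and
 * contain no externally-used branch targets:
 *
 *	alternative_if_not ARM64_HAS_FOO
 *		mov	x0, #0
 *	alternative_else
 *		mrs	x0, midr_el1
 *	alternative_endif
 */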

/*
 * Begin an alternative code sequence.
 */
.macro alternative_if_not cap
	.set .Lasm_alt_mode, 0
	.pushsection .altinstructions, "a"
	altinstruction_entry 661f, 663f, \cap, 662f-661f, 664f-663f
	.popsection
661:
.endm

.macro alternative_if cap
	.set .Lasm_alt_mode, 1
	.pushsection .altinstructions, "a"
	altinstruction_entry 663f, 661f, \cap, 664f-663f, 662f-661f
	.popsection
	.subsection 1
	.align 2	/* So GAS knows label 661 is suitably aligned */
661:
.endm

.macro alternative_cb cap, cb
	.set .Lasm_alt_mode, 0
	.pushsection .altinstructions, "a"
	altinstruction_entry 661f, \cb, (1 << ARM64_CB_SHIFT) | \cap, 662f-661f, 0
	.popsection
661:
.endm

/*
 * Provide the other half of the alternative code sequence.
 */
.macro alternative_else
662:
	.if .Lasm_alt_mode==0
	.subsection 1
	.else
	.previous
	.endif
663:
.endm

/*
 * Complete an alternative code sequence.
 */
.macro alternative_endif
664:
	.org	. - (664b-663b) + (662b-661b)
	.org	. - (662b-661b) + (664b-663b)
	.if .Lasm_alt_mode==0
	.previous
	.endif
.endm

/*
 * Callback-based alternative epilogue
 */
.macro alternative_cb_end
662:
.endm
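
/*
 * A sketch of the callback form (the callback name is hypothetical);
 * the enclosed default instructions are rewritten by the callback at
 * patch time:
 *
 *	alternative_cb ARM64_ALWAYS_SYSTEM, my_patch_fn
 *		movz	x0, #0
 *		movk	x0, #0, lsl #16
 *	alternative_cb_end
 */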

/*
 * Provides a trivial alternative or default sequence consisting solely
 * of NOPs. The number of NOPs is chosen automatically to match the
 * previous case.
 */
.macro alternative_else_nop_endif
alternative_else
	nops	(662b-661b) / AARCH64_INSN_SIZE
alternative_endif
.endm
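
/*
 * Typical use when the capability simply removes work (cpucap name
 * hypothetical): the dsb executes only when ARM64_HAS_FOO is not
 * detected, and is patched to a NOP otherwise:
 *
 *	alternative_if_not ARM64_HAS_FOO
 *		dsb	sy
 *	alternative_else_nop_endif
 */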

#define _ALTERNATIVE_CFG(insn1, insn2, cap, cfg, ...)	\
	alternative_insn insn1, insn2, cap, IS_ENABLED(cfg)

#endif  /*  __ASSEMBLY__  */

/*
 * Usage: asm(ALTERNATIVE(oldinstr, newinstr, cpucap));
 *
 * Usage: asm(ALTERNATIVE(oldinstr, newinstr, cpucap, CONFIG_FOO));
 * N.B. If CONFIG_FOO is specified, but not selected, the whole block
 *      will be omitted, including oldinstr.
 */
#define ALTERNATIVE(oldinstr, newinstr, ...)   \
	_ALTERNATIVE_CFG(oldinstr, newinstr, __VA_ARGS__, 1)
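
/*
 * A minimal sketch of both forms (cpucap and config names hypothetical);
 * "dsb sy" and "nop" are each one 4-byte instruction, so the lengths
 * match:
 *
 *	asm(ALTERNATIVE("dsb sy", "nop", ARM64_HAS_FOO));
 *	asm(ALTERNATIVE("dsb sy", "nop", ARM64_HAS_FOO, CONFIG_BAR));
 */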

#ifndef __ASSEMBLY__

#include <linux/types.h>

static __always_inline bool
alternative_has_cap_likely(const unsigned long cpucap)
{
	if (!cpucap_is_possible(cpucap))
		return false;

	asm goto(
#ifdef BUILD_VDSO
	ALTERNATIVE("b	%l[l_no]", "nop", %[cpucap])
#else
	ALTERNATIVE_CB("b	%l[l_no]", %[cpucap], alt_cb_patch_nops)
#endif
	:
	: [cpucap] "i" (cpucap)
	:
	: l_no);

	return true;
l_no:
	return false;
}
static __always_inline bool
alternative_has_cap_unlikely(const unsigned long cpucap)
{
	if (!cpucap_is_possible(cpucap))
		return false;

	asm goto(
	ALTERNATIVE("nop", "b	%l[l_yes]", %[cpucap])
	:
	: [cpucap] "i" (cpucap)
	:
	: l_yes);

	return false;
l_yes:
	return true;
}
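
/*
 * A sketch of how these helpers are intended to be used (function and
 * cpucap names hypothetical); the branch is patched at boot, so the
 * check costs at most one instruction at runtime:
 *
 *	static inline bool foo_supported(void)
 *	{
 *		return alternative_has_cap_likely(ARM64_HAS_FOO);
 *	}
 */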

#endif /* __ASSEMBLY__ */

#endif /* __ASM_ALTERNATIVE_MACROS_H */