xref: /linux/arch/loongarch/include/asm/alternative.h (revision 0526b56cbc3c489642bd6a5fe4b718dea7ef0ee8)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_ALTERNATIVE_H
3 #define _ASM_ALTERNATIVE_H
4 
5 #ifndef __ASSEMBLY__
6 
7 #include <linux/types.h>
8 #include <linux/stddef.h>
9 #include <linux/stringify.h>
10 #include <asm/asm.h>
11 
/*
 * One patch-site descriptor.  The ALTERNATIVE*() macros below emit one of
 * these into the .altinstructions section for every patch site; they are
 * consumed by apply_alternatives().  Both offsets are PC-relative (the
 * macros emit "label - ."), so the records remain valid wherever the
 * kernel image ends up.
 */
struct alt_instr {
	s32 instr_offset;	/* offset to original instruction */
	s32 replace_offset;	/* offset to replacement instruction */
	u16 feature;		/* feature bit set for replacement */
	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction */
} __packed;
19 
20 /*
21  * Debug flag that can be tested to see whether alternative
22  * instructions were patched in already:
23  */
extern int alternatives_patched;
/* Bounds of the .altinstructions section — presumably defined by the
 * linker script; verify against vmlinux.lds. */
extern struct alt_instr __alt_instructions[], __alt_instructions_end[];

extern void alternative_instructions(void);
extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
29 
/*
 * Local-label naming scheme shared by all macros below:
 *   661:   start of the original instruction(s)
 *   662:   end of the original instruction(s), before NOP padding
 *   663:   end marker, after any padding
 *   664<n>: start of replacement number <n>
 *   665<n>: end of replacement number <n>
 */
#define b_replacement(num)	"664"#num
#define e_replacement(num)	"665"#num

#define alt_end_marker		"663"
#define alt_slen		"662b-661b"		/* asm expr: length of the original insns */
#define alt_total_slen		alt_end_marker"b-661b"	/* asm expr: original + NOP padding */
#define alt_rlen(num)		e_replacement(num)"f-"b_replacement(num)"f"	/* asm expr: length of replacement <num> */
37 
/*
 * Emit the original instruction(s), then pad with NOPs so the patch site
 * is at least as long as replacement 'num'.  In GNU as a true comparison
 * evaluates to -1, so -((rlen - slen) > 0) is 1 when the replacement is
 * longer and 0 otherwise; .fill then emits (rlen - slen) / 4 words of
 * 0x03400000 (the LoongArch NOP encoding, andi $zero, $zero, 0) or
 * nothing at all.
 */
#define __OLDINSTR(oldinstr, num)					\
	"661:\n\t" oldinstr "\n662:\n"					\
	".fill -(((" alt_rlen(num) ")-(" alt_slen ")) > 0) * "		\
		"((" alt_rlen(num) ")-(" alt_slen ")) / 4, 4, 0x03400000\n"

/* Original instruction(s) plus padding, closed by the 663 end marker. */
#define OLDINSTR(oldinstr, num)						\
	__OLDINSTR(oldinstr, num)					\
	alt_end_marker ":\n"
46 
47 #define alt_max_short(a, b)	"((" a ") ^ (((" a ") ^ (" b ")) & -(-((" a ") < (" b ")))))"
48 
/*
 * Two-replacement variant of OLDINSTR(): pad the original instruction(s)
 * with NOPs up to the length of the *longer* of the two replacement
 * alternatives (alt_max_short of the two replacement lengths), using the
 * same GNU-as .fill trick as __OLDINSTR() above.
 */
#define OLDINSTR_2(oldinstr, num1, num2) \
	"661:\n\t" oldinstr "\n662:\n"								\
	".fill -((" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) > 0) * "	\
		"(" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) / 4, "	\
		"4, 0x03400000\n"	\
	alt_end_marker ":\n"
59 
/*
 * Emit one struct alt_instr record; field order and sizes must match the
 * struct definition above.  Offsets are PC-relative ("label - .").
 */
#define ALTINSTR_ENTRY(feature, num)					      \
	" .long 661b - .\n"				/* label           */ \
	" .long " b_replacement(num)"f - .\n"		/* new instruction */ \
	" .short " __stringify(feature) "\n"		/* feature bit     */ \
	" .byte " alt_total_slen "\n"			/* source len      */ \
	" .byte " alt_rlen(num) "\n"			/* replacement len */
66 
/* Replacement body number 'num', bracketed by labels 664<num>/665<num>. */
#define ALTINSTR_REPLACEMENT(newinstr, feature, num)	/* replacement */     \
	b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n\t"
69 
/*
 * alternative assembly primitive:
 * lays down 'oldinstr' (NOP-padded to the replacement's length) at the
 * call site, records a struct alt_instr in .altinstructions, and places
 * the replacement code out of line in subsection 1 of the current
 * section for apply_alternatives() to patch in.
 */
#define ALTERNATIVE(oldinstr, newinstr, feature)			\
	OLDINSTR(oldinstr, 1)						\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY(feature, 1)					\
	".popsection\n"							\
	".subsection 1\n" \
	ALTINSTR_REPLACEMENT(newinstr, feature, 1)			\
	".previous\n"
79 
/*
 * Two-feature variant of ALTERNATIVE(): one patch site, two candidate
 * replacements, one alt_instr record per candidate.  Which replacement
 * wins when both feature bits are set is decided by apply_alternatives()
 * — presumably the later entry; confirm in alternative.c.
 */
#define ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2)\
	OLDINSTR_2(oldinstr, 1, 2)					\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY(feature1, 1)					\
	ALTINSTR_ENTRY(feature2, 2)					\
	".popsection\n"							\
	".subsection 1\n" \
	ALTINSTR_REPLACEMENT(newinstr1, feature1, 1)			\
	ALTINSTR_REPLACEMENT(newinstr2, feature2, 2)			\
	".previous\n"
90 
91 /*
92  * Alternative instructions for different CPU types or capabilities.
93  *
94  * This allows to use optimized instructions even on generic binary
95  * kernels.
96  *
97  * length of oldinstr must be longer or equal the length of newinstr
98  * It can be padded with nops as needed.
99  *
100  * For non barrier like inlines please define new variants
101  * without volatile and memory clobber.
102  */
/* Barrier-like C-level wrappers (volatile + "memory" clobber); see the
 * comment above for when to define non-barrier variants instead. */
#define alternative(oldinstr, newinstr, feature)			\
	(asm volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory"))

#define alternative_2(oldinstr, newinstr1, feature1, newinstr2, feature2) \
	(asm volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2) ::: "memory"))
108 
109 #endif /* __ASSEMBLY__ */
110 
111 #endif /* _ASM_ALTERNATIVE_H */
112