// SPDX-License-Identifier: GPL-2.0
#include <linux/static_call.h>
#include <linux/memory.h>
#include <linux/bug.h>
#include <asm/text-patching.h>

enum insn_type {
	CALL = 0, /* site call */
	NOP = 1, /* site cond-call */
	JMP = 2, /* tramp / site tail-call */
	RET = 3, /* tramp / site cond-tail-call */
	JCC = 4,
};
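
/*
 * For reference, the patched text is always 5 bytes (6 for Jcc.d32), e.g.:
 *
 *	CALL:	e8 <rel32>		call func
 *	NOP:	0f 1f 44 00 00		nopl 0x0(%rax,%rax,1)
 *	JMP:	e9 <rel32>		jmp func
 *	RET:	c3 cc cc cc cc		ret; int3 padding
 *	JCC:	0f 8n <rel32>		jcc.d32 func
 */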

/*
 * ud1 %esp, %ecx - a 3 byte #UD that is unique to trampolines, chosen such
 * that there is no false-positive trampoline identification while also being a
 * speculation stop.
 */
static const u8 tramp_ud[] = { 0x0f, 0xb9, 0xcc };

/*
 * cs cs cs xorl %eax, %eax - a single 5 byte instruction that clears %[er]ax
 */
static const u8 xor5rax[] = { 0x2e, 0x2e, 0x2e, 0x31, 0xc0 };

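/*
 * ret; int3 int3 int3 int3 - a RET padded with int3 to the common 5 byte
 * patch size.
 */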
static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };

/*
 * ud1 (%edx),%rdi -- see __WARN_trap() / decode_bug()
 */
static const u8 warninsn[] = { 0x67, 0x48, 0x0f, 0xb9, 0x3a };

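/*
 * Return the condition opcode byte (0x80-0x8f) when @insn is a Jcc.d32
 * (0x0f 0x8n <rel32>), 0 otherwise.
 */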
static u8 __is_Jcc(u8 *insn) /* Jcc.d32 */
{
	u8 ret = 0;

	if (insn[0] == 0x0f) {
		u8 tmp = insn[1];
		if ((tmp & 0xf0) == 0x80)
			ret = tmp;
	}

	return ret;
}

extern void __static_call_return(void);

asm (".global __static_call_return\n\t"
     ".type __static_call_return, @function\n\t"
     ASM_FUNC_ALIGN "\n\t"
     "__static_call_return:\n\t"
     ANNOTATE_NOENDBR "\n\t"
     ANNOTATE_RETPOLINE_SAFE "\n\t"
     "ret; int3\n\t"
     ".size __static_call_return, . - __static_call_return \n\t");

static void __ref __static_call_transform(void *insn, enum insn_type type,
					  void *func, bool modinit)
{
	const void *emulate = NULL;
	int size = CALL_INSN_SIZE;
	const void *code;
	u8 op, buf[6];

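	/*
	 * Patching a JMP or RET over a site that currently holds a Jcc.d32
	 * must preserve the condition, so treat it as JCC.
	 */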
	if ((type == JMP || type == RET) && (op = __is_Jcc(insn)))
		type = JCC;

	switch (type) {
	case CALL:
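		/*
		 * Calls to __static_call_return0 / __WARN_trap are patched
		 * with an equivalent inline sequence; @emulate preserves the
		 * original CALL semantics for CPUs hitting the site
		 * mid-patch.
		 */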
		func = callthunks_translate_call_dest(func);
		code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
		if (func == &__static_call_return0) {
			emulate = code;
			code = &xor5rax;
		}
		if (func == &__WARN_trap) {
			emulate = code;
			code = &warninsn;
		}
		break;

	case NOP:
		code = x86_nops[5];
		break;

	case JMP:
		code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
		break;

	case RET:
		if (cpu_wants_rethunk_at(insn))
			code = text_gen_insn(JMP32_INSN_OPCODE, insn, x86_return_thunk);
		else
			code = &retinsn;
		break;

	case JCC:
		if (!func) {
			func = __static_call_return;
			if (cpu_wants_rethunk())
				func = x86_return_thunk;
		}

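		/*
		 * Rebuild the 6 byte Jcc.d32: keep the 0x0f escape byte and
		 * regenerate the condition opcode plus a new rel32 to @func.
		 */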
		buf[0] = 0x0f;
		__text_gen_insn(buf+1, op, insn+1, func, 5);
		code = buf;
		size = 6;

		break;
	}

	if (memcmp(insn, code, size) == 0)
		return;

	if (system_state == SYSTEM_BOOTING || modinit)
		return text_poke_early(insn, code, size);

	smp_text_poke_single(insn, code, size, emulate);
}

static void __static_call_validate(u8 *insn, bool tail, bool tramp)
{
	u8 opcode = insn[0];

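	/* A trampoline carries the 3 byte ud1 signature after its 5 byte insn. */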
	if (tramp && memcmp(insn+5, tramp_ud, 3)) {
		pr_err("trampoline signature fail");
		BUG();
	}

	if (tail) {
		if (opcode == JMP32_INSN_OPCODE ||
		    opcode == RET_INSN_OPCODE ||
		    __is_Jcc(insn))
			return;
	} else {
		if (opcode == CALL_INSN_OPCODE ||
		    !memcmp(insn, x86_nops[5], 5) ||
		    !memcmp(insn, xor5rax, 5) ||
		    !memcmp(insn, warninsn, 5))
			return;
	}

	/*
	 * If we ever trigger this, our text is corrupt, we'll probably not live long.
	 */
	pr_err("unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
	BUG();
}

static inline enum insn_type __sc_insn(bool null, bool tail)
{
	/*
	 * Encode the following table without branches:
	 *
	 *	tail	null	insn
	 *	-----+-------+------
	 *	  0  |   0   |  CALL
	 *	  0  |   1   |  NOP
	 *	  1  |   0   |  JMP
	 *	  1  |   1   |  RET
	 */
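	/* Relies on the enum values: CALL=0, NOP=1, JMP=2, RET=3. */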
	return 2*tail + null;
}
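
/*
 * A minimal usage sketch of what ends up here (hypothetical names):
 *
 *	DEFINE_STATIC_CALL(my_key, my_func);
 *	static_call(my_key)(args);		// direct call, patched below
 *	static_call_update(my_key, other_func);	// retargets tramp/sites
 */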

void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
	mutex_lock(&text_mutex);

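	/*
	 * A NULL @func turns the trampoline into a bare RET and an inline
	 * site into a NOP (or RET for a tail-call), per __sc_insn().
	 */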
	if (tramp && !site) {
		__static_call_validate(tramp, true, true);
		__static_call_transform(tramp, __sc_insn(!func, true), func, false);
	}

	if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
		__static_call_validate(site, tail, false);
		__static_call_transform(site, __sc_insn(!func, tail), func, false);
	}

	mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);

noinstr void __static_call_update_early(void *tramp, void *func)
{
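	/*
	 * Still booting and single threaded: patch the trampoline with a
	 * plain JMP directly, no text_poke machinery needed yet.
	 */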
	BUG_ON(system_state != SYSTEM_BOOTING);
	BUG_ON(static_call_initialized);
	__text_gen_insn(tramp, JMP32_INSN_OPCODE, tramp, func, JMP32_INSN_SIZE);
	sync_core();
}

#ifdef CONFIG_MITIGATION_RETHUNK
/*
 * This is called by apply_returns() to fix up static call trampolines,
 * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
 * having a return trampoline.
 *
 * The problem is that static_call() is available before determining
 * X86_FEATURE_RETHUNK and, by implication, running alternatives.
 *
 * This means that __static_call_transform() above can have overwritten the
 * return trampoline and we now need to fix things up to be consistent.
 */
bool __static_call_fixup(void *tramp, u8 op, void *dest)
{
	unsigned long addr = (unsigned long)tramp;
	/*
	 * Not all .return_sites are a static_call trampoline (most are not).
	 * Check if the 3 bytes after the return are still kernel text, if not,
	 * then this definitely is not a trampoline and we need not worry
	 * further.
	 *
	 * This avoids the memcmp() below tripping over pagefaults etc..
	 */
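	/* addr + 7 is the last byte of the 3 byte ud1 after the 5 byte insn. */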
	if (((addr >> PAGE_SHIFT) != ((addr + 7) >> PAGE_SHIFT)) &&
	    !kernel_text_address(addr + 7))
		return false;

	if (memcmp(tramp+5, tramp_ud, 3)) {
		/* Not a trampoline site, not our problem. */
		return false;
	}

	mutex_lock(&text_mutex);
	if (op == RET_INSN_OPCODE || dest == &__x86_return_thunk)
		__static_call_transform(tramp, RET, NULL, true);
	mutex_unlock(&text_mutex);

	return true;
}
#endif