xref: /linux/arch/x86/include/asm/static_call.h (revision 59dbb9d81adfe07a6f8483269146b407cf9d44d7)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_STATIC_CALL_H
#define _ASM_STATIC_CALL_H

#include <asm/text-patching.h>

/*
 * For CONFIG_HAVE_STATIC_CALL_INLINE, this is a temporary trampoline which
 * uses the current value of the key->func pointer to do an indirect jump to
 * the function.  This trampoline is only used during boot, before the call
 * sites get patched by static_call_update().  The name of this trampoline has
 * a magical aspect: objtool uses it to find static call sites so it can create
 * the .static_call_sites section.
 *
 * For CONFIG_HAVE_STATIC_CALL, this is a permanent trampoline which
 * does a direct jump to the function.  The direct jump gets patched by
 * static_call_update().
 *
 * Having the trampoline in a special section forces GCC to emit a JMP.d32 when
 * it does tail-call optimization on the call, since the relative displacement
 * across sections cannot be computed at compile time.
 */

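/*
 * For context, a minimal usage sketch of the generic static call API (see
 * include/linux/static_call.h); my_call, my_func and my_other_func are
 * made-up names for illustration:
 *
 *   DEFINE_STATIC_CALL(my_call, my_func);
 *
 *   static_call(my_call)(arg);
 *   static_call_update(my_call, my_other_func);
 *
 * static_call() compiles to a call to the trampoline defined below;
 * static_call_update() retargets the trampoline (and, with
 * CONFIG_HAVE_STATIC_CALL_INLINE, patches the recorded call sites directly).
 */
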
/*
 * The trampoline is 8 bytes and of the general form:
 *
 *   jmp.d32 \func
 *   ud1 %esp, %ecx
 *
 * That trailing #UD serves both as a speculation stop and as a unique
 * 3-byte signature identifying static call trampolines. Also see tramp_ud[]
 * and __static_call_fixup().
 */
#define __ARCH_DEFINE_STATIC_CALL_TRAMP(name, insns)			\
	asm(".pushsection .static_call.text, \"ax\"		\n"	\
	    ".align 4						\n"	\
	    ".globl " STATIC_CALL_TRAMP_STR(name) "		\n"	\
	    STATIC_CALL_TRAMP_STR(name) ":			\n"	\
	    ANNOTATE_NOENDBR						\
	    insns "						\n"	\
	    ".byte 0x0f, 0xb9, 0xcc				\n"	\
	    ".type " STATIC_CALL_TRAMP_STR(name) ", @function	\n"	\
	    ".size " STATIC_CALL_TRAMP_STR(name) ", . - " STATIC_CALL_TRAMP_STR(name) " \n" \
	    ".popsection					\n")

#define ARCH_DEFINE_STATIC_CALL_TRAMP(name, func)			\
	__ARCH_DEFINE_STATIC_CALL_TRAMP(name, ".byte 0xe9; .long " #func " - (. + 4)")

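/*
 * The resulting trampoline is the 8 bytes described above; illustrative
 * byte layout, with the rel32 displacement shown as xx:
 *
 *   e9 xx xx xx xx	jmp.d32 func
 *   0f b9 cc		ud1 %esp, %ecx
 */
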
#ifdef CONFIG_MITIGATION_RETHUNK
#define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name)			\
	__ARCH_DEFINE_STATIC_CALL_TRAMP(name, "jmp __x86_return_thunk")
#else
#define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name)			\
	__ARCH_DEFINE_STATIC_CALL_TRAMP(name, "ret; int3; nop; nop; nop")
#endif

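/*
 * Either way the NULL trampoline fills the same 5 bytes ahead of the ud1
 * signature; a sketch of the two encodings (the rethunk jmp is assumed to
 * assemble to a jmp.d32 plus relocation):
 *
 *   e9 xx xx xx xx	jmp __x86_return_thunk	(CONFIG_MITIGATION_RETHUNK)
 *   c3 cc 90 90 90	ret; int3; nop; nop; nop
 */
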
#define ARCH_DEFINE_STATIC_CALL_RET0_TRAMP(name)			\
	ARCH_DEFINE_STATIC_CALL_TRAMP(name, __static_call_return0)

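/*
 * ARCH_ADD_TRAMP_KEY() records the offsets of a trampoline and its key in
 * the .static_call_tramp_key section, so the key can be looked up from the
 * trampoline address when only the trampoline symbol is exported (see
 * EXPORT_STATIC_CALL_TRAMP()).
 */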
#define ARCH_ADD_TRAMP_KEY(name)					\
	asm(".pushsection .static_call_tramp_key, \"a\"		\n"	\
	    ".long " STATIC_CALL_TRAMP_STR(name) " - .		\n"	\
	    ".long " STATIC_CALL_KEY_STR(name) " - .		\n"	\
	    ".popsection					\n")

extern bool __static_call_fixup(void *tramp, u8 op, void *dest);

extern void __static_call_update_early(void *tramp, void *func);

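/*
 * static_call_update_early() is usable both before and after static call
 * initialization: once static_call_initialized is set it falls through to a
 * regular __static_call_update(), otherwise it writes the key and lets
 * __static_call_update_early() patch the trampoline directly.
 *
 * Minimal usage sketch (my_op and friends are hypothetical names):
 *
 *   DEFINE_STATIC_CALL(my_op, my_default_op);
 *
 *   void __init my_early_setup(void)
 *   {
 *           static_call_update_early(my_op, my_real_op);
 *   }
 */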
#define static_call_update_early(name, _func)				\
({									\
	typeof(&STATIC_CALL_TRAMP(name)) __F = (_func);			\
	if (static_call_initialized) {					\
		__static_call_update(&STATIC_CALL_KEY(name),		\
				     STATIC_CALL_TRAMP_ADDR(name), __F);\
	} else {							\
		WRITE_ONCE(STATIC_CALL_KEY(name).func, _func);		\
		__static_call_update_early(STATIC_CALL_TRAMP_ADDR(name),\
					   __F);			\
	}								\
})

#endif /* _ASM_STATIC_CALL_H */