/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_JUMP_LABEL_H
#define _ASM_X86_JUMP_LABEL_H

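/*
 * x86 can batch jump label updates: the core code queues multiple entries
 * via arch_jump_label_transform_queue() and applies them in one go with
 * arch_jump_label_transform_apply(), sharing a single text_poke batch.
 */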
#define HAVE_JUMP_LABEL_BATCH

#include <asm/asm.h>
#include <asm/nops.h>

#ifndef __ASSEMBLER__

#include <linux/stringify.h>
#include <linux/types.h>

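/*
 * Each patch site adds a struct jump_entry to the __jump_table section.
 * All three fields are relative (CONFIG_HAVE_ARCH_JUMP_LABEL_RELATIVE):
 * a 32-bit offset to the patched instruction (label 1 below), a 32-bit
 * offset to the jump target, and a pointer-sized offset to the
 * struct static_key that controls the site.
 */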
#define JUMP_TABLE_ENTRY(key, label)			\
	".pushsection __jump_table,  \"aw\" \n\t"	\
	_ASM_ALIGN "\n\t"				\
	ANNOTATE_DATA_SPECIAL				\
	".long 1b - . \n\t"				\
	".long " label " - . \n\t"			\
	_ASM_PTR " " key " - . \n\t"			\
	".popsection \n\t"

/* This macro is also expanded on the Rust side. */
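/*
 * With CONFIG_HAVE_JUMP_LABEL_HACK the site is emitted as a real JMP, so
 * the assembler may pick the short 2-byte or the long 5-byte encoding;
 * objtool then rewrites it to a same-sized NOP in the object file (hence
 * the "objtool NOPs this" note).  The "+ 2" sets bit 1 of the recorded
 * key offset, which is what objtool's jump_label hack keys off.  Without
 * the hack a fixed 5-byte NOP is emitted directly.
 */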
#ifdef CONFIG_HAVE_JUMP_LABEL_HACK
#define ARCH_STATIC_BRANCH_ASM(key, label)		\
	"1: jmp " label " # objtool NOPs this \n\t"	\
	JUMP_TABLE_ENTRY(key " + 2", label)
#else /* !CONFIG_HAVE_JUMP_LABEL_HACK */
#define ARCH_STATIC_BRANCH_ASM(key, label)		\
	"1: .byte " __stringify(BYTES_NOP5) "\n\t"	\
	JUMP_TABLE_ENTRY(key, label)
#endif /* CONFIG_HAVE_JUMP_LABEL_HACK */

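/*
 * Default-false branch: the patch site starts out as a NOP and the code
 * falls through to "return false".  The jump label core patches the NOP
 * to a JMP to l_yes (and back) when the key changes state; the @branch
 * argument is folded into the low bit of the key offset so the core code
 * can combine it with the key's state when deciding whether the site
 * should be a NOP or a JMP.
 *
 * Sketch of typical use through the generic static key API; the key and
 * helpers below are made-up names, for illustration only:
 *
 *	DEFINE_STATIC_KEY_FALSE(my_feature_key);
 *
 *	void hot_path(void)
 *	{
 *		if (static_branch_unlikely(&my_feature_key))
 *			do_rare_thing();	// out of line until enabled
 *		do_fast_thing();
 *	}
 *
 *	// e.g. from a sysfs or module parameter handler:
 *	static_branch_enable(&my_feature_key);
 */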
static __always_inline bool arch_static_branch(struct static_key * const key, const bool branch)
{
	asm goto(ARCH_STATIC_BRANCH_ASM("%c0 + %c1", "%l[l_yes]")
		: :  "i" (key), "i" (branch) : : l_yes);

	return false;
l_yes:
	return true;
}

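/*
 * Inverse default: the site starts out as an unconditional JMP to l_yes
 * and is patched to a NOP when the other state is wanted.  Paired with
 * arch_static_branch() above, this lets either polarity of a key keep
 * its likely path as straight-line fall-through code.
 */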
static __always_inline bool arch_static_branch_jump(struct static_key * const key, const bool branch)
{
	asm goto("1:"
		"jmp %l[l_yes]\n\t"
		JUMP_TABLE_ENTRY("%c0 + %c1", "%l[l_yes]")
		: :  "i" (key), "i" (branch) : : l_yes);

	return false;
l_yes:
	return true;
}

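/*
 * Size of the instruction at a patch site: 2 bytes when objtool emitted
 * the short JMP/NOP form, otherwise 5.  Implemented in
 * arch/x86/kernel/jump_label.c and used by the patching code to pick the
 * matching JMP/NOP encoding.
 */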
extern int arch_jump_entry_size(struct jump_entry *entry);

#endif	/* __ASSEMBLER__ */

#endif /* _ASM_X86_JUMP_LABEL_H */