xref: /linux/arch/arm64/include/asm/runtime-const.h (revision c715f13bb30f9f4d1bd8888667ef32e43b6fedc1)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_RUNTIME_CONST_H
3 #define _ASM_RUNTIME_CONST_H
4 
5 #ifdef MODULE
6   #error "Cannot use runtime-const infrastructure from modules"
7 #endif
8 
9 #include <asm/cacheflush.h>
10 
11 /* Sigh. You can still run arm64 in BE mode */
12 #include <asm/byteorder.h>
13 
/*
 * Emit a boot-time-patchable "load pointer constant" sequence.
 *
 * The movz/movk quad loads the placeholder 0x0123456789abcdef into %0.
 * The address of the first instruction is recorded as a PC-relative
 * offset (".long 1b - .") in section "runtime_ptr_<sym>", so
 * runtime_const_init() can later locate the sequence and rewrite the
 * four 16-bit immediates with the real value of 'sym' via
 * __runtime_fixup_ptr().
 */
#define runtime_const_ptr(sym) ({				\
	typeof(sym) __ret;					\
	asm_inline("1:\t"					\
		"movz %0, #0xcdef\n\t"				\
		"movk %0, #0x89ab, lsl #16\n\t"			\
		"movk %0, #0x4567, lsl #32\n\t"			\
		"movk %0, #0x0123, lsl #48\n\t"			\
		".pushsection runtime_ptr_" #sym ",\"a\"\n\t"	\
		".long 1b - .\n\t"				\
		".popsection"					\
		:"=r" (__ret));					\
	__ret; })
26 
/*
 * Shift a 32-bit value right by a boot-time-patched amount.
 *
 * The '#12' shift count in the lsr is only a placeholder: the
 * instruction address is recorded (PC-relative) in section
 * "runtime_shift_<sym>", and the 6-bit immediate in the insn is
 * rewritten with the real shift count by __runtime_fixup_shift() at
 * runtime_const_init() time.
 */
#define runtime_const_shift_right_32(val, sym) ({		\
	unsigned long __ret;					\
	asm_inline("1:\t"					\
		"lsr %w0,%w1,#12\n\t"				\
		".pushsection runtime_shift_" #sym ",\"a\"\n\t"	\
		".long 1b - .\n\t"				\
		".popsection"					\
		:"=r" (__ret)					\
		:"r" (0u+(val)));				\
	__ret; })
37 
/*
 * Patch every recorded use of 'sym': walk the linker-provided
 * [__start_runtime_<type>_<sym>, __stop_runtime_<type>_<sym>) array of
 * PC-relative instruction offsets and apply __runtime_fixup_<type>
 * ('ptr' or 'shift') to each one, with the symbol's actual value.
 */
#define runtime_const_init(type, sym) do {		\
	extern s32 __start_runtime_##type##_##sym[];	\
	extern s32 __stop_runtime_##type##_##sym[];	\
	runtime_const_fixup(__runtime_fixup_##type,	\
		(unsigned long)(sym), 			\
		__start_runtime_##type##_##sym,		\
		__stop_runtime_##type##_##sym);		\
} while (0)
46 
47 /* 16-bit immediate for wide move (movz and movk) in bits 5..20 */
48 static inline void __runtime_fixup_16(__le32 *p, unsigned int val)
49 {
50 	u32 insn = le32_to_cpu(*p);
51 	insn &= 0xffe0001f;
52 	insn |= (val & 0xffff) << 5;
53 	*p = cpu_to_le32(insn);
54 }
55 
/* Make the patched instructions visible to the instruction fetchers */
static inline void __runtime_fixup_caches(void *where, unsigned int insns)
{
	unsigned long start = (unsigned long)where;
	unsigned long end = start + 4 * insns;	/* 4 bytes per A64 insn */

	caches_clean_inval_pou(start, end);
}
61 
62 static inline void __runtime_fixup_ptr(void *where, unsigned long val)
63 {
64 	__le32 *p = lm_alias(where);
65 	__runtime_fixup_16(p, val);
66 	__runtime_fixup_16(p+1, val >> 16);
67 	__runtime_fixup_16(p+2, val >> 32);
68 	__runtime_fixup_16(p+3, val >> 48);
69 	__runtime_fixup_caches(where, 4);
70 }
71 
72 /* Immediate value is 6 bits starting at bit #16 */
73 static inline void __runtime_fixup_shift(void *where, unsigned long val)
74 {
75 	__le32 *p = lm_alias(where);
76 	u32 insn = le32_to_cpu(*p);
77 	insn &= 0xffc0ffff;
78 	insn |= (val & 63) << 16;
79 	*p = cpu_to_le32(insn);
80 	__runtime_fixup_caches(where, 1);
81 }
82 
83 static inline void runtime_const_fixup(void (*fn)(void *, unsigned long),
84 	unsigned long val, s32 *start, s32 *end)
85 {
86 	while (start < end) {
87 		fn(*start + (void *)start, val);
88 		start++;
89 	}
90 }
91 
92 #endif
93