/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_RUNTIME_CONST_H
#define _ASM_RUNTIME_CONST_H

#include <asm/cacheflush.h>

/* Sigh. You can still run arm64 in BE mode */
#include <asm/byteorder.h>

/*
 * Load a 64-bit "runtime constant": emit a movz/movk sequence with
 * recognizable dummy immediates and record the address of the first
 * instruction in a runtime_ptr_<sym> section, so that init code can
 * patch in the real value of 'sym' before first use.
 */
#define runtime_const_ptr(sym) ({				\
	typeof(sym) __ret;					\
	asm_inline("1:\t"					\
		"movz %0, #0xcdef\n\t"				\
		"movk %0, #0x89ab, lsl #16\n\t"			\
		"movk %0, #0x4567, lsl #32\n\t"			\
		"movk %0, #0x0123, lsl #48\n\t"			\
		".pushsection runtime_ptr_" #sym ",\"a\"\n\t"	\
		".long 1b - .\n\t"				\
		".popsection"					\
		:"=r" (__ret));					\
	__ret; })

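/*
 * Usage sketch (illustrative, not part of this header): a pointer that
 * is set once during boot and then read on hot paths. fs/dcache.c uses
 * this pattern for dentry_hashtable; 'example_table' is hypothetical.
 * The load compiles to the four immediate moves above, with no memory
 * access for the pointer itself:
 *
 *	static struct hlist_bl_head *example_table __ro_after_init;
 *
 *	static inline struct hlist_bl_head *example_bucket(u32 n)
 *	{
 *		return runtime_const_ptr(example_table) + n;
 *	}
 */
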
/*
 * 32-bit right shift by a boot-time-patched amount: the "#12" below is
 * a dummy shift count that __runtime_fixup_shift() overwrites.
 */
#define runtime_const_shift_right_32(val, sym) ({		\
	unsigned long __ret;					\
	asm_inline("1:\t"					\
		"lsr %w0,%w1,#12\n\t"				\
		".pushsection runtime_shift_" #sym ",\"a\"\n\t"	\
		".long 1b - .\n\t"				\
		".popsection"					\
		:"=r" (__ret)					\
		:"r" (0u+(val)));				\
	__ret; })

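/*
 * Usage sketch (illustrative): the two helpers combine naturally, as
 * fs/dcache.c does with dentry_hashtable and d_hash_shift:
 *
 *	bucket = runtime_const_ptr(example_table) +
 *		 runtime_const_shift_right_32(hash, example_shift);
 *
 * 'example_table' and 'example_shift' are hypothetical names.
 */
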
/*
 * Patch every recorded user of 'sym' with its final value. 'type' is
 * "ptr" or "shift"; the __start/__stop symbols bracket the matching
 * runtime_<type>_<sym> section emitted by the linker script.
 */
#define runtime_const_init(type, sym) do {		\
	extern s32 __start_runtime_##type##_##sym[];	\
	extern s32 __stop_runtime_##type##_##sym[];	\
	runtime_const_fixup(__runtime_fixup_##type,	\
		(unsigned long)(sym),			\
		__start_runtime_##type##_##sym,		\
		__stop_runtime_##type##_##sym);		\
} while (0)

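/*
 * Init-time sketch (illustrative): once the real values are known,
 * patch all recorded sites before first use:
 *
 *	runtime_const_init(ptr, example_table);
 *	runtime_const_init(shift, example_shift);
 *
 * fs/dcache.c does this for dentry_hashtable and d_hash_shift from
 * its hashtable setup code.
 */
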
/* 16-bit immediate for wide move (movz and movk) in bits 5..20 */
static inline void __runtime_fixup_16(__le32 *p, unsigned int val)
{
	u32 insn = le32_to_cpu(*p);
	insn &= 0xffe0001f;
	insn |= (val & 0xffff) << 5;
	*p = cpu_to_le32(insn);
}

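/*
 * Worked example (illustrative): "movz x0, #0xcdef" encodes as
 * 0xd299bde0. Masking with 0xffe0001f clears imm16 (bits 5..20),
 * leaving 0xd2800000 with opcode, hw shift and Rd intact; OR-ing in
 * 0x1234 << 5 gives 0xd2824680, i.e. "movz x0, #0x1234".
 */
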
/*
 * Clean/invalidate the patched instructions to the Point of
 * Unification so the instruction side observes the new encoding.
 */
static inline void __runtime_fixup_caches(void *where, unsigned int insns)
{
	unsigned long va = (unsigned long)where;
	caches_clean_inval_pou(va, va + 4*insns);
}

static inline void __runtime_fixup_ptr(void *where, unsigned long val)
{
	/* Patch through the linear-map alias: the text mapping is read-only */
	__le32 *p = lm_alias(where);
	__runtime_fixup_16(p, val);
	__runtime_fixup_16(p+1, val >> 16);
	__runtime_fixup_16(p+2, val >> 32);
	__runtime_fixup_16(p+3, val >> 48);
	__runtime_fixup_caches(where, 4);
}

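/*
 * Worked example (illustrative): for val == 0x0123456789abcdef the four
 * consecutive instructions end up as
 *
 *	movz %0, #0xcdef		(bits 15..0)
 *	movk %0, #0x89ab, lsl #16	(bits 31..16)
 *	movk %0, #0x4567, lsl #32	(bits 47..32)
 *	movk %0, #0x0123, lsl #48	(bits 63..48)
 *
 * so the dummy sequence spells out 0x0123456789abcdef, and a site that
 * was never fixed up is easy to recognize in a register dump.
 */
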
/*
 * Immediate value is 6 bits starting at bit #16: the 'immr' field of
 * the ubfm instruction underlying the lsr alias.
 */
static inline void __runtime_fixup_shift(void *where, unsigned long val)
{
	__le32 *p = lm_alias(where);
	u32 insn = le32_to_cpu(*p);
	insn &= 0xffc0ffff;
	insn |= (val & 63) << 16;
	*p = cpu_to_le32(insn);
	__runtime_fixup_caches(where, 1);
}

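/*
 * Worked example (illustrative): "lsr w0, w1, #12" is an alias of
 * "ubfm w0, w1, #12, #31" and encodes as 0x530c7c20. Masking with
 * 0xffc0ffff clears immr (bits 16..21) to 0x53007c20; OR-ing in
 * 18 << 16 gives 0x53127c20, i.e. "lsr w0, w1, #18".
 */
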
static inline void runtime_const_fixup(void (*fn)(void *, unsigned long),
	unsigned long val, s32 *start, s32 *end)
{
	while (start < end) {
		/* Each entry holds the offset from itself to the insn */
		fn(*start + (void *)start, val);
		start++;
	}
}

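/*
 * For reference (hedged, from include/asm-generic/vmlinux.lds.h rather
 * than this file): the per-symbol sections and their bracketing
 * __start/__stop symbols come from the generic RUNTIME_CONST()
 * linker-script macro, roughly of this shape:
 *
 *	#define RUNTIME_CONST(t, x)				\
 *		. = ALIGN(8);					\
 *		runtime_##t##_##x : {				\
 *			__start_runtime_##t##_##x = .;		\
 *			*(runtime_##t##_##x)			\
 *			__stop_runtime_##t##_##x = .;		\
 *		}
 */
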
#endif