xref: /linux/arch/arm64/include/asm/kasan.h (revision fa84cf094ef9667e2b91c104b0a788fd1896f482)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_KASAN_H
#define __ASM_KASAN_H

#ifndef __ASSEMBLY__

#ifdef CONFIG_KASAN

#include <linux/linkage.h>
#include <asm/memory.h>
#include <asm/pgtable-types.h>

/*
 * KASAN_SHADOW_START: the beginning of the kernel virtual address space.
 * KASAN_SHADOW_END: KASAN_SHADOW_START + 1/N of the kernel virtual address
 * space, where N = (1 << KASAN_SHADOW_SCALE_SHIFT).
 */
#define KASAN_SHADOW_START      (VA_START)
#define KASAN_SHADOW_END        (KASAN_SHADOW_START + KASAN_SHADOW_SIZE)
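
/*
 * Illustrative arithmetic, assuming VA_BITS == 48 and
 * KASAN_SHADOW_SCALE_SHIFT == 3 (the generic KASAN scale), with both
 * values and KASAN_SHADOW_SIZE taken from asm/memory.h: the kernel VA
 * range is 2^48 bytes (256 TiB), so the shadow region is 1/8 of that,
 * i.e. 2^45 bytes (32 TiB) starting at VA_START.
 */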

/*
 * This value is used to map an address to the corresponding shadow
 * address by the following formula:
 *     shadow_addr = (address >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET
 *
 * (1 << (64 - KASAN_SHADOW_SCALE_SHIFT)) shadow addresses that lie in the
 * range [KASAN_SHADOW_OFFSET, KASAN_SHADOW_END) cover all 64 bits of virtual
 * addresses. So KASAN_SHADOW_OFFSET should satisfy the following equation:
 *      KASAN_SHADOW_OFFSET = KASAN_SHADOW_END -
 *				(1ULL << (64 - KASAN_SHADOW_SCALE_SHIFT))
 */
#define KASAN_SHADOW_OFFSET     (KASAN_SHADOW_END - (1ULL << \
					(64 - KASAN_SHADOW_SCALE_SHIFT)))
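
/*
 * A minimal sketch of the mapping the formula above describes. The helper
 * name is hypothetical and not part of this header; the kernel's generic
 * equivalent is kasan_mem_to_shadow() in include/linux/kasan.h.
 */
static inline void *kasan_shadow_addr_example(const void *addr)
{
	/* Each shadow byte covers (1 << KASAN_SHADOW_SCALE_SHIFT) bytes. */
	return (void *)(((unsigned long)addr >> KASAN_SHADOW_SCALE_SHIFT)
			+ KASAN_SHADOW_OFFSET);
}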

/* Full shadow setup, called during boot from setup_arch(). */
void kasan_init(void);
/* Copy the kernel's shadow pgd entries into another pgd. */
void kasan_copy_shadow(pgd_t *pgdir);
/* Early shadow setup, called from the head.S boot path (hence asmlinkage). */
asmlinkage void kasan_early_init(void);

#else
/* Stub definitions for !CONFIG_KASAN builds. */
static inline void kasan_init(void) { }
static inline void kasan_copy_shadow(pgd_t *pgdir) { }
#endif /* CONFIG_KASAN */

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_KASAN_H */