xref: /linux/arch/powerpc/mm/kasan/init_32.c (revision 79790b6818e96c58fe2bffee1b418c16e64e7b80)
1f08aed52SDaniel Axtens // SPDX-License-Identifier: GPL-2.0
2f08aed52SDaniel Axtens 
3f08aed52SDaniel Axtens #define DISABLE_BRANCH_PROFILING
4f08aed52SDaniel Axtens 
5f08aed52SDaniel Axtens #include <linux/kasan.h>
6f08aed52SDaniel Axtens #include <linux/printk.h>
7f08aed52SDaniel Axtens #include <linux/memblock.h>
8f08aed52SDaniel Axtens #include <linux/sched/task.h>
9f08aed52SDaniel Axtens #include <asm/pgalloc.h>
10f08aed52SDaniel Axtens #include <asm/code-patching.h>
11f08aed52SDaniel Axtens #include <mm/mmu_decl.h>
12f08aed52SDaniel Axtens 
kasan_prot_ro(void)13f08aed52SDaniel Axtens static pgprot_t __init kasan_prot_ro(void)
14f08aed52SDaniel Axtens {
15f08aed52SDaniel Axtens 	if (early_mmu_has_feature(MMU_FTR_HPTE_TABLE))
16f08aed52SDaniel Axtens 		return PAGE_READONLY;
17f08aed52SDaniel Axtens 
18f08aed52SDaniel Axtens 	return PAGE_KERNEL_RO;
19f08aed52SDaniel Axtens }
20f08aed52SDaniel Axtens 
kasan_populate_pte(pte_t * ptep,pgprot_t prot)21f08aed52SDaniel Axtens static void __init kasan_populate_pte(pte_t *ptep, pgprot_t prot)
22f08aed52SDaniel Axtens {
23f08aed52SDaniel Axtens 	unsigned long va = (unsigned long)kasan_early_shadow_page;
24f08aed52SDaniel Axtens 	phys_addr_t pa = __pa(kasan_early_shadow_page);
25f08aed52SDaniel Axtens 	int i;
26f08aed52SDaniel Axtens 
27f08aed52SDaniel Axtens 	for (i = 0; i < PTRS_PER_PTE; i++, ptep++)
286042a165SChristophe Leroy 		__set_pte_at(&init_mm, va, ptep, pfn_pte(PHYS_PFN(pa), prot), 1);
29f08aed52SDaniel Axtens }
30f08aed52SDaniel Axtens 
/*
 * Give every PMD entry covering the shadow range [k_start, k_end) its own
 * PTE table, replacing entries that still point at the shared early shadow
 * PTE table (kasan_early_shadow_pte).  Entries already backed by a dedicated
 * table are left untouched, so the function is safe to call repeatedly on
 * overlapping ranges.
 *
 * Returns 0 on success, -ENOMEM if a page-table allocation fails.
 */
int __init kasan_init_shadow_page_tables(unsigned long k_start, unsigned long k_end)
{
	pmd_t *pmd;
	unsigned long k_cur, k_next;

	pmd = pmd_off_k(k_start);

	/* Walk one PMD entry per iteration; pgd_addr_end() clamps to k_end. */
	for (k_cur = k_start; k_cur != k_end; k_cur = k_next, pmd++) {
		pte_t *new;

		k_next = pgd_addr_end(k_cur, k_end);
		/* Skip entries that no longer use the shared early table. */
		if ((void *)pmd_page_vaddr(*pmd) != kasan_early_shadow_pte)
			continue;

		new = memblock_alloc(PTE_FRAG_SIZE, PTE_FRAG_SIZE);

		if (!new)
			return -ENOMEM;
		/*
		 * Pre-populate the new table so every slot maps the early
		 * shadow page before it is installed in the PMD.
		 */
		kasan_populate_pte(new, PAGE_KERNEL);
		pmd_populate_kernel(&init_mm, pmd, new);
	}
	return 0;
}
54f08aed52SDaniel Axtens 
/*
 * Default (weak) implementation: back the shadow of the linear-map region
 * [start, start + size) with real memory.
 *
 * Shadow page tables are created first, then one contiguous memblock
 * allocation provides the shadow pages, which are wired in one PTE at a
 * time.  Platforms with block-mapping support override this with their own
 * kasan_init_region().
 *
 * Returns 0 on success, -ENOMEM on allocation failure.
 */
int __init __weak kasan_init_region(void *start, size_t size)
{
	unsigned long k_start = (unsigned long)kasan_mem_to_shadow(start);
	unsigned long k_end = (unsigned long)kasan_mem_to_shadow(start + size);
	unsigned long k_cur;
	int ret;
	void *block;

	ret = kasan_init_shadow_page_tables(k_start, k_end);
	if (ret)
		return ret;

	/*
	 * Align down before sizing the allocation so the backing block
	 * covers the whole first page of the shadow range.
	 */
	k_start = k_start & PAGE_MASK;
	block = memblock_alloc(k_end - k_start, PAGE_SIZE);
	if (!block)
		return -ENOMEM;

	/* k_start is already page-aligned here; the mask below is a no-op. */
	for (k_cur = k_start & PAGE_MASK; k_cur < k_end; k_cur += PAGE_SIZE) {
		pmd_t *pmd = pmd_off_k(k_cur);
		void *va = block + k_cur - k_start;
		pte_t pte = pfn_pte(PHYS_PFN(__pa(va)), PAGE_KERNEL);

		__set_pte_at(&init_mm, k_cur, pte_offset_kernel(pmd, k_cur), pte, 0);
	}
	/* Drop any stale translations for the range just remapped. */
	flush_tlb_kernel_range(k_start, k_end);
	return 0;
}
82f08aed52SDaniel Axtens 
83f08aed52SDaniel Axtens void __init
kasan_update_early_region(unsigned long k_start,unsigned long k_end,pte_t pte)84f08aed52SDaniel Axtens kasan_update_early_region(unsigned long k_start, unsigned long k_end, pte_t pte)
85f08aed52SDaniel Axtens {
86f08aed52SDaniel Axtens 	unsigned long k_cur;
87f08aed52SDaniel Axtens 
88f08aed52SDaniel Axtens 	for (k_cur = k_start; k_cur != k_end; k_cur += PAGE_SIZE) {
89f08aed52SDaniel Axtens 		pmd_t *pmd = pmd_off_k(k_cur);
90f08aed52SDaniel Axtens 		pte_t *ptep = pte_offset_kernel(pmd, k_cur);
91f08aed52SDaniel Axtens 
92f08aed52SDaniel Axtens 		if (pte_page(*ptep) != virt_to_page(lm_alias(kasan_early_shadow_page)))
93f08aed52SDaniel Axtens 			continue;
94f08aed52SDaniel Axtens 
95f08aed52SDaniel Axtens 		__set_pte_at(&init_mm, k_cur, ptep, pte, 0);
96f08aed52SDaniel Axtens 	}
97f08aed52SDaniel Axtens 
98f08aed52SDaniel Axtens 	flush_tlb_kernel_range(k_start, k_end);
99f08aed52SDaniel Axtens }
100f08aed52SDaniel Axtens 
kasan_remap_early_shadow_ro(void)101f08aed52SDaniel Axtens static void __init kasan_remap_early_shadow_ro(void)
102f08aed52SDaniel Axtens {
103f08aed52SDaniel Axtens 	pgprot_t prot = kasan_prot_ro();
104f08aed52SDaniel Axtens 	phys_addr_t pa = __pa(kasan_early_shadow_page);
105f08aed52SDaniel Axtens 
106f08aed52SDaniel Axtens 	kasan_populate_pte(kasan_early_shadow_pte, prot);
107f08aed52SDaniel Axtens 
108f08aed52SDaniel Axtens 	kasan_update_early_region(KASAN_SHADOW_START, KASAN_SHADOW_END,
109f08aed52SDaniel Axtens 				  pfn_pte(PHYS_PFN(pa), prot));
110f08aed52SDaniel Axtens }
111f08aed52SDaniel Axtens 
kasan_unmap_early_shadow_vmalloc(void)112f08aed52SDaniel Axtens static void __init kasan_unmap_early_shadow_vmalloc(void)
113f08aed52SDaniel Axtens {
114f08aed52SDaniel Axtens 	unsigned long k_start = (unsigned long)kasan_mem_to_shadow((void *)VMALLOC_START);
115f08aed52SDaniel Axtens 	unsigned long k_end = (unsigned long)kasan_mem_to_shadow((void *)VMALLOC_END);
116f08aed52SDaniel Axtens 
117f08aed52SDaniel Axtens 	kasan_update_early_region(k_start, k_end, __pte(0));
118f08aed52SDaniel Axtens 
119f08aed52SDaniel Axtens #ifdef MODULES_VADDR
120f08aed52SDaniel Axtens 	k_start = (unsigned long)kasan_mem_to_shadow((void *)MODULES_VADDR);
121f08aed52SDaniel Axtens 	k_end = (unsigned long)kasan_mem_to_shadow((void *)MODULES_END);
122f08aed52SDaniel Axtens 	kasan_update_early_region(k_start, k_end, __pte(0));
123f08aed52SDaniel Axtens #endif
124f08aed52SDaniel Axtens }
125f08aed52SDaniel Axtens 
kasan_mmu_init(void)126f08aed52SDaniel Axtens void __init kasan_mmu_init(void)
127f08aed52SDaniel Axtens {
128f08aed52SDaniel Axtens 	int ret;
129f08aed52SDaniel Axtens 
130f08aed52SDaniel Axtens 	if (early_mmu_has_feature(MMU_FTR_HPTE_TABLE)) {
131f08aed52SDaniel Axtens 		ret = kasan_init_shadow_page_tables(KASAN_SHADOW_START, KASAN_SHADOW_END);
132f08aed52SDaniel Axtens 
133f08aed52SDaniel Axtens 		if (ret)
134f08aed52SDaniel Axtens 			panic("kasan: kasan_init_shadow_page_tables() failed");
135f08aed52SDaniel Axtens 	}
136f08aed52SDaniel Axtens }
137f08aed52SDaniel Axtens 
kasan_init(void)138f08aed52SDaniel Axtens void __init kasan_init(void)
139f08aed52SDaniel Axtens {
140f08aed52SDaniel Axtens 	phys_addr_t base, end;
141f08aed52SDaniel Axtens 	u64 i;
142f08aed52SDaniel Axtens 	int ret;
143f08aed52SDaniel Axtens 
144f08aed52SDaniel Axtens 	for_each_mem_range(i, &base, &end) {
145f08aed52SDaniel Axtens 		phys_addr_t top = min(end, total_lowmem);
146f08aed52SDaniel Axtens 
147f08aed52SDaniel Axtens 		if (base >= top)
148f08aed52SDaniel Axtens 			continue;
149f08aed52SDaniel Axtens 
150f08aed52SDaniel Axtens 		ret = kasan_init_region(__va(base), top - base);
151f08aed52SDaniel Axtens 		if (ret)
152f08aed52SDaniel Axtens 			panic("kasan: kasan_init_region() failed");
153f08aed52SDaniel Axtens 	}
154f08aed52SDaniel Axtens 
155f08aed52SDaniel Axtens 	if (IS_ENABLED(CONFIG_KASAN_VMALLOC)) {
156f08aed52SDaniel Axtens 		ret = kasan_init_shadow_page_tables(KASAN_SHADOW_START, KASAN_SHADOW_END);
157f08aed52SDaniel Axtens 
158f08aed52SDaniel Axtens 		if (ret)
159f08aed52SDaniel Axtens 			panic("kasan: kasan_init_shadow_page_tables() failed");
160f08aed52SDaniel Axtens 	}
161f08aed52SDaniel Axtens 
162f08aed52SDaniel Axtens 	kasan_remap_early_shadow_ro();
163f08aed52SDaniel Axtens 
164f08aed52SDaniel Axtens 	clear_page(kasan_early_shadow_page);
165f08aed52SDaniel Axtens 
166f08aed52SDaniel Axtens 	/* At this point kasan is fully initialized. Enable error messages */
167f08aed52SDaniel Axtens 	init_task.kasan_depth = 0;
168f08aed52SDaniel Axtens 	pr_info("KASAN init done\n");
169f08aed52SDaniel Axtens }
170f08aed52SDaniel Axtens 
kasan_late_init(void)171f08aed52SDaniel Axtens void __init kasan_late_init(void)
172f08aed52SDaniel Axtens {
173f08aed52SDaniel Axtens 	if (IS_ENABLED(CONFIG_KASAN_VMALLOC))
174f08aed52SDaniel Axtens 		kasan_unmap_early_shadow_vmalloc();
175f08aed52SDaniel Axtens }
176f08aed52SDaniel Axtens 
/*
 * Earliest KASAN setup: point the entire shadow region at the shared early
 * shadow page so instrumented code can run before real shadow memory
 * exists.  Fills the early PTE table, then installs it in every PMD entry
 * covering [KASAN_SHADOW_START, KASAN_SHADOW_END).
 */
void __init kasan_early_init(void)
{
	unsigned long addr = KASAN_SHADOW_START;
	unsigned long end = KASAN_SHADOW_END;
	unsigned long next;
	pmd_t *pmd = pmd_off_k(addr);

	/* The PMD walk below relies on the shadow start being PGDIR-aligned. */
	BUILD_BUG_ON(KASAN_SHADOW_START & ~PGDIR_MASK);

	kasan_populate_pte(kasan_early_shadow_pte, PAGE_KERNEL);

	/* Install the shared early PTE table in each covering PMD entry. */
	do {
		next = pgd_addr_end(addr, end);
		pmd_populate_kernel(&init_mm, pmd, kasan_early_shadow_pte);
	} while (pmd++, addr = next, addr != end);
}
193