xref: /linux/arch/arm64/include/asm/vmalloc.h (revision 4f2c0a4acffbec01079c28f839422e64ddeff004)
#ifndef _ASM_ARM64_VMALLOC_H
#define _ASM_ARM64_VMALLOC_H

#include <asm/page.h>
#include <asm/pgtable.h>

#ifdef CONFIG_HAVE_ARCH_HUGE_VMAP

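/*
 * Hooks consulted by the generic huge-vmap code to decide whether
 * vmalloc/ioremap areas may be mapped with PUD- or PMD-sized block
 * entries instead of base pages.
 */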
#define arch_vmap_pud_supported arch_vmap_pud_supported
static inline bool arch_vmap_pud_supported(pgprot_t prot)
{
	/*
	 * SW table walks can't handle removal of intermediate entries.
	 */
	return pud_sect_supported() &&
	       !IS_ENABLED(CONFIG_PTDUMP_DEBUGFS);
}

#define arch_vmap_pmd_supported arch_vmap_pmd_supported
static inline bool arch_vmap_pmd_supported(pgprot_t prot)
{
	/* See arch_vmap_pud_supported() */
	return !IS_ENABLED(CONFIG_PTDUMP_DEBUGFS);
}

#endif

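/*
 * Let the core vmalloc code switch to MTE-tagged (Normal Tagged) page
 * protections, via pgprot_tagged(), when vmalloc()ed memory needs to be
 * tagged (e.g. for hardware tag-based KASAN).
 */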
#define arch_vmap_pgprot_tagged arch_vmap_pgprot_tagged
static inline pgprot_t arch_vmap_pgprot_tagged(pgprot_t prot)
{
	return pgprot_tagged(prot);
}

#endif /* _ASM_ARM64_VMALLOC_H */
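
For context, the sketch below is a simplified, hypothetical illustration (not the actual mm/vmalloc.c implementation) of how a generic mapping path could consult arch_vmap_pud_supported() before choosing a PUD-sized block mapping; vmap_try_huge_pud_sketch() and its signature are illustrative names only.

#include <linux/kernel.h>
#include <linux/vmalloc.h>
#include <asm/vmalloc.h>
#include <asm/pgtable.h>

/* Hypothetical helper, for illustration only. */
static bool __maybe_unused vmap_try_huge_pud_sketch(unsigned long addr,
						    unsigned long end,
						    phys_addr_t phys_addr,
						    pgprot_t prot)
{
	/* The architecture must allow PUD block mappings for this prot. */
	if (!arch_vmap_pud_supported(prot))
		return false;

	/* The virtual range must span exactly one PUD... */
	if (end - addr != PUD_SIZE)
		return false;

	/* ...and both virtual and physical addresses must be PUD-aligned. */
	if (!IS_ALIGNED(addr, PUD_SIZE) || !IS_ALIGNED(phys_addr, PUD_SIZE))
		return false;

	/* A single PUD-sized block entry could be installed here. */
	return true;
}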