Searched refs: P4D_SIZE (results 1 – 21 of 21, sorted by relevance)
/linux/arch/riscv/mm/
  kasan_init.c
    128  if (p4d_none(p4dp_get(p4dp)) && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_populate_p4d()
    129  (next - vaddr) >= P4D_SIZE) { in kasan_populate_p4d()
    130  phys_addr = memblock_phys_alloc(P4D_SIZE, P4D_SIZE); in kasan_populate_p4d()
    133  memset(__va(phys_addr), KASAN_SHADOW_INIT, P4D_SIZE); in kasan_populate_p4d()
    206  if (pgtable_l4_enabled && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_early_clear_p4d()
    207  (next - vaddr) >= P4D_SIZE) { in kasan_early_clear_p4d()
    290  if (p4d_none(p4dp_get(p4dp)) && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_early_populate_p4d()
    291  (next - vaddr) >= P4D_SIZE) { in kasan_early_populate_p4d()
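All three riscv functions above share one guard: a P4D-sized block is only populated (or cleared) in one shot when the virtual address is P4D_SIZE-aligned and at least P4D_SIZE bytes remain in the range; the mm/kasan/init.c and s390 vmem.c hits further down apply the same test. A minimal userspace sketch of that guard, assuming P4D_SHIFT = 39 (the kernel derives the real value per architecture and paging mode):

    /* Standalone rendering of the alignment guard above;
     * P4D_SHIFT = 39 is an assumption, not the kernel's definition. */
    #include <stdbool.h>
    #include <stdio.h>

    #define P4D_SHIFT 39
    #define P4D_SIZE  (1UL << P4D_SHIFT)
    #define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

    static bool can_use_whole_p4d(unsigned long vaddr, unsigned long next)
    {
        /* Whole-entry fast path: aligned start and a full span left. */
        return IS_ALIGNED(vaddr, P4D_SIZE) && (next - vaddr) >= P4D_SIZE;
    }

    int main(void)
    {
        unsigned long va = 2UL * P4D_SIZE;

        printf("%d\n", can_use_whole_p4d(va, va + P4D_SIZE));        /* 1 */
        printf("%d\n", can_use_whole_p4d(va + 4096, va + P4D_SIZE)); /* 0 */
        return 0;
    }

When the guard fails, the walkers fall back to populating the range at the next-lower page-table level.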
  hugetlbpage.c
    135  return P4D_SIZE - PUD_SIZE; in hugetlb_mask_last_page()
    234  else if (sz >= P4D_SIZE) in set_huge_pte_at()
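The first hit is worth unpacking: hugetlb_mask_last_page() returns P4D_SIZE - PUD_SIZE for PUD-sized huge pages, a mask containing exactly the bits that index a PUD slot inside one P4D region (the generic mm/hugetlb.c hit later in this list returns the same value). OR-ing that mask into an address lands on the last PUD-sized slot of the current region, letting unmap loops skip ahead. A sketch of the arithmetic, assuming P4D_SHIFT = 39 and PUD_SHIFT = 30:

    /* Illustration only; the shift values are assumptions. */
    #include <stdio.h>

    #define PUD_SHIFT 30
    #define P4D_SHIFT 39
    #define PUD_SIZE  (1UL << PUD_SHIFT)
    #define P4D_SIZE  (1UL << P4D_SHIFT)

    int main(void)
    {
        unsigned long mask = P4D_SIZE - PUD_SIZE;  /* bits 30..38 set */
        unsigned long addr = 5UL * P4D_SIZE + 3UL * PUD_SIZE;

        printf("mask          = %#lx\n", mask);
        printf("last PUD slot = %#lx\n", addr | mask);
        /* One more PUD_SIZE stride then crosses into the next P4D region: */
        printf("next region   = %#lx\n", (addr | mask) + PUD_SIZE);
        return 0;
    }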
/linux/include/asm-generic/
  pgtable-nop4d.h
    13  #define P4D_SIZE (1UL << P4D_SHIFT) macro
    14  #define P4D_MASK (~(P4D_SIZE-1))
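This is the generic fallback for configurations where the p4d level is folded away: P4D_SHIFT collapses onto PGDIR_SHIFT, P4D_SIZE is the span of one (folded) entry, and P4D_MASK clears the offset bits below that boundary. The riscv and arm64 headers later in this list define the same pair, only spelled with _AC() so the constant also works in assembly. A standalone sketch of the size/mask relationship, with the shift value assumed:

    /* Demonstration values; PGDIR_SHIFT = 39 is an assumption. */
    #include <stdio.h>

    #define PGDIR_SHIFT 39
    #define P4D_SHIFT   PGDIR_SHIFT        /* folded: one p4d per pgd */
    #define P4D_SIZE    (1UL << P4D_SHIFT)
    #define P4D_MASK    (~(P4D_SIZE - 1))

    int main(void)
    {
        unsigned long addr = 0x123456789abcUL;

        /* P4D_MASK rounds any address down to its p4d boundary: */
        printf("region start = %#lx\n", addr & P4D_MASK);
        printf("region size  = %#lx\n", P4D_SIZE);
        return 0;
    }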
  tlb.h
    644  if (_sz >= P4D_SIZE) \
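This hit sits in a size dispatch: when a huge-page entry is torn down, the page size selects the coarsest level whose TLB range flush must be recorded, and `_sz >= P4D_SIZE` is the top rung of that ladder (the riscv set_huge_pte_at() and powerpc pte_update() hits in this list key off the same comparison). A sketch of the selection, with assumed x86-64-style shifts; the flush bodies are stand-ins, not kernel API:

    #include <stdio.h>

    #define PMD_SHIFT 21
    #define PUD_SHIFT 30
    #define P4D_SHIFT 39
    #define PMD_SIZE  (1UL << PMD_SHIFT)
    #define PUD_SIZE  (1UL << PUD_SHIFT)
    #define P4D_SIZE  (1UL << P4D_SHIFT)

    /* Pick the coarsest level that still covers a page of size sz. */
    static void record_flush_for_size(unsigned long sz)
    {
        if (sz >= P4D_SIZE)
            puts("record p4d-level range flush");
        else if (sz >= PUD_SIZE)
            puts("record pud-level range flush");
        else if (sz >= PMD_SIZE)
            puts("record pmd-level range flush");
        else
            puts("record pte-level range flush");
    }

    int main(void)
    {
        record_flush_for_size(2UL << 20);  /* 2 MiB huge page -> pmd */
        record_flush_for_size(1UL << 30);  /* 1 GiB huge page -> pud */
        return 0;
    }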
/linux/mm/kasan/
  init.c
    190  if (IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) { in pud_init()
    435  if (IS_ALIGNED(addr, P4D_SIZE) && in kasan_remove_p4d_table()
    436  IS_ALIGNED(next, P4D_SIZE)) { in kasan_remove_p4d_table()
/linux/arch/x86/include/asm/
  pgtable_areas.h
    19  #define CPU_ENTRY_AREA_MAP_SIZE P4D_SIZE
/linux/arch/riscv/include/asm/
  pgtable-64.h
    31  #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT) macro
    32  #define P4D_MASK (~(P4D_SIZE - 1))
/linux/arch/powerpc/include/asm/nohash/
  pgtable.h
    78  else if (sz < P4D_SIZE) in pte_update()
    81  pdsize = P4D_SIZE; in pte_update()
/linux/arch/arm64/include/asm/
  pgtable-hwdef.h
    67  #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT) macro
    68  #define P4D_MASK (~(P4D_SIZE-1))
/linux/arch/powerpc/mm/
  hugetlbpage.c
    53  if (!mm_pud_folded(mm) && sz >= P4D_SIZE) in huge_pte_alloc()
/linux/arch/s390/boot/
  vmem.c
    124  IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) { in kasan_p4d_populate_zero_shadow()
/linux/arch/x86/mm/
  ident_map.c
    157  next = (addr & P4D_MASK) + P4D_SIZE; in ident_p4d_init()
  kasan_init_64.c
    188  for (; start < end; start += P4D_SIZE) in clear_pgds()
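The ident_map.c hit shows the canonical walker step: `next = (addr & P4D_MASK) + P4D_SIZE` rounds the current address down to its p4d boundary, then advances one full span, so each p4d entry is visited exactly once even when the walk starts mid-region (clear_pgds() directly above, and the walk_p4d() hits below, iterate with a plain P4D_SIZE stride instead). A sketch of the stepping, with P4D_SHIFT = 39 assumed; real walkers also clamp `next` to `end`:

    #include <stdio.h>

    #define P4D_SHIFT 39
    #define P4D_SIZE  (1UL << P4D_SHIFT)
    #define P4D_MASK  (~(P4D_SIZE - 1))

    int main(void)
    {
        unsigned long addr = P4D_SIZE + 0x1000;     /* unaligned start */
        unsigned long end  = 3UL * P4D_SIZE + 0x2000;

        while (addr < end) {
            /* Next p4d boundary after addr, even if addr is unaligned: */
            unsigned long next = (addr & P4D_MASK) + P4D_SIZE;

            printf("p4d entry covers [%#lx, %#lx)\n", addr & P4D_MASK, next);
            addr = next;
        }
        return 0;
    }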
/linux/mm/
  page_vma_mapped.c
    228  step_forward(pvmw, P4D_SIZE); in page_vma_mapped_walk()
  vmalloc.c
    245  if ((end - addr) != P4D_SIZE) in vmap_try_huge_p4d()
    248  if (!IS_ALIGNED(addr, P4D_SIZE)) in vmap_try_huge_p4d()
    251  if (!IS_ALIGNED(phys_addr, P4D_SIZE)) in vmap_try_huge_p4d()
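vmap_try_huge_p4d() declines a huge mapping unless three checks pass together: the range is exactly one P4D_SIZE, and both the virtual and the physical address sit on a P4D_SIZE boundary, since a block entry cannot encode a sub-block offset. A sketch of the predicate (the real function additionally checks arch and config support), assuming P4D_SHIFT = 39:

    #include <stdbool.h>
    #include <stdio.h>

    #define P4D_SHIFT 39
    #define P4D_SIZE  (1UL << P4D_SHIFT)
    #define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

    /* Mirrors the three early-out checks in the snippet above. */
    static bool can_map_huge_p4d(unsigned long addr, unsigned long end,
                                 unsigned long phys_addr)
    {
        if ((end - addr) != P4D_SIZE)
            return false;
        if (!IS_ALIGNED(addr, P4D_SIZE))
            return false;
        if (!IS_ALIGNED(phys_addr, P4D_SIZE))
            return false;
        return true;
    }

    int main(void)
    {
        unsigned long va = 4UL * P4D_SIZE, pa = 8UL * P4D_SIZE;

        printf("%d\n", can_map_huge_p4d(va, va + P4D_SIZE, pa));      /* 1 */
        printf("%d\n", can_map_huge_p4d(va, va + P4D_SIZE, pa + 64)); /* 0 */
        return 0;
    }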
  vmscan.c
    3634  end = round_up(end, P4D_SIZE); in walk_pud_range()
  hugetlb.c
    7378  return P4D_SIZE - PUD_SIZE; in hugetlb_mask_last_page()
/linux/arch/arm64/mm/
  kasan_init.c
    182  #define SHADOW_ALIGN P4D_SIZE
/linux/arch/arm/mm/
  dump.c
    389  addr = start + i * P4D_SIZE; in walk_p4d()
/linux/arch/powerpc/mm/ptdump/
  hashpagetable.c
    443  addr = start + i * P4D_SIZE; in walk_p4d()
/linux/arch/x86/xen/
  mmu_pv.c
    1108  xen_free_ro_pages(pa, P4D_SIZE); in xen_cleanmfnmap_p4d()
    1905  n_pud = roundup(size, P4D_SIZE) >> P4D_SHIFT; in xen_relocate_p2m()
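The second hit converts bytes into a count of p4d entries: roundup(size, P4D_SIZE) >> P4D_SHIFT rounds partially covered regions up before dividing by the entry span. Worked numerically with P4D_SHIFT = 39 assumed (so one entry spans 512 GiB), a 700 GiB size rounds up to 1024 GiB and needs two entries:

    #include <stdio.h>

    #define P4D_SHIFT 39
    #define P4D_SIZE  (1UL << P4D_SHIFT)
    /* Equivalent of the kernel's roundup(): next multiple of y. */
    #define roundup(x, y) ((((x) + (y) - 1) / (y)) * (y))

    int main(void)
    {
        unsigned long size = 700UL << 30;  /* 700 GiB */

        printf("p4d entries: %lu\n", roundup(size, P4D_SIZE) >> P4D_SHIFT);
        return 0;
    }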