
Searched refs:P4D_SIZE (Results 1 – 21 of 21) sorted by relevance

/linux/arch/riscv/mm/
kasan_init.c:128 if (p4d_none(p4dp_get(p4dp)) && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_populate_p4d()
129 (next - vaddr) >= P4D_SIZE) { in kasan_populate_p4d()
130 phys_addr = memblock_phys_alloc(P4D_SIZE, P4D_SIZE); in kasan_populate_p4d()
133 memset(__va(phys_addr), KASAN_SHADOW_INIT, P4D_SIZE); in kasan_populate_p4d()
206 if (pgtable_l4_enabled && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_early_clear_p4d()
207 (next - vaddr) >= P4D_SIZE) { in kasan_early_clear_p4d()
290 if (p4d_none(p4dp_get(p4dp)) && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_early_populate_p4d()
291 (next - vaddr) >= P4D_SIZE) { in kasan_early_populate_p4d()
hugetlbpage.c:135 return P4D_SIZE - PUD_SIZE; in hugetlb_mask_last_page()
234 else if (sz >= P4D_SIZE) in set_huge_pte_at()
/linux/include/asm-generic/
pgtable-nop4d.h:13 #define P4D_SIZE (1UL << P4D_SHIFT)
14 #define P4D_MASK (~(P4D_SIZE-1))
tlb.h:644 if (_sz >= P4D_SIZE) \
/linux/mm/kasan/
init.c:190 if (IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) { in pud_init()
435 if (IS_ALIGNED(addr, P4D_SIZE) && in kasan_remove_p4d_table()
436 IS_ALIGNED(next, P4D_SIZE)) { in kasan_remove_p4d_table()
/linux/arch/x86/include/asm/
pgtable_areas.h:19 #define CPU_ENTRY_AREA_MAP_SIZE P4D_SIZE
/linux/arch/riscv/include/asm/
pgtable-64.h:31 #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT)
32 #define P4D_MASK (~(P4D_SIZE - 1))
/linux/arch/powerpc/include/asm/nohash/
pgtable.h:78 else if (sz < P4D_SIZE) in pte_update()
81 pdsize = P4D_SIZE; in pte_update()
/linux/arch/arm64/include/asm/
pgtable-hwdef.h:67 #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT)
68 #define P4D_MASK (~(P4D_SIZE-1))
/linux/arch/powerpc/mm/
hugetlbpage.c:53 if (!mm_pud_folded(mm) && sz >= P4D_SIZE) in huge_pte_alloc()
/linux/arch/s390/boot/
vmem.c:124 IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) { in kasan_p4d_populate_zero_shadow()
/linux/arch/x86/mm/
ident_map.c:157 next = (addr & P4D_MASK) + P4D_SIZE; in ident_p4d_init()
kasan_init_64.c:188 for (; start < end; start += P4D_SIZE) in clear_pgds()
/linux/mm/
page_vma_mapped.c:228 step_forward(pvmw, P4D_SIZE); in page_vma_mapped_walk()
vmalloc.c:245 if ((end - addr) != P4D_SIZE) in vmap_try_huge_p4d()
248 if (!IS_ALIGNED(addr, P4D_SIZE)) in vmap_try_huge_p4d()
251 if (!IS_ALIGNED(phys_addr, P4D_SIZE)) in vmap_try_huge_p4d()
vmscan.c:3634 end = round_up(end, P4D_SIZE); in walk_pud_range()
hugetlb.c:7378 return P4D_SIZE - PUD_SIZE; in hugetlb_mask_last_page()
/linux/arch/arm64/mm/
kasan_init.c:182 #define SHADOW_ALIGN P4D_SIZE
/linux/arch/arm/mm/
dump.c:389 addr = start + i * P4D_SIZE; in walk_p4d()
/linux/arch/powerpc/mm/ptdump/
hashpagetable.c:443 addr = start + i * P4D_SIZE; in walk_p4d()
/linux/arch/x86/xen/
mmu_pv.c:1108 xen_free_ro_pages(pa, P4D_SIZE); in xen_cleanmfnmap_p4d()
1905 n_pud = roundup(size, P4D_SIZE) >> P4D_SHIFT; in xen_relocate_p2m()
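The hits above share one pattern: P4D_SIZE is the span of virtual address space covered by a single p4d (fourth-level page table) entry, derived from P4D_SHIFT, and callers mostly use it for alignment and size checks before operating at the p4d level. A minimal sketch of that relationship follows; the P4D_SHIFT value is only illustrative (it matches x86-64 with 5-level paging but is architecture-specific), and the helper function is hypothetical, not code from the tree:

    /* Sketch only: illustrative shift value, hypothetical helper. */
    #include <stdbool.h>

    #define P4D_SHIFT 39
    #define P4D_SIZE  (1UL << P4D_SHIFT)      /* bytes mapped by one p4d entry */
    #define P4D_MASK  (~(P4D_SIZE - 1))       /* rounds an address down to a p4d boundary */

    #define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)   /* mirrors the kernel macro */

    /* Hypothetical helper showing the check used by the kasan populate
     * helpers listed above; vmap_try_huge_p4d() is stricter and requires
     * the range to be exactly P4D_SIZE. */
    static bool p4d_level_mapping_possible(unsigned long vaddr, unsigned long next)
    {
            return IS_ALIGNED(vaddr, P4D_SIZE) && (next - vaddr) >= P4D_SIZE;
    }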