Searched refs: P4D_SIZE (results 1 – 25 of 26), sorted by relevance

/linux/arch/riscv/mm/
kasan_init.c
128 if (p4d_none(p4dp_get(p4dp)) && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_populate_p4d()
129 (next - vaddr) >= P4D_SIZE) { in kasan_populate_p4d()
130 phys_addr = memblock_phys_alloc(P4D_SIZE, P4D_SIZE); in kasan_populate_p4d()
133 memset(__va(phys_addr), KASAN_SHADOW_INIT, P4D_SIZE); in kasan_populate_p4d()
206 if (pgtable_l4_enabled && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_early_clear_p4d()
207 (next - vaddr) >= P4D_SIZE) { in kasan_early_clear_p4d()
290 if (p4d_none(p4dp_get(p4dp)) && IS_ALIGNED(vaddr, P4D_SIZE) && in kasan_early_populate_p4d()
291 (next - vaddr) >= P4D_SIZE) { in kasan_early_populate_p4d()
init.c
616 if (sz == P4D_SIZE) { in create_p4d_mapping()
698 !(pa & (P4D_SIZE - 1)) && !(va & (P4D_SIZE - 1)) && size >= P4D_SIZE) in best_map_size()
699 return P4D_SIZE; in best_map_size()
820 P4D_SIZE, PAGE_TABLE); in set_satp_mode()
1173 (uintptr_t)fixmap_pud, P4D_SIZE, PAGE_TABLE); in setup_vm()
1185 (uintptr_t)trampoline_pud, P4D_SIZE, PAGE_TABLE); in setup_vm()
1728 free_vmemmap_storage(p4d_page(p4d), P4D_SIZE, altmap); in remove_p4d_mapping()
hugetlbpage.c
135 return P4D_SIZE - PUD_SIZE; in hugetlb_mask_last_page()
234 else if (sz >= P4D_SIZE) in set_huge_pte_at()
pageattr.c
196 if (next - vaddr >= P4D_SIZE && in __split_linear_mapping_p4d()
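
Several of the riscv hits above (kasan_populate_p4d(), best_map_size()) gate a P4D-sized step on the same test: the address must be P4D_SIZE-aligned and at least P4D_SIZE bytes must remain. Below is a minimal standalone sketch of that selection logic; the EX_-prefixed constants and the ex_best_map_size() name are placeholders for illustration, not the kernel's macros or function.

#define EX_PAGE_SIZE (1UL << 12)   /* placeholder values, illustration only */
#define EX_PMD_SIZE  (1UL << 21)
#define EX_PUD_SIZE  (1UL << 30)
#define EX_P4D_SIZE  (1UL << 39)

/* Sketch of the pattern in best_map_size() above: pick the largest block
 * size whose alignment and remaining length both permit it. */
static unsigned long ex_best_map_size(unsigned long pa, unsigned long va,
                                      unsigned long size)
{
	if (!(pa & (EX_P4D_SIZE - 1)) && !(va & (EX_P4D_SIZE - 1)) && size >= EX_P4D_SIZE)
		return EX_P4D_SIZE;
	if (!(pa & (EX_PUD_SIZE - 1)) && !(va & (EX_PUD_SIZE - 1)) && size >= EX_PUD_SIZE)
		return EX_PUD_SIZE;
	if (!(pa & (EX_PMD_SIZE - 1)) && !(va & (EX_PMD_SIZE - 1)) && size >= EX_PMD_SIZE)
		return EX_PMD_SIZE;
	return EX_PAGE_SIZE;
}
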
/linux/include/asm-generic/
pgtable-nop4d.h
13 #define P4D_SIZE (1UL << P4D_SHIFT) macro
14 #define P4D_MASK (~(P4D_SIZE-1))
tlb.h
644 if (_sz >= P4D_SIZE) \
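
Both the generic fallback above and the per-arch headers further down derive the two macros the same way: P4D_SIZE is 1 shifted left by P4D_SHIFT, and P4D_MASK clears everything below that boundary. Here is a small standalone sketch of the arithmetic; the shift value 39 is the x86-64 one and is used purely for illustration.

#include <stdio.h>

#define EX_P4D_SHIFT 39                      /* x86-64 value, illustration only */
#define EX_P4D_SIZE  (1UL << EX_P4D_SHIFT)   /* bytes covered by one p4d entry */
#define EX_P4D_MASK  (~(EX_P4D_SIZE - 1))    /* clears the offset within a p4d */

int main(void)
{
	unsigned long addr = 0x123456789000UL;   /* arbitrary example address */

	/* Round down to the covering p4d region and step to the next one --
	 * the same arithmetic as phys_p4d_init() and debugfs_radix_read() below. */
	unsigned long start = addr & EX_P4D_MASK;
	unsigned long next  = (addr & EX_P4D_MASK) + EX_P4D_SIZE;

	printf("P4D_SIZE = %lu GiB\n", EX_P4D_SIZE >> 30);
	printf("region: [%#lx, %#lx)\n", start, next);
	return 0;
}
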
/linux/arch/x86/include/asm/
pgtable_64_types.h
65 #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT) macro
66 #define P4D_MASK (~(P4D_SIZE - 1))
pgtable_areas.h
19 #define CPU_ENTRY_AREA_MAP_SIZE P4D_SIZE
/linux/mm/kasan/
init.c
196 if (IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) { in zero_p4d_populate()
441 if (IS_ALIGNED(addr, P4D_SIZE) && in kasan_remove_p4d_table()
442 IS_ALIGNED(next, P4D_SIZE)) { in kasan_remove_p4d_table()
/linux/arch/riscv/include/asm/
pgtable-64.h
31 #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT) macro
32 #define P4D_MASK (~(P4D_SIZE - 1))
/linux/arch/powerpc/include/asm/nohash/
pgtable.h
78 else if (sz < P4D_SIZE) in pte_update()
81 pdsize = P4D_SIZE; in pte_update()
/linux/arch/arm64/include/asm/
pgtable-hwdef.h
67 #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT) macro
68 #define P4D_MASK (~(P4D_SIZE-1))
/linux/arch/powerpc/mm/
hugetlbpage.c
53 if (!mm_pud_folded(mm) && sz >= P4D_SIZE) in huge_pte_alloc()
/linux/arch/x86/mm/
mem_encrypt_identity.c
271 entries += (DIV_ROUND_UP(len, P4D_SIZE) + 1) * sizeof(pud_t) * PTRS_PER_PUD; in sme_pgtable_calc()
282 tables += DIV_ROUND_UP(entries, P4D_SIZE) * sizeof(pud_t) * PTRS_PER_PUD; in sme_pgtable_calc()
kasan_init_64.c
188 for (; start < end; start += P4D_SIZE) in clear_pgds()
init_64.c
698 vaddr_next = (vaddr & P4D_MASK) + P4D_SIZE; in phys_p4d_init()
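
The mem_encrypt_identity.c hit above sizes worst-case page tables for an identity map: one page of PUD entries per P4D_SIZE-spanning chunk of the range, plus one extra in case the range does not start on a P4D boundary. A rough standalone sketch of that estimate follows; the EX_ constants stand in for the x86-64 values and the whole thing is illustrative, not the kernel routine.

#include <stdio.h>

#define EX_P4D_SIZE     (1UL << 39)  /* x86-64 P4D_SIZE, illustration only */
#define EX_PTRS_PER_PUD 512UL        /* entries per PUD table on x86-64 */
#define EX_PUD_ENTRY    8UL          /* stand-in for sizeof(pud_t) */
#define EX_DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned long len = 64UL << 30;  /* arbitrary example: map 64 GiB */

	/* One PUD table per P4D_SIZE covered, plus one for an unaligned start --
	 * the same shape as the sme_pgtable_calc() line above. */
	unsigned long pud_tables = EX_DIV_ROUND_UP(len, EX_P4D_SIZE) + 1;
	unsigned long bytes = pud_tables * EX_PUD_ENTRY * EX_PTRS_PER_PUD;

	printf("%lu PUD tables, %lu bytes reserved\n", pud_tables, bytes);
	return 0;
}
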
/linux/arch/s390/boot/
vmem.c
124 IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) { in kasan_p4d_populate_zero_shadow()
/linux/mm/
page_vma_mapped.c
221 step_forward(pvmw, P4D_SIZE); in page_vma_mapped_walk()
vmalloc.c
245 if ((end - addr) != P4D_SIZE) in vmap_try_huge_p4d()
248 if (!IS_ALIGNED(addr, P4D_SIZE)) in vmap_try_huge_p4d()
251 if (!IS_ALIGNED(phys_addr, P4D_SIZE)) in vmap_try_huge_p4d()
hugetlb.c
7368 return P4D_SIZE - PUD_SIZE; in huge_pmd_unshare()
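
The vmalloc.c hits above show the eligibility test for a P4D-level huge mapping: the range must be exactly P4D_SIZE long and both the virtual and the physical address must be P4D_SIZE-aligned. A standalone sketch of just that test follows; the function name and EX_ macros are placeholders, not the kernel's vmap_try_huge_p4d().

#include <stdbool.h>

#define EX_P4D_SIZE (1UL << 39)                      /* illustrative value only */
#define EX_IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

/* Sketch of the checks visible in vmap_try_huge_p4d() above: a huge p4d
 * mapping is only attempted for an exactly P4D_SIZE-long, fully aligned range. */
static bool ex_can_map_huge_p4d(unsigned long addr, unsigned long end,
                                unsigned long phys_addr)
{
	if ((end - addr) != EX_P4D_SIZE)
		return false;
	if (!EX_IS_ALIGNED(addr, EX_P4D_SIZE))
		return false;
	if (!EX_IS_ALIGNED(phys_addr, EX_P4D_SIZE))
		return false;
	return true;
}
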
/linux/arch/arm64/mm/
kasan_init.c
182 #define SHADOW_ALIGN P4D_SIZE
/linux/arch/arm/mm/
dump.c
389 addr = start + i * P4D_SIZE; in walk_p4d()
/linux/arch/powerpc/mm/ptdump/
hashpagetable.c
443 addr = start + i * P4D_SIZE; in walk_p4d()
/linux/arch/powerpc/mm/book3s64/
radix_pgtable.c
925 if (!IS_ALIGNED(addr, P4D_SIZE) || in remove_pagetable()
926 !IS_ALIGNED(next, P4D_SIZE)) { in remove_pagetable()
/linux/arch/powerpc/kvm/
book3s_64_mmu_radix.c
1388 gpa = (gpa & P4D_MASK) + P4D_SIZE; in debugfs_radix_read()
