Searched refs: vmemmap_end (results 1 – 2 of 2, sorted by relevance)
/linux/mm/hugetlb_vmemmap.c

  453  unsigned long vmemmap_start = (unsigned long)&folio->page, vmemmap_end;       in __hugetlb_vmemmap_restore_folio() local
  465  vmemmap_end = vmemmap_start + hugetlb_vmemmap_size(h);                         in __hugetlb_vmemmap_restore_folio()
  471  * @vmemmap_end) are mapped to are freed to the buddy allocator, and            in __hugetlb_vmemmap_restore_folio()
  476  ret = vmemmap_remap_alloc(vmemmap_start, vmemmap_end, vmemmap_reuse, flags);   in __hugetlb_vmemmap_restore_folio()
  565  unsigned long vmemmap_start = (unsigned long)&folio->page, vmemmap_end;        in __hugetlb_vmemmap_optimize_folio() local
  591  vmemmap_end = vmemmap_start + hugetlb_vmemmap_size(h);                         in __hugetlb_vmemmap_optimize_folio()
  596  * Remap the vmemmap virtual address range [@vmemmap_start, @vmemmap_end)      in __hugetlb_vmemmap_optimize_folio()
  601  ret = vmemmap_remap_free(vmemmap_start, vmemmap_end, vmemmap_reuse,            in __hugetlb_vmemmap_optimize_folio()
  631  unsigned long vmemmap_start = (unsigned long)&folio->page, vmemmap_end;        in hugetlb_vmemmap_split_folio() local
  637  vmemmap_end                                                                    in hugetlb_vmemmap_split_folio()
  [all...]
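Read together, the hugetlb_vmemmap.c hits show one recurring pattern: each function derives a half-open range [vmemmap_start, vmemmap_end) covering the folio's struct pages, then hands it to a remap helper. Below is a condensed sketch assembled from the excerpts above, not the exact kernel code: the function name sketch_restore_folio, the vmemmap_reuse handling, and the omitted error paths are illustrative assumptions.

/*
 * Sketch assembled from the search hits (assumed reconstruction, not
 * verbatim kernel source).
 */
static int sketch_restore_folio(const struct hstate *h, struct folio *folio,
				unsigned long flags)
{
	/* Hit 453: the range starts at the folio's first struct page. */
	unsigned long vmemmap_start = (unsigned long)&folio->page, vmemmap_end;
	unsigned long vmemmap_reuse;

	/* Hit 465: the range ends hugetlb_vmemmap_size(h) bytes later. */
	vmemmap_end = vmemmap_start + hugetlb_vmemmap_size(h);

	/*
	 * Assumption: the first vmemmap page is kept and reused for the
	 * remapped tail pages, so the remapped range begins one page in.
	 */
	vmemmap_reuse = vmemmap_start;
	vmemmap_start += PAGE_SIZE;

	/*
	 * Hits 471/476: the pages that [@vmemmap_start, @vmemmap_end) was
	 * mapped to were freed to the buddy allocator during optimization;
	 * restoring allocates and remaps fresh vmemmap pages for the range.
	 */
	return vmemmap_remap_alloc(vmemmap_start, vmemmap_end, vmemmap_reuse, flags);
}

The optimize path (hits 565 – 601) mirrors this with vmemmap_remap_free() on the same half-open range, and hugetlb_vmemmap_split_folio() (hits 631 – 637) computes the range the same way.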
/linux/arch/powerpc/mm/init_64.c

   99  unsigned long vmemmap_end = vmemmap_addr + vmemmap_map_size;                   in vmemmap_populated() local
  102  for (; (unsigned long)start < vmemmap_end; start += PAGES_PER_SUBSECTION)      in vmemmap_populated()
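The powerpc hit uses vmemmap_end as a plain loop bound. A minimal sketch follows, assuming start points at the first struct page of the range and using a hypothetical subsection_is_populated() stand-in for the real check; only the vmemmap_end computation (hit 99) and the loop header (hit 102) come from the excerpt.

/* Assumed reconstruction of vmemmap_populated()'s loop. */
static int sketch_vmemmap_populated(unsigned long vmemmap_addr,
				    int vmemmap_map_size)
{
	struct page *start = (struct page *)vmemmap_addr;            /* assumption */
	unsigned long vmemmap_end = vmemmap_addr + vmemmap_map_size; /* hit 99 */

	/* Hit 102: stride through the range one subsection of struct pages
	 * at a time; pointer arithmetic advances PAGES_PER_SUBSECTION
	 * entries per iteration. */
	for (; (unsigned long)start < vmemmap_end; start += PAGES_PER_SUBSECTION)
		if (subsection_is_populated(start))           /* hypothetical check */
			return 1;

	return 0;
}

Because the range is half-open, vmemmap_end is one byte past the last struct page of interest, so the strict < comparison in the loop header is the correct bound.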