
Searched refs: end_pfn (Results 1 – 25 of 62) sorted by relevance


/linux/mm/
mm_init.c
300 unsigned long start_pfn, end_pfn; in early_calculate_totalpages()
303 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) { in early_calculate_totalpages()
304 unsigned long pages = end_pfn - start_pfn; in early_calculate_totalpages()
462 unsigned long start_pfn, end_pfn; in find_zone_movable_pfns_for_nodes()
480 for_each_mem_pfn_range(i, nid, &start_pfn, &end_pfn, NULL) { in find_zone_movable_pfns_for_nodes()
484 if (start_pfn >= end_pfn) in find_zone_movable_pfns_for_nodes()
490 kernel_pages = min(end_pfn, usable_startpfn) in find_zone_movable_pfns_for_nodes()
499 if (end_pfn <= usable_startpfn) { in find_zone_movable_pfns_for_nodes()
507 zone_movable_pfn[nid] = end_pfn; in find_zone_movable_pfns_for_nodes()
515 * start_pfn->end_pfn in find_zone_movable_pfns_for_nodes()
301 unsigned long start_pfn, end_pfn; early_calculate_totalpages() local
463 unsigned long start_pfn, end_pfn; find_zone_movable_pfns_for_nodes() local
550 unsigned long start_pfn, end_pfn; find_zone_movable_pfns_for_nodes() local
603 unsigned long start_pfn, end_pfn; __early_pfn_to_nid() local
673 defer_init(int nid,unsigned long pfn,unsigned long end_pfn) defer_init() argument
735 defer_init(int nid,unsigned long pfn,unsigned long end_pfn) defer_init() argument
755 unsigned long end_pfn = PFN_UP(end); reserve_bootmem_region() local
854 unsigned long pfn, end_pfn = start_pfn + size; memmap_init_range() local
918 memmap_init_zone_range(struct zone * zone,unsigned long start_pfn,unsigned long end_pfn,unsigned long * hole_pfn) memmap_init_zone_range() argument
942 unsigned long start_pfn, end_pfn; memmap_init() local
1049 unsigned long pfn, end_pfn = head_pfn + nr_pages; memmap_init_compound() local
1075 unsigned long pfn, end_pfn = start_pfn + nr_pages; memmap_init_zone_device() local
1158 unsigned long start_pfn, end_pfn; __absent_pages_in_range() local
1177 absent_pages_in_range(unsigned long start_pfn,unsigned long end_pfn) absent_pages_in_range() argument
1202 unsigned long start_pfn, end_pfn; zone_absent_pages_in_node() local
1276 unsigned long start_pfn, end_pfn; calc_nr_kernel_pages() local
1654 get_pfn_range_for_nid(unsigned int nid,unsigned long * start_pfn,unsigned long * end_pfn) get_pfn_range_for_nid() argument
1675 unsigned long end_pfn = 0; free_area_init_node() local
1759 unsigned long start_pfn, end_pfn; free_area_init() local
1961 deferred_init_pages(struct zone * zone,unsigned long pfn,unsigned long end_pfn) deferred_init_pages() argument
2020 deferred_init_maxorder(u64 * i,struct zone * zone,unsigned long * start_pfn,unsigned long * end_pfn) deferred_init_maxorder() argument
2063 deferred_init_memmap_chunk(unsigned long start_pfn,unsigned long end_pfn,void * arg) deferred_init_memmap_chunk() argument
[all...]
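
Several of the mm_init.c hits above (early_calculate_totalpages(), memmap_init(), calc_nr_kernel_pages()) share one idiom: walk every memblock range with for_each_mem_pfn_range() and treat [start_pfn, end_pfn) as a half-open interval. A minimal sketch of that idiom, modelled on the early_calculate_totalpages() excerpt; the wrapper function count_present_pages() is hypothetical, only the iterator and the end_pfn - start_pfn arithmetic come from the hits:

    /* Illustrative sketch only: sum the pages in every memblock range.
     * for_each_mem_pfn_range() is the real iterator from <linux/memblock.h>;
     * the surrounding function is not kernel code.
     */
    #include <linux/init.h>
    #include <linux/memblock.h>

    static unsigned long __init count_present_pages(void)
    {
        unsigned long start_pfn, end_pfn, total = 0;
        int i, nid;

        /* end_pfn is exclusive, so each range holds end_pfn - start_pfn pages */
        for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid)
            total += end_pfn - start_pfn;

        return total;
    }
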
page_isolation.c
33 static struct page *has_unmovable_pages(unsigned long start_pfn, unsigned long end_pfn, in has_unmovable_pages() argument
41 pageblock_start_pfn(end_pfn - 1)); in has_unmovable_pages()
55 for (pfn = start_pfn; pfn < end_pfn; pfn++) { in has_unmovable_pages()
148 unsigned long start_pfn, unsigned long end_pfn) in set_migratetype_isolate() argument
179 end_pfn); in set_migratetype_isolate()
480 int start_isolate_page_range(unsigned long start_pfn, unsigned long end_pfn, in start_isolate_page_range() argument
487 unsigned long isolate_end = pageblock_align(end_pfn); in start_isolate_page_range()
514 start_pfn, end_pfn)) { in start_isolate_page_range()
534 void undo_isolate_page_range(unsigned long start_pfn, unsigned long end_pfn, in undo_isolate_page_range() argument
540 unsigned long isolate_end = pageblock_align(end_pfn); in undo_isolate_page_range()
[all …]
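
The page_isolation.c excerpts show that start_isolate_page_range()/undo_isolate_page_range() first widen the caller's [start_pfn, end_pfn) to whole pageblocks, then walk the widened range. A hedged sketch of that widening step, using the pageblock helpers visible above; isolate_start is assumed by symmetry with the isolate_end line in the excerpt, and the loop body is deliberately elided:

    /* Round the requested range out to pageblock boundaries, then walk it
     * pageblock by pageblock. Only the shape of the loop is shown here.
     */
    unsigned long isolate_start = pageblock_start_pfn(start_pfn); /* round down */
    unsigned long isolate_end   = pageblock_align(end_pfn);       /* round up   */
    unsigned long pfn;

    for (pfn = isolate_start; pfn < isolate_end; pfn += pageblock_nr_pages) {
        /* set or clear MIGRATE_ISOLATE on the pageblock containing pfn */
    }
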
page_idle.c
120 unsigned long pfn, end_pfn; in page_idle_bitmap_read() local
130 end_pfn = pfn + count * BITS_PER_BYTE; in page_idle_bitmap_read()
131 if (end_pfn > max_pfn) in page_idle_bitmap_read()
132 end_pfn = max_pfn; in page_idle_bitmap_read()
134 for (; pfn < end_pfn; pfn++) { in page_idle_bitmap_read()
165 unsigned long pfn, end_pfn; in page_idle_bitmap_write() local
175 end_pfn = pfn + count * BITS_PER_BYTE; in page_idle_bitmap_write()
176 if (end_pfn > max_pfn) in page_idle_bitmap_write()
177 end_pfn = max_pfn; in page_idle_bitmap_write()
179 for (; pfn < end_pfn; pfn++) { in page_idle_bitmap_write()
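In the page_idle.c hits, end_pfn is derived from a byte count into the idle bitmap (one bit per pfn, so one byte covers BITS_PER_BYTE pfns) and clamped to max_pfn so the walk never runs past the end of RAM. A minimal sketch of that arithmetic; the pos-to-pfn conversion is an assumption based on the bitmap layout, not a verbatim copy of the source:

    /* pos and count are byte offsets into the bitmap; each byte covers 8 pfns. */
    unsigned long pfn     = pos * BITS_PER_BYTE;
    unsigned long end_pfn = pfn + count * BITS_PER_BYTE;

    if (end_pfn > max_pfn)          /* clamp to one past the last valid pfn */
        end_pfn = max_pfn;

    for (; pfn < end_pfn; pfn++) {
        /* read or set the idle bit for this pfn */
    }
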
memory_hotplug.c
372 const unsigned long end_pfn = pfn + nr_pages; in __add_pages() local
399 for (; pfn < end_pfn; pfn += cur_nr_pages) { in __add_pages()
401 cur_nr_pages = min(end_pfn - pfn, in __add_pages()
413 /* find the smallest valid pfn in the range [start_pfn, end_pfn) */
416 unsigned long end_pfn) in find_smallest_section_pfn() argument
418 for (; start_pfn < end_pfn; start_pfn += PAGES_PER_SUBSECTION) { in find_smallest_section_pfn()
434 /* find the biggest valid pfn in the range [start_pfn, end_pfn). */
437 unsigned long end_pfn) in find_biggest_section_pfn() argument
442 pfn = end_pfn - 1; in find_biggest_section_pfn()
460 unsigned long end_pfn) in shrink_zone_span() argument
506 unsigned long end_pfn = zone_end_pfn(zone); update_pgdat_span() local
531 const unsigned long end_pfn = start_pfn + nr_pages; remove_pfn_range_from_zone() local
576 const unsigned long end_pfn = pfn + nr_pages; __remove_pages() local
640 const unsigned long end_pfn = start_pfn + nr_pages; online_pages_range() local
963 unsigned long online_pages = 0, max_pages, end_pfn; auto_movable_zone_for_pfn() local
1087 unsigned long end_pfn = pfn + nr_pages; mhp_init_memmap_on_memory() local
1125 unsigned long end_pfn = pfn + nr_pages; mhp_deinit_memmap_on_memory() local
1773 do_migrate_range(unsigned long start_pfn,unsigned long end_pfn) do_migrate_range() argument
1938 const unsigned long end_pfn = start_pfn + nr_pages; offline_pages() local
[all...]
compaction.c
590 unsigned long end_pfn, in isolate_freepages_block() argument
609 for (; blockpfn < end_pfn; blockpfn += stride, page += stride) { in isolate_freepages_block()
633 if (blockpfn + (1UL << order) <= end_pfn) { in isolate_freepages_block()
688 if (unlikely(blockpfn > end_pfn)) in isolate_freepages_block()
689 blockpfn = end_pfn; in isolate_freepages_block()
702 if (strict && blockpfn < end_pfn) in isolate_freepages_block()
727 unsigned long start_pfn, unsigned long end_pfn) in isolate_freepages_range() argument
741 for (; pfn < end_pfn; pfn += isolated, in isolate_freepages_range()
757 block_end_pfn = min(block_end_pfn, end_pfn); in isolate_freepages_range()
781 if (pfn < end_pfn) { in isolate_freepages_range()
[all …]
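
isolate_freepages_range() in the compaction.c hits shows another recurring shape: iterate a pfn range in pageblock-sized steps, clamping each block's end to the overall end_pfn so the final partial block is not overrun. A sketch of just that loop skeleton; the step expression and the elided isolation work are simplifications, only the min() clamp is taken directly from the excerpt:

    unsigned long pfn, block_end_pfn;

    for (pfn = start_pfn; pfn < end_pfn; pfn = block_end_pfn) {
        block_end_pfn = pageblock_end_pfn(pfn);        /* end of this block */
        block_end_pfn = min(block_end_pfn, end_pfn);   /* don't overrun     */

        /* scan and isolate free pages in [pfn, block_end_pfn) */
    }
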
sparse.c
130 unsigned long *end_pfn) in mminit_validate_memmodel_limits() argument
141 *start_pfn, *end_pfn, max_sparsemem_pfn); in mminit_validate_memmodel_limits()
144 *end_pfn = max_sparsemem_pfn; in mminit_validate_memmodel_limits()
145 } else if (*end_pfn > max_sparsemem_pfn) { in mminit_validate_memmodel_limits()
148 *start_pfn, *end_pfn, max_sparsemem_pfn); in mminit_validate_memmodel_limits()
150 *end_pfn = max_sparsemem_pfn; in mminit_validate_memmodel_limits()
594 void online_mem_sections(unsigned long start_pfn, unsigned long end_pfn) in online_mem_sections() argument
598 for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION) { in online_mem_sections()
612 void offline_mem_sections(unsigned long start_pfn, unsigned long end_pfn) in offline_mem_sections() argument
616 for (pfn = start_pfn; pfn < end_pfn; pf in offline_mem_sections()
[all...]
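
online_mem_sections()/offline_mem_sections() in the sparse.c hits step through [start_pfn, end_pfn) one memory section at a time. A hedged sketch of that stride; the flag update inside the loop is reconstructed from memory and omits the validity checks the real code performs:

    unsigned long pfn;

    for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION) {
        unsigned long nr = pfn_to_section_nr(pfn);
        struct mem_section *ms = __nr_to_section(nr);

        ms->section_mem_map |= SECTION_IS_ONLINE;   /* mark this section online */
    }
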
bootmem_info.c
105 unsigned long i, pfn, end_pfn, nr_pages; in register_page_bootmem_info_node()
116 end_pfn = pgdat_end_pfn(pgdat); in register_page_bootmem_info_node()
119 for (; pfn < end_pfn; pfn += PAGES_PER_SECTION) { in register_page_bootmem_info_node()
104 unsigned long i, pfn, end_pfn, nr_pages; register_page_bootmem_info_node() local
/linux/include/trace/events/
page_isolation.h
14 unsigned long end_pfn,
17 TP_ARGS(start_pfn, end_pfn, fin_pfn),
21 __field(unsigned long, end_pfn)
27 __entry->end_pfn = end_pfn;
32 __entry->start_pfn, __entry->end_pfn, __entry->fin_pfn,
33 __entry->end_pfn <= __entry->fin_pfn ? "success" : "fail")
compaction.h
18 unsigned long end_pfn,
22 TP_ARGS(start_pfn, end_pfn, nr_scanned, nr_taken),
26 __field(unsigned long, end_pfn)
33 __entry->end_pfn = end_pfn;
40 __entry->end_pfn,
49 unsigned long end_pfn,
53 TP_ARGS(start_pfn, end_pfn, nr_scanned, nr_taken)
60 unsigned long end_pfn,
64 TP_ARGS(start_pfn, end_pfn, nr_scanned, nr_taken)
71 unsigned long end_pfn,
[all …]
/linux/arch/sparc/mm/
init_32.c
65 unsigned long end_pfn = (sp_banks[i].base_addr + sp_banks[i].num_bytes) >> PAGE_SHIFT; in calc_highpages() local
67 if (end_pfn <= max_low_pfn) in calc_highpages()
73 nr += end_pfn - start_pfn; in calc_highpages()
235 static void map_high_region(unsigned long start_pfn, unsigned long end_pfn) in map_high_region() argument
240 printk("mapping high region %08lx - %08lx\n", start_pfn, end_pfn); in map_high_region()
243 for (tmp = start_pfn; tmp < end_pfn; tmp++) in map_high_region()
284 unsigned long end_pfn = (sp_banks[i].base_addr + sp_banks[i].num_bytes) >> PAGE_SHIFT; in mem_init() local
286 if (end_pfn <= highstart_pfn) in mem_init()
292 map_high_region(start_pfn, end_pfn); in mem_init()
/linux/arch/sh/mm/
numa.c
25 unsigned long start_pfn, end_pfn; in setup_bootmem_node() local
31 end_pfn = PFN_DOWN(end); in setup_bootmem_node()
38 __add_active_range(nid, start_pfn, end_pfn); in setup_bootmem_node()
49 NODE_DATA(nid)->node_spanned_pages = end_pfn - start_pfn; in setup_bootmem_node()
init.c
210 unsigned long start_pfn, end_pfn; in allocate_pgdat() local
212 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn); in allocate_pgdat()
219 NODE_DATA(nid)->node_spanned_pages = end_pfn - start_pfn; in allocate_pgdat()
224 unsigned long start_pfn, end_pfn; in do_init_bootmem() local
228 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, NULL) in do_init_bootmem()
229 __add_active_range(0, start_pfn, end_pfn); in do_init_bootmem()
/linux/arch/mips/loongson64/
numa.c
85 unsigned long start_pfn, end_pfn; in node_mem_init() local
91 get_pfn_range_for_nid(node, &start_pfn, &end_pfn); in node_mem_init()
93 node, start_pfn, end_pfn); in node_mem_init()
98 NODE_DATA(node)->node_spanned_pages = end_pfn - start_pfn; in node_mem_init()
108 max_low_pfn = end_pfn; in node_mem_init()
/linux/include/linux/
node.h
108 unsigned long end_pfn,
112 unsigned long end_pfn, in register_memory_blocks_under_node() argument
132 unsigned long end_pfn = start_pfn + pgdat->node_spanned_pages; in register_one_node() local
137 register_memory_blocks_under_node(nid, start_pfn, end_pfn, in register_one_node()
/linux/arch/loongarch/kernel/
numa.c
192 unsigned long start_pfn, end_pfn; in node_mem_init() local
199 get_pfn_range_for_nid(node, &start_pfn, &end_pfn); in node_mem_init()
201 node, start_pfn, end_pfn); in node_mem_init()
376 unsigned long start_pfn, end_pfn; in paging_init() local
378 get_pfn_range_for_nid(node, &start_pfn, &end_pfn); in paging_init()
380 if (end_pfn > max_low_pfn) in paging_init()
381 max_low_pfn = end_pfn; in paging_init()
/linux/arch/x86/xen/
setup.c
218 unsigned long end_pfn) in xen_set_identity_and_release_chunk() argument
223 WARN_ON(start_pfn > end_pfn); in xen_set_identity_and_release_chunk()
226 end = min(end_pfn, ini_nr_pages); in xen_set_identity_and_release_chunk()
245 set_phys_range_identity(start_pfn, end_pfn); in xen_set_identity_and_release_chunk()
347 unsigned long start_pfn, unsigned long end_pfn, unsigned long remap_pfn) in xen_set_identity_and_remap_chunk() argument
351 unsigned long n = end_pfn - start_pfn; in xen_set_identity_and_remap_chunk()
393 for (pfn = start_pfn; pfn <= max_pfn_mapped && pfn < end_pfn; pfn++) in xen_set_identity_and_remap_chunk()
402 unsigned long start_pfn, unsigned long end_pfn, in xen_count_remap_pages() argument
408 return remap_pages + min(end_pfn, ini_nr_pages) - start_pfn; in xen_count_remap_pages()
412 unsigned long (*func)(unsigned long start_pfn, unsigned long end_pfn, in xen_foreach_remap_area() argument
[all …]
/linux/arch/x86/virt/vmx/tdx/
tdx.c
185 unsigned long end_pfn, int nid) in add_tdx_memblock() argument
195 tmb->end_pfn = end_pfn; in add_tdx_memblock()
223 unsigned long start_pfn, end_pfn; in build_tdx_memlist() local
226 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) { in build_tdx_memlist()
234 if (start_pfn >= end_pfn) in build_tdx_memlist()
242 ret = add_tdx_memblock(tmb_list, start_pfn, end_pfn, nid); in build_tdx_memlist()
426 end = TDMR_ALIGN_UP(PFN_PHYS(tmb->end_pfn)); in fill_out_tdmrs()
525 if (tmb->end_pfn > PHYS_PFN(tdmr->base)) in tdmr_get_nid()
772 end = PFN_PHYS(tmb->end_pfn); in tdmr_populate_rsvd_holes()
1371 static bool is_tdx_memory(unsigned long start_pfn, unsigned long end_pfn) in is_tdx_memory() argument
[all …]
/linux/arch/x86/platform/efi/
efi_32.c
38 u64 start_pfn, end_pfn, end; in efi_map_region() local
45 end_pfn = PFN_UP(end); in efi_map_region()
47 if (pfn_range_is_mapped(start_pfn, end_pfn)) { in efi_map_region()
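
The efi_32.c and sh/mm/numa.c hits show the standard way a physical byte range is turned into a covering pfn range: round the start down with PFN_DOWN() and the end up with PFN_UP(), both from <linux/pfn.h>. A small illustration of that conversion; start and end here are hypothetical physical addresses, not values from the excerpts:

    /* Cover the physical byte range [start, end) with whole pages:
     * PFN_DOWN() rounds the start down to a page boundary,
     * PFN_UP() rounds the end up, so no byte of the range is missed.
     */
    unsigned long start_pfn = PFN_DOWN(start);   /* start >> PAGE_SHIFT                 */
    unsigned long end_pfn   = PFN_UP(end);       /* (end + PAGE_SIZE - 1) >> PAGE_SHIFT */
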
/linux/arch/sh/kernel/
swsusp.c
22 unsigned long end_pfn = PAGE_ALIGN(__pa(&__nosave_end)) >> PAGE_SHIFT; in pfn_is_nosave() local
24 return (pfn >= begin_pfn) && (pfn < end_pfn); in pfn_is_nosave()
/linux/arch/x86/mm/
init_32.c
256 unsigned long start_pfn, end_pfn; in kernel_physical_mapping_init() local
267 end_pfn = end >> PAGE_SHIFT; in kernel_physical_mapping_init()
296 if (pfn >= end_pfn) in kernel_physical_mapping_init()
304 for (; pmd_idx < PTRS_PER_PMD && pfn < end_pfn; in kernel_physical_mapping_init()
344 for (; pte_ofs < PTRS_PER_PTE && pfn < end_pfn; in kernel_physical_mapping_init()
399 unsigned long start_pfn, unsigned long end_pfn) in add_highpages_with_active_regions() argument
406 start_pfn, end_pfn); in add_highpages_with_active_regions()
408 start_pfn, end_pfn); in add_highpages_with_active_regions()
numa.c
127 unsigned long start_pfn, end_pfn; in numa_register_nodes() local
133 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn); in numa_register_nodes()
134 if (start_pfn >= end_pfn) in numa_register_nodes()
/linux/arch/powerpc/mm/
numa.c
89 static int __init fake_numa_create_new_node(unsigned long end_pfn, in fake_numa_create_new_node() argument
120 if ((end_pfn << PAGE_SHIFT) > mem) { in fake_numa_create_new_node()
1041 unsigned long start_pfn, end_pfn; in setup_nonnuma() local
1048 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, NULL) { in setup_nonnuma()
1049 fake_numa_create_new_node(end_pfn, &nid); in setup_nonnuma()
1051 PFN_PHYS(end_pfn - start_pfn), in setup_nonnuma()
1093 static void __init setup_node_data(int nid, u64 start_pfn, u64 end_pfn) in setup_node_data() argument
1095 u64 spanned_pages = end_pfn - start_pfn; in setup_node_data()
1210 unsigned long start_pfn, end_pfn; in initmem_init() local
1212 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn); in initmem_init()
[all …]
/linux/drivers/hv/
hv_balloon.c
429 unsigned long end_pfn; member
439 unsigned long end_pfn; member
593 if (pfn >= gap->start_pfn && pfn < gap->end_pfn) in has_pfn_is_backed()
615 (pfn < has->end_pfn) && in hv_page_offline_check()
773 (pfn + (1UL << order) > has->end_pfn)) in hv_online_page()
794 if (start_pfn < has->start_pfn || start_pfn >= has->end_pfn) in pfn_covered()
810 gap->end_pfn = start_pfn; in pfn_covered()
820 if ((start_pfn + pfn_cnt) > has->end_pfn) { in pfn_covered()
822 residual = (start_pfn + pfn_cnt - has->end_pfn); in pfn_covered()
823 has->end_pfn += ALIGN(residual, ha_pages_in_chunk); in pfn_covered()
[all …]
/linux/arch/parisc/mm/
init.c
222 unsigned long end_pfn; in setup_bootmem() local
226 end_pfn = pmem_ranges[0].start_pfn + pmem_ranges[0].pages; in setup_bootmem()
229 hole_pages = pmem_ranges[i].start_pfn - end_pfn; in setup_bootmem()
231 pmem_holes[npmem_holes].start_pfn = end_pfn; in setup_bootmem()
233 end_pfn += hole_pages; in setup_bootmem()
235 end_pfn += pmem_ranges[i].pages; in setup_bootmem()
238 pmem_ranges[0].pages = end_pfn - pmem_ranges[0].start_pfn; in setup_bootmem()
/linux/kernel/power/
snapshot.c
397 unsigned long end_pfn; /* Zone end page frame + 1 */ member
556 zone->end_pfn = end; in create_zone_bm_rtree()
757 if (pfn >= zone->start_pfn && pfn < zone->end_pfn) in memory_bm_find_bit()
764 if (pfn >= curr->start_pfn && pfn < curr->end_pfn) { in memory_bm_find_bit()
935 pages = bm->cur.zone->end_pfn - bm->cur.zone->start_pfn; in memory_bm_next_pfn()
958 unsigned long end_pfn; member
997 void __init register_nosave_region(unsigned long start_pfn, unsigned long end_pfn) in register_nosave_region() argument
1001 if (start_pfn >= end_pfn) in register_nosave_region()
1008 if (region->end_pfn == start_pfn) { in register_nosave_region()
1009 region->end_pfn = end_pfn; in register_nosave_region()
[all …]
