Identifier cross-reference: uses of nr_range across the Linux kernel tree, grouped by file.

/linux/kernel/range.c

    12  int add_range(struct range *range, int az, int nr_range, u64 start, u64 end)  in add_range() argument
    15          return nr_range;  in add_range()
    18      if (nr_range >= az)  in add_range()
    19          return nr_range;  in add_range()
    21      range[nr_range].start = start;  in add_range()
    22      range[nr_range].end = end;  in add_range()
    24      nr_range++;  in add_range()
    26      return nr_range;  in add_range()
    29  int add_range_with_merge(struct range *range, int az, int nr_range,  in add_range_with_merge() argument
    35          return nr_range;  in add_range_with_merge()
    [all …]
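Both helpers thread nr_range through as a running element count: the caller passes the current count in and stores the return value back. A minimal user-space sketch of the add_range() pattern follows; the guard before line 15 is elided in the listing, so the start >= end rejection shown here is an assumption.

    #include <stdint.h>

    struct range {
            uint64_t start;
            uint64_t end;
    };

    /* Append a range to range[]; az is the array capacity. Returns the
     * possibly unchanged element count, so callers chain the calls as:
     *   nr_range = add_range(range, az, nr_range, start, end);      */
    static int add_range(struct range *range, int az, int nr_range,
                         uint64_t start, uint64_t end)
    {
            if (start >= end)       /* assumed guard: drop empty/inverted input */
                    return nr_range;

            if (nr_range >= az)     /* out of slots: entry is silently dropped */
                    return nr_range;

            range[nr_range].start = start;
            range[nr_range].end = end;

            nr_range++;
            return nr_range;
    }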
/linux/arch/x86/mm/init.c

   329  static int __meminit save_mr(struct map_range *mr, int nr_range,  in save_mr() argument
   334          if (nr_range >= NR_RANGE_MR)  in save_mr()
   336          mr[nr_range].start = start_pfn<<PAGE_SHIFT;  in save_mr()
   337          mr[nr_range].end = end_pfn<<PAGE_SHIFT;  in save_mr()
   338          mr[nr_range].page_size_mask = page_size_mask;  in save_mr()
   339          nr_range++;  in save_mr()
   342      return nr_range;  in save_mr()
   350          int nr_range)  in adjust_range_page_size_mask() argument
   354      for (i = 0; i < nr_range; i++) {  in adjust_range_page_size_mask()
   404  static int __meminit split_mem_range(struct map_range *mr, int nr_range,  in split_mem_range() argument
    [all …]
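save_mr() applies the same accumulate-and-return idiom to x86's map_range table, converting page-frame-number bounds to byte addresses with PAGE_SHIFT. A hedged sketch; the capacity value and the behavior on overflow (the listing truncates that branch) are assumptions.

    #include <stdint.h>

    #define PAGE_SHIFT      12      /* 4 KiB base pages on x86 */
    #define NR_RANGE_MR     5       /* illustrative capacity, not necessarily the kernel's value */

    struct map_range {
            uint64_t start;
            uint64_t end;
            uint64_t page_size_mask;
    };

    static int save_mr(struct map_range *mr, int nr_range,
                       uint64_t start_pfn, uint64_t end_pfn,
                       uint64_t page_size_mask)
    {
            if (start_pfn < end_pfn) {              /* skip empty ranges */
                    if (nr_range >= NR_RANGE_MR)    /* assumed overflow handling */
                            return nr_range;
                    mr[nr_range].start = start_pfn << PAGE_SHIFT;
                    mr[nr_range].end = end_pfn << PAGE_SHIFT;
                    mr[nr_range].page_size_mask = page_size_mask;
                    nr_range++;
            }
            return nr_range;
    }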
/linux/arch/x86/kernel/cpu/mtrr/cleanup.c

    54  static int __initdata nr_range;  variable
    62  x86_get_mtrr_mem_range(struct range *range, int nr_range,  in x86_get_mtrr_mem_range() argument
    76          nr_range = add_range_with_merge(range, RANGE_NUM, nr_range,  in x86_get_mtrr_mem_range()
    81      for (i = 0; i < nr_range; i++)  in x86_get_mtrr_mem_range()
   121      nr_range = clean_sort_range(range, RANGE_NUM);  in x86_get_mtrr_mem_range()
   124      for (i = 0; i < nr_range; i++)  in x86_get_mtrr_mem_range()
   128      return nr_range;  in x86_get_mtrr_mem_range()
   133  static unsigned long __init sum_ranges(struct range *range, int nr_range)  in sum_ranges() argument
   138      for (i = 0; i < nr_range; i++)  in sum_ranges()
   460  x86_setup_var_mtrrs(struct range *range, int nr_range,  in x86_setup_var_mtrrs() argument
    [all …]
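sum_ranges() (line 133) totals the bytes covered by the first nr_range entries. The loop body is truncated in the listing, so this sketch assumes an exclusive .end; whether the MTRR cleanup code treats .end as inclusive or exclusive has varied across kernel versions.

    /* struct range as in the add_range() sketch above */
    static unsigned long sum_ranges(struct range *range, int nr_range)
    {
            unsigned long sum = 0;
            int i;

            for (i = 0; i < nr_range; i++)
                    sum += range[i].end - range[i].start;   /* assumed exclusive .end */

            return sum;
    }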
/linux/mm/memremap.c

    87      for (i = 0; i < pgmap->nr_range; i++) {  in pgmap_pfn_valid()
   144      for (i = 0; i < pgmap->nr_range; i++)  in memunmap_pages()
   149      for (i = 0; i < pgmap->nr_range; i++)  in memunmap_pages()
   299      const int nr_range = pgmap->nr_range;  in memremap_pages() local
   302      if (WARN_ONCE(!nr_range, "nr_range must be specified\n"))  in memremap_pages()
   360       * Clear the pgmap nr_range as it will be incremented for each  in memremap_pages()
   364      pgmap->nr_range = 0;  in memremap_pages()
   366      for (i = 0; i < nr_range;  in memremap_pages()
    [all …]

/linux/mm/sparse-vmemmap.c

   360       * Note that memremap_pages() resets @nr_range value and will increment  in vmemmap_populate_hugepages()
   361       * it after each range successful onlining. Thus the value or @nr_range  in vmemmap_populate_hugepages()
   370              PHYS_PFN(pgmap->ranges[pgmap->nr_range].start);
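The sparse-vmemmap.c comment spells out the contract: memremap_pages() latches the requested count, resets pgmap->nr_range to zero, and re-increments it once per range that comes online, so mid-loop the counter always names the in-progress range and, on failure, exactly how many ranges need unwinding. A sketch of that discipline; map_one_range() is a hypothetical stand-in for the per-range setup step, which the real mm/memremap.c code spells differently.

    static int map_ranges(struct dev_pagemap *pgmap)
    {
            const int nr_range = pgmap->nr_range;   /* requested count, latched */
            int i, error = 0;

            pgmap->nr_range = 0;    /* re-counted as each range comes online */
            for (i = 0; i < nr_range; i++) {
                    error = map_one_range(pgmap, i);        /* hypothetical helper */
                    if (error)
                            break;  /* pgmap->nr_range now == ranges to unwind */
                    pgmap->nr_range++;
            }
            return error;
    }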
/linux/include/linux/range.h

    30  int add_range(struct range *range, int az, int nr_range,
    34  int add_range_with_merge(struct range *range, int az, int nr_range,
    41  void sort_range(struct range *range, int nr_range);
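For reference, the header pairs these declarations with the range type itself; to the best of my knowledge the upstream definition is simply a start/end pair:

    struct range {
            u64 start;
            u64 end;
    };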
/linux/include/linux/memremap.h

   136      int nr_range;  member
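In struct dev_pagemap, nr_range counts the entries of a flexible range array that shares storage with a single embedded range through a union. A trimmed sketch of that layout; this is a field subset only, the real struct carries further members such as ops and type, and older kernels spell the flexible member differently.

    struct dev_pagemap {
            /* ... other members elided ... */
            int nr_range;
            union {
                    struct range range;     /* the common single-range case */
                    DECLARE_FLEX_ARRAY(struct range, ranges);  /* nr_range entries */
            };
    };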
/linux/drivers/dax/kmem.c

    97      for (i = 0; i < dev_dax->nr_range; i++) {  in dev_dax_kmem_probe()
   117      data = kzalloc(struct_size(data, res, dev_dax->nr_range), GFP_KERNEL);  in dev_dax_kmem_probe()
   130      for (i = 0; i < dev_dax->nr_range; i++) {  in dev_dax_kmem_probe()
   215      for (i = 0; i < dev_dax->nr_range; i++) {  in dev_dax_kmem_remove()
   237      if (success >= dev_dax->nr_range) {  in dev_dax_kmem_remove()
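Line 117 shows the standard sizing idiom for a struct that ends in a flexible array: struct_size(data, res, n) computes sizeof(*data) plus n trailing elements, with overflow checking. A user-space equivalent, using an illustrative struct rather than the driver's real type:

    #include <stdlib.h>

    struct resource;                /* opaque here; only pointers are stored */

    struct kmem_data {              /* illustrative, not the driver's real type */
            const char *res_name;
            struct resource *res[]; /* one slot per dev_dax range */
    };

    static struct kmem_data *alloc_kmem_data(int nr_range)
    {
            /* equivalent of kzalloc(struct_size(data, res, nr_range), GFP_KERNEL) */
            return calloc(1, sizeof(struct kmem_data) +
                             nr_range * sizeof(struct resource *));
    }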
/linux/drivers/dax/bus.c

   194      for (i = 0; i < dev_dax->nr_range; i++)  in dev_dax_size()
   443      int i = dev_dax->nr_range - 1;  in trim_dev_dax_range()
   453      if (--dev_dax->nr_range == 0) {  in trim_dev_dax_range()
   461      while (dev_dax->nr_range)  in free_dev_dax_ranges()
   854      if (dev_WARN_ONCE(dev, dev_dax->nr_range,  in alloc_dev_dax_range()
   866              * (dev_dax->nr_range + 1), GFP_KERNEL);  in alloc_dev_dax_range()
   872      for (i = 0; i < dev_dax->nr_range; i++)  in alloc_dev_dax_range()
   875      ranges[dev_dax->nr_range++] = (struct dev_dax_range) {  in alloc_dev_dax_range()
   883      dev_dbg(dev, "alloc range[%d]: %pa:%pa\n", dev_dax->nr_range - 1,  in alloc_dev_dax_range()
   893      rc = devm_register_dax_mapping(dev_dax, dev_dax->nr_range - 1);  in alloc_dev_dax_range()
    [all …]
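alloc_dev_dax_range() grows the ranges array one element at a time: krealloc to nr_range + 1 slots (line 866 is the tail of that call), write the new entry at index nr_range, then post-increment the counter (line 875); trim_dev_dax_range() undoes this from the tail (lines 443 to 461). A condensed sketch of the append step; the initializer fields are elided because the listing truncates them.

    struct dev_dax_range *ranges;

    ranges = krealloc(dev_dax->ranges,
                      sizeof(*ranges) * (dev_dax->nr_range + 1),
                      GFP_KERNEL);
    if (!ranges)
            return -ENOMEM;

    dev_dax->ranges = ranges;
    ranges[dev_dax->nr_range++] = (struct dev_dax_range) {
            /* .pgoff and .range initializers elided in the listing */
    };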
/linux/drivers/dax/device.c

    59      for (i = 0; i < dev_dax->nr_range; i++) {  in dax_pgoff_to_phys()
   404      if (dev_dax->nr_range > 1) {  in dev_dax_probe()
   419              struct_size(pgmap, ranges, dev_dax->nr_range - 1),  in dev_dax_probe()
   424      pgmap->nr_range = dev_dax->nr_range;  in dev_dax_probe()
   427      for (i = 0; i < dev_dax->nr_range; i++) {  in dev_dax_probe()
   433      for (i = 0; i < dev_dax->nr_range; i++) {  in dev_dax_probe()
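The nr_range - 1 on line 419 is not an off-by-one: struct dev_pagemap already embeds one struct range inside its union (see the sketch after the memremap.h entry), so only the remaining nr_range - 1 elements extend the allocation. Reconstructed context for that call, assuming a plain zeroing allocator:

    pgmap = kzalloc(struct_size(pgmap, ranges, dev_dax->nr_range - 1),
                    GFP_KERNEL);
    if (!pgmap)
            return -ENOMEM;
    pgmap->nr_range = dev_dax->nr_range;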
/linux/drivers/dax/dax-private.h

    92      int nr_range;  member
/linux/tools/testing/nvdimm/dax-dev.c

    14      for (i = 0; i < dev_dax->nr_range; i++) {  in dax_pgoff_to_phys()
/linux/drivers/xen/unpopulated-alloc.c

    95      pgmap->nr_range = 1;  in fill_list()

/linux/drivers/gpu/drm/nouveau/nouveau_dmem.c

   254      chunk->pagemap.nr_range = 1;  in nouveau_dmem_chunk_alloc()

/linux/drivers/gpu/drm/amd/amdkfd/kfd_migrate.c

  1047      pgmap->nr_range = 1;  in kgd2kfd_init_zone_device()

/linux/lib/test_hmm.c

   531      devmem->pagemap.nr_range = 1;  in dmirror_allocate_chunk()

/linux/Documentation/mm/hmm.rst

   284      pagemap.nr_range = 1;
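All five remaining sites use the single-range form: the span goes into the embedded pgmap->range, nr_range is set to 1, and the pgmap is handed to memremap_pages(). A condensed setup sketch modeled on the hmm.rst example; the ops table name and the res bounds are illustrative, and error handling is mostly elided.

    void *addr;

    pgmap->type = MEMORY_DEVICE_PRIVATE;
    pgmap->range.start = res->start;        /* res: the device's reserved span */
    pgmap->range.end = res->end;
    pgmap->nr_range = 1;
    pgmap->ops = &device_devmem_ops;        /* driver-specific ops table */

    addr = memremap_pages(pgmap, numa_node_id());
    if (IS_ERR(addr))
            return PTR_ERR(addr);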