/linux/mm/
  memremap.c
      77  return (range->start + range_len(range)) >> PAGE_SHIFT;   in pfn_end()
      97  PHYS_PFN(range_len(range)));   in pageunmap_range()
     100  PHYS_PFN(range_len(range)), NULL);   in pageunmap_range()
     102  arch_remove_memory(range->start, range_len(range),   in pageunmap_range()
     104  kasan_remove_zero_shadow(__va(range->start), range_len(range));   in pageunmap_range()
     108  pfnmap_untrack(PHYS_PFN(range->start), range_len(range));   in pageunmap_range()
     170  is_ram = region_intersects(range->start, range_len(range),   in pagemap_range()
     188  error = pfnmap_track(PHYS_PFN(range->start), range_len(range),   in pagemap_range()
     193  if (!mhp_range_allowed(range->start, range_len(range), !is_private)) {   in pagemap_range()
     213  PHYS_PFN(range_len(range)), params);   in pagemap_range()
    [all …]
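Every memremap.c hit above is the same idiom: a byte-granular struct range being turned into page-frame-number bounds and counts. The standalone sketch below restates that arithmetic with locally defined stand-ins; the 4 KiB PAGE_SHIFT value, the PHYS_PFN() macro, and the inclusive-end range_len() body are assumptions made for illustration, not copies of the kernel headers.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PHYS_PFN(x) ((uint64_t)(x) >> PAGE_SHIFT)  /* assumed stand-in macro */

/* Stand-in for struct range: an inclusive [start, end] address pair. */
struct range {
	uint64_t start;
	uint64_t end;
};

/* Assumed body; see the range.h entry later in this listing for the definition site. */
static inline uint64_t range_len(const struct range *range)
{
	return range->end - range->start + 1;
}

int main(void)
{
	struct range r = { .start = 0x100000, .end = 0x1fffff };   /* 1 MiB range */

	/* pfn_end() pattern: one past the last page frame of the range. */
	uint64_t pfn_end = (r.start + range_len(&r)) >> PAGE_SHIFT;

	/* pagemap_range()/pageunmap_range() pattern: first PFN and page count. */
	uint64_t first_pfn = PHYS_PFN(r.start);
	uint64_t nr_pages  = PHYS_PFN(range_len(&r));

	printf("first_pfn=%llu nr_pages=%llu pfn_end=%llu\n",
	       (unsigned long long)first_pfn,
	       (unsigned long long)nr_pages,
	       (unsigned long long)pfn_end);
	return 0;
}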
|
/linux/drivers/comedi/
  range.c
     110  int chan, range_len, i;   in comedi_check_chanlist() (local)
     116  range_len = s->range_table->length;   in comedi_check_chanlist()
     118  range_len = s->range_table_list[chan]->length;   in comedi_check_chanlist()
     120  range_len = 0;   in comedi_check_chanlist()
     122  CR_RANGE(chanspec) >= range_len) {   in comedi_check_chanlist()
     125  i, chanspec, chan, range_len);   in comedi_check_chanlist()
|
/linux/drivers/dax/
  bus.c
     195  size += range_len(&dev_dax->ranges[i].range);   in dev_dax_size()
     452  __release_region(&dax_region->res, range->start, range_len(range));   in trim_dev_dax_range()
     650  || !IS_ALIGNED(range_len(range), align))   in alloc_dax_region()
     873  pgoff += PHYS_PFN(range_len(&ranges[i].range));   in alloc_dev_dax_range()
     968  shrink = min_t(u64, to_shrink, range_len(range));   in dev_dax_shrink()
     969  if (shrink >= range_len(range)) {   in dev_dax_shrink()
     989  return adjust_dev_dax_range(dev_dax, adjust, range_len(range)   in dev_dax_shrink()
    1191  to_alloc = range_len(&r);   in mapping_store()
    1215  size_t len = range_len(&dev_dax->ranges[i].range);   in dev_dax_validate_align()
    1516  if (dev_dax->nr_range && range_len(&dev_dax->ranges[0].range)) {   in __devm_create_dev_dax()
|
  cxl.c
      28  .size = range_len(&cxlr_dax->hpa_range),   in cxl_dax_region_probe()
|
  device.c
      72  pgoff_end = dax_range->pgoff + PHYS_PFN(range_len(range)) - 1;   in dax_pgoff_to_phys()
     447  range_len(range), dev_name(dev))) {   in dev_dax_probe()
|
/linux/tools/testing/nvdimm/
  dax-dev.c
      20  pgoff_end = dax_range->pgoff + PHYS_PFN(range_len(range)) - 1;   in dax_pgoff_to_phys()
|
/linux/drivers/nvdimm/
  badrange.c
     237  len = range->start + range_len(range)   in badblocks_populate()
     252  len = range_len(range);   in badblocks_populate()
|
  pfn_devs.c
     701  nd_pfn->npfns = PHYS_PFN((range_len(range) - offset));   in __nvdimm_setup_pfn()
|
/linux/include/linux/
  range.h
      11  static inline u64 range_len(const struct range *range)   in range_len() (function)
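The range.h hit is the definition that every other entry in this listing resolves to. Only the signature is shown above; the inclusive-end body in the sketch below (end - start + 1) is inferred from callers such as pgoff_end = dax_range->pgoff + PHYS_PFN(range_len(range)) - 1 in the dax entries, so treat it as a hedged reconstruction rather than a verbatim copy of the header.

#include <assert.h>
#include <stdint.h>

/* Stand-in for struct range from include/linux/range.h:
 * an inclusive [start, end] pair of 64-bit addresses. */
struct range {
	uint64_t start;
	uint64_t end;
};

/* Reconstructed body: byte length of an inclusive range, hence the "+ 1". */
static inline uint64_t range_len(const struct range *range)
{
	return range->end - range->start + 1;
}

int main(void)
{
	/* A range covering exactly 4096 bytes ends at start + 4096 - 1. */
	struct range r = { .start = 0x1000, .end = 0x1000 + 4096 - 1 };

	assert(range_len(&r) == 4096);
	return 0;
}

The inclusive end is why callers above subtract 1 when computing a last page offset (dax_pgoff_to_phys()) and add the full length before shifting when computing a one-past-the-end PFN (pfn_end()).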
|
/linux/drivers/virt/nitro_enclaves/
  ne_misc_dev_test.c
     137  KUNIT_EXPECT_EQ(test, range_len(&phys_contig_mem_regions.regions[num - 1]),   in ne_misc_dev_test_merge_phys_contig_memory_regions()
|
  ne_misc_dev.c
    1006  u64 phys_region_size = range_len(&phys_contig_mem_regions.regions[i]);   in ne_set_user_memory_region_ioctl()
    1024  slot_add_mem_req.size = range_len(&phys_contig_mem_regions.regions[i]);   in ne_set_user_memory_region_ioctl()
|
/linux/fs/btrfs/
  extent_io.c
     325  u32 range_len;   in lock_delalloc_folios() (local)
     336  range_len = min_t(u64, folio_next_pos(folio), end + 1) - range_start;   in lock_delalloc_folios()
     337  btrfs_folio_set_lock(fs_info, folio, range_start, range_len);   in lock_delalloc_folios()
     339  processed_end = range_start + range_len - 1;   in lock_delalloc_folios()
    1201  u64 range_len = umin(folio_next_pos(folio),   in can_skip_one_ordered_range() (local)
    1209  ASSERT(btrfs_folio_test_dirty(fs_info, folio, cur, range_len));   in can_skip_one_ordered_range()
    1223  u64 range_len = umin(folio_next_pos(folio),   in can_skip_one_ordered_range() (local)
    1230  ASSERT(btrfs_folio_test_uptodate(fs_info, folio, cur, range_len));   in can_skip_one_ordered_range()
    1232  *fileoff = cur + range_len;   in can_skip_one_ordered_range()
    2232  u32 range_len = min_t(u64, folio_next_pos(folio),   in write_one_eb() (local)
    [all …]
|
  file.c
    3213  const u64 range_len = last_byte - cur_offset;   in btrfs_fallocate() (local)
    3215  ret = add_falloc_range(&reserve_list, cur_offset, range_len);   in btrfs_fallocate()
    3221  &data_reserved, cur_offset, range_len);   in btrfs_fallocate()
    3226  qgroup_reserved += range_len;   in btrfs_fallocate()
    3227  data_space_needed += range_len;   in btrfs_fallocate()
|
  qgroup.c
    4278  u64 range_len = unode->aux - range_start + 1;   in qgroup_free_reserved_data() (local)
    4286  range_start + range_len <= start)   in qgroup_free_reserved_data()
    4289  free_len = min(start + len, range_start + range_len) -   in qgroup_free_reserved_data()
|
  inode.c
    7612  u32 range_len;   in btrfs_invalidate_folio() (local)
    7641  range_len = range_end + 1 - cur;   in btrfs_invalidate_folio()
    7642  if (!btrfs_folio_test_ordered(fs_info, folio, cur, range_len)) {   in btrfs_invalidate_folio()
    7651  btrfs_folio_clear_ordered(fs_info, folio, cur, range_len);   in btrfs_invalidate_folio()
|
/linux/drivers/gpu/drm/nouveau/
  nouveau_dmem.c
     386  release_mem_region(chunk->pagemap.range.start, range_len(&chunk->pagemap.range));   in nouveau_dmem_chunk_alloc()
     477  unsigned long i, npages = range_len(&chunk->pagemap.range) >> PAGE_SHIFT;   in nouveau_dmem_evict_chunk()
     546  range_len(&chunk->pagemap.range));   in nouveau_dmem_fini()
|
/linux/drivers/cxl/core/
  hdm.c
     494  part->range.start, range_len(&part->range),   in cxl_dpa_setup()
     777  size = range_len(&cxld->hpa_range);   in setup_hw_decoder()
     950  len = range_len(&info->dvsec_range[which]);   in cxl_setup_hdm_decoder_from_dvsec()
|
  port.c
     114  return sysfs_emit(buf, "%#llx\n", range_len(&cxld->hpa_range));   in size_show()
|
  region.c
    3604  *res = DEFINE_RES_MEM_NAMED(hpa->start, range_len(hpa),   in __construct_region()
|
/linux/arch/powerpc/kvm/
  book3s_hv_uvmem.c
     704  (range_len(&kvmppc_uvmem_pgmap.range) >> PAGE_SHIFT);   in kvmppc_uvmem_get_page()
    1221  range_len(&kvmppc_uvmem_pgmap.range));   in kvmppc_uvmem_free()
|
/linux/lib/
  test_hmm.c
     565  pfn_last = pfn_first + (range_len(&devmem->pagemap.range) >> PAGE_SHIFT);   in dmirror_allocate_chunk()
     621  range_len(&devmem->pagemap.range));   in dmirror_allocate_chunk()
    1463  range_len(&devmem->pagemap.range));   in dmirror_device_remove_chunks()
|
/linux/drivers/pci/
  p2pdma.c
     439  range_len(&pgmap->range), dev_to_node(&pdev->dev),   in pci_p2pdma_add_resource()
|
/linux/drivers/cxl/
  acpi.c
     371  resource_size_t size = range_len(hpa);   in cxl_acpi_set_cache_size()
|
/linux/tools/testing/cxl/test/
  cxl.c
     401  range_len(&res->range));   in depopulate_all_mock_resources()
|