Searched refs:pfn_first (Results 1 – 4 of 4) sorted by relevance
 700  unsigned long pfn_last, pfn_first;  in kvmppc_uvmem_get_page() local
 702  pfn_first = kvmppc_uvmem_pgmap.range.start >> PAGE_SHIFT;  in kvmppc_uvmem_get_page()
 703  pfn_last = pfn_first +  in kvmppc_uvmem_get_page()
 708  pfn_last - pfn_first);  in kvmppc_uvmem_get_page()
 709  if (bit >= (pfn_last - pfn_first))  in kvmppc_uvmem_get_page()
 718  uvmem_pfn = bit + pfn_first;  in kvmppc_uvmem_get_page()
1164  unsigned long pfn_last, pfn_first;  in kvmppc_uvmem_init() local
1196  pfn_first = res->start >> PAGE_SHIFT;  in kvmppc_uvmem_init()
1197  pfn_last = pfn_first + (resource_size(res) >> PAGE_SHIFT);  in kvmppc_uvmem_init()
1198  kvmppc_uvmem_bitmap = bitmap_zalloc(pfn_last - pfn_first, GFP_KERNEL);  in kvmppc_uvmem_init()
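
These hits are in the PowerPC secure-guest memory driver (arch/powerpc/kvm/book3s_hv_uvmem.c): kvmppc_uvmem_init() sizes an allocation bitmap from the PFN span of the reserved region, and kvmppc_uvmem_get_page() hands out a device PFN by finding a free bit and adding it to pfn_first. Below is a minimal sketch of that pattern, not the in-tree code; my_pgmap, my_bitmap, my_bitmap_init() and get_device_pfn() are hypothetical names, and locking and error handling are omitted.

/*
 * Minimal sketch of the bitmap-backed device-PFN allocator shown above;
 * only the arithmetic mirrors the search hits.  Names are hypothetical
 * and locking is omitted.
 */
#include <linux/bitmap.h>
#include <linux/ioport.h>
#include <linux/memremap.h>

static struct dev_pagemap my_pgmap;     /* registered elsewhere via memremap_pages() */
static unsigned long *my_bitmap;        /* one bit per device PFN */

static int my_bitmap_init(struct resource *res)
{
        unsigned long pfn_first = res->start >> PAGE_SHIFT;
        unsigned long pfn_last = pfn_first + (resource_size(res) >> PAGE_SHIFT);

        my_bitmap = bitmap_zalloc(pfn_last - pfn_first, GFP_KERNEL);
        return my_bitmap ? 0 : -ENOMEM;
}

static unsigned long get_device_pfn(void)
{
        unsigned long pfn_first = my_pgmap.range.start >> PAGE_SHIFT;
        unsigned long pfn_last = pfn_first +
                                 (range_len(&my_pgmap.range) >> PAGE_SHIFT);
        unsigned long bit;

        bit = find_first_zero_bit(my_bitmap, pfn_last - pfn_first);
        if (bit >= pfn_last - pfn_first)
                return 0;               /* pool exhausted */
        bitmap_set(my_bitmap, bit, 1);
        return bit + pfn_first;         /* device PFN handed back to the caller */
}
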
  73  static unsigned long pfn_first(struct dev_pagemap *pgmap, int range_id)  in pfn_first() function
  92  return pfn >= pfn_first(pgmap, i);  in pgmap_pfn_valid()
 108  pfn_first(pgmap, range_id)) >> pgmap->vmemmap_shift;  in pfn_len()
 117  first_page = pfn_to_page(pfn_first(pgmap, range_id));  in pageunmap_range()
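
These hits are the generic ZONE_DEVICE helpers in mm/memremap.c: pfn_first() and pfn_len() describe one dev_pagemap range in PFN terms, pgmap_pfn_valid() checks a PFN against that window, and pageunmap_range() starts from pfn_to_page(pfn_first(...)) when tearing a range down. The following is a rough reconstruction from the visible fragments only; the in-tree pfn_first() also accounts for the vmemmap altmap reservation, which this sketch ignores.

/*
 * Rough reconstruction of the helpers hit above, built from the fragments
 * shown; altmap handling is deliberately left out.
 */
#include <linux/memremap.h>

static unsigned long pfn_first(struct dev_pagemap *pgmap, int range_id)
{
        return pgmap->ranges[range_id].start >> PAGE_SHIFT;
}

static unsigned long pfn_end(struct dev_pagemap *pgmap, int range_id)
{
        const struct range *range = &pgmap->ranges[range_id];

        return (range->start + range_len(range)) >> PAGE_SHIFT;
}

static unsigned long pfn_len(struct dev_pagemap *pgmap, int range_id)
{
        /* vmemmap_shift != 0 means the range is mapped with compound pages */
        return (pfn_end(pgmap, range_id) -
                pfn_first(pgmap, range_id)) >> pgmap->vmemmap_shift;
}
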
 499  unsigned long pfn_first;  in dmirror_allocate_chunk() local
 561  pfn_first = devmem->pagemap.range.start >> PAGE_SHIFT;  in dmirror_allocate_chunk()
 562  pfn_last = pfn_first + (range_len(&devmem->pagemap.range) >> PAGE_SHIFT);  in dmirror_allocate_chunk()
 571  pfn_first, pfn_last);  in dmirror_allocate_chunk()
 574  for (pfn = pfn_first; pfn < pfn_last; pfn++) {  in dmirror_allocate_chunk()
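
These hits are in the HMM self-test driver (lib/test_hmm.c): after registering a chunk of device-private memory, dmirror_allocate_chunk() walks every PFN from pfn_first to pfn_last and threads the corresponding struct page onto a free list. A sketch of that walk follows, assuming a single-range pagemap; free_list and add_chunk_pages() are hypothetical names, and locking and error handling are omitted.

/*
 * Sketch of the "walk the new chunk and put every page on a free list"
 * pattern from dmirror_allocate_chunk().  Names are hypothetical.
 */
#include <linux/memremap.h>
#include <linux/mm.h>

static struct page *free_list;          /* linked through page->zone_device_data */

static void add_chunk_pages(struct dev_pagemap *pagemap)
{
        unsigned long pfn_first = pagemap->range.start >> PAGE_SHIFT;
        unsigned long pfn_last = pfn_first +
                                 (range_len(&pagemap->range) >> PAGE_SHIFT);
        unsigned long pfn;

        for (pfn = pfn_first; pfn < pfn_last; pfn++) {
                struct page *page = pfn_to_page(pfn);

                page->zone_device_data = free_list;
                free_list = page;
        }
}
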
 233  unsigned long i, pfn_first;  in nouveau_dmem_chunk_alloc() local
 278  pfn_first = chunk->pagemap.range.start >> PAGE_SHIFT;  in nouveau_dmem_chunk_alloc()
 279  page = pfn_to_page(pfn_first);  in nouveau_dmem_chunk_alloc()
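
These hits are in the Nouveau DRM driver (drivers/gpu/drm/nouveau/nouveau_dmem.c). Unlike the test driver above, nouveau_dmem_chunk_alloc() converts only the chunk's first PFN to a struct page and then advances by page pointer, relying on the chunk's pages being contiguous in the memmap. A sketch under that assumption; DMEM_CHUNK_NPAGES, dmem_free_list and chunk_link_pages() are hypothetical, and the real driver links its pages under a lock.

/*
 * Sketch of the nouveau variant: convert pfn_first once, then walk the
 * chunk's struct pages by pointer.  Names and the chunk size are made up.
 */
#include <linux/memremap.h>
#include <linux/mm.h>

#define DMEM_CHUNK_NPAGES       (1UL << 15)     /* hypothetical chunk size */

static struct page *dmem_free_list;

static void chunk_link_pages(struct dev_pagemap *pagemap)
{
        unsigned long pfn_first = pagemap->range.start >> PAGE_SHIFT;
        struct page *page = pfn_to_page(pfn_first);
        unsigned long i;

        for (i = 0; i < DMEM_CHUNK_NPAGES; i++, page++) {
                page->zone_device_data = dmem_free_list;
                dmem_free_list = page;
        }
}
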