Searched refs:hmm_pfns (Results 1 – 10 of 10) sorted by relevance
/linux/mm/
hmm.c
     48  range->hmm_pfns[i] = cpu_flags;    in hmm_pfns_fill()
    122  const unsigned long hmm_pfns[], unsigned long npages,    in hmm_range_need_fault() argument
    139  required_fault |= hmm_pte_need_fault(hmm_vma_walk, hmm_pfns[i],    in hmm_range_need_fault()
    154  unsigned long *hmm_pfns;    in hmm_vma_walk_hole() local
    158  hmm_pfns = &range->hmm_pfns[i];    in hmm_vma_walk_hole()
    160  hmm_range_need_fault(hmm_vma_walk, hmm_pfns, npages, 0);    in hmm_vma_walk_hole()
    188  unsigned long end, unsigned long hmm_pfns[],    in hmm_vma_handle_pmd() argument
    200  hmm_range_need_fault(hmm_vma_walk, hmm_pfns, npages, cpu_flags);    in hmm_vma_handle_pmd()
    206  hmm_pfns[i] = pfn | cpu_flags;    in hmm_vma_handle_pmd()
    212  unsigned long end, unsigned long hmm_pfns[], pmd_t pmd);
    [all …]
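The mm/hmm.c hits above are the core walker behind hmm_range_fault(): it writes one entry per page into range->hmm_pfns, combining the page frame number with flag bits (see the "hmm_pfns[i] = pfn | cpu_flags" hit at line 206). As a rough illustration of how callers decode such an entry, here is a minimal sketch using only the flag names and helpers that appear in the other hits (HMM_PFN_VALID, HMM_PFN_WRITE, HMM_PFN_ERROR, hmm_pfn_to_page()); the function name consume_one_entry() is hypothetical, and the exact bit layout should be treated as an internal detail of the kernel version at hand, not a stable interface.

    #include <linux/hmm.h>

    /* Hypothetical helper: inspect one hmm_pfns[] entry after hmm_range_fault(). */
    static int consume_one_entry(struct hmm_range *range, unsigned long i,
                                 struct page **pagep, bool *writable)
    {
            unsigned long entry = range->hmm_pfns[i];

            if (entry & HMM_PFN_ERROR)
                    return -EFAULT;         /* the walk recorded an error for this page */
            if (!(entry & HMM_PFN_VALID))
                    return -EAGAIN;         /* page not populated for this request */

            *pagep = hmm_pfn_to_page(entry);        /* strips flag bits, yields struct page * */
            *writable = !!(entry & HMM_PFN_WRITE);  /* CPU page table allows writes */

            /* Entries stay meaningful only while mmu_interval_read_retry() returns false. */
            return 0;
    }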
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_hmm.c
    194  hmm_range->hmm_pfns = pfns;    in amdgpu_hmm_range_get_pages()
    218  hmm_range->hmm_pfns += MAX_WALK_BYTE >> PAGE_SHIFT;    in amdgpu_hmm_range_get_pages()
    223  hmm_range->hmm_pfns = pfns;    in amdgpu_hmm_range_get_pages()
    253  kvfree(hmm_range->hmm_pfns);    in amdgpu_hmm_range_get_pages_done()
amdgpu_ttm.c
    769  WARN_ONCE(!range->hmm_pfns, "No user pages to check\n");    in amdgpu_ttm_tt_get_user_pages_done()
/linux/drivers/infiniband/core/
umem_odp.c
    390  range.hmm_pfns = &(umem_odp->pfn_list[pfn_start_idx]);    in ib_umem_odp_map_dma_and_lock()
    423  WARN_ON(range.hmm_pfns[pfn_index] & HMM_PFN_ERROR);    in ib_umem_odp_map_dma_and_lock()
    424  WARN_ON(!(range.hmm_pfns[pfn_index] & HMM_PFN_VALID));    in ib_umem_odp_map_dma_and_lock()
    426  if (!(range.hmm_pfns[pfn_index] & HMM_PFN_VALID)) {    in ib_umem_odp_map_dma_and_lock()
    431  if (range.hmm_pfns[pfn_index] & HMM_PFN_WRITE)    in ib_umem_odp_map_dma_and_lock()
    435  hmm_order = hmm_pfn_to_map_order(range.hmm_pfns[pfn_index]);    in ib_umem_odp_map_dma_and_lock()
    448  umem_odp, dma_index, hmm_pfn_to_page(range.hmm_pfns[pfn_index]),    in ib_umem_odp_map_dma_and_lock()
/linux/drivers/gpu/drm/xe/
xe_hmm.c
     38  page = hmm_pfn_to_page(range->hmm_pfns[i]);    in xe_mark_range_accessed()
     94  pages[i] = hmm_pfn_to_page(range->hmm_pfns[i]);    in xe_build_sg()
    210  hmm_range.hmm_pfns = pfns;    in xe_hmm_userptr_populate_range()
/linux/drivers/gpu/drm/nouveau/
nouveau_svm.c
    550  if (!(range->hmm_pfns[0] & HMM_PFN_VALID)) {    in nouveau_hmm_convert_pfn()
    555  page = hmm_pfn_to_page(range->hmm_pfns[0]);    in nouveau_hmm_convert_pfn()
    564  if (hmm_pfn_to_map_order(range->hmm_pfns[0])) {    in nouveau_hmm_convert_pfn()
    567  args->p.page = hmm_pfn_to_map_order(range->hmm_pfns[0]) +    in nouveau_hmm_convert_pfn()
    581  if (range->hmm_pfns[0] & HMM_PFN_WRITE)    in nouveau_hmm_convert_pfn()
    657  unsigned long hmm_pfns[1];    in nouveau_range_fault() local
    661  .hmm_pfns = hmm_pfns,    in nouveau_range_fault()
/linux/include/linux/
hmm.h
     96  unsigned long *hmm_pfns;    member
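The single hit in include/linux/hmm.h is the hmm_pfns member of struct hmm_range, the caller-supplied array that hmm_range_fault() fills in. For orientation, here is the structure paraphrased as it looks in current trees; comments are abridged, the header in your tree is authoritative, and the field set may differ between kernel versions.

    /* Paraphrased from include/linux/hmm.h; see the header for the real definition. */
    struct hmm_range {
            struct mmu_interval_notifier *notifier; /* ties the walk to invalidations */
            unsigned long notifier_seq;     /* from mmu_interval_read_begin() */
            unsigned long start;            /* first virtual address (inclusive) */
            unsigned long end;              /* last virtual address (exclusive) */
            unsigned long *hmm_pfns;        /* output array, one encoded entry per page */
            unsigned long default_flags;    /* input: flags requested for every entry */
            unsigned long pfn_flags_mask;   /* input: mask applied to per-entry request flags */
            void *dev_private_owner;        /* owner filter for device-private pages */
    };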
/linux/lib/
test_hmm.c
     209  unsigned long *pfns = range->hmm_pfns;    in dmirror_do_fault()
     335  .hmm_pfns = pfns,    in dmirror_fault()
    1153  dmirror_mkentry(dmirror, range, perm + i, range->hmm_pfns[i]);    in dmirror_range_snapshot()
    1173  .hmm_pfns = pfns,    in dmirror_snapshot()
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_svm.c
     161  unsigned long *hmm_pfns, uint32_t gpuidx)    in svm_range_dma_map_dev() argument
     181  page = hmm_pfn_to_page(hmm_pfns[i]);    in svm_range_dma_map_dev()
     185  addr[i] = (hmm_pfns[i] << PAGE_SHIFT) +    in svm_range_dma_map_dev()
     208  unsigned long *hmm_pfns)    in svm_range_dma_map() argument
     227  hmm_pfns, gpuidx);    in svm_range_dma_map()
    1728  hmm_range->hmm_pfns);    in svm_range_validate_and_map()
/linux/Documentation/mm/
hmm.rst
    176  range.hmm_pfns = ...;
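The hmm.rst hit is part of the documented calling convention: the driver points range.hmm_pfns at an array it allocated (one unsigned long per page in [start, end)), then calls hmm_range_fault() under the mmu_interval_notifier sequence protocol. The sketch below condenses that documented pattern; driver_populate_range() and driver_lock are illustrative stand-ins for the caller's own function and device page-table lock, while the hmm and mmu-notifier calls are the ones used by the drivers listed above.

    #include <linux/hmm.h>
    #include <linux/mm.h>
    #include <linux/mmu_notifier.h>
    #include <linux/mutex.h>

    /* Illustrative sketch of the usage pattern documented in Documentation/mm/hmm.rst. */
    static int driver_populate_range(struct mm_struct *mm,
                                     struct mmu_interval_notifier *interval_sub,
                                     struct mutex *driver_lock,
                                     unsigned long start, unsigned long npages,
                                     unsigned long *pfns)
    {
            struct hmm_range range = {
                    .notifier       = interval_sub,
                    .start          = start,
                    .end            = start + npages * PAGE_SIZE,
                    .hmm_pfns       = pfns,                 /* output: filled by hmm_range_fault() */
                    .default_flags  = HMM_PFN_REQ_FAULT,    /* fault in every page */
            };
            int ret;

    again:
            range.notifier_seq = mmu_interval_read_begin(interval_sub);

            mmap_read_lock(mm);
            ret = hmm_range_fault(&range);
            mmap_read_unlock(mm);
            if (ret == -EBUSY)
                    goto again;     /* an invalidation raced with the walk, retry */
            if (ret)
                    return ret;

            mutex_lock(driver_lock);
            if (mmu_interval_read_retry(interval_sub, range.notifier_seq)) {
                    mutex_unlock(driver_lock);
                    goto again;     /* pfns[] went stale before we could use it */
            }
            /* pfns[] is stable here: decode the entries and program the device page table. */
            mutex_unlock(driver_lock);
            return 0;
    }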