Home
last modified time | relevance | path

Searched refs: uvma (Results 1 – 6 of 6) sorted by relevance

/linux/drivers/gpu/drm/nouveau/
H A Dnouveau_uvmm.c181 nouveau_uvma_vmm_put(struct nouveau_uvma *uvma) in nouveau_uvma_vmm_put() argument
183 u64 addr = uvma->va.va.addr; in nouveau_uvma_vmm_put()
184 u64 range = uvma->va.va.range; in nouveau_uvma_vmm_put()
186 return nouveau_uvmm_vmm_put(to_uvmm(uvma), addr, range); in nouveau_uvma_vmm_put()
190 nouveau_uvma_map(struct nouveau_uvma *uvma, in nouveau_uvma_map() argument
193 u64 addr = uvma->va.va.addr; in nouveau_uvma_map()
194 u64 offset = uvma->va.gem.offset; in nouveau_uvma_map()
195 u64 range = uvma->va.va.range; in nouveau_uvma_map()
197 return nouveau_uvmm_vmm_map(to_uvmm(uvma), addr, range, in nouveau_uvma_map()
198 offset, uvma->kind, mem); in nouveau_uvma_map()
[all …]
/linux/drivers/gpu/drm/xe/
H A Dxe_hmm.c124 void xe_hmm_userptr_free_sg(struct xe_userptr_vma *uvma) in xe_hmm_userptr_free_sg() argument
126 struct xe_userptr *userptr = &uvma->userptr; in xe_hmm_userptr_free_sg()
127 struct xe_vma *vma = &uvma->vma; in xe_hmm_userptr_free_sg()
164 int xe_hmm_userptr_populate_range(struct xe_userptr_vma *uvma, in xe_hmm_userptr_populate_range() argument
171 struct xe_vma *vma = &uvma->vma; in xe_hmm_userptr_populate_range()
181 userptr = &uvma->userptr; in xe_hmm_userptr_populate_range()
194 xe_hmm_userptr_free_sg(uvma); in xe_hmm_userptr_populate_range()
H A Dxe_hmm.h10 int xe_hmm_userptr_populate_range(struct xe_userptr_vma *uvma, bool is_mm_mmap_locked);
11 void xe_hmm_userptr_free_sg(struct xe_userptr_vma *uvma);
H A Dxe_vm.c60 int xe_vma_userptr_check_repin(struct xe_userptr_vma *uvma) in xe_vma_userptr_check_repin() argument
62 return mmu_interval_check_retry(&uvma->userptr.notifier, in xe_vma_userptr_check_repin()
63 uvma->userptr.notifier_seq) ? in xe_vma_userptr_check_repin()
67 int xe_vma_userptr_pin_pages(struct xe_userptr_vma *uvma) in xe_vma_userptr_pin_pages() argument
69 struct xe_vma *vma = &uvma->vma; in xe_vma_userptr_pin_pages()
76 return xe_hmm_userptr_populate_range(uvma, false); in xe_vma_userptr_pin_pages()
588 struct xe_userptr_vma *uvma = container_of(userptr, typeof(*uvma), userptr); in vma_userptr_invalidate() local
589 struct xe_vma *vma = &uvma->vma; in vma_userptr_invalidate()
661 struct xe_userptr_vma *uvma, *next; in xe_vm_userptr_pin() local
670 list_for_each_entry_safe(uvma, next, &vm->userptr.invalidated, in xe_vm_userptr_pin()
[all …]
H A Dxe_pt.c1181 static bool xe_pt_userptr_inject_eagain(struct xe_userptr_vma *uvma) in xe_pt_userptr_inject_eagain() argument
1183 u32 divisor = uvma->userptr.divisor ? uvma->userptr.divisor : 2; in xe_pt_userptr_inject_eagain()
1187 uvma->userptr.divisor = divisor << 1; in xe_pt_userptr_inject_eagain()
1196 static bool xe_pt_userptr_inject_eagain(struct xe_userptr_vma *uvma) in xe_pt_userptr_inject_eagain() argument
1206 struct xe_userptr_vma *uvma; in vma_check_userptr() local
1214 uvma = to_userptr_vma(vma); in vma_check_userptr()
1215 notifier_seq = uvma->userptr.notifier_seq; in vma_check_userptr()
1217 if (uvma->userptr.initial_bind && !xe_vm_in_fault_mode(vm)) in vma_check_userptr()
1220 if (!mmu_interval_read_retry(&uvma->userptr.notifier, in vma_check_userptr()
1222 !xe_pt_userptr_inject_eagain(uvma)) in vma_check_userptr()
[all …]
H A Dxe_gt_pagefault.c148 struct xe_userptr_vma *uvma = to_userptr_vma(vma); in handle_vma_pagefault() local
150 err = xe_vma_userptr_pin_pages(uvma); in handle_vma_pagefault()