Lines matching refs:vma — a cross-reference listing, apparently over the kernel's drivers/gpu/drm/msm/msm_gem_vma.c: each entry gives the source line number, the matching code, the enclosing function, and whether the matched symbol is an argument or a local.

268 void msm_gem_vma_unmap(struct drm_gpuva *vma, const char *reason)  in msm_gem_vma_unmap()  argument
270 struct msm_gem_vm *vm = to_msm_vm(vma->vm); in msm_gem_vma_unmap()
271 struct msm_gem_vma *msm_vma = to_msm_vma(vma); in msm_gem_vma_unmap()
286 .iova = vma->va.addr, in msm_gem_vma_unmap()
287 .range = vma->va.range, in msm_gem_vma_unmap()
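
From the references above (lines 268-287), the unmap path resolves the MSM-specific VM and VMA wrappers, then tears down the mapping described by the VMA's GPU address and length. A minimal sketch of how these pieces likely fit together; the mapped flag is grounded in line 400 below, but vm_unmap_op() and struct msm_vm_unmap_op are assumed names for the elided teardown step:

    void msm_gem_vma_unmap(struct drm_gpuva *vma, const char *reason)
    {
            struct msm_gem_vm *vm = to_msm_vm(vma->vm);
            struct msm_gem_vma *msm_vma = to_msm_vma(vma);

            /* Nothing to tear down if the VMA never reached the pagetables: */
            if (!msm_vma->mapped)
                    return;

            /*
             * Assumed helper: unmap [va.addr, va.addr + va.range) from the
             * IOMMU, recording 'reason' for debugging:
             */
            vm_unmap_op(vm, &(struct msm_vm_unmap_op){
                    .iova = vma->va.addr,
                    .range = vma->va.range,
                    .reason = reason,
            });

            msm_vma->mapped = false;
    }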
299 msm_gem_vma_map(struct drm_gpuva *vma, int prot, struct sg_table *sgt) in msm_gem_vma_map() argument
301 struct msm_gem_vm *vm = to_msm_vm(vma->vm); in msm_gem_vma_map()
302 struct msm_gem_vma *msm_vma = to_msm_vma(vma); in msm_gem_vma_map()
305 if (GEM_WARN_ON(!vma->va.addr)) in msm_gem_vma_map()
328 .iova = vma->va.addr, in msm_gem_vma_map()
329 .range = vma->va.range, in msm_gem_vma_map()
330 .offset = vma->gem.offset, in msm_gem_vma_map()
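
The map side (lines 299-330) is the mirror image: it refuses a VMA that was never given an address, short-circuits if already mapped, and programs the IOMMU with the scatterlist, protection bits, GPU address, length, and offset into the backing object. A sketch under the same assumptions (vm_map_op() and struct msm_vm_map_op are illustrative names):

    int msm_gem_vma_map(struct drm_gpuva *vma, int prot, struct sg_table *sgt)
    {
            struct msm_gem_vm *vm = to_msm_vm(vma->vm);
            struct msm_gem_vma *msm_vma = to_msm_vma(vma);
            int ret;

            /* A zero address means the range was never allocated: */
            if (GEM_WARN_ON(!vma->va.addr))
                    return -EINVAL;

            if (msm_vma->mapped)
                    return 0;

            msm_vma->mapped = true;

            /* Assumed helper: install sgt at iova with the given prot: */
            ret = vm_map_op(vm, &(struct msm_vm_map_op){
                    .iova = vma->va.addr,
                    .range = vma->va.range,
                    .offset = vma->gem.offset,
                    .sgt = sgt,
                    .prot = prot,
            });
            if (ret)
                    msm_vma->mapped = false;

            return ret;
    }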
345 void msm_gem_vma_close(struct drm_gpuva *vma) in msm_gem_vma_close() argument
347 struct msm_gem_vm *vm = to_msm_vm(vma->vm); in msm_gem_vma_close()
348 struct msm_gem_vma *msm_vma = to_msm_vma(vma); in msm_gem_vma_close()
354 if (vma->gem.obj) in msm_gem_vma_close()
355 msm_gem_assert_locked(vma->gem.obj); in msm_gem_vma_close()
357 if (vma->va.addr && vm->managed) in msm_gem_vma_close()
360 drm_gpuva_remove(vma); in msm_gem_vma_close()
361 drm_gpuva_unlink(vma); in msm_gem_vma_close()
363 kfree(vma); in msm_gem_vma_close()
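
Closing a VMA (lines 345-363) is pure bookkeeping: the object lock is asserted, a kernel-managed VM returns the address range to its drm_mm allocator, and the VMA is pulled out of both the VM's interval tree (drm_gpuva_remove()) and the object's VMA list (drm_gpuva_unlink()) before being freed. A sketch; any locking around the drm_mm node removal is elided here:

    void msm_gem_vma_close(struct drm_gpuva *vma)
    {
            struct msm_gem_vm *vm = to_msm_vm(vma->vm);
            struct msm_gem_vma *msm_vma = to_msm_vma(vma);

            GEM_WARN_ON(msm_vma->mapped);

            if (vma->gem.obj)
                    msm_gem_assert_locked(vma->gem.obj);

            /* Only kernel-managed VMs own the address allocation: */
            if (vma->va.addr && vm->managed)
                    drm_mm_remove_node(&msm_vma->node);

            drm_gpuva_remove(vma);          /* drop from the VM's va tree */
            drm_gpuva_unlink(vma);          /* detach from the GEM object */

            kfree(vma);
    }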
373 struct msm_gem_vma *vma; in msm_gem_vma_new() local
378 vma = kzalloc(sizeof(*vma), GFP_KERNEL); in msm_gem_vma_new()
379 if (!vma) in msm_gem_vma_new()
385 ret = drm_mm_insert_node_in_range(&vm->mm, &vma->node, in msm_gem_vma_new()
392 range_start = vma->node.start; in msm_gem_vma_new()
399 drm_gpuva_init(&vma->base, range_start, range_end - range_start, obj, offset); in msm_gem_vma_new()
400 vma->mapped = false; in msm_gem_vma_new()
402 ret = drm_gpuva_insert(&vm->base, &vma->base); in msm_gem_vma_new()
407 return &vma->base; in msm_gem_vma_new()
416 drm_gpuva_link(&vma->base, vm_bo); in msm_gem_vma_new()
419 return &vma->base; in msm_gem_vma_new()
422 drm_gpuva_remove(&vma->base); in msm_gem_vma_new()
425 drm_mm_remove_node(&vma->node); in msm_gem_vma_new()
427 kfree(vma); in msm_gem_vma_new()
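
msm_gem_vma_new() (lines 373-427) is the constructor the paths above assume. For a kernel-managed VM it first carves an address range out of vm->mm, then initializes the embedded drm_gpuva, inserts it into the gpuvm, and links it to the object's drm_gpuvm_bo; the error unwinding at lines 422-425 mirrors those steps in reverse. A sketch; the exact parameter list and the drm_gpuvm_bo acquisition are inferred from the drm_gpuvm API rather than shown in the listing:

    struct drm_gpuva *
    msm_gem_vma_new(struct drm_gpuvm *gpuvm, struct drm_gem_object *obj,
                    u64 offset, u64 range_start, u64 range_end)
    {
            struct msm_gem_vm *vm = to_msm_vm(gpuvm);
            struct drm_gpuvm_bo *vm_bo;
            struct msm_gem_vma *vma;
            int ret;

            vma = kzalloc(sizeof(*vma), GFP_KERNEL);
            if (!vma)
                    return ERR_PTR(-ENOMEM);

            if (vm->managed) {
                    /* Kernel-managed VM: pick the address ourselves: */
                    ret = drm_mm_insert_node_in_range(&vm->mm, &vma->node,
                                                      obj->size, PAGE_SIZE, 0,
                                                      range_start, range_end, 0);
                    if (ret)
                            goto err_free_vma;

                    range_start = vma->node.start;
                    range_end = range_start + obj->size;
            }

            drm_gpuva_init(&vma->base, range_start, range_end - range_start,
                           obj, offset);
            vma->mapped = false;

            ret = drm_gpuva_insert(&vm->base, &vma->base);
            if (ret)
                    goto err_free_range;

            if (!obj)
                    return &vma->base;

            vm_bo = drm_gpuvm_bo_obtain(&vm->base, obj);
            if (IS_ERR(vm_bo)) {
                    ret = PTR_ERR(vm_bo);
                    goto err_va_remove;
            }

            drm_gpuva_link(&vma->base, vm_bo);
            /* the link holds its own vm_bo reference, so drop ours: */
            drm_gpuvm_bo_put(vm_bo);

            return &vma->base;

    err_va_remove:
            drm_gpuva_remove(&vma->base);
    err_free_range:
            if (vm->managed)
                    drm_mm_remove_node(&vma->node);
    err_free_vma:
            kfree(vma);
            return ERR_PTR(ret);
    }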
435 struct drm_gpuva *vma; in msm_gem_vm_bo_validate() local
442 drm_gpuvm_bo_for_each_va (vma, vm_bo) { in msm_gem_vm_bo_validate()
443 ret = msm_gem_pin_vma_locked(obj, vma); in msm_gem_vm_bo_validate()
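
msm_gem_vm_bo_validate() (lines 435-443) is the gpuvm vm_bo_validate hook: after an eviction, every VMA that maps this object must get its backing pages pinned again. Almost all of it is visible in the listing; this sketch fills in only the error handling and the standard callback signature:

    static int msm_gem_vm_bo_validate(struct drm_gpuvm_bo *vm_bo,
                                      struct drm_exec *exec)
    {
            struct drm_gem_object *obj = vm_bo->obj;
            struct drm_gpuva *vma;
            int ret;

            /* Re-pin the pages behind every VMA that maps this object: */
            drm_gpuvm_bo_for_each_va (vma, vm_bo) {
                    ret = msm_gem_pin_vma_locked(obj, vma);
                    if (ret)
                            return ret;
            }

            return 0;
    }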
482 struct drm_gpuva *vma; in msm_gem_vm_sm_step_map() local
489 vma = vma_from_op(arg, &op->map); in msm_gem_vm_sm_step_map()
490 if (WARN_ON(IS_ERR(vma))) in msm_gem_vm_sm_step_map()
491 return PTR_ERR(vma); in msm_gem_vm_sm_step_map()
493 vm_dbg("%p:%p:%p: %016llx %016llx", vma->vm, vma, vma->gem.obj, in msm_gem_vm_sm_step_map()
494 vma->va.addr, vma->va.range); in msm_gem_vm_sm_step_map()
496 vma->flags = ((struct op_arg *)arg)->flags; in msm_gem_vm_sm_step_map()
510 .iova = vma->va.addr, in msm_gem_vm_sm_step_map()
511 .range = vma->va.range, in msm_gem_vm_sm_step_map()
512 .offset = vma->gem.offset, in msm_gem_vm_sm_step_map()
516 .obj = vma->gem.obj, in msm_gem_vm_sm_step_map()
519 to_msm_vma(vma)->mapped = true; in msm_gem_vm_sm_step_map()
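
msm_gem_vm_sm_step_map() (lines 482-519) is the MAP step of drm_gpuvm's split/merge state machine: it materializes a VMA for the incoming map operation via vma_from_op(), stamps the caller's flags onto it, queues the actual pagetable write, and only then marks the VMA mapped. In this sketch, vm_op_enqueue(), struct msm_vm_op, MSM_VM_OP_MAP, and the way sgt/prot are obtained are assumptions; struct op_arg, vma_from_op(), and the rest come from the listing:

    static int msm_gem_vm_sm_step_map(struct drm_gpuva_op *op, void *_arg)
    {
            struct op_arg *arg = _arg;
            struct drm_gpuva *vma;
            struct sg_table *sgt;
            int prot;

            vma = vma_from_op(arg, &op->map);
            if (WARN_ON(IS_ERR(vma)))
                    return PTR_ERR(vma);

            vm_dbg("%p:%p:%p: %016llx %016llx", vma->vm, vma, vma->gem.obj,
                   vma->va.addr, vma->va.range);

            vma->flags = arg->flags;

            /* Assumed: look up the object's pages and protection bits: */
            sgt = to_msm_bo(vma->gem.obj)->sgt;
            prot = msm_gem_prot(vma->gem.obj);

            /*
             * Assumed helper: defer the IOMMU write so it can run under
             * the VM's pagetable lock / in the VM_BIND queue:
             */
            vm_op_enqueue(arg, &(struct msm_vm_op){
                    .op = MSM_VM_OP_MAP,
                    .map = {
                            .sgt = sgt,
                            .iova = vma->va.addr,
                            .range = vma->va.range,
                            .offset = vma->gem.offset,
                            .prot = prot,
                    },
                    .obj = vma->gem.obj,
            });

            to_msm_vma(vma)->mapped = true;

            return 0;
    }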
612 struct drm_gpuva *vma = op->unmap.va; in msm_gem_vm_sm_step_unmap() local
613 struct msm_gem_vma *msm_vma = to_msm_vma(vma); in msm_gem_vm_sm_step_unmap()
615 vm_dbg("%p:%p:%p: %016llx %016llx", vma->vm, vma, vma->gem.obj, in msm_gem_vm_sm_step_unmap()
616 vma->va.addr, vma->va.range); in msm_gem_vm_sm_step_unmap()
626 (vma->gem.obj == arg->op->obj) && in msm_gem_vm_sm_step_unmap()
627 (vma->gem.offset == arg->op->obj_offset) && in msm_gem_vm_sm_step_unmap()
628 (vma->va.addr == arg->op->iova) && in msm_gem_vm_sm_step_unmap()
629 (vma->va.range == arg->op->range)) { in msm_gem_vm_sm_step_unmap()
637 unsigned orig_flags = vma->flags & (DRM_GPUVA_USERBITS - 1); in msm_gem_vm_sm_step_unmap()
638 vma->flags = orig_flags | arg->flags; in msm_gem_vm_sm_step_unmap()
649 .iova = vma->va.addr, in msm_gem_vm_sm_step_unmap()
650 .range = vma->va.range, in msm_gem_vm_sm_step_unmap()
653 .obj = vma->gem.obj, in msm_gem_vm_sm_step_unmap()
659 msm_gem_vma_close(vma); in msm_gem_vm_sm_step_unmap()
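
The UNMAP step (lines 612-659) has one subtlety visible in the listing: before tearing anything down it compares the VMA against the bind operation in flight (lines 626-629: same object, object offset, iova, and range), and on an exact match it keeps the mapping and merely refreshes the user flag bits (lines 637-638), so rebinding an identical range never passes through an unmapped window. A sketch of the likely shape; the early-return match path and vm_op_enqueue() are interpretations, not shown in the listing:

    static int msm_gem_vm_sm_step_unmap(struct drm_gpuva_op *op, void *_arg)
    {
            struct op_arg *arg = _arg;
            struct drm_gpuva *vma = op->unmap.va;
            struct msm_gem_vma *msm_vma = to_msm_vma(vma);

            vm_dbg("%p:%p:%p: %016llx %016llx", vma->vm, vma, vma->gem.obj,
                   vma->va.addr, vma->va.range);

            if (!msm_vma->mapped)
                    goto out_close;

            /*
             * If the in-flight bind re-maps exactly this range of exactly
             * this object, keep the pagetable entries and just refresh the
             * user-visible flag bits:
             */
            if ((vma->gem.obj == arg->op->obj) &&
                (vma->gem.offset == arg->op->obj_offset) &&
                (vma->va.addr == arg->op->iova) &&
                (vma->va.range == arg->op->range)) {
                    unsigned orig_flags = vma->flags & (DRM_GPUVA_USERBITS - 1);

                    vma->flags = orig_flags | arg->flags;
                    return 0;
            }

            /* Otherwise queue a real IOMMU unmap (assumed helper): */
            vm_op_enqueue(arg, &(struct msm_vm_op){
                    .op = MSM_VM_OP_UNMAP,
                    .unmap = {
                            .iova = vma->va.addr,
                            .range = vma->va.range,
                    },
                    .obj = vma->gem.obj,
            });

            msm_vma->mapped = false;

    out_close:
            msm_gem_vma_close(vma);

            return 0;
    }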
872 struct drm_gpuva *vma, *tmp; in msm_gem_vm_close() local
895 drm_gpuvm_for_each_va_safe (vma, tmp, gpuvm) { in msm_gem_vm_close()
896 struct drm_gem_object *obj = vma->gem.obj; in msm_gem_vm_close()
910 msm_gem_vma_unmap(vma, "close"); in msm_gem_vm_close()
911 msm_gem_vma_close(vma); in msm_gem_vm_close()
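
Finally, msm_gem_vm_close() (lines 872-911) sweeps the whole address space: the _safe iterator walks every remaining VMA while entries are being deleted, and each one is unmapped with reason "close" and then closed. The lines between 896 and 910 are elided in the listing; since msm_gem_vma_close() asserts the object lock, a plausible reconstruction takes a temporary object reference and lock around the pair, skipping objects that share the VM's common resv:

    drm_gpuvm_for_each_va_safe (vma, tmp, gpuvm) {
            struct drm_gem_object *obj = vma->gem.obj;

            /*
             * Hold the object (and its lock) across unmap+close; objects
             * whose resv is the VM's common resv are already covered:
             */
            if (obj && obj->resv != drm_gpuvm_resv(gpuvm)) {
                    drm_gem_object_get(obj);
                    msm_gem_lock(obj);
            }

            msm_gem_vma_unmap(vma, "close");
            msm_gem_vma_close(vma);

            if (obj && obj->resv != drm_gpuvm_resv(gpuvm)) {
                    msm_gem_unlock(obj);
                    drm_gem_object_put(obj);
            }
    }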