/linux/drivers/gpu/drm/

drm_gpuvm.c
  1124  struct drm_gpuvm_bo *vm_bo;  (in __drm_gpuvm_prepare_objects(), local)
  1128  for_each_vm_bo_in_list(gpuvm, extobj, &extobjs, vm_bo) {  (in __drm_gpuvm_prepare_objects())
  1129  ret = exec_prepare_obj(exec, vm_bo->obj, num_fences);  (in __drm_gpuvm_prepare_objects())
  1134  drm_gpuvm_bo_put(vm_bo);  (in __drm_gpuvm_prepare_objects())
  1145  struct drm_gpuvm_bo *vm_bo;  (in drm_gpuvm_prepare_objects_locked(), local)
  1149  list_for_each_entry(vm_bo, &gpuvm->extobj.list, list.entry.extobj) {  (in drm_gpuvm_prepare_objects_locked())
  1150  ret = exec_prepare_obj(exec, vm_bo->obj, num_fences);  (in drm_gpuvm_prepare_objects_locked())
  1154  if (vm_bo->evicted)  (in drm_gpuvm_prepare_objects_locked())
  1155  drm_gpuvm_bo_list_add(vm_bo, evict, false);  (in drm_gpuvm_prepare_objects_locked())
  1366  struct drm_gpuvm_bo *vm_bo;  (in __drm_gpuvm_validate(), local)
  [all …]
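
The __drm_gpuvm_prepare_objects() internals matched above back the exported
drm_gpuvm_prepare_objects() helper, which locks every external BO of a VM and
reserves fence slots on it. A minimal sketch of how a driver might call it
from a drm_exec retry loop follows; my_prepare_vm_objects() is invented, the
fence count is arbitrary, and drm_exec_init()'s third argument only exists on
newer kernels.

    #include <drm/drm_exec.h>
    #include <drm/drm_gpuvm.h>

    static int my_prepare_vm_objects(struct drm_gpuvm *gpuvm)
    {
            struct drm_exec exec;
            int ret = 0;

            drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
            drm_exec_until_all_locked(&exec) {
                    /* Lock all extobjs, reserving one fence slot each. */
                    ret = drm_gpuvm_prepare_objects(gpuvm, &exec, 1);
                    drm_exec_retry_on_contention(&exec);
                    if (ret)
                            break;
            }
            drm_exec_fini(&exec);

            return ret;
    }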

/linux/include/drm/

drm_gpuvm.h
    83  struct drm_gpuvm_bo *vm_bo;  (member)
   153  void drm_gpuva_link(struct drm_gpuva *va, struct drm_gpuvm_bo *vm_bo);
   715  drm_gpuvm_bo_obtain_prealloc(struct drm_gpuvm_bo *vm_bo);
   727  drm_gpuvm_bo_get(struct drm_gpuvm_bo *vm_bo)  (in drm_gpuvm_bo_get(), argument)
   729  kref_get(&vm_bo->kref);  (in drm_gpuvm_bo_get())
   730  return vm_bo;  (in drm_gpuvm_bo_get())
   733  bool drm_gpuvm_bo_put(struct drm_gpuvm_bo *vm_bo);
   739  void drm_gpuvm_bo_evict(struct drm_gpuvm_bo *vm_bo, bool evict);
   752  struct drm_gpuvm_bo *vm_bo;  (in drm_gpuvm_bo_gem_evict(), local)
   755  drm_gem_for_each_gpuvm_bo(vm_bo, obj)  (in drm_gpuvm_bo_gem_evict())
  [all …]
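
Lines 727-739 above give the drm_gpuvm_bo lifetime rules: drm_gpuvm_bo_get()
is a kref_get() that returns its argument, drm_gpuvm_bo_put() drops a
reference and reports whether the vm_bo was destroyed, and
drm_gpuvm_bo_evict() marks all mappings of the backing object evicted or
valid. A small hedged sketch; my_publish_and_release() is invented for
illustration:

    #include <drm/drm_gpuvm.h>

    static void my_publish_and_release(struct drm_gpuvm_bo *vm_bo)
    {
            /* Extra reference while another thread may still use vm_bo. */
            drm_gpuvm_bo_get(vm_bo);

            /* Flag every mapping of the backing GEM object as evicted. */
            drm_gpuvm_bo_evict(vm_bo, true);

            /* Drop our reference; true means it was the last one. */
            if (drm_gpuvm_bo_put(vm_bo))
                    pr_debug("vm_bo destroyed\n");
    }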

/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_vm.c
   178  static void amdgpu_vm_bo_evicted(struct amdgpu_vm_bo_base *vm_bo)  (in amdgpu_vm_bo_evicted(), argument)
   180  struct amdgpu_vm *vm = vm_bo->vm;  (in amdgpu_vm_bo_evicted())
   181  struct amdgpu_bo *bo = vm_bo->bo;  (in amdgpu_vm_bo_evicted())
   183  vm_bo->moved = true;  (in amdgpu_vm_bo_evicted())
   184  spin_lock(&vm_bo->vm->status_lock);  (in amdgpu_vm_bo_evicted())
   186  list_move(&vm_bo->vm_status, &vm->evicted);  (in amdgpu_vm_bo_evicted())
   188  list_move_tail(&vm_bo->vm_status, &vm->evicted);  (in amdgpu_vm_bo_evicted())
   189  spin_unlock(&vm_bo->vm->status_lock);  (in amdgpu_vm_bo_evicted())
   199  static void amdgpu_vm_bo_moved(struct amdgpu_vm_bo_base *vm_bo)  (in amdgpu_vm_bo_moved(), argument)
   201  spin_lock(&vm_bo->vm->status_lock);  (in amdgpu_vm_bo_moved())
  [all …]
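
The amdgpu matches show the per-VM BO state machine: each amdgpu_vm_bo_base
sits on exactly one per-VM status list (evicted, moved, ...) and is shuffled
between them under vm->status_lock. A generic sketch of that pattern, with
invented type and list names:

    #include <linux/list.h>
    #include <linux/spinlock.h>

    struct my_vm {
            spinlock_t status_lock;
            struct list_head evicted;       /* needs revalidation */
            struct list_head moved;         /* needs PTE updates */
    };

    struct my_vm_bo {
            struct my_vm *vm;
            bool moved;
            struct list_head vm_status;     /* on exactly one list above */
    };

    static void my_vm_bo_evicted(struct my_vm_bo *vm_bo)
    {
            vm_bo->moved = true;
            spin_lock(&vm_bo->vm->status_lock);
            list_move_tail(&vm_bo->vm_status, &vm_bo->vm->evicted);
            spin_unlock(&vm_bo->vm->status_lock);
    }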

amdgpu_vm_pt.c
   143  return parent->vm_bo;  (in amdgpu_vm_pt_parent())
   541  entry->bo->vm_bo = NULL;  (in amdgpu_vm_pt_free())

amdgpu_object.h
   111  struct amdgpu_vm_bo_base *vm_bo;  (member)

amdgpu_dma_buf.c
   361  for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) {  (in amdgpu_dma_buf_move_notify())
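
amdgpu chains one amdgpu_vm_bo_base per VM off a BO through bo->vm_bo and
->next, so notifying every VM that maps the BO is a singly linked list walk
(the same loop appears in amdgpu_gem.c below). A trivial sketch; counting is
just a stand-in for per-mapping work:

    /* Assumes amdgpu's amdgpu_object.h types. */
    static unsigned int my_count_vm_mappings(struct amdgpu_bo *bo)
    {
            struct amdgpu_vm_bo_base *base;
            unsigned int n = 0;

            for (base = bo->vm_bo; base; base = base->next)
                    n++;

            return n;
    }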

amdgpu_amdkfd_gpuvm.c
   376  struct amdgpu_vm_bo_base *vm_bo;  (in amdgpu_amdkfd_remove_fence_on_pt_pd_bos(), local)
   386  vm_bo = root->vm_bo;  (in amdgpu_amdkfd_remove_fence_on_pt_pd_bos())
   387  if (!vm_bo)  (in amdgpu_amdkfd_remove_fence_on_pt_pd_bos())
   390  vm = vm_bo->vm;  (in amdgpu_amdkfd_remove_fence_on_pt_pd_bos())
  2261  bo, bo->vm_bo->vm->process_info->eviction_fence);  (in amdgpu_amdkfd_map_gtt_bo_to_gart())

amdgpu_object.c
   671  bo->vm_bo = NULL;  (in amdgpu_bo_create())
  1293  WARN_ON(abo->vm_bo);  (in amdgpu_bo_release_notify())

amdgpu_gem.c
   885  for (base = robj->vm_bo; base; base = base->next)  (in amdgpu_gem_op_ioctl())

/linux/drivers/gpu/drm/panthor/

panthor_mmu.c
   199  struct drm_gpuvm_bo *vm_bo;  (member)
  1076  static void panthor_vm_bo_put(struct drm_gpuvm_bo *vm_bo)  (in panthor_vm_bo_put(), argument)
  1078  struct panthor_gem_object *bo = to_panthor_bo(vm_bo->obj);  (in panthor_vm_bo_put())
  1079  struct drm_gpuvm *vm = vm_bo->vm;  (in panthor_vm_bo_put())
  1098  unpin = drm_gpuvm_bo_put(vm_bo);  (in panthor_vm_bo_put())
  1128  if (op_ctx->map.vm_bo)  (in panthor_vm_cleanup_op_ctx())
  1129  panthor_vm_bo_put(op_ctx->map.vm_bo);  (in panthor_vm_cleanup_op_ctx())
  1136  panthor_vm_bo_put(vma->base.vm_bo);  (in panthor_vm_cleanup_op_ctx())
  1273  op_ctx->map.vm_bo = drm_gpuvm_bo_obtain_prealloc(preallocated_vm_bo);  (in panthor_vm_prepare_map_op_ctx())
  1283  if (preallocated_vm_bo != op_ctx->map.vm_bo &&  (in panthor_vm_prepare_map_op_ctx())
  [all …]
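
Lines 1273/1283 show GPUVM's preallocation pattern: panthor creates a vm_bo
up front, in a context where allocation may fail, and later resolves it with
drm_gpuvm_bo_obtain_prealloc(), which returns an existing vm_bo for the
(vm, obj) pair if there is one and otherwise registers the preallocated one.
A hedged sketch (my_obtain_vm_bo() is invented; my reading is that
obtain_prealloc() drops the preallocated reference when it finds an existing
vm_bo, which is why panthor compares the two pointers):

    #include <drm/drm_gpuvm.h>

    static struct drm_gpuvm_bo *
    my_obtain_vm_bo(struct drm_gpuvm *vm, struct drm_gem_object *obj)
    {
            struct drm_gpuvm_bo *prealloc, *vm_bo;

            prealloc = drm_gpuvm_bo_create(vm, obj);  /* may sleep/fail */
            if (!prealloc)
                    return ERR_PTR(-ENOMEM);

            vm_bo = drm_gpuvm_bo_obtain_prealloc(prealloc);
            if (vm_bo != prealloc) {
                    /* An existing vm_bo was found and returned; the
                     * preallocated reference was dropped inside
                     * obtain_prealloc(), so adjust any bookkeeping here. */
            }

            return vm_bo;
    }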

/linux/drivers/gpu/drm/nouveau/

nouveau_uvmm.c
    65  struct drm_gpuvm_bo *vm_bo;  (member)
  1123  struct drm_gpuvm_bo *vm_bo = bop->vm_bo;  (in bind_link_gpuvas(), local)
  1130  drm_gpuva_link(&new->map->va, vm_bo);  (in bind_link_gpuvas())
  1136  drm_gpuva_link(&new->prev->va, va->vm_bo);  (in bind_link_gpuvas())
  1138  drm_gpuva_link(&new->next->va, va->vm_bo);  (in bind_link_gpuvas())
  1210  op->vm_bo = drm_gpuvm_bo_obtain(&uvmm->base, obj);  (in nouveau_uvmm_bind_job_submit())
  1212  if (IS_ERR(op->vm_bo))  (in nouveau_uvmm_bind_job_submit())
  1213  return PTR_ERR(op->vm_bo);  (in nouveau_uvmm_bind_job_submit())
  1215  drm_gpuvm_bo_extobj_add(op->vm_bo);  (in nouveau_uvmm_bind_job_submit())
  1521  if (!IS_ERR_OR_NULL(op->vm_bo)) {  (in nouveau_uvmm_bind_job_cleanup())
  [all …]
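
The nouveau matches illustrate the canonical bind flow: obtain (or create)
the vm_bo at job-submit time, add the GEM object to the VM's extobj list,
and, once the mapping exists, link each drm_gpuva to the vm_bo. A condensed
sketch; everything except the drm_gpuvm calls is invented:

    #include <drm/drm_gpuvm.h>

    static int my_bind_obj(struct drm_gpuvm *vm, struct drm_gem_object *obj,
                           struct drm_gpuva *va)
    {
            struct drm_gpuvm_bo *vm_bo = drm_gpuvm_bo_obtain(vm, obj);

            if (IS_ERR(vm_bo))
                    return PTR_ERR(vm_bo);

            /* Track obj as external if it doesn't share the VM's resv. */
            drm_gpuvm_bo_extobj_add(vm_bo);

            /* Attach the new mapping to the vm_bo's VA list. */
            drm_gpuva_link(va, vm_bo);

            return 0;
    }

Note that the reference taken by drm_gpuvm_bo_obtain() is kept somewhere;
nouveau stores it in the bind op and drops it in
nouveau_uvmm_bind_job_cleanup() (line 1521 above).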

/linux/drivers/gpu/drm/xe/

xe_vm.c
   381  static int xe_gpuvm_validate(struct drm_gpuvm_bo *vm_bo, struct drm_exec *exec)  (in xe_gpuvm_validate(), argument)
   383  struct xe_vm *vm = gpuvm_to_vm(vm_bo->vm);  (in xe_gpuvm_validate())
   388  drm_gpuvm_bo_for_each_va(gpuva, vm_bo)  (in xe_gpuvm_validate())
   392  ret = xe_bo_validate(gem_to_xe_bo(vm_bo->obj), vm, false);  (in xe_gpuvm_validate())
   396  vm_bo->evicted = false;  (in xe_gpuvm_validate())
  1020  struct drm_gpuvm_bo *vm_bo;  (in xe_vma_create(), local)
  1024  vm_bo = drm_gpuvm_bo_obtain(vma->gpuva.vm, &bo->ttm.base);  (in xe_vma_create())
  1025  if (IS_ERR(vm_bo)) {  (in xe_vma_create())
  1027  return ERR_CAST(vm_bo);  (in xe_vma_create())
  1030  drm_gpuvm_bo_extobj_add(vm_bo);  (in xe_vma_create())
  [all …]
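
xe_gpuvm_validate() is xe's implementation of the drm_gpuvm_ops
.vm_bo_validate hook: generic code (drm_gpuvm_validate()) walks the evicted
vm_bos and calls back so the driver can revalidate the BO and queue its VAs
for rebind. A skeletal, hedged version with stubbed driver hooks:

    #include <drm/drm_gpuvm.h>

    static void my_queue_rebind(struct drm_gpuva *va)
    {
            /* driver specific: mark the VA's PTEs for rewrite */
    }

    static int my_validate_bo(struct drm_gem_object *obj, struct drm_exec *exec)
    {
            return 0;       /* driver specific: ttm_bo_validate() etc. */
    }

    static int my_vm_bo_validate(struct drm_gpuvm_bo *vm_bo, struct drm_exec *exec)
    {
            struct drm_gpuva *va;
            int ret;

            /* Every VA backed by this vm_bo needs its PTEs rewritten. */
            drm_gpuvm_bo_for_each_va(va, vm_bo)
                    my_queue_rebind(va);

            ret = my_validate_bo(vm_bo->obj, exec);
            if (ret)
                    return ret;

            vm_bo->evicted = false;
            return 0;
    }

    static const struct drm_gpuvm_ops my_gpuvm_ops = {
            .vm_bo_validate = my_vm_bo_validate,
    };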

xe_bo.c
   499  struct drm_gpuvm_bo *vm_bo;  (in xe_bo_trigger_rebind(), local)
   513  drm_gem_for_each_gpuvm_bo(vm_bo, obj) {  (in xe_bo_trigger_rebind())
   514  struct xe_vm *vm = gpuvm_to_vm(vm_bo->vm);  (in xe_bo_trigger_rebind())
   518  drm_gpuvm_bo_evict(vm_bo, true);  (in xe_bo_trigger_rebind())
   542  drm_gpuvm_bo_for_each_va(gpuva, vm_bo) {  (in xe_bo_trigger_rebind())
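
xe_bo_trigger_rebind() fans an eviction out to every VM mapping the BO by
walking the GEM object's vm_bo list. The drm_gpuvm.h matches above (lines
752-755) show that drm_gpuvm_bo_gem_evict() wraps exactly this loop, so a
driver-side sketch reduces to:

    #include <drm/drm_gpuvm.h>

    /* Caller is expected to hold the GEM object's gpuva/resv lock. */
    static void my_bo_move_notify(struct drm_gem_object *obj)
    {
            struct drm_gpuvm_bo *vm_bo;

            drm_gem_for_each_gpuvm_bo(vm_bo, obj)
                    drm_gpuvm_bo_evict(vm_bo, true);

            /* Equivalent one-liner: drm_gpuvm_bo_gem_evict(obj, true); */
    }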