Searched refs:vm_bo (Results 1 – 9 of 9) sorted by relevance
/linux/drivers/gpu/drm/
drm_gpuvm.c

  in drm_gpuvm_bo_is_zombie():
     900  drm_gpuvm_bo_is_zombie(struct drm_gpuvm_bo *vm_bo)        [argument]
     902      return !kref_read(&vm_bo->kref);

  in __drm_gpuvm_prepare_objects():
    1224      struct drm_gpuvm_bo *vm_bo;                           [local]
    1228      for_each_vm_bo_in_list(gpuvm, extobj, &extobjs, vm_bo) {
    1229          ret = exec_prepare_obj(exec, vm_bo->obj, num_fences);
    1234      drm_gpuvm_bo_put(vm_bo);

  in drm_gpuvm_prepare_objects_locked():
    1245      struct drm_gpuvm_bo *vm_bo;                           [local]
    1249      list_for_each_entry(vm_bo, &gpuvm->extobj.list, list.entry.extobj) {
    1250          if (drm_gpuvm_bo_is_zombie(vm_bo))
    1253          ret = exec_prepare_obj(exec, vm_bo->obj, num_fences);

  [all …]
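The drm_gpuvm.c hits show the two sides of GPUVM's external-object bookkeeping: __drm_gpuvm_prepare_objects() iterates with for_each_vm_bo_in_list(), which takes and drops temporary vm_bo references, while drm_gpuvm_prepare_objects_locked() walks the raw extobj list and skips zombies, i.e. vm_bos whose kref already reached zero but which have not yet been unlinked. Below is a kernel-style sketch of the locked walk, assuming only the fields and helpers named in the hits; drm_exec_prepare_obj() stands in for the static exec_prepare_obj() wrapper, and nothing here compiles outside a kernel tree carrying this GPUVM series.

```c
#include <drm/drm_exec.h>
#include <drm/drm_gpuvm.h>

/*
 * Sketch: prepare all external objects of @gpuvm with the resv lock
 * already held.  A vm_bo whose refcount has dropped to zero is a
 * "zombie" awaiting deferred cleanup and must not be touched.
 */
static int prepare_extobjs_locked_sketch(struct drm_gpuvm *gpuvm,
					 struct drm_exec *exec,
					 unsigned int num_fences)
{
	struct drm_gpuvm_bo *vm_bo;
	int ret;

	list_for_each_entry(vm_bo, &gpuvm->extobj.list, list.entry.extobj) {
		if (!kref_read(&vm_bo->kref))	/* zombie, skip */
			continue;

		ret = drm_exec_prepare_obj(exec, vm_bo->obj, num_fences);
		if (ret)
			return ret;
	}

	return 0;
}
```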
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm.c

  in amdgpu_vm_bo_evicted():
    163  static void amdgpu_vm_bo_evicted(struct amdgpu_vm_bo_base *vm_bo)  [argument]
    165      struct amdgpu_vm *vm = vm_bo->vm;
    166      struct amdgpu_bo *bo = vm_bo->bo;
    168      vm_bo->moved = true;
    170      spin_lock(&vm_bo->vm->status_lock);
    172      list_move(&vm_bo->vm_status, &vm->evicted);
    174      list_move_tail(&vm_bo->vm_status, &vm->evicted);
    175      spin_unlock(&vm_bo->vm->status_lock);

  in amdgpu_vm_bo_moved():
    185  static void amdgpu_vm_bo_moved(struct amdgpu_vm_bo_base *vm_bo)    [argument]
    187      amdgpu_vm_assert_locked(vm_bo->vm);

  [all …]
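The amdgpu hits sketch a per-VM state machine: every amdgpu_vm_bo_base lives on exactly one status list, and a state change is a list_move under vm->status_lock (lines 172 and 174 move the entry to the head or the tail of vm->evicted). A condensed sketch of that transition, built only from the fields visible in the hits; the to_tail flag is an assumption standing in for amdgpu's actual head-vs-tail criterion.

```c
#include <linux/list.h>
#include <linux/spinlock.h>

/* Sketch: move a BO's VM state to "evicted" under the status lock. */
static void vm_bo_set_evicted_sketch(struct amdgpu_vm_bo_base *vm_bo,
				     bool to_tail)
{
	struct amdgpu_vm *vm = vm_bo->vm;

	vm_bo->moved = true;

	spin_lock(&vm->status_lock);
	if (to_tail)
		list_move_tail(&vm_bo->vm_status, &vm->evicted);
	else
		list_move(&vm_bo->vm_status, &vm->evicted);
	spin_unlock(&vm->status_lock);
}
```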
amdgpu_vm_pt.c

  in amdgpu_vm_pt_parent():
    145      return parent->vm_bo;
  in amdgpu_vm_pt_free():
    544      entry->bo->vm_bo = NULL;
amdgpu_amdkfd_gpuvm.c

  in amdgpu_amdkfd_map_gtt_bo_to_gart():
   2255          bo, bo->vm_bo->vm->process_info->eviction_fence);
/linux/drivers/gpu/drm/nouveau/
nouveau_uvmm.c

     65      struct drm_gpuvm_bo *vm_bo;                            [member]

  in bind_link_gpuvas():
   1191      struct drm_gpuvm_bo *vm_bo = bop->vm_bo;               [local]
   1198      drm_gpuva_link(&new->map->va, vm_bo);
   1204      drm_gpuva_link(&new->prev->va, va->vm_bo);
   1206      drm_gpuva_link(&new->next->va, va->vm_bo);

  in nouveau_uvmm_bind_job_submit():
   1278      op->vm_bo = drm_gpuvm_bo_obtain_locked(&uvmm->base, obj);
   1280      if (IS_ERR(op->vm_bo))
   1281          return PTR_ERR(op->vm_bo);
   1283      drm_gpuvm_bo_extobj_add(op->vm_bo);

  in nouveau_uvmm_bind_job_cleanup():
   1592      if (!IS_ERR_OR_NULL(op->vm_bo)) {

  [all …]
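nouveau_uvmm.c shows the canonical vm_bo lifecycle on a bind job: obtain (or create) the drm_gpuvm_bo for the GEM object while its lock is held, advertise it on the external-object list, then link each new drm_gpuva to it. In nouveau the obtain reference is held on the op until job cleanup (the hit at line 1592); the sketch below condenses the flow and drops the reference immediately after linking, since drm_gpuva_link() takes its own. Only helpers named in the hits are used; the surrounding job/op structures are elided.

```c
#include <drm/drm_gpuvm.h>

/*
 * Sketch: acquire a vm_bo for @obj (lock held), register it as an
 * external object and link one VA to it.  drm_gpuva_link() takes its
 * own vm_bo reference, so the obtain reference can be dropped here.
 */
static int bind_obtain_and_link_sketch(struct drm_gpuvm *gpuvm,
				       struct drm_gem_object *obj,
				       struct drm_gpuva *va)
{
	struct drm_gpuvm_bo *vm_bo;

	vm_bo = drm_gpuvm_bo_obtain_locked(gpuvm, obj);
	if (IS_ERR(vm_bo))
		return PTR_ERR(vm_bo);

	drm_gpuvm_bo_extobj_add(vm_bo);
	drm_gpuva_link(va, vm_bo);

	drm_gpuvm_bo_put(vm_bo);
	return 0;
}
```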
/linux/drivers/gpu/drm/panthor/
panthor_mmu.c

    188      struct drm_gpuvm_bo *vm_bo;                            [member]

  in panthor_vm_bo_free():
   1082  static void panthor_vm_bo_free(struct drm_gpuvm_bo *vm_bo) [argument]
   1084      struct panthor_gem_object *bo = to_panthor_bo(vm_bo->obj);
   1088      kfree(vm_bo);

  in panthor_vm_cleanup_op_ctx():
   1105      if (op_ctx->map.vm_bo)
   1106          drm_gpuvm_bo_put_deferred(op_ctx->map.vm_bo);

  in panthor_vm_prepare_map_op_ctx():
   1257      op_ctx->map.vm_bo = drm_gpuvm_bo_obtain_prealloc(preallocated_vm_bo);
   1287      drm_gpuvm_bo_extobj_add(op_ctx->map.vm_bo);

  in panthor_vma_link():
   2070                              struct drm_gpuvm_bo *vm_bo)    [argument]
   2075      drm_gpuva_link(&vma->base, vm_bo);

  [all …]
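panthor_mmu.c adds two twists on the same lifecycle: the vm_bo is preallocated up front and handed to drm_gpuvm_bo_obtain_prealloc(), which either installs it or frees it and returns the already-existing vm_bo for that gpuvm/object pair, and dropping a reference from the VM-operation context goes through drm_gpuvm_bo_put_deferred() so the final free (panthor_vm_bo_free()) can run later. A hedged sketch of the prealloc side; drm_gpuvm_bo_create() is the stock GPUVM allocator, and the locking comment reflects the documented requirement rather than anything shown in the hits.

```c
#include <drm/drm_gpuvm.h>

/*
 * Sketch: preallocate the vm_bo in a context that may sleep, then,
 * with the GEM object's gpuva lock held, let GPUVM either adopt the
 * preallocation or hand back the existing vm_bo (with a reference).
 */
static struct drm_gpuvm_bo *
prealloc_obtain_sketch(struct drm_gpuvm *gpuvm, struct drm_gem_object *obj)
{
	struct drm_gpuvm_bo *prealloc, *vm_bo;

	prealloc = drm_gpuvm_bo_create(gpuvm, obj);	/* may sleep */
	if (!prealloc)
		return ERR_PTR(-ENOMEM);

	/* ... later, gpuva lock held ... */
	vm_bo = drm_gpuvm_bo_obtain_prealloc(prealloc);
	drm_gpuvm_bo_extobj_add(vm_bo);

	return vm_bo;
}
```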
/linux/drivers/gpu/drm/xe/
xe_vm.c

  in xe_gpuvm_validate():
    328  static int xe_gpuvm_validate(struct drm_gpuvm_bo *vm_bo, struct drm_exec *exec)  [argument]
    330      struct xe_vm *vm = gpuvm_to_vm(vm_bo->vm);
    331      struct xe_bo *bo = gem_to_xe_bo(vm_bo->obj);
    336      drm_gpuvm_bo_for_each_va(gpuva, vm_bo)
    342      vm_bo->evicted = false;
    353      vm_bo->evicted = false;

  in xe_vma_create():
   1119      struct drm_gpuvm_bo *vm_bo;                             [local]
   1123      vm_bo = drm_gpuvm_bo_obtain_locked(vma->gpuva.vm, &bo->ttm.base);
   1124      if (IS_ERR(vm_bo)) {
   1126          return ERR_CAST(vm_bo);

  [all …]
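xe_gpuvm_validate() is xe's implementation of GPUVM's vm_bo_validate hook: revalidate the evicted BO, walk every VA that maps it so the next exec rebinds them, then clear vm_bo->evicted. A sketch of that callback shape, using the drm_gpuvm_bo_for_each_va() iterator from the hits; driver_bo_validate() and driver_mark_for_rebind() are hypothetical stand-ins for the driver-specific parts.

```c
#include <drm/drm_exec.h>
#include <drm/drm_gpuvm.h>

/* Sketch of a .vm_bo_validate callback, called for each evicted vm_bo. */
static int vm_bo_validate_sketch(struct drm_gpuvm_bo *vm_bo,
				 struct drm_exec *exec)
{
	struct drm_gpuva *va;
	int ret;

	ret = driver_bo_validate(vm_bo->obj, exec);	/* hypothetical */
	if (ret)
		return ret;

	/* Every VA backed by this BO must be rebound on the next exec. */
	drm_gpuvm_bo_for_each_va(va, vm_bo)
		driver_mark_for_rebind(va);		/* hypothetical */

	vm_bo->evicted = false;
	return 0;
}
```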
xe_vm_madvise.c

  in xe_bo_all_vmas_dontneed():
    257      struct drm_gpuvm_bo *vm_bo;                             [local]
    268      drm_gem_for_each_gpuvm_bo(vm_bo, obj) {
    269          drm_gpuvm_bo_for_each_va(gpuva, vm_bo) {
xe_bo.c

  in xe_bo_trigger_rebind():
    670      struct drm_gpuvm_bo *vm_bo;                             [local]
    684      drm_gem_for_each_gpuvm_bo(vm_bo, obj) {
    685          struct xe_vm *vm = gpuvm_to_vm(vm_bo->vm);
    689          drm_gpuvm_bo_evict(vm_bo, true);
    718      drm_gpuvm_bo_for_each_va(gpuva, vm_bo) {

  in xe_bo_eviction_valuable():
   1226      struct drm_gpuvm_bo *vm_bo;                             [local]
   1234      drm_gem_for_each_gpuvm_bo(vm_bo, &bo->base) {
   1235      if (xe_vm_is_validating(gpuvm_to_vm(vm_bo->vm)))
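xe_bo.c (like the xe_vm_madvise.c hit above) walks the relationship in the other direction: starting from a GEM object, drm_gem_for_each_gpuvm_bo() visits one vm_bo per VM the object is mapped in, and drm_gpuvm_bo_evict() moves each onto its VM's evicted list. A minimal sketch of that eviction notification, assuming the caller holds the object's gpuva lock as the iterator requires.

```c
#include <drm/drm_gem.h>
#include <drm/drm_gpuvm.h>

/*
 * Sketch: tell every VM mapping @obj that its backing storage moved,
 * so each VM revalidates the BO before its next job submission.
 */
static void notify_eviction_sketch(struct drm_gem_object *obj)
{
	struct drm_gpuvm_bo *vm_bo;

	drm_gem_for_each_gpuvm_bo(vm_bo, obj)
		drm_gpuvm_bo_evict(vm_bo, true);
}
```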