/linux/drivers/gpu/drm/xe/
xe_vm.h
     96  static inline struct xe_vm *gpuva_to_vm(struct drm_gpuva *gpuva)  in gpuva_to_vm() argument
     98  return gpuvm_to_vm(gpuva->vm);  in gpuva_to_vm()
    101  static inline struct xe_vma *gpuva_to_vma(struct drm_gpuva *gpuva)  in gpuva_to_vma() argument
    103  return container_of(gpuva, struct xe_vma, gpuva);  in gpuva_to_vma()
    117  return vma->gpuva.va.addr;  in xe_vma_start()
    122  return vma->gpuva.va.range;  in xe_vma_size()
    132  return vma->gpuva.gem.offset;  in xe_vma_bo_offset()
    137  return !vma->gpuva.gem.obj ? NULL :  in xe_vma_bo()
    138  container_of(vma->gpuva.gem.obj, struct xe_bo, ttm.base);  in xe_vma_bo()
    143  return container_of(vma->gpuva.vm, struct xe_vm, gpuvm);  in xe_vma_vm()
    [all …]
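The xe_vm.h helpers above all rely on the same embedding pattern: the driver VMA embeds a struct drm_gpuva, the driver VM embeds a struct drm_gpuvm, and container_of() converts back from the generic objects, whose va/gem fields carry the address, range and backing-object information. A minimal sketch of that pattern under hypothetical names (my_vm, my_vma and the to_my_*() helpers are illustrative, not part of the Xe code):

#include <linux/container_of.h>
#include <linux/types.h>
#include <drm/drm_gpuvm.h>

struct my_vm {
	struct drm_gpuvm gpuvm;		/* base GPU-VM object embedded in the driver VM */
};

struct my_vma {
	struct drm_gpuva gpuva;		/* base GPU-VA object embedded in the driver VMA */
};

static inline struct my_vma *to_my_vma(struct drm_gpuva *gpuva)
{
	/* recover the driver VMA wrapping this drm_gpuva */
	return container_of(gpuva, struct my_vma, gpuva);
}

static inline struct my_vm *to_my_vm(struct my_vma *vma)
{
	/* gpuva.vm points at the drm_gpuvm embedded in the driver VM */
	return container_of(vma->gpuva.vm, struct my_vm, gpuvm);
}

static inline u64 my_vma_start(struct my_vma *vma)
{
	return vma->gpuva.va.addr;	/* start address tracked by the GPUVM core */
}

static inline u64 my_vma_size(struct my_vma *vma)
{
	return vma->gpuva.va.range;	/* mapping size tracked by the GPUVM core */
}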
xe_vm.c
    329  struct drm_gpuva *gpuva;  in xe_gpuvm_validate() local
    333  drm_gpuvm_bo_for_each_va(gpuva, vm_bo)  in xe_gpuvm_validate()
    334  list_move_tail(&gpuva_to_vma(gpuva)->combined_links.rebind,  in xe_gpuvm_validate()
    656  op->base.map.va.addr = vma->gpuva.va.addr;  in xe_vm_populate_rebind()
    657  op->base.map.va.range = vma->gpuva.va.range;  in xe_vm_populate_rebind()
    658  op->base.map.gem.obj = vma->gpuva.gem.obj;  in xe_vm_populate_rebind()
    659  op->base.map.gem.offset = vma->gpuva.gem.offset;  in xe_vm_populate_rebind()
    662  op->map.vma_flags = vma->gpuva.flags & XE_VMA_CREATE_MASK;  in xe_vm_populate_rebind()
   1023  vma->gpuva.gem.obj = &bo->ttm.base;  in xe_vma_create()
   1028  INIT_LIST_HEAD(&vma->gpuva  in xe_vma_create()
   1193  struct drm_gpuva *gpuva;  in xe_vm_find_overlapping_vma() local
   1738  struct drm_gpuva *gpuva, *next;  in xe_vm_close_and_put() local
   2023  struct drm_gpuva *gpuva;  in xe_vm_query_vmas() local
   2036  struct drm_gpuva *gpuva;  in get_mem_attrs() local
   4065  struct drm_gpuva *gpuva;  in xe_vm_validate_protected() local
   4115  struct drm_gpuva *gpuva;  in xe_vm_snapshot_capture() local
   [all …]
xe_vm_madvise.c
     44  struct drm_gpuva *gpuva;  in get_vmas() local
     56  drm_gpuvm_for_each_va_range(gpuva, &vm->gpuvm, addr, addr + range) {  in get_vmas()
     57  struct xe_vma *vma = gpuva_to_vma(gpuva);  in get_vmas()
    195  struct drm_gpuva *gpuva;  in xe_zap_ptes_in_madvise_range() local
    206  drm_gpuvm_for_each_va_range(gpuva, &vm->gpuvm, start, end) {  in xe_zap_ptes_in_madvise_range()
    207  struct xe_vma *vma = gpuva_to_vma(gpuva);  in xe_zap_ptes_in_madvise_range()
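The madvise paths above walk every VMA intersecting a user-supplied range with drm_gpuvm_for_each_va_range() and convert each drm_gpuva back to the driver VMA. A hedged sketch of that loop, reusing the hypothetical my_vm/to_my_vma() wrappers from the previous sketch (my_vm_touch_range and its locking assumptions are illustrative):

#include <drm/drm_gpuvm.h>

/*
 * Walk all VAs overlapping [addr, addr + range).  Assumes the caller already
 * holds whatever lock the driver uses to protect the GPU-VA tree.
 */
static void my_vm_touch_range(struct my_vm *vm, u64 addr, u64 range)
{
	struct drm_gpuva *gpuva;

	drm_gpuvm_for_each_va_range(gpuva, &vm->gpuvm, addr, addr + range) {
		struct my_vma *vma = to_my_vma(gpuva);

		/* per-VMA work goes here, e.g. updating madvise attributes */
		(void)vma;
	}
}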
xe_vm_types.h
    100  /** @gpuva: Base GPUVA object */
    101  struct drm_gpuva gpuva;  member
    243  * @snap_mutex: Mutex used to guard insertions and removals from gpuva,
xe_pt.c
    213  XE_WARN_ON(!list_empty(&pt->bo->ttm.base.gpuva.list));  in xe_pt_destroy()
    557  xe_walk->vma->gpuva.flags |=  in xe_pt_stage_bind_entry()
    574  xe_walk->vma->gpuva.flags |= (XE_VMA_PTE_4K << level);  in xe_pt_stage_bind_entry()
    617  xe_walk->vma->gpuva.flags |= XE_VMA_PTE_COMPACT;  in xe_pt_stage_bind_entry()
    752  if (vma->gpuva.flags & XE_VMA_ATOMIC_PTE_BIT) {  in xe_pt_stage_bind()
xe_bo.c
    158  return !list_empty(&bo->ttm.base.gpuva.list);  in xe_bo_is_vm_bound()
    678  if (!list_empty(&bo->ttm.base.gpuva.list)) {  in xe_bo_trigger_rebind()
    688  struct drm_gpuva *gpuva;  in xe_bo_trigger_rebind() local
    715  drm_gpuvm_bo_for_each_va(gpuva, vm_bo) {  in xe_bo_trigger_rebind()
    716  struct xe_vma *vma = gpuva_to_vma(gpuva);  in xe_bo_trigger_rebind()
   1714  xe_assert(xe, list_empty(&ttm_bo->base.gpuva.list));  in xe_ttm_bo_destroy()
xe_svm.c
    336  if (!(vma->gpuva.flags & XE_VMA_MADV_AUTORESET)) {  in xe_svm_range_set_default_attr()
/linux/drivers/gpu/drm/
drm_gpuvm.c
   1594  INIT_LIST_HEAD(&vm_bo->list.gpuva);  in drm_gpuvm_bo_create()
   1736  mutex_unlock(&vm_bo->obj->gpuva.lock);  in drm_gpuvm_bo_defer_free()
   1762  &vm_bo->obj->gpuva.lock);  in drm_gpuvm_bo_put_deferred()
   1876  list_add_tail(&vm_bo->list.entry.gem, &obj->gpuva.list);  in drm_gpuvm_bo_obtain_locked()
   1910  mutex_lock(&obj->gpuva.lock);  in drm_gpuvm_bo_obtain_prealloc()
   1913  mutex_unlock(&obj->gpuva.lock);  in drm_gpuvm_bo_obtain_prealloc()
   1919  list_add_tail(&__vm_bo->list.entry.gem, &obj->gpuva.list);  in drm_gpuvm_bo_obtain_prealloc()
   1920  mutex_unlock(&obj->gpuva.lock);  in drm_gpuvm_bo_obtain_prealloc()
   2107  list_add_tail(&va->gem.entry, &vm_bo->list.gpuva);  in drm_gpuva_link()
   2164  mutex_lock(&obj->gpuva.lock);  in drm_gpuva_unlink_defer()
   [all …]
drm_gem.c
    230  mutex_init(&obj->gpuva.lock);  in drm_gem_private_object_init()
    254  mutex_destroy(&obj->gpuva.lock);  in drm_gem_private_object_fini()
/linux/drivers/gpu/drm/panthor/
panthor_mmu.c
   1362  struct drm_gpuva *gpuva;  in panthor_vm_get_bo_for_va() local
   1367  gpuva = drm_gpuva_find_first(&vm->base, va, 1);  in panthor_vm_get_bo_for_va()
   1368  vma = gpuva ? container_of(gpuva, struct panthor_vma, base) : NULL;  in panthor_vm_get_bo_for_va()
   2054  mutex_lock(&bo->base.base.gpuva.lock);  in panthor_vma_link()
   2056  mutex_unlock(&bo->base.base.gpuva.lock);  in panthor_vma_link()
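drm_gem.c above initializes a per-object gpuva.lock mutex, and panthor_vma_link() takes it around the step drm_gpuva_link() performs, namely adding the VA to the vm_bo's list.gpuva. A rough sketch of that driver-side link step, continuing the hypothetical my_vma wrapper (my_vma_link is illustrative; vm_bo is assumed to have been obtained earlier, e.g. via drm_gpuvm_bo_obtain(), and to still hold a reference):

#include <linux/mutex.h>
#include <drm/drm_gem.h>
#include <drm/drm_gpuvm.h>

static void my_vma_link(struct my_vma *vma, struct drm_gpuvm_bo *vm_bo)
{
	struct drm_gem_object *obj = vm_bo->obj;

	/* the GEM object's gpuva.lock protects its list of VAs/vm_bos */
	mutex_lock(&obj->gpuva.lock);
	drm_gpuva_link(&vma->gpuva, vm_bo);	/* adds the VA to vm_bo->list.gpuva */
	mutex_unlock(&obj->gpuva.lock);
}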
/linux/drivers/gpu/drm/msm/
msm_gem.c
    394  GEM_WARN_ON(!list_is_singular(&vm_bo->list.gpuva));  in lookup_vma()
    978  if (!list_empty(&obj->gpuva.list)) {  in msm_gem_describe()
   1072  if (!list_empty(&obj->gpuva.list)) {  in msm_gem_free_object()