/linux/drivers/gpu/drm/loongson/
lsdc_ttm.c
    53  if (lbo->tbo.base.size <= PAGE_SIZE)  in lsdc_bo_set_placement()
    93  lsdc_ttm_tt_create(struct ttm_buffer_object *tbo, uint32_t page_flags)  in lsdc_ttm_tt_create() argument
   102  ret = ttm_sg_tt_init(tt, tbo, page_flags, ttm_cached);  in lsdc_ttm_tt_create()
   139  static void lsdc_bo_evict_flags(struct ttm_buffer_object *tbo,  in lsdc_bo_evict_flags() argument
   142  struct ttm_resource *resource = tbo->resource;  in lsdc_bo_evict_flags()
   143  struct lsdc_bo *lbo = to_lsdc_bo(tbo);  in lsdc_bo_evict_flags()
   158  static int lsdc_bo_move(struct ttm_buffer_object *tbo,  in lsdc_bo_move() argument
   164  struct drm_device *ddev = tbo->base.dev;  in lsdc_bo_move()
   165  struct ttm_resource *old_mem = tbo->resource;  in lsdc_bo_move()
   166  struct lsdc_bo *lbo = to_lsdc_bo(tbo);  in lsdc_bo_move()
   [all …]
lsdc_gem.c
    44  struct ttm_buffer_object *tbo = to_ttm_bo(obj);  in lsdc_gem_prime_get_sg_table() local
    45  struct ttm_tt *tt = tbo->ttm;  in lsdc_gem_prime_get_sg_table()
    57  struct ttm_buffer_object *tbo = to_ttm_bo(obj);  in lsdc_gem_object_free() local
    59  if (tbo)  in lsdc_gem_object_free()
    60  ttm_bo_put(tbo);  in lsdc_gem_object_free()
    65  struct ttm_buffer_object *tbo = to_ttm_bo(obj);  in lsdc_gem_object_vmap() local
    66  struct lsdc_bo *lbo = to_lsdc_bo(tbo);  in lsdc_gem_object_vmap()
    80  ret = ttm_bo_vmap(tbo, &lbo->map);  in lsdc_gem_object_vmap()
    97  struct ttm_buffer_object *tbo = to_ttm_bo(obj);  in lsdc_gem_object_vunmap() local
    98  struct lsdc_bo *lbo = to_lsdc_bo(tbo);  in lsdc_gem_object_vunmap()
   [all …]
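The lsdc_gem.c hits implement the GEM vmap/vunmap callbacks on top of ttm_bo_vmap()/ttm_bo_vunmap(), which fill and release a struct iosys_map. A minimal sketch of that shape follows; my_gem_object_vmap/my_gem_object_vunmap are illustrative names, and the pinning and mapping cache that lsdc keeps in its lsdc_bo are omitted.

#include <linux/container_of.h>
#include <linux/iosys-map.h>
#include <drm/drm_gem.h>
#include <drm/ttm/ttm_bo.h>

/* GEM vmap callback backed by TTM; the caller holds the object's dma_resv lock. */
static int my_gem_object_vmap(struct drm_gem_object *obj, struct iosys_map *map)
{
        /* struct drm_gem_object is embedded in struct ttm_buffer_object as ->base. */
        struct ttm_buffer_object *tbo = container_of(obj, struct ttm_buffer_object, base);

        /* Maps the whole buffer and records the mapping cookie in *map. */
        return ttm_bo_vmap(tbo, map);
}

static void my_gem_object_vunmap(struct drm_gem_object *obj, struct iosys_map *map)
{
        struct ttm_buffer_object *tbo = container_of(obj, struct ttm_buffer_object, base);

        ttm_bo_vunmap(tbo, map);
}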
lsdc_ttm.h
    24  struct ttm_buffer_object tbo;  member
    52  static inline struct lsdc_bo *to_lsdc_bo(struct ttm_buffer_object *tbo)  in to_lsdc_bo() argument
    54  return container_of(tbo, struct lsdc_bo, tbo);  in to_lsdc_bo()
    59  return container_of(gem, struct lsdc_bo, tbo.base);  in gem_to_lsdc_bo()
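These four lsdc_ttm.h lines show the pattern every directory in this listing repeats: the driver-private BO embeds a struct ttm_buffer_object named tbo, and container_of() recovers the wrapper from either the TTM object or the GEM object embedded inside it. A sketch of that layout with illustrative names (my_bo, to_my_bo and gem_to_my_bo are not taken from any of these drivers):

#include <linux/container_of.h>
#include <drm/drm_gem.h>
#include <drm/ttm/ttm_bo.h>

/* Driver-private buffer object wrapping the TTM BO. */
struct my_bo {
        struct ttm_buffer_object tbo;   /* must be embedded, not a pointer */
        void *driver_state;             /* illustrative driver-specific data */
};

/* TTM callbacks hand back &my_bo->tbo; container_of() gets the wrapper. */
static inline struct my_bo *to_my_bo(struct ttm_buffer_object *tbo)
{
        return container_of(tbo, struct my_bo, tbo);
}

/* GEM code sees &my_bo->tbo.base, the drm_gem_object embedded in the TTM BO. */
static inline struct my_bo *gem_to_my_bo(struct drm_gem_object *gem)
{
        return container_of(gem, struct my_bo, tbo.base);
}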
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
    58  static void amdgpu_bo_destroy(struct ttm_buffer_object *tbo)  in amdgpu_bo_destroy() argument
    60  struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);  in amdgpu_bo_destroy()
    64  if (bo->tbo.base.import_attach)  in amdgpu_bo_destroy()
    65  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in amdgpu_bo_destroy()
    66  drm_gem_object_release(&bo->tbo.base);  in amdgpu_bo_destroy()
    71  static void amdgpu_bo_user_destroy(struct ttm_buffer_object *tbo)  in amdgpu_bo_user_destroy() argument
    73  struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);  in amdgpu_bo_user_destroy()
    78  amdgpu_bo_destroy(tbo);  in amdgpu_bo_user_destroy()
   110  struct amdgpu_device *adev = amdgpu_ttm_adev(abo->tbo.bdev);  in amdgpu_bo_placement_from_domain()
   139  if (abo->tbo.type == ttm_bo_type_kernel &&  in amdgpu_bo_placement_from_domain()
   [all …]
amdgpu_dma_buf.c
    58  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_dma_buf_attach()
   116  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_dma_buf_map()
   120  if (!bo->tbo.pin_count) {  in amdgpu_dma_buf_map()
   131  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_dma_buf_map()
   135  } else if (bo->tbo.resource->mem_type != TTM_PL_TT) {  in amdgpu_dma_buf_map()
   139  switch (bo->tbo.resource->mem_type) {  in amdgpu_dma_buf_map()
   142  bo->tbo.ttm->pages,  in amdgpu_dma_buf_map()
   143  bo->tbo.ttm->num_pages);  in amdgpu_dma_buf_map()
   153  r = amdgpu_vram_mgr_alloc_sgt(adev, bo->tbo.resource, 0,  in amdgpu_dma_buf_map()
   154  bo->tbo.base.size, attach->dev,  in amdgpu_dma_buf_map()
   [all …]
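amdgpu_dma_buf_map() branches on bo->tbo.resource->mem_type: buffers resident in system memory (TTM_PL_TT) are exported by building an sg_table from the ttm_tt page array, while VRAM goes through the driver's own sg allocator. A reduced sketch of the TT branch only, with amdgpu's pin/validate policy and the VRAM path left out; my_map_tt_backed is an illustrative name.

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <drm/drm_prime.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_tt.h>

/*
 * Export a system-memory BO: wrap its ttm_tt pages in an sg_table and map it
 * for the importing device.
 */
static struct sg_table *my_map_tt_backed(struct dma_buf_attachment *attach,
                                         struct ttm_buffer_object *bo,
                                         enum dma_data_direction dir)
{
        struct sg_table *sgt;
        int r;

        sgt = drm_prime_pages_to_sg(bo->base.dev, bo->ttm->pages, bo->ttm->num_pages);
        if (IS_ERR(sgt))
                return sgt;

        r = dma_map_sgtable(attach->dev, sgt, dir, 0);
        if (r) {
                sg_free_table(sgt);
                kfree(sgt);
                return ERR_PTR(r);
        }

        return sgt;
}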
amdgpu_amdkfd_gpuvm.c
   295  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_amdkfd_release_notify()
   330  ret = amdgpu_gem_object_create(adev, mem->bo->tbo.base.size, 1,  in create_dmamap_sg_bo()
   332  ttm_bo_type_sg, mem->bo->tbo.base.resv, &gem_obj, 0);  in create_dmamap_sg_bo()
   367  dma_resv_replace_fences(bo->tbo.base.resv, ef->base.context,  in amdgpu_amdkfd_remove_eviction_fence()
   401  BUG_ON(!dma_resv_trylock(bo->tbo.base.resv));  in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
   403  dma_resv_unlock(bo->tbo.base.resv);  in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
   415  if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm),  in amdgpu_amdkfd_bo_validate()
   420  if (bo->tbo.pin_count)  in amdgpu_amdkfd_bo_validate()
   425  ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_amdkfd_bo_validate()
   448  ret = dma_resv_reserve_fences(bo->tbo.base.resv, 1);  in amdgpu_amdkfd_bo_validate_and_fence()
   [all …]
amdgpu_object.h
   107  struct ttm_buffer_object tbo;  member
   142  static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo)  in ttm_to_amdgpu_bo() argument
   144  return container_of(tbo, struct amdgpu_bo, tbo);  in ttm_to_amdgpu_bo()
   187  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_bo_reserve()
   190  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  in amdgpu_bo_reserve()
   201  ttm_bo_unreserve(&bo->tbo);  in amdgpu_bo_unreserve()
   206  return bo->tbo.base.size;  in amdgpu_bo_size()
   211  return bo->tbo.base.size / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_bo_ngpu_pages()
   216  return (bo->tbo.page_alignment << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_bo_gpu_page_alignment()
   227  return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  in amdgpu_bo_mmap_offset()
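The amdgpu_object.h helpers wrap the TTM reservation (dma_resv) lock and derive object size and the userspace mmap offset from the embedded tbo. A hedged sketch of the same wrappers for a generic my_bo; the amdgpu-specific error reporting through adev is dropped, and all names here are illustrative.

#include <drm/drm_vma_manager.h>
#include <drm/ttm/ttm_bo.h>

struct my_bo {
        struct ttm_buffer_object tbo;
};

/* Take the BO's dma_resv lock; interruptible unless no_intr is set. */
static inline int my_bo_reserve(struct my_bo *bo, bool no_intr)
{
        return ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
}

static inline void my_bo_unreserve(struct my_bo *bo)
{
        ttm_bo_unreserve(&bo->tbo);
}

/* Size and the fake mmap offset both come from the embedded GEM object. */
static inline unsigned long my_bo_size(struct my_bo *bo)
{
        return bo->tbo.base.size;
}

static inline u64 my_bo_mmap_offset(struct my_bo *bo)
{
        return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);
}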
amdgpu_gem.c
    92  ttm_bo_put(&aobj->tbo);  in amdgpu_gem_object_free()
   126  *obj = &bo->tbo.base;  in amdgpu_gem_object_create()
   163  struct amdgpu_device *adev = amdgpu_ttm_adev(abo->tbo.bdev);  in amdgpu_gem_object_open()
   170  mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm);  in amdgpu_gem_object_open()
   228  struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  in amdgpu_gem_object_close()
   239  r = drm_exec_prepare_obj(&exec, &bo->tbo.base, 1);  in amdgpu_gem_object_close()
   278  if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))  in amdgpu_gem_object_mmap()
   369  resv = vm->root.bo->tbo.base.resv;  in amdgpu_gem_create_ioctl()
   453  r = amdgpu_ttm_tt_set_userptr(&bo->tbo, args->addr, args->flags);  in amdgpu_gem_userptr_ioctl()
   462  r = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages,  in amdgpu_gem_userptr_ioctl()
   [all …]
amdgpu_vm.c
   184  if (bo->tbo.type == ttm_bo_type_kernel)  in amdgpu_vm_bo_evicted()
   305  if (!bo || bo->tbo.type != ttm_bo_type_kernel)  in amdgpu_vm_bo_reset_state_machine()
   339  dma_resv_assert_held(vm->root.bo->tbo.base.resv);  in amdgpu_vm_bo_base_init()
   341  ttm_bo_set_bulk_move(&bo->tbo, &vm->lru_bulk_move);  in amdgpu_vm_bo_base_init()
   342  if (bo->tbo.type == ttm_bo_type_kernel && bo->parent)  in amdgpu_vm_bo_base_init()
   348  amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type))  in amdgpu_vm_bo_base_init()
   372  return drm_exec_prepare_obj(exec, &vm->root.bo->tbo.base,  in amdgpu_vm_lock_pd()
   493  if (bo->tbo.type != ttm_bo_type_kernel) {  in amdgpu_vm_validate()
   509  if (dma_resv_locking_ctx(bo->tbo.base.resv) != ticket) {  in amdgpu_vm_validate()
   924  dma_resv_add_fence(vm->root.bo->tbo.base.resv, *fence,  in amdgpu_vm_tlb_flush()
   [all …]
/linux/drivers/gpu/drm/qxl/
qxl_object.c
    32  static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in qxl_ttm_bo_destroy() argument
    37  bo = to_qxl_bo(tbo);  in qxl_ttm_bo_destroy()
    38  qdev = to_qxl(bo->tbo.base.dev);  in qxl_ttm_bo_destroy()
    45  drm_gem_object_release(&bo->tbo.base);  in qxl_ttm_bo_destroy()
    62  if (qbo->tbo.base.size <= PAGE_SIZE)  in qxl_ttm_placement_from_domain()
   123  r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);  in qxl_bo_create()
   128  bo->tbo.base.funcs = &qxl_object_funcs;  in qxl_bo_create()
   138  bo->tbo.priority = priority;  in qxl_bo_create()
   139  r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,  in qxl_bo_create()
   150  ttm_bo_pin(&bo->tbo);  in qxl_bo_create()
   [all …]
qxl_gem.c
    35  struct ttm_buffer_object *tbo;  in qxl_gem_object_free() local
    41  tbo = &qobj->tbo;  in qxl_gem_object_free()
    42  ttm_bo_put(tbo);  in qxl_gem_object_free()
    66  *obj = &qbo->tbo.base;  in qxl_gem_object_create()
qxl_object.h
    34  r = ttm_bo_reserve(&bo->tbo, true, false, NULL);  in qxl_bo_reserve()
    37  struct drm_device *ddev = bo->tbo.base.dev;  in qxl_bo_reserve()
    48  ttm_bo_unreserve(&bo->tbo);  in qxl_bo_unreserve()
    53  return bo->tbo.base.size;  in qxl_bo_size()
/linux/drivers/gpu/drm/radeon/
radeon_object.c
    52  static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in radeon_ttm_bo_destroy() argument
    56  bo = container_of(tbo, struct radeon_bo, tbo);  in radeon_ttm_bo_destroy()
    63  if (bo->tbo.base.import_attach)  in radeon_ttm_bo_destroy()
    64  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in radeon_ttm_bo_destroy()
    65  drm_gem_object_release(&bo->tbo.base);  in radeon_ttm_bo_destroy()
   153  drm_gem_private_object_init(rdev_to_drm(rdev), &bo->tbo.base, size);  in radeon_bo_create()
   154  bo->tbo.base.funcs = &radeon_gem_object_funcs;  in radeon_bo_create()
   204  r = ttm_bo_init_validate(&rdev->mman.bdev, &bo->tbo, type,  in radeon_bo_create()
   223  r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL,  in radeon_bo_kmap()
   234  r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap);  in radeon_bo_kmap()
   [all …]
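radeon_bo_kmap() first waits for kernel fences on the BO's reservation object and then maps every page with ttm_bo_kmap(). A condensed sketch of that sequence, assuming the caller already reserved the BO; radeon's cached-kmap bookkeeping is omitted and my_bo_kmap is an illustrative name.

#include <linux/dma-resv.h>
#include <linux/pfn.h>
#include <linux/sched.h>
#include <drm/ttm/ttm_bo.h>

/* Map a reserved TTM BO into the kernel address space and return a CPU pointer. */
static int my_bo_kmap(struct ttm_buffer_object *bo, struct ttm_bo_kmap_obj *kmap,
                      void **ptr)
{
        bool is_iomem;
        long r;

        /* Wait for kernel-internal work (moves, clears) queued on the BO. */
        r = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL,
                                  false, MAX_SCHEDULE_TIMEOUT);
        if (r < 0)
                return r;

        r = ttm_bo_kmap(bo, 0, PFN_UP(bo->base.size), kmap);
        if (r)
                return r;

        *ptr = ttm_kmap_obj_virtual(kmap, &is_iomem);
        return 0;
}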
radeon_object.h
    68  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  in radeon_bo_reserve()
    79  ttm_bo_unreserve(&bo->tbo);  in radeon_bo_unreserve()
    96  rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_bo_gpu_offset()
    98  switch (bo->tbo.resource->mem_type) {  in radeon_bo_gpu_offset()
   107  return (bo->tbo.resource->start << PAGE_SHIFT) + start;  in radeon_bo_gpu_offset()
   112  return bo->tbo.base.size;  in radeon_bo_size()
   117  return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE;  in radeon_bo_ngpu_pages()
   122  return (bo->tbo.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  in radeon_bo_gpu_page_alignment()
   133  return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  in radeon_bo_mmap_offset()
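radeon_bo_gpu_offset() converts the TTM resource's page index into a GPU address by adding the aperture base for whichever memory type the BO currently sits in. A simplified sketch of that arithmetic; my_gpu_apertures and its fields are illustrative, and the real helper also warns when the BO is not pinned.

#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_placement.h>

/* Illustrative per-device GPU aperture bases. */
struct my_gpu_apertures {
        u64 gtt_start;
        u64 vram_start;
};

/* GPU address = aperture base of the current placement + page offset inside it. */
static u64 my_bo_gpu_offset(struct ttm_buffer_object *bo,
                            const struct my_gpu_apertures *ap)
{
        u64 base;

        switch (bo->resource->mem_type) {
        case TTM_PL_TT:
                base = ap->gtt_start;
                break;
        case TTM_PL_VRAM:
                base = ap->vram_start;
                break;
        default:
                return 0;       /* not GPU-visible in this sketch */
        }

        return (bo->resource->start << PAGE_SHIFT) + base;
}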
radeon_mn.c
    57  if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))  in radeon_mn_invalidate()
    69  r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,  in radeon_mn_invalidate()
    75  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in radeon_mn_invalidate()
radeon_prime.c
    41  return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,  in radeon_gem_prime_get_sg_table()
    42  bo->tbo.ttm->num_pages);  in radeon_gem_prime_get_sg_table()
    61  bo->tbo.base.funcs = &radeon_gem_object_funcs;  in radeon_gem_prime_import_sg_table()
    68  return &bo->tbo.base;  in radeon_gem_prime_import_sg_table()
    98  if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm))  in radeon_gem_prime_export()
radeon_gem.c
    89  ttm_bo_put(&robj->tbo);  in radeon_gem_object_free()
   132  *obj = &robj->tbo.base;  in radeon_gem_object_create()
   163  r = dma_resv_wait_timeout(robj->tbo.base.resv,  in radeon_gem_set_domain()
   269  struct radeon_device *rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_gem_object_mmap()
   271  if (radeon_ttm_tt_has_userptr(rdev, bo->tbo.ttm))  in radeon_gem_object_mmap()
   390  r = radeon_ttm_tt_set_userptr(rdev, bo->tbo.ttm, args->addr, args->flags);  in radeon_gem_userptr_ioctl()
   409  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in radeon_gem_userptr_ioctl()
   477  if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm)) {  in radeon_mode_dumb_mmap()
   509  r = dma_resv_test_signaled(robj->tbo.base.resv, DMA_RESV_USAGE_READ);  in radeon_gem_busy_ioctl()
   515  cur_placement = READ_ONCE(robj->tbo.resource->mem_type);  in radeon_gem_busy_ioctl()
   [all …]
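The radeon_gem_busy_ioctl() hits show how an "is this BO busy?" query reduces to dma_resv_test_signaled() on the GEM object's reservation, plus a racy read of the current memory type. A minimal sketch under those assumptions; my_bo_busy_query is an illustrative helper, and both answers are only hints because they can change immediately after the call.

#include <linux/compiler.h>
#include <linux/dma-resv.h>
#include <linux/types.h>
#include <drm/ttm/ttm_bo.h>

/* Report whether any read/write fence is still pending and sample the placement. */
static void my_bo_busy_query(struct ttm_buffer_object *bo, bool *busy, u32 *mem_type)
{
        *busy = !dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_READ);
        *mem_type = READ_ONCE(bo->resource->mem_type);
}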
/linux/drivers/gpu/drm/vmwgfx/
vmwgfx_bo.c
    39  WARN_ON(vbo->tbo.base.funcs &&  in vmw_bo_release()
    40  kref_read(&vbo->tbo.base.refcount) != 0);  in vmw_bo_release()
    64  drm_gem_object_release(&vbo->tbo.base);  in vmw_bo_release()
    98  struct ttm_buffer_object *bo = &buf->tbo;  in vmw_bo_pin_in_placement()
   134  struct ttm_buffer_object *bo = &buf->tbo;  in vmw_bo_pin_in_vram_or_gmr()
   203  struct ttm_buffer_object *bo = &buf->tbo;  in vmw_bo_pin_in_start_of_vram()
   219  buf->tbo.pin_count == 0) {  in vmw_bo_pin_in_start_of_vram()
   261  struct ttm_buffer_object *bo = &buf->tbo;  in vmw_bo_unpin()
   308  struct ttm_buffer_object *bo = &vbo->tbo;  in vmw_bo_pin_reserved()
   353  return vmw_bo_map_and_cache_size(vbo, vbo->tbo.base.size);  in vmw_bo_map_and_cache()
   [all …]
vmwgfx_gem.c
   166  (*p_vbo)->tbo.base.funcs = &vmw_gem_object_funcs;  in vmw_gem_object_create()
   190  ret = drm_gem_handle_create(filp, &(*p_vbo)->tbo.base, handle);  in vmw_gem_object_create_with_handle()
   220  vbo->tbo.base.funcs = &vmw_gem_object_funcs;  in vmw_prime_import_sg_table()
   222  gem = &vbo->tbo.base;  in vmw_prime_import_sg_table()
   246  rep->map_handle = drm_vma_node_offset_addr(&vbo->tbo.base.vma_node);  in vmw_gem_object_create_ioctl()
   250  drm_gem_object_put(&vbo->tbo.base);  in vmw_gem_object_create_ioctl()
   262  switch (bo->tbo.resource->mem_type) {  in vmw_bo_print_info()
   283  switch (bo->tbo.type) {  in vmw_bo_print_info()
   299  id, bo->tbo.base.size, placement, type);  in vmw_bo_print_info()
   301  bo->tbo.priority,  in vmw_bo_print_info()
   [all …]
vmwgfx_bo.h
    82  struct ttm_buffer_object tbo;  member
   163  vbo->tbo.priority = i;  in vmw_bo_prio_adjust()
   168  vbo->tbo.priority = 3;  in vmw_bo_prio_adjust()
   207  ttm_bo_put(&tmp_buf->tbo);  in vmw_bo_unreference()
   212  ttm_bo_get(&buf->tbo);  in vmw_bo_reference()
   218  drm_gem_object_get(&vbo->tbo.base);  in vmw_user_bo_ref()
   228  drm_gem_object_put(&tmp_buf->tbo.base);  in vmw_user_bo_unref()
   233  return container_of((gobj), struct vmw_bo, tbo.base);  in to_vmw_bo()
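vmwgfx_bo.h keeps two reference paths side by side: vmw_bo_reference()/vmw_bo_unreference() work on the TTM object's refcount, while vmw_user_bo_ref()/vmw_user_bo_unref() go through the embedded GEM object. A sketch of both models on a generic wrapper; my_bo and the helper names are illustrative, and clearing the caller's pointer mirrors the vmwgfx style.

#include <drm/drm_gem.h>
#include <drm/ttm/ttm_bo.h>

struct my_bo {
        struct ttm_buffer_object tbo;
};

/* TTM-side reference, for code that only deals with the ttm_buffer_object. */
static inline void my_bo_reference(struct my_bo *buf)
{
        ttm_bo_get(&buf->tbo);
}

static inline void my_bo_unreference(struct my_bo **buf)
{
        struct my_bo *tmp = *buf;

        *buf = NULL;
        if (tmp)
                ttm_bo_put(&tmp->tbo);
}

/* GEM-side reference, for objects exposed to userspace through handles. */
static inline void my_user_bo_ref(struct my_bo *vbo)
{
        drm_gem_object_get(&vbo->tbo.base);
}

static inline void my_user_bo_unref(struct my_bo **buf)
{
        struct my_bo *tmp = *buf;

        *buf = NULL;
        if (tmp)
                drm_gem_object_put(&tmp->tbo.base);
}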
vmwgfx_page_dirty.c
    85  pgoff_t offset = drm_vma_node_start(&vbo->tbo.base.vma_node);  in vmw_bo_dirty_scan_pagetable()
    86  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_scan_pagetable()
   123  unsigned long offset = drm_vma_node_start(&vbo->tbo.base.vma_node);  in vmw_bo_dirty_scan_mkwrite()
   124  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_scan_mkwrite()
   130  num_marked = wp_shared_mapping_range(vbo->tbo.bdev->dev_mapping,  in vmw_bo_dirty_scan_mkwrite()
   188  unsigned long offset = drm_vma_node_start(&vbo->tbo.base.vma_node);  in vmw_bo_dirty_pre_unmap()
   189  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_pre_unmap()
   212  unsigned long offset = drm_vma_node_start(&vbo->tbo.base.vma_node);  in vmw_bo_dirty_unmap()
   213  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_unmap()
   233  pgoff_t num_pages = PFN_UP(vbo->tbo.resource->size);  in vmw_bo_dirty_add()
   [all …]
vmwgfx_resource.c
    47  dma_resv_assert_held(gbo->tbo.base.resv);  in vmw_resource_mob_attach()
    75  dma_resv_assert_held(gbo->tbo.base.resv);  in vmw_resource_mob_detach()
   127  struct ttm_buffer_object *bo = &res->guest_memory_bo->tbo;  in vmw_resource_release()
   346  BUG_ON(res->guest_memory_bo->tbo.base.size < size);  in vmw_resource_buf_alloc()
   534  ttm_bo_get(&res->guest_memory_bo->tbo);  in vmw_resource_check_buffer()
   535  val_buf->bo = &res->guest_memory_bo->tbo;  in vmw_resource_check_buffer()
   548  ret = ttm_bo_validate(&res->guest_memory_bo->tbo,  in vmw_resource_check_buffer()
   697  val_buf.bo = &res->guest_memory_bo->tbo;  in vmw_resource_validate()
   763  .bo = &vbo->tbo,  in vmw_resource_unbind_list()
   767  dma_resv_assert_held(vbo->tbo.base.resv);  in vmw_resource_unbind_list()
   [all …]
vmwgfx_mob.c
   207  bo = &otable->page_table->pt_bo->tbo;  in vmw_takedown_otable_base()
   267  &batch->otable_bo->tbo,  in vmw_otable_batch_setup()
   284  vmw_bo_unpin_unlocked(&batch->otable_bo->tbo);  in vmw_otable_batch_setup()
   285  ttm_bo_put(&batch->otable_bo->tbo);  in vmw_otable_batch_setup()
   336  struct ttm_buffer_object *bo = &batch->otable_bo->tbo;  in vmw_otable_batch_takedown()
   502  struct ttm_buffer_object *bo = &mob->pt_bo->tbo;  in vmw_mob_pt_setup()
   539  vmw_bo_unpin_unlocked(&mob->pt_bo->tbo);  in vmw_mob_destroy()
   559  struct ttm_buffer_object *bo = &mob->pt_bo->tbo;  in vmw_mob_unbind()
   651  vmw_bo_unpin_unlocked(&mob->pt_bo->tbo);  in vmw_mob_bind()
vmwgfx_blit.c
   428  container_of(bo->tbo.bdev, struct vmw_private, bdev);  in map_external()
   432  if (bo->tbo.base.import_attach) {  in map_external()
   433  ret = dma_buf_vmap(bo->tbo.base.dma_buf, map);  in map_external()
   450  if (bo->tbo.base.import_attach)  in unmap_external()
   451  dma_buf_vunmap(bo->tbo.base.dma_buf, map);  in unmap_external()
   463  container_of(dst->tbo.bdev, struct vmw_private, bdev);  in vmw_external_bo_copy()
   464  size_t dst_size = dst->tbo.resource->size;  in vmw_external_bo_copy()
   465  size_t src_size = src->tbo.resource->size;  in vmw_external_bo_copy()
   548  struct ttm_buffer_object *src = &vmw_src->tbo;  in vmw_bo_cpu_blit()
   549  struct ttm_buffer_object *dst = &vmw_dst->tbo;  in vmw_bo_cpu_blit()
/linux/drivers/gpu/drm/xe/
xe_ttm_sys_mgr.c
    19  struct ttm_buffer_object *tbo;  member
    30  struct ttm_buffer_object *tbo,  in xe_ttm_sys_mgr_new() argument
    41  node->tbo = tbo;  in xe_ttm_sys_mgr_new()
    42  ttm_resource_init(tbo, place, &node->base.base);  in xe_ttm_sys_mgr_new()
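xe_ttm_sys_mgr_new() is the allocation hook of a ttm_resource_manager: it allocates a manager-private node, remembers the owning tbo, and initialises the embedded ttm_resource against the BO and the requested placement. A stripped-down sketch of such a hook; the node here embeds ttm_resource directly, whereas the xe node wraps a range-manager node (hence node->base.base above), and my_mgr_node/my_mgr_new are illustrative names.

#include <linux/errno.h>
#include <linux/slab.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_placement.h>
#include <drm/ttm/ttm_resource.h>

/* Manager-private bookkeeping wrapped around the generic ttm_resource. */
struct my_mgr_node {
        struct ttm_resource base;
        struct ttm_buffer_object *tbo;
};

static int my_mgr_new(struct ttm_resource_manager *man,
                      struct ttm_buffer_object *tbo,
                      const struct ttm_place *place,
                      struct ttm_resource **res)
{
        struct my_mgr_node *node;

        node = kzalloc(sizeof(*node), GFP_KERNEL);
        if (!node)
                return -ENOMEM;

        node->tbo = tbo;
        /* Fills in size, placement and accounting for the new resource. */
        ttm_resource_init(tbo, place, &node->base);

        *res = &node->base;
        return 0;
}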