/linux/drivers/gpu/drm/ttm/
ttm_bo.c (matches in ttm_bo_mem_space_debug(), ttm_bo_move_to_lru_tail() and ttm_bo_set_bulk_move()):
     54:  static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo,
     65:          man = ttm_manager_type(bo->bdev, mem_type);
     79:  void ttm_bo_move_to_lru_tail(struct ttm_buffer_object *bo)
     81:          dma_resv_assert_held(bo->base.resv);
     83:          if (bo->resource)
     84:                  ttm_resource_move_to_lru_tail(bo->resource);
    102:  void ttm_bo_set_bulk_move(struct ttm_buffer_object *bo,
    105:          dma_resv_assert_held(bo->base.resv);
    107:          if (bo->bulk_move == bulk)
    110:          spin_lock(&bo->bdev->lru_lock);
    [all …]
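
The asserts above encode the locking contract: ttm_bo_move_to_lru_tail() requires the BO's dma_resv (reservation) lock, and the LRU lists themselves are serialized by the device's lru_lock, visible in ttm_bo_set_bulk_move(). A minimal caller sketch under those rules; driver_touch_bo() is a hypothetical name, not a function from ttm_bo.c:

    #include <linux/spinlock.h>
    #include <drm/ttm/ttm_bo.h>

    /* Hedged sketch: bump a BO to the LRU tail, holding both locks the
     * asserts in ttm_bo.c demand. */
    static void driver_touch_bo(struct ttm_buffer_object *bo)
    {
            if (dma_resv_lock(bo->base.resv, NULL))
                    return;                 /* satisfies dma_resv_assert_held() */

            spin_lock(&bo->bdev->lru_lock); /* LRU lists live under lru_lock */
            ttm_bo_move_to_lru_tail(bo);
            spin_unlock(&bo->bdev->lru_lock);

            dma_resv_unlock(bo->base.resv);
    }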
|
ttm_bo_util.c (matches in ttm_bo_move_memcpy(); line 46 is a struct member):
     46:          struct ttm_buffer_object *bo;
    146:  int ttm_bo_move_memcpy(struct ttm_buffer_object *bo,
    150:          struct ttm_device *bdev = bo->bdev;
    152:                  ttm_manager_type(bo->bdev, dst_mem->mem_type);
    153:          struct ttm_tt *ttm = bo->ttm;
    154:          struct ttm_resource *src_mem = bo->resource;
    170:          ret = ttm_bo_populate(bo, ctx);
    177:                  dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm);
    183:                  src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo->ttm);
    195:          ttm_bo_move_sync_cleanup(bo, dst_mem);
    [all …]
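
The shape of the move is: populate the TTM, build kmap iterators over source and destination, copy, then swap the resources without a fence. A heavily condensed skeleton, assuming both placements are TT-backed (the real code falls back to linear-io iterators for VRAM-like placements, and computes a clear/skip condition that is elided here):

    ret = ttm_bo_populate(bo, ctx);         /* ensure CPU-visible backing pages */
    if (ret)
            return ret;

    /* Both iterators walk the TTM page array in this simplified case. */
    dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm);
    src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo->ttm);

    ttm_move_memcpy(false, PFN_UP(dst_mem->size), dst_iter, src_iter);

    ttm_bo_move_sync_cleanup(bo, dst_mem);  /* CPU copy: no fence to wait on */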
|
ttm_bo_vm.c (matches in ttm_bo_vm_fault_idle() and ttm_bo_io_mem_pfn()):
     43:  static vm_fault_t ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo,
     51:          if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_KERNEL))
     63:                  drm_gem_object_get(&bo->base);
     65:                  (void)dma_resv_wait_timeout(bo->base.resv,
     68:                  dma_resv_unlock(bo->base.resv);
     69:                  drm_gem_object_put(&bo->base);
     76:          err = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL, true,
     86:  static unsigned long ttm_bo_io_mem_pfn(struct ttm_buffer_object *bo,
     89:          struct ttm_device *bdev = bo->bdev;
     92:                  return bdev->funcs->io_mem_pfn(bo, page_offset);
    [all …]
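
ttm_bo_vm_fault_idle() first polls dma_resv_test_signaled() and only falls back to a blocking dma_resv_wait_timeout() on the DMA_RESV_USAGE_KERNEL fences that guard the backing store. A condensed sketch of that wait; example_wait_idle() is a hypothetical name, and the mmap_lock-dropping retry path of the real function is elided:

    static vm_fault_t example_wait_idle(struct ttm_buffer_object *bo)
    {
            long err;

            /* Fast path: all kernel fences already signaled. */
            if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_KERNEL))
                    return 0;

            /* Interruptible wait on every kernel fence. */
            err = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL,
                                        true, MAX_SCHEDULE_TIMEOUT);
            if (err < 0)
                    return err == -ERESTARTSYS ? VM_FAULT_NOPAGE :
                                                 VM_FAULT_SIGBUS;
            return 0;
    }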
|
ttm_resource.c (matches in ttm_lru_bulk_move_tail(), ttm_lru_bulk_move_pos(), ttm_lru_bulk_move_add() and ttm_resource_is_swapped()):
    170:          lockdep_assert_held(&pos->first->bo->bdev->lru_lock);
    171:          dma_resv_assert_held(pos->first->bo->base.resv);
    172:          dma_resv_assert_held(pos->last->bo->base.resv);
    174:          man = ttm_manager_type(pos->first->bo->bdev, i);
    186:          return &bulk->pos[res->mem_type][res->bo->priority];
    235:          WARN_ON(pos->first->bo->base.resv != res->bo->base.resv);
    259:  static bool ttm_resource_is_swapped(struct ttm_resource *res, struct ttm_buffer_object *bo)
    267:          if (bo->resource != res || !bo->ttm)
    270:          dma_resv_assert_held(bo->base.resv);
    271:          return ttm_tt_is_swapped(bo->ttm);
    [all …]
|
/linux/drivers/gpu/drm/qxl/
qxl_object.c (matches in qxl_ttm_bo_destroy() and qxl_ttm_bo_is_qxl_bo()):
     34:          struct qxl_bo *bo;
     37:          bo = to_qxl_bo(tbo);
     38:          qdev = to_qxl(bo->tbo.base.dev);
     40:          qxl_surface_evict(qdev, bo, false);
     41:          WARN_ON_ONCE(bo->map_count > 0);
     43:          list_del_init(&bo->list);
     45:          drm_gem_object_release(&bo->tbo.base);
     46:          kfree(bo);
     49:  bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo)
     51:          if (bo->destroy == &qxl_ttm_bo_destroy)
    [all …]
|
qxl_object.h (matches in qxl_bo_reserve(), qxl_bo_unreserve() and qxl_bo_size()):
     30:  static inline int qxl_bo_reserve(struct qxl_bo *bo)
     34:          r = ttm_bo_reserve(&bo->tbo, true, false, NULL);
     37:                  struct drm_device *ddev = bo->tbo.base.dev;
     39:                  dev_err(ddev->dev, "%p reserve failed\n", bo);
     46:  static inline void qxl_bo_unreserve(struct qxl_bo *bo)
     48:          ttm_bo_unreserve(&bo->tbo);
     51:  static inline unsigned long qxl_bo_size(struct qxl_bo *bo)
     53:          return bo->tbo.base.size;
     62:  int qxl_bo_pin_and_vmap(struct qxl_bo *bo, struct iosys_map *map);
     63:  int qxl_bo_vmap_locked(struct qxl_bo *bo, struct iosys_map *map);
    [all …]
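
qxl_bo_reserve()/qxl_bo_unreserve() are thin wrappers around ttm_bo_reserve() that bracket any access to BO state. A minimal usage sketch; qxl_example_touch() is a hypothetical caller, not a function from this header:

    static int qxl_example_touch(struct qxl_bo *bo)
    {
            int r;

            r = qxl_bo_reserve(bo);         /* interruptible, may return -ERESTARTSYS */
            if (r)
                    return r;

            /* The reservation (dma_resv) lock is held here, so the BO's
             * state and placement can be inspected or changed safely. */

            qxl_bo_unreserve(bo);
            return 0;
    }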
|
/linux/drivers/gpu/drm/radeon/
radeon_object.c (matches in radeon_ttm_bo_destroy(); line 45 is a file-scope forward declaration):
     45:  static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
     54:          struct radeon_bo *bo;
     56:          bo = container_of(tbo, struct radeon_bo, tbo);
     58:          mutex_lock(&bo->rdev->gem.mutex);
     59:          list_del_init(&bo->list);
     60:          mutex_unlock(&bo->rdev->gem.mutex);
     61:          radeon_bo_clear_surface_reg(bo);
     62:          WARN_ON_ONCE(!list_empty(&bo->va));
     63:          if (bo->tbo.base.import_attach)
     64:                  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
    [all …]
|
radeon_object.h (matches in radeon_bo_reserve(), radeon_bo_unreserve(), radeon_bo_gpu_offset() and radeon_bo_size()):
     64:  static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr)
     68:          r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
     71:                  dev_err(bo->rdev->dev, "%p reserve failed\n", bo);
     77:  static inline void radeon_bo_unreserve(struct radeon_bo *bo)
     79:          ttm_bo_unreserve(&bo->tbo);
     91:  static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo)
     96:          rdev = radeon_get_rdev(bo->tbo.bdev);
     98:          switch (bo->tbo.resource->mem_type) {
    107:          return (bo->tbo.resource->start << PAGE_SHIFT) + start;
    110:  static inline unsigned long radeon_bo_size(struct radeon_bo *bo)
    [all …]
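
radeon_bo_gpu_offset() computes a GPU address as the BO's page offset within its placement plus that placement's aperture base. A hedged reconstruction of the elided switch body; the mc.vram_start/mc.gtt_start field names are assumptions from the radeon driver, not taken from the snippet:

    switch (bo->tbo.resource->mem_type) {
    case TTM_PL_VRAM:
            start = rdev->mc.vram_start;    /* assumed VRAM aperture base */
            break;
    case TTM_PL_TT:
            start = rdev->mc.gtt_start;     /* assumed GTT aperture base */
            break;
    }
    /* resource->start is in pages; shift to bytes and add the base. */
    return (bo->tbo.resource->start << PAGE_SHIFT) + start;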
|
radeon_mn.c (matches in radeon_mn_invalidate() and radeon_mn_register()):
     53:          struct radeon_bo *bo = container_of(mn, struct radeon_bo, notifier);
     57:          if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))
     63:          r = radeon_bo_reserve(bo, true);
     69:          r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,
     74:          radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);
     75:          r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
     79:          radeon_bo_unreserve(bo);
     96:  int radeon_mn_register(struct radeon_bo *bo, unsigned long addr)
    100:          ret = mmu_interval_notifier_insert(&bo->notifier, current->mm, addr,
    101:                  radeon_bo_size(bo), &radeon_mn_ops);
    [all …]
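
The invalidate path follows a fixed sequence: reserve the BO, drain every fence down to bookkeeping ones, then revalidate into the CPU domain so the userptr pages are unbound before the MM takes them away. A condensed sketch of that sequence; example_invalidate() is a hypothetical wrapper around the calls shown above, with the notifier plumbing elided:

    static void example_invalidate(struct radeon_bo *bo)
    {
            struct ttm_operation_ctx ctx = { false, false };
            long r;

            if (radeon_bo_reserve(bo, true))
                    return;

            /* Drain all users, including bookkeeping fences, before the
             * userptr pages go away under the GPU. */
            r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,
                                      false, MAX_SCHEDULE_TIMEOUT);
            if (r <= 0)
                    pr_err("failed to wait for user bo\n");

            /* Revalidating into the CPU domain unbinds the GTT mapping. */
            radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);
            if (ttm_bo_validate(&bo->tbo, &bo->placement, &ctx))
                    pr_err("failed to move user bo to CPU domain\n");

            radeon_bo_unreserve(bo);
    }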
|
radeon_prime.c (matches in radeon_gem_prime_get_sg_table(), radeon_gem_prime_import_sg_table() and radeon_gem_prime_pin()):
     39:          struct radeon_bo *bo = gem_to_radeon_bo(obj);
     41:          return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,
     42:                                       bo->tbo.ttm->num_pages);
     51:          struct radeon_bo *bo;
     56:                  RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo);
     61:          bo->tbo.base.funcs = &radeon_gem_object_funcs;
     64:          list_add_tail(&bo->list, &rdev->gem.objects);
     67:          bo->prime_shared_count = 1;
     68:          return &bo->tbo.base;
     73:          struct radeon_bo *bo = gem_to_radeon_bo(obj);
    [all …]
|
/linux/drivers/gpu/drm/panthor/
panthor_gem.c (matches in panthor_gem_debugfs_bo_init(), panthor_gem_debugfs_bo_add() and panthor_gem_debugfs_bo_rm()):
     20:  static void panthor_gem_debugfs_bo_init(struct panthor_gem_object *bo)
     22:          INIT_LIST_HEAD(&bo->debugfs.node);
     25:  static void panthor_gem_debugfs_bo_add(struct panthor_gem_object *bo)
     27:          struct panthor_device *ptdev = container_of(bo->base.base.dev,
     30:          bo->debugfs.creator.tgid = current->group_leader->pid;
     31:          get_task_comm(bo->debugfs.creator.process_name, current->group_leader);
     34:          list_add_tail(&bo->debugfs.node, &ptdev->gems.node);
     38:  static void panthor_gem_debugfs_bo_rm(struct panthor_gem_object *bo)
     40:          struct panthor_device *ptdev = container_of(bo->base.base.dev,
     43:          if (list_empty(&bo->debugfs.node))
    [all …]
|
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_dma_buf.c (matches in dma_buf_attach_adev() and amdgpu_dma_buf_attach()):
     61:          struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
     63:          return amdgpu_ttm_adev(bo->tbo.bdev);
     82:          struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
     83:          struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
     95:              bo->flags & AMDGPU_GEM_CREATE_GFX12_DCC)
    107:              bo->flags & AMDGPU_GEM_CREATE_GFX12_DCC)
    110:          if (!amdgpu_dmabuf_is_xgmi_accessible(attach_adev, bo) &&
    114:          r = dma_resv_lock(bo->tbo.base.resv, NULL);
    118:          amdgpu_vm_bo_update_shared(bo);
    120:          dma_resv_unlock(bo->tbo.base.resv);
    [all …]
|
amdgpu_amdkfd_gpuvm.c (matches in amdgpu_amdkfd_release_notify() and create_dmamap_sg_bo()):
    309:  void amdgpu_amdkfd_release_notify(struct amdgpu_bo *bo)
    311:          struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
    312:          u32 alloc_flags = bo->kfd_bo->alloc_flags;
    313:          u64 size = amdgpu_bo_size(bo);
    316:                  bo->xcp_id);
    318:          kfree(bo->kfd_bo);
    338:          ret = amdgpu_bo_reserve(mem->bo, false);
    343:          flags |= mem->bo->flags & (AMDGPU_GEM_CREATE_COHERENT |
    346:          ret = amdgpu_gem_object_create(adev, mem->bo->tbo.base.size, 1,
    348:                  ttm_bo_type_sg, mem->bo->tbo.base.resv, &gem_obj, 0);
    [all …]
|
amdgpu_vm.c (matches in amdgpu_vm_assert_locked(), amdgpu_vm_bo_evicted(), amdgpu_vm_bo_relocated(), amdgpu_vm_bo_reset_state_machine() and amdgpu_vm_update_shared()):
    138:          dma_resv_assert_held(vm->root.bo->tbo.base.resv);
    152:          struct amdgpu_bo *bo = vm_bo->bo;
    157:          if (bo->tbo.type == ttm_bo_type_kernel)
    238:          if (vm_bo->bo->parent) {
    282:          struct amdgpu_bo *bo = vm_bo->bo;
    285:          if (!bo || bo->tbo.type != ttm_bo_type_kernel)
    287:          else if (bo->parent)
    304:          struct amdgpu_bo *bo = base->bo;
    305:          uint64_t size = amdgpu_bo_size(bo);
    306:          uint32_t bo_memtype = amdgpu_bo_mem_stats_placement(bo);
    [all …]
|
/linux/drivers/gpu/drm/xe/tests/
xe_bo.c (matches in ccs_test_migrate() and ccs_test_run_tile()):
     24:  static int ccs_test_migrate(struct xe_tile *tile, struct xe_bo *bo,
     38:          ret = xe_bo_validate(bo, NULL, false, exec);
     46:          fence = xe_migrate_clear(tile->migrate, bo, bo->ttm.resource,
     63:          ret = xe_bo_evict(bo, exec);
     70:          timeout = dma_resv_wait_timeout(bo->ttm.base.resv,
     84:          ttm = bo->ttm.ttm;
     90:          ccs_page = xe_bo_ccs_pages_start(bo) >> PAGE_SHIFT;
    109:          offset = xe_device_ccs_bytes(tile_to_xe(tile), xe_bo_size(bo));
    129:          struct xe_bo *bo;
    142:          bo = xe_bo_create_user(xe, NULL, SZ_1M, DRM_XE_GEM_CPU_CACHING_WC,
    [all …]
|
/linux/drivers/gpu/drm/ttm/tests/
ttm_tt_test.c (matches in ttm_tt_init_basic(), ttm_tt_init_misaligned() and ttm_tt_fini_basic()):
     42:          struct ttm_buffer_object *bo;
     53:          bo = ttm_bo_kunit_init(test, test->priv, params->size, NULL);
     55:          err = ttm_tt_init(tt, bo, page_flags, caching, extra_pages);
     69:          struct ttm_buffer_object *bo;
     79:          bo = ttm_bo_kunit_init(test, test->priv, size, NULL);
     82:          bo->base.size += 1;
     84:          err = ttm_tt_init(tt, bo, 0, caching, 0);
     92:          struct ttm_buffer_object *bo;
    100:          bo = ttm_bo_kunit_init(test, test->priv, BO_SIZE, NULL);
    102:          err = ttm_tt_init(tt, bo, 0, caching, 0);
    [all …]
|
ttm_bo_validate_test.c (matches in ttm_bo_init_reserved_sys_man()):
    112:          struct ttm_buffer_object *bo;
    116:          bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL);
    117:          KUNIT_ASSERT_NOT_NULL(test, bo);
    122:          drm_gem_private_object_init(priv->drm, &bo->base, size);
    124:          err = ttm_bo_init_reserved(priv->ttm_dev, bo, bo_type, placement,
    127:          dma_resv_unlock(bo->base.resv);
    130:          KUNIT_EXPECT_EQ(test, kref_read(&bo->kref), 1);
    131:          KUNIT_EXPECT_PTR_EQ(test, bo->bdev, priv->ttm_dev);
    132:          KUNIT_EXPECT_EQ(test, bo->type, bo_type);
    133:          KUNIT_EXPECT_EQ(test, bo->page_alignment, PAGE_SIZE);
    [all …]
|
ttm_resource_test.c (matches in ttm_init_test_mocks(), ttm_resource_init_basic() and ttm_resource_init_pinned(); line 20 is a struct member):
     20:          struct ttm_buffer_object *bo;
     57:          priv->bo = ttm_bo_kunit_init(test, priv->devs, size, NULL);
    112:          struct ttm_buffer_object *bo;
    118:          bo = priv->bo;
    130:          KUNIT_ASSERT_TRUE(test, list_empty(&man->lru[bo->priority]));
    132:          ttm_resource_init(bo, place, res);
    138:          KUNIT_ASSERT_PTR_EQ(test, res->bo, bo);
    146:          KUNIT_ASSERT_TRUE(test, list_is_singular(&man->lru[bo->priority]));
    155:          struct ttm_buffer_object *bo;
    160:          bo = priv->bo;
    [all …]
|
/linux/drivers/gpu/drm/virtio/
virtgpu_prime.c (matches in virtgpu_virtio_get_uuid(), virtgpu_gem_map_dma_buf(), virtgpu_gem_unmap_dma_buf() and virtio_gpu_resource_assign_uuid()):
     36:          struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj);
     39:          wait_event(vgdev->resp_wq, bo->uuid_state != STATE_INITIALIZING);
     40:          if (bo->uuid_state != STATE_OK)
     43:          uuid_copy(uuid, &bo->uuid);
     53:          struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj);
     55:          if (virtio_gpu_is_vram(bo))
     56:                  return virtio_gpu_vram_map_dma_buf(bo, attach->dev, dir);
     66:          struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj);
     68:          if (virtio_gpu_is_vram(bo)) {
     92:                                       struct virtio_gpu_object *bo)
    [all …]
|
/linux/drivers/gpu/drm/xe/
xe_drm_client.c (matches in xe_drm_client_add_bo() and xe_drm_client_remove_bo()):
    133:                              struct xe_bo *bo)
    135:          XE_WARN_ON(bo->client);
    136:          XE_WARN_ON(!list_empty(&bo->client_link));
    138:          bo->client = xe_drm_client_get(client);
    140:          list_add_tail(&bo->client_link, &client->bos_list);
    153:  void xe_drm_client_remove_bo(struct xe_bo *bo)
    155:          struct xe_device *xe = ttm_to_xe_device(bo->ttm.bdev);
    156:          struct xe_drm_client *client = bo->client;
    158:          xe_assert(xe, !kref_read(&bo->ttm.base.refcount));
    161:          list_del_init(&bo->client_link);
    [all …]
|
/linux/drivers/accel/qaic/
qaic_data.c (matches in bo_queued(), free_slice() and qaic_map_one_slice()):
    145:  static inline bool bo_queued(struct qaic_bo *bo)
    147:          return !list_empty(&bo->xfer_list);
    164:          slice->bo->total_slice_nents -= slice->nents;
    166:          drm_gem_object_put(&slice->bo->base);
    391:  static int qaic_map_one_slice(struct qaic_device *qdev, struct qaic_bo *bo,
    398:          ret = clone_range_of_sgt_for_slice(qdev, &sgt, bo->sgt, slice_ent->size, slice_ent->offset);
    417:          slice->dir = bo->dir;
    418:          slice->bo = bo;
    426:          bo->total_slice_nents += sgt->nents;
    428:          drm_gem_object_get(&bo->base);
    [all …]
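
qaic_map_one_slice() and free_slice() form a balanced ownership pair: each slice takes one drm_gem_object_get() on its parent BO and accounts its scatter-gather entries in total_slice_nents, and free_slice() undoes both. A minimal sketch of that invariant; slice_attach()/slice_detach() are hypothetical names, not functions from qaic_data.c:

    static void slice_attach(struct bo_slice *slice, struct qaic_bo *bo)
    {
            slice->bo = bo;
            bo->total_slice_nents += slice->nents;
            drm_gem_object_get(&bo->base);          /* slice keeps the BO alive */
    }

    static void slice_detach(struct bo_slice *slice)
    {
            slice->bo->total_slice_nents -= slice->nents;
            drm_gem_object_put(&slice->bo->base);   /* may drop the last ref */
    }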
|
/linux/drivers/gpu/drm/nouveau/
nouveau_bo.c (matches in nouveau_bo_del_ttm()):
    137:  nouveau_bo_del_ttm(struct ttm_buffer_object *bo)
    139:          struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
    141:          struct nouveau_bo *nvbo = nouveau_bo(bo);
    143:          WARN_ON(nvbo->bo.pin_count > 0);
    144:          nouveau_bo_del_io_reserve_lru(bo);
    147:          if (bo->base.import_attach)
    148:                  drm_prime_gem_destroy(&bo->base, bo->sg);
    154:          if (bo->base.dev) {
    161:                  drm_gem_object_release(&bo->base);
    163:                  dma_resv_fini(&bo->base._resv);
    [all …]
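
The teardown ordering matters here: imported dma-buf state is destroyed first, then either the GEM object is released (if the BO ever had a GEM device) or only its private reservation object is finalized. A hedged sketch of that tail, assuming the BO is freed with kfree() at the end as the surrounding destructor implies:

    if (bo->base.import_attach)
            drm_prime_gem_destroy(&bo->base, bo->sg);   /* imported sg first */

    if (bo->base.dev)
            drm_gem_object_release(&bo->base);          /* normal GEM BO */
    else
            dma_resv_fini(&bo->base._resv);             /* GEM-less BO */

    kfree(nvbo);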
|
/linux/drivers/gpu/drm/tegra/
fbdev.c (matches in tegra_fb_mmap(), tegra_fbdev_fb_destroy() and tegra_fbdev_driver_fbdev_probe()):
     28:          struct tegra_bo *bo;
     31:          bo = tegra_fb_get_plane(helper->fb, 0);
     33:          err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);
     37:          return __tegra_gem_mmap(&bo->gem, vma);
     44:          struct tegra_bo *bo = tegra_fb_get_plane(fb, 0);
     49:          if (bo->pages) {
     50:                  vunmap(bo->vaddr);
     51:                  bo->vaddr = NULL;
     80:          struct tegra_bo *bo;
     96:          bo = tegra_bo_create(drm, size, 0);
    [all …]
|
submit.c (matches in gather_bo_get(), gather_bo_release(), gather_bo_put() and gather_bo_pin()):
     44:          struct gather_bo *bo = container_of(host_bo, struct gather_bo, base);
     46:          kref_get(&bo->ref);
     53:          struct gather_bo *bo = container_of(ref, struct gather_bo, ref);
     55:          dma_free_attrs(bo->dev, bo->gather_data_words * 4, bo->gather_data, bo->gather_data_dma,
     57:          kfree(bo);
     62:          struct gather_bo *bo = container_of(host_bo, struct gather_bo, base);
     64:          kref_put(&bo->ref, gather_bo_release);
     68:  gather_bo_pin(struct device *dev, struct host1x_bo *bo, enum dma_data_direction direction)
     70:          struct gather_bo *gather = container_of(bo, struct gather_bo, base);
     79:          map->bo = host1x_bo_get(bo);
    [all …]
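
gather_bo is a textbook kref lifetime: get() bumps the count, put() drops it, and the release callback frees the buffer exactly once after the last reference is gone. A self-contained sketch of the same idiom with a hypothetical example_bo type (none of these names come from submit.c):

    #include <linux/kref.h>
    #include <linux/slab.h>

    struct example_bo {
            struct kref ref;
            void *data;
    };

    static struct example_bo *example_bo_create(size_t size)
    {
            struct example_bo *bo = kzalloc(sizeof(*bo), GFP_KERNEL);

            if (!bo)
                    return NULL;
            bo->data = kzalloc(size, GFP_KERNEL);
            if (!bo->data) {
                    kfree(bo);
                    return NULL;
            }
            kref_init(&bo->ref);            /* refcount starts at 1 */
            return bo;
    }

    static void example_bo_release(struct kref *ref)
    {
            struct example_bo *bo = container_of(ref, struct example_bo, ref);

            kfree(bo->data);
            kfree(bo);                      /* runs once, after the final put */
    }

    static void example_bo_get(struct example_bo *bo)
    {
            kref_get(&bo->ref);
    }

    static void example_bo_put(struct example_bo *bo)
    {
            kref_put(&bo->ref, example_bo_release);
    }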
|
/linux/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_buffer.c (matches in vmw_bo_sg_table(), vmw_ttm_tt_create(), vmw_evict_flags() and vmw_move_notify()):
    264:  const struct vmw_sg_table *vmw_bo_sg_table(struct ttm_buffer_object *bo)
    267:                  container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm);
    400:  static struct ttm_tt *vmw_ttm_tt_create(struct ttm_buffer_object *bo,
    405:          bool external = bo->type == ttm_bo_type_sg;
    411:          vmw_be->dev_priv = vmw_priv_from_ttm(bo->bdev);
    418:                  ret = ttm_sg_tt_init(&vmw_be->dma_ttm, bo, page_flags,
    421:                  ret = ttm_tt_init(&vmw_be->dma_ttm, bo, page_flags,
    432:  static void vmw_evict_flags(struct ttm_buffer_object *bo,
    471:  static void vmw_move_notify(struct ttm_buffer_object *bo,
    475:          vmw_bo_move_notify(bo, new_mem);
    [all …]
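
vmw_ttm_tt_create() picks the ttm_tt flavor from the BO type: dma-buf-backed (ttm_bo_type_sg) objects get an sg-capable ttm_tt via ttm_sg_tt_init(), everything else a page-array one via ttm_tt_init(). A hedged sketch of that branch; the ttm_cached caching mode is an assumption, and error handling is condensed:

    bool external = bo->type == ttm_bo_type_sg;

    if (external)
            ret = ttm_sg_tt_init(&vmw_be->dma_ttm, bo, page_flags,
                                 ttm_cached);
    else
            ret = ttm_tt_init(&vmw_be->dma_ttm, bo, page_flags,
                              ttm_cached, 0);
    if (ret) {
            kfree(vmw_be);          /* init failed, drop the backend */
            return NULL;
    }
    return &vmw_be->dma_ttm;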
|