Searched refs:bo (Results 1 – 25 of 328) sorted by relevance

/linux/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
 47  static int __bo_init(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, unsigned int pgnr)
 59      memset(bo, 0, sizeof(*bo));
 60      mutex_init(&bo->mutex);
 63      INIT_LIST_HEAD(&bo->list);
 65      bo->bdev = bdev;
 66      bo->vmap_addr = NULL;
 67      bo->status = HMM_BO_FREE;
 68      bo->start = bdev->start;
 69      bo->pgnr = pgnr;
 70      bo->end = bo->start + pgnr_to_size(pgnr);
[all …]
/linux/drivers/accel/ivpu/
ivpu_gem.c
 25  static inline void ivpu_dbg_bo(struct ivpu_device *vdev, struct ivpu_bo *bo, const char *action)
 29      action, bo, bo->vpu_addr, ivpu_bo_size(bo), bo->ctx ? bo->ctx->id : 0,
 30      (bool)bo->base.pages, (bool)bo->base.sgt, bo->mmu_mapped, bo->base.map_wc,
 31      (bool)bo->base.base.import_attach);
 41  int __must_check ivpu_bo_pin(struct ivpu_bo *bo)
 43      struct ivpu_device *vdev = ivpu_bo_to_vdev(bo);
 46      mutex_lock(&bo->lock);
 48      ivpu_dbg_bo(vdev, bo, "pin");
 49      drm_WARN_ON(&vdev->drm, !bo->ctx);
 51      if (!bo->mmu_mapped) {
[all …]
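The pin path above takes bo->lock before checking mmu_mapped, and ivpu_bo_pin() is declared __must_check, so its result cannot be dropped. A minimal caller sketch; the function name and the surrounding job-setup step are hypothetical, not driver code:

static int example_prepare_bo(struct ivpu_bo *bo)
{
    int ret;

    /* Maps the BO through the VPU MMU on first pin; return value must be checked. */
    ret = ivpu_bo_pin(bo);
    if (ret)
        return ret;

    /* ... bo->vpu_addr can now be referenced by the submitted job ... */
    return 0;
}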
ivpu_gem.h
 27  int ivpu_bo_pin(struct ivpu_bo *bo);
 34  void ivpu_bo_free(struct ivpu_bo *bo);
 48  static inline void *ivpu_bo_vaddr(struct ivpu_bo *bo)
 50      return bo->base.vaddr;
 53  static inline size_t ivpu_bo_size(struct ivpu_bo *bo)
 55      return bo->base.base.size;
 58  static inline u32 ivpu_bo_cache_mode(struct ivpu_bo *bo)
 60      return bo->flags & DRM_IVPU_BO_CACHE_MASK;
 63  static inline struct ivpu_device *ivpu_bo_to_vdev(struct ivpu_bo *bo)
 65      return to_ivpu_device(bo->base.base.dev);
[all …]
/linux/drivers/gpu/drm/qxl/
qxl_object.c
 34      struct qxl_bo *bo;    (local in qxl_ttm_bo_destroy)
 37      bo = to_qxl_bo(tbo);
 38      qdev = to_qxl(bo->tbo.base.dev);
 40      qxl_surface_evict(qdev, bo, false);
 41      WARN_ON_ONCE(bo->map_count > 0);
 43      list_del_init(&bo->list);
 45      drm_gem_object_release(&bo->tbo.base);
 46      kfree(bo);
 49  bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo)
 51      if (bo->destroy == &qxl_ttm_bo_destroy)
[all …]
qxl_object.h
 30  static inline int qxl_bo_reserve(struct qxl_bo *bo)
 34      r = ttm_bo_reserve(&bo->tbo, true, false, NULL);
 37          struct drm_device *ddev = bo->tbo.base.dev;
 39          dev_err(ddev->dev, "%p reserve failed\n", bo);
 46  static inline void qxl_bo_unreserve(struct qxl_bo *bo)
 48      ttm_bo_unreserve(&bo->tbo);
 51  static inline unsigned long qxl_bo_size(struct qxl_bo *bo)
 53      return bo->tbo.base.size;
 62  int qxl_bo_pin_and_vmap(struct qxl_bo *bo, struct iosys_map *map);
 63  int qxl_bo_vmap_locked(struct qxl_bo *bo, struct iosys_map *map);
[all …]
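qxl_bo_reserve() and qxl_bo_unreserve() are thin wrappers around ttm_bo_reserve()/ttm_bo_unreserve() on the embedded tbo. A minimal sketch of the lock/unlock pattern they imply; the function name is hypothetical:

static int example_with_reserved_bo(struct qxl_bo *bo)
{
    int r;

    r = qxl_bo_reserve(bo);    /* interruptible ttm_bo_reserve() on &bo->tbo */
    if (r)
        return r;              /* failure is already logged via dev_err() */

    /* ... operate on the object while the reservation is held ... */

    qxl_bo_unreserve(bo);
    return 0;
}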
/linux/drivers/gpu/drm/radeon/
radeon_object.c
 45  static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
 54      struct radeon_bo *bo;    (local in radeon_ttm_bo_destroy)
 56      bo = container_of(tbo, struct radeon_bo, tbo);
 58      mutex_lock(&bo->rdev->gem.mutex);
 59      list_del_init(&bo->list);
 60      mutex_unlock(&bo->rdev->gem.mutex);
 61      radeon_bo_clear_surface_reg(bo);
 62      WARN_ON_ONCE(!list_empty(&bo->va));
 63      if (bo->tbo.base.import_attach)
 64          drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
 69  radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo)
134      struct radeon_bo *bo;    (local in radeon_bo_create)
217  radeon_bo_kmap(struct radeon_bo *bo, void **ptr)
245  radeon_bo_kunmap(struct radeon_bo *bo)
254  radeon_bo_ref(struct radeon_bo *bo)
263  radeon_bo_unref(struct radeon_bo **bo)
274  radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset, u64 *gpu_addr)
333  radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr)
338  radeon_bo_unpin(struct radeon_bo *bo)
370      struct radeon_bo *bo, *n;    (local in radeon_bo_force_delete)
486      struct radeon_bo *bo = lobj->robj;    (local in radeon_bo_list_validate)
540  radeon_bo_get_surface_reg(struct radeon_bo *bo)
594  radeon_bo_clear_surface_reg(struct radeon_bo *bo)
609  radeon_bo_set_tiling_flags(struct radeon_bo *bo, uint32_t tiling_flags, uint32_t pitch)
669  radeon_bo_get_tiling_flags(struct radeon_bo *bo, uint32_t *tiling_flags, uint32_t *pitch)
681  radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved, bool force_drop)
710  radeon_bo_move_notify(struct ttm_buffer_object *bo)
722  radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo)
784  radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, bool shared)
[all …]
radeon_object.h
 64  static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr)
 68      r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
 71          dev_err(bo->rdev->dev, "%p reserve failed\n", bo);
 77  static inline void radeon_bo_unreserve(struct radeon_bo *bo)
 79      ttm_bo_unreserve(&bo->tbo);
 91  static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo)
 96      rdev = radeon_get_rdev(bo->tbo.bdev);
 98      switch (bo->tbo.resource->mem_type) {
107      return (bo->tbo.resource->start << PAGE_SHIFT) + start;
110  static inline unsigned long radeon_bo_size(struct radeon_bo *bo)
[all …]
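radeon_bo_gpu_offset() reads bo->tbo.resource, so the buffer has to be reserved (and placed) when it is called. A minimal sketch of a caller, assuming the bo was pinned beforehand; the function name is illustrative:

static int example_read_gpu_offset(struct radeon_bo *bo, u64 *offset)
{
    int r;

    r = radeon_bo_reserve(bo, false);    /* no_intr = false: interruptible */
    if (r)
        return r;

    *offset = radeon_bo_gpu_offset(bo);  /* assumes the bo is already pinned */
    radeon_bo_unreserve(bo);
    return 0;
}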
/linux/drivers/gpu/drm/tegra/
gem.c
 52  static void tegra_bo_put(struct host1x_bo *bo)
 54      struct tegra_bo *obj = host1x_to_tegra_bo(bo);
 59  static struct host1x_bo_mapping *tegra_bo_pin(struct device *dev, struct host1x_bo *bo,
 62      struct tegra_bo *obj = host1x_to_tegra_bo(bo);
 72      map->bo = host1x_bo_get(bo);
173      host1x_bo_put(map->bo);    (in tegra_bo_unpin)
177  static void *tegra_bo_mmap(struct host1x_bo *bo)
179      struct tegra_bo *obj = host1x_to_tegra_bo(bo);
203  static void tegra_bo_munmap(struct host1x_bo *bo, void *addr)
205      struct tegra_bo *obj = host1x_to_tegra_bo(bo);
[all …]
/linux/drivers/gpu/drm/xe/
xe_bo.h
 69  void xe_bo_free(struct xe_bo *bo);
 71  struct xe_bo *___xe_bo_create_locked(struct xe_device *xe, struct xe_bo *bo,
106  int xe_bo_placement_for_flags(struct xe_device *xe, struct xe_bo *bo,
109  static inline struct xe_bo *ttm_to_xe_bo(const struct ttm_buffer_object *bo)
111      return container_of(bo, struct xe_bo, ttm);
119  #define xe_bo_device(bo) ttm_to_xe_device((bo)->ttm.bdev)
121  static inline struct xe_bo *xe_bo_get(struct xe_bo *bo)
123      if (bo)
124          drm_gem_object_get(&bo->ttm.base);
126      return bo;
[all …]
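xe_bo_get() is NULL-safe and simply bumps the underlying GEM object's refcount; xe_bo_put(), seen in the xe_bo_evict.c result below, is the matching release. A small sketch of the get/use/put pattern, with a hypothetical function name:

static void example_hold_bo(struct xe_bo *bo)
{
    bo = xe_bo_get(bo);    /* returns bo unchanged; refcount +1 if non-NULL */
    if (!bo)
        return;

    /* ... the bo cannot be freed while this reference is held ... */

    xe_bo_put(bo);         /* drop the reference taken above */
}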
xe_bo.c
 87  bool xe_bo_is_vram(struct xe_bo *bo)
 89      return resource_is_vram(bo->ttm.resource) ||
 90             resource_is_stolen_vram(xe_bo_device(bo), bo->ttm.resource);
 93  bool xe_bo_is_stolen(struct xe_bo *bo)
 95      return bo->ttm.resource->mem_type == XE_PL_STOLEN;
107  bool xe_bo_has_single_placement(struct xe_bo *bo)
109      return bo->placement.num_placement == 1;
121  bool xe_bo_is_stolen_devmem(struct xe_bo *bo)
123      return xe_bo_is_stolen(bo) &&
124             GRAPHICS_VERx100(xe_bo_device(bo)) >= 1270;
[all …]
xe_bo_evict.c
 30      struct xe_bo *bo;    (local in xe_bo_evict_all)
 56      bo = list_first_entry_or_null(&xe->pinned.external_vram,
 57                                    typeof(*bo), pinned_link);
 58      if (!bo)
 60      xe_bo_get(bo);
 61      list_move_tail(&bo->pinned_link, &still_in_list);
 64      xe_bo_lock(bo, false);
 65      ret = xe_bo_evict_pinned(bo);
 66      xe_bo_unlock(bo);
 67      xe_bo_put(bo);
[all …]
xe_dma_buf.c
 52      struct xe_bo *bo = gem_to_xe_bo(obj);    (local in xe_dma_buf_pin)
 53      struct xe_device *xe = xe_bo_device(bo);
 61      if (xe_bo_is_pinned(bo) && bo->ttm.resource->placement != XE_PL_TT) {
 66      ret = xe_bo_migrate(bo, XE_PL_TT);
 75      ret = xe_bo_pin_external(bo);
 84      struct xe_bo *bo = gem_to_xe_bo(obj);    (local in xe_dma_buf_unpin)
 86      xe_bo_unpin_external(bo);
 94      struct xe_bo *bo = gem_to_xe_bo(obj);    (local in xe_dma_buf_map)
 98      if (!attach->peer2peer && !xe_bo_can_migrate(bo, XE_PL_TT))
101      if (!xe_bo_is_pinned(bo)) {
[all …]
xe_drm_client.c
133                              struct xe_bo *bo)    (signature continuation of xe_drm_client_add_bo)
135      XE_WARN_ON(bo->client);
136      XE_WARN_ON(!list_empty(&bo->client_link));
139      bo->client = xe_drm_client_get(client);
140      list_add_tail(&bo->client_link, &client->bos_list);
153  void xe_drm_client_remove_bo(struct xe_bo *bo)
155      struct xe_device *xe = ttm_to_xe_device(bo->ttm.bdev);
156      struct xe_drm_client *client = bo->client;
158      xe_assert(xe, !kref_read(&bo->ttm.base.refcount));
161      list_del_init(&bo->client_link);
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
 59      struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);    (local in amdgpu_bo_destroy)
 61      amdgpu_bo_kunmap(bo);
 63      if (bo->tbo.base.import_attach)
 64          drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
 65      drm_gem_object_release(&bo->tbo.base);
 66      amdgpu_bo_unref(&bo->parent);
 67      kvfree(bo);
 72      struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);    (local in amdgpu_bo_user_destroy)
 75      ubo = to_amdgpu_bo_user(bo);
 90  bool amdgpu_bo_is_amdgpu_bo(struct ttm_buffer_object *bo)
[all …]
amdgpu_dma_buf.c
 57      struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);    (local in amdgpu_dma_buf_attach)
 58      struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
 76      struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);    (local in amdgpu_dma_buf_pin)
 79      return amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
 92      struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);    (local in amdgpu_dma_buf_unpin)
 94      amdgpu_bo_unpin(bo);
115      struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);    (local in amdgpu_dma_buf_map)
116      struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
120      if (!bo->tbo.pin_count) {
125      if (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM &&
[all …]
amdgpu_object.h
 45  #define to_amdgpu_bo_user(abo) container_of((abo), struct amdgpu_bo_user, bo)
 46  #define to_amdgpu_bo_vm(abo) container_of((abo), struct amdgpu_bo_vm, bo)
 58      void (*destroy)(struct ttm_buffer_object *bo);
129      struct amdgpu_bo bo;    (member)
138      struct amdgpu_bo bo;    (member)
212  static inline int amdgpu_bo_reserve(struct amdgpu_bo *bo, bool no_intr)
214      struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
217      r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
220          dev_err(adev->dev, "%p reserve failed\n", bo);
226  static inline void amdgpu_bo_unreserve(struct amdgpu_bo *bo)
[all …]
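amdgpu_bo_reserve() mirrors the radeon helper above, including the inverted no_intr flag passed to ttm_bo_reserve(). Combined with amdgpu_bo_kunmap() from the amdgpu_object.c result, the usual CPU-access pattern looks roughly like this sketch; the amdgpu_bo_kmap() call and the function name are assumptions inferred from the kunmap shown earlier:

static int example_cpu_access(struct amdgpu_bo *bo)
{
    void *ptr;
    int r;

    r = amdgpu_bo_reserve(bo, false);    /* interruptible reserve */
    if (r)
        return r;

    r = amdgpu_bo_kmap(bo, &ptr);    /* assumed counterpart of amdgpu_bo_kunmap() */
    if (!r) {
        /* ... touch the buffer through ptr ... */
        amdgpu_bo_kunmap(bo);
    }

    amdgpu_bo_unreserve(bo);
    return r;
}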
amdgpu_amdkfd_gpuvm.c
293  void amdgpu_amdkfd_release_notify(struct amdgpu_bo *bo)
295      struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
296      u32 alloc_flags = bo->kfd_bo->alloc_flags;
297      u64 size = amdgpu_bo_size(bo);
300                        bo->xcp_id);
302      kfree(bo->kfd_bo);
322      ret = amdgpu_bo_reserve(mem->bo, false);    (in create_dmamap_sg_bo)
327      flags |= mem->bo->flags & (AMDGPU_GEM_CREATE_COHERENT |
330      ret = amdgpu_gem_object_create(adev, mem->bo->tbo.base.size, 1,
332                                     ttm_bo_type_sg, mem->bo->tbo.base.resv, &gem_obj, 0);
[all …]
/linux/drivers/gpu/drm/vc4/
vc4_bo.c
134      struct vc4_bo *bo = to_vc4_bo(gem_obj);    (local in vc4_bo_set_label)
144      vc4->bo_labels[bo->label].num_allocated--;
145      vc4->bo_labels[bo->label].size_allocated -= gem_obj->size;
147      if (vc4->bo_labels[bo->label].num_allocated == 0 &&
148          is_user_label(bo->label)) {
154          kfree(vc4->bo_labels[bo->label].name);
155          vc4->bo_labels[bo->label].name = NULL;
158      bo->label = label;
166  static void vc4_bo_destroy(struct vc4_bo *bo)
168      struct drm_gem_object *obj = &bo->base.base;
186  vc4_bo_remove_from_cache(struct vc4_bo *bo)
242      struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list,    (local in vc4_bo_cache_purge)
250  vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo)
264  vc4_bo_remove_from_purgeable_pool_locked(struct vc4_bo *bo)
288  vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo)
299      struct vc4_bo *bo = to_vc4_bo(obj);    (local in vc4_bo_purge)
318      struct vc4_bo *bo = list_first_entry(&vc4->purgeable.list,    (local in vc4_bo_userspace_cache_purge)
365      struct vc4_bo *bo = NULL;    (local in vc4_bo_get_from_cache)
397      struct vc4_bo *bo;    (local in vc4_create_object)
428      struct vc4_bo *bo;    (local in vc4_bo_create)
496      struct vc4_bo *bo = NULL;    (local in vc4_bo_dumb_create)
526      struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list,    (local in vc4_bo_cache_free_old)
547      struct vc4_bo *bo = to_vc4_bo(gem_bo);    (local in vc4_free_object)
620  vc4_bo_inc_usecnt(struct vc4_bo *bo)
660  vc4_bo_dec_usecnt(struct vc4_bo *bo)
689      struct vc4_bo *bo = to_vc4_bo(obj);    (local in vc4_prime_export)
720      struct vc4_bo *bo = to_vc4_bo(obj);    (local in vc4_fault)
734      struct vc4_bo *bo = to_vc4_bo(obj);    (local in vc4_gem_object_mmap)
783      struct vc4_bo *bo = NULL;    (local in vc4_create_bo_ioctl)
839      struct vc4_bo *bo = NULL;    (local in vc4_create_shader_bo_ioctl)
918      struct vc4_bo *bo;    (local in vc4_set_tiling_ioctl)
965      struct vc4_bo *bo;    (local in vc4_get_tiling_ioctl)
[all …]
/linux/drivers/gpu/drm/ttm/
ttm_bo_util.c
 42      struct ttm_buffer_object *bo;    (member)
129   * @bo: A pointer to a struct ttm_buffer_object.
136   * and update the (@bo)->mem placement flags. If unsuccessful, the old
142  int ttm_bo_move_memcpy(struct ttm_buffer_object *bo,
146      struct ttm_device *bdev = bo->bdev;
148          ttm_manager_type(bo->bdev, dst_mem->mem_type);
149      struct ttm_tt *ttm = bo->ttm;
150      struct ttm_resource *src_mem = bo->resource;
173      dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm);
179      src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo->ttm);
201  ttm_transfered_destroy(struct ttm_buffer_object *bo)
226  ttm_buffer_object_transfer(struct ttm_buffer_object *bo, struct ttm_buffer_object **new_obj)
290  ttm_io_prot(struct ttm_buffer_object *bo, struct ttm_resource *res, pgprot_t tmp)
309  ttm_bo_ioremap(struct ttm_buffer_object *bo, unsigned long offset, unsigned long size, struct ttm_bo_kmap_obj *map)
335  ttm_bo_kmap_ttm(struct ttm_buffer_object *bo, unsigned long start_page, unsigned long num_pages, struct ttm_bo_kmap_obj *map)
396  ttm_bo_kmap(struct ttm_buffer_object *bo, unsigned long start_page, unsigned long num_pages, struct ttm_bo_kmap_obj *map)
469  ttm_bo_vmap(struct ttm_buffer_object *bo, struct iosys_map *map)
538  ttm_bo_vunmap(struct ttm_buffer_object *bo, struct iosys_map *map)
557  ttm_bo_wait_free_node(struct ttm_buffer_object *bo, bool dst_use_tt)
575  ttm_bo_move_to_ghost(struct ttm_buffer_object *bo, struct dma_fence *fence, bool dst_use_tt)
613  ttm_bo_move_pipeline_evict(struct ttm_buffer_object *bo, struct dma_fence *fence)
651  ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo, struct dma_fence *fence, bool evict, bool pipeline, struct ttm_resource *new_mem)
688  ttm_bo_move_sync_cleanup(struct ttm_buffer_object *bo, struct ttm_resource *new_mem)
714  ttm_bo_pipeline_gutting(struct ttm_buffer_object *bo)
[all …]
ttm_bo_vm.c
 41  static vm_fault_t ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo,
 49      if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_KERNEL))
 61          ttm_bo_get(bo);
 63          (void)dma_resv_wait_timeout(bo->base.resv,
 66          dma_resv_unlock(bo->base.resv);
 67          ttm_bo_put(bo);
 74      err = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL, true,
 84  static unsigned long ttm_bo_io_mem_pfn(struct ttm_buffer_object *bo,
 87      struct ttm_device *bdev = bo->bdev;
 90          return bdev->funcs->io_mem_pfn(bo, page_offset);
[all …]
/linux/include/drm/ttm/
ttm_bo.h
164      struct ttm_buffer_object *bo;    (member)
208      s64 (*process_bo)(struct ttm_lru_walk *walk, struct ttm_buffer_object *bo);
233  static inline void ttm_bo_get(struct ttm_buffer_object *bo)
235      kref_get(&bo->kref);
249  ttm_bo_get_unless_zero(struct ttm_buffer_object *bo)
251      if (!kref_get_unless_zero(&bo->kref))
253      return bo;
278  static inline int ttm_bo_reserve(struct ttm_buffer_object *bo,
290      success = dma_resv_trylock(bo->base.resv);
295      ret = dma_resv_lock_interruptible(bo->base.resv, ticket);
[all …]
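ttm_bo_reserve() shown above wraps dma_resv locking: a trylock when no_wait is set, otherwise an (optionally interruptible) ww-mutex lock that can return -EDEADLK when a ticket is used. A minimal sketch of the canonical call, with a hypothetical caller name:

static int example_ttm_reserve(struct ttm_buffer_object *bo,
                               struct ww_acquire_ctx *ticket)
{
    int ret;

    /* interruptible = true, no_wait = false: sleep until the resv is ours */
    ret = ttm_bo_reserve(bo, true, false, ticket);
    if (ret)
        return ret;    /* e.g. -ERESTARTSYS, or -EDEADLK under a ticket */

    /* ... bo->base.resv is locked; validate/map/populate here ... */

    ttm_bo_unreserve(bo);
    return 0;
}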
/linux/drivers/gpu/drm/ttm/tests/
ttm_tt_test.c
 42      struct ttm_buffer_object *bo;    (local in ttm_tt_init_basic)
 53      bo = ttm_bo_kunit_init(test, test->priv, params->size, NULL);
 55      err = ttm_tt_init(tt, bo, page_flags, caching, extra_pages);
 69      struct ttm_buffer_object *bo;    (local in ttm_tt_init_misaligned)
 79      bo = ttm_bo_kunit_init(test, test->priv, size, NULL);
 82      bo->base.size += 1;
 84      err = ttm_tt_init(tt, bo, 0, caching, 0);
 92      struct ttm_buffer_object *bo;    (local in ttm_tt_fini_basic)
100      bo = ttm_bo_kunit_init(test, test->priv, BO_SIZE, NULL);
102      err = ttm_tt_init(tt, bo, 0, caching, 0);
[all …]
ttm_bo_validate_test.c
112      struct ttm_buffer_object *bo;    (local in ttm_bo_init_reserved_sys_man)
116      bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL);
117      KUNIT_ASSERT_NOT_NULL(test, bo);
122      drm_gem_private_object_init(priv->drm, &bo->base, size);
124      err = ttm_bo_init_reserved(priv->ttm_dev, bo, bo_type, placement,
127      dma_resv_unlock(bo->base.resv);
130      KUNIT_EXPECT_EQ(test, kref_read(&bo->kref), 1);
131      KUNIT_EXPECT_PTR_EQ(test, bo->bdev, priv->ttm_dev);
132      KUNIT_EXPECT_EQ(test, bo->type, bo_type);
133      KUNIT_EXPECT_EQ(test, bo->page_alignment, PAGE_SIZE);
[all …]
/linux/drivers/gpu/drm/panthor/
panthor_gem.c
 18      struct panthor_gem_object *bo = to_panthor_bo(obj);    (local in panthor_gem_free_object)
 19      struct drm_gem_object *vm_root_gem = bo->exclusive_vm_root_gem;
 21      drm_gem_free_mmap_offset(&bo->base.base);
 22      mutex_destroy(&bo->gpuva_list_lock);
 23      drm_gem_shmem_free(&bo->base);
 32  void panthor_kernel_bo_destroy(struct panthor_kernel_bo *bo)
 37      if (IS_ERR_OR_NULL(bo))
 40      vm = bo->vm;
 41      panthor_kernel_bo_vunmap(bo);
 43      if (drm_WARN_ON(bo->obj->dev,
[all …]
/linux/drivers/gpu/drm/vmwgfx/
vmwgfx_bo.c
 72  static void vmw_bo_free(struct ttm_buffer_object *bo)
 74      struct vmw_bo *vbo = to_vmw_bo(&bo->base);
 98      struct ttm_buffer_object *bo = &buf->tbo;    (local in vmw_bo_pin_in_placement)
103      ret = ttm_bo_reserve(bo, interruptible, false, NULL);
107      ret = ttm_bo_validate(bo, placement, &ctx);
111      ttm_bo_unreserve(bo);
134      struct ttm_buffer_object *bo = &buf->tbo;    (local in vmw_bo_pin_in_vram_or_gmr)
139      ret = ttm_bo_reserve(bo, interruptible, false, NULL);
146      ret = ttm_bo_validate(bo, &buf->placement, &ctx);
153      ret = ttm_bo_validate(bo, &buf->placement, &ctx);
[all …]
