/linux/drivers/gpu/drm/radeon/

radeon_fbdev.c
  in radeon_fbdev_destroy_pinned_object():
     41  static void radeon_fbdev_destroy_pinned_object(struct drm_gem_object *gobj)   [argument]
     43  struct radeon_bo *rbo = gem_to_radeon_bo(gobj);
     52  drm_gem_object_put(gobj);
  in radeon_fbdev_create_pinned_object():
     61  struct drm_gem_object *gobj = NULL;   [local]
     82  0, true, &gobj);
     87  rbo = gem_to_radeon_bo(gobj);
    131  *gobj_p = gobj;
    135  radeon_fbdev_destroy_pinned_object(gobj);
  in radeon_fbdev_fb_destroy():
    175  struct drm_gem_object *gobj = drm_gem_fb_get_obj(fb, 0);   [local]
    182  radeon_fbdev_destroy_pinned_object(gobj);
  [all …]

radeon_prime.c
  in radeon_gem_prime_export():
     94  struct dma_buf *radeon_gem_prime_export(struct drm_gem_object *gobj,   [argument]
     97  struct radeon_bo *bo = gem_to_radeon_bo(gobj);
    100  return drm_gem_prime_export(gobj, flags);

radeon_prime.h
     29  struct dma_buf *radeon_gem_prime_export(struct drm_gem_object *gobj,

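The radeon_prime.c hits above show the usual shape of a driver PRIME export hook: resolve the GEM object to the driver's buffer object, refuse anything that must not leave the device, and hand the rest to the generic drm_gem_prime_export() helper. A minimal sketch of that shape follows; the mydrv_* names and the userptr check are illustrative stand-ins, not symbols from the listing.

#include <linux/dma-buf.h>
#include <linux/err.h>
#include <drm/drm_gem.h>
#include <drm/drm_prime.h>

struct mydrv_bo;                                        /* hypothetical driver BO */
struct mydrv_bo *gem_to_mydrv_bo(struct drm_gem_object *gobj);
bool mydrv_bo_is_userptr(struct mydrv_bo *bo);          /* hypothetical helper    */

struct dma_buf *mydrv_gem_prime_export(struct drm_gem_object *gobj, int flags)
{
        struct mydrv_bo *bo = gem_to_mydrv_bo(gobj);

        /* Reject buffers the driver cannot safely share, e.g. userptr-backed ones. */
        if (mydrv_bo_is_userptr(bo))
                return ERR_PTR(-EPERM);

        /* Everything else goes through the common DRM PRIME helper. */
        return drm_gem_prime_export(gobj, flags);
}
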
/linux/drivers/gpu/drm/qxl/

qxl_dumb.c
  in qxl_mode_dumb_create():
     37  struct drm_gem_object *gobj;   [local]
     66  args->size, &surf, &gobj,
     70  qobj = gem_to_qxl_bo(gobj);
     72  drm_gem_object_put(gobj);

qxl_drv.h
     94  #define gem_to_qxl_bo(gobj) container_of((gobj), struct qxl_bo, tbo.base)   [argument]
    310  struct drm_gem_object **gobj,
    312  void qxl_gem_object_free(struct drm_gem_object *gobj);

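The gem_to_qxl_bo() define at qxl_drv.h:94 is the same container_of conversion every driver in this listing uses (gem_to_radeon_bo, gem_to_amdgpu_bo, gem_to_virtio_gpu_obj): the struct drm_gem_object is embedded in the driver's buffer object, here via TTM's ttm_buffer_object, so the wrapper recovers the outer structure from the gobj pointer with no extra lookup. A stripped-down sketch, assuming a TTM-based driver and a hypothetical struct mydrv_bo (header paths vary slightly between kernel versions):

#include <linux/container_of.h>
#include <drm/drm_gem.h>
#include <drm/ttm/ttm_bo.h>

struct mydrv_bo {
        struct ttm_buffer_object tbo;   /* tbo.base is the embedded struct drm_gem_object */
        /* driver-private fields follow */
};

/* Same pattern as gem_to_qxl_bo()/gem_to_radeon_bo()/gem_to_amdgpu_bo():
 * map the embedded GEM object back to the containing driver BO. */
#define gem_to_mydrv_bo(gobj) container_of((gobj), struct mydrv_bo, tbo.base)
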
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_dma_buf.c
  in amdgpu_gem_prime_export():
    372  struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj,   [argument]
    375  struct amdgpu_bo *bo = gem_to_amdgpu_bo(gobj);
    394  buf = drm_gem_prime_export(gobj, flags);
  in amdgpu_dma_buf_create_obj():
    418  struct drm_gem_object *gobj;   [local]
    436  ttm_bo_type_sg, resv, &gobj, 0);
    440  bo = gem_to_amdgpu_bo(gobj);
    445  return gobj;
  in amdgpu_dmabuf_is_xgmi_accessible():
    584  struct drm_gem_object *gobj;   [local]
    596  gobj = dma_buf->priv;
    597  bo = gem_to_amdgpu_bo(gobj);

amdgpu_amdkfd_gpuvm.c
  in kfd_mem_attach_dmabuf():
    841  struct drm_gem_object *gobj;   [local]
    848  gobj = amdgpu_gem_prime_import(adev_to_drm(adev), mem->dmabuf);
    849  if (IS_ERR(gobj))
    850  return PTR_ERR(gobj);
    852  *bo = gem_to_amdgpu_bo(gobj);
  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu():
   1705  struct drm_gem_object *gobj = NULL;   [local]
   1798  bo_type, NULL, &gobj, xcp_id + 1);
   1804  ret = drm_vma_node_allow(&gobj->vma_node, drm_priv);
   1809  ret = drm_gem_handle_create(adev->kfd.client.file, gobj, &(*mem)->gem_handle);
   1812  bo = gem_to_amdgpu_bo(gobj);
  [all …]

amdgpu_dma_buf.h
     28  struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj,

amdgpu_cs.c
  in amdgpu_cs_p1_user_fence():
    130  struct drm_gem_object *gobj;   [local]
    133  gobj = drm_gem_object_lookup(p->filp, data->handle);
    134  if (gobj == NULL)
    137  p->uf_bo = amdgpu_bo_ref(gem_to_amdgpu_bo(gobj));
    138  drm_gem_object_put(gobj);
  in amdgpu_cs_submit():
   1289  struct drm_gem_object *gobj;   [local]
   1340  drm_exec_for_each_locked_object(&p->exec, index, gobj) {
   1342  ttm_bo_move_to_lru_tail_unlocked(&gem_to_amdgpu_bo(gobj)->tbo);
   1349  dma_resv_add_fence(gobj->resv,
   1355  dma_resv_add_fence(gobj->resv, p->fence, DMA_RESV_USAGE_WRITE);

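amdgpu_cs_p1_user_fence() above and virtio_gpu_resource_info_ioctl() further down follow the standard handle-to-BO sequence: drm_gem_object_lookup() resolves a userspace handle and takes a reference on the GEM object, the driver converts it and, if the BO must outlive the call, takes its own reference, then drops the lookup reference with drm_gem_object_put(). Roughly, with hypothetical mydrv_* helpers standing in for the driver specifics:

#include <linux/err.h>
#include <drm/drm_file.h>
#include <drm/drm_gem.h>

struct mydrv_bo;                                        /* hypothetical driver BO       */
struct mydrv_bo *gem_to_mydrv_bo(struct drm_gem_object *gobj);
struct mydrv_bo *mydrv_bo_ref(struct mydrv_bo *bo);     /* hypothetical driver refcount */

static struct mydrv_bo *mydrv_bo_from_handle(struct drm_file *filp, u32 handle)
{
        struct drm_gem_object *gobj;
        struct mydrv_bo *bo;

        gobj = drm_gem_object_lookup(filp, handle);     /* takes a GEM reference */
        if (!gobj)
                return ERR_PTR(-EINVAL);

        /* Keep a driver-level reference of our own ... */
        bo = mydrv_bo_ref(gem_to_mydrv_bo(gobj));

        /* ... so the lookup reference can be dropped right away. */
        drm_gem_object_put(gobj);
        return bo;
}
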
amdgpu_ttm.c
    683  struct drm_gem_object *gobj;   [member]
  in amdgpu_ttm_backend_bind():
    916  attach = gtt->gobj->import_attach;
  in amdgpu_ttm_backend_unbind():
   1034  } else if (ttm->sg && drm_gem_is_imported(gtt->gobj)) {
   1037  attach = gtt->gobj->import_attach;
  in amdgpu_ttm_tt_create():
   1085  gtt->gobj = &bo->base;

amdgpu_vm.c
  in amdgpu_vm_bo_update():
   1303  struct drm_gem_object *gobj = dma_buf->priv;   [local]
   1304  struct amdgpu_bo *abo = gem_to_amdgpu_bo(gobj);
   1308  bo = gem_to_amdgpu_bo(gobj);

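The amdgpu_vm.c and amdgpu_dmabuf_is_xgmi_accessible() hits that read gobj straight out of dma_buf->priv lean on a DRM convention: a dma-buf created by drm_gem_prime_export() stores the exporting GEM object in its priv field, so a driver that first verifies the buffer carries its own dma_buf_ops can reach the BO without importing it. A sketch of that check, again with hypothetical mydrv_* names:

#include <linux/dma-buf.h>
#include <drm/drm_gem.h>

extern const struct dma_buf_ops mydrv_dmabuf_ops;       /* the driver's export ops */
struct mydrv_bo;
struct mydrv_bo *gem_to_mydrv_bo(struct drm_gem_object *gobj);

static struct mydrv_bo *mydrv_bo_from_dmabuf(struct dma_buf *dma_buf)
{
        struct drm_gem_object *gobj;

        /* Buffers exported by another driver cannot be assumed to carry
         * a GEM object in ->priv. */
        if (dma_buf->ops != &mydrv_dmabuf_ops)
                return NULL;

        gobj = dma_buf->priv;                           /* exporter's GEM object */
        return gem_to_mydrv_bo(gobj);
}
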
/linux/drivers/gpu/drm/virtio/

virtgpu_ioctl.c
  in virtio_gpu_resource_info_ioctl():
    205  struct drm_gem_object *gobj = NULL;   [local]
    208  gobj = drm_gem_object_lookup(file, ri->bo_handle);
    209  if (gobj == NULL)
    212  qobj = gem_to_virtio_gpu_obj(gobj);
    219  drm_gem_object_put(gobj);

virtgpu_gem.c
  in virtio_gpu_mode_dumb_create():
     65  struct drm_gem_object *gobj;   [local]
     90  ret = virtio_gpu_gem_create(file_priv, dev, &params, &gobj,

/linux/drivers/gpu/drm/nouveau/

nouveau_gem.h
     40  struct dma_buf *nouveau_gem_prime_export(struct drm_gem_object *gobj,