Cross-reference matches for "dma_buf" in the Linux kernel source tree, grouped by directory and file. The number before each match is its line number within that file; "[all …]" marks match lists truncated by the indexer.

/linux/include/linux/

dma-buf.h:
    27: struct dma_buf;
    72: int (*attach)(struct dma_buf *, struct dma_buf_attachment *);
    83: void (*detach)(struct dma_buf *, struct dma_buf_attachment *);
    203: void (*release)(struct dma_buf *);
    230: int (*begin_cpu_access)(struct dma_buf *, enum dma_data_direction);
    247: int (*end_cpu_access)(struct dma_buf *, enum dma_data_direction);
    284: int (*mmap)(struct dma_buf *, struct vm_area_struct *vma);
    286: int (*vmap)(struct dma_buf *dmabuf, struct iosys_map *map);
    287: void (*vunmap)(struct dma_buf *dmabuf, struct iosys_map *map);
    302: struct dma_buf {  struct
    [all …]
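The callbacks listed above are members of struct dma_buf_ops, which an exporting driver fills in and hands to dma_buf_export(). A minimal exporter sketch follows; everything prefixed my_ (the buffer type and the map/unmap/release stubs) is hypothetical, while DEFINE_DMA_BUF_EXPORT_INFO(), dma_buf_export() and dma_buf_fd() are the real dma-buf.h API.

/*
 * Hedged sketch of the exporter side. Only the dma-buf calls come from
 * <linux/dma-buf.h>; struct my_buffer and the my_* callbacks are made up.
 */
#include <linux/dma-buf.h>
#include <linux/fcntl.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct my_buffer {
	struct sg_table *sgt;	/* backing pages, already built elsewhere */
	size_t size;
};

static struct sg_table *my_map_dma_buf(struct dma_buf_attachment *attach,
				       enum dma_data_direction dir)
{
	struct my_buffer *buf = attach->dmabuf->priv;

	/* A real exporter would DMA-map the table for attach->dev here. */
	return buf->sgt;
}

static void my_unmap_dma_buf(struct dma_buf_attachment *attach,
			     struct sg_table *sgt,
			     enum dma_data_direction dir)
{
	/* Undo whatever my_map_dma_buf() did for this attachment. */
}

static void my_release(struct dma_buf *dmabuf)
{
	kfree(dmabuf->priv);	/* last reference to the buffer is gone */
}

static const struct dma_buf_ops my_dmabuf_ops = {
	.map_dma_buf	= my_map_dma_buf,
	.unmap_dma_buf	= my_unmap_dma_buf,
	.release	= my_release,
};

static int my_export_as_fd(struct my_buffer *buf)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);
	struct dma_buf *dmabuf;

	exp_info.ops   = &my_dmabuf_ops;
	exp_info.size  = buf->size;
	exp_info.flags = O_RDWR;
	exp_info.priv  = buf;

	dmabuf = dma_buf_export(&exp_info);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	/* Hand the buffer to userspace (or another driver) as a file descriptor. */
	return dma_buf_fd(dmabuf, O_CLOEXEC);
}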
virtio_dma_buf.h:
    24: int (*device_attach)(struct dma_buf *dma_buf,
    26: int (*get_uuid)(struct dma_buf *dma_buf, uuid_t *uuid);
    29: int virtio_dma_buf_attach(struct dma_buf *dma_buf,
    32: struct dma_buf *virtio_dma_buf_export
    34: bool is_virtio_dma_buf(struct dma_buf *dma_buf);
    35: int virtio_dma_buf_get_uuid(struct dma_buf *dma_buf, uuid_t *uuid);
/linux/drivers/gpu/drm/

drm_prime.c:
    89: struct dma_buf *dma_buf;  member
    97: struct dma_buf *dma_buf, uint32_t handle)  in drm_prime_add_buf_handle() argument
    106: get_dma_buf(dma_buf);  in drm_prime_add_buf_handle()
    107: member->dma_buf = dma_buf;  in drm_prime_add_buf_handle()
    117: if (dma_buf > pos->dma_buf)  in drm_prime_add_buf_handle()
    143: static struct dma_buf *drm_prime_lookup_buf_by_handle(struct drm_prime_file_private *prime_fpriv,  in drm_prime_lookup_buf_by_handle()
    154: return member->dma_buf;  in drm_prime_lookup_buf_by_handle()
    165: struct dma_buf *dma_buf,  in drm_prime_lookup_buf_handle() argument
    175: if (member->dma_buf == dma_buf) {  in drm_prime_lookup_buf_handle()
    178: } else if (member->dma_buf < dma_buf) {  in drm_prime_lookup_buf_handle()
    [all …]
/linux/include/drm/

drm_prime.h:
    55: struct dma_buf;
    66: struct dma_buf *drm_gem_dmabuf_export(struct drm_device *dev,
    68: void drm_gem_dmabuf_release(struct dma_buf *dma_buf);
    72: struct dma_buf *drm_gem_prime_handle_to_dmabuf(struct drm_device *dev,
    80: int drm_gem_map_attach(struct dma_buf *dma_buf,
    82: void drm_gem_map_detach(struct dma_buf *dma_buf,
    89: int drm_gem_dmabuf_vmap(struct dma_buf *dma_buf, struct iosys_map *map);
    90: void drm_gem_dmabuf_vunmap(struct dma_buf *dma_buf, struct iosys_map *map);
    93: int drm_gem_dmabuf_mmap(struct dma_buf *dma_buf, struct vm_area_struct *vma);
    97: struct dma_buf *drm_gem_prime_export(struct drm_gem_object *obj,
    [all …]
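In a DRM driver these PRIME helpers are normally wired up through struct drm_gem_object_funcs and struct drm_driver rather than called directly; the core PRIME ioctls then route handle-to-fd and fd-to-handle conversions through them. A hedged sketch (the my_* names are invented, the helpers are the ones declared above):

#include <drm/drm_drv.h>
#include <drm/drm_gem.h>
#include <drm/drm_prime.h>

/* Sketch: hook the generic PRIME helpers into a hypothetical driver. */
static const struct drm_gem_object_funcs my_gem_funcs = {
	/* Export a GEM object as a dma-buf (drm_gem_prime_export above). */
	.export = drm_gem_prime_export,
};

static const struct drm_driver my_drm_driver = {
	.driver_features  = DRIVER_GEM,
	/*
	 * Import a dma-buf into a GEM object; the helper recognises buffers
	 * this driver exported itself and simply re-references them.
	 */
	.gem_prime_import = drm_gem_prime_import,
	.name             = "my-drm-driver",
};

A GEM object created by such a driver would point obj->funcs at my_gem_funcs when it is initialised.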
/linux/drivers/gpu/drm/i915/gem/

i915_gem_dmabuf.c:
    23: static struct drm_i915_gem_object *dma_buf_to_obj(struct dma_buf *buf)
    70: static int i915_gem_dmabuf_vmap(struct dma_buf *dma_buf,  in i915_gem_dmabuf_vmap() argument
    73: struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);  in i915_gem_dmabuf_vmap()
    85: static void i915_gem_dmabuf_vunmap(struct dma_buf *dma_buf,  in i915_gem_dmabuf_vunmap() argument
    88: struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);  in i915_gem_dmabuf_vunmap()
    94: static int i915_gem_dmabuf_mmap(struct dma_buf *dma_buf, struct vm_area_struct *vma)  in i915_gem_dmabuf_mmap() argument
    96: struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);  in i915_gem_dmabuf_mmap()
    118: static int i915_gem_begin_cpu_access(struct dma_buf *dma_buf, enum dma_data_direction direction)  in i915_gem_begin_cpu_access() argument
    120: struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);  in i915_gem_begin_cpu_access()
    143: static int i915_gem_end_cpu_access(struct dma_buf *dma_buf, enum dma_data_direction direction)  in i915_gem_end_cpu_access()
    [all …]
i915_gem_dmabuf.h:
    11: struct dma_buf;
    14: struct dma_buf *dma_buf);
    16: struct dma_buf *i915_gem_prime_export(struct drm_gem_object *gem_obj, int flags);
/linux/drivers/virtio/

virtio_dma_buf.c:
    20: struct dma_buf *virtio_dma_buf_export  in virtio_dma_buf_export()
    40: int virtio_dma_buf_attach(struct dma_buf *dma_buf,  in virtio_dma_buf_attach() argument
    45: container_of(dma_buf->ops,  in virtio_dma_buf_attach()
    49: ret = ops->device_attach(dma_buf, attach);  in virtio_dma_buf_attach()
    61: bool is_virtio_dma_buf(struct dma_buf *dma_buf)  in is_virtio_dma_buf() argument
    63: return dma_buf->ops->attach == &virtio_dma_buf_attach;  in is_virtio_dma_buf()
    74: int virtio_dma_buf_get_uuid(struct dma_buf *dma_buf,  in virtio_dma_buf_get_uuid() argument
    78: container_of(dma_buf->ops,  in virtio_dma_buf_get_uuid()
    81: if (!is_virtio_dma_buf(dma_buf))  in virtio_dma_buf_get_uuid()
    84: return ops->get_uuid(dma_buf, uuid);  in virtio_dma_buf_get_uuid()
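As line 63 shows, is_virtio_dma_buf() simply checks that the buffer's attach callback is virtio_dma_buf_attach, so a virtio exporter has to route attach through that helper and provide get_uuid. A hedged sketch of the ops wiring; struct my_virtio_buf is invented, and the generic map/unmap/release callbacks that every exporter must supply are omitted here and would be filled in as in the plain dma-buf exporter sketch further up.

#include <linux/dma-buf.h>
#include <linux/module.h>
#include <linux/uuid.h>
#include <linux/virtio_dma_buf.h>

struct my_virtio_buf {			/* hypothetical exporter state */
	uuid_t uuid;
	/* backing storage, sg_table, ... */
};

static int my_device_attach(struct dma_buf *dma_buf,
			    struct dma_buf_attachment *attach)
{
	return 0;	/* device-specific attach work would go here */
}

static int my_get_uuid(struct dma_buf *dma_buf, uuid_t *uuid)
{
	struct my_virtio_buf *buf = dma_buf->priv;

	uuid_copy(uuid, &buf->uuid);
	return 0;
}

static const struct virtio_dma_buf_ops my_virtio_dmabuf_ops = {
	.ops = {
		/*
		 * Must be virtio_dma_buf_attach: virtio_dma_buf_export()
		 * refuses the export otherwise, and is_virtio_dma_buf()
		 * keys off this pointer.
		 */
		.attach = virtio_dma_buf_attach,
		/* .map_dma_buf, .unmap_dma_buf, .release: as in any exporter. */
	},
	.device_attach = my_device_attach,
	.get_uuid      = my_get_uuid,
};

static struct dma_buf *my_virtio_export(struct my_virtio_buf *buf, size_t size)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

	exp_info.ops  = &my_virtio_dmabuf_ops.ops;
	exp_info.size = size;
	exp_info.priv = buf;

	/* Checks the attach pointer and get_uuid before exporting. */
	return virtio_dma_buf_export(&exp_info);
}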
/linux/drivers/gpu/drm/vmwgfx/

ttm_object.c:
    96: void (*dmabuf_release)(struct dma_buf *dma_buf);
    130: static void ttm_prime_dmabuf_release(struct dma_buf *dma_buf);
    472: static bool __must_check get_dma_buf_unless_doomed(struct dma_buf *dmabuf)  in get_dma_buf_unless_doomed()
    494: BUG_ON(prime->dma_buf != NULL);  in ttm_prime_refcount_release()
    510: static void ttm_prime_dmabuf_release(struct dma_buf *dma_buf)  in ttm_prime_dmabuf_release() argument
    513: (struct ttm_prime_object *) dma_buf->priv;  in ttm_prime_dmabuf_release()
    518: tdev->dmabuf_release(dma_buf);  in ttm_prime_dmabuf_release()
    520: if (prime->dma_buf == dma_buf)  in ttm_prime_dmabuf_release()
    521: prime->dma_buf = NULL;  in ttm_prime_dmabuf_release()
    541: struct dma_buf *dma_buf;  in ttm_prime_fd_to_handle() local
    [all …]
vmwgfx_prime.c:
    44: static int vmw_prime_map_attach(struct dma_buf *dma_buf,  in vmw_prime_map_attach() argument
    50: static void vmw_prime_map_detach(struct dma_buf *dma_buf,  in vmw_prime_map_detach() argument
/linux/drivers/gpu/drm/xe/

xe_dma_buf.c:
    25: static int xe_dma_buf_attach(struct dma_buf *dmabuf,  in xe_dma_buf_attach()
    41: static void xe_dma_buf_detach(struct dma_buf *dmabuf,  in xe_dma_buf_detach()
    92: struct dma_buf *dma_buf = attach->dmabuf;  in xe_dma_buf_map() local
    93: struct drm_gem_object *obj = dma_buf->priv;  in xe_dma_buf_map()
    148: struct dma_buf *dma_buf = attach->dmabuf;  in xe_dma_buf_unmap() local
    149: struct xe_bo *bo = gem_to_xe_bo(dma_buf->priv);  in xe_dma_buf_unmap()
    160: static int xe_dma_buf_begin_cpu_access(struct dma_buf *dma_buf,  in xe_dma_buf_begin_cpu_access() argument
    163: struct drm_gem_object *obj = dma_buf->priv;  in xe_dma_buf_begin_cpu_access()
    193: struct dma_buf *xe_gem_prime_export(struct drm_gem_object *obj, int flags)  in xe_gem_prime_export()
    196: struct dma_buf *buf;  in xe_gem_prime_export()
    [all …]
xe_dma_buf.h:
    11: struct dma_buf *xe_gem_prime_export(struct drm_gem_object *obj, int flags);
    13: struct dma_buf *dma_buf);
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_dma_buf.c:
    53: static int amdgpu_dma_buf_attach(struct dma_buf *dmabuf,  in amdgpu_dma_buf_attach()
    113: struct dma_buf *dma_buf = attach->dmabuf;  in amdgpu_dma_buf_map() local
    114: struct drm_gem_object *obj = dma_buf->priv;  in amdgpu_dma_buf_map()
    205: static int amdgpu_dma_buf_begin_cpu_access(struct dma_buf *dma_buf,  in amdgpu_dma_buf_begin_cpu_access() argument
    208: struct amdgpu_bo *bo = gem_to_amdgpu_bo(dma_buf->priv);  in amdgpu_dma_buf_begin_cpu_access()
    257: struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj,  in amdgpu_gem_prime_export()
    261: struct dma_buf *buf;  in amdgpu_gem_prime_export()
    287: amdgpu_dma_buf_create_obj(struct drm_device *dev, struct dma_buf *dma_buf)  in amdgpu_dma_buf_create_obj() argument
    289: struct dma_resv *resv = dma_buf->resv;  in amdgpu_dma_buf_create_obj()
    298: if (dma_buf->ops == &amdgpu_dmabuf_ops) {  in amdgpu_dma_buf_create_obj()
    [all …]
amdgpu_dma_buf.h:
    28: struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj,
    31: struct dma_buf *dma_buf);
/linux/drivers/gpu/drm/omapdrm/

omap_gem_dmabuf.c:
    40: static int omap_gem_dmabuf_begin_cpu_access(struct dma_buf *buffer,  in omap_gem_dmabuf_begin_cpu_access()
    55: static int omap_gem_dmabuf_end_cpu_access(struct dma_buf *buffer,  in omap_gem_dmabuf_end_cpu_access()
    63: static int omap_gem_dmabuf_mmap(struct dma_buf *buffer,  in omap_gem_dmabuf_mmap()
    80: struct dma_buf *omap_gem_prime_export(struct drm_gem_object *obj, int flags)  in omap_gem_prime_export()
    98: struct dma_buf *dma_buf)  in omap_gem_prime_import() argument
    105: if (dma_buf->ops == &omap_dmabuf_ops) {  in omap_gem_prime_import()
    106: obj = dma_buf->priv;  in omap_gem_prime_import()
    117: attach = dma_buf_attach(dma_buf, dev->dev);  in omap_gem_prime_import()
    121: get_dma_buf(dma_buf);  in omap_gem_prime_import()
    129: obj = omap_gem_new_dmabuf(dev, dma_buf->size, sgt);  in omap_gem_prime_import()
    [all …]
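The import path above (lines 98 to 129) is the standard PRIME pattern: if the dma-buf was exported by this driver, reuse the GEM object behind it; otherwise attach to the foreign exporter, take a reference, and wrap the mapped scatter-gather table in a new object. A condensed, hypothetical version follows; my_dmabuf_ops is the driver's own exporter ops table (as in the first sketch), my_gem_new_from_sgt() is invented, and error unwinding for the last step is omitted.

#include <drm/drm_device.h>
#include <drm/drm_gem.h>
#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <linux/err.h>

/* Hypothetical pieces: the driver's exporter ops and its "wrap an sg_table
 * in a new GEM object" constructor. */
extern const struct dma_buf_ops my_dmabuf_ops;
struct drm_gem_object *my_gem_new_from_sgt(struct drm_device *dev,
					   size_t size, struct sg_table *sgt);

static struct drm_gem_object *my_gem_prime_import(struct drm_device *dev,
						  struct dma_buf *dma_buf)
{
	struct dma_buf_attachment *attach;
	struct drm_gem_object *obj;
	struct sg_table *sgt;

	if (dma_buf->ops == &my_dmabuf_ops) {
		/* Self-import: the buffer wraps one of our own GEM objects. */
		obj = dma_buf->priv;
		if (obj->dev == dev) {
			drm_gem_object_get(obj);
			return obj;
		}
	}

	attach = dma_buf_attach(dma_buf, dev->dev);
	if (IS_ERR(attach))
		return ERR_CAST(attach);

	get_dma_buf(dma_buf);	/* the new GEM object keeps its own reference */

	sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);
	if (IS_ERR(sgt)) {
		dma_buf_detach(dma_buf, attach);
		dma_buf_put(dma_buf);
		return ERR_CAST(sgt);
	}

	return my_gem_new_from_sgt(dev, dma_buf->size, sgt);
}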
/linux/drivers/scsi/lpfc/

lpfc_mem.c:
    564: struct hbq_dmabuf *dma_buf;  in lpfc_sli4_rb_alloc() local
    566: dma_buf = kzalloc(sizeof(struct hbq_dmabuf), GFP_KERNEL);  in lpfc_sli4_rb_alloc()
    567: if (!dma_buf)  in lpfc_sli4_rb_alloc()
    570: dma_buf->hbuf.virt = dma_pool_alloc(phba->lpfc_hrb_pool, GFP_KERNEL,  in lpfc_sli4_rb_alloc()
    571: &dma_buf->hbuf.phys);  in lpfc_sli4_rb_alloc()
    572: if (!dma_buf->hbuf.virt) {  in lpfc_sli4_rb_alloc()
    573: kfree(dma_buf);  in lpfc_sli4_rb_alloc()
    576: dma_buf->dbuf.virt = dma_pool_alloc(phba->lpfc_drb_pool, GFP_KERNEL,  in lpfc_sli4_rb_alloc()
    577: &dma_buf->dbuf.phys);  in lpfc_sli4_rb_alloc()
    578: if (!dma_buf->dbuf.virt) {  in lpfc_sli4_rb_alloc()
    [all …]
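lpfc_sli4_rb_alloc() above pairs kzalloc() for the wrapper structure with dma_pool_alloc() for the DMA-coherent header and data buffers, freeing the wrapper if a pool allocation fails. The underlying dma_pool pattern, stripped of the lpfc specifics (the my_rb type and the pool argument are illustrative):

#include <linux/dmapool.h>
#include <linux/slab.h>

struct my_rb {			/* illustrative wrapper, like hbq_dmabuf */
	void *virt;
	dma_addr_t phys;
};

/* The pool is created once at probe time with something like
 * dma_pool_create("my_rb_pool", dev, buf_size, align, 0). */
static struct my_rb *my_rb_alloc(struct dma_pool *pool)
{
	struct my_rb *rb;

	rb = kzalloc(sizeof(*rb), GFP_KERNEL);
	if (!rb)
		return NULL;

	rb->virt = dma_pool_alloc(pool, GFP_KERNEL, &rb->phys);
	if (!rb->virt) {
		kfree(rb);		/* same unwind as lpfc_sli4_rb_alloc() */
		return NULL;
	}
	return rb;
}

static void my_rb_free(struct dma_pool *pool, struct my_rb *rb)
{
	dma_pool_free(pool, rb->virt, rb->phys);
	kfree(rb);
}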
/linux/drivers/dma-buf/

dma-buf.c:
    42: static void __dma_buf_debugfs_list_add(struct dma_buf *dmabuf)  in __dma_buf_debugfs_list_add()
    49: static void __dma_buf_debugfs_list_del(struct dma_buf *dmabuf)  in __dma_buf_debugfs_list_del()
    59: static void __dma_buf_debugfs_list_add(struct dma_buf *dmabuf)  in __dma_buf_debugfs_list_add()
    63: static void __dma_buf_debugfs_list_del(struct dma_buf *dmabuf)  in __dma_buf_debugfs_list_del()
    70: struct dma_buf *dmabuf;  in dmabuffs_dname()
    86: struct dma_buf *dmabuf;  in dma_buf_release()
    149: struct dma_buf *dmabuf;  in dma_buf_mmap_internal()
    170: struct dma_buf *dmabuf;  in dma_buf_llseek()
    223: struct dma_buf *dmabuf = container_of(dcb->poll, struct dma_buf, poll);  in dma_buf_poll_cb()
    256: struct dma_buf *dmabuf;  in dma_buf_poll()
    [all …]
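dma-buf.c implements the fd and file plumbing behind these entry points (release, mmap, llseek, poll). From an importing driver's point of view, the usual lifecycle built on top of it is sketched below; dev and fd are assumed to come from the caller, and the actual device programming is elided.

#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Sketch: import a dma-buf fd, map it for DMA, then tear everything down. */
static int my_use_dmabuf_fd(struct device *dev, int fd)
{
	struct dma_buf *dmabuf;
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;
	int ret = 0;

	dmabuf = dma_buf_get(fd);		/* takes a reference on the file */
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	attach = dma_buf_attach(dmabuf, dev);	/* tell the exporter who maps it */
	if (IS_ERR(attach)) {
		ret = PTR_ERR(attach);
		goto out_put;
	}

	sgt = dma_buf_map_attachment_unlocked(attach, DMA_BIDIRECTIONAL);
	if (IS_ERR(sgt)) {
		ret = PTR_ERR(sgt);
		goto out_detach;
	}

	/* ... program the device with the sgt entries here ... */

	dma_buf_unmap_attachment_unlocked(attach, sgt, DMA_BIDIRECTIONAL);
out_detach:
	dma_buf_detach(dmabuf, attach);
out_put:
	dma_buf_put(dmabuf);
	return ret;
}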
dma-buf-sysfs-stats.h:
    16: int dma_buf_stats_setup(struct dma_buf *dmabuf, struct file *file);
    18: void dma_buf_stats_teardown(struct dma_buf *dmabuf);
    28: static inline int dma_buf_stats_setup(struct dma_buf *dmabuf, struct file *file)  in dma_buf_stats_setup()
    33: static inline void dma_buf_stats_teardown(struct dma_buf *dmabuf) {}  in dma_buf_stats_teardown()
dma-buf-sysfs-stats.c:
    55: ssize_t (*show)(struct dma_buf *dmabuf,
    66: struct dma_buf *dmabuf;  in dma_buf_stats_attribute_show()
    82: static ssize_t exporter_name_show(struct dma_buf *dmabuf,  in exporter_name_show()
    89: static ssize_t size_show(struct dma_buf *dmabuf,  in size_show()
    121: void dma_buf_stats_teardown(struct dma_buf *dmabuf)  in dma_buf_stats_teardown()
    171: int dma_buf_stats_setup(struct dma_buf *dmabuf, struct file *file)  in dma_buf_stats_setup()
/linux/drivers/scsi/csiostor/

csio_scsi.c:
    206: struct csio_dma_buf *dma_buf;  in csio_scsi_init_cmd_wr() local
    222: dma_buf = &req->dma_buf;  in csio_scsi_init_cmd_wr()
    225: wr->rsp_dmalen = cpu_to_be32(dma_buf->len);  in csio_scsi_init_cmd_wr()
    226: wr->rsp_dmaaddr = cpu_to_be64(dma_buf->paddr);  in csio_scsi_init_cmd_wr()
    298: struct csio_dma_buf *dma_buf;  in csio_scsi_init_ultptx_dsgl() local
    329: dma_buf = (struct csio_dma_buf *)tmp;  in csio_scsi_init_ultptx_dsgl()
    331: sgl->addr0 = cpu_to_be64(dma_buf->paddr);  in csio_scsi_init_ultptx_dsgl()
    333: min(xfer_len, dma_buf->len));  in csio_scsi_init_ultptx_dsgl()
    336: sge_pair->addr[1] = cpu_to_be64(dma_buf->paddr);  in csio_scsi_init_ultptx_dsgl()
    338: min(xfer_len, dma_buf->len));  in csio_scsi_init_ultptx_dsgl()
    [all …]
/linux/drivers/i2c/busses/

i2c-qcom-geni.c:
    96: void *dma_buf;  member
    399: if (gi2c->dma_buf) {  in geni_i2c_rx_msg_cleanup()
    403: i2c_put_dma_safe_msg_buf(gi2c->dma_buf, cur, !gi2c->err);  in geni_i2c_rx_msg_cleanup()
    411: if (gi2c->dma_buf) {  in geni_i2c_tx_msg_cleanup()
    415: i2c_put_dma_safe_msg_buf(gi2c->dma_buf, cur, !gi2c->err);  in geni_i2c_tx_msg_cleanup()
    424: void *dma_buf;  in geni_i2c_rx_one_msg() local
    429: dma_buf = i2c_get_dma_safe_msg_buf(msg, 32);  in geni_i2c_rx_one_msg()
    430: if (dma_buf)  in geni_i2c_rx_one_msg()
    438: if (dma_buf && geni_se_rx_dma_prep(se, dma_buf, len, &rx_dma)) {  in geni_i2c_rx_one_msg()
    440: i2c_put_dma_safe_msg_buf(dma_buf, msg, false);  in geni_i2c_rx_one_msg()
    [all …]
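The calls to i2c_get_dma_safe_msg_buf() and i2c_put_dma_safe_msg_buf() above are the standard way for an I2C controller driver to obtain a DMA-capable bounce buffer for an i2c_msg. A driver-agnostic sketch of that pattern; my_hw_dma_xfer() stands in for the actual hardware programming, much as geni_se_rx_dma_prep() does above.

#include <linux/i2c.h>

/* Hypothetical hardware hook: transfer len bytes to/from a DMA-safe buffer. */
int my_hw_dma_xfer(u8 *buf, u16 len, bool read);

static int my_i2c_xfer_one(struct i2c_msg *msg)
{
	u8 *dma_buf;
	int ret;

	/* Messages shorter than 32 bytes return NULL; a real driver would
	 * fall back to FIFO/PIO for those instead of failing. */
	dma_buf = i2c_get_dma_safe_msg_buf(msg, 32);
	if (!dma_buf)
		return -EINVAL;

	ret = my_hw_dma_xfer(dma_buf, msg->len, msg->flags & I2C_M_RD);

	/* Third argument: true when data was transferred, so that read data
	 * is copied back into msg->buf before the bounce buffer is freed. */
	i2c_put_dma_safe_msg_buf(dma_buf, msg, ret == 0);
	return ret;
}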
i2c-stm32.c:
    87: dma->dma_buf = 0;  in stm32_i2c_dma_free()
    121: dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,  in stm32_i2c_prep_dma_xfer()
    123: if (dma_mapping_error(chan_dev, dma->dma_buf)) {  in stm32_i2c_prep_dma_xfer()
    128: txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,  in stm32_i2c_prep_dma_xfer()
    153: dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,  in stm32_i2c_prep_dma_xfer()
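stm32_i2c_prep_dma_xfer() above shows the usual streaming-DMA recipe for a slave transfer: dma_map_single() the buffer against the DMA channel's device, build a descriptor with dmaengine_prep_slave_single(), and unmap again on failure. A generic, hedged version of that sequence for the transmit direction:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>

static int my_prep_dma_tx(struct dma_chan *chan, void *buf, size_t len)
{
	struct device *chan_dev = chan->device->dev;
	struct dma_async_tx_descriptor *txdesc;
	dma_addr_t dma_addr;

	dma_addr = dma_map_single(chan_dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(chan_dev, dma_addr))
		return -ENOMEM;

	txdesc = dmaengine_prep_slave_single(chan, dma_addr, len,
					     DMA_MEM_TO_DEV,
					     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc) {
		dma_unmap_single(chan_dev, dma_addr, len, DMA_TO_DEVICE);
		return -EINVAL;
	}

	/* Completion callback and cookie checking are omitted in this sketch. */
	dmaengine_submit(txdesc);
	dma_async_issue_pending(chan);
	return 0;
}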
/linux/drivers/xen/

gntdev-dmabuf.c:
    30: struct dma_buf *dmabuf;
    220: static int dmabuf_exp_ops_attach(struct dma_buf *dma_buf,  in dmabuf_exp_ops_attach() argument
    235: static void dmabuf_exp_ops_detach(struct dma_buf *dma_buf,  in dmabuf_exp_ops_detach() argument
    327: static void dmabuf_exp_ops_release(struct dma_buf *dma_buf)  in dmabuf_exp_ops_release() argument
    329: struct gntdev_dmabuf *gntdev_dmabuf = dma_buf->priv;  in dmabuf_exp_ops_release()
    570: struct dma_buf *dma_buf;  in dmabuf_imp_to_refs() local
    577: dma_buf = dma_buf_get(fd);  in dmabuf_imp_to_refs()
    578: if (IS_ERR(dma_buf))  in dmabuf_imp_to_refs()
    579: return ERR_CAST(dma_buf);  in dmabuf_imp_to_refs()
    590: attach = dma_buf_attach(dma_buf, dev);  in dmabuf_imp_to_refs()
    [all …]
/linux/drivers/gpu/drm/tegra/

gem.c:
    79: if (obj->dma_buf) {  in tegra_bo_pin()
    80: struct dma_buf *buf = obj->dma_buf;  in tegra_bo_pin()
    187: if (obj->dma_buf) {  in tegra_bo_mmap()
    188: ret = dma_buf_vmap_unlocked(obj->dma_buf, &map);  in tegra_bo_mmap()
    211: if (obj->dma_buf)  in tegra_bo_munmap()
    212: return dma_buf_vunmap_unlocked(obj->dma_buf, &map);  in tegra_bo_munmap()
    457: struct dma_buf *buf)  in tegra_bo_import()
    493: bo->dma_buf = buf;  in tegra_bo_import()
    536: if (bo->dma_buf)  in tegra_bo_free_object()
    537: dma_buf_put(bo->dma_buf);  in tegra_bo_free_object()
    [all …]
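tegra_bo_mmap() and tegra_bo_munmap() above fall back to dma_buf_vmap_unlocked() when the object wraps an imported buffer. In isolation, the iosys_map-based kernel-mapping pattern looks roughly like this; real users also bracket the CPU access with dma_buf_begin/end_cpu_access(), which is omitted here.

#include <linux/dma-buf.h>
#include <linux/iosys-map.h>
#include <linux/string.h>

/* Sketch: map an imported dma-buf into the kernel, touch it, unmap it. */
static int my_cpu_clear(struct dma_buf *dmabuf)
{
	struct iosys_map map;
	int ret;

	ret = dma_buf_vmap_unlocked(dmabuf, &map);
	if (ret)
		return ret;

	/* map.vaddr is valid now (map.vaddr_iomem when is_iomem is set). */
	if (!map.is_iomem)
		memset(map.vaddr, 0, dmabuf->size);

	dma_buf_vunmap_unlocked(dmabuf, &map);
	return 0;
}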
/linux/drivers/dma-buf/heaps/

cma_heap.c:
    50: static int cma_heap_attach(struct dma_buf *dmabuf,  in cma_heap_attach()
    83: static void cma_heap_detach(struct dma_buf *dmabuf,  in cma_heap_detach()
    121: static int cma_heap_dma_buf_begin_cpu_access(struct dma_buf *dmabuf,  in cma_heap_dma_buf_begin_cpu_access()
    142: static int cma_heap_dma_buf_end_cpu_access(struct dma_buf *dmabuf,  in cma_heap_dma_buf_end_cpu_access()
    178: static int cma_heap_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma)  in cma_heap_mmap()
    204: static int cma_heap_vmap(struct dma_buf *dmabuf, struct iosys_map *map)  in cma_heap_vmap()
    231: static void cma_heap_vunmap(struct dma_buf *dmabuf, struct iosys_map *map)  in cma_heap_vunmap()
    244: static void cma_heap_dma_buf_release(struct dma_buf *dmabuf)  in cma_heap_dma_buf_release()
    275: static struct dma_buf *cma_heap_allocate(struct dma_heap *heap,  in cma_heap_allocate()
    287: struct dma_buf *dmabuf;  in cma_heap_allocate()
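cma_heap.c is the exporter behind a /dev/dma_heap/* node, and its begin/end_cpu_access, mmap and release callbacks above are what userspace reaches through the dma-heap allocation ioctl, mmap() on the returned fd, and DMA_BUF_IOCTL_SYNC. A hedged userspace sketch follows; the heap node name depends on the platform and error handling is kept minimal.

#include <fcntl.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <linux/dma-buf.h>
#include <linux/dma-heap.h>

/* Allocate len bytes from a DMA heap, zero it from the CPU, return the fd. */
int alloc_and_fill(size_t len)
{
	struct dma_heap_allocation_data alloc = {
		.len = len,
		.fd_flags = O_RDWR | O_CLOEXEC,
	};
	struct dma_buf_sync sync = { 0 };
	int heap_fd, buf_fd;
	void *p;

	heap_fd = open("/dev/dma_heap/reserved", O_RDONLY | O_CLOEXEC);
	if (heap_fd < 0)
		return -1;

	if (ioctl(heap_fd, DMA_HEAP_IOCTL_ALLOC, &alloc) < 0) {
		close(heap_fd);
		return -1;
	}
	close(heap_fd);
	buf_fd = alloc.fd;

	p = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, buf_fd, 0);
	if (p == MAP_FAILED) {
		close(buf_fd);
		return -1;
	}

	/* Routed to the exporter's begin_cpu_access callback. */
	sync.flags = DMA_BUF_SYNC_START | DMA_BUF_SYNC_WRITE;
	ioctl(buf_fd, DMA_BUF_IOCTL_SYNC, &sync);

	memset(p, 0, len);			/* plain CPU access */

	/* Routed to the exporter's end_cpu_access callback. */
	sync.flags = DMA_BUF_SYNC_END | DMA_BUF_SYNC_WRITE;
	ioctl(buf_fd, DMA_BUF_IOCTL_SYNC, &sync);

	munmap(p, len);
	return buf_fd;	/* ready to hand to a driver or another process */
}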
/linux/drivers/media/common/videobuf2/

videobuf2-vmalloc.c:
    32: struct dma_buf *dbuf;
    212: static int vb2_vmalloc_dmabuf_ops_attach(struct dma_buf *dbuf,  in vb2_vmalloc_dmabuf_ops_attach()
    251: static void vb2_vmalloc_dmabuf_ops_detach(struct dma_buf *dbuf,  in vb2_vmalloc_dmabuf_ops_detach()
    304: static void vb2_vmalloc_dmabuf_ops_release(struct dma_buf *dbuf)  in vb2_vmalloc_dmabuf_ops_release()
    310: static int vb2_vmalloc_dmabuf_ops_vmap(struct dma_buf *dbuf,  in vb2_vmalloc_dmabuf_ops_vmap()
    320: static int vb2_vmalloc_dmabuf_ops_mmap(struct dma_buf *dbuf,  in vb2_vmalloc_dmabuf_ops_mmap()
    336: static struct dma_buf *vb2_vmalloc_get_dmabuf(struct vb2_buffer *vb,  in vb2_vmalloc_get_dmabuf()
    341: struct dma_buf *dbuf;  in vb2_vmalloc_get_dmabuf()
    404: struct dma_buf *dbuf,  in vb2_vmalloc_attach_dmabuf()