| /linux/drivers/gpu/drm/i915/ |
| H A D | i915_mm.c | 37 struct sgt_iter sgt; member 46 return (r->sgt.dma + r->sgt.curr + r->iobase) >> PAGE_SHIFT; in sgt_pfn() 48 return r->sgt.pfn + (r->sgt.curr >> PAGE_SHIFT); in sgt_pfn() 55 if (GEM_WARN_ON(!r->sgt.sgp)) in remap_sg() 63 r->sgt.curr += PAGE_SIZE; in remap_sg() 64 if (r->sgt.curr >= r->sgt.max) in remap_sg() 65 r->sgt = __sgt_iter(__sg_next(r->sgt.sgp), use_dma(r->iobase)); in remap_sg() 138 .sgt = __sgt_iter(sgl, use_dma(iobase)), in remap_io_sg() 146 while (offset >= r.sgt.max >> PAGE_SHIFT) { in remap_io_sg() 147 offset -= r.sgt.max >> PAGE_SHIFT; in remap_io_sg() [all …]
|
| /linux/drivers/gpu/drm/tests/ |
| H A D | drm_gem_shmem_test.c | 70 struct sg_table *sgt; in drm_gem_shmem_test_obj_create_private() local 78 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in drm_gem_shmem_test_obj_create_private() 79 KUNIT_ASSERT_NOT_NULL(test, sgt); in drm_gem_shmem_test_obj_create_private() 81 ret = kunit_add_action_or_reset(test, kfree_wrapper, sgt); in drm_gem_shmem_test_obj_create_private() 84 ret = sg_alloc_table(sgt, 1, GFP_KERNEL); in drm_gem_shmem_test_obj_create_private() 87 ret = kunit_add_action_or_reset(test, sg_free_table_wrapper, sgt); in drm_gem_shmem_test_obj_create_private() 90 sg_init_one(sgt->sgl, buf, TEST_SIZE); in drm_gem_shmem_test_obj_create_private() 100 ret = dma_map_sgtable(drm_dev->dev, sgt, DMA_BIDIRECTIONAL, 0); in drm_gem_shmem_test_obj_create_private() 107 gem_obj = drm_gem_shmem_prime_import_sg_table(drm_dev, &attach_mock, sgt); in drm_gem_shmem_test_obj_create_private() 114 kunit_remove_action(test, sg_free_table_wrapper, sgt); in drm_gem_shmem_test_obj_create_private() [all …]
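The test above builds a one-entry table around a contiguous buffer with sg_alloc_table() plus sg_init_one() before importing it through PRIME. A minimal sketch of that wrapping step outside KUnit; the helper name and error handling here are illustrative, not from the test:

```c
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Wrap one contiguous kernel buffer in a single-entry sg_table.
 * buf must come from kmalloc()/the linear map, not vmalloc(). */
static struct sg_table *wrap_buf_in_sgt(void *buf, size_t len)
{
	struct sg_table *sgt;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
		kfree(sgt);
		return ERR_PTR(-ENOMEM);
	}

	/* Point the single entry at the buffer, as the test does. */
	sg_init_one(sgt->sgl, buf, len);
	return sgt;
}
```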
|
| /linux/drivers/xen/ |
| H A D | gntdev-dmabuf.c | 46 struct sg_table *sgt; member 65 struct sg_table *sgt; member 198 struct sg_table *sgt; in dmabuf_pages_to_sgt() local 201 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); in dmabuf_pages_to_sgt() 202 if (!sgt) { in dmabuf_pages_to_sgt() 207 ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0, in dmabuf_pages_to_sgt() 213 return sgt; in dmabuf_pages_to_sgt() 216 kfree(sgt); in dmabuf_pages_to_sgt() 241 struct sg_table *sgt = gntdev_dmabuf_attach->sgt; in dmabuf_exp_ops_detach() local 243 if (sgt) { in dmabuf_exp_ops_detach() [all …]
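dmabuf_pages_to_sgt() above is essentially a thin wrapper around sg_alloc_table_from_pages(), which coalesces physically contiguous pages into fewer, larger table entries. A hedged reconstruction of that shape (helper name and size arithmetic are assumptions):

```c
#include <linux/err.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *pages_to_sgt(struct page **pages, unsigned int nr_pages)
{
	struct sg_table *sgt;
	int ret;

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	/* Adjacent physically contiguous pages collapse into one entry. */
	ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,
					(unsigned long)nr_pages << PAGE_SHIFT,
					GFP_KERNEL);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}
	return sgt;
}
```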
|
| /linux/drivers/hwtracing/intel_th/ |
| H A D | msu-sink.c | 51 static int msu_sink_alloc_window(void *data, struct sg_table **sgt, size_t size) in msu_sink_alloc_window() argument 64 ret = sg_alloc_table(*sgt, nents, GFP_KERNEL); in msu_sink_alloc_window() 68 priv->sgts[priv->nr_sgts++] = *sgt; in msu_sink_alloc_window() 70 for_each_sg((*sgt)->sgl, sg_ptr, nents, i) { in msu_sink_alloc_window() 84 static void msu_sink_free_window(void *data, struct sg_table *sgt) in msu_sink_free_window() argument 90 for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) { in msu_sink_free_window() 95 sg_free_table(sgt); in msu_sink_free_window() 99 static int msu_sink_ready(void *data, struct sg_table *sgt, size_t bytes) in msu_sink_ready() argument 103 intel_th_msc_window_unlock(priv->dev, sgt); in msu_sink_ready()
|
| /linux/drivers/media/common/videobuf2/ |
| H A D | videobuf2-vmalloc.c | 208 struct sg_table sgt; member 218 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_attach() local 228 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_attach() 229 ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL); in vb2_vmalloc_dmabuf_ops_attach() 234 for_each_sgtable_sg(sgt, sg, i) { in vb2_vmalloc_dmabuf_ops_attach() 238 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_attach() 255 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_detach() local 260 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_detach() 264 dma_unmap_sgtable(db_attach->dev, sgt, attach->dma_dir, 0); in vb2_vmalloc_dmabuf_ops_detach() 265 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_detach() [all …]
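The attach callback above has to translate a vmalloc() area into an sg_table one page at a time, because vmalloc memory is only virtually contiguous. A sketch of that loop, assuming vaddr is a page-aligned vmalloc() address (the helper name is hypothetical):

```c
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/vmalloc.h>

static int vmalloc_area_to_sgt(struct sg_table *sgt, void *vaddr,
			       unsigned int num_pages)
{
	struct scatterlist *sg;
	int ret, i;

	ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);
	if (ret)
		return ret;

	for_each_sgtable_sg(sgt, sg, i) {
		/* Not physically contiguous: look up each backing page. */
		struct page *page = vmalloc_to_page(vaddr + i * PAGE_SIZE);

		if (!page) {
			sg_free_table(sgt);
			return -ENOMEM;
		}
		sg_set_page(sg, page, PAGE_SIZE, 0);
	}
	return 0;
}
```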
|
| /linux/rust/kernel/ |
| H A D | scatterlist.rs | 184 sgt: NonNull<bindings::sg_table>, field 203 sgt: NonNull<bindings::sg_table>, in new() 212 bindings::dma_map_sgtable(dev.as_raw(), sgt.as_ptr(), dir.into(), 0) in new() 218 sgt, in new() 236 bindings::dma_unmap_sgtable(self.dev.as_raw(), self.sgt.as_ptr(), self.dir.into(), 0) in drop() 272 let sgt = Opaque::zeroed(); in new() localVariable 278 sgt.get(), in new() 288 Ok(Self(sgt)) in new() 321 sgt: RawSGTable, field 366 sgt: unsafe { RawSGTable::new(&mut page_vec, size, max_segment, flags) }?, in new() [all …]
|
| /linux/net/ceph/ |
| H A D | crypto.c | 149 static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg, in setup_sgtable() argument 161 memset(sgt, 0, sizeof(*sgt)); in setup_sgtable() 171 ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS); in setup_sgtable() 177 sgt->sgl = prealloc_sg; in setup_sgtable() 178 sgt->nents = sgt->orig_nents = 1; in setup_sgtable() 181 for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) { in setup_sgtable() 201 static void teardown_sgtable(struct sg_table *sgt) in teardown_sgtable() argument 203 if (sgt->orig_nents > 1) in teardown_sgtable() 204 sg_free_table(sgt); in teardown_sgtable() 211 struct sg_table sgt; in ceph_aes_crypt() local [all …]
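setup_sgtable() skips the allocation entirely in the common single-chunk case by pointing sgt->sgl at a caller-provided scatterlist, and teardown_sgtable() frees only tables that sg_alloc_table() actually allocated. A condensed sketch of that optimization with the chunk accounting elided (helper names are mine):

```c
#include <linux/scatterlist.h>
#include <linux/string.h>

static int setup_small_sgt(struct sg_table *sgt,
			   struct scatterlist *prealloc_sg,
			   void *buf, unsigned int len,
			   unsigned int chunk_cnt)
{
	if (chunk_cnt > 1)
		return sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);

	/* Single chunk: reuse the caller's preallocated scatterlist. */
	memset(sgt, 0, sizeof(*sgt));
	sg_init_one(prealloc_sg, buf, len);
	sgt->sgl = prealloc_sg;
	sgt->nents = sgt->orig_nents = 1;
	return 0;
}

static void teardown_small_sgt(struct sg_table *sgt)
{
	/* Only free what sg_alloc_table() actually allocated. */
	if (sgt->orig_nents > 1)
		sg_free_table(sgt);
}
```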
|
| /linux/drivers/gpu/drm/i915/gem/ |
| H A D | i915_gem_dmabuf.c | 31 struct sg_table *sgt; in i915_gem_map_dma_buf() local 39 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); in i915_gem_map_dma_buf() 40 if (!sgt) { in i915_gem_map_dma_buf() 45 ret = sg_alloc_table(sgt, obj->mm.pages->orig_nents, GFP_KERNEL); in i915_gem_map_dma_buf() 49 dst = sgt->sgl; in i915_gem_map_dma_buf() 55 ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC); in i915_gem_map_dma_buf() 59 return sgt; in i915_gem_map_dma_buf() 62 sg_free_table(sgt); in i915_gem_map_dma_buf() 64 kfree(sgt); in i915_gem_map_dma_buf() 239 struct sg_table *sgt; in i915_gem_object_get_pages_dmabuf() local [all …]
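i915's map_dma_buf copies the object's backing table into a fresh sg_table for each attachment and then maps it for the importer's device, so every attachment owns an independent DMA mapping. A condensed sketch; the listing elides the entry-copy loop, so its shape here is an assumption:

```c
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *copy_and_map_sgt(struct device *dev,
					 struct sg_table *src_sgt,
					 enum dma_data_direction dir)
{
	struct scatterlist *src, *dst;
	struct sg_table *sgt;
	int ret, i;

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table(sgt, src_sgt->orig_nents, GFP_KERNEL);
	if (ret)
		goto err_free;

	/* Copy page/len/offset; DMA addresses come from the mapping below. */
	dst = sgt->sgl;
	for_each_sgtable_sg(src_sgt, src, i) {
		sg_set_page(dst, sg_page(src), src->length, src->offset);
		dst = sg_next(dst);
	}

	ret = dma_map_sgtable(dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret)
		goto err_free_table;

	return sgt;

err_free_table:
	sg_free_table(sgt);
err_free:
	kfree(sgt);
	return ERR_PTR(ret);
}
```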
|
| /linux/kernel/dma/ |
| H A D | mapping.c | 318 int dma_map_sgtable(struct device *dev, struct sg_table *sgt, in dma_map_sgtable() argument 323 nents = __dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir, attrs); in dma_map_sgtable() 326 sgt->nents = nents; in dma_map_sgtable() 506 int dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt, in dma_get_sgtable_attrs() argument 513 return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs() 516 return iommu_dma_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs() 520 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs() 768 struct sg_table *sgt; in alloc_single_sgt() local 771 sgt = kmalloc(sizeof(*sgt), gfp); in alloc_single_sgt() 772 if (!sgt) in alloc_single_sgt() [all …]
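dma_map_sgtable() is the sg_table-aware wrapper: on success it stores the mapped segment count in sgt->nents while sgt->orig_nents keeps the allocated entry count, and unlike dma_map_sg() it reports failure as a negative errno rather than zero. Typical usage might look like this sketch (helper name assumed):

```c
#include <linux/dma-mapping.h>

static int map_table_for_device(struct device *dev, struct sg_table *sgt)
{
	int ret;

	/* On success sgt->nents holds the number of DMA segments, which
	 * may be fewer than sgt->orig_nents if the IOMMU merged entries. */
	ret = dma_map_sgtable(dev, sgt, DMA_TO_DEVICE, 0);
	if (ret)
		return ret;	/* Negative errno, unlike dma_map_sg(). */

	/* ... program the device with the sgt->nents DMA segments ... */

	dma_unmap_sgtable(dev, sgt, DMA_TO_DEVICE, 0);
	return 0;
}
```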
|
| /linux/drivers/media/platform/nvidia/tegra-vde/ |
| H A D | dmabuf-cache.c | 27 struct sg_table *sgt; member 41 dma_buf_unmap_attachment_unlocked(entry->a, entry->sgt, entry->dma_dir); in tegra_vde_release_entry() 72 struct sg_table *sgt; in tegra_vde_dmabuf_cache_map() local 93 *addrp = sg_dma_address(entry->sgt->sgl); in tegra_vde_dmabuf_cache_map() 105 sgt = dma_buf_map_attachment_unlocked(attachment, dma_dir); in tegra_vde_dmabuf_cache_map() 106 if (IS_ERR(sgt)) { in tegra_vde_dmabuf_cache_map() 108 err = PTR_ERR(sgt); in tegra_vde_dmabuf_cache_map() 112 if (!vde->domain && sgt->nents > 1) { in tegra_vde_dmabuf_cache_map() 125 err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size); in tegra_vde_dmabuf_cache_map() 131 *addrp = sg_dma_address(sgt->sgl); in tegra_vde_dmabuf_cache_map() [all …]
|
| /linux/include/linux/ |
| H A D | scatterlist.h | 46 struct sg_table sgt; /* The scatter list table */ member 224 #define for_each_sgtable_sg(sgt, sg, i) \ argument 225 for_each_sg((sgt)->sgl, sg, (sgt)->orig_nents, i) 232 #define for_each_sgtable_dma_sg(sgt, sg, i) \ argument 233 for_each_sg((sgt)->sgl, sg, (sgt)->nents, i) 459 void sg_free_append_table(struct sg_append_table *sgt); 463 int sg_alloc_append_table_from_pages(struct sg_append_table *sgt, 468 int sg_alloc_table_from_pages_segment(struct sg_table *sgt, struct page **pages, 493 static inline int sg_alloc_table_from_pages(struct sg_table *sgt, in sg_alloc_table_from_pages() argument 499 return sg_alloc_table_from_pages_segment(sgt, pages, n_pages, offset, in sg_alloc_table_from_pages() [all …]
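The two iteration macros above are easy to mix up: for_each_sgtable_sg() walks all orig_nents CPU-side entries (pair it with sg_page()/sg_virt()), while for_each_sgtable_dma_sg() walks only the nents entries that carry valid DMA addresses after mapping (pair it with sg_dma_address()/sg_dma_len()). A small sketch contrasting the two views of one mapped table (the debug prints are illustrative):

```c
#include <linux/dma-mapping.h>
#include <linux/printk.h>
#include <linux/scatterlist.h>

static void walk_both_views(struct sg_table *sgt)
{
	struct scatterlist *sg;
	int i;

	/* CPU view: every page-backed entry, valid before and after mapping. */
	for_each_sgtable_sg(sgt, sg, i)
		pr_debug("cpu entry %d: page %p len %u\n",
			 i, sg_page(sg), sg->length);

	/* DMA view: only valid after dma_map_sgtable() succeeded. */
	for_each_sgtable_dma_sg(sgt, sg, i)
		pr_debug("dma entry %d: addr %pad len %u\n",
			 i, &sg_dma_address(sg), sg_dma_len(sg));
}
```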
|
| H A D | intel_th.h | 43 int (*alloc_window)(void *priv, struct sg_table **sgt, 45 void (*free_window)(void *priv, struct sg_table *sgt); 59 int (*ready)(void *priv, struct sg_table *sgt, size_t bytes); 65 void intel_th_msc_window_unlock(struct device *dev, struct sg_table *sgt);
|
| /linux/drivers/gpu/drm/ |
| H A D | drm_gem_shmem_helper.c | 191 drm_prime_gem_destroy(obj, shmem->sgt); in drm_gem_shmem_release() 197 if (shmem->sgt) { in drm_gem_shmem_release() 198 dma_unmap_sgtable(obj->dev->dev, shmem->sgt, in drm_gem_shmem_release() 200 sg_free_table(shmem->sgt); in drm_gem_shmem_release() 201 kfree(shmem->sgt); in drm_gem_shmem_release() 518 dma_unmap_sgtable(dev->dev, shmem->sgt, DMA_BIDIRECTIONAL, 0); in drm_gem_shmem_purge_locked() 519 sg_free_table(shmem->sgt); in drm_gem_shmem_purge_locked() 520 kfree(shmem->sgt); in drm_gem_shmem_purge_locked() 521 shmem->sgt = NULL; in drm_gem_shmem_purge_locked() 740 struct sg_table *sgt; in drm_gem_shmem_get_pages_sgt_locked() local 792 struct sg_table *sgt; in drm_gem_shmem_get_pages_sgt() local 822 drm_gem_shmem_prime_import_sg_table(struct drm_device *dev, struct dma_buf_attachment *attach, struct sg_table *sgt) in drm_gem_shmem_prime_import_sg_table() argument [all …] |
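Both the release and purge paths above tear a mapped table down in the same fixed order: unmap the DMA addresses, free the entry chain, free the table itself, then clear the pointer. A sketch of that teardown as a reusable helper (the helper itself is an assumption, not a shmem API):

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Tear down a mapped, heap-allocated sg_table in the required order. */
static void put_mapped_sgt(struct device *dev, struct sg_table **sgtp)
{
	struct sg_table *sgt = *sgtp;

	if (!sgt)
		return;

	dma_unmap_sgtable(dev, sgt, DMA_BIDIRECTIONAL, 0);
	sg_free_table(sgt);
	kfree(sgt);
	*sgtp = NULL;	/* Mirrors shmem->sgt = NULL in the purge path. */
}
```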
| H A D | drm_gem_dma_helper.c | 236 drm_prime_gem_destroy(gem_obj, dma_obj->sgt); in drm_gem_dma_free() 428 struct sg_table *sgt; in drm_gem_dma_get_sg_table() local 431 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in drm_gem_dma_get_sg_table() 432 if (!sgt) in drm_gem_dma_get_sg_table() 435 ret = dma_get_sgtable(obj->dev->dev, sgt, dma_obj->vaddr, in drm_gem_dma_get_sg_table() 440 return sgt; in drm_gem_dma_get_sg_table() 443 kfree(sgt); in drm_gem_dma_get_sg_table() 468 struct sg_table *sgt) in drm_gem_dma_prime_import_sg_table() argument 473 if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size) in drm_gem_dma_prime_import_sg_table() 481 dma_obj->dma_addr = sg_dma_address(sgt->sgl); in drm_gem_dma_prime_import_sg_table() [all …]
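Coherent allocations have no struct pages to feed sg_alloc_table_from_pages(), so drm_gem_dma_get_sg_table() builds the table from the vaddr/dma_addr pair with dma_get_sgtable(). A sketch of the same pattern (helper name illustrative):

```c
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/slab.h>

static struct sg_table *coherent_buf_to_sgt(struct device *dev, void *vaddr,
					    dma_addr_t dma_addr, size_t size)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	/* Build the table straight from the coherent allocation handle. */
	ret = dma_get_sgtable(dev, sgt, vaddr, dma_addr, size);
	if (ret < 0) {
		kfree(sgt);
		return ERR_PTR(ret);
	}
	return sgt;
}
```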
|
| /linux/drivers/gpu/drm/virtio/ |
| H A D | virtgpu_prime.c | 62 struct sg_table *sgt, in virtgpu_gem_unmap_dma_buf() argument 69 virtio_gpu_vram_unmap_dma_buf(attach->dev, sgt, dir); in virtgpu_gem_unmap_dma_buf() 73 drm_gem_unmap_dma_buf(attach, sgt, dir); in virtgpu_gem_unmap_dma_buf() 152 struct sg_table *sgt; in virtgpu_dma_buf_import_sgt() local 163 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL); in virtgpu_dma_buf_import_sgt() 164 if (IS_ERR(sgt)) in virtgpu_dma_buf_import_sgt() 165 return PTR_ERR(sgt); in virtgpu_dma_buf_import_sgt() 167 *ents = kvmalloc_array(sgt->nents, in virtgpu_dma_buf_import_sgt() 171 dma_buf_unmap_attachment(attach, sgt, DMA_BIDIRECTIONAL); in virtgpu_dma_buf_import_sgt() 175 *nents = sgt->nents; in virtgpu_dma_buf_import_sgt() [all …]
|
| /linux/drivers/dma-buf/ |
| H A D | dma-buf-mapping.c | 64 struct sg_table sgt; member 136 ret = sg_alloc_table(&dma->sgt, nents, GFP_KERNEL | __GFP_ZERO); in dma_buf_phys_vec_to_sgt() 140 sgl = dma->sgt.sgl; in dma_buf_phys_vec_to_sgt() 183 dma->sgt.orig_nents = 0; in dma_buf_phys_vec_to_sgt() 191 return &dma->sgt; in dma_buf_phys_vec_to_sgt() 200 for_each_sgtable_dma_sg(&dma->sgt, sgl, i) in dma_buf_phys_vec_to_sgt() 204 sg_free_table(&dma->sgt); in dma_buf_phys_vec_to_sgt() 222 void dma_buf_free_sgt(struct dma_buf_attachment *attach, struct sg_table *sgt, in dma_buf_free_sgt() argument 225 struct dma_buf_dma *dma = container_of(sgt, struct dma_buf_dma, sgt); in dma_buf_free_sgt() 238 for_each_sgtable_dma_sg(sgt, sgl, i) in dma_buf_free_sgt() [all …]
|
| /linux/drivers/infiniband/core/ |
| H A D | umem_dmabuf.c | 17 struct sg_table *sgt; in ib_umem_dmabuf_map_pages() local 29 if (umem_dmabuf->sgt) in ib_umem_dmabuf_map_pages() 32 sgt = dma_buf_map_attachment(umem_dmabuf->attach, in ib_umem_dmabuf_map_pages() 34 if (IS_ERR(sgt)) in ib_umem_dmabuf_map_pages() 35 return PTR_ERR(sgt); in ib_umem_dmabuf_map_pages() 42 for_each_sgtable_dma_sg(sgt, sg, i) { in ib_umem_dmabuf_map_pages() 65 umem_dmabuf->umem.sgt_append.sgt.sgl = umem_dmabuf->first_sg; in ib_umem_dmabuf_map_pages() 66 umem_dmabuf->umem.sgt_append.sgt.nents = nmap; in ib_umem_dmabuf_map_pages() 67 umem_dmabuf->sgt = sgt; in ib_umem_dmabuf_map_pages() 90 if (!umem_dmabuf->sgt) in ib_umem_dmabuf_unmap_pages() [all …]
|
| /linux/drivers/gpu/drm/amd/amdgpu/ |
| H A D | amdgpu_dma_buf.c | 200 struct sg_table *sgt; in amdgpu_dma_buf_map() local 221 sgt = drm_prime_pages_to_sg(obj->dev, in amdgpu_dma_buf_map() 224 if (IS_ERR(sgt)) in amdgpu_dma_buf_map() 225 return sgt; in amdgpu_dma_buf_map() 227 if (dma_map_sgtable(attach->dev, sgt, dir, in amdgpu_dma_buf_map() 240 dir, &sgt); in amdgpu_dma_buf_map() 248 return sgt; in amdgpu_dma_buf_map() 251 sg_free_table(sgt); in amdgpu_dma_buf_map() 252 kfree(sgt); in amdgpu_dma_buf_map() 266 struct sg_table *sgt, in amdgpu_dma_buf_unmap() argument [all …]
|
| /linux/drivers/gpu/drm/vmwgfx/ |
| H A D | vmwgfx_ttm_buffer.c | 119 __sg_page_iter_start(&viter->iter.base, vsgt->sgt->sgl, in vmw_piter_start() 120 vsgt->sgt->orig_nents, p_offset); in vmw_piter_start() 139 dma_unmap_sgtable(dev, &vmw_tt->sgt, DMA_BIDIRECTIONAL, 0); in vmw_ttm_unmap_from_dma() 140 vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents; in vmw_ttm_unmap_from_dma() 160 return dma_map_sgtable(dev, &vmw_tt->sgt, DMA_BIDIRECTIONAL, 0); in vmw_ttm_map_for_dma() 186 vsgt->sgt = NULL; in vmw_ttm_map_dma() 192 vsgt->sgt = vmw_tt->dma_ttm.sg; in vmw_ttm_map_dma() 194 vsgt->sgt = &vmw_tt->sgt; in vmw_ttm_map_dma() 195 ret = sg_alloc_table_from_pages_segment(&vmw_tt->sgt, in vmw_ttm_map_dma() 218 sg_free_table(vsgt->sgt); in vmw_ttm_map_dma() [all …]
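vmw_ttm_map_dma() uses the _segment variant so no single coalesced entry exceeds the device's DMA segment limit. A sketch, assuming dma_get_max_seg_size() is the right cap for the device in question:

```c
#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>

static int pages_to_sgt_capped(struct device *dev, struct sg_table *sgt,
			       struct page **pages, unsigned int n_pages)
{
	/* Respect the device's DMA segment limit when coalescing pages. */
	return sg_alloc_table_from_pages_segment(sgt, pages, n_pages, 0,
						 (unsigned long)n_pages << PAGE_SHIFT,
						 dma_get_max_seg_size(dev),
						 GFP_KERNEL);
}
```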
|
| /linux/drivers/accel/qaic/ |
| H A D | qaic_data.c | 167 sg_free_table(slice->sgt); in free_slice() 168 kfree(slice->sgt); in free_slice() 178 struct sg_table *sgt; in clone_range_of_sgt_for_slice() local 216 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in clone_range_of_sgt_for_slice() 217 if (!sgt) { in clone_range_of_sgt_for_slice() 222 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); in clone_range_of_sgt_for_slice() 228 for_each_sgtable_dma_sg(sgt, sg, j) { in clone_range_of_sgt_for_slice() 246 *sgt_out = sgt; in clone_range_of_sgt_for_slice() 250 kfree(sgt); in clone_range_of_sgt_for_slice() 308 for_each_sgtable_dma_sg(slice->sgt, sg, i) { in encode_reqs() [all …]
|
| /linux/drivers/fpga/tests/ |
| H A D | fpga-mgr-test.c | 147 static int op_write_sg(struct fpga_manager *mgr, struct sg_table *sgt) in op_write_sg() argument 158 sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG); in op_write_sg() 259 struct sg_table *sgt; in fpga_mgr_test_img_load_sgt() local 265 sgt = kunit_kzalloc(test, sizeof(*sgt), GFP_KERNEL); in fpga_mgr_test_img_load_sgt() 266 KUNIT_ASSERT_NOT_ERR_OR_NULL(test, sgt); in fpga_mgr_test_img_load_sgt() 267 ret = sg_alloc_table(sgt, 1, GFP_KERNEL); in fpga_mgr_test_img_load_sgt() 269 sg_init_one(sgt->sgl, img_buf, IMAGE_SIZE); in fpga_mgr_test_img_load_sgt() 271 ret = kunit_add_action_or_reset(test, sg_free_table_wrapper, sgt); in fpga_mgr_test_img_load_sgt() 274 ctx->img_info->sgt = sgt; in fpga_mgr_test_img_load_sgt()
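op_write_sg() above consumes the table from the CPU side with the sg mapping iterator, which kmaps each page in turn; that is the tool to reach for when a driver needs byte access rather than DMA addresses. A sketch (the checksum is just an illustrative payload):

```c
#include <linux/scatterlist.h>
#include <linux/types.h>

static u32 sum_sgt_bytes(struct sg_table *sgt)
{
	struct sg_mapping_iter miter;
	u32 sum = 0;
	size_t i;

	/* SG_MITER_FROM_SG: read-only; the iterator handles kmap/kunmap. */
	sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);
	while (sg_miter_next(&miter)) {
		const u8 *p = miter.addr;

		for (i = 0; i < miter.length; i++)
			sum += p[i];
	}
	sg_miter_stop(&miter);
	return sum;
}
```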
|
| /linux/drivers/gpu/drm/renesas/rcar-du/ |
| H A D | rcar_du_vsp.c | 267 struct sg_table *sgt = &sg_tables[i]; in rcar_du_vsp_map_fb() local 269 if (gem->sgt) { in rcar_du_vsp_map_fb() 280 ret = sg_alloc_table(sgt, gem->sgt->orig_nents, in rcar_du_vsp_map_fb() 285 src = gem->sgt->sgl; in rcar_du_vsp_map_fb() 286 dst = sgt->sgl; in rcar_du_vsp_map_fb() 287 for (j = 0; j < gem->sgt->orig_nents; ++j) { in rcar_du_vsp_map_fb() 294 ret = dma_get_sgtable(rcdu->dev, sgt, gem->vaddr, in rcar_du_vsp_map_fb() 300 ret = vsp1_du_map_sg(vsp->vsp, sgt); in rcar_du_vsp_map_fb() 302 sg_free_table(sgt); in rcar_du_vsp_map_fb() 311 struct sg_table *sgt = &sg_tables[i]; in rcar_du_vsp_map_fb() local [all …]
|
| /linux/drivers/usb/host/ |
| H A D | xhci-sideband.c | 21 struct sg_table *sgt; in xhci_ring_to_sgtable() local 35 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in xhci_ring_to_sgtable() 36 if (!sgt) { in xhci_ring_to_sgtable() 51 dma_get_sgtable(dev, sgt, seg->trbs, seg->dma, in xhci_ring_to_sgtable() 53 pages[i] = sg_page(sgt->sgl); in xhci_ring_to_sgtable() 54 sg_free_table(sgt); in xhci_ring_to_sgtable() 58 if (sg_alloc_table_from_pages(sgt, pages, n_pages, 0, sz, GFP_KERNEL)) in xhci_ring_to_sgtable() 65 sg_dma_address(sgt->sgl) = ring->first_seg->dma; in xhci_ring_to_sgtable() 67 return sgt; in xhci_ring_to_sgtable() 71 kfree(sgt); in xhci_ring_to_sgtable()
|
| /linux/drivers/spi/ |
| H A D | spi-ep93xx.c | 277 struct sg_table *sgt; in ep93xx_spi_dma_prepare() local 294 sgt = &espi->rx_sgt; in ep93xx_spi_dma_prepare() 301 sgt = &espi->tx_sgt; in ep93xx_spi_dma_prepare() 322 if (nents != sgt->nents) { in ep93xx_spi_dma_prepare() 323 sg_free_table(sgt); in ep93xx_spi_dma_prepare() 325 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); in ep93xx_spi_dma_prepare() 331 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare() 351 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare() 355 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, conf.direction, in ep93xx_spi_dma_prepare() 358 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare() [all …]
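The SPI driver above maps the table with dma_map_sg() and hands the returned mapped count, not the original entry count, to dmaengine_prep_slave_sg(), unmapping again if preparation fails. A condensed sketch of that sequence (helper name and flags are assumptions):

```c
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>

static struct dma_async_tx_descriptor *
prep_sgt_for_dmaengine(struct dma_chan *chan, struct sg_table *sgt,
		       enum dma_transfer_direction dir)
{
	enum dma_data_direction map_dir =
		dir == DMA_MEM_TO_DEV ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
	struct dma_async_tx_descriptor *txd;
	int nents;

	nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, map_dir);
	if (!nents)
		return NULL;

	/* Pass the mapped count, which may be smaller than sgt->nents. */
	txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir,
				      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txd)
		dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, map_dir);

	return txd;
}
```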
|
| /linux/drivers/gpu/drm/omapdrm/ |
| H A D | omap_gem_dmabuf.c | 102 struct sg_table *sgt; in omap_gem_prime_import() local 123 sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE); in omap_gem_prime_import() 124 if (IS_ERR(sgt)) { in omap_gem_prime_import() 125 ret = PTR_ERR(sgt); in omap_gem_prime_import() 129 obj = omap_gem_new_dmabuf(dev, dma_buf->size, sgt); in omap_gem_prime_import() 140 dma_buf_unmap_attachment_unlocked(attach, sgt, DMA_TO_DEVICE); in omap_gem_prime_import()
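omap_gem_prime_import() shows the importer half of the dma-buf contract: attach to the buffer, map the attachment to obtain an sg_table, and unwind in reverse order on failure. A hedged skeleton of that sequence (the helper name is mine; the DMA_TO_DEVICE direction follows the listing):

```c
#include <linux/dma-buf.h>
#include <linux/err.h>

static struct sg_table *import_dmabuf_sgt(struct device *dev,
					  struct dma_buf *dma_buf,
					  struct dma_buf_attachment **attachp)
{
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;

	attach = dma_buf_attach(dma_buf, dev);
	if (IS_ERR(attach))
		return ERR_CAST(attach);

	sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);
	if (IS_ERR(sgt)) {
		dma_buf_detach(dma_buf, attach);
		return sgt;
	}

	*attachp = attach;	/* Needed later for unmap + detach. */
	return sgt;
}
```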
|