/linux/drivers/parisc/
  iommu-helpers.h
      47  BUG_ON(pdirp && (dma_len != sg_dma_len(dma_sg)));  in iommu_fill_pdir()
      51  dma_len = sg_dma_len(startsg);  in iommu_fill_pdir()
      52  sg_dma_len(startsg) = 0;  in iommu_fill_pdir()
      71  sg_dma_len(dma_sg) += startsg->length;  in iommu_fill_pdir()
     125  sg_dma_len(startsg) = 0;  in iommu_coalesce_chunks()
     142  sg_dma_len(startsg) = 0;  in iommu_coalesce_chunks()
     171  sg_dma_len(contig_sg) = dma_len;  in iommu_coalesce_chunks()

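The hits above show the two-pass shape of the parisc helpers: iommu_coalesce_chunks() folds consecutive entries into larger DMA chunks, storing the running total in the chunk head's sg_dma_len() and zeroing it on the folded entries, which iommu_fill_pdir() then skips. A minimal sketch of that bookkeeping, assuming a hypothetical coalesce_chunks() that folds purely by a max_len budget (the real code also checks IOVA alignment and contiguity before folding):

```c
#include <linux/scatterlist.h>

/*
 * Hypothetical sketch: fold consecutive entries into one outgoing DMA
 * chunk of at most max_len bytes, record the chunk's total in the head
 * entry's sg_dma_len(), and zero sg_dma_len() on folded entries so a
 * later pass can skip them.
 */
static void coalesce_chunks(struct scatterlist *sg, int nents,
			    unsigned int max_len)
{
	struct scatterlist *contig_sg = sg;	/* head of current chunk */
	unsigned int dma_len = sg->length;
	int i;

	for (i = 1, sg = sg_next(sg); i < nents; i++, sg = sg_next(sg)) {
		if (dma_len + sg->length > max_len) {
			sg_dma_len(contig_sg) = dma_len;	/* close chunk */
			contig_sg = sg;
			dma_len = 0;
		}
		dma_len += sg->length;
		if (sg != contig_sg)
			sg_dma_len(sg) = 0;	/* folded into current chunk */
	}
	sg_dma_len(contig_sg) = dma_len;	/* close the final chunk */
}
```
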
/linux/drivers/infiniband/core/
  umem_dmabuf.c
      43  if (start < cur + sg_dma_len(sg) && cur < end)  in ib_umem_dmabuf_map_pages()
      45  if (cur <= start && start < cur + sg_dma_len(sg)) {  in ib_umem_dmabuf_map_pages()
      51  sg_dma_len(sg) -= offset;  in ib_umem_dmabuf_map_pages()
      54  if (cur < end && end <= cur + sg_dma_len(sg)) {  in ib_umem_dmabuf_map_pages()
      55  unsigned long trim = cur + sg_dma_len(sg) - end;  in ib_umem_dmabuf_map_pages()
      59  sg_dma_len(sg) -= trim;  in ib_umem_dmabuf_map_pages()
      62  cur += sg_dma_len(sg);  in ib_umem_dmabuf_map_pages()
      97  sg_dma_len(umem_dmabuf->first_sg) +=  in ib_umem_dmabuf_unmap_pages()
     103  sg_dma_len(umem_dmabuf->last_sg) +=  in ib_umem_dmabuf_unmap_pages()

  umem.c
     140  curr_len += sg_dma_len(sg);  in ib_umem_find_best_pgsz()
     141  va += sg_dma_len(sg) - pgoff;  in ib_umem_find_best_pgsz()

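ib_umem_dmabuf_map_pages() clips the DMA-mapped list to the byte range [start, end) by bumping the first entry's address and shrinking the first and last entries' lengths; the += hits at lines 97 and 103 are ib_umem_dmabuf_unmap_pages() restoring the clipped amounts. A hedged sketch of the clipping alone, assuming a hypothetical trim_mapped_sgt() (the in-tree code additionally records first_sg/last_sg and the saved offsets):

```c
#include <linux/scatterlist.h>

/* Hypothetical sketch: clip a DMA-mapped table to [start, end). */
static void trim_mapped_sgt(struct sg_table *sgt, u64 start, u64 end)
{
	struct scatterlist *sg;
	u64 cur = 0;
	int i;

	for_each_sgtable_dma_sg(sgt, sg, i) {
		if (cur <= start && start < cur + sg_dma_len(sg)) {
			u64 offset = start - cur;

			sg_dma_address(sg) += offset;	/* clip the head */
			sg_dma_len(sg) -= offset;
			cur += offset;
		}
		if (cur < end && end <= cur + sg_dma_len(sg)) {
			/* clip the tail and stop */
			sg_dma_len(sg) -= cur + sg_dma_len(sg) - end;
			break;
		}
		cur += sg_dma_len(sg);
	}
}
```
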
/linux/drivers/gpu/drm/i915/
  i915_scatterlist.h
      34  if (dma && s.sgp && sg_dma_len(s.sgp) == 0) {  in __sgt_iter()
      40  s.max += sg_dma_len(s.sgp);  in __sgt_iter()
      57  return sg_dma_len(sg) >> PAGE_SHIFT;  in __sg_dma_page_count()
     131  while (sg && sg_dma_len(sg)) {  in i915_sg_dma_sizes()
     133  GEM_BUG_ON(!IS_ALIGNED(sg_dma_len(sg), PAGE_SIZE));  in i915_sg_dma_sizes()
     134  page_sizes |= sg_dma_len(sg);  in i915_sg_dma_sizes()

  i915_scatterlist.c
      31  sg_dma_len(new_sg) = sg_dma_len(sg);  in i915_sg_trim()
     128  sg_dma_len(sg) = 0;  in i915_rsgt_from_mm_node()
     135  sg_dma_len(sg) += len;  in i915_rsgt_from_mm_node()
     218  sg_dma_len(sg) = 0;  in i915_rsgt_from_buddy_resource()
     225  sg_dma_len(sg) += len;  in i915_rsgt_from_buddy_resource()

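Two idioms stand out here: i915 treats a zero sg_dma_len() as a list terminator (the __sgt_iter() hit at line 34), and i915_sg_dma_sizes() ORs every segment length into one mask whose set bits bound the page sizes usable for the mapping. A minimal sketch of the latter, assuming page-aligned segment lengths as the GEM_BUG_ON at line 133 asserts:

```c
#include <linux/scatterlist.h>

/*
 * Sketch of the i915_sg_dma_sizes() idea: OR the DMA length of every
 * segment together. If, say, every segment is a whole multiple of
 * 2 MiB, no bit below bit 21 is ever set, so 2 MiB pages are usable.
 * A zero sg_dma_len() terminates the walk.
 */
static unsigned int sg_dma_page_sizes(struct scatterlist *sg)
{
	unsigned int page_sizes = 0;

	while (sg && sg_dma_len(sg)) {
		page_sizes |= sg_dma_len(sg);
		sg = sg_next(sg);
	}
	return page_sizes;
}
```
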
/linux/net/rds/
  ib_frmr.c
     134  ret = ib_map_mr_sg_zbva(frmr->mr, ibmr->sg, ibmr->sg_dma_len,  in rds_ib_post_reg_frmr()
     136  if (unlikely(ret != ibmr->sg_dma_len)) {  in rds_ib_post_reg_frmr()
     209  ibmr->sg_dma_len = 0;  in rds_ib_map_frmr()
     211  WARN_ON(ibmr->sg_dma_len);  in rds_ib_map_frmr()
     212  ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len,  in rds_ib_map_frmr()
     214  if (unlikely(!ibmr->sg_dma_len)) {  in rds_ib_map_frmr()
     224  for (i = 0; i < ibmr->sg_dma_len; ++i) {  in rds_ib_map_frmr()
     225  unsigned int dma_len = sg_dma_len(&ibmr->sg[i]);  in rds_ib_map_frmr()
     237  if (i < ibmr->sg_dma_len - 1)  in rds_ib_map_frmr()
     266  ibmr->sg_dma_len = 0;  in rds_ib_map_frmr()
     [all …]

  ib.h
     327  unsigned int sg_dma_len,  in rds_ib_dma_sync_sg_for_cpu() argument
     333  for_each_sg(sglist, sg, sg_dma_len, i) {  in rds_ib_dma_sync_sg_for_cpu()
     335  sg_dma_len(sg), direction);  in rds_ib_dma_sync_sg_for_cpu()
     342  unsigned int sg_dma_len,  in rds_ib_dma_sync_sg_for_device() argument
     348  for_each_sg(sglist, sg, sg_dma_len, i) {  in rds_ib_dma_sync_sg_for_device()
     350  sg_dma_len(sg), direction);  in rds_ib_dma_sync_sg_for_device()

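Two naming collisions are visible here: rds's ibmr carries a struct field named sg_dma_len holding the entry count returned by ib_dma_map_sg(), and the sync helpers in ib.h take a parameter literally named sg_dma_len. Both coexist with the sg_dma_len() macro because a function-like macro only expands where the identifier is followed by a parenthesis. A hedged sketch of the sync loop's shape, with the counter renamed for clarity and plain dma_sync_single_for_cpu() standing in for the InfiniBand-layer wrapper:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sync each mapped segment for CPU access, one entry at a time. */
static void sync_mapped_sg_for_cpu(struct device *dev,
				   struct scatterlist *sglist,
				   unsigned int nents,
				   enum dma_data_direction direction)
{
	struct scatterlist *sg;
	unsigned int i;

	for_each_sg(sglist, sg, nents, i)
		dma_sync_single_for_cpu(dev, sg_dma_address(sg),
					sg_dma_len(sg), direction);
}
```
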
/linux/lib/
  sg_split.c
      36  sglen = mapped ? sg_dma_len(sg) : sg->length;  in sg_calculate_split()
      93  sg_dma_len(out_sg) = 0;  in sg_split_phys()
     112  sg_dma_len(out_sg) = sg_dma_len(in_sg);  in sg_split_mapped()
     115  sg_dma_len(out_sg) -= split->skip_sg0;  in sg_split_mapped()
     119  sg_dma_len(--out_sg) = split->length_last_sg;  in sg_split_mapped()

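The hit at line 36 makes the central distinction of this whole listing explicit: sg->length is the CPU-side byte count fixed when the list is built, while sg_dma_len(sg) is only meaningful after dma_map_sg() and may differ when an IOMMU merges entries. As a one-line illustration:

```c
#include <linux/scatterlist.h>

/*
 * Pick the byte count for the view being split: the DMA view if the
 * list has been through dma_map_sg(), the CPU view otherwise.
 */
static unsigned int split_seg_len(struct scatterlist *sg, bool mapped)
{
	return mapped ? sg_dma_len(sg) : sg->length;
}
```
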
/linux/drivers/media/pci/bt8xx/
  bttv-risc.c
      73  while (offset && offset >= sg_dma_len(sg)) {  in bttv_risc_packed()
      74  offset -= sg_dma_len(sg);  in bttv_risc_packed()
      77  if (bpl <= sg_dma_len(sg)-offset) {  in bttv_risc_packed()
      87  (sg_dma_len(sg)-offset));  in bttv_risc_packed()
      89  todo -= (sg_dma_len(sg)-offset);  in bttv_risc_packed()
      92  while (todo > sg_dma_len(sg)) {  in bttv_risc_packed()
      94  sg_dma_len(sg));  in bttv_risc_packed()
      96  todo -= sg_dma_len(sg);  in bttv_risc_packed()
     176  while (yoffset && yoffset >= sg_dma_len(ysg)) {  in bttv_risc_planar()
     177  yoffset -= sg_dma_len(ysg);  in bttv_risc_planar()
     [all …]

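This is the classic DMA-program pattern that also recurs in cx88-core.c, cx25821-core.c and cobalt-omnitek.c below: skip `offset` bytes across segments, then emit one transfer per segment until a line's worth of bytes (`bpl`) is covered. A hedged sketch, where emit() is a hypothetical stand-in for writing one RISC instruction and the list is assumed long enough (no NULL checks, as befits a sketch):

```c
#include <linux/printk.h>
#include <linux/scatterlist.h>

static void emit(dma_addr_t addr, unsigned int len)
{
	pr_debug("xfer %pad + %u\n", &addr, len);	/* hypothetical sink */
}

static void risc_line(struct scatterlist *sg, unsigned int offset,
		      unsigned int bpl)
{
	/* Skip whole segments covered by the starting offset. */
	while (offset && offset >= sg_dma_len(sg)) {
		offset -= sg_dma_len(sg);
		sg = sg_next(sg);
	}
	if (bpl <= sg_dma_len(sg) - offset) {
		emit(sg_dma_address(sg) + offset, bpl);	/* fits one segment */
		return;
	}
	/* Partial first segment, whole middle segments, then the tail. */
	emit(sg_dma_address(sg) + offset, sg_dma_len(sg) - offset);
	bpl -= sg_dma_len(sg) - offset;
	sg = sg_next(sg);
	while (bpl > sg_dma_len(sg)) {
		emit(sg_dma_address(sg), sg_dma_len(sg));
		bpl -= sg_dma_len(sg);
		sg = sg_next(sg);
	}
	emit(sg_dma_address(sg), bpl);
}
```
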
/linux/drivers/scsi/qla2xxx/
  qla_dsd.h
      15  put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length);  in append_dsd32()
      28  put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length);  in append_dsd64()
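These hits fill hardware DSD (data segment descriptor) entries. A hedged sketch of the 64-bit variant's shape; the struct layout here is illustrative rather than the exact qla_dsd.h definition:

```c
#include <linux/scatterlist.h>
#include <linux/unaligned.h>	/* asm/unaligned.h on older kernels */

/* Illustrative little-endian descriptor: bus address plus length. */
struct dsd64_sketch {
	__le64 address;
	__le32 length;
} __packed;

/* Copy one segment into the descriptor ring and bump the cursor. */
static void append_dsd64_sketch(struct dsd64_sketch **dsd,
				struct scatterlist *sg)
{
	put_unaligned_le64(sg_dma_address(sg), &(*dsd)->address);
	put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length);
	(*dsd)++;
}
```
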
/linux/drivers/media/pci/cx25821/
  cx25821-core.c
    1009  while (offset && offset >= sg_dma_len(sg)) {  in cx25821_risc_field()
    1010  offset -= sg_dma_len(sg);  in cx25821_risc_field()
    1013  if (bpl <= sg_dma_len(sg) - offset) {  in cx25821_risc_field()
    1024  (sg_dma_len(sg) - offset));  in cx25821_risc_field()
    1027  todo -= (sg_dma_len(sg) - offset);  in cx25821_risc_field()
    1030  while (todo > sg_dma_len(sg)) {  in cx25821_risc_field()
    1032  sg_dma_len(sg));  in cx25821_risc_field()
    1035  todo -= sg_dma_len(sg);  in cx25821_risc_field()
    1114  while (offset && offset >= sg_dma_len(sg)) {  in cx25821_risc_field_audio()
    1115  offset -= sg_dma_len(sg);  in cx25821_risc_field_audio()
    [all …]

/linux/drivers/crypto/gemini/
  sl3516-ce-cipher.c
      59  if ((sg_dma_len(sg) % 16) != 0) {  in sl3516_ce_need_fallback()
      75  if ((sg_dma_len(sg) % 16) != 0) {  in sl3516_ce_need_fallback()
     183  if (sg_dma_len(sg) == 0)  in sl3516_ce_cipher()
     186  todo = min(len, sg_dma_len(sg));  in sl3516_ce_cipher()
     205  if (sg_dma_len(sg) == 0)  in sl3516_ce_cipher()
     208  todo = min(len, sg_dma_len(sg));  in sl3516_ce_cipher()
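The crypto engine can only consume whole 16-byte cipher blocks per segment, so sl3516_ce_need_fallback() scans for any misaligned segment length and falls back to a software implementation if one exists; the "& 0xf" hits in bcm-flexrm-mailbox.c at the end of this listing are the same idea. A minimal sketch:

```c
#include <linux/scatterlist.h>

/*
 * Return true if any segment's DMA length is not a whole number of
 * 16-byte cipher blocks, forcing the software fallback path.
 */
static bool any_segment_misaligned(struct scatterlist *sgl,
				   unsigned int nents)
{
	struct scatterlist *sg;
	unsigned int i;

	for_each_sg(sgl, sg, nents, i)
		if (sg_dma_len(sg) % 16)
			return true;
	return false;
}
```
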
/linux/drivers/dma-buf/
  dma-buf-mapping.c
      28  sg_dma_len(sgl) = len;  in fill_sg_entry()
     202  sg_dma_len(sgl), dir, DMA_ATTR_MMIO);  in dma_buf_phys_vec_to_sgt()
     240  sg_dma_len(sgl), dir, DMA_ATTR_MMIO);  in dma_buf_free_sgt()

/linux/drivers/gpu/drm/i915/gt/
  gen8_ppgtt.c
     469  GEM_BUG_ON(sg_dma_len(iter->sg) < I915_GTT_PAGE_SIZE);  in gen8_ppgtt_insert_pte()
     475  if (!iter->sg || sg_dma_len(iter->sg) == 0) {  in gen8_ppgtt_insert_pte()
     481  iter->max = iter->dma + sg_dma_len(iter->sg);  in gen8_ppgtt_insert_pte()
     510  unsigned int rem = sg_dma_len(iter->sg);  in xehp_ppgtt_insert_huge()
     594  rem = sg_dma_len(iter->sg);  in xehp_ppgtt_insert_huge()
     608  } while (iter->sg && sg_dma_len(iter->sg));  in xehp_ppgtt_insert_huge()
     618  unsigned int rem = sg_dma_len(iter->sg);  in gen8_ppgtt_insert_huge()
     661  GEM_BUG_ON(sg_dma_len(iter->sg) < page_size);  in gen8_ppgtt_insert_huge()
     672  rem = sg_dma_len(iter->sg);  in gen8_ppgtt_insert_huge()
     732  } while (iter->sg && sg_dma_len(iter->sg));  in gen8_ppgtt_insert_huge()
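The hits imply an iterator where `dma` is a cursor and `max` marks the end of the current segment (line 481); when the cursor reaches `max`, the iterator refills from the next segment, treating a NULL entry or a zero sg_dma_len() as end of list (lines 475, 608, 732). A hedged sketch of that loop shape, assuming page-aligned segment lengths and a hypothetical write_pte():

```c
#include <linux/printk.h>
#include <linux/scatterlist.h>

struct sgt_dma_sketch {
	struct scatterlist *sg;
	dma_addr_t dma, max;	/* cursor and end of current segment */
};

static void write_pte(dma_addr_t addr)
{
	pr_debug("pte %pad\n", &addr);	/* hypothetical PTE store */
}

static void insert_ptes(struct sgt_dma_sketch *iter)
{
	do {
		write_pte(iter->dma);
		iter->dma += PAGE_SIZE;
		if (iter->dma == iter->max) {	/* segment exhausted */
			iter->sg = sg_next(iter->sg);
			if (!iter->sg || !sg_dma_len(iter->sg))
				break;		/* zero length terminates */
			iter->dma = sg_dma_address(iter->sg);
			iter->max = iter->dma + sg_dma_len(iter->sg);
		}
	} while (1);
}
```
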
/linux/drivers/gpu/drm/nouveau/nvkm/core/
  firmware.c
     153  return sgl ? sg_dma_len(sgl) : 0;  in nvkm_firmware_mem_size()
     208  dma_free_noncoherent(fw->device->dev, sg_dma_len(&fw->mem.sgl),  in nvkm_firmware_dtor()
     253  sg_dma_len(&fw->mem.sgl) = len;  in nvkm_firmware_ctor()

/linux/drivers/media/pci/cx88/
  cx88-core.c
      90  while (offset && offset >= sg_dma_len(sg)) {  in cx88_risc_field()
      91  offset -= sg_dma_len(sg);  in cx88_risc_field()
      98  if (bpl <= sg_dma_len(sg) - offset) {  in cx88_risc_field()
     108  (sg_dma_len(sg) - offset));  in cx88_risc_field()
     110  todo -= (sg_dma_len(sg) - offset);  in cx88_risc_field()
     113  while (todo > sg_dma_len(sg)) {  in cx88_risc_field()
     115  sg_dma_len(sg));  in cx88_risc_field()
     117  todo -= sg_dma_len(sg);  in cx88_risc_field()

/linux/drivers/crypto/ccp/
  ccp-dmaengine.c
     384  src_len = sg_dma_len(src_sg);  in ccp_create_desc()
     387  dst_len = sg_dma_len(dst_sg);  in ccp_create_desc()
     400  src_len = sg_dma_len(src_sg);  in ccp_create_desc()
     414  dst_len = sg_dma_len(dst_sg);  in ccp_create_desc()
     493  sg_dma_len(&dst_sg) = len;  in ccp_prep_dma_memcpy()
     497  sg_dma_len(&src_sg) = len;  in ccp_prep_dma_memcpy()
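ccp_create_desc() walks two mapped lists at once: source and destination segment boundaries need not line up, so each descriptor covers min(remaining src, remaining dst) and whichever side runs dry advances to its next entry. A hedged sketch, with queue_copy() as a hypothetical descriptor writer:

```c
#include <linux/minmax.h>
#include <linux/printk.h>
#include <linux/scatterlist.h>

static void queue_copy(dma_addr_t src, dma_addr_t dst, unsigned int len)
{
	pr_debug("copy %u bytes %pad -> %pad\n", len, &src, &dst);
}

static void create_descs(struct scatterlist *src_sg,
			 struct scatterlist *dst_sg)
{
	unsigned int src_len = sg_dma_len(src_sg);	/* bytes remaining */
	unsigned int dst_len = sg_dma_len(dst_sg);

	while (src_sg && dst_sg) {
		unsigned int len = min(src_len, dst_len);

		/* Offset into each segment is total minus remaining. */
		queue_copy(sg_dma_address(src_sg) + sg_dma_len(src_sg) - src_len,
			   sg_dma_address(dst_sg) + sg_dma_len(dst_sg) - dst_len,
			   len);
		src_len -= len;
		dst_len -= len;
		if (!src_len) {			/* source segment done */
			src_sg = sg_next(src_sg);
			if (src_sg)
				src_len = sg_dma_len(src_sg);
		}
		if (!dst_len) {			/* destination segment done */
			dst_sg = sg_next(dst_sg);
			if (dst_sg)
				dst_len = sg_dma_len(dst_sg);
		}
	}
}
```

The hits at lines 493/497 show the complementary trick for single-buffer memcpy: build a one-entry scatterlist on the stack and assign sg_dma_len() directly so the same descriptor path can be reused.
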
/linux/drivers/dma/
  ste_dma40_ll.c
     287  unsigned int len = sg_dma_len(current_sg);  in d40_phy_sg_to_lli()
     290  total_size += sg_dma_len(current_sg);  in d40_phy_sg_to_lli()
     436  unsigned int len = sg_dma_len(current_sg);  in d40_log_sg_to_lli()
     439  total_size += sg_dma_len(current_sg);  in d40_log_sg_to_lli()

/linux/drivers/media/pci/cobalt/
  cobalt-omnitek.c
     193  bytes = min(sg_dma_len(scatter_list) - offset,  in descriptor_list_create()
     236  bytes = min(sg_dma_len(scatter_list) - offset,  in descriptor_list_create()
     241  if (sg_dma_len(scatter_list) == offset) {  in descriptor_list_create()

/linux/drivers/iommu/
  dma-iommu.c
    1272  unsigned int s_length = sg_dma_len(s);  in __finalise_sg()
    1276  sg_dma_len(s) = 0;  in __finalise_sg()
    1284  sg_dma_len(cur) = s_length;  in __finalise_sg()
    1315  sg_dma_len(cur) = cur_len;  in __finalise_sg()
    1339  if (sg_dma_len(s))  in __invalidate_sg()
    1340  s->length = sg_dma_len(s);  in __invalidate_sg()
    1343  sg_dma_len(s) = 0;  in __invalidate_sg()
    1355  sg_dma_len(s), dir, attrs);  in iommu_dma_unmap_sg_swiotlb()
    1371  sg_dma_len(s) = s->length;  in iommu_dma_map_sg_swiotlb()
    1444  sg_dma_len(s) = sg->length;  in iommu_dma_map_sg()
    [all …]
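__finalise_sg() and __invalidate_sg() implement the provider side of the dma_map_sg() contract that every caller in this listing depends on: the call may return fewer entries than it was given (after IOVA concatenation), only that many entries carry a valid sg_dma_address()/sg_dma_len(), and unused trailing entries are zeroed. The consumer side, as a hedged sketch with a hypothetical program_segment():

```c
#include <linux/dma-mapping.h>
#include <linux/printk.h>
#include <linux/scatterlist.h>

static void program_segment(dma_addr_t addr, unsigned int len)
{
	pr_debug("segment %pad + %u\n", &addr, len);	/* hypothetical */
}

static int map_and_program(struct device *dev, struct scatterlist *sgl,
			   int nents)
{
	struct scatterlist *sg;
	int count, i;

	count = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (!count)
		return -EIO;	/* dma_map_sg() returns 0 on failure */

	/* Iterate only the mapped entries, not the original nents. */
	for_each_sg(sgl, sg, count, i)
		program_segment(sg_dma_address(sg), sg_dma_len(sg));

	/* Unmap with the original nents, per the DMA API rules. */
	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}
```
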
/linux/drivers/xen/
  grant-dma-ops.c
     249  xen_grant_dma_unmap_phys(dev, s->dma_address, sg_dma_len(s), dir,  in xen_grant_dma_unmap_sg()
     269  sg_dma_len(s) = s->length;  in xen_grant_dma_map_sg()
     276  sg_dma_len(sg) = 0;  in xen_grant_dma_map_sg()

  swiotlb-xen.c
     355  xen_swiotlb_unmap_phys(hwdev, sg->dma_address, sg_dma_len(sg),  in xen_swiotlb_unmap_sg()
     374  sg_dma_len(sg) = sg->length;  in xen_swiotlb_map_sg()
     380  sg_dma_len(sgl) = 0;  in xen_swiotlb_map_sg()
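Both Xen implementations show the simplest provider shape: map every entry individually (no concatenation, so the DMA length just mirrors the CPU length), and on failure unwind what was mapped and zero sg_dma_len() on the head entry so the list is recognisably unmapped. A hedged sketch in terms of the generic per-page API:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_sg_one_by_one(struct device *dev, struct scatterlist *sgl,
			     int nents, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i) {
		sg->dma_address = dma_map_page(dev, sg_page(sg), sg->offset,
					       sg->length, dir);
		if (dma_mapping_error(dev, sg->dma_address))
			goto out_unmap;
		sg_dma_len(sg) = sg->length;	/* 1:1, no concatenation */
	}
	return nents;

out_unmap:
	nents = i;	/* entries 0..i-1 were successfully mapped */
	for_each_sg(sgl, sg, nents, i)
		dma_unmap_page(dev, sg_dma_address(sg), sg_dma_len(sg), dir);
	sg_dma_len(sgl) = 0;	/* mark the list unmapped, as the hits show */
	return 0;
}
```
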
/linux/drivers/rapidio/devices/
  tsi721_dma.c
     459  (unsigned long long)sg_dma_address(sg), sg_dma_len(sg));  in tsi721_submit_sg()
     461  if (sg_dma_len(sg) > TSI721_BDMA_MAX_BCOUNT) {  in tsi721_submit_sg()
     473  bcount + sg_dma_len(sg) <= TSI721_BDMA_MAX_BCOUNT) {  in tsi721_submit_sg()
     475  bcount += sg_dma_len(sg);  in tsi721_submit_sg()
     506  bcount = sg_dma_len(sg);  in tsi721_submit_sg()
     523  rio_addr += sg_dma_len(sg);  in tsi721_submit_sg()
     524  next_addr += sg_dma_len(sg);  in tsi721_submit_sg()
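tsi721_submit_sg() adapts segment sizes to the controller in both directions: a segment larger than TSI721_BDMA_MAX_BCOUNT is split across several descriptors, while an adjacent segment that extends the previous descriptor's address range is coalesced into it (the bcount += hit at line 475). A hedged sketch of the splitting half, with emit_desc() as a hypothetical descriptor writer:

```c
#include <linux/minmax.h>
#include <linux/printk.h>
#include <linux/scatterlist.h>

static void emit_desc(u64 addr, u32 bcount)
{
	pr_debug("desc %llx + %u\n", (unsigned long long)addr, bcount);
}

/* Emit one descriptor per max_bcount-sized slice of a segment. */
static void emit_bounded(struct scatterlist *sg, u32 max_bcount)
{
	u64 addr = sg_dma_address(sg);
	u32 left = sg_dma_len(sg);

	while (left) {
		u32 bcount = min(left, max_bcount);

		emit_desc(addr, bcount);
		addr += bcount;
		left -= bcount;
	}
}
```
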
/linux/drivers/crypto/caam/
  sg_sw_qm2.h
      34  ent_len = min_t(int, sg_dma_len(sg), len);  in sg_to_qm_sg()

/linux/drivers/mailbox/
  bcm-flexrm-mailbox.c
     659  if (sg_dma_len(src_sg) & 0xf)  in flexrm_spu_write_descs()
     661  sg_dma_len(src_sg));  in flexrm_spu_write_descs()
     664  sg_dma_len(src_sg)/16);  in flexrm_spu_write_descs()
     669  dst_target = sg_dma_len(src_sg);  in flexrm_spu_write_descs()
     675  if (sg_dma_len(dst_sg) & 0xf)  in flexrm_spu_write_descs()
     677  sg_dma_len(dst_sg));  in flexrm_spu_write_descs()
     680  sg_dma_len(dst_sg)/16);  in flexrm_spu_write_descs()
     685  if (sg_dma_len(dst_sg) < dst_target)  in flexrm_spu_write_descs()
     686  dst_target -= sg_dma_len(dst_sg);  in flexrm_spu_write_descs()