
Searched refs:sglist (Results 1 – 25 of 76) sorted by relevance

/linux/drivers/misc/bcm-vk/
bcm_vk_sg.c
91 dma->sglist = dma_alloc_coherent(dev, in bcm_vk_dma_alloc()
95 if (!dma->sglist) in bcm_vk_dma_alloc()
98 dma->sglist[SGLIST_NUM_SG] = 0; in bcm_vk_dma_alloc()
99 dma->sglist[SGLIST_TOTALSIZE] = vkdata->size; in bcm_vk_dma_alloc()
101 sgdata = (struct _vk_data *)&dma->sglist[SGLIST_VKDATA_START]; in bcm_vk_dma_alloc()
142 dma->sglist[SGLIST_NUM_SG]++; in bcm_vk_dma_alloc()
153 dma->sglist[SGLIST_NUM_SG]++; in bcm_vk_dma_alloc()
157 vkdata->size = (dma->sglist[SGLIST_NUM_SG] * sizeof(*sgdata)) + in bcm_vk_dma_alloc()
163 (u64)dma->sglist, in bcm_vk_dma_alloc()
168 dev_dbg(dev, "i:0x%x 0x%x\n", i, dma->sglist[i]); in bcm_vk_dma_alloc()
[all …]
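
The bcm_vk hits above show a driver carving a single DMA-coherent allocation into a small header (segment count, total payload size) followed by per-segment records. A minimal sketch of that allocation step, with hypothetical SGLIST_* indices and a segment struct modeled loosely on the indexing in the listing, not the driver's exact layout:

/*
 * Sketch only: allocate a coherent buffer whose first words serve as an
 * sglist header, followed by per-segment records. Index names and the
 * segment struct are illustrative.
 */
#include <linux/dma-mapping.h>
#include <linux/types.h>

#define SGLIST_NUM_SG		0	/* header word: segment count */
#define SGLIST_TOTALSIZE	1	/* header word: payload bytes */
#define SGLIST_VKDATA_START	2	/* first per-segment record */

struct vk_seg {				/* hypothetical segment record */
        u64 address;
        u32 size;
} __packed;

static u32 *vk_sglist_alloc(struct device *dev, size_t bytes,
                            dma_addr_t *handle, u32 total_size)
{
        u32 *sglist = dma_alloc_coherent(dev, bytes, handle, GFP_KERNEL);

        if (!sglist)
                return NULL;

        sglist[SGLIST_NUM_SG] = 0;		/* filled in as pages are pinned */
        sglist[SGLIST_TOTALSIZE] = total_size;
        return sglist;
}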
/linux/drivers/net/ethernet/marvell/octeon_ep/
octep_tx.c
70 dma_unmap_single(iq->dev, tx_buffer->sglist[0].dma_ptr[0], in octep_iq_process_completions()
71 tx_buffer->sglist[0].len[3], DMA_TO_DEVICE); in octep_iq_process_completions()
75 dma_unmap_page(iq->dev, tx_buffer->sglist[i >> 2].dma_ptr[i & 3], in octep_iq_process_completions()
76 tx_buffer->sglist[i >> 2].len[3 - (i & 3)], DMA_TO_DEVICE); in octep_iq_process_completions()
131 tx_buffer->sglist[0].dma_ptr[0], in octep_iq_free_pending()
132 tx_buffer->sglist[0].len[3], in octep_iq_free_pending()
137 dma_unmap_page(iq->dev, tx_buffer->sglist[i >> 2].dma_ptr[i & 3], in octep_iq_free_pending()
138 tx_buffer->sglist[i >> 2].len[3 - (i & 3)], DMA_TO_DEVICE); in octep_iq_free_pending()
208 iq->sglist = dma_alloc_coherent(iq->dev, sglist_size, in octep_setup_iq()
210 if (unlikely(!iq->sglist)) { in octep_setup_iq()
[all …]
octep_main.c
846 struct octep_tx_sglist_desc *sglist; in octep_start_xmit() local
895 sglist = tx_buffer->sglist; in octep_start_xmit()
906 memset(sglist, 0, OCTEP_SGLIST_SIZE_PER_PKT); in octep_start_xmit()
907 sglist[0].len[3] = len; in octep_start_xmit()
908 sglist[0].dma_ptr[0] = dma; in octep_start_xmit()
919 sglist[si >> 2].len[3 - (si & 3)] = len; in octep_start_xmit()
920 sglist[si >> 2].dma_ptr[si & 3] = dma; in octep_start_xmit()
969 dma_unmap_single(iq->dev, sglist[0].dma_ptr[0], in octep_start_xmit()
970 sglist[0].len[3], DMA_TO_DEVICE); in octep_start_xmit()
971 sglist[0].len[3] = 0; in octep_start_xmit()
[all …]
octep_tx.h
55 struct octep_tx_sglist_desc *sglist; member
186 struct octep_tx_sglist_desc *sglist; member
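
Taken together, the octep_tx.h members and the octep_start_xmit()/completion hits above show the packing this hardware expects: each octep_tx_sglist_desc holds four DMA pointers, and the paired len[3 - (si & 3)] / dma_ptr[si & 3] indexing suggests the four lengths live in reverse order within their 64-bit word. A minimal sketch of the fill step; the struct layout is an assumption reconstructed from that indexing, not the driver's definition:

/*
 * Sketch: place segment 'si' (0 = skb head, 1..N = page fragments) into a
 * 4-wide descriptor array. Struct layout and names are assumptions for
 * illustration only.
 */
#include <linux/types.h>

struct tx_sg_desc {
        u16 len[4];	/* lengths stored high-to-low within the word */
        u64 dma_ptr[4];	/* one DMA address per length slot */
};

static void tx_sg_fill(struct tx_sg_desc *sglist, int si,
                       dma_addr_t dma, u16 len)
{
        sglist[si >> 2].len[3 - (si & 3)] = len;
        sglist[si >> 2].dma_ptr[si & 3] = dma;
}

The completion path walks the same indexing in reverse, which is why octep_iq_process_completions() above unmaps slot 0 with dma_unmap_single() and the fragment slots with dma_unmap_page().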
/linux/drivers/net/ethernet/marvell/octeon_ep_vf/
octep_vf_tx.c
71 dma_unmap_single(iq->dev, tx_buffer->sglist[0].dma_ptr[0], in octep_vf_iq_process_completions()
72 tx_buffer->sglist[0].len[3], DMA_TO_DEVICE); in octep_vf_iq_process_completions()
76 dma_unmap_page(iq->dev, tx_buffer->sglist[i >> 2].dma_ptr[i & 3], in octep_vf_iq_process_completions()
77 tx_buffer->sglist[i >> 2].len[3 - (i & 3)], DMA_TO_DEVICE); in octep_vf_iq_process_completions()
130 tx_buffer->sglist[0].dma_ptr[0], in octep_vf_iq_free_pending()
131 tx_buffer->sglist[0].len[0], in octep_vf_iq_free_pending()
136 dma_unmap_page(iq->dev, tx_buffer->sglist[i >> 2].dma_ptr[i & 3], in octep_vf_iq_free_pending()
137 tx_buffer->sglist[i >> 2].len[i & 3], DMA_TO_DEVICE); in octep_vf_iq_free_pending()
207 iq->sglist = dma_alloc_coherent(iq->dev, sglist_size, in octep_vf_setup_iq()
209 if (unlikely(!iq->sglist)) { in octep_vf_setup_iq()
[all …]
octep_vf_main.c
600 struct octep_vf_tx_sglist_desc *sglist; in octep_vf_start_xmit() local
650 sglist = tx_buffer->sglist; in octep_vf_start_xmit()
661 memset(sglist, 0, OCTEP_VF_SGLIST_SIZE_PER_PKT); in octep_vf_start_xmit()
662 sglist[0].len[3] = len; in octep_vf_start_xmit()
663 sglist[0].dma_ptr[0] = dma; in octep_vf_start_xmit()
674 sglist[si >> 2].len[3 - (si & 3)] = len; in octep_vf_start_xmit()
675 sglist[si >> 2].dma_ptr[si & 3] = dma; in octep_vf_start_xmit()
717 dma_unmap_single(iq->dev, sglist[0].dma_ptr[0], in octep_vf_start_xmit()
718 sglist[0].len[0], DMA_TO_DEVICE); in octep_vf_start_xmit()
719 sglist[0].len[0] = 0; in octep_vf_start_xmit()
[all …]
octep_vf_tx.h
55 struct octep_vf_tx_sglist_desc *sglist; member
144 struct octep_vf_tx_sglist_desc *sglist; member
/linux/drivers/firmware/efi/
capsule.c
246 efi_capsule_block_desc_t *sglist; in efi_capsule_update() local
248 sglist = kmap_atomic(sg_pages[i]); in efi_capsule_update()
254 sglist[j].length = sz; in efi_capsule_update()
255 sglist[j].data = *pages++; in efi_capsule_update()
262 sglist[j].length = 0; in efi_capsule_update()
265 sglist[j].data = 0; in efi_capsule_update()
267 sglist[j].data = page_to_phys(sg_pages[i + 1]); in efi_capsule_update()
278 efi_capsule_flush_cache_range(sglist, PAGE_SIZE); in efi_capsule_update()
280 kunmap_atomic(sglist); in efi_capsule_update()
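
efi_capsule_update() above fills each page of efi_capsule_block_desc_t entries with {length, physical address} pairs and then terminates the page: a zero length with zero data ends the list, while a zero length whose data field holds page_to_phys() of the next descriptor page chains the pages together. A minimal sketch of that termination step, assuming only the length/data fields visible in the listing:

/*
 * Sketch: terminate one page of EFI capsule block descriptors, either
 * ending the list or chaining to the next descriptor page.
 */
#include <linux/efi.h>
#include <linux/io.h>
#include <linux/mm.h>

static void capsule_terminate_page(efi_capsule_block_desc_t *sglist, int j,
                                   struct page *next_sg_page)
{
        sglist[j].length = 0;			/* continuation entry */
        if (next_sg_page)
                sglist[j].data = page_to_phys(next_sg_page);
        else
                sglist[j].data = 0;		/* end of the whole list */
}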
/linux/drivers/media/pci/intel/ipu6/
ipu6-dma.c
140 struct scatterlist *sglist, in ipu6_dma_sync_sg_for_cpu() argument
146 for_each_sg(sglist, sg, nents, i) in ipu6_dma_sync_sg_for_cpu()
327 struct scatterlist *sglist, in ipu6_dma_unmap_sg() argument
334 PHYS_PFN(sg_dma_address(sglist))); in ipu6_dma_unmap_sg()
346 ipu6_dma_sync_sg_for_cpu(dev, sglist, nents, DMA_BIDIRECTIONAL); in ipu6_dma_unmap_sg()
351 for_each_sg(sglist, sg, nents, i) { in ipu6_dma_unmap_sg()
368 for_each_sg(sglist, sg, count, i) { in ipu6_dma_unmap_sg()
384 dma_unmap_sg_attrs(&pdev->dev, sglist, nents, dir, attrs); in ipu6_dma_unmap_sg()
389 static int ipu6_dma_map_sg(struct device *dev, struct scatterlist *sglist, in ipu6_dma_map_sg() argument
401 for_each_sg(sglist, sg, nents, i) { in ipu6_dma_map_sg()
[all …]
/linux/drivers/media/pci/cx23885/
cx23885-alsa.c
90 buf->sglist = vzalloc(array_size(sizeof(*buf->sglist), buf->nr_pages)); in cx23885_alsa_dma_init()
91 if (NULL == buf->sglist) in cx23885_alsa_dma_init()
94 sg_init_table(buf->sglist, buf->nr_pages); in cx23885_alsa_dma_init()
99 sg_set_page(&buf->sglist[i], pg, PAGE_SIZE, 0); in cx23885_alsa_dma_init()
104 vfree(buf->sglist); in cx23885_alsa_dma_init()
105 buf->sglist = NULL; in cx23885_alsa_dma_init()
116 buf->sglen = dma_map_sg(&dev->pci->dev, buf->sglist, in cx23885_alsa_dma_map()
133 dma_unmap_sg(&dev->pci->dev, buf->sglist, buf->nr_pages, DMA_FROM_DEVICE); in cx23885_alsa_dma_unmap()
140 vfree(buf->sglist); in cx23885_alsa_dma_free()
141 buf->sglist = NULL; in cx23885_alsa_dma_free()
[all …]
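
The cx23885 hits above (and the cx25821, cx88 and saa7134 hits further down) all follow the same recipe for a page-backed capture buffer: vzalloc() an array of struct scatterlist, sg_init_table() it, point one entry at each page with sg_set_page(), then hand the array to dma_map_sg() and keep the returned count in sglen. A self-contained sketch of that recipe (names are illustrative):

/*
 * Sketch: build a scatterlist over an array of pages, one PAGE_SIZE entry
 * per page, and map it for DMA, mirroring the ALSA drivers above.
 */
#include <linux/dma-mapping.h>
#include <linux/overflow.h>
#include <linux/scatterlist.h>
#include <linux/vmalloc.h>

static struct scatterlist *demo_sg_build(struct device *dev,
                                         struct page **pages, int nr_pages,
                                         int *sglen)
{
        struct scatterlist *sglist;
        int i;

        sglist = vzalloc(array_size(sizeof(*sglist), nr_pages));
        if (!sglist)
                return NULL;

        sg_init_table(sglist, nr_pages);
        for (i = 0; i < nr_pages; i++)
                sg_set_page(&sglist[i], pages[i], PAGE_SIZE, 0);

        *sglen = dma_map_sg(dev, sglist, nr_pages, DMA_FROM_DEVICE);
        if (!*sglen) {
                vfree(sglist);
                return NULL;
        }
        return sglist;
}

Teardown is the mirror image seen in the listing: dma_unmap_sg() with the same entry count and direction, then vfree() the array and NULL the pointer.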
/linux/drivers/media/pci/cx25821/
cx25821-alsa.c
55 struct scatterlist *sglist; member
153 buf->sglist = vzalloc(array_size(sizeof(*buf->sglist), buf->nr_pages)); in cx25821_alsa_dma_init()
154 if (NULL == buf->sglist) in cx25821_alsa_dma_init()
157 sg_init_table(buf->sglist, buf->nr_pages); in cx25821_alsa_dma_init()
162 sg_set_page(&buf->sglist[i], pg, PAGE_SIZE, 0); in cx25821_alsa_dma_init()
167 vfree(buf->sglist); in cx25821_alsa_dma_init()
168 buf->sglist = NULL; in cx25821_alsa_dma_init()
179 buf->sglen = dma_map_sg(&dev->pci->dev, buf->sglist, in cx25821_alsa_dma_map()
196 dma_unmap_sg(&dev->pci->dev, buf->sglist, buf->nr_pages, DMA_FROM_DEVICE); in cx25821_alsa_dma_unmap()
203 vfree(buf->sglist); in cx25821_alsa_dma_free()
[all …]
/linux/drivers/parisc/
ccio-dma.c
895 * @sglist: The scatter/gather list to be mapped in the IOMMU.
903 ccio_map_sg(struct device *dev, struct scatterlist *sglist, int nents, in ccio_map_sg() argument
922 sg_dma_address(sglist) = ccio_map_single(dev, in ccio_map_sg()
923 sg_virt(sglist), sglist->length, in ccio_map_sg()
925 sg_dma_len(sglist) = sglist->length; in ccio_map_sg()
930 prev_len += sglist[i].length; in ccio_map_sg()
946 coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, ccio_alloc_range); in ccio_map_sg()
956 filled = iommu_fill_pdir(ioc, sglist, nent in ccio_map_sg()
983 ccio_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents, enum dma_data_direction direction, unsigned long attrs) ccio_unmap_sg() argument
[all …]
sba_iommu.c
942 * @sglist: array of buffer/length pairs
950 sba_map_sg(struct device *dev, struct scatterlist *sglist, int nents, in sba_map_sg() argument
965 sg_dma_address(sglist) = sba_map_single(dev, sg_virt(sglist), in sba_map_sg()
966 sglist->length, direction); in sba_map_sg()
967 sg_dma_len(sglist) = sglist->length; in sba_map_sg()
976 sba_dump_sg(ioc, sglist, nents); in sba_map_sg()
993 iommu_coalesce_chunks(ioc, dev, sglist, nents, sba_alloc_range); in sba_map_sg()
1003 filled = iommu_fill_pdir(ioc, sglist, nent in sba_map_sg()
1035 sba_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents, enum dma_data_direction direction, unsigned long attrs) sba_unmap_sg() argument
[all …]
/linux/drivers/media/pci/tw68/
tw68-risc.c
33 static __le32 *tw68_risc_field(__le32 *rp, struct scatterlist *sglist, in tw68_risc_field() argument
54 sg = sglist; in tw68_risc_field()
130 struct scatterlist *sglist, in tw68_risc_buffer() argument
162 rp = tw68_risc_field(rp, sglist, top_offset, 1, in tw68_risc_buffer()
165 rp = tw68_risc_field(rp, sglist, bottom_offset, 2, in tw68_risc_buffer()
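
tw68_risc_field() above emits one DMA instruction per contiguous chunk while walking the scatterlist, so a single video line can be split across several sg entries. A small sketch of that walk under those assumptions, with a hypothetical emit_chunk() callback standing in for the RISC instruction writer:

/*
 * Sketch: consume 'bytes' starting at byte 'offset' into a mapped
 * scatterlist, splitting the work wherever an sg entry runs out.
 * emit_chunk() is a placeholder for per-chunk work (tw68 emits RISC
 * write instructions at this point).
 */
#include <linux/minmax.h>
#include <linux/scatterlist.h>

static void walk_sg_bytes(struct scatterlist *sglist, unsigned int offset,
                          unsigned int bytes,
                          void (*emit_chunk)(dma_addr_t addr, unsigned int len))
{
        struct scatterlist *sg = sglist;

        /* skip entries that 'offset' jumps over entirely */
        while (offset >= sg_dma_len(sg)) {
                offset -= sg_dma_len(sg);
                sg = sg_next(sg);
        }

        while (bytes) {
                unsigned int chunk = min(bytes, sg_dma_len(sg) - offset);

                emit_chunk(sg_dma_address(sg) + offset, chunk);
                bytes -= chunk;
                offset += chunk;
                if (offset == sg_dma_len(sg)) {	/* entry exhausted */
                        sg = sg_next(sg);
                        offset = 0;
                }
        }
}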
/linux/drivers/media/pci/cx88/
cx88-alsa.c
48 struct scatterlist *sglist; member
292 buf->sglist = vzalloc(array_size(sizeof(*buf->sglist), buf->nr_pages)); in cx88_alsa_dma_init()
293 if (!buf->sglist) in cx88_alsa_dma_init()
296 sg_init_table(buf->sglist, buf->nr_pages); in cx88_alsa_dma_init()
301 sg_set_page(&buf->sglist[i], pg, PAGE_SIZE, 0); in cx88_alsa_dma_init()
306 vfree(buf->sglist); in cx88_alsa_dma_init()
307 buf->sglist = NULL; in cx88_alsa_dma_init()
318 buf->sglen = dma_map_sg(&dev->pci->dev, buf->sglist, in cx88_alsa_dma_map()
335 dma_unmap_sg(&dev->pci->dev, buf->sglist, buf->nr_pages, in cx88_alsa_dma_unmap()
343 vfree(buf->sglist); in cx88_alsa_dma_free()
[all …]
/linux/include/linux/
scatterlist.h
195 #define for_each_sg(sglist, sg, nr, __i) \ argument
196 for (__i = 0, sg = (sglist); __i < (nr); __i++, sg = sg_next(sg))
574 struct scatterlist *sglist, unsigned int nents,
607 #define for_each_sg_page(sglist, piter, nents, pgoffset) \ argument
608 for (__sg_page_iter_start((piter), (sglist), (nents), (pgoffset)); \
622 #define for_each_sg_dma_page(sglist, dma_iter, dma_nents, pgoffset) \ argument
623 for (__sg_page_iter_start(&(dma_iter)->base, sglist, dma_nents, \
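
The for_each_sg() definition above is the canonical way to walk a scatterlist: sg_next() follows chain links between table segments, so plain array indexing is not safe on a chained list. A minimal usage sketch:

/* Sketch: total up the DMA length of the first 'nents' entries. */
#include <linux/scatterlist.h>

static size_t sg_total_dma_len(struct scatterlist *sglist, int nents)
{
        struct scatterlist *sg;
        size_t total = 0;
        int i;

        for_each_sg(sglist, sg, nents, i)
                total += sg_dma_len(sg);

        return total;
}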
/linux/arch/sparc/kernel/
iommu.c
432 static int dma_4u_map_sg(struct device *dev, struct scatterlist *sglist, in dma_4u_map_sg() argument
466 outs = s = segstart = &sglist[0]; in dma_4u_map_sg()
477 for_each_sg(sglist, s, nelems, i) { in dma_4u_map_sg()
555 for_each_sg(sglist, s, nelems, i) { in dma_4u_map_sg()
605 static void dma_4u_unmap_sg(struct device *dev, struct scatterlist *sglist, in dma_4u_unmap_sg() argument
619 ctx = fetch_sg_ctx(iommu, sglist); in dma_4u_unmap_sg()
623 sg = sglist; in dma_4u_unmap_sg()
696 struct scatterlist *sglist, int nelems, in dma_4u_sync_sg_for_cpu() argument
720 iopte = iommu->page_table + ((sglist[0].dma_address - in dma_4u_sync_sg_for_cpu()
726 bus_addr = sglist[0].dma_address & IO_PAGE_MASK; in dma_4u_sync_sg_for_cpu()
[all …]
/linux/arch/powerpc/kernel/
dma-iommu.c
118 static int dma_iommu_map_sg(struct device *dev, struct scatterlist *sglist, in dma_iommu_map_sg() argument
122 return ppc_iommu_map_sg(dev, get_iommu_table_base(dev), sglist, nelems, in dma_iommu_map_sg()
126 static void dma_iommu_unmap_sg(struct device *dev, struct scatterlist *sglist, in dma_iommu_unmap_sg() argument
130 ppc_iommu_unmap_sg(get_iommu_table_base(dev), sglist, nelems, in dma_iommu_unmap_sg()
/linux/block/
blk-merge.c
455 struct scatterlist *sglist) in blk_next_sg() argument
458 return sglist; in blk_next_sg()
472 struct bio_vec *bvec, struct scatterlist *sglist, in blk_bvec_map_sg() argument
495 *sg = blk_next_sg(sg, sglist); in blk_bvec_map_sg()
507 struct scatterlist *sglist, struct scatterlist **sg) in __blk_bvec_map_sg() argument
509 *sg = blk_next_sg(sg, sglist); in __blk_bvec_map_sg()
537 struct scatterlist *sglist, in __blk_bios_map_sg() argument
557 nsegs += __blk_bvec_map_sg(bvec, sglist, sg); in __blk_bios_map_sg()
559 nsegs += blk_bvec_map_sg(q, &bvec, sglist, sg); in __blk_bios_map_sg()
577 struct scatterlist *sglist, struct scatterlist **last_sg) in __blk_rq_map_sg() argument
[all …]
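
blk_next_sg() above hands back the head of the caller-supplied sglist on the first call and advances with sg_next() afterwards; __blk_bvec_map_sg() and blk_bvec_map_sg() use it to append one scatterlist entry per merged segment. A hedged sketch of that append pattern, close to what the listing shows but not a verbatim copy of the block layer helper:

/*
 * Sketch of the "append next entry" pattern: the first call returns the
 * head of the preallocated sglist; later calls clear the previous entry's
 * end marker and move on to the following entry.
 */
#include <linux/scatterlist.h>

static struct scatterlist *next_sg(struct scatterlist **sg,
                                   struct scatterlist *sglist)
{
        if (!*sg)
                return sglist;

        sg_unmark_end(*sg);
        return sg_next(*sg);
}

A caller keeps a running *sg, doing *sg = next_sg(sg, sglist) and then sg_set_page() for each segment it maps.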
/linux/drivers/media/common/saa7146/
saa7146_core.c
140 struct scatterlist *sglist; in vmalloc_to_sg() local
144 sglist = kmalloc_array(nr_pages, sizeof(struct scatterlist), GFP_KERNEL); in vmalloc_to_sg()
145 if (NULL == sglist) in vmalloc_to_sg()
147 sg_init_table(sglist, nr_pages); in vmalloc_to_sg()
154 sg_set_page(&sglist[i], pg, PAGE_SIZE, 0); in vmalloc_to_sg()
156 return sglist; in vmalloc_to_sg()
159 kfree(sglist); in vmalloc_to_sg()
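
vmalloc_to_sg() above builds a scatterlist describing a vmalloc'd buffer: kmalloc_array() the entries, sg_init_table() them, then look up the backing page of every PAGE_SIZE slice with vmalloc_to_page(). A minimal sketch with illustrative names, assuming the buffer really is vmalloc'd (vmalloc_to_page() returns NULL otherwise):

/* Sketch: describe a vmalloc'd buffer with one sg entry per page. */
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

static struct scatterlist *demo_vmalloc_to_sg(unsigned char *virt, int nr_pages)
{
        struct scatterlist *sglist;
        struct page *pg;
        int i;

        sglist = kmalloc_array(nr_pages, sizeof(*sglist), GFP_KERNEL);
        if (!sglist)
                return NULL;

        sg_init_table(sglist, nr_pages);
        for (i = 0; i < nr_pages; i++, virt += PAGE_SIZE) {
                pg = vmalloc_to_page(virt);
                if (!pg)
                        goto err;
                sg_set_page(&sglist[i], pg, PAGE_SIZE, 0);
        }
        return sglist;

err:
        kfree(sglist);
        return NULL;
}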
/linux/drivers/media/pci/saa7134/
saa7134-alsa.c
274 dma->sglist = vzalloc(array_size(sizeof(*dma->sglist), dma->nr_pages)); in saa7134_alsa_dma_init()
275 if (NULL == dma->sglist) in saa7134_alsa_dma_init()
278 sg_init_table(dma->sglist, dma->nr_pages); in saa7134_alsa_dma_init()
283 sg_set_page(&dma->sglist[i], pg, PAGE_SIZE, 0); in saa7134_alsa_dma_init()
288 vfree(dma->sglist); in saa7134_alsa_dma_init()
289 dma->sglist = NULL; in saa7134_alsa_dma_init()
300 dma->sglen = dma_map_sg(&dev->pci->dev, dma->sglist, in saa7134_alsa_dma_map()
317 dma_unmap_sg(&dev->pci->dev, dma->sglist, dma->nr_pages, DMA_FROM_DEVICE); in saa7134_alsa_dma_unmap()
324 vfree(dma->sglist); in saa7134_alsa_dma_free()
325 dma->sglist = NULL; in saa7134_alsa_dma_free()
[all …]
/linux/tools/virtio/linux/
scatterlist.h
78 #define for_each_sg(sglist, sg, nr, __i) \ argument
79 for (__i = 0, sg = (sglist); __i < (nr); __i++, sg = sg_next(sg))
/linux/drivers/staging/media/ipu3/
ipu3-dmamap.c
184 int imgu_dmamap_map_sg(struct imgu_device *imgu, struct scatterlist *sglist, in imgu_dmamap_map_sg() argument
193 for_each_sg(sglist, sg, nents, i) { in imgu_dmamap_map_sg()
216 sglist, nents) < size) in imgu_dmamap_map_sg()
/linux/net/rds/
ib.h
326 struct scatterlist *sglist, in rds_ib_dma_sync_sg_for_cpu() argument
333 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_cpu()
341 struct scatterlist *sglist, in rds_ib_dma_sync_sg_for_device() argument
348 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_device()
/linux/kernel/dma/
debug.h
24 extern void debug_dma_unmap_sg(struct device *dev, struct scatterlist *sglist,
77 struct scatterlist *sglist, in debug_dma_unmap_sg() argument
