| /linux/drivers/dma/sh/ |
| usb-dmac.c |
     55  unsigned int sg_len;  (member)
    263  static int usb_dmac_desc_alloc(struct usb_dmac_chan *chan, unsigned int sg_len,  (in usb_dmac_desc_alloc(), argument)
    269  desc = kzalloc(struct_size(desc, sg, sg_len), gfp);  (in usb_dmac_desc_alloc())
    273  desc->sg_allocated_len = sg_len;  (in usb_dmac_desc_alloc())
    299  unsigned int sg_len, gfp_t gfp)  (in usb_dmac_desc_get(), argument)
    307  if (sg_len <= desc->sg_allocated_len) {  (in usb_dmac_desc_get())
    316  if (!usb_dmac_desc_alloc(chan, sg_len, gfp)) {  (in usb_dmac_desc_get())
    417  unsigned int sg_len, enum dma_transfer_direction dir,  (in usb_dmac_prep_slave_sg(), argument)
    425  if (!sg_len) {  (in usb_dmac_prep_slave_sg())
    427  "%s: bad parameter: len=%d\n", __func__, sg_len);  (in usb_dmac_prep_slave_sg())
    [all …]
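
The usb-dmac matches read as one pattern: the per-segment array sits at the end of the descriptor, struct_size() sizes the allocation (line 269), and the capacity cached at line 273 lets usb_dmac_desc_get() reuse a pooled descriptor whenever a new request fits (line 307). A minimal sketch of that idiom; the my_* names are hypothetical, only the struct_size()/capacity mechanics come from the matches:

    #include <linux/overflow.h>    /* struct_size() */
    #include <linux/slab.h>
    #include <linux/types.h>

    struct my_hw_seg {
        u32 addr;
        u32 len;
    };

    struct my_desc {
        unsigned int sg_allocated_len;    /* capacity of sg[] */
        struct my_hw_seg sg[];            /* one entry per segment */
    };

    static struct my_desc *my_desc_alloc(unsigned int sg_len, gfp_t gfp)
    {
        struct my_desc *desc;

        /* sizeof(*desc) + sg_len * sizeof(desc->sg[0]), overflow-checked */
        desc = kzalloc(struct_size(desc, sg, sg_len), gfp);
        if (!desc)
            return NULL;

        desc->sg_allocated_len = sg_len;
        return desc;
    }

    /* Reuse test as in usb_dmac_desc_get(): a pooled descriptor serves any
     * request that needs no more segments than it was allocated with. */
    static bool my_desc_fits(const struct my_desc *desc, unsigned int sg_len)
    {
        return sg_len <= desc->sg_allocated_len;
    }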
|
| shdma-base.c |
    574  struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr,  (in shdma_prep_sg(), argument)
    584  for_each_sg(sgl, sg, sg_len, i)  (in shdma_prep_sg())
    601  for_each_sg(sgl, sg, sg_len, i) {  (in shdma_prep_sg())
    675  struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len,  (in shdma_prep_slave_sg(), argument)
    690  if (slave_id < 0 || !sg_len) {  (in shdma_prep_slave_sg())
    692  __func__, sg_len, slave_id);  (in shdma_prep_slave_sg())
    698  return shdma_prep_sg(schan, sgl, sg_len, &slave_addr,  (in shdma_prep_slave_sg())
    713  unsigned int sg_len = buf_len / period_len;  (in shdma_prep_dma_cyclic(), local)
    724  if (sg_len > SHDMA_MAX_SG_LEN) {  (in shdma_prep_dma_cyclic())
    726  sg_len, SHDMA_MAX_SG_LEN);  (in shdma_prep_dma_cyclic())
    [all …]
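
Line 713 is the standard dmaengine cyclic derivation: one scatterlist segment per period, capped at the controller limit on line 724. A hedged sketch of just that validation step; the whole-period check is assumed from the usual dmaengine contract rather than visible in the fragments:

    #include <linux/device.h>
    #include <linux/errno.h>

    static int shdma_cyclic_sg_count(struct device *dev, size_t buf_len,
                                     size_t period_len)
    {
        unsigned int sg_len;

        if (!period_len || buf_len % period_len)
            return -EINVAL;               /* assumed: whole periods only */

        sg_len = buf_len / period_len;    /* one segment per period */
        if (sg_len > SHDMA_MAX_SG_LEN) {
            dev_err(dev, "sg length %u exceeds limit %d\n",
                    sg_len, SHDMA_MAX_SG_LEN);
            return -EINVAL;
        }

        return sg_len;
    }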
|
| rcar-dmac.c |
    929  unsigned int sg_len, dma_addr_t dev_addr,  (in rcar_dmac_chan_prep_sg(), argument)
    964  for_each_sg(sgl, sg, sg_len, i) {  (in rcar_dmac_chan_prep_sg())
    1201 unsigned int sg_len, enum dma_transfer_direction dir,  (in rcar_dmac_prep_slave_sg(), argument)
    1207 if (rchan->mid_rid < 0 || !sg_len || !sg_dma_len(sgl)) {  (in rcar_dmac_prep_slave_sg())
    1210 __func__, sg_len, rchan->mid_rid);  (in rcar_dmac_prep_slave_sg())
    1217 return rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, rchan->map.addr,  (in rcar_dmac_prep_slave_sg())
    1231 unsigned int sg_len;  (in rcar_dmac_prep_dma_cyclic(), local)
    1245 sg_len = buf_len / period_len;  (in rcar_dmac_prep_dma_cyclic())
    1246 if (sg_len > RCAR_DMAC_MAX_SG_LEN) {  (in rcar_dmac_prep_dma_cyclic())
    1249 rchan->index, sg_len, RCAR_DMAC_MAX_SG_LEN);  (in rcar_dmac_prep_dma_cyclic())
    [all …]
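
Lines 1207-1210 are the typical prep_slave_sg() precondition check: an unbound channel (mid_rid < 0), an empty scatterlist, or a zero-length first entry each reject the request before any descriptor is built. Reassembled from the fragments; the message body between lines 1207 and 1210 is inferred from the argument list, not shown above:

        if (rchan->mid_rid < 0 || !sg_len || !sg_dma_len(sgl)) {
            dev_warn(chan->device->dev,
                     "%s: bad parameter: len=%d, id=%d\n",
                     __func__, sg_len, rchan->mid_rid);
            return NULL;
        }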
|
| /linux/drivers/dma/ |
| loongson1-apb-dma.c |
    217  struct scatterlist *sgl, unsigned int sg_len,  (in ls1x_dma_prep_lli(), argument)
    244  for_each_sg(sgl, sg, sg_len, i) {  (in ls1x_dma_prep_lli())
    297  unsigned int sg_len, enum dma_transfer_direction dir,  (in ls1x_dma_prep_slave_sg(), argument)
    303  sg_len, flags, dmaengine_get_direction_text(dir));  (in ls1x_dma_prep_slave_sg())
    309  if (ls1x_dma_prep_lli(dchan, desc, sgl, sg_len, dir, false)) {  (in ls1x_dma_prep_slave_sg())
    324  unsigned int sg_len;  (in ls1x_dma_prep_dma_cyclic(), local)
    337  sg_len = buf_len / period_len;  (in ls1x_dma_prep_dma_cyclic())
    338  sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_NOWAIT);  (in ls1x_dma_prep_dma_cyclic())
    342  sg_init_table(sgl, sg_len);  (in ls1x_dma_prep_dma_cyclic())
    343  for (i = 0; i < sg_len; ++i) {  (in ls1x_dma_prep_dma_cyclic())
    [all …]
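
Lines 337-343 show a neat trick: instead of a separate cyclic path, ls1x synthesizes a throwaway scatterlist, one entry per period, and reuses the slave_sg link-list builder (ls1x_dma_prep_lli). A sketch of that construction; the loop body is an assumption, built on the fact that a cyclic buffer arrives as an already-mapped DMA address:

    #include <linux/scatterlist.h>
    #include <linux/slab.h>
    #include <linux/types.h>

    static struct scatterlist *build_cyclic_sgl(dma_addr_t buf_addr,
                                                size_t buf_len,
                                                size_t period_len,
                                                unsigned int *out_len)
    {
        unsigned int sg_len = buf_len / period_len;
        struct scatterlist *sgl;
        unsigned int i;

        sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_NOWAIT);
        if (!sgl)
            return NULL;

        sg_init_table(sgl, sg_len);
        for (i = 0; i < sg_len; i++) {
            /* the buffer is already DMA-mapped, so fill the DMA view
             * of each entry directly */
            sg_dma_address(&sgl[i]) = buf_addr + period_len * i;
            sg_dma_len(&sgl[i]) = period_len;
        }

        *out_len = sg_len;
        return sgl;    /* caller kfree()s once descriptors are built */
    }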
|
| st_fdma.c |
    239  int sg_len)  (in st_fdma_alloc_desc(), argument)
    244  fdesc = kzalloc(struct_size(fdesc, node, sg_len), GFP_NOWAIT);  (in st_fdma_alloc_desc())
    249  fdesc->n_nodes = sg_len;  (in st_fdma_alloc_desc())
    250  for (i = 0; i < sg_len; i++) {  (in st_fdma_alloc_desc())
    455  int sg_len, i;  (in st_fdma_prep_dma_cyclic(), local)
    475  sg_len = len / period_len;  (in st_fdma_prep_dma_cyclic())
    476  fdesc = st_fdma_alloc_desc(fchan, sg_len);  (in st_fdma_prep_dma_cyclic())
    484  for (i = 0; i < sg_len; i++) {  (in st_fdma_prep_dma_cyclic())
    487  hw_node->next = fdesc->node[(i + 1) % sg_len].pdesc;  (in st_fdma_prep_dma_cyclic())
    509  unsigned int sg_len, enum dma_transfer_direction direction,  (in st_fdma_prep_slave_sg(), argument)
    [all …]
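
Line 487 is the cyclic trick itself: each hardware node points at the next, and the modulo wraps the last node back to the first so the controller loops forever (fsl-edma does the identical thing with its TCDs at line 643 below). A minimal sketch of the ring link-up, with hypothetical node types:

    #include <linux/types.h>

    struct my_hw_node {
        u32 next;    /* bus address of the next node's descriptor */
    };

    struct my_node {
        struct my_hw_node *desc;    /* CPU view of the node */
        u32 pdesc;                  /* bus address of desc */
    };

    static void link_ring(struct my_node *node, unsigned int sg_len)
    {
        unsigned int i;

        /* (i + 1) % sg_len sends the final node back to node 0 */
        for (i = 0; i < sg_len; i++)
            node[i].desc->next = node[(i + 1) % sg_len].pdesc;
    }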
|
| fsl-edma-common.c |
    563  int sg_len)  (in fsl_edma_alloc_desc(), argument)
    568  fsl_desc = kzalloc(struct_size(fsl_desc, tcd, sg_len), GFP_NOWAIT);  (in fsl_edma_alloc_desc())
    573  fsl_desc->n_tcds = sg_len;  (in fsl_edma_alloc_desc())
    574  for (i = 0; i < sg_len; i++) {  (in fsl_edma_alloc_desc())
    599  int sg_len, i;  (in fsl_edma_prep_dma_cyclic(), local)
    610  sg_len = buf_len / period_len;  (in fsl_edma_prep_dma_cyclic())
    611  fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len);  (in fsl_edma_prep_dma_cyclic())
    638  for (i = 0; i < sg_len; i++) {  (in fsl_edma_prep_dma_cyclic())
    643  last_sg = fsl_desc->tcd[(i + 1) % sg_len].ptcd;  (in fsl_edma_prep_dma_cyclic())
    678  unsigned int sg_len, enum dma_transfer_direction direction,  (in fsl_edma_prep_slave_sg(), argument)
    [all …]
|
| ste_dma40_ll.c |
    267  int sg_len,  (in d40_phy_sg_to_lli(), argument)
    285  for_each_sg(sg, current_sg, sg_len, i) {  (in d40_phy_sg_to_lli())
    292  if (i == sg_len - 1)  (in d40_phy_sg_to_lli())
    419  int sg_len,  (in d40_log_sg_to_lli(), argument)
    434  for_each_sg(sg, current_sg, sg_len, i) {  (in d40_log_sg_to_lli())
|
| at_hdmac.c |
    979  size_t sg_len;  (in atc_prep_dma_memcpy(), local)
    994  sg_len = DIV_ROUND_UP(len, ATC_BTSIZE_MAX);  (in atc_prep_dma_memcpy())
    995  desc = kzalloc(struct_size(desc, sg, sg_len), GFP_ATOMIC);  (in atc_prep_dma_memcpy())
    998  desc->sglen = sg_len;  (in atc_prep_dma_memcpy())
    1164 unsigned int sg_len, int value,  (in atc_prep_dma_memset_sg(), argument)
    1178 value, sg_len, flags);  (in atc_prep_dma_memset_sg())
    1180 if (unlikely(!sgl || !sg_len)) {  (in atc_prep_dma_memset_sg())
    1194 desc = kzalloc(struct_size(desc, sg, sg_len), GFP_ATOMIC);  (in atc_prep_dma_memset_sg())
    1197 desc->sglen = sg_len;  (in atc_prep_dma_memset_sg())
    1199 for_each_sg(sgl, sg, sg_len, i) {  (in atc_prep_dma_memset_sg())
    [all …]
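
In atc_prep_dma_memcpy() the roles reverse: sg_len is not caller-supplied but derived (line 994), rounding up so a partial tail chunk still gets a descriptor, and the same struct_size() allocation follows (line 995). The arithmetic, with illustrative numbers (the actual ATC_BTSIZE_MAX value is not shown above):

    #include <linux/kernel.h>    /* DIV_ROUND_UP() */

    static size_t memcpy_chunk_count(size_t len, size_t max_chunk)
    {
        /*
         * Round up so the tail is covered, e.g. len = 100000 and
         * max_chunk = 65535 give 2 chunks: 65535 + 34465 bytes.
         */
        return DIV_ROUND_UP(len, max_chunk);
    }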
|
| loongson2-apb-dma.c |
    322  u32 sg_len, enum dma_transfer_direction direction,  (in ls2x_dma_prep_slave_sg(), argument)
    331  if (unlikely(!sg_len || !is_slave_direction(direction)))  (in ls2x_dma_prep_slave_sg())
    338  desc = kzalloc(struct_size(desc, sg, sg_len), GFP_NOWAIT);  (in ls2x_dma_prep_slave_sg())
    342  desc->desc_num = sg_len;  (in ls2x_dma_prep_slave_sg())
    346  for_each_sg(sgl, sg, sg_len, i) {  (in ls2x_dma_prep_slave_sg())
    364  desc->sg[sg_len - 1].hw->ndesc_addr &= ~LDMA_DESC_EN;  (in ls2x_dma_prep_slave_sg())
|
| mxs-dma.c |
    476  unsigned int sg_len, enum dma_transfer_direction direction,  (in mxs_dma_prep_slave_sg(), argument)
    490  if (sg_len + idx > NUM_CCW) {  (in mxs_dma_prep_slave_sg())
    493  sg_len, NUM_CCW);  (in mxs_dma_prep_slave_sg())
    519  for (j = 0; j < sg_len;)  (in mxs_dma_prep_slave_sg())
    529  ccw->bits |= BF_CCW(sg_len, PIO_NUM);  (in mxs_dma_prep_slave_sg())
    534  for_each_sg(sgl, sg, sg_len, i) {  (in mxs_dma_prep_slave_sg())
    555  if (i + 1 == sg_len) {  (in mxs_dma_prep_slave_sg())
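
mxs-dma appends requests to a fixed per-channel array of NUM_CCW command control words, so line 490 checks that the new scatterlist fits behind the idx entries already queued. The shape of that check, condensed from lines 490-493 (dev is a stand-in for the driver's device pointer):

        /* idx = CCWs already occupied; refuse what the ring can't hold */
        if (sg_len + idx > NUM_CCW) {
            dev_err(dev, "maximum number of sg exceeded: %u > %d\n",
                    sg_len, NUM_CCW);
            return NULL;
        }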
|
| bcm2835-dma.c |
    266  unsigned int sg_len)  (in bcm2835_dma_count_frames_for_sg(), argument)
    273  for_each_sg(sgl, sgent, sg_len, i)  (in bcm2835_dma_count_frames_for_sg())
    388  unsigned int sg_len)  (in bcm2835_dma_fill_cb_chain_with_sg(), argument)
    397  for_each_sg(sgl, sgent, sg_len, i) {  (in bcm2835_dma_fill_cb_chain_with_sg())
    642  struct scatterlist *sgl, unsigned int sg_len,  (in bcm2835_dma_prep_slave_sg(), argument)
    675  frames = bcm2835_dma_count_frames_for_sg(c, sgl, sg_len);  (in bcm2835_dma_prep_slave_sg())
    687  sgl, sg_len);  (in bcm2835_dma_prep_slave_sg())
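
bcm2835 sizes its allocation by walking the scatterlist first (line 675): a single sg entry may need several control blocks ("frames") if it exceeds the per-block length limit, so the frame count is not simply sg_len. A sketch of that counting pass; the per-entry rounding is an assumption about what the loop at line 273 accumulates:

    #include <linux/kernel.h>        /* DIV_ROUND_UP() */
    #include <linux/scatterlist.h>

    static size_t count_frames_for_sg(struct scatterlist *sgl,
                                      unsigned int sg_len, size_t max_len)
    {
        struct scatterlist *sgent;
        size_t frames = 0;
        unsigned int i;

        /* each entry contributes one frame per max_len-sized piece */
        for_each_sg(sgl, sgent, sg_len, i)
            frames += DIV_ROUND_UP(sg_dma_len(sgent), max_len);

        return frames;
    }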
|
| /linux/drivers/mmc/host/ |
| uniphier-sd.c |
    109  dma_unmap_sg(mmc_dev(host->mmc), host->sg_ptr, host->sg_len,  (in uniphier_sd_external_dma_callback())
    138  int sg_len;  (in uniphier_sd_external_dma_start(), local)
    151  sg_len = dma_map_sg(mmc_dev(host->mmc), host->sg_ptr, host->sg_len,  (in uniphier_sd_external_dma_start())
    153  if (sg_len == 0)  (in uniphier_sd_external_dma_start())
    156  desc = dmaengine_prep_slave_sg(priv->chan, host->sg_ptr, sg_len,  (in uniphier_sd_external_dma_start())
    173  dma_unmap_sg(mmc_dev(host->mmc), host->sg_ptr, host->sg_len,  (in uniphier_sd_external_dma_start())
    260  int sg_len;  (in uniphier_sd_internal_dma_start(), local)
    265  if (WARN_ON(host->sg_len != 1))  (in uniphier_sd_internal_dma_start())
    279  sg_len = dma_map_sg(mmc_dev(host->mmc), sg, 1, priv->dma_dir);  (in uniphier_sd_internal_dma_start())
    280  if (sg_len == 0)  (in uniphier_sd_internal_dma_start())
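
The uniphier matches (lines 151-173) are the canonical dmaengine consumer sequence, with one subtlety worth underlining: dma_map_sg() may coalesce entries, so its return value, not the original count, is what dmaengine_prep_slave_sg() must receive, while dma_unmap_sg() still takes the original nents. A sketch under those rules; the parameter names stand in for the host's fields:

    #include <linux/dma-mapping.h>
    #include <linux/dmaengine.h>

    static struct dma_async_tx_descriptor *
    map_and_prep(struct device *dev, struct dma_chan *chan,
                 struct scatterlist *sg, unsigned int nents,
                 enum dma_data_direction dir,
                 enum dma_transfer_direction xfer_dir)
    {
        struct dma_async_tx_descriptor *desc;
        int sg_len;

        /* mapped count may be smaller than nents if entries merged */
        sg_len = dma_map_sg(dev, sg, nents, dir);
        if (sg_len == 0)
            return NULL;

        desc = dmaengine_prep_slave_sg(chan, sg, sg_len, xfer_dir,
                                       DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
        if (!desc)
            /* undo the mapping with the *original* nents */
            dma_unmap_sg(dev, sg, nents, dir);

        return desc;
    }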
|
| mmci_stm32_sdmmc.c |
    94   for_each_sg(data->sg, sg, data->sg_len - 1, i) {  (in sdmmc_idma_validate_data())
    140  sg_copy_to_buffer(data->sg, data->sg_len,  (in _sdmmc_idma_prep_data())
    149  data->sg_len,  (in _sdmmc_idma_prep_data())
    179  sg_copy_from_buffer(data->sg, data->sg_len,  (in sdmmc_idma_unprep_data())
    183  dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,  (in sdmmc_idma_unprep_data())
    231  if (!host->variant->dma_lli || data->sg_len == 1 ||  (in sdmmc_idma_start())
    247  for_each_sg(data->sg, sg, data->sg_len, i) {  (in sdmmc_idma_start())
    256  desc[data->sg_len - 1].idmalar &= ~MMCI_STM32_ULA;  (in sdmmc_idma_start())
    282  dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,  (in sdmmc_idma_error())
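
mmci_stm32_sdmmc keeps a bounce path beside its linked-list path: when the scatterlist cannot go to the IDMA engine directly, a write is first gathered into one contiguous buffer (line 140) and a read is scattered back out afterwards (line 179). The two staging calls, sketched; the bounce buffer and length are stand-ins:

    #include <linux/mmc/core.h>
    #include <linux/scatterlist.h>

    /* write path: gather scatterlist -> contiguous bounce buffer */
    static void bounce_gather(struct mmc_data *data, void *bounce, size_t len)
    {
        sg_copy_to_buffer(data->sg, data->sg_len, bounce, len);
    }

    /* read path: scatter bounce buffer -> scatterlist */
    static void bounce_scatter(struct mmc_data *data, void *bounce, size_t len)
    {
        sg_copy_from_buffer(data->sg, data->sg_len, bounce, len);
    }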
|
| tifm_sd.c |
    105  int sg_len;  (member)
    174  if (host->sg_pos == host->sg_len)  (in tifm_sd_transfer_data())
    181  if (host->sg_pos == host->sg_len) {  (in tifm_sd_transfer_data())
    237  if (host->sg_pos == host->sg_len)  (in tifm_sd_bounce_block())
    269  if (host->sg_pos == host->sg_len)  (in tifm_sd_set_dma_data())
    275  if (host->sg_pos == host->sg_len)  (in tifm_sd_set_dma_data())
    283  if (host->sg_pos == host->sg_len)  (in tifm_sd_set_dma_data())
    658  host->sg_len = r_data->sg_len;  (in tifm_sd_request())
    672  host->sg_len = tifm_map_sg(sock, r_data->sg,  (in tifm_sd_request())
    673  r_data->sg_len,  (in tifm_sd_request())
    [all …]
|
| cavium.c |
    377  dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data));  (in finish_dma_single())
    397  dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data));  (in finish_dma_sg())
    403  if (host->use_sg && data->sg_len > 1)  (in finish_dma())
    521  count = dma_map_sg(host->dev, data->sg, data->sg_len,  (in prepare_dma_single())
    558  count = dma_map_sg(host->dev, data->sg, data->sg_len,  (in prepare_dma_sg())
    611  dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data));  (in prepare_dma_sg())
    619  if (host->use_sg && data->sg_len > 1)  (in prepare_dma())
    657  if (!mrq->data || !mrq->data->sg || !mrq->data->sg_len ||  (in cvm_mmc_dma_request())
    709  sg_miter_start(&host->smi, mrq->data->sg, mrq->data->sg_len,  (in do_read_request())
    722  sg_miter_start(smi, mrq->data->sg, mrq->data->sg_len, SG_MITER_FROM_SG);  (in do_write_request())
|
| loongson2-mmc.c |
    361  dma_unmap_sg(mmc_dev(mmc), cmd->data->sg, cmd->data->sg_len,  (in loongson2_mmc_irq_worker())
    630  for_each_sg(cmd->data->sg, sg, cmd->data->sg_len, i) {  (in ls2k0500_mmc_reorder_cmd_data())
    648  ret = dma_map_sg(mmc_dev(mmc), data->sg, data->sg_len,  (in loongson2_mmc_prepare_external_dma())
    660  desc = dmaengine_prep_slave_sg(host->chan, data->sg, data->sg_len,  (in loongson2_mmc_prepare_external_dma())
    672  dma_unmap_sg(mmc_dev(mmc), data->sg, data->sg_len, mmc_get_dma_dir(data));  (in loongson2_mmc_prepare_external_dma())
    763  for_each_sg(cmd->data->sg, sg, cmd->data->sg_len, i) {  (in ls2k2000_mmc_reorder_cmd_data())
    799  ret = dma_map_sg(mmc_dev(mmc), data->sg, data->sg_len,  (in loongson2_mmc_prepare_internal_dma())
    804  for_each_sg(data->sg, sg, data->sg_len, i) {  (in loongson2_mmc_prepare_internal_dma())
|
| mxcmmc.c |
    272  sg_miter_start(&sgm, data->sg, data->sg_len,  (in mxcmci_swap_buffers())
    305  for_each_sg(data->sg, sg, data->sg_len, i) {  (in mxcmci_setup_data())
    323  data->sg_len, host->dma_dir);  (in mxcmci_setup_data())
    324  if (nents != data->sg_len)  (in mxcmci_setup_data())
    328  data->sg, data->sg_len, slave_dirn,  (in mxcmci_setup_data())
    332  dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len,  (in mxcmci_setup_data())
    442  dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len,  (in mxcmci_finish_data())
    600  sg_miter_start(&sgm, data->sg, data->sg_len,  (in mxcmci_transfer_data())
|
| usdhi6rol0.c |
    321  __func__, host->mrq->cmd->opcode, data->sg_len,  (in usdhi6_blk_bounce())
    358  struct scatterlist *sg = data->sg_len > 1 ? host->sg : data->sg;  (in usdhi6_sg_map())
    402  struct scatterlist *sg = data->sg_len > 1 ?  (in usdhi6_sg_unmap())
    544  data->sg_len, DMA_FROM_DEVICE);  (in usdhi6_dma_stop_unmap())
    547  data->sg_len, DMA_TO_DEVICE);  (in usdhi6_dma_stop_unmap())
    587  ret = dma_map_sg(chan->device->dev, sg, data->sg_len, data_dir);  (in usdhi6_dma_setup())
    601  __func__, data->sg_len, ret, cookie, desc);  (in usdhi6_dma_setup())
    631  __func__, data->sg_len, data->blocks, data->blksz);  (in usdhi6_dma_kill())
    911  data ? data->blksz : 0, data ? data->sg_len : 0))  (in usdhi6_request_done())
    1052 "Bad SG of %u: %ux%u @ %u\n", data->sg_len,  (in usdhi6_rq_start())
    [all …]
|
| /linux/net/rds/ |
| ib_frmr.c |
    114  atomic_add(ibmr->sg_len, &pool->free_pinned);  (in rds_ib_free_frmr())
    194  struct scatterlist *sg, unsigned int sg_len)  (in rds_ib_map_frmr(), argument)
    208  ibmr->sg_len = sg_len;  (in rds_ib_map_frmr())
    212  ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len,  (in rds_ib_map_frmr())
    264  ib_dma_unmap_sg(rds_ibdev->dev, ibmr->sg, ibmr->sg_len,  (in rds_ib_map_frmr())
    383  *unpinned += ibmr->sg_len;  (in rds_ib_unreg_frmr())
|
| /linux/drivers/mmc/core/ |
| sdio_ops.c |
    158  data.sg_len = nents;  (in mmc_io_rw_extended())
    160  for_each_sg(data.sg, sg_ptr, data.sg_len, i) {  (in mmc_io_rw_extended())
    167  data.sg_len = 1;  (in mmc_io_rw_extended())
|
| /linux/sound/soc/fsl/ |
| fsl_asrc_m2m.c |
    110  unsigned int sg_len, max_period_size;  (in asrc_dmaconfig(), local)
    154  sg_len = buf_len / max_period_size;  (in asrc_dmaconfig())
    156  sg_len += 1;  (in asrc_dmaconfig())
    158  sg = kmalloc_array(sg_len, sizeof(*sg), GFP_KERNEL);  (in asrc_dmaconfig())
    162  sg_init_table(sg, sg_len);  (in asrc_dmaconfig())
    163  for (i = 0; i < (sg_len - 1); i++) {  (in asrc_dmaconfig())
    170  pair->desc[dir] = dmaengine_prep_slave_sg(chan, sg, sg_len,  (in asrc_dmaconfig())
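
Unlike the DMA cyclic preps above, which require whole periods, asrc_dmaconfig() keeps a partial tail: integer division counts the full periods (line 154) and the remainder earns one extra, shorter segment (line 156). The guard condition is hidden in the fragments and inferred here:

    static unsigned int asrc_sg_count(size_t buf_len, size_t max_period_size)
    {
        unsigned int sg_len = buf_len / max_period_size;

        if (buf_len % max_period_size)    /* inferred guard */
            sg_len += 1;                  /* short tail segment */

        return sg_len;                    /* same as DIV_ROUND_UP() */
    }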
|
| /linux/drivers/rapidio/devices/ |
| tsi721_dma.c |
    455  for_each_sg(desc->sg, sg, desc->sg_len, i) {  (in tsi721_submit_sg())
    458  bdma_chan->id, i, desc->sg_len,  (in tsi721_submit_sg())
    491  desc->sg_len -= i;  (in tsi721_submit_sg())
    521  desc->sg_len = 0;  (in tsi721_submit_sg())
    664  if (desc->sg_len == 0) {  (in tsi721_dma_tasklet())
    827  struct scatterlist *sgl, unsigned int sg_len,  (in tsi721_prep_rio_sg(), argument)
    837  if (!sgl || !sg_len) {  (in tsi721_prep_rio_sg())
    878  desc->sg_len = sg_len;  (in tsi721_prep_rio_sg())
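
tsi721 is the one driver here where sg_len is mutable state: tsi721_submit_sg() writes as many entries as the BD ring accepts, records the leftover by shrinking desc->sg_len (line 491), and the tasklet treats zero (line 664) as fully submitted. The control flow, sketched from the fragments; the surrounding structure is inferred:

        /* in tsi721_submit_sg(): i entries fit this pass */
        desc->sg_len -= i;    /* remainder resumes on the next pass */

        /* in tsi721_dma_tasklet(): */
        if (desc->sg_len == 0) {
            /* every segment is on the ring; the descriptor can
             * complete once the hardware drains it */
        }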
|
| /linux/drivers/dma/hsu/ |
| hsu.c |
    271  unsigned int sg_len, enum dma_transfer_direction direction,  (in hsu_dma_prep_slave_sg(), argument)
    279  desc = hsu_dma_alloc_desc(sg_len);  (in hsu_dma_prep_slave_sg())
    283  for_each_sg(sgl, sg, sg_len, i) {  (in hsu_dma_prep_slave_sg())
    290  desc->nents = sg_len;  (in hsu_dma_prep_slave_sg())
|
| /linux/drivers/staging/greybus/ |
| sdio.c |
    261  unsigned int sg_len = data->sg_len;  (in _gb_sdio_send(), local)
    280  copied = sg_pcopy_to_buffer(sg, sg_len, &request->data[0], len, skip);  (in _gb_sdio_send())
    315  unsigned int sg_len = data->sg_len;  (in _gb_sdio_recv(), local)
    349  copied = sg_pcopy_from_buffer(sg, sg_len, &response->data[0], len,  (in _gb_sdio_recv())
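
Greybus does no DMA at all here: it marshals MMC data into size-limited request messages, and the skip argument of sg_pcopy_to_buffer() (line 280) lets each message resume where the previous one stopped, without an sg_miter. The cursor idiom as a fragment; buf and len are stand-ins for the message payload:

        size_t copied, skip = 0;

        /* per message: copy the next len bytes out of the
         * scatterlist, starting skip bytes in */
        copied = sg_pcopy_to_buffer(sg, sg_len, buf, len, skip);
        skip += copied;    /* the next message resumes here */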
|
| /linux/drivers/dma/amd/qdma/ |
| qdma.h |
    195  u32 sg_len;  (member)
    202  #define QDMA_VDESC_QUEUED(vdesc)	(!(vdesc)->sg_len)
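
qdma closes the list with sg_len as pure bookkeeping: judging by the macro on line 202, the field counts segments not yet pushed to the ring, so reaching zero doubles as the "fully queued" state. An illustrative guess at a consumer loop (queue_one_segment() is hypothetical):

        while (!QDMA_VDESC_QUEUED(vdesc)) {
            queue_one_segment(vdesc);    /* push one segment */
            vdesc->sg_len--;             /* zero == fully queued */
        }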
|