Lines Matching refs:xfer (matches shown below are in drivers/dma/dw-edma/dw-edma-core.c)
351 dw_edma_device_transfer(struct dw_edma_transfer *xfer) in dw_edma_device_transfer() argument
353 struct dw_edma_chan *chan = dchan2dw_edma_chan(xfer->dchan); in dw_edma_device_transfer()
354 enum dma_transfer_direction dir = xfer->direction; in dw_edma_device_transfer()
403 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
404 if (!xfer->xfer.cyclic.len || !xfer->xfer.cyclic.cnt) in dw_edma_device_transfer()
406 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
407 if (xfer->xfer.sg.len < 1) in dw_edma_device_transfer()
409 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
410 if (!xfer->xfer.il->numf || xfer->xfer.il->frame_size < 1) in dw_edma_device_transfer()
412 if (!xfer->xfer.il->src_inc || !xfer->xfer.il->dst_inc) in dw_edma_device_transfer()
426 if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
427 src_addr = xfer->xfer.il->src_start; in dw_edma_device_transfer()
428 dst_addr = xfer->xfer.il->dst_start; in dw_edma_device_transfer()
439 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
440 cnt = xfer->xfer.cyclic.cnt; in dw_edma_device_transfer()
441 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
442 cnt = xfer->xfer.sg.len; in dw_edma_device_transfer()
443 sg = xfer->xfer.sg.sgl; in dw_edma_device_transfer()
444 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
445 cnt = xfer->xfer.il->numf * xfer->xfer.il->frame_size; in dw_edma_device_transfer()
446 fsz = xfer->xfer.il->frame_size; in dw_edma_device_transfer()
450 if (xfer->type == EDMA_XFER_SCATTER_GATHER && !sg) in dw_edma_device_transfer()
463 if (xfer->type == EDMA_XFER_CYCLIC) in dw_edma_device_transfer()
464 burst->sz = xfer->xfer.cyclic.len; in dw_edma_device_transfer()
465 else if (xfer->type == EDMA_XFER_SCATTER_GATHER) in dw_edma_device_transfer()
467 else if (xfer->type == EDMA_XFER_INTERLEAVED) in dw_edma_device_transfer()
468 burst->sz = xfer->xfer.il->sgl[i % fsz].size; in dw_edma_device_transfer()
475 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
476 burst->dar = xfer->xfer.cyclic.paddr; in dw_edma_device_transfer()
477 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
487 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
492 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
493 burst->sar = xfer->xfer.cyclic.paddr; in dw_edma_device_transfer()
494 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
504 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
509 if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
511 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
512 struct dma_interleaved_template *il = xfer->xfer.il; in dw_edma_device_transfer()
525 return vchan_tx_prep(&chan->vc, &desc->vd, xfer->flags); in dw_edma_device_transfer()
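Taken together, the accesses above (xfer->type, xfer->xfer.sg.*, xfer->xfer.cyclic.*, xfer->xfer.il->*) imply a small tagged-union transfer descriptor. The following is a sketch reconstructed from those accesses alone; the exact field types, the enum value order, and the helper struct names (dw_edma_sg, dw_edma_cyclic, dw_edma_xfer) are assumptions that this listing does not show.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Reconstructed from the field accesses in the listing above; exact types
 * and declaration order in the driver may differ. */
enum dw_edma_xfer_type {
        EDMA_XFER_SCATTER_GATHER = 0,
        EDMA_XFER_CYCLIC,
        EDMA_XFER_INTERLEAVED,
};

struct dw_edma_sg {                     /* xfer.sg.* (lines 442-443, 544-545) */
        struct scatterlist *sgl;
        unsigned int len;               /* number of sg entries */
};

struct dw_edma_cyclic {                 /* xfer.cyclic.* (lines 440, 464, 476, 493) */
        dma_addr_t paddr;               /* single device/buffer address */
        size_t len;                     /* size of each burst */
        size_t cnt;                     /* number of bursts */
};

union dw_edma_xfer {
        struct dw_edma_sg sg;
        struct dw_edma_cyclic cyclic;
        struct dma_interleaved_template *il;    /* stored as a pointer (lines 427, 445, 512) */
};

struct dw_edma_transfer {
        struct dma_chan *dchan;
        union dw_edma_xfer xfer;
        enum dma_transfer_direction direction;
        unsigned long flags;
        enum dw_edma_xfer_type type;
};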
540 struct dw_edma_transfer xfer; in dw_edma_device_prep_slave_sg() local
542 xfer.dchan = dchan; in dw_edma_device_prep_slave_sg()
543 xfer.direction = direction; in dw_edma_device_prep_slave_sg()
544 xfer.xfer.sg.sgl = sgl; in dw_edma_device_prep_slave_sg()
545 xfer.xfer.sg.len = len; in dw_edma_device_prep_slave_sg()
546 xfer.flags = flags; in dw_edma_device_prep_slave_sg()
547 xfer.type = EDMA_XFER_SCATTER_GATHER; in dw_edma_device_prep_slave_sg()
549 return dw_edma_device_transfer(&xfer); in dw_edma_device_prep_slave_sg()
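dw_edma_device_prep_slave_sg() only packs its arguments into an on-stack dw_edma_transfer and forwards it. Below is a hedged client-side sketch of how such a scatter-gather prepare is normally reached through the generic dmaengine API, assuming the driver registers this function as its device_prep_slave_sg callback; the helper name, the "tx" channel name, and the error handling are placeholders, not part of the driver. Note from line 407 above that an empty scatterlist makes the prepare fail with NULL.

#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Hypothetical client-side helper; not part of the dw-edma driver. */
static int queue_sg_transfer(struct device *dev, struct scatterlist *sgl,
                             unsigned int nents)
{
        struct dma_chan *chan;
        struct dma_async_tx_descriptor *txd;

        chan = dma_request_chan(dev, "tx");     /* placeholder channel name */
        if (IS_ERR(chan))
                return PTR_ERR(chan);

        txd = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
                                      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!txd) {
                dma_release_channel(chan);
                return -EINVAL;
        }

        dmaengine_submit(txd);                  /* queues the vchan descriptor built above */
        dma_async_issue_pending(chan);
        return 0;
}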
558 struct dw_edma_transfer xfer; in dw_edma_device_prep_dma_cyclic() local
560 xfer.dchan = dchan; in dw_edma_device_prep_dma_cyclic()
561 xfer.direction = direction; in dw_edma_device_prep_dma_cyclic()
562 xfer.xfer.cyclic.paddr = paddr; in dw_edma_device_prep_dma_cyclic()
563 xfer.xfer.cyclic.len = len; in dw_edma_device_prep_dma_cyclic()
564 xfer.xfer.cyclic.cnt = count; in dw_edma_device_prep_dma_cyclic()
565 xfer.flags = flags; in dw_edma_device_prep_dma_cyclic()
566 xfer.type = EDMA_XFER_CYCLIC; in dw_edma_device_prep_dma_cyclic()
568 return dw_edma_device_transfer(&xfer); in dw_edma_device_prep_dma_cyclic()
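The cyclic wrapper does the same packing: its second, third and fourth arguments land in cyclic.paddr, cyclic.len and cyclic.cnt (lines 562-564). Note from lines 440 and 464 above that cyclic.cnt drives the number of bursts while cyclic.len becomes the size of each burst, so if this function is wired up as device_prep_dma_cyclic the usual buf_len/period_len convention may not apply directly; verify against the driver before relying on it. A hedged client-side sketch follows; the helper name, direction and callback are placeholders.

#include <linux/dmaengine.h>

/* Hypothetical client-side helper; chan, buf_dma, buf_len, period and the
 * completion callback are supplied by the caller, not names from the driver. */
static int queue_cyclic_transfer(struct dma_chan *chan, dma_addr_t buf_dma,
                                 size_t buf_len, size_t period,
                                 dma_async_tx_callback done, void *ctx)
{
        struct dma_async_tx_descriptor *txd;

        txd = dmaengine_prep_dma_cyclic(chan, buf_dma, buf_len, period,
                                        DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
        if (!txd)
                return -EINVAL;

        txd->callback = done;           /* invoked as work completes */
        txd->callback_param = ctx;
        dmaengine_submit(txd);
        dma_async_issue_pending(chan);
        return 0;
}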
576 struct dw_edma_transfer xfer; in dw_edma_device_prep_interleaved_dma() local
578 xfer.dchan = dchan; in dw_edma_device_prep_interleaved_dma()
579 xfer.direction = ilt->dir; in dw_edma_device_prep_interleaved_dma()
580 xfer.xfer.il = ilt; in dw_edma_device_prep_interleaved_dma()
581 xfer.flags = flags; in dw_edma_device_prep_interleaved_dma()
582 xfer.type = EDMA_XFER_INTERLEAVED; in dw_edma_device_prep_interleaved_dma()
584 return dw_edma_device_transfer(&xfer); in dw_edma_device_prep_interleaved_dma()
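For the interleaved case the wrapper stores the caller's dma_interleaved_template pointer unchanged, and the checks at lines 410-412 above require a non-zero numf, a frame_size of at least 1, and both src_inc and dst_inc set. Below is a hedged sketch of a client building such a template, assuming the function is registered as device_prep_interleaved_dma; the helper name, addresses and sizes are placeholders, and freeing the template right after prepare assumes the driver has fully consumed it by then, as the il-> accesses above suggest.

#include <linux/dmaengine.h>
#include <linux/overflow.h>
#include <linux/slab.h>

/* Hypothetical sketch with one chunk per frame; not part of the driver. */
static struct dma_async_tx_descriptor *
prep_interleaved(struct dma_chan *chan, dma_addr_t src, dma_addr_t dst)
{
        struct dma_interleaved_template *ilt;
        struct dma_async_tx_descriptor *txd;

        ilt = kzalloc(struct_size(ilt, sgl, 1), GFP_KERNEL);
        if (!ilt)
                return NULL;

        ilt->src_start = src;           /* DMA-mapped addresses supplied by caller */
        ilt->dst_start = dst;
        ilt->dir = DMA_MEM_TO_DEV;
        ilt->numf = 8;                  /* frames: must be non-zero (line 410) */
        ilt->frame_size = 1;            /* chunks per frame: must be >= 1 (line 410) */
        ilt->src_inc = true;            /* both increments required (line 412) */
        ilt->dst_inc = true;
        ilt->sgl[0].size = 256;         /* bytes per chunk */
        ilt->sgl[0].icg = 0;            /* no inter-chunk gap */

        txd = dmaengine_prep_interleaved_dma(chan, ilt, DMA_PREP_INTERRUPT);
        kfree(ilt);                     /* assumes the template was consumed during prepare */
        return txd;
}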