
Searched refs:vchan (Results 1 – 25 of 34) sorted by relevance
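Nearly every hit below comes from a dmaengine driver built on the kernel's virt-dma helpers (drivers/dma/virt-dma.h). "vchan" names either the embedded struct virt_dma_chan (fsl-qdma, stm32-*, xdma, ...) or the driver's own virtual-channel struct paired with a physical channel (owl-dma, sun4i/sun6i, pxa_dma, mmp_pdma). A minimal sketch of the shared embedding idiom; the foo_* names here and in the sketches further down are hypothetical, not taken from any driver in this listing:

#include <linux/dmaengine.h>
#include <linux/slab.h>
#include "virt-dma.h"	/* drivers/dma/virt-dma.h */

struct foo_desc {
	struct virt_dma_desc vdesc;	/* generic descriptor state; embedded */
	/* hardware-specific fields elided */
};

struct foo_chan {
	struct virt_dma_chan vchan;	/* generic channel state; embedded */
	struct foo_desc *desc;		/* descriptor currently on hardware */
};

static inline struct foo_chan *to_foo_chan(struct dma_chan *c)
{
	/* c is the dma_chan embedded at vchan.chan, so one container_of()
	 * recovers the driver's channel structure. */
	return container_of(c, struct foo_chan, vchan.chan);
}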


/linux/drivers/dma/
owl-dma.c:192 struct owl_dma_vchan *vchan; member
381 static inline int owl_dma_cfg_lli(struct owl_dma_vchan *vchan, in owl_dma_cfg_lli() argument
388 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_cfg_lli()
401 mode |= OWL_DMA_MODE_TS(vchan->drq) in owl_dma_cfg_lli()
414 mode |= OWL_DMA_MODE_TS(vchan->drq) in owl_dma_cfg_lli()
469 struct owl_dma_vchan *vchan) in owl_dma_get_pchan() argument
479 if (!pchan->vchan) { in owl_dma_get_pchan()
480 pchan->vchan = vchan; in owl_dma_get_pchan()
520 pchan->vchan = NULL; in owl_dma_terminate_pchan()
535 static int owl_dma_start_next_txd(struct owl_dma_vchan *vchan) in owl_dma_start_next_txd() argument
[all …]
sun6i-dma.c:181 struct sun6i_vchan *vchan; member
396 static inline void sun6i_dma_dump_lli(struct sun6i_vchan *vchan, in sun6i_dma_dump_lli() argument
400 dev_dbg(chan2dev(&vchan->vc.chan), in sun6i_dma_dump_lli()
435 static int sun6i_dma_start_desc(struct sun6i_vchan *vchan) in sun6i_dma_start_desc() argument
437 struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vchan->vc.chan.device); in sun6i_dma_start_desc()
438 struct virt_dma_desc *desc = vchan_next_desc(&vchan->vc); in sun6i_dma_start_desc()
439 struct sun6i_pchan *pchan = vchan->phy; in sun6i_dma_start_desc()
456 sun6i_dma_dump_lli(vchan, pchan->desc->v_lli, pchan->desc->p_lli); in sun6i_dma_start_desc()
461 vchan->irq_type = vchan->cyclic ? DMA_IRQ_PKG : DMA_IRQ_QUEUE; in sun6i_dma_start_desc()
466 irq_val |= vchan->irq_type << (irq_offset * DMA_IRQ_CHAN_WIDTH); in sun6i_dma_start_desc()
[all …]
sun4i-dma.c:188 struct sun4i_dma_vchan *vchan; member
303 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); in sun4i_dma_free_chan_resources() local
305 vchan_free_chan_resources(&vchan->vc); in sun4i_dma_free_chan_resources()
309 struct sun4i_dma_vchan *vchan) in find_and_use_pchan() argument
319 if (vchan->is_dedicated) { in find_and_use_pchan()
330 pchan->vchan = vchan; in find_and_use_pchan()
347 pchan->vchan = NULL; in release_pchan()
411 struct sun4i_dma_vchan *vchan) in __execute_vchan_pending() argument
419 lockdep_assert_held(&vchan->vc.lock); in __execute_vchan_pending()
422 pchan = find_and_use_pchan(priv, vchan); in __execute_vchan_pending()
[all …]
st_fdma.c:25 return container_of(c, struct st_fdma_chan, vchan.chan); in to_st_fdma_chan()
80 vdesc = vchan_next_desc(&fchan->vchan); in st_fdma_xfer_desc()
86 cmd = FDMA_CMD_START(fchan->vchan.chan.chan_id); in st_fdma_xfer_desc()
95 dev_dbg(fchan->fdev->dev, "start chan:%d\n", fchan->vchan.chan.chan_id); in st_fdma_xfer_desc()
102 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_ch_sta_update()
140 spin_lock(&fchan->vchan.lock); in st_fdma_irq_handler()
158 spin_unlock(&fchan->vchan.lock); in st_fdma_irq_handler()
283 fchan->vchan.chan.chan_id, fchan->cfg.type); in st_fdma_alloc_chan_res()
295 __func__, fchan->vchan.chan.chan_id); in st_fdma_free_chan_res()
300 spin_lock_irqsave(&fchan->vchan.lock, flags); in st_fdma_free_chan_res()
[all …]
loongson2-apb-dma.c:124 struct virt_dma_chan vchan; member
156 return container_of(chan, struct ls2x_dma_chan, vchan.chan); in to_ldma_chan()
186 struct ls2x_dma_priv *priv = to_ldma_priv(lchan->vchan.chan.device); in ls2x_dma_write_cmd()
196 struct ls2x_dma_priv *priv = to_ldma_priv(lchan->vchan.chan.device); in ls2x_dma_start_transfer()
202 vdesc = vchan_next_desc(&lchan->vchan); in ls2x_dma_start_transfer()
367 return vchan_tx_prep(&lchan->vchan, &desc->vdesc, flags); in ls2x_dma_prep_slave_sg()
435 return vchan_tx_prep(&lchan->vchan, &desc->vdesc, flags); in ls2x_dma_prep_dma_cyclic()
466 spin_lock_irqsave(&lchan->vchan.lock, flags); in ls2x_dma_issue_pending()
467 if (vchan_issue_pending(&lchan->vchan) && !lchan->desc) in ls2x_dma_issue_pending()
469 spin_unlock_irqrestore(&lchan->vchan.lock, flags); in ls2x_dma_issue_pending()
[all …]
idma64.c:107 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_stop_transfer()
114 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_start_transfer()
118 vdesc = vchan_next_desc(&idma64c->vchan); in idma64_start_transfer()
143 stat = this_cpu_ptr(idma64c->vchan.chan.local); in idma64_chan_irq()
145 spin_lock(&idma64c->vchan.lock); in idma64_chan_irq()
163 spin_unlock(&idma64c->vchan.lock); in idma64_chan_irq()
334 return vchan_tx_prep(&idma64c->vchan, &desc->vdesc, flags); in idma64_prep_slave_sg()
342 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_issue_pending()
343 if (vchan_issue_pending(&idma64c->vchan) && !idma64c->desc) in idma64_issue_pending()
345 spin_unlock_irqrestore(&idma64c->vchan.lock, flags); in idma64_issue_pending()
[all …]
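idma64_issue_pending() above shows the canonical issue_pending shape, repeated almost verbatim in ls2x, hsu, and jz4780: take vchan.lock, let vchan_issue_pending() move submitted descriptors to the issued list, and kick the hardware only if it is idle. A hedged sketch using the hypothetical foo_chan from the top of the listing:

static void foo_issue_pending(struct dma_chan *c)
{
	struct foo_chan *fc = to_foo_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&fc->vchan.lock, flags);
	/* vchan_issue_pending() splices desc_submitted onto desc_issued
	 * and returns true if anything is now pending. */
	if (vchan_issue_pending(&fc->vchan) && !fc->desc)
		foo_start_transfer(fc);	/* hypothetical: program the hardware */
	spin_unlock_irqrestore(&fc->vchan.lock, flags);
}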
fsl-edma-common.c:49 spin_lock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
53 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
69 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
105 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
139 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
185 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
195 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
242 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
246 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
247 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
[all …]
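fsl_edma_terminate_all() above follows the standard teardown order, also visible in axi-dmac, stm32-dma, and edma further down: stop the hardware and collect the outstanding descriptors under vchan.lock, then free them after dropping it, so the desc_free callbacks never run with the channel lock held. Sketched with the same hypothetical names:

static int foo_terminate_all(struct dma_chan *c)
{
	struct foo_chan *fc = to_foo_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&fc->vchan.lock, flags);
	foo_hw_stop(fc);		/* hypothetical: halt the channel */
	fc->desc = NULL;
	/* Splice every allocated/submitted/issued/completed descriptor
	 * onto a private list... */
	vchan_get_all_descriptors(&fc->vchan, &head);
	spin_unlock_irqrestore(&fc->vchan.lock, flags);

	/* ...and free them with the lock dropped. */
	vchan_dma_desc_free_list(&fc->vchan, &head);

	return 0;
}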
dma-jz4780.c:130 struct virt_dma_chan vchan; member
167 return container_of(chan, struct jz4780_dma_chan, vchan.chan); in to_jz4780_dma_chan()
179 return container_of(jzchan->vchan.chan.device, struct jz4780_dma_dev, in jz4780_dma_chan_parent()
403 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_slave_sg()
455 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_cyclic()
483 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_memcpy()
494 vdesc = vchan_next_desc(&jzchan->vchan); in jz4780_dma_begin()
566 spin_lock_irqsave(&jzchan->vchan.lock, flags); in jz4780_dma_issue_pending()
568 if (vchan_issue_pending(&jzchan->vchan) && !jzchan->desc) in jz4780_dma_issue_pending()
571 spin_unlock_irqrestore(&jzchan->vchan.lock, flags); in jz4780_dma_issue_pending()
[all …]
fsl-qdma.c:184 struct virt_dma_chan vchan; member
304 return container_of(chan, struct fsl_qdma_chan, vchan.chan); in to_fsl_qdma_chan()
321 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_qdma_free_chan_resources()
322 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_qdma_free_chan_resources()
323 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_qdma_free_chan_resources()
325 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_qdma_free_chan_resources()
732 spin_lock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete()
735 spin_unlock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete()
988 return vchan_tx_prep(&fsl_chan->vchan, &fsl_comp->vdesc, flags); in fsl_qdma_prep_memcpy()
1002 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_qdma_enqueue_desc()
[all …]
dma-axi-dmac.c:146 struct virt_dma_chan vchan; member
179 return container_of(chan->vchan.chan.device, struct axi_dmac, in chan_to_axi_dmac()
185 return container_of(c, struct axi_dmac_chan, vchan.chan); in to_axi_dmac_chan()
246 vdesc = vchan_next_desc(&chan->vchan); in axi_dmac_start_transfer()
473 spin_lock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
484 spin_unlock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
496 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
499 vchan_get_all_descriptors(&chan->vchan, &head); in axi_dmac_terminate_all()
501 spin_unlock_irqrestore(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
503 vchan_dma_desc_free_list(&chan->vchan, &head); in axi_dmac_terminate_all()
[all …]
pxa_dma.c:101 struct pxad_chan *vchan; member
152 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
160 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
167 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
389 if (!phy->vchan) { in lookup_phy()
390 phy->vchan = pchan; in lookup_phy()
424 chan->phy->vchan = NULL; in pxad_free_phy()
454 if (!phy->vchan) in phy_enable()
457 dev_dbg(&phy->vchan->vc.chan.dev->device, in phy_enable()
461 pdev = to_pxad_dev(phy->vchan->vc.chan.device); in phy_enable()
[all …]
mmp_pdma.c:134 struct mmp_pdma_chan *vchan; member
299 if (!phy->vchan) in enable_chan()
302 pdev = to_mmp_pdma_dev(phy->vchan->chan.device); in enable_chan()
304 reg = DRCMR(phy->vchan->drcmr); in enable_chan()
308 if (phy->vchan->byte_align) in enable_chan()
329 if (phy->vchan) { in disable_chan()
332 pdev = to_mmp_pdma_dev(phy->vchan->chan.device); in disable_chan()
352 if ((dcsr & DCSR_BUSERR) && (phy->vchan)) in clear_chan_irq()
353 dev_warn(phy->vchan->dev, "DCSR_BUSERR\n"); in clear_chan_irq()
365 tasklet_schedule(&phy->vchan->tasklet); in mmp_pdma_chan_handler()
[all …]
/linux/drivers/dma/sf-pdma/
sf-pdma.c:47 return container_of(dchan, struct sf_pdma_chan, vchan.chan); in to_sf_pdma_chan()
103 desc->async_tx = vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in sf_pdma_prep_dma_memcpy()
105 spin_lock_irqsave(&chan->vchan.lock, iflags); in sf_pdma_prep_dma_memcpy()
107 spin_unlock_irqrestore(&chan->vchan.lock, iflags); in sf_pdma_prep_dma_memcpy()
146 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
150 vchan_get_all_descriptors(&chan->vchan, &head); in sf_pdma_free_chan_resources()
152 spin_unlock_irqrestore(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
153 vchan_dma_desc_free_list(&chan->vchan, &head); in sf_pdma_free_chan_resources()
166 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_desc_residue()
168 list_for_each_entry(vd, &chan->vchan.desc_submitted, node) in sf_pdma_desc_residue()
[all …]
/linux/drivers/dma/hsu/
hsu.c:121 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
168 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
170 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
222 stat = this_cpu_ptr(hsuc->vchan.chan.local); in hsu_dma_do_irq()
224 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
238 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
295 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
303 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
304 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
306 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
[all …]
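hsu_dma_do_irq() above, like the st_fdma and stm32-dma3 handlers earlier, completes the active descriptor under vchan.lock; vchan_cookie_complete() requires that lock and defers the client callback to the virt-dma tasklet rather than running it in interrupt context. A sketch, assuming the hypothetical foo_desc embeds its struct virt_dma_desc as vdesc:

static irqreturn_t foo_irq_handler(int irq, void *dev_id)
{
	struct foo_chan *fc = dev_id;

	spin_lock(&fc->vchan.lock);
	if (fc->desc) {
		/* Marks the cookie complete, moves the descriptor to
		 * desc_completed and schedules the virt-dma tasklet,
		 * which runs the client callback later. */
		vchan_cookie_complete(&fc->desc->vdesc);
		fc->desc = NULL;
		foo_start_transfer(fc);	/* hypothetical: start the next txd */
	}
	spin_unlock(&fc->vchan.lock);

	return IRQ_HANDLED;
}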
hsu.h:87 struct virt_dma_chan vchan; member
100 return container_of(chan, struct hsu_dma_chan, vchan.chan); in to_hsu_dma_chan()
/linux/drivers/dma/lgm/
lgm-dma.c:195 struct virt_dma_chan vchan; member
292 return container_of(chan, struct ldma_chan, vchan.chan); in to_ldma_chan()
521 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cctrl_cfg()
550 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_init()
578 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_set_class()
596 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_on()
615 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_off()
638 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_hw_cfg()
662 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_cfg()
696 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_reset()
[all …]
/linux/drivers/dma/stm32/
stm32-dma.c:210 struct virt_dma_chan vchan; member
237 return container_of(chan->vchan.chan.device, struct stm32_dma_device, in stm32_dma_get_dev()
243 return container_of(c, struct stm32_dma_chan, vchan.chan); in to_stm32_dma_chan()
253 return &chan->vchan.chan.dev->device; in chan2dev()
505 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
515 vchan_get_all_descriptors(&chan->vchan, &head); in stm32_dma_terminate_all()
516 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
517 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_dma_terminate_all()
526 vchan_synchronize(&chan->vchan); in stm32_dma_synchronize()
570 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma_start_transfer()
[all …]
stm32-mdma.c:237 struct virt_dma_chan vchan; member
265 return container_of(chan->vchan.chan.device, struct stm32_mdma_device, in stm32_mdma_get_dev()
271 return container_of(c, struct stm32_mdma_chan, vchan.chan); in to_stm32_mdma_chan()
281 return &chan->vchan.chan.dev->device; in chan2dev()
830 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_slave_sg()
926 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_dma_cyclic()
1112 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_dma_memcpy()
1149 vdesc = vchan_next_desc(&chan->vchan); in stm32_mdma_start_transfer()
1190 dev_dbg(chan2dev(chan), "vchan %p: started\n", &chan->vchan); in stm32_mdma_start_transfer()
1198 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_issue_pending()
[all …]
stm32-dma3.c:285 struct virt_dma_chan vchan; member
318 return container_of(chan->vchan.chan.device, struct stm32_dma3_ddata, dma_dev); in to_stm32_dma3_ddata()
323 return container_of(c, struct stm32_dma3_chan, vchan.chan); in to_stm32_dma3_chan()
333 return &chan->vchan.chan.dev->device; in chan2dev()
778 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma3_chan_start()
810 dev_dbg(chan2dev(chan), "vchan %p: started\n", &chan->vchan); in stm32_dma3_chan_start()
1018 spin_lock(&chan->vchan.lock); in stm32_dma3_chan_irq()
1022 spin_unlock(&chan->vchan.lock); in stm32_dma3_chan_irq()
1067 spin_unlock(&chan->vchan.lock); in stm32_dma3_chan_irq()
1164 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma3_free_chan_resources()
[all …]
/linux/drivers/dma/xilinx/
xdma.c:66 struct virt_dma_chan vchan; member
210 return container_of(chan, struct xdma_chan, vchan.chan); in to_xdma_chan()
327 struct virt_dma_desc *vd = vchan_next_desc(&xchan->vchan); in xdma_xfer_start()
476 xchan->vchan.desc_free = xdma_free_desc; in xdma_alloc_channels()
477 vchan_init(&xchan->vchan, &xdev->dma_dev); in xdma_alloc_channels()
497 spin_lock_irqsave(&xdma_chan->vchan.lock, flags); in xdma_issue_pending()
498 if (vchan_issue_pending(&xdma_chan->vchan)) in xdma_issue_pending()
500 spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags); in xdma_issue_pending()
516 spin_lock_irqsave(&xdma_chan->vchan.lock, flags); in xdma_terminate_all()
520 vd = vchan_next_desc(&xdma_chan->vchan); in xdma_terminate_all()
[all …]
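xdma_alloc_channels() above shows the setup half of the contract: point vchan.desc_free at a driver callback, then let vchan_init() wire the channel into the dma_device. Roughly, with the hypothetical names again (foo_dev is assumed to hold the struct dma_device):

static void foo_desc_free(struct virt_dma_desc *vd)
{
	/* Invoked by the virt-dma core when a descriptor is released. */
	kfree(container_of(vd, struct foo_desc, vdesc));
}

static void foo_init_channel(struct foo_dev *fdev, struct foo_chan *fc)
{
	fc->vchan.desc_free = foo_desc_free;
	/* Initialises the descriptor lists, lock and completion tasklet,
	 * and adds vchan.chan to fdev->dma_dev.channels. */
	vchan_init(&fc->vchan, &fdev->dma_dev);
}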
xilinx_dpdma.c:227 struct virt_dma_chan vchan; member
249 container_of(_chan, struct xilinx_dpdma_chan, vchan.chan)
743 return vchan_tx_prep(&chan->vchan, &tx_desc->vdesc, flags); in xilinx_dpdma_chan_prep_cyclic()
925 vdesc = vchan_next_desc(&chan->vchan); in xilinx_dpdma_chan_queue_transfer()
1177 spin_lock(&chan->vchan.lock); in xilinx_dpdma_chan_vsync_irq()
1184 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_chan_vsync_irq()
1255 list_empty(&chan->vchan.desc_issued)) { in xilinx_dpdma_chan_handle_err()
1258 &chan->vchan.desc_issued); in xilinx_dpdma_chan_handle_err()
1309 vchan_tx_prep(&chan->vchan, &desc->vdesc, flags | DMA_CTRL_ACK); in xilinx_dpdma_prep_interleaved_dma()
1352 vchan_free_chan_resources(&chan->vchan); in xilinx_dpdma_free_chan_resources()
[all …]
/linux/drivers/dma/fsl-dpaa2-qdma/
dpaa2-qdma.c:21 return container_of(chan, struct dpaa2_qdma_chan, vchan.chan); in to_dpaa2_qdma_chan()
73 spin_lock_irqsave(&dpaa2_chan->vchan.lock, flags); in dpaa2_qdma_free_chan_resources()
74 vchan_get_all_descriptors(&dpaa2_chan->vchan, &head); in dpaa2_qdma_free_chan_resources()
75 spin_unlock_irqrestore(&dpaa2_chan->vchan.lock, flags); in dpaa2_qdma_free_chan_resources()
77 vchan_dma_desc_free_list(&dpaa2_chan->vchan, &head); in dpaa2_qdma_free_chan_resources()
269 return vchan_tx_prep(&dpaa2_chan->vchan, &dpaa2_comp->vdesc, flags); in dpaa2_qdma_prep_memcpy()
282 spin_lock(&dpaa2_chan->vchan.lock); in dpaa2_qdma_issue_pending()
283 if (vchan_issue_pending(&dpaa2_chan->vchan)) { in dpaa2_qdma_issue_pending()
284 vdesc = vchan_next_desc(&dpaa2_chan->vchan); in dpaa2_qdma_issue_pending()
301 spin_unlock(&dpaa2_chan->vchan.lock); in dpaa2_qdma_issue_pending()
[all …]
/linux/drivers/dma/ti/
edma.c:216 struct virt_dma_chan vchan; member
704 return container_of(c, struct edma_chan, vchan.chan); in to_edma_chan()
723 struct device *dev = echan->vchan.chan.device->dev; in edma_execute()
728 vdesc = vchan_next_desc(&echan->vchan); in edma_execute()
817 spin_lock_irqsave(&echan->vchan.lock, flags); in edma_terminate_all()
834 vchan_get_all_descriptors(&echan->vchan, &head); in edma_terminate_all()
835 spin_unlock_irqrestore(&echan->vchan.lock, flags); in edma_terminate_all()
836 vchan_dma_desc_free_list(&echan->vchan, &head); in edma_terminate_all()
845 vchan_synchronize(&echan->vchan); in edma_synchronize()
1102 return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags); in edma_prep_slave_sg()
[all …]
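edma_prep_slave_sg() above ends the way every prep_* hit in this listing does (jz4780, stm32-mdma, ls2x, hsu): allocate a driver descriptor that embeds struct virt_dma_desc and return it through vchan_tx_prep(), which initialises the dma_async_tx_descriptor and parks the descriptor on desc_allocated until the client submits it. A minimal memcpy-flavoured sketch; foo_fill_hw_desc() is made up:

static struct dma_async_tx_descriptor *
foo_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dst, dma_addr_t src,
		    size_t len, unsigned long flags)
{
	struct foo_chan *fc = to_foo_chan(c);
	struct foo_desc *d = kzalloc(sizeof(*d), GFP_NOWAIT);

	if (!d)
		return NULL;

	foo_fill_hw_desc(d, dst, src, len);	/* hypothetical hw programming */

	/* Ties d->vdesc to the channel and returns &d->vdesc.tx; the
	 * descriptor joins vchan.desc_allocated until submitted. */
	return vchan_tx_prep(&fc->vchan, &d->vdesc, flags);
}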
/linux/drivers/dma/amd/qdma/
qdma.c:31 return container_of(chan, struct qdma_queue, vchan.chan); in to_qdma_queue()
470 q->vchan.desc_free = qdma_free_vdesc; in qdma_alloc_queues()
471 vchan_init(&q->vchan, &qdev->dma_dev); in qdma_alloc_queues()
549 vchan_free_chan_resources(&queue->vchan); in qdma_free_queue_resources()
613 if (!vchan_next_desc(&queue->vchan)) in qdma_xfer_start()
633 spin_lock_irqsave(&queue->vchan.lock, flags); in qdma_issue_pending()
634 if (vchan_issue_pending(&queue->vchan)) { in qdma_issue_pending()
642 spin_unlock_irqrestore(&queue->vchan.lock, flags); in qdma_issue_pending()
709 struct virt_dma_chan *vc = &q->vchan; in qdma_fill_pending_vdesc()
782 tx = vchan_tx_prep(&q->vchan, &vdesc->vdesc, flags); in qdma_prep_device_sg()
[all …]
/linux/arch/sh/drivers/dma/
dma-sysfs.c:90 dma_configure_channel(channel->vchan, config); in dma_store_config()
133 dev->id = chan->vchan; in dma_create_sysfs_files()
