Searched refs:vchan (Results 1 – 19 of 19) sorted by relevance

/linux/drivers/dma/hsu/
hsu.c
121 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
168 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
170 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
222 stat = this_cpu_ptr(hsuc->vchan.chan.local); in hsu_dma_do_irq()
224 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
238 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
295 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
303 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
304 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
306 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
[all …]
hsu.h
87 struct virt_dma_chan vchan; member
100 return container_of(chan, struct hsu_dma_chan, vchan.chan); in to_hsu_dma_chan()
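The hsu.c and hsu.h hits above show the pattern that repeats throughout these results: the driver embeds struct virt_dma_chan in its private channel structure and recovers it from the generic struct dma_chan with container_of(). A minimal sketch of that layout, using hypothetical foo_* names rather than code from any of the drivers listed here:

	#include <linux/dmaengine.h>
	#include "virt-dma.h"		/* drivers/dma/virt-dma.h */

	struct foo_dma_desc;

	struct foo_dma_chan {
		struct virt_dma_chan vchan;	/* embedded virt-dma channel */
		struct foo_dma_desc *desc;	/* descriptor currently on the hardware */
		void __iomem *reg;		/* per-channel register base */
	};

	/* The dmaengine core passes around the embedded vchan.chan; walk back
	 * out to the driver's private channel structure.
	 */
	static inline struct foo_dma_chan *to_foo_dma_chan(struct dma_chan *chan)
	{
		return container_of(chan, struct foo_dma_chan, vchan.chan);
	}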
/linux/drivers/dma/
loongson2-apb-dma.c
124 struct virt_dma_chan vchan; member
156 return container_of(chan, struct ls2x_dma_chan, vchan.chan); in to_ldma_chan()
186 struct ls2x_dma_priv *priv = to_ldma_priv(lchan->vchan.chan.device); in ls2x_dma_write_cmd()
196 struct ls2x_dma_priv *priv = to_ldma_priv(lchan->vchan.chan.device); in ls2x_dma_start_transfer()
202 vdesc = vchan_next_desc(&lchan->vchan); in ls2x_dma_start_transfer()
367 return vchan_tx_prep(&lchan->vchan, &desc->vdesc, flags); in ls2x_dma_prep_slave_sg()
435 return vchan_tx_prep(&lchan->vchan, &desc->vdesc, flags); in ls2x_dma_prep_dma_cyclic()
466 spin_lock_irqsave(&lchan->vchan.lock, flags); in ls2x_dma_issue_pending()
467 if (vchan_issue_pending(&lchan->vchan) && !lchan->desc) in ls2x_dma_issue_pending()
469 spin_unlock_irqrestore(&lchan->vchan.lock, flags); in ls2x_dma_issue_pending()
[all …]
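loongson2-apb-dma.c above, and hsu.c before it, also show how issue_pending and the internal start-transfer helper pair up around the vchan lock: vchan_issue_pending() moves submitted descriptors to the issued list, and the hardware is kicked only when nothing is already in flight, with vchan_next_desc() picking the next issued descriptor. Continuing the hypothetical foo_* sketch from above (foo_dma_desc and to_foo_dma_desc() are likewise assumptions, not taken from these drivers):

	struct foo_dma_desc {
		struct virt_dma_desc vdesc;	/* embedded virt-dma descriptor */
		/* ... hardware-specific descriptor fields ... */
	};

	static inline struct foo_dma_desc *to_foo_dma_desc(struct virt_dma_desc *vdesc)
	{
		return container_of(vdesc, struct foo_dma_desc, vdesc);
	}

	/* Called with fchan->vchan.lock held. */
	static void foo_dma_start_transfer(struct foo_dma_chan *fchan)
	{
		struct virt_dma_desc *vdesc = vchan_next_desc(&fchan->vchan);

		if (!vdesc) {
			fchan->desc = NULL;
			return;
		}

		fchan->desc = to_foo_dma_desc(vdesc);
		/* ... program the controller from fchan->desc ... */
	}

	static void foo_dma_issue_pending(struct dma_chan *chan)
	{
		struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
		unsigned long flags;

		spin_lock_irqsave(&fchan->vchan.lock, flags);
		/* Move submitted descriptors to the issued list and start the
		 * hardware only if it is currently idle.
		 */
		if (vchan_issue_pending(&fchan->vchan) && !fchan->desc)
			foo_dma_start_transfer(fchan);
		spin_unlock_irqrestore(&fchan->vchan.lock, flags);
	}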
fsl-edma-common.c
49 spin_lock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
53 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
69 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
105 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
139 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
185 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
195 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
238 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
242 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
243 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
[all …]
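The fsl_edma_terminate_all() hits above (and the matching code in dma-axi-dmac.c and stm32-dma.c below) follow the usual teardown sequence: stop the hardware and collect every outstanding descriptor under vchan.lock, then free the collected list after the lock is dropped. A hedged sketch, still using the hypothetical foo_* names:

	static int foo_dma_terminate_all(struct dma_chan *chan)
	{
		struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
		unsigned long flags;
		LIST_HEAD(head);

		spin_lock_irqsave(&fchan->vchan.lock, flags);
		/* ... disable the channel in hardware ... */
		fchan->desc = NULL;
		vchan_get_all_descriptors(&fchan->vchan, &head);
		spin_unlock_irqrestore(&fchan->vchan.lock, flags);

		/* Free the collected descriptors outside the lock, as the
		 * drivers listed here do.
		 */
		vchan_dma_desc_free_list(&fchan->vchan, &head);

		return 0;
	}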
dma-axi-dmac.c
143 struct virt_dma_chan vchan; member
176 return container_of(chan->vchan.chan.device, struct axi_dmac, in chan_to_axi_dmac()
182 return container_of(c, struct axi_dmac_chan, vchan.chan); in to_axi_dmac_chan()
245 vdesc = vchan_next_desc(&chan->vchan); in axi_dmac_start_transfer()
470 spin_lock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
481 spin_unlock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
493 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
496 vchan_get_all_descriptors(&chan->vchan, &head); in axi_dmac_terminate_all()
498 spin_unlock_irqrestore(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
500 vchan_dma_desc_free_list(&chan->vchan, &head); in axi_dmac_terminate_all()
[all …]
st_fdma.h
125 struct virt_dma_chan vchan; member
186 + (fchan)->vchan.chan.chan_id * 0x4 \
191 + (fchan)->vchan.chan.chan_id * 0x4 \
208 + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
213 + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
idma64.h
128 struct virt_dma_chan vchan; member
143 return container_of(chan, struct idma64_chan, vchan.chan); in to_idma64_chan()
fsl-edma-main.c
31 vchan_synchronize(&fsl_chan->vchan); in fsl_edma_synchronize()
701 fsl_chan->vchan.desc_free = fsl_edma_free_desc; in fsl_edma_probe()
718 vchan_init(&fsl_chan->vchan, &fsl_edma->dma_dev); in fsl_edma_probe()
822 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_suspend_late()
831 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_suspend_late()
fsl-edma-common.h
152 struct virt_dma_chan vchan; member
452 return container_of(chan, struct fsl_edma_chan, vchan.chan); in to_fsl_edma_chan()
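fsl-edma-main.c above also shows the probe-time wiring: each channel gets a desc_free callback and is then attached to the dma_device with vchan_init(), which is what later exposes vchan.chan to dmaengine clients. A sketch under the same hypothetical foo_* assumptions (the foo_dma_dev layout and channel count are made up for illustration):

	struct foo_dma_dev {
		struct dma_device dma_dev;
		struct foo_dma_chan chan[8];	/* arbitrary channel count */
	};

	/* Destructor invoked by the virt-dma core for completed/freed
	 * descriptors; needs <linux/slab.h> for kfree().
	 */
	static void foo_dma_free_desc(struct virt_dma_desc *vdesc)
	{
		kfree(to_foo_dma_desc(vdesc));
	}

	static void foo_dma_register_channels(struct foo_dma_dev *fdev)
	{
		int i;

		INIT_LIST_HEAD(&fdev->dma_dev.channels);
		for (i = 0; i < ARRAY_SIZE(fdev->chan); i++) {
			struct foo_dma_chan *fchan = &fdev->chan[i];

			fchan->vchan.desc_free = foo_dma_free_desc;
			vchan_init(&fchan->vchan, &fdev->dma_dev);
		}
	}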
/linux/drivers/dma/lgm/
lgm-dma.c
195 struct virt_dma_chan vchan; member
292 return container_of(chan, struct ldma_chan, vchan.chan); in to_ldma_chan()
521 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cctrl_cfg()
550 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_init()
578 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_set_class()
596 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_on()
615 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_off()
638 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_hw_cfg()
662 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_cfg()
696 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_reset()
[all …]
/linux/drivers/dma/stm32/
stm32-dma.c
210 struct virt_dma_chan vchan; member
237 return container_of(chan->vchan.chan.device, struct stm32_dma_device, in stm32_dma_get_dev()
243 return container_of(c, struct stm32_dma_chan, vchan.chan); in to_stm32_dma_chan()
253 return &chan->vchan.chan.dev->device; in chan2dev()
505 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
515 vchan_get_all_descriptors(&chan->vchan, &head); in stm32_dma_terminate_all()
516 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
517 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_dma_terminate_all()
526 vchan_synchronize(&chan->vchan); in stm32_dma_synchronize()
570 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma_start_transfer()
[all …]
stm32-mdma.c
237 struct virt_dma_chan vchan; member
265 return container_of(chan->vchan.chan.device, struct stm32_mdma_device, in stm32_mdma_get_dev()
271 return container_of(c, struct stm32_mdma_chan, vchan.chan); in to_stm32_mdma_chan()
281 return &chan->vchan.chan.dev->device; in chan2dev()
830 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_slave_sg()
926 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_dma_cyclic()
1112 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_dma_memcpy()
1149 vdesc = vchan_next_desc(&chan->vchan); in stm32_mdma_start_transfer()
1190 dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan); in stm32_mdma_start_transfer()
1198 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_issue_pending()
[all …]
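The vchan_tx_prep() hits in stm32-mdma.c above (and in hsu.c, loongson2-apb-dma.c and qdma.c) all sit at the end of a prep callback: the driver builds its own descriptor and hands it to the virt-dma core, which returns the embedded dma_async_tx_descriptor for the client to submit. A sketch of that tail, again with the hypothetical foo_* types:

	static struct dma_async_tx_descriptor *
	foo_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
			      unsigned int sg_len, enum dma_transfer_direction dir,
			      unsigned long flags, void *context)
	{
		struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
		struct foo_dma_desc *desc;

		/* Prep callbacks may run in atomic context, hence GFP_NOWAIT. */
		desc = kzalloc(sizeof(*desc), GFP_NOWAIT);
		if (!desc)
			return NULL;

		/* ... translate sgl/sg_len/dir into hardware descriptors ... */

		/* Queue on the vchan and return the embedded tx descriptor. */
		return vchan_tx_prep(&fchan->vchan, &desc->vdesc, flags);
	}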
/linux/drivers/dma/amd/qdma/
qdma.c
31 return container_of(chan, struct qdma_queue, vchan.chan); in to_qdma_queue()
470 q->vchan.desc_free = qdma_free_vdesc; in qdma_alloc_queues()
471 vchan_init(&q->vchan, &qdev->dma_dev); in qdma_alloc_queues()
549 vchan_free_chan_resources(&queue->vchan); in qdma_free_queue_resources()
613 if (!vchan_next_desc(&queue->vchan)) in qdma_xfer_start()
633 spin_lock_irqsave(&queue->vchan.lock, flags); in qdma_issue_pending()
634 if (vchan_issue_pending(&queue->vchan)) { in qdma_issue_pending()
642 spin_unlock_irqrestore(&queue->vchan.lock, flags); in qdma_issue_pending()
709 struct virt_dma_chan *vc = &q->vchan; in qdma_fill_pending_vdesc()
782 tx = vchan_tx_prep(&q->vchan, &vdesc->vdesc, flags); in qdma_prep_device_sg()
[all …]
qdma.h
206 struct virt_dma_chan vchan; member
/linux/arch/sh/drivers/dma/
dma-sysfs.c
90 dma_configure_channel(channel->vchan, config); in dma_store_config()
133 dev->id = chan->vchan; in dma_create_sysfs_files()
dma-api.c
69 if (channel->vchan == chan) in get_dma_channel()
233 chan->vchan = info->first_channel_nr + i + total_channels; in register_dmac()
/linux/drivers/dma/sf-pdma/
sf-pdma.h
92 struct virt_dma_chan vchan; member
/linux/arch/sh/include/asm/
dma.h
65 unsigned int vchan; /* Virtual channel number */ member
/linux/drivers/dma/fsl-dpaa2-qdma/
dpaa2-qdma.h
72 struct virt_dma_chan vchan; member