Lines Matching +full:big +full:- +full:endian +full:- +full:desc
(Each hit below shows the file line number, the matched source line, and the enclosing function; the fragments are from the Linux kernel's drivers/dma/fsl-edma-common.c.)

1 // SPDX-License-Identifier: GPL-2.0+
3 // Copyright (c) 2013-2014 Freescale Semiconductor, Inc
11 #include <linux/dma-mapping.h>
15 #include "fsl-edma-common.h"
49 spin_lock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
51 if (!fsl_chan->edesc) { in fsl_edma_tx_chan_handler()
53 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
57 if (!fsl_chan->edesc->iscyclic) { in fsl_edma_tx_chan_handler()
58 list_del(&fsl_chan->edesc->vdesc.node); in fsl_edma_tx_chan_handler()
59 vchan_cookie_complete(&fsl_chan->edesc->vdesc); in fsl_edma_tx_chan_handler()
60 fsl_chan->edesc = NULL; in fsl_edma_tx_chan_handler()
61 fsl_chan->status = DMA_COMPLETE; in fsl_edma_tx_chan_handler()
63 vchan_cyclic_callback(&fsl_chan->edesc->vdesc); in fsl_edma_tx_chan_handler()
66 if (!fsl_chan->edesc) in fsl_edma_tx_chan_handler()
69 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
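The fragments above come from the per-channel transfer-complete interrupt handler. Reassembled into a condensed sketch (tx_chan_handler_sketch is my name; non-matching lines are reconstructed from context, not quoted), the flow is: a one-shot descriptor is retired and the channel marked idle, a cyclic descriptor stays installed and only fires its period callback, and an idle channel immediately launches the next queued descriptor.

```c
/* Condensed sketch of the completion path shown above. */
static void tx_chan_handler_sketch(struct fsl_edma_chan *fsl_chan)
{
	spin_lock(&fsl_chan->vchan.lock);

	if (!fsl_chan->edesc) {
		/* terminate_all() raced with the IRQ: nothing to complete */
		spin_unlock(&fsl_chan->vchan.lock);
		return;
	}

	if (!fsl_chan->edesc->iscyclic) {
		/* one-shot: retire the descriptor, channel goes idle */
		list_del(&fsl_chan->edesc->vdesc.node);
		vchan_cookie_complete(&fsl_chan->edesc->vdesc);
		fsl_chan->edesc = NULL;
		fsl_chan->status = DMA_COMPLETE;
	} else {
		/* cyclic: keep the descriptor, report one period */
		vchan_cyclic_callback(&fsl_chan->edesc->vdesc);
	}

	if (!fsl_chan->edesc)
		fsl_edma_xfer_desc(fsl_chan);	/* start the next queued desc */

	spin_unlock(&fsl_chan->vchan.lock);
}
```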
78 if (fsl_chan->is_rxchan) in fsl_edma3_enable_request()
83 if (fsl_chan->is_remote) in fsl_edma3_enable_request()
93 if (!edma_readl(fsl_chan->edma, fsl_chan->mux_addr)) in fsl_edma3_enable_request()
94 edma_writel(fsl_chan->edma, fsl_chan->srcid, fsl_chan->mux_addr); in fsl_edma3_enable_request()
104 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_enable_request()
105 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
110 if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_WRAP_IO) { in fsl_edma_enable_request()
111 edma_writeb(fsl_chan->edma, EDMA_SEEI_SEEI(ch), regs->seei); in fsl_edma_enable_request()
112 edma_writeb(fsl_chan->edma, ch, regs->serq); in fsl_edma_enable_request()
114 /* ColdFire is big endian, and accesses natively in fsl_edma_enable_request()
115 * big endian I/O peripherals in fsl_edma_enable_request()
117 iowrite8(EDMA_SEEI_SEEI(ch), regs->seei); in fsl_edma_enable_request()
118 iowrite8(ch, regs->serq); in fsl_edma_enable_request()
130 edma_writel(fsl_chan->edma, 0, fsl_chan->mux_addr); in fsl_edma3_disable_request()
138 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_disable_request()
139 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
144 if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_WRAP_IO) { in fsl_edma_disable_request()
145 edma_writeb(fsl_chan->edma, ch, regs->cerq); in fsl_edma_disable_request()
146 edma_writeb(fsl_chan->edma, EDMA_CEEI_CEEI(ch), regs->ceei); in fsl_edma_disable_request()
148 /* ColdFire is big endian, and accesses natively in fsl_edma_disable_request()
149 * big endian I/O peripherals in fsl_edma_disable_request()
151 iowrite8(ch, regs->cerq); in fsl_edma_disable_request()
152 iowrite8(EDMA_CEEI_CEEI(ch), regs->ceei); in fsl_edma_disable_request()
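Both request paths (lines 110-118 enable, 144-152 disable) dispatch on FSL_EDMA_DRV_WRAP_IO. The SEEI/SERQ and CEEI/CERQ registers are byte-wide: platforms flagged WRAP_IO go through the driver's edma_writeb() wrapper, while ColdFire, natively big endian with big-endian peripherals, can iowrite8() directly. A hedged sketch of what such a wrapper does (edma_writeb_sketch is my name, and the lane-flip detail follows my reading of the helper in fsl-edma-common.h):

```c
/* Hedged sketch of a byte-wide write wrapper for the WRAP_IO path:
 * on a big-endian register block the byte lanes within each 32-bit
 * word are mirrored, so the low two address bits are flipped before
 * the store; little-endian blocks are written as-is.
 */
static inline void edma_writeb_sketch(struct fsl_edma_engine *edma,
				      u8 val, void __iomem *addr)
{
	if (edma->big_endian)
		iowrite8(val, (void __iomem *)((unsigned long)addr ^ 0x3));
	else
		iowrite8(val, addr);
}
```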
185 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
188 int endian_diff[4] = {3, 1, -1, -3}; in fsl_edma_chan_mux()
189 u32 dmamux_nr = fsl_chan->edma->drvdata->dmamuxs; in fsl_edma_chan_mux()
194 chans_per_mux = fsl_chan->edma->n_chans / dmamux_nr; in fsl_edma_chan_mux()
195 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
197 if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_MUX_SWAP) in fsl_edma_chan_mux()
200 muxaddr = fsl_chan->edma->muxbase[ch / chans_per_mux]; in fsl_edma_chan_mux()
203 if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_CONFIG32) in fsl_edma_chan_mux()
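The FSL_EDMA_DRV_MUX_SWAP branch (line 197) handles the same mirrored-byte-lane problem in the DMAMUX block: the driver applies ch_off += endian_diff[ch_off % 4], so lane n within each 32-bit word maps to lane 3 - n. A standalone demo of that arithmetic (plain userspace C, runnable as-is):

```c
#include <stdio.h>

int main(void)
{
	/* same table as the driver: remaps byte lane n to 3 - n */
	static const int endian_diff[4] = {3, 1, -1, -3};

	for (int ch_off = 0; ch_off < 8; ch_off++) {
		int swapped = ch_off + endian_diff[ch_off % 4];
		printf("ch_off %d -> mux byte %d\n", ch_off, swapped);
	}
	return 0;	/* 0->3, 1->2, 2->1, 3->0, 4->7, 5->6, ... */
}
```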
216 val = ffs(addr_width) - 1; in fsl_edma_get_tcd_attr()
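Line 216 is the entire transfer-attribute encoding: the eDMA TCD ATTR field wants log2 of the access size, and ffs(addr_width) - 1 computes exactly that for the power-of-two bus widths the dmaengine API allows. A runnable illustration:

```c
#include <stdio.h>
#include <strings.h>	/* ffs() */

int main(void)
{
	/* DMA_SLAVE_BUSWIDTH_* values are byte counts: 1, 2, 4, 8 */
	for (int width = 1; width <= 8; width <<= 1)
		printf("addr_width %d -> TCD SSIZE/DSIZE %d\n",
		       width, ffs(width) - 1);	/* 1->0, 2->1, 4->2, 8->3 */
	return 0;
}
```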
226 for (i = 0; i < fsl_desc->n_tcds; i++) in fsl_edma_free_desc()
227 dma_pool_free(fsl_desc->echan->tcd_pool, fsl_desc->tcd[i].vtcd, in fsl_edma_free_desc()
228 fsl_desc->tcd[i].ptcd); in fsl_edma_free_desc()
238 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
240 fsl_chan->edesc = NULL; in fsl_edma_terminate_all()
241 fsl_chan->status = DMA_COMPLETE; in fsl_edma_terminate_all()
242 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
243 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
244 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
247 pm_runtime_allow(fsl_chan->pd_dev); in fsl_edma_terminate_all()
257 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
258 if (fsl_chan->edesc) { in fsl_edma_pause()
260 fsl_chan->status = DMA_PAUSED; in fsl_edma_pause()
262 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
271 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_resume()
272 if (fsl_chan->edesc) { in fsl_edma_resume()
274 fsl_chan->status = DMA_IN_PROGRESS; in fsl_edma_resume()
276 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_resume()
282 if (fsl_chan->dma_dir != DMA_NONE) in fsl_edma_unprep_slave_dma()
283 dma_unmap_resource(fsl_chan->vchan.chan.device->dev, in fsl_edma_unprep_slave_dma()
284 fsl_chan->dma_dev_addr, in fsl_edma_unprep_slave_dma()
285 fsl_chan->dma_dev_size, in fsl_edma_unprep_slave_dma()
286 fsl_chan->dma_dir, 0); in fsl_edma_unprep_slave_dma()
287 fsl_chan->dma_dir = DMA_NONE; in fsl_edma_unprep_slave_dma()
293 struct device *dev = fsl_chan->vchan.chan.device->dev; in fsl_edma_prep_slave_dma()
301 addr = fsl_chan->cfg.dst_addr; in fsl_edma_prep_slave_dma()
302 size = fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_dma()
306 addr = fsl_chan->cfg.src_addr; in fsl_edma_prep_slave_dma()
307 size = fsl_chan->cfg.src_maxburst; in fsl_edma_prep_slave_dma()
315 if (fsl_chan->dma_dir == dma_dir) in fsl_edma_prep_slave_dma()
320 fsl_chan->dma_dev_addr = dma_map_resource(dev, addr, size, dma_dir, 0); in fsl_edma_prep_slave_dma()
321 if (dma_mapping_error(dev, fsl_chan->dma_dev_addr)) in fsl_edma_prep_slave_dma()
323 fsl_chan->dma_dev_size = size; in fsl_edma_prep_slave_dma()
324 fsl_chan->dma_dir = dma_dir; in fsl_edma_prep_slave_dma()
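fsl_edma_prep_slave_dma() is the less common half of the DMA-mapping story: the peripheral FIFO is MMIO, not RAM, so it is mapped with dma_map_resource() (which takes a phys_addr_t) rather than dma_map_single(), and the mapping is cached on the channel until the direction changes. A hedged sketch of the pattern (prep_slave_dma_sketch is my name; the direction constants follow my reading of the full function, where the data direction is expressed from the mapped FIFO's point of view):

```c
/* Sketch: map a device FIFO (a physical MMIO address) for DMA and
 * cache the mapping on the channel until the direction changes.
 */
static int prep_slave_dma_sketch(struct fsl_edma_chan *fsl_chan,
				 enum dma_transfer_direction dir)
{
	struct device *dev = fsl_chan->vchan.chan.device->dev;
	enum dma_data_direction dma_dir;
	phys_addr_t addr;
	u32 size;

	if (dir == DMA_MEM_TO_DEV) {
		addr = fsl_chan->cfg.dst_addr;
		size = fsl_chan->cfg.dst_maxburst;
		dma_dir = DMA_FROM_DEVICE;	/* from the FIFO's perspective */
	} else {
		addr = fsl_chan->cfg.src_addr;
		size = fsl_chan->cfg.src_maxburst;
		dma_dir = DMA_TO_DEVICE;
	}

	if (fsl_chan->dma_dir == dma_dir)	/* already mapped this way */
		return 0;

	fsl_edma_unprep_slave_dma(fsl_chan);	/* drop any stale mapping */

	fsl_chan->dma_dev_addr = dma_map_resource(dev, addr, size, dma_dir, 0);
	if (dma_mapping_error(dev, fsl_chan->dma_dev_addr))
		return -ENOMEM;
	fsl_chan->dma_dev_size = size;
	fsl_chan->dma_dir = dma_dir;
	return 0;
}
```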
334 memcpy(&fsl_chan->cfg, cfg, sizeof(*cfg)); in fsl_edma_slave_config()
343 struct fsl_edma_desc *edesc = fsl_chan->edesc; in fsl_edma_desc_residue()
344 enum dma_transfer_direction dir = edesc->dirn; in fsl_edma_desc_residue()
350 /* calculate the total size in this desc */ in fsl_edma_desc_residue()
351 for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++) { in fsl_edma_desc_residue()
352 nbytes = fsl_edma_get_tcd_to_cpu(fsl_chan, edesc->tcd[i].vtcd, nbytes); in fsl_edma_desc_residue()
355 len += nbytes * fsl_edma_get_tcd_to_cpu(fsl_chan, edesc->tcd[i].vtcd, biter); in fsl_edma_desc_residue()
373 for (i = 0; i < fsl_chan->edesc->n_tcds; i++) { in fsl_edma_desc_residue()
374 nbytes = fsl_edma_get_tcd_to_cpu(fsl_chan, edesc->tcd[i].vtcd, nbytes); in fsl_edma_desc_residue()
378 size = nbytes * fsl_edma_get_tcd_to_cpu(fsl_chan, edesc->tcd[i].vtcd, biter); in fsl_edma_desc_residue()
381 dma_addr = fsl_edma_get_tcd_to_cpu(fsl_chan, edesc->tcd[i].vtcd, saddr); in fsl_edma_desc_residue()
383 dma_addr = fsl_edma_get_tcd_to_cpu(fsl_chan, edesc->tcd[i].vtcd, daddr); in fsl_edma_desc_residue()
385 len -= size; in fsl_edma_desc_residue()
387 len += dma_addr + size - cur_addr; in fsl_edma_desc_residue()
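The residue walk makes two passes over the TCD list: the first sums nbytes * biter per TCD for the total length, and the second subtracts every segment up to the one the engine is currently inside, then adds back the untransferred tail of that segment (lines 385-387). A toy model of the second pass, runnable as plain C:

```c
#include <stdio.h>

/* Toy model: three 4 KiB segments, engine reports an address 1 KiB
 * into the middle one. dma_addr/size are per-TCD values; cur_addr is
 * what the hardware's SADDR/DADDR register would report.
 */
int main(void)
{
	unsigned long seg_addr[3] = {0x1000, 0x2000, 0x3000};
	unsigned long seg_size = 0x1000;
	unsigned long cur_addr = 0x2400;	/* inside segment 1 */
	unsigned long len = 3 * seg_size;	/* pass 1: total length */

	for (int i = 0; i < 3; i++) {
		unsigned long dma_addr = seg_addr[i];

		len -= seg_size;	/* assume this TCD is finished ... */
		if (cur_addr >= dma_addr && cur_addr < dma_addr + seg_size) {
			/* ... unless the engine is inside it: add back
			 * the part of this segment not yet transferred.
			 */
			len += dma_addr + seg_size - cur_addr;
			break;
		}
	}
	printf("residue = 0x%lx\n", len);	/* 0x1c00 */
	return 0;
}
```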
408 return fsl_chan->status; in fsl_edma_tx_status()
410 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_tx_status()
411 vdesc = vchan_find_desc(&fsl_chan->vchan, cookie); in fsl_edma_tx_status()
412 if (fsl_chan->edesc && cookie == fsl_chan->edesc->vdesc.tx.cookie) in fsl_edma_tx_status()
413 txstate->residue = in fsl_edma_tx_status()
416 txstate->residue = in fsl_edma_tx_status()
419 txstate->residue = 0; in fsl_edma_tx_status()
421 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_tx_status()
423 return fsl_chan->status; in fsl_edma_tx_status()
432 * endian format. However, we need to load the TCD registers in in fsl_edma_set_tcd_regs()
433 * big- or little-endian, obeying the eDMA engine's endian model, in fsl_edma_set_tcd_regs()
455 if (fsl_chan->is_sw) { in fsl_edma_set_tcd_regs()
482 struct dma_slave_config *cfg = &fsl_chan->cfg; in fsl_edma_fill_tcd()
488 * endian format irrespective of the register endian model. in fsl_edma_fill_tcd()
489 * So we put the value in little endian in memory, waiting in fsl_edma_fill_tcd()
499 if (fsl_chan->is_multi_fifo) { in fsl_edma_fill_tcd()
501 burst = cfg->direction == DMA_DEV_TO_MEM ? in fsl_edma_fill_tcd()
502 cfg->src_maxburst : cfg->dst_maxburst; in fsl_edma_fill_tcd()
503 nbytes |= EDMA_V3_TCD_NBYTES_MLOFF(-(burst * 4)); in fsl_edma_fill_tcd()
505 if (cfg->direction == DMA_MEM_TO_DEV) { in fsl_edma_fill_tcd()
533 if (fsl_chan->is_rxchan) in fsl_edma_fill_tcd()
536 if (fsl_chan->is_sw) in fsl_edma_fill_tcd()
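Lines 432-433 and 488-489 describe the two halves of the endianness contract: fsl_edma_fill_tcd() always stores the in-memory TCD little endian, whatever the CPU or engine byte order, and fsl_edma_set_tcd_regs() swaps (or not) while loading the TCD registers according to the engine's endianness. A minimal illustration of that split, assuming a one-field TCD and hypothetical helper names (the real driver routes this through its edma_write_tcdreg()-style accessors):

```c
#include <linux/types.h>
#include <linux/io.h>

/* Hedged illustration only, not the driver's actual helpers. */
struct tcd_sketch {
	__le32 saddr;	/* in-memory TCD: always little endian */
};

static void fill_tcd_sketch(struct tcd_sketch *tcd, u32 src)
{
	tcd->saddr = cpu_to_le32(src);	/* plain RAM write, fixed LE layout */
}

static void set_tcd_regs_sketch(void __iomem *tcd_reg_saddr,
				struct tcd_sketch *tcd, bool big_endian)
{
	u32 v = le32_to_cpu(tcd->saddr);	/* back to CPU order ... */

	if (big_endian)
		iowrite32be(v, tcd_reg_saddr);	/* ... then engine order */
	else
		iowrite32(v, tcd_reg_saddr);
}
```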
554 fsl_desc->echan = fsl_chan; in fsl_edma_alloc_desc()
555 fsl_desc->n_tcds = sg_len; in fsl_edma_alloc_desc()
557 fsl_desc->tcd[i].vtcd = dma_pool_alloc(fsl_chan->tcd_pool, in fsl_edma_alloc_desc()
558 GFP_NOWAIT, &fsl_desc->tcd[i].ptcd); in fsl_edma_alloc_desc()
559 if (!fsl_desc->tcd[i].vtcd) in fsl_edma_alloc_desc()
565 while (--i >= 0) in fsl_edma_alloc_desc()
566 dma_pool_free(fsl_chan->tcd_pool, fsl_desc->tcd[i].vtcd, in fsl_edma_alloc_desc()
567 fsl_desc->tcd[i].ptcd); in fsl_edma_alloc_desc()
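fsl_edma_alloc_desc() is the standard dma_pool allocate-N-or-roll-back idiom: TCDs are taken from the pool in a loop, and on failure everything already allocated is returned with while (--i >= 0) before the descriptor itself is freed. Reassembled from the fragments above into a hedged sketch (alloc_desc_sketch is my name; needs linux/slab.h and linux/dmapool.h):

```c
/* Sketch of the allocate-or-unwind pattern in the fragments above. */
static struct fsl_edma_desc *alloc_desc_sketch(struct fsl_edma_chan *fsl_chan,
					       int sg_len)
{
	struct fsl_edma_desc *fsl_desc;
	int i;

	fsl_desc = kzalloc(struct_size(fsl_desc, tcd, sg_len), GFP_NOWAIT);
	if (!fsl_desc)
		return NULL;

	fsl_desc->echan = fsl_chan;
	fsl_desc->n_tcds = sg_len;
	for (i = 0; i < sg_len; i++) {
		fsl_desc->tcd[i].vtcd = dma_pool_alloc(fsl_chan->tcd_pool,
				GFP_NOWAIT, &fsl_desc->tcd[i].ptcd);
		if (!fsl_desc->tcd[i].vtcd)
			goto err;
	}
	return fsl_desc;

err:
	while (--i >= 0)	/* free only what was actually allocated */
		dma_pool_free(fsl_chan->tcd_pool, fsl_desc->tcd[i].vtcd,
			      fsl_desc->tcd[i].ptcd);
	kfree(fsl_desc);
	return NULL;
}
```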
596 fsl_desc->iscyclic = true; in fsl_edma_prep_dma_cyclic()
597 fsl_desc->dirn = direction; in fsl_edma_prep_dma_cyclic()
601 fsl_chan->attr = in fsl_edma_prep_dma_cyclic()
602 fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width); in fsl_edma_prep_dma_cyclic()
603 nbytes = fsl_chan->cfg.dst_addr_width * in fsl_edma_prep_dma_cyclic()
604 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_dma_cyclic()
606 fsl_chan->attr = in fsl_edma_prep_dma_cyclic()
607 fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width); in fsl_edma_prep_dma_cyclic()
608 nbytes = fsl_chan->cfg.src_addr_width * in fsl_edma_prep_dma_cyclic()
609 fsl_chan->cfg.src_maxburst; in fsl_edma_prep_dma_cyclic()
619 last_sg = fsl_desc->tcd[(i + 1) % sg_len].ptcd; in fsl_edma_prep_dma_cyclic()
623 dst_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_dma_cyclic()
624 soff = fsl_chan->cfg.dst_addr_width; in fsl_edma_prep_dma_cyclic()
625 doff = fsl_chan->is_multi_fifo ? 4 : 0; in fsl_edma_prep_dma_cyclic()
627 src_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_dma_cyclic()
629 soff = fsl_chan->is_multi_fifo ? 4 : 0; in fsl_edma_prep_dma_cyclic()
630 doff = fsl_chan->cfg.src_addr_width; in fsl_edma_prep_dma_cyclic()
633 src_addr = fsl_chan->cfg.src_addr; in fsl_edma_prep_dma_cyclic()
634 dst_addr = fsl_chan->cfg.dst_addr; in fsl_edma_prep_dma_cyclic()
639 fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr, dst_addr, in fsl_edma_prep_dma_cyclic()
640 fsl_chan->attr, soff, nbytes, 0, iter, in fsl_edma_prep_dma_cyclic()
645 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_dma_cyclic()
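The key line in the cyclic path is 619: each TCD's scatter-gather link (last_sg) points at the next period's physical TCD, and the modulo wraps the final period back to the first, so the hardware loops indefinitely with no CPU involvement between periods. The link arithmetic in isolation:

```c
#include <stdio.h>

int main(void)
{
	int sg_len = 4;		/* four periods in the ring */

	/* index of the TCD that period i chains into */
	for (int i = 0; i < sg_len; i++)
		printf("tcd[%d] links to tcd[%d].ptcd\n",
		       i, (i + 1) % sg_len);	/* 3 wraps back to 0 */
	return 0;
}
```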
670 fsl_desc->iscyclic = false; in fsl_edma_prep_slave_sg()
671 fsl_desc->dirn = direction; in fsl_edma_prep_slave_sg()
674 fsl_chan->attr = in fsl_edma_prep_slave_sg()
675 fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width); in fsl_edma_prep_slave_sg()
676 nbytes = fsl_chan->cfg.dst_addr_width * in fsl_edma_prep_slave_sg()
677 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_sg()
679 fsl_chan->attr = in fsl_edma_prep_slave_sg()
680 fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width); in fsl_edma_prep_slave_sg()
681 nbytes = fsl_chan->cfg.src_addr_width * in fsl_edma_prep_slave_sg()
682 fsl_chan->cfg.src_maxburst; in fsl_edma_prep_slave_sg()
688 dst_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_slave_sg()
689 soff = fsl_chan->cfg.dst_addr_width; in fsl_edma_prep_slave_sg()
692 src_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_slave_sg()
695 doff = fsl_chan->cfg.src_addr_width; in fsl_edma_prep_slave_sg()
698 src_addr = fsl_chan->cfg.src_addr; in fsl_edma_prep_slave_sg()
699 dst_addr = fsl_chan->cfg.dst_addr; in fsl_edma_prep_slave_sg()
712 fsl_chan->cfg.src_maxburst : in fsl_edma_prep_slave_sg()
713 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_sg()
716 for (j = burst; j > 1; j--) { in fsl_edma_prep_slave_sg()
727 if (i < sg_len - 1) { in fsl_edma_prep_slave_sg()
728 last_sg = fsl_desc->tcd[(i + 1)].ptcd; in fsl_edma_prep_slave_sg()
729 fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr, in fsl_edma_prep_slave_sg()
730 dst_addr, fsl_chan->attr, soff, in fsl_edma_prep_slave_sg()
735 fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr, in fsl_edma_prep_slave_sg()
736 dst_addr, fsl_chan->attr, soff, in fsl_edma_prep_slave_sg()
742 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_slave_sg()
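Two details distinguish the slave_sg path. First, the chain terminates: entries before the last link to the following TCD (lines 727-730), while the final fill (line 735) is the one that requests a major-loop interrupt and drops the hardware request so the channel stops. Second, the for (j = burst; j > 1; j--) loop at line 716 is a burst-fitting search: when a segment length is not a multiple of the configured burst, the burst is walked down until j * width divides the length, falling back to a single word. A standalone demo of that search (fit_burst is my name; the loop body is reconstructed from context):

```c
#include <stdio.h>

/* Find the largest burst <= maxburst such that (burst * width)
 * divides len; fall back to burst = 1. Mirrors the loop above.
 */
static unsigned int fit_burst(unsigned int len, unsigned int width,
			      unsigned int maxburst)
{
	unsigned int j;

	for (j = maxburst; j > 1; j--)
		if (!(len % (j * width)))
			return j;
	return 1;
}

int main(void)
{
	/* 4-byte words, preferred burst of 8: 96 bytes fits burst 8
	 * (96 = 3 * 8 * 4); 100 bytes only fits burst 5 (100 = 5 * 5 * 4).
	 */
	printf("len 96  -> burst %u\n", fit_burst(96, 4, 8));
	printf("len 100 -> burst %u\n", fit_burst(100, 4, 8));
	return 0;
}
```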
755 fsl_desc->iscyclic = false; in fsl_edma_prep_memcpy()
757 fsl_chan->is_sw = true; in fsl_edma_prep_memcpy()
759 fsl_chan->is_remote = true; in fsl_edma_prep_memcpy()
762 fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[0].vtcd, dma_src, dma_dst, in fsl_edma_prep_memcpy()
766 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_memcpy()
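fsl_edma_prep_memcpy() flips the channel into software-triggered mode (is_sw, line 757) because a pure memory copy has no peripheral request line. None of that is visible to clients; a routine dmaengine memcpy submission through the generic API looks like this (hedged sketch: do_dma_memcpy is my name, dst/src are assumed to be already-mapped DMA addresses, and error handling is trimmed):

```c
#include <linux/dmaengine.h>

static int do_dma_memcpy(dma_addr_t dst, dma_addr_t src, size_t len)
{
	struct dma_async_tx_descriptor *tx;
	struct dma_chan *chan;
	dma_cap_mask_t mask;
	dma_cookie_t cookie;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);
	chan = dma_request_chan_by_mask(&mask);
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx) {
		dma_release_channel(chan);
		return -EINVAL;
	}

	cookie = dmaengine_submit(tx);	/* queue on the virtual channel */
	dma_async_issue_pending(chan);	/* kicks fsl_edma_issue_pending() */

	/* simplest completion strategy; real users often use callbacks */
	dma_sync_wait(chan, cookie);

	dma_release_channel(chan);
	return 0;
}
```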
773 lockdep_assert_held(&fsl_chan->vchan.lock); in fsl_edma_xfer_desc()
775 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_edma_xfer_desc()
778 fsl_chan->edesc = to_fsl_edma_desc(vdesc); in fsl_edma_xfer_desc()
779 fsl_edma_set_tcd_regs(fsl_chan, fsl_chan->edesc->tcd[0].vtcd); in fsl_edma_xfer_desc()
781 fsl_chan->status = DMA_IN_PROGRESS; in fsl_edma_xfer_desc()
789 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
791 if (unlikely(fsl_chan->pm_state != RUNNING)) { in fsl_edma_issue_pending()
792 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
797 if (vchan_issue_pending(&fsl_chan->vchan) && !fsl_chan->edesc) in fsl_edma_issue_pending()
800 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
809 clk_prepare_enable(fsl_chan->clk); in fsl_edma_alloc_chan_resources()
811 fsl_chan->tcd_pool = dma_pool_create("tcd_pool", chan->device->dev, in fsl_edma_alloc_chan_resources()
816 if (fsl_chan->txirq) { in fsl_edma_alloc_chan_resources()
817 ret = request_irq(fsl_chan->txirq, fsl_chan->irq_handler, IRQF_SHARED, in fsl_edma_alloc_chan_resources()
818 fsl_chan->chan_name, fsl_chan); in fsl_edma_alloc_chan_resources()
821 dma_pool_destroy(fsl_chan->tcd_pool); in fsl_edma_alloc_chan_resources()
832 struct fsl_edma_engine *edma = fsl_chan->edma; in fsl_edma_free_chan_resources()
836 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_free_chan_resources()
838 if (edma->drvdata->dmamuxs) in fsl_edma_free_chan_resources()
840 fsl_chan->edesc = NULL; in fsl_edma_free_chan_resources()
841 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_free_chan_resources()
843 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_free_chan_resources()
845 if (fsl_chan->txirq) in fsl_edma_free_chan_resources()
846 free_irq(fsl_chan->txirq, fsl_chan); in fsl_edma_free_chan_resources()
848 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_free_chan_resources()
849 dma_pool_destroy(fsl_chan->tcd_pool); in fsl_edma_free_chan_resources()
850 fsl_chan->tcd_pool = NULL; in fsl_edma_free_chan_resources()
851 fsl_chan->is_sw = false; in fsl_edma_free_chan_resources()
852 fsl_chan->srcid = 0; in fsl_edma_free_chan_resources()
853 fsl_chan->is_remote = false; in fsl_edma_free_chan_resources()
855 clk_disable_unprepare(fsl_chan->clk); in fsl_edma_free_chan_resources()
863 &dmadev->channels, vchan.chan.device_node) { in fsl_edma_cleanup_vchan()
864 list_del(&chan->vchan.chan.device_node); in fsl_edma_cleanup_vchan()
865 tasklet_kill(&chan->vchan.task); in fsl_edma_cleanup_vchan()
879 bool is64 = !!(edma->drvdata->flags & FSL_EDMA_DRV_EDMA64); in fsl_edma_setup_regs()
881 edma->regs.cr = edma->membase + EDMA_CR; in fsl_edma_setup_regs()
882 edma->regs.es = edma->membase + EDMA_ES; in fsl_edma_setup_regs()
883 edma->regs.erql = edma->membase + EDMA_ERQ; in fsl_edma_setup_regs()
884 edma->regs.eeil = edma->membase + EDMA_EEI; in fsl_edma_setup_regs()
886 edma->regs.serq = edma->membase + (is64 ? EDMA64_SERQ : EDMA_SERQ); in fsl_edma_setup_regs()
887 edma->regs.cerq = edma->membase + (is64 ? EDMA64_CERQ : EDMA_CERQ); in fsl_edma_setup_regs()
888 edma->regs.seei = edma->membase + (is64 ? EDMA64_SEEI : EDMA_SEEI); in fsl_edma_setup_regs()
889 edma->regs.ceei = edma->membase + (is64 ? EDMA64_CEEI : EDMA_CEEI); in fsl_edma_setup_regs()
890 edma->regs.cint = edma->membase + (is64 ? EDMA64_CINT : EDMA_CINT); in fsl_edma_setup_regs()
891 edma->regs.cerr = edma->membase + (is64 ? EDMA64_CERR : EDMA_CERR); in fsl_edma_setup_regs()
892 edma->regs.ssrt = edma->membase + (is64 ? EDMA64_SSRT : EDMA_SSRT); in fsl_edma_setup_regs()
893 edma->regs.cdne = edma->membase + (is64 ? EDMA64_CDNE : EDMA_CDNE); in fsl_edma_setup_regs()
894 edma->regs.intl = edma->membase + (is64 ? EDMA64_INTL : EDMA_INTR); in fsl_edma_setup_regs()
895 edma->regs.errl = edma->membase + (is64 ? EDMA64_ERRL : EDMA_ERR); in fsl_edma_setup_regs()
898 edma->regs.erqh = edma->membase + EDMA64_ERQH; in fsl_edma_setup_regs()
899 edma->regs.eeih = edma->membase + EDMA64_EEIH; in fsl_edma_setup_regs()
900 edma->regs.errh = edma->membase + EDMA64_ERRH; in fsl_edma_setup_regs()
901 edma->regs.inth = edma->membase + EDMA64_INTH; in fsl_edma_setup_regs()
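fsl_edma_setup_regs() resolves every register address once at probe time; the only structural difference for EDMA64 is the second bank of high-word registers (ERQH/EEIH/INTH/ERRH) mapped at the tail of the listing. Assuming the usual eDMA64 layout of 64 channels split across paired 32-bit low/high registers, channel n lands in the low bank for n < 32 and the high bank otherwise, at bit n % 32:

```c
#include <stdio.h>

int main(void)
{
	/* EDMA64 (assumed layout): channels 0-31 in the low registers
	 * (ERQL ...), 32-63 in the high bank (ERQH ...), bit = ch % 32.
	 */
	for (int ch = 0; ch < 64; ch += 31)
		printf("ch %2d -> %s bit %d\n", ch,
		       ch < 32 ? "ERQL" : "ERQH", ch & 31);
	return 0;	/* ch 0 -> ERQL bit 0, 31 -> ERQL bit 31, 62 -> ERQH bit 30 */
}
```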