/* Search excerpt: lines of drivers/dma/imx-dma.c matching "ahb", "burst" and "config" */
// SPDX-License-Identifier: GPL-2.0+
//
// drivers/dma/imx-dma.c
//
// Copyright 2012 Javier Martin, Vista Silicon <javier.martin@vista-silicon.com>

#include <linux/dma-mapping.h>
#include <linux/dma/imx-dma.h>
#define IMX_DMA_LENGTH_LOOP	((unsigned int)-1)

#define DMA_DBTOSR	0x0c		/* Burst timeout status register */
#define DMA_DBTOCR	0x1c		/* Burst timeout control register */
#define DMA_WSRA	0x40		/* W-size register A */
#define DMA_XSRA	0x44		/* X-size register A */
#define DMA_YSRA	0x48		/* Y-size register A */
#define DMA_WSRB	0x4c		/* W-size register B */
#define DMA_XSRB	0x50		/* X-size register B */
#define DMA_YSRB	0x54		/* Y-size register B */
#define DMA_BLR(x)	(0x94 + ((x) << 6))	/* Burst length registers */
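/*
 * Editor's worked example (derived purely from the macro arithmetic above):
 * per-channel registers sit on a 0x40-byte stride via the (x) << 6 term,
 * so DMA_BLR(0) == 0x94 and DMA_BLR(1) == 0x94 + 0x40 == 0xd4.
 */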
/* member of struct imxdma_channel: */
	struct dma_slave_config		config;
/* in the of_device_id match table: */
	.compatible = "fsl,imx1-dma", .data = (const void *)IMX1_DMA,
	.compatible = "fsl,imx27-dma", .data = (const void *)IMX27_DMA,
/* in is_imx1_dma(): */
	return imxdma->devtype == IMX1_DMA;

/* in is_imx27_dma(): */
	return imxdma->devtype == IMX27_DMA;
/* in imxdma_chan_is_doing_cyclic(): */
	if (!list_empty(&imxdmac->ld_active)) {
		desc = list_first_entry(&imxdmac->ld_active, struct imxdma_desc,
					node);
		if (desc->type == IMXDMA_DESC_CYCLIC)
/* in imx_dmav1_writel(): */
	__raw_writel(val, imxdma->base + offset);

/* in imx_dmav1_readl(): */
	return __raw_readl(imxdma->base + offset);
/* in imxdma_hw_chain(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	return imxdmac->hw_chaining;
/* imxdma_sg_next() - prepare next chunk for scatter-gather DMA emulation */
	struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan);
	struct imxdma_engine *imxdma = imxdmac->imxdma;
	struct scatterlist *sg = d->sg;

	now = min_t(size_t, d->len, sg_dma_len(sg));
	if (d->len != IMX_DMA_LENGTH_LOOP)
		d->len -= now;

	if (d->direction == DMA_DEV_TO_MEM)
		imx_dmav1_writel(imxdma, sg->dma_address,
				 DMA_DAR(imxdmac->channel));
	else
		imx_dmav1_writel(imxdma, sg->dma_address,
				 DMA_SAR(imxdmac->channel));

	imx_dmav1_writel(imxdma, now, DMA_CNTR(imxdmac->channel));

	dev_dbg(imxdma->dev, " %s channel: %d dst 0x%08x, src 0x%08x, size 0x%08x\n",
		__func__, imxdmac->channel,
		imx_dmav1_readl(imxdma, DMA_DAR(imxdmac->channel)),
		imx_dmav1_readl(imxdma, DMA_SAR(imxdmac->channel)),
		imx_dmav1_readl(imxdma, DMA_CNTR(imxdmac->channel)));
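/*
 * Editor's note, inferred from the code above: the controller has no
 * hardware scatter-gather, so the CPU reprograms SAR/DAR/CNTR with one
 * scatterlist chunk at a time.  For cyclic transfers d->len is pinned at
 * IMX_DMA_LENGTH_LOOP, so the remaining-length accounting is skipped and
 * the list is walked indefinitely.
 */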
/* in imxdma_enable_hw(): */
	struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan);
	struct imxdma_engine *imxdma = imxdmac->imxdma;
	int channel = imxdmac->channel;

	dev_dbg(imxdma->dev, "%s channel %d\n", __func__, channel);

	if (!is_imx1_dma(imxdma) &&
	    d->sg && imxdma_hw_chain(imxdmac)) {
		d->sg = sg_next(d->sg);
		if (d->sg) {
/* in imxdma_disable_hw(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;
	int channel = imxdmac->channel;

	dev_dbg(imxdma->dev, "%s channel %d\n", __func__, channel);

		timer_delete(&imxdmac->watchdog);
/* in imxdma_watchdog(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;
	int channel = imxdmac->channel;

	tasklet_schedule(&imxdmac->dma_tasklet);
	dev_dbg(imxdma->dev, "channel %d: watchdog timeout!\n",
		imxdmac->channel);
/* in imxdma_err_handler(): */
		tasklet_schedule(&imxdma->channel[i].dma_tasklet);

		dev_warn(imxdma->dev,
			 "DMA timeout on channel %d -%s%s%s%s\n", i,
			 errcode & IMX_DMA_ERR_BURST ? " burst" : "",
/* in dma_irq_handle_channel(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;
	int chno = imxdmac->channel;

	spin_lock_irqsave(&imxdma->lock, flags);
	if (list_empty(&imxdmac->ld_active)) {
		spin_unlock_irqrestore(&imxdma->lock, flags);

	desc = list_first_entry(&imxdmac->ld_active,
				struct imxdma_desc, node);
	spin_unlock_irqrestore(&imxdma->lock, flags);

	if (desc->sg) {
		desc->sg = sg_next(desc->sg);

		if (desc->sg) {

			mod_timer(&imxdmac->watchdog,

	tasklet_schedule(&imxdmac->dma_tasklet);

	timer_delete(&imxdmac->watchdog);

	tasklet_schedule(&imxdmac->dma_tasklet);
/* in dma_irq_handler(): */
	dev_dbg(imxdma->dev, "%s called, disr=0x%08x\n", __func__, disr);

		dma_irq_handle_channel(&imxdma->channel[i]);
/* in imxdma_xfer_desc(): */
	struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan);
	struct imxdma_engine *imxdma = imxdmac->imxdma;
	int slot = -1;

	switch (d->type) {
	case IMXDMA_DESC_INTERLEAVED:
		for (i = 0; i < IMX_DMA_2D_SLOTS; i++) {
			if ((imxdma->slots_2d[i].count > 0) &&
			    ((imxdma->slots_2d[i].xsr != d->x) ||
			     (imxdma->slots_2d[i].ysr != d->y) ||
			     (imxdma->slots_2d[i].wsr != d->w)))
				continue;
			slot = i;
			break;
		}
		if (slot < 0)
			return -EBUSY;

		imxdma->slots_2d[slot].xsr = d->x;
		imxdma->slots_2d[slot].ysr = d->y;
		imxdma->slots_2d[slot].wsr = d->w;
		imxdma->slots_2d[slot].count++;

		imxdmac->slot_2d = slot;
		imxdmac->enabled_2d = true;

		if (slot == IMX_DMA_2D_SLOT_A) {
			d->config_mem &= ~CCR_MSEL_B;
			d->config_port &= ~CCR_MSEL_B;
			imx_dmav1_writel(imxdma, d->x, DMA_XSRA);
			imx_dmav1_writel(imxdma, d->y, DMA_YSRA);
			imx_dmav1_writel(imxdma, d->w, DMA_WSRA);
		} else {
			d->config_mem |= CCR_MSEL_B;
			d->config_port |= CCR_MSEL_B;
			imx_dmav1_writel(imxdma, d->x, DMA_XSRB);
			imx_dmav1_writel(imxdma, d->y, DMA_YSRB);
			imx_dmav1_writel(imxdma, d->w, DMA_WSRB);
		}
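/*
 * Editor's note: the hardware provides exactly two global 2D parameter
 * banks, A and B, shared by all channels.  XSR holds the line length,
 * YSR the number of lines and WSR the line stride; CCR_MSEL_B selects
 * bank B for a channel, and slots_2d[].count reference-counts each bank
 * so that concurrent descriptors with identical x/y/w can share one.
 */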
		/*
		 * We fall through here intentionally, since a 2D transfer is
		 * similar enough to a 1D one.
		 */
		imx_dmav1_writel(imxdma, d->src, DMA_SAR(imxdmac->channel));
		imx_dmav1_writel(imxdma, d->dest, DMA_DAR(imxdmac->channel));
		imx_dmav1_writel(imxdma, d->config_mem | (d->config_port << 2),
				 DMA_CCR(imxdmac->channel));

		imx_dmav1_writel(imxdma, d->len, DMA_CNTR(imxdmac->channel));

		dev_dbg(imxdma->dev,
			__func__, imxdmac->channel,
			(unsigned long long)d->dest,
			(unsigned long long)d->src, d->len);

		if (d->direction == DMA_DEV_TO_MEM) {
			imx_dmav1_writel(imxdma, imxdmac->per_address,
					 DMA_SAR(imxdmac->channel));
			imx_dmav1_writel(imxdma, imxdmac->ccr_from_device,
					 DMA_CCR(imxdmac->channel));

			dev_dbg(imxdma->dev,
				__func__, imxdmac->channel,
				d->sg, d->sgcount, d->len,
				(unsigned long long)imxdmac->per_address);
		} else if (d->direction == DMA_MEM_TO_DEV) {
			imx_dmav1_writel(imxdma, imxdmac->per_address,
					 DMA_DAR(imxdmac->channel));
			imx_dmav1_writel(imxdma, imxdmac->ccr_to_device,
					 DMA_CCR(imxdmac->channel));

			dev_dbg(imxdma->dev,
				__func__, imxdmac->channel,
				d->sg, d->sgcount, d->len,
				(unsigned long long)imxdmac->per_address);
		} else {
			dev_err(imxdma->dev, "%s channel: %d bad dma mode\n",
				__func__, imxdmac->channel);
			return -EINVAL;
		}

	default:
		return -EINVAL;
/* in imxdma_tasklet(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	spin_lock_irqsave(&imxdma->lock, flags);

	if (list_empty(&imxdmac->ld_active)) {
		spin_unlock_irqrestore(&imxdma->lock, flags);

	desc = list_first_entry(&imxdmac->ld_active, struct imxdma_desc, node);

	/* Only in non-cyclic cases would it be marked as complete */
	dma_cookie_complete(&desc->desc);

	if (imxdmac->enabled_2d) {
		imxdma->slots_2d[imxdmac->slot_2d].count--;
		imxdmac->enabled_2d = false;
	}

	list_move_tail(imxdmac->ld_active.next, &imxdmac->ld_free);

	if (!list_empty(&imxdmac->ld_queue)) {
		next_desc = list_first_entry(&imxdmac->ld_queue,
					     struct imxdma_desc, node);
		list_move_tail(imxdmac->ld_queue.next, &imxdmac->ld_active);
		if (imxdma_xfer_desc(next_desc) < 0)
			dev_warn(imxdma->dev, "%s: channel: %d couldn't xfer desc\n",
				 __func__, imxdmac->channel);
	}

	spin_unlock_irqrestore(&imxdma->lock, flags);

	dmaengine_desc_get_callback_invoke(&desc->desc, NULL);
/* in imxdma_terminate_all(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	spin_lock_irqsave(&imxdma->lock, flags);
	list_splice_tail_init(&imxdmac->ld_active, &imxdmac->ld_free);
	list_splice_tail_init(&imxdmac->ld_queue, &imxdmac->ld_free);
	spin_unlock_irqrestore(&imxdma->lock, flags);
/* in imxdma_config_write(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	if (dmaengine_cfg->direction == DMA_DEV_TO_MEM) {
		imxdmac->per_address = dmaengine_cfg->src_addr;
		imxdmac->watermark_level = dmaengine_cfg->src_maxburst;
		imxdmac->word_size = dmaengine_cfg->src_addr_width;
	} else {
		imxdmac->per_address = dmaengine_cfg->dst_addr;
		imxdmac->watermark_level = dmaengine_cfg->dst_maxburst;
		imxdmac->word_size = dmaengine_cfg->dst_addr_width;
	}

	switch (imxdmac->word_size) {

	imxdmac->hw_chaining = 0;

	imxdmac->ccr_from_device = (mode | IMX_DMA_TYPE_FIFO) |
		((IMX_DMA_MEMSIZE_32 | IMX_DMA_TYPE_LINEAR) << 2) | CCR_REN;
	imxdmac->ccr_to_device = (IMX_DMA_MEMSIZE_32 | IMX_DMA_TYPE_LINEAR) |
		((mode | IMX_DMA_TYPE_FIFO) << 2) | CCR_REN;
	imx_dmav1_writel(imxdma, imxdmac->dma_request,
			 DMA_RSSR(imxdmac->channel));

	/* Set burst length */
	imx_dmav1_writel(imxdma, imxdmac->watermark_level *
			 imxdmac->word_size, DMA_BLR(imxdmac->channel));
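/*
 * Editor's worked example for the BLR write above: with
 * src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES and src_maxburst = 8,
 * the burst length register is programmed to 8 * 2 = 16 bytes.
 */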
/* in imxdma_config(): */
	memcpy(&imxdmac->config, dmaengine_cfg, sizeof(*dmaengine_cfg));
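/*
 * Hedged client-side sketch of how a peripheral driver would reach
 * imxdma_config() through the generic dmaengine API; 'chan' and
 * 'fifo_phys' are hypothetical placeholders:
 *
 *	struct dma_slave_config cfg = {
 *		.direction	= DMA_DEV_TO_MEM,
 *		.src_addr	= fifo_phys,
 *		.src_addr_width	= DMA_SLAVE_BUSWIDTH_2_BYTES,
 *		.src_maxburst	= 8,
 *	};
 *	dmaengine_slave_config(chan, &cfg);
 */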
/* in imxdma_tx_submit(): */
	struct imxdma_channel *imxdmac = to_imxdma_chan(tx->chan);
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	spin_lock_irqsave(&imxdma->lock, flags);
	list_move_tail(imxdmac->ld_free.next, &imxdmac->ld_queue);
	spin_unlock_irqrestore(&imxdma->lock, flags);
/* in imxdma_alloc_chan_resources(): */
	struct imx_dma_data *data = chan->private;

	if (data != NULL)
		imxdmac->dma_request = data->dma_request;

	while (imxdmac->descs_allocated < IMXDMA_MAX_CHAN_DESCRIPTORS) {
		desc = kzalloc(sizeof(*desc), GFP_KERNEL);
		if (!desc)
			break;
		dma_async_tx_descriptor_init(&desc->desc, chan);
		desc->desc.tx_submit = imxdma_tx_submit;
		desc->desc.flags = DMA_CTRL_ACK;
		desc->status = DMA_COMPLETE;

		list_add_tail(&desc->node, &imxdmac->ld_free);
		imxdmac->descs_allocated++;
	}

	if (!imxdmac->descs_allocated)
		return -ENOMEM;

	return imxdmac->descs_allocated;
/* in imxdma_free_chan_resources(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	spin_lock_irqsave(&imxdma->lock, flags);

	list_splice_tail_init(&imxdmac->ld_active, &imxdmac->ld_free);
	list_splice_tail_init(&imxdmac->ld_queue, &imxdmac->ld_free);

	spin_unlock_irqrestore(&imxdma->lock, flags);

	list_for_each_entry_safe(desc, _desc, &imxdmac->ld_free, node) {
		kfree(desc);
		imxdmac->descs_allocated--;
	}
	INIT_LIST_HEAD(&imxdmac->ld_free);

	kfree(imxdmac->sg_list);
	imxdmac->sg_list = NULL;
/* in imxdma_prep_slave_sg(): */
	if (list_empty(&imxdmac->ld_free) ||
	    imxdma_chan_is_doing_cyclic(imxdmac))
		return NULL;

	desc = list_first_entry(&imxdmac->ld_free, struct imxdma_desc, node);

	imxdma_config_write(chan, &imxdmac->config, direction);

	switch (imxdmac->word_size) {
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
		if (sg_dma_len(sgl) & 3 || sgl->dma_address & 3)
			return NULL;
		break;
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
		if (sg_dma_len(sgl) & 1 || sgl->dma_address & 1)
			return NULL;
		break;
	}

	desc->type = IMXDMA_DESC_SLAVE_SG;
	desc->sg = sgl;
	desc->sgcount = sg_len;
	desc->len = dma_length;
	desc->direction = direction;
	if (direction == DMA_DEV_TO_MEM)
		desc->src = imxdmac->per_address;
	else
		desc->dest = imxdmac->per_address;
	desc->desc.callback = NULL;
	desc->desc.callback_param = NULL;

	return &desc->desc;
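/*
 * Hedged usage sketch for the slave-sg path; 'chan', 'sgl' and 'nents'
 * are placeholders the client obtains elsewhere:
 *
 *	struct dma_async_tx_descriptor *txd;
 *
 *	txd = dmaengine_prep_slave_sg(chan, sgl, nents,
 *				      DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
 *	if (txd) {
 *		dmaengine_submit(txd);
 *		dma_async_issue_pending(chan);
 *	}
 */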
/* in imxdma_prep_dma_cyclic(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	dev_dbg(imxdma->dev, "%s channel: %d buf_len=%zu period_len=%zu\n",
		__func__, imxdmac->channel, buf_len, period_len);

	if (list_empty(&imxdmac->ld_free) ||
	    imxdma_chan_is_doing_cyclic(imxdmac))
		return NULL;

	desc = list_first_entry(&imxdmac->ld_free, struct imxdma_desc, node);

	kfree(imxdmac->sg_list);

	imxdmac->sg_list = kcalloc(periods + 1,
			sizeof(struct scatterlist), GFP_ATOMIC);
	if (!imxdmac->sg_list)
		return NULL;

	sg_init_table(imxdmac->sg_list, periods);

	for (i = 0; i < periods; i++) {
		sg_assign_page(&imxdmac->sg_list[i], NULL);
		imxdmac->sg_list[i].offset = 0;
		imxdmac->sg_list[i].dma_address = dma_addr;
		sg_dma_len(&imxdmac->sg_list[i]) = period_len;
		dma_addr += period_len;
	}

	sg_chain(imxdmac->sg_list, periods + 1, imxdmac->sg_list);

	desc->type = IMXDMA_DESC_CYCLIC;
	desc->sg = imxdmac->sg_list;
	desc->sgcount = periods;
	desc->len = IMX_DMA_LENGTH_LOOP;
	desc->direction = direction;
	if (direction == DMA_DEV_TO_MEM)
		desc->src = imxdmac->per_address;
	else
		desc->dest = imxdmac->per_address;
	desc->desc.callback = NULL;
	desc->desc.callback_param = NULL;

	imxdma_config_write(chan, &imxdmac->config, direction);

	return &desc->desc;
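/*
 * Editor's note: sg_chain() above links the (periods + 1)-entry table
 * back onto its own first entry, so sg_next() never runs out; together
 * with len == IMX_DMA_LENGTH_LOOP this is what makes the transfer
 * cyclic on hardware that has no native ring support.
 */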
/* in imxdma_prep_dma_memcpy(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	dev_dbg(imxdma->dev, "%s channel: %d src=0x%llx dst=0x%llx len=%zu\n",
		__func__, imxdmac->channel, (unsigned long long)src,
		(unsigned long long)dest, len);

	if (list_empty(&imxdmac->ld_free) ||
	    imxdma_chan_is_doing_cyclic(imxdmac))
		return NULL;

	desc = list_first_entry(&imxdmac->ld_free, struct imxdma_desc, node);

	desc->type = IMXDMA_DESC_MEMCPY;
	desc->src = src;
	desc->dest = dest;
	desc->len = len;
	desc->direction = DMA_MEM_TO_MEM;
	desc->config_port = IMX_DMA_MEMSIZE_32 | IMX_DMA_TYPE_LINEAR;
	desc->config_mem = IMX_DMA_MEMSIZE_32 | IMX_DMA_TYPE_LINEAR;
	desc->desc.callback = NULL;
	desc->desc.callback_param = NULL;

	return &desc->desc;
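/*
 * Hedged usage sketch for the memcpy path; 'chan', 'dst_phys' and
 * 'src_phys' are placeholders (dma_addr_t bus addresses):
 *
 *	struct dma_async_tx_descriptor *txd;
 *
 *	txd = dmaengine_prep_dma_memcpy(chan, dst_phys, src_phys, len,
 *					DMA_PREP_INTERRUPT);
 *	if (txd) {
 *		dmaengine_submit(txd);
 *		dma_async_issue_pending(chan);
 *	}
 */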
/* in imxdma_prep_dma_interleaved(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	dev_dbg(imxdma->dev, "%s channel: %d src_start=0x%llx dst_start=0x%llx\n"
		"   src_sgl=%s dst_sgl=%s numf=%zu frame_size=%zu\n", __func__,
		imxdmac->channel, (unsigned long long)xt->src_start,
		(unsigned long long)xt->dst_start,
		str_true_false(xt->src_sgl), str_true_false(xt->dst_sgl),
		xt->numf, xt->frame_size);

	if (list_empty(&imxdmac->ld_free) ||
	    imxdma_chan_is_doing_cyclic(imxdmac))
		return NULL;

	if (xt->frame_size != 1 || xt->numf <= 0 || xt->dir != DMA_MEM_TO_MEM)
		return NULL;

	desc = list_first_entry(&imxdmac->ld_free, struct imxdma_desc, node);

	desc->type = IMXDMA_DESC_INTERLEAVED;
	desc->src = xt->src_start;
	desc->dest = xt->dst_start;
	desc->x = xt->sgl[0].size;
	desc->y = xt->numf;
	desc->w = xt->sgl[0].icg + desc->x;
	desc->len = desc->x * desc->y;
	desc->direction = DMA_MEM_TO_MEM;
	desc->config_port = IMX_DMA_MEMSIZE_32;
	desc->config_mem = IMX_DMA_MEMSIZE_32;
	if (xt->src_sgl)
		desc->config_mem |= IMX_DMA_TYPE_2D;
	if (xt->dst_sgl)
		desc->config_port |= IMX_DMA_TYPE_2D;
	desc->desc.callback = NULL;
	desc->desc.callback_param = NULL;

	return &desc->desc;
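/*
 * Editor's worked example for the x/y/w arithmetic above: to gather a
 * 64-byte-wide column from a buffer whose lines start 128 bytes apart,
 * pass sgl[0].size = 64, sgl[0].icg = 64 and numf = 16; that yields
 * x = 64, w = 64 + 64 = 128, y = 16 and len = x * y = 1024 bytes.
 */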
/* in imxdma_issue_pending(): */
	struct imxdma_engine *imxdma = imxdmac->imxdma;

	spin_lock_irqsave(&imxdma->lock, flags);

	if (list_empty(&imxdmac->ld_active) &&
	    !list_empty(&imxdmac->ld_queue)) {
		desc = list_first_entry(&imxdmac->ld_queue,
					struct imxdma_desc, node);

		if (imxdma_xfer_desc(desc) < 0) {
			dev_warn(imxdma->dev,
				 "%s: channel: %d couldn't issue DMA xfer\n",
				 __func__, imxdmac->channel);
		} else {
			list_move_tail(imxdmac->ld_queue.next,
				       &imxdmac->ld_active);
		}
	}
	spin_unlock_irqrestore(&imxdma->lock, flags);
/* in imxdma_filter_fn(): */
	if (chan->device->dev != fdata->imxdma->dev)
		return false;

	imxdma_chan->dma_request = fdata->request;
	chan->private = NULL;

	return true;
/* in imxdma_xlate(): */
	int count = dma_spec->args_count;
	struct imxdma_engine *imxdma = ofdma->of_dma_data;

	fdata.request = dma_spec->args[0];

	return dma_request_channel(imxdma->dma_device.cap_mask,
				   imxdma_filter_fn, &fdata);
/* in imxdma_probe(): */
	imxdma = devm_kzalloc(&pdev->dev, sizeof(*imxdma), GFP_KERNEL);
	if (!imxdma)
		return -ENOMEM;

	imxdma->dev = &pdev->dev;
	imxdma->devtype = (uintptr_t)of_device_get_match_data(&pdev->dev);

	imxdma->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(imxdma->base))
		return PTR_ERR(imxdma->base);

	imxdma->dma_ipg = devm_clk_get(&pdev->dev, "ipg");
	if (IS_ERR(imxdma->dma_ipg))
		return PTR_ERR(imxdma->dma_ipg);

	imxdma->dma_ahb = devm_clk_get(&pdev->dev, "ahb");
	if (IS_ERR(imxdma->dma_ahb))
		return PTR_ERR(imxdma->dma_ahb);

	ret = clk_prepare_enable(imxdma->dma_ipg);
	if (ret)
		return ret;
	ret = clk_prepare_enable(imxdma->dma_ahb);
	if (ret)
		goto disable_dma_ipg_clk;

	ret = devm_request_irq(&pdev->dev, irq,
			       dma_irq_handler, 0, "DMA", imxdma);
	if (ret) {
		dev_warn(imxdma->dev, "Can't register IRQ for DMA\n");
		goto disable_dma_ahb_clk;
	}
	imxdma->irq = irq;

	ret = devm_request_irq(&pdev->dev, irq_err,
			       imxdma_err_handler, 0, "DMA", imxdma);
	if (ret) {
		dev_warn(imxdma->dev, "Can't register ERRIRQ for DMA\n");
		goto disable_dma_ahb_clk;
	}
	imxdma->irq_err = irq_err;

	imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DISR);

	imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DIMR);
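/*
 * Editor's note: both writes above set one bit per channel.  Assuming
 * IMX_DMA_CHANNELS is 16, (1 << 16) - 1 == 0xffff, so the DISR write
 * clears all pending channel interrupt status while the DIMR write
 * masks every channel until it is actually set up.
 */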
	INIT_LIST_HEAD(&imxdma->dma_device.channels);

	dma_cap_set(DMA_SLAVE, imxdma->dma_device.cap_mask);
	dma_cap_set(DMA_CYCLIC, imxdma->dma_device.cap_mask);
	dma_cap_set(DMA_MEMCPY, imxdma->dma_device.cap_mask);
	dma_cap_set(DMA_INTERLEAVE, imxdma->dma_device.cap_mask);

	for (i = 0; i < IMX_DMA_2D_SLOTS; i++)
		imxdma->slots_2d[i].count = 0;

	spin_lock_init(&imxdma->lock);

	for (i = 0; i < IMX_DMA_CHANNELS; i++) {
		struct imxdma_channel *imxdmac = &imxdma->channel[i];

		ret = devm_request_irq(&pdev->dev, irq + i,
				       dma_irq_handler, 0, "DMA", imxdma);
		if (ret) {
			dev_warn(imxdma->dev, "Can't register IRQ %d "
				 "for DMA channel %d\n", irq + i, i);
			goto disable_dma_ahb_clk;
		}

		imxdmac->irq = irq + i;
		timer_setup(&imxdmac->watchdog, imxdma_watchdog, 0);

		imxdmac->imxdma = imxdma;

		INIT_LIST_HEAD(&imxdmac->ld_queue);
		INIT_LIST_HEAD(&imxdmac->ld_free);
		INIT_LIST_HEAD(&imxdmac->ld_active);

		tasklet_setup(&imxdmac->dma_tasklet, imxdma_tasklet);
		imxdmac->chan.device = &imxdma->dma_device;
		dma_cookie_init(&imxdmac->chan);
		imxdmac->channel = i;

		list_add_tail(&imxdmac->chan.device_node,
			      &imxdma->dma_device.channels);
	}

	imxdma->dma_device.dev = &pdev->dev;

	imxdma->dma_device.device_alloc_chan_resources = imxdma_alloc_chan_resources;
	imxdma->dma_device.device_free_chan_resources = imxdma_free_chan_resources;
	imxdma->dma_device.device_tx_status = imxdma_tx_status;
	imxdma->dma_device.device_prep_slave_sg = imxdma_prep_slave_sg;
	imxdma->dma_device.device_prep_dma_cyclic = imxdma_prep_dma_cyclic;
	imxdma->dma_device.device_prep_dma_memcpy = imxdma_prep_dma_memcpy;
	imxdma->dma_device.device_prep_interleaved_dma = imxdma_prep_dma_interleaved;
	imxdma->dma_device.device_config = imxdma_config;
	imxdma->dma_device.device_terminate_all = imxdma_terminate_all;
	imxdma->dma_device.device_issue_pending = imxdma_issue_pending;

	imxdma->dma_device.copy_align = DMAENGINE_ALIGN_4_BYTES;
	dma_set_max_seg_size(imxdma->dma_device.dev, 0xffffff);
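/*
 * Editor's note: the 0xffffff segment cap suggests a 24-bit CNTR count
 * register, i.e. at most 16 MiB - 1 per segment; copy_align matches the
 * 32-bit accesses configured in imxdma_prep_dma_memcpy().
 */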
	ret = dma_async_device_register(&imxdma->dma_device);
	if (ret) {
		dev_err(&pdev->dev, "unable to register\n");
		goto disable_dma_ahb_clk;
	}

	if (pdev->dev.of_node) {
		ret = of_dma_controller_register(pdev->dev.of_node,
						 imxdma_xlate, imxdma);
		if (ret)
			dev_err(&pdev->dev, "unable to register of_dma_controller\n");
	}

err_of_dma_controller:
	dma_async_device_unregister(&imxdma->dma_device);
disable_dma_ahb_clk:
	clk_disable_unprepare(imxdma->dma_ahb);
disable_dma_ipg_clk:
	clk_disable_unprepare(imxdma->dma_ipg);
/* in imxdma_free_irq(): */
	disable_irq(imxdma->irq);
	disable_irq(imxdma->irq_err);

	for (i = 0; i < IMX_DMA_CHANNELS; i++) {
		struct imxdma_channel *imxdmac = &imxdma->channel[i];

		disable_irq(imxdmac->irq);

		tasklet_kill(&imxdmac->dma_tasklet);
	}
/* in imxdma_remove(): */
	dma_async_device_unregister(&imxdma->dma_device);

	if (pdev->dev.of_node)
		of_dma_controller_free(pdev->dev.of_node);

	clk_disable_unprepare(imxdma->dma_ipg);
	clk_disable_unprepare(imxdma->dma_ahb);
/* in the platform_driver definition: */
		.name	= "imx-dma",