Lines matching "dma-engine" in the Renesas RZ/G2L DMA controller driver (rz-dmac.c)
// SPDX-License-Identifier: GPL-2.0
/*
 * Renesas RZ/G2L DMA Controller Driver
 *
 * Based on imx-dma.c
 *
 * Copyright 2012 Javier Martin, Vista Silicon <javier.martin@vista-silicon.com>
 */

#include <linux/dma-mapping.h>
#include <linux/irqchip/irq-renesas-rzv2h.h>

#include "../virt-dma.h"
	struct dma_device engine;	/* DMA engine device, embedded in struct rz_dmac */

#define to_rz_dmac(d)	container_of(d, struct rz_dmac, engine)
/*
 * -----------------------------------------------------------------------------
 * Registers
 * -----------------------------------------------------------------------------
 */

/* ... register and bitfield definitions ... */
/*
 * -----------------------------------------------------------------------------
 * Device access
 * -----------------------------------------------------------------------------
 */
	writel(val, dmac->base + offset);		/* rz_dmac_writel() */

	writel(val, dmac->ext_base + offset);		/* rz_dmac_ext_writel() */

	return readl(dmac->ext_base + offset);		/* rz_dmac_ext_readl() */

	/* rz_dmac_ch_writel(): "which" selects the per-channel or common block */
	if (which)
		writel(val, channel->ch_base + offset);
	else
		writel(val, channel->ch_cmn_base + offset);

	/* rz_dmac_ch_readl() */
	if (which)
		return readl(channel->ch_base + offset);
	else
		return readl(channel->ch_cmn_base + offset);
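/*
 * Editor's note (not part of the driver): with the helpers above, "which"
 * selects the per-channel register block (1) or the common block (0), as in
 * the calls made elsewhere in this file:
 *
 *	rz_dmac_ch_writel(channel, CHCTRL_SWRST, CHCTRL, 1);
 *	chstat = rz_dmac_ch_readl(channel, CHSTAT, 1);
 */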
/*
 * -----------------------------------------------------------------------------
 * Initialization
 * -----------------------------------------------------------------------------
 */
static void rz_lmdesc_setup(struct rz_dmac_chan *channel,
			    struct rz_lmdesc *lmdesc)
{
	dma_addr_t nxla;

	channel->lmdesc.base = lmdesc;
	channel->lmdesc.head = lmdesc;
	channel->lmdesc.tail = lmdesc;
	nxla = channel->lmdesc.base_dma;
	while (lmdesc < (channel->lmdesc.base + (DMAC_NR_LMDESC - 1))) {
		lmdesc->header = 0;
		nxla += sizeof(*lmdesc);
		lmdesc->nxla = nxla;
		lmdesc++;
	}
	/* Close the ring: the last descriptor links back to the first. */
	lmdesc->header = 0;
	lmdesc->nxla = channel->lmdesc.base_dma;
}
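/*
 * Editor's sketch (assuming the names above): after rz_lmdesc_setup() the
 * nxla fields form a closed ring over the coherent buffer, which the
 * following invariant check illustrates:
 *
 *	struct rz_lmdesc *base = channel->lmdesc.base;
 *	unsigned int i;
 *
 *	for (i = 0; i < DMAC_NR_LMDESC - 1; i++)
 *		WARN_ON(base[i].nxla !=
 *			channel->lmdesc.base_dma + (i + 1) * sizeof(*base));
 *	WARN_ON(base[DMAC_NR_LMDESC - 1].nxla != channel->lmdesc.base_dma);
 */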
/*
 * -----------------------------------------------------------------------------
 * Descriptors preparation
 * -----------------------------------------------------------------------------
 */
static void rz_dmac_lmdesc_recycle(struct rz_dmac_chan *channel)
{
	struct rz_lmdesc *lmdesc = channel->lmdesc.head;

	while (!(lmdesc->header & HEADER_LV)) {
		lmdesc->header = 0;
		lmdesc++;
		if (lmdesc >= (channel->lmdesc.base + DMAC_NR_LMDESC))
			lmdesc = channel->lmdesc.base;
	}
	channel->lmdesc.head = lmdesc;
}
	/* in rz_dmac_enable_hw() */
	struct dma_chan *chan = &channel->vc.chan;
	struct rz_dmac *dmac = to_rz_dmac(chan->device);

	dev_dbg(dmac->dev, "%s channel %d\n", __func__, channel->index);
	/* ... */
	nxla = channel->lmdesc.base_dma +
	       (sizeof(struct rz_lmdesc) * (channel->lmdesc.head -
					    channel->lmdesc.base));
	/* ... */
	chctrl = (channel->chctrl | CHCTRL_SETEN);
	/* ... */
	rz_dmac_ch_writel(channel, channel->chcfg, CHCFG, 1);
	/* ... */
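/*
 * Editor's note: the nxla computation above turns the CPU-side ring position
 * into the bus address the controller fetches next: (head - base) is an
 * element count, so an equivalent form is
 *
 *	size_t idx = channel->lmdesc.head - channel->lmdesc.base;
 *
 *	nxla = channel->lmdesc.base_dma + idx * sizeof(struct rz_lmdesc);
 */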
	/* in rz_dmac_disable_hw() */
	struct dma_chan *chan = &channel->vc.chan;
	struct rz_dmac *dmac = to_rz_dmac(chan->device);

	dev_dbg(dmac->dev, "%s channel %d\n", __func__, channel->index);
	/* ... */
	/* in rz_dmac_prepare_desc_for_memcpy() */
	struct dma_chan *chan = &channel->vc.chan;
	struct rz_dmac *dmac = to_rz_dmac(chan->device);
	struct rz_lmdesc *lmdesc = channel->lmdesc.tail;
	struct rz_dmac_desc *d = channel->desc;
	u32 chcfg = CHCFG_MEM_COPY;

	/* prepare descriptor */
	lmdesc->sa = d->src;
	lmdesc->da = d->dest;
	lmdesc->tb = d->len;
	lmdesc->chcfg = chcfg;
	lmdesc->chitvl = 0;
	lmdesc->chext = 0;
	lmdesc->header = HEADER_LV;

	if (dmac->has_icu) {
		rzv2h_icu_register_dma_req(dmac->icu.pdev, dmac->icu.dmac_index,
					   channel->index,
					   RZV2H_ICU_DMAC_REQ_NO_DEFAULT);
	} else {
		rz_dmac_set_dmars_register(dmac, channel->index, 0);
	}

	channel->chcfg = chcfg;
	channel->chctrl = CHCTRL_STG | CHCTRL_SETEN;
static void rz_dmac_prepare_descs_for_slave_sg(struct rz_dmac_chan *channel)
{
	struct dma_chan *chan = &channel->vc.chan;
	struct rz_dmac *dmac = to_rz_dmac(chan->device);
	struct rz_dmac_desc *d = channel->desc;
	struct scatterlist *sg, *sgl = d->sg;
	struct rz_lmdesc *lmdesc;
	unsigned int i, sg_len = d->sgcount;

	channel->chcfg |= CHCFG_SEL(channel->index) | CHCFG_DEM | CHCFG_DMS;

	if (d->direction == DMA_DEV_TO_MEM) {
		channel->chcfg |= CHCFG_SAD;
		channel->chcfg &= ~CHCFG_REQD;
	} else {
		channel->chcfg |= CHCFG_DAD | CHCFG_REQD;
	}

	lmdesc = channel->lmdesc.tail;

	for (i = 0, sg = sgl; i < sg_len; i++, sg = sg_next(sg)) {
		if (d->direction == DMA_DEV_TO_MEM) {
			lmdesc->sa = channel->src_per_address;
			lmdesc->da = sg_dma_address(sg);
		} else {
			lmdesc->sa = sg_dma_address(sg);
			lmdesc->da = channel->dst_per_address;
		}

		lmdesc->tb = sg_dma_len(sg);
		lmdesc->chitvl = 0;
		lmdesc->chext = 0;
		if (i == (sg_len - 1)) {
			/* Unmask the completion interrupt on the last chunk only. */
			lmdesc->chcfg = (channel->chcfg & ~CHCFG_DEM);
			lmdesc->header = HEADER_LV;
		} else {
			lmdesc->chcfg = channel->chcfg;
			lmdesc->header = HEADER_LV;
		}
		if (++lmdesc >= (channel->lmdesc.base + DMAC_NR_LMDESC))
			lmdesc = channel->lmdesc.base;
	}

	channel->lmdesc.tail = lmdesc;

	if (dmac->has_icu) {
		rzv2h_icu_register_dma_req(dmac->icu.pdev, dmac->icu.dmac_index,
					   channel->index, channel->mid_rid);
	} else {
		rz_dmac_set_dmars_register(dmac, channel->index, channel->mid_rid);
	}

	channel->chctrl = CHCTRL_SETEN;
}
	/* in rz_dmac_xfer_desc() */
	struct rz_dmac_desc *d = chan->desc;

	vd = vchan_next_desc(&chan->vc);
	/* ... */
	list_del(&vd->node);

	switch (d->type) {
	/* ... dispatch to the memcpy or slave-sg preparation above ... */
	default:
		return -EINVAL;
	}
/*
 * -----------------------------------------------------------------------------
 * DMA engine operations
 * -----------------------------------------------------------------------------
 */
	/* in rz_dmac_alloc_chan_resources() */
	while (channel->descs_allocated < RZ_DMAC_MAX_CHAN_DESCRIPTORS) {
		/* ... allocate a descriptor, stop on failure ... */
		list_add_tail(&desc->node, &channel->ld_free);
		channel->descs_allocated++;
	}
	if (!channel->descs_allocated)
		return -ENOMEM;
	return channel->descs_allocated;
	/* in rz_dmac_free_chan_resources() */
	struct rz_dmac *dmac = to_rz_dmac(chan->device);
	struct rz_lmdesc *lmdesc = channel->lmdesc.base;
	unsigned int i;

	spin_lock_irqsave(&channel->vc.lock, flags);
	for (i = 0; i < DMAC_NR_LMDESC; i++)
		lmdesc[i].header = 0;	/* invalidate the whole ring */
	rz_dmac_disable_hw(channel);
	list_splice_tail_init(&channel->ld_active, &channel->ld_free);
	list_splice_tail_init(&channel->ld_queue, &channel->ld_free);

	if (channel->mid_rid >= 0) {
		clear_bit(channel->mid_rid, dmac->modules);
		channel->mid_rid = -EINVAL;
	}

	spin_unlock_irqrestore(&channel->vc.lock, flags);

	list_for_each_entry_safe(desc, _desc, &channel->ld_free, node) {
		kfree(desc);
		channel->descs_allocated--;
	}

	INIT_LIST_HEAD(&channel->ld_free);
	vchan_free_chan_resources(&channel->vc);
static struct dma_async_tx_descriptor *
rz_dmac_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
			size_t len, unsigned long flags)
{
	struct rz_dmac_chan *channel = to_rz_dmac_chan(chan);
	struct rz_dmac *dmac = to_rz_dmac(chan->device);
	struct rz_dmac_desc *desc;

	dev_dbg(dmac->dev, "%s channel: %d src=0x%pad dst=0x%pad len=%zu\n",
		__func__, channel->index, &src, &dest, len);

	if (list_empty(&channel->ld_free))
		return NULL;

	desc = list_first_entry(&channel->ld_free, struct rz_dmac_desc, node);
	desc->type = RZ_DMAC_DESC_MEMCPY;
	desc->src = src;
	desc->dest = dest;
	desc->len = len;
	desc->direction = DMA_MEM_TO_MEM;

	list_move_tail(channel->ld_free.next, &channel->ld_queue);
	return vchan_tx_prep(&channel->vc, &desc->vd, flags);
}
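/*
 * Editor's sketch of a dmaengine client exercising a memcpy path like the one
 * above. Hypothetical consumer code, not part of this driver; the calls are
 * standard dmaengine API, everything else is invented for illustration.
 */
static int example_dma_memcpy(dma_addr_t dst, dma_addr_t src, size_t len)
{
	struct dma_async_tx_descriptor *tx;
	struct dma_chan *chan;
	dma_cap_mask_t mask;
	dma_cookie_t cookie;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);
	chan = dma_request_chan_by_mask(&mask);	/* any memcpy-capable DMAC */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_CTRL_ACK);
	if (!tx) {
		dma_release_channel(chan);
		return -ENOMEM;
	}

	cookie = dmaengine_submit(tx);
	dma_async_issue_pending(chan);	/* ends up in rz_dmac_issue_pending() */
	dma_sync_wait(chan, cookie);	/* busy-waits for completion */
	dma_release_channel(chan);
	return 0;
}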
	/* in rz_dmac_prep_slave_sg() */
	if (list_empty(&channel->ld_free))
		return NULL;

	desc = list_first_entry(&channel->ld_free, struct rz_dmac_desc, node);
	/* ... sum sg_dma_len() over the list into dma_length ... */
	desc->type = RZ_DMAC_DESC_SLAVE_SG;
	desc->sg = sgl;
	desc->sgcount = sg_len;
	desc->len = dma_length;
	desc->direction = direction;

	if (direction == DMA_DEV_TO_MEM)
		desc->src = channel->src_per_address;
	else
		desc->dest = channel->dst_per_address;

	list_move_tail(channel->ld_free.next, &channel->ld_queue);
	return vchan_tx_prep(&channel->vc, &desc->vd, flags);
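/*
 * Editor's sketch of the consumer side of the slave-sg path above.
 * Hypothetical peripheral-driver code, not part of this file; the channel,
 * mapped scatterlist, and callback are assumed to exist already.
 */
static int example_start_rx(struct dma_chan *chan, struct scatterlist *sgl,
			    unsigned int sg_len, dma_async_tx_callback done,
			    void *arg)
{
	struct dma_async_tx_descriptor *tx;

	tx = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
				     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx)
		return -EINVAL;

	tx->callback = done;		/* completed from the IRQ thread */
	tx->callback_param = arg;
	dmaengine_submit(tx);
	dma_async_issue_pending(chan);
	return 0;
}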
	/* in rz_dmac_terminate_all() */
	rz_dmac_disable_hw(channel);
	spin_lock_irqsave(&channel->vc.lock, flags);
	list_splice_tail_init(&channel->ld_active, &channel->ld_free);
	list_splice_tail_init(&channel->ld_queue, &channel->ld_free);
	vchan_get_all_descriptors(&channel->vc, &head);
	spin_unlock_irqrestore(&channel->vc.lock, flags);
	vchan_dma_desc_free_list(&channel->vc, &head);
	/* in rz_dmac_issue_pending() */
	struct rz_dmac *dmac = to_rz_dmac(chan->device);
	struct rz_dmac_desc *desc;
	unsigned long flags;

	spin_lock_irqsave(&channel->vc.lock, flags);
	if (!list_empty(&channel->ld_queue)) {
		desc = list_first_entry(&channel->ld_queue,
					struct rz_dmac_desc, node);
		channel->desc = desc;
		if (vchan_issue_pending(&channel->vc)) {
			if (rz_dmac_xfer_desc(channel) < 0)
				dev_warn(dmac->dev, "ch: %d couldn't issue DMA xfer\n",
					 channel->index);
			else
				list_move_tail(channel->ld_queue.next,
					       &channel->ld_active);
		}
	}
	spin_unlock_irqrestore(&channel->vc.lock, flags);
	/* in rz_dmac_config() */
	channel->dst_per_address = config->dst_addr;
	channel->chcfg &= ~CHCFG_FILL_DDS_MASK;
	if (channel->dst_per_address) {
		val = rz_dmac_ds_to_val_mapping(config->dst_addr_width);
		if (val == CHCFG_DS_INVALID)
			return -EINVAL;
		channel->chcfg |= FIELD_PREP(CHCFG_FILL_DDS_MASK, val);
	}

	channel->src_per_address = config->src_addr;
	channel->chcfg &= ~CHCFG_FILL_SDS_MASK;
	if (channel->src_per_address) {
		val = rz_dmac_ds_to_val_mapping(config->src_addr_width);
		if (val == CHCFG_DS_INVALID)
			return -EINVAL;
		channel->chcfg |= FIELD_PREP(CHCFG_FILL_SDS_MASK, val);
	}

	return 0;
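/*
 * Editor's sketch: the SDS/DDS bus-width fields programmed above come from
 * the client's dma_slave_config. A hypothetical device-to-memory setup
 * (FIFO address and width invented for illustration):
 */
static int example_config_rx(struct dma_chan *chan, phys_addr_t fifo_addr)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= fifo_addr,
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
	};

	return dmaengine_slave_config(chan, &cfg);	/* -> rz_dmac_config() */
}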
	/*
	 * in rz_dmac_virt_desc_free(): a placeholder; descriptors live on the
	 * channel lists to avoid any memory allocation/free during DMA
	 * read/write.
	 */
	/* in rz_dmac_device_synchronize() */
	struct rz_dmac *dmac = to_rz_dmac(chan->device);

	/* ... poll CHSTAT until the channel disables, then ... */
	if (ret < 0)
		dev_warn(dmac->dev, "DMA Timeout");

	if (dmac->has_icu) {
		rzv2h_icu_register_dma_req(dmac->icu.pdev, dmac->icu.dmac_index,
					   channel->index,
					   RZV2H_ICU_DMAC_REQ_NO_DEFAULT);
	} else {
		rz_dmac_set_dmars_register(dmac, channel->index, 0);
	}
/*
 * -----------------------------------------------------------------------------
 * IRQ handling
 * -----------------------------------------------------------------------------
 */
	/* in rz_dmac_irq_handle_channel() */
	struct dma_chan *chan = &channel->vc.chan;
	struct rz_dmac *dmac = to_rz_dmac(chan->device);
	/* ... */
	dev_err(dmac->dev, "DMAC err CHSTAT_%d = %08X\n",
		channel->index, chstat);
	/* ... */
	/* in rz_dmac_irq_handler_thread() */
	spin_lock_irqsave(&channel->vc.lock, flags);
	if (list_empty(&channel->ld_active)) {
		/* Someone might have called terminate all */
		goto out;
	}

	desc = list_first_entry(&channel->ld_active, struct rz_dmac_desc, node);
	vchan_cookie_complete(&desc->vd);
	list_move_tail(channel->ld_active.next, &channel->ld_free);
	if (!list_empty(&channel->ld_queue)) {
		desc = list_first_entry(&channel->ld_queue, struct rz_dmac_desc,
					node);
		channel->desc = desc;
		if (rz_dmac_xfer_desc(channel) == 0)
			list_move_tail(channel->ld_queue.next,
				       &channel->ld_active);
	}
out:
	spin_unlock_irqrestore(&channel->vc.lock, flags);
/*
 * -----------------------------------------------------------------------------
 * OF xlate and channel filter
 * -----------------------------------------------------------------------------
 */
	/* in rz_dmac_chan_filter() */
	struct rz_dmac *dmac = to_rz_dmac(chan->device);
	u32 ch_cfg;

	channel->mid_rid = dma_spec->args[0] & MID_RID_MASK;
	ch_cfg = (dma_spec->args[0] & CHCFG_MASK) >> 10;
	channel->chcfg = CHCFG_FILL_TM(ch_cfg) | CHCFG_FILL_AM(ch_cfg) |
			 CHCFG_FILL_LVL(ch_cfg) | CHCFG_FILL_HIEN(ch_cfg);

	return !test_and_set_bit(channel->mid_rid, dmac->modules);
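/*
 * Editor's note: the single DT cell decoded above carries the MID/RID in its
 * low bits (MID_RID_MASK) and CHCFG hints (TM/AM/LVL/HIEN) from bit 10 up.
 * A hypothetical client binding (cell values invented for illustration):
 *
 *	dmas = <&dmac 0x255>, <&dmac 0x256>;
 *	dma-names = "tx", "rx";
 */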
	/* in rz_dmac_of_xlate() */
	if (dma_spec->args_count != 1)
		return NULL;

	/* Only slave DMA channels can be allocated via DT */
	return __dma_request_channel(&mask, rz_dmac_chan_filter, dma_spec,
				     ofdma->of_node);
/*
 * -----------------------------------------------------------------------------
 * Probe and remove
 * -----------------------------------------------------------------------------
 */
	/* in rz_dmac_chan_probe() */
	struct platform_device *pdev = to_platform_device(dmac->dev);

	channel->index = index;
	channel->mid_rid = -EINVAL;

	/* Request the channel interrupt. */
	channel->irq = platform_get_irq_byname(pdev, pdev_irqname);
	if (channel->irq < 0)
		return channel->irq;

	irqname = devm_kasprintf(dmac->dev, GFP_KERNEL, "%s:%u",
				 dev_name(dmac->dev), index);
	if (!irqname)
		return -ENOMEM;

	ret = devm_request_threaded_irq(dmac->dev, channel->irq,
					rz_dmac_irq_handler,
					rz_dmac_irq_handler_thread, 0,
					irqname, channel);
	if (ret) {
		dev_err(dmac->dev, "failed to request IRQ %u (%d)\n",
			channel->irq, ret);
		return ret;
	}

	/* Set the I/O base address of each channel. */
	if (index < 8) {
		channel->ch_base = dmac->base + CHANNEL_0_7_OFFSET +
			EACH_CHANNEL_OFFSET * index;
		channel->ch_cmn_base = dmac->base + CHANNEL_0_7_COMMON_BASE;
	} else {
		channel->ch_base = dmac->base + CHANNEL_8_15_OFFSET +
			EACH_CHANNEL_OFFSET * (index - 8);
		channel->ch_cmn_base = dmac->base + CHANNEL_8_15_COMMON_BASE;
	}

	/* Allocate descriptors. */
	lmdesc = dma_alloc_coherent(&pdev->dev,
				    sizeof(struct rz_lmdesc) * DMAC_NR_LMDESC,
				    &channel->lmdesc.base_dma, GFP_KERNEL);
	if (!lmdesc) {
		dev_err(&pdev->dev, "Can't allocate memory (lmdesc)\n");
		return -ENOMEM;
	}
	rz_lmdesc_setup(channel, lmdesc);
	/* ... */
	channel->vc.desc_free = rz_dmac_virt_desc_free;
	vchan_init(&channel->vc, &dmac->engine);
	INIT_LIST_HEAD(&channel->ld_queue);
	INIT_LIST_HEAD(&channel->ld_free);
	INIT_LIST_HEAD(&channel->ld_active);

	return 0;
	/* in rz_dmac_parse_of_icu() */
	struct device_node *np = dev->of_node;

	ret = of_parse_phandle_with_fixed_args(np, "renesas,icu", 1, 0, &args);
	if (ret == -ENOENT)
		return 0;	/* no ICU: not an RZ/V2H-class part */
	/* ... */
	dmac->has_icu = true;

	dmac->icu.pdev = of_find_device_by_node(args.np);
	of_node_put(args.np);
	if (!dmac->icu.pdev) {
		dev_err(dev, "ICU device not found.\n");
		return -ENODEV;
	}

	if (dmac_index > RZV2H_MAX_DMAC_INDEX)	/* bound macro name assumed */
		return -EINVAL;

	dmac->icu.dmac_index = dmac_index;
	/* in rz_dmac_parse_of() */
	struct device_node *np = dev->of_node;

	ret = of_property_read_u32(np, "dma-channels", &dmac->n_channels);
	if (ret < 0) {
		dev_err(dev, "unable to read dma-channels property\n");
		return ret;
	}

	if (!dmac->n_channels || dmac->n_channels > RZ_DMAC_MAX_CHANNELS) {
		dev_err(dev, "invalid number of channels %u\n", dmac->n_channels);
		return -EINVAL;
	}
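/*
 * Editor's sketch of the controller node parsed above; a hypothetical,
 * abbreviated DT fragment (unit address, clocks, resets and interrupts
 * omitted):
 *
 *	dmac: dma-controller@11820000 {
 *		compatible = "renesas,r9a07g044-dmac", "renesas,rz-dmac";
 *		#dma-cells = <1>;
 *		dma-channels = <16>;
 *	};
 *
 * On RZ/V2H-class parts the node additionally carries the "renesas,icu"
 * phandle consumed by rz_dmac_parse_of_icu() above.
 */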
static int rz_dmac_probe(struct platform_device *pdev)
{
	const char *irqname = "error";
	struct dma_device *engine;
	struct rz_dmac *dmac;
	int channel_num;
	int ret;
	int irq;
	u8 i;

	dmac = devm_kzalloc(&pdev->dev, sizeof(*dmac), GFP_KERNEL);
	if (!dmac)
		return -ENOMEM;

	dmac->dev = &pdev->dev;
	platform_set_drvdata(pdev, dmac);

	ret = rz_dmac_parse_of(&pdev->dev, dmac);
	if (ret < 0)
		return ret;

	dmac->channels = devm_kcalloc(&pdev->dev, dmac->n_channels,
				      sizeof(*dmac->channels), GFP_KERNEL);
	if (!dmac->channels)
		return -ENOMEM;

	/* Request resources. */
	dmac->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(dmac->base))
		return PTR_ERR(dmac->base);

	if (!dmac->has_icu) {
		dmac->ext_base = devm_platform_ioremap_resource(pdev, 1);
		if (IS_ERR(dmac->ext_base))
			return PTR_ERR(dmac->ext_base);
	}

	/* Register the handler for the error interrupt. */
	irq = platform_get_irq_byname(pdev, irqname);
	if (irq < 0)
		return irq;

	ret = devm_request_irq(&pdev->dev, irq, rz_dmac_irq_handler, 0,
			       irqname, NULL);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ %u (%d)\n",
			irq, ret);
		return ret;
	}

	/* Initialize the channels. */
	INIT_LIST_HEAD(&dmac->engine.channels);

	dmac->rstc = devm_reset_control_array_get_optional_exclusive(&pdev->dev);
	if (IS_ERR(dmac->rstc))
		return dev_err_probe(&pdev->dev, PTR_ERR(dmac->rstc),
				     "failed to get resets\n");

	pm_runtime_enable(&pdev->dev);
	ret = pm_runtime_resume_and_get(&pdev->dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "pm_runtime_resume_and_get failed\n");
		goto err_pm_disable;
	}

	ret = reset_control_deassert(dmac->rstc);
	if (ret)
		goto err_pm_runtime_put;

	for (i = 0; i < dmac->n_channels; i++) {
		ret = rz_dmac_chan_probe(dmac, &dmac->channels[i], i);
		if (ret < 0)
			goto err;
	}

	/* Register the DMAC as a DMA provider for DT. */
	ret = of_dma_controller_register(pdev->dev.of_node, rz_dmac_of_xlate,
					 NULL);
	if (ret < 0)
		goto err;

	/* Register the DMA engine device. */
	engine = &dmac->engine;
	dma_cap_set(DMA_SLAVE, engine->cap_mask);
	dma_cap_set(DMA_MEMCPY, engine->cap_mask);
	rz_dmac_writel(dmac, DCTRL_DEFAULT, CHANNEL_0_7_COMMON_BASE + DCTRL);
	rz_dmac_writel(dmac, DCTRL_DEFAULT, CHANNEL_8_15_COMMON_BASE + DCTRL);

	engine->dev = &pdev->dev;
	engine->device_alloc_chan_resources = rz_dmac_alloc_chan_resources;
	engine->device_free_chan_resources = rz_dmac_free_chan_resources;
	engine->device_tx_status = dma_cookie_status;
	engine->device_prep_slave_sg = rz_dmac_prep_slave_sg;
	engine->device_prep_dma_memcpy = rz_dmac_prep_dma_memcpy;
	engine->device_config = rz_dmac_config;
	engine->device_terminate_all = rz_dmac_terminate_all;
	engine->device_issue_pending = rz_dmac_issue_pending;
	engine->device_synchronize = rz_dmac_device_synchronize;

	engine->copy_align = DMAENGINE_ALIGN_1_BYTE;
	dma_set_max_seg_size(engine->dev, U32_MAX);

	ret = dma_async_device_register(engine);
	if (ret < 0) {
		dev_err(&pdev->dev, "unable to register\n");
		goto dma_register_err;
	}
	return 0;

dma_register_err:
	of_dma_controller_free(pdev->dev.of_node);
err:
	channel_num = i ? i - 1 : 0;
	for (i = 0; i < channel_num; i++) {
		struct rz_dmac_chan *channel = &dmac->channels[i];

		dma_free_coherent(&pdev->dev,
				  sizeof(struct rz_lmdesc) * DMAC_NR_LMDESC,
				  channel->lmdesc.base,
				  channel->lmdesc.base_dma);
	}

	reset_control_assert(dmac->rstc);
err_pm_runtime_put:
	pm_runtime_put(&pdev->dev);
err_pm_disable:
	pm_runtime_disable(&pdev->dev);

	return ret;
}
static void rz_dmac_remove(struct platform_device *pdev)
{
	struct rz_dmac *dmac = platform_get_drvdata(pdev);
	unsigned int i;

	dma_async_device_unregister(&dmac->engine);
	of_dma_controller_free(pdev->dev.of_node);
	for (i = 0; i < dmac->n_channels; i++) {
		struct rz_dmac_chan *channel = &dmac->channels[i];

		dma_free_coherent(&pdev->dev,
				  sizeof(struct rz_lmdesc) * DMAC_NR_LMDESC,
				  channel->lmdesc.base,
				  channel->lmdesc.base_dma);
	}
	reset_control_assert(dmac->rstc);
	pm_runtime_put(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

	platform_device_put(dmac->icu.pdev);
}
static const struct of_device_id of_rz_dmac_match[] = {
	{ .compatible = "renesas,r9a09g057-dmac", },
	{ .compatible = "renesas,rz-dmac", },
	{ /* Sentinel */ }
};

	.name = "rz-dmac",	/* in rz_dmac_driver.driver */

MODULE_DESCRIPTION("Renesas RZ/G2L DMA Controller Driver");