Lines matching "+full:jz4780 +full:- +full:dma" in drivers/dma/stm32-mdma.c
// SPDX-License-Identifier: GPL-2.0-only
 * Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 * Inspired by stm32-dma.c and dma-jz4780.c
#include <linux/dma-mapping.h>
#include "../virt-dma.h"

/* in struct stm32_mdma_chan_config */
bool m2m_hw; /* True when MDMA is triggered by STM32 DMA */

/* in struct stm32_mdma_dma_config */
u32 request; /* STM32 DMA channel stream id, triggering MDMA */
u32 cmar; /* STM32 DMA interrupt flag clear register address */
u32 cmdr; /* STM32 DMA Transfer Complete flag */

/* in stm32_mdma_get_dev() */
return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
		    ddev);

/* in chan2dev() */
return &chan->vchan.chan.dev->device;

/* in mdma2dev() */
return mdma_dev->ddev.dev;

/* in stm32_mdma_read() */
return readl_relaxed(dmadev->base + reg);

/* in stm32_mdma_write() */
writel_relaxed(val, dmadev->base + reg);

/* in stm32_mdma_set_bits() */
void __iomem *addr = dmadev->base + reg;

/* in stm32_mdma_clr_bits() */
void __iomem *addr = dmadev->base + reg;

/* in stm32_mdma_alloc_desc() */
desc->count = count;
desc->node[i].hwdesc =
	dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
		       &desc->node[i].hwdesc_phys);
if (!desc->node[i].hwdesc)
while (--i >= 0)
	dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
		      desc->node[i].hwdesc_phys);

/* in stm32_mdma_desc_free() */
struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
for (i = 0; i < desc->count; i++)
	dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
		      desc->node[i].hwdesc_phys);

/* in stm32_mdma_get_width() */
return ffs(width) - 1;
dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
return -EINVAL;
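
/*
 * Added note: DMA_SLAVE_BUSWIDTH_{1,2,4,8}_BYTES have the enum values
 * 1, 2, 4 and 8, so ffs(width) - 1 above maps them to the register
 * encodings 0, 1, 2 and 3.
 */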

/* in stm32_mdma_get_max_width() */
if ((((buf_len | addr) & (max_width - 1)) == 0) &&

/* in stm32_mdma_disable_chan() */
id = chan->id;
	dmadev->base + STM32_MDMA_CISR(id), cisr,
return -EBUSY;

/* in stm32_mdma_stop() */
/* Disable DMA */
status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
chan->busy = false;

/* in stm32_mdma_set_bus() */
for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
	if (mask == dmadev->ahb_addr_masks[i]) {

/* in stm32_mdma_set_xfer_param() */
struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
src_addr_width = chan->dma_config.src_addr_width;
dst_addr_width = chan->dma_config.dst_addr_width;
src_maxburst = chan->dma_config.src_maxburst;
dst_maxburst = chan->dma_config.dst_maxburst;
ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;
 * the number of bytes - 1 in the CTCR register
ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
return -EINVAL;
return -EINVAL;
 * - Clear SW request as in this case this is a HW one
 * - Clear WEX, HEX and BEX bits
 * - Set priority level
ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);
ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);
dst_addr = chan->dma_config.dst_addr;
if (chan_config->m2m_hw)
if (chan_config->m2m_hw) {
if (chan_config->m2m_hw)
chan->mem_burst = dst_best_burst;
chan->mem_width = src_addr_width;
chan->mem_burst = src_best_burst;
stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
src_addr = chan->dma_config.src_addr;
if (chan_config->m2m_hw)
if (chan_config->m2m_hw) {
if (chan_config->m2m_hw)
chan->mem_width = dst_addr_width;
stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
dev_err(chan2dev(chan), "Dma direction is not supported\n");
return -EINVAL;

/* in stm32_mdma_dump_hwdesc() */
dev_dbg(chan2dev(chan), "hwdesc: %pad\n", &node->hwdesc_phys);
dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n", node->hwdesc->ctcr);
dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", node->hwdesc->cbndtr);
dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n", node->hwdesc->csar);
dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n", node->hwdesc->cdar);
dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n", node->hwdesc->cbrur);
dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n", node->hwdesc->clar);
dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n", node->hwdesc->ctbr);
dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n", node->hwdesc->cmar);
dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n\n", node->hwdesc->cmdr);

/* in stm32_mdma_setup_hwdesc() */
struct stm32_mdma_chan_config *config = &chan->chan_config;
hwdesc = desc->node[count].hwdesc;
hwdesc->ctcr = ctcr;
hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
hwdesc->csar = src_addr;
hwdesc->cdar = dst_addr;
hwdesc->cbrur = 0;
hwdesc->ctbr = ctbr;
hwdesc->cmar = config->mask_addr;
hwdesc->cmdr = config->mask_data;
hwdesc->clar = desc->node[0].hwdesc_phys;
hwdesc->clar = 0;
hwdesc->clar = desc->node[next].hwdesc_phys;
stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
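
/*
 * Added note: the three CLAR assignments above chain the hardware
 * descriptors into a linked list. Each node's CLAR holds the physical
 * address of the next node; the last node either points back at node[0]
 * (cyclic transfer) or holds 0 to terminate the list.
 */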

/* in stm32_mdma_setup_xfer() */
struct dma_slave_config *dma_config = &chan->dma_config;
struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
if (chan_config->m2m_hw)
return -EINVAL;
dst_addr = dma_config->dst_addr;
if (chan_config->m2m_hw && (i & 1))
src_addr = dma_config->src_addr;
if (chan_config->m2m_hw && (i & 1))
		     i == sg_len - 1, i == 0, false);
desc->ccr = ccr;

/* in stm32_mdma_prep_slave_sg() */
struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
 * Once the channel has been set up in cyclic mode, it cannot accept any
 * other request. The DMA channel needs to be aborted or terminated
if (chan->desc && chan->desc->cyclic) {
	"Request not allowed when dma in cyclic mode\n");
 * In case of an M2M HW transfer triggered by STM32 DMA, we must not let the
 * hardware clear the transfer complete flag, so that the CPU can rearm the STM32 DMA
if (chan_config->m2m_hw && direction == DMA_MEM_TO_DEV) {
hwdesc = desc->node[i].hwdesc;
hwdesc->cmar = 0;
hwdesc->cmdr = 0;
desc->cyclic = false;
return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
for (i = 0; i < desc->count; i++)
	dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
		      desc->node[i].hwdesc_phys);
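
/*
 * Added example (hypothetical client code, not part of this file): a
 * scatter-gather transfer submitted through the generic dmaengine API
 * lands in stm32_mdma_prep_slave_sg() above. "sgl", "sg_len" and
 * "my_callback" are placeholder names.
 */
desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
			       DMA_PREP_INTERRUPT);
if (!desc)
	return -EINVAL;
desc->callback = my_callback;		/* run on transfer completion */
cookie = dmaengine_submit(desc);	/* queue on the virtual channel */
dma_async_issue_pending(chan);		/* ends up in stm32_mdma_issue_pending() */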

/* in stm32_mdma_prep_dma_cyclic() */
struct dma_slave_config *dma_config = &chan->dma_config;
struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
 * Once the channel has been set up in cyclic mode, it cannot accept any
 * other request. The DMA channel needs to be aborted or terminated
if (chan->desc && chan->desc->cyclic) {
	"Request not allowed when dma in cyclic mode\n");
desc->ccr = ccr;
dst_addr = dma_config->dst_addr;
if (chan_config->m2m_hw && (i & 1))
src_addr = dma_config->src_addr;
if (chan_config->m2m_hw && (i & 1))
		     i == count - 1, i == 0, true);
desc->cyclic = true;
return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
for (i = 0; i < desc->count; i++)
	dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
		      desc->node[i].hwdesc_phys);
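
/*
 * Added example (hypothetical client code): a cyclic ring-buffer transfer
 * of buf_len bytes split into buf_len / period_len periods; the driver's
 * IRQ handler then fires vchan_cyclic_callback() as each period's
 * descriptor completes. "buf", "buf_len" and "period_len" are placeholders.
 */
desc = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
				 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);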

/* in stm32_mdma_prep_dma_memcpy() */
 * Once the channel has been set up in cyclic mode, it cannot accept any
 * other request. The DMA channel needs to be aborted or terminated
if (chan->desc && chan->desc->cyclic) {
	"Request not allowed when dma in cyclic mode\n");
ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
hwdesc = desc->node[0].hwdesc;
hwdesc->ctcr = ctcr;
hwdesc->cbndtr = cbndtr;
hwdesc->csar = src;
hwdesc->cdar = dest;
hwdesc->cbrur = 0;
hwdesc->clar = 0;
hwdesc->ctbr = ctbr;
hwdesc->cmar = 0;
hwdesc->cmdr = 0;
stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
xfer_count = min_t(size_t, len - offset,
		     i == count - 1, i == 0, false);
desc->ccr = ccr;
desc->cyclic = false;
return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

/* in stm32_mdma_dump_reg() */
	stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));

/* in stm32_mdma_start_transfer() */
u32 id = chan->id;
vdesc = vchan_next_desc(&chan->vchan);
chan->desc = NULL;
list_del(&vdesc->node);
chan->desc = to_stm32_mdma_desc(vdesc);
hwdesc = chan->desc->node[0].hwdesc;
chan->curr_hwdesc = 0;
stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);
/* Start DMA */
if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
chan->busy = true;
dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);

/* in stm32_mdma_issue_pending() */
spin_lock_irqsave(&chan->vchan.lock, flags);
if (!vchan_issue_pending(&chan->vchan))
dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);
if (!chan->desc && !chan->busy)
spin_unlock_irqrestore(&chan->vchan.lock, flags);

/* in stm32_mdma_pause() */
spin_lock_irqsave(&chan->vchan.lock, flags);
spin_unlock_irqrestore(&chan->vchan.lock, flags);
dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

/* in stm32_mdma_resume() */
if (!chan->desc || (stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & STM32_MDMA_CCR_EN))
	return -EPERM;
hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;
spin_lock_irqsave(&chan->vchan.lock, flags);
/* Re-configure control register */
stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);
status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
/* Re-start DMA */
reg = STM32_MDMA_CCR(chan->id);
if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
spin_unlock_irqrestore(&chan->vchan.lock, flags);
dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

/* in stm32_mdma_terminate_all() */
spin_lock_irqsave(&chan->vchan.lock, flags);
if (chan->desc) {
	vchan_terminate_vdesc(&chan->desc->vdesc);
	if (chan->busy)
	chan->desc = NULL;
vchan_get_all_descriptors(&chan->vchan, &head);
spin_unlock_irqrestore(&chan->vchan.lock, flags);
vchan_dma_desc_free_list(&chan->vchan, &head);

/* in stm32_mdma_synchronize() */
vchan_synchronize(&chan->vchan);

/* in stm32_mdma_slave_config() */
memcpy(&chan->dma_config, config, sizeof(*config));
/* Check if user is requesting STM32 DMA to trigger MDMA */
if (config->peripheral_size) {
	mdma_config = (struct stm32_mdma_dma_config *)chan->dma_config.peripheral_config;
	chan->chan_config.request = mdma_config->request;
	chan->chan_config.mask_addr = mdma_config->cmar;
	chan->chan_config.mask_data = mdma_config->cmdr;
	chan->chan_config.m2m_hw = true;
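
/*
 * Added sketch of the chained-DMA configuration decoded above. Assumption:
 * this mirrors what the STM32 DMA driver passes through peripheral_config
 * when it asks MDMA to be triggered by one of its streams; the field names
 * come from struct stm32_mdma_dma_config shown earlier, and all values are
 * placeholders.
 */
struct stm32_mdma_dma_config mdma_conf = {
	.request = dma_stream_id,	/* STM32 DMA stream triggering MDMA */
	.cmar	 = dma_ifcr_addr,	/* DMA interrupt flag clear register */
	.cmdr	 = dma_tc_flag,		/* DMA transfer complete flag */
};
struct dma_slave_config cfg = {
	/* usual addresses, widths and bursts omitted */
	.peripheral_config = &mdma_conf,
	.peripheral_size = sizeof(mdma_conf),
};
ret = dmaengine_slave_config(chan, &cfg);	/* reaches stm32_mdma_slave_config() */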

/* in stm32_mdma_desc_residue() */
cisr = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
clar = stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id));
for (i = desc->count - 1; i >= 0; i--) {
	hwdesc = desc->node[i].hwdesc;
	if (hwdesc->clar == clar)
	residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
state->in_flight_bytes = 0;
if (chan->chan_config.m2m_hw && (cisr & STM32_MDMA_CISR_CRQA))
	state->in_flight_bytes = cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
if (!chan->mem_burst)
burst_size = chan->mem_burst * chan->mem_width;
residue = residue - modulo + burst_size;
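
/*
 * Added worked example for the rounding above (illustrative numbers): with
 * mem_burst = 8 beats of mem_width = 4 bytes, burst_size is 32. A raw
 * residue of 70 bytes gives modulo = 70 % 32 = 6, so the reported residue
 * becomes 70 - 6 + 32 = 96, i.e. rounded up to the next burst boundary.
 */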

/* in stm32_mdma_tx_status() */
spin_lock_irqsave(&chan->vchan.lock, flags);
vdesc = vchan_find_desc(&chan->vchan, cookie);
if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
	residue = stm32_mdma_desc_residue(chan, chan->desc, chan->curr_hwdesc, state);
spin_unlock_irqrestore(&chan->vchan.lock, flags);

/* in stm32_mdma_xfer_end() */
vchan_cookie_complete(&chan->desc->vdesc);
chan->desc = NULL;
chan->busy = false;

/* in stm32_mdma_irq_handler() */
status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
chan = &dmadev->chan[id];
spin_lock(&chan->vchan.lock);
spin_unlock(&chan->vchan.lock);
if (chan->busy)
	readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
chan->curr_hwdesc++;
if (chan->desc && chan->desc->cyclic) {
	if (chan->curr_hwdesc == chan->desc->count)
		chan->curr_hwdesc = 0;
	vchan_cyclic_callback(&chan->desc->vdesc);
dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
spin_unlock(&chan->vchan.lock);

/* in stm32_mdma_alloc_chan_resources() */
chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
				   c->device->dev,
if (!chan->desc_pool) {
	return -ENOMEM;
ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
pm_runtime_put(dmadev->ddev.dev);

/* in stm32_mdma_free_chan_resources() */
dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
if (chan->busy) {
	spin_lock_irqsave(&chan->vchan.lock, flags);
	chan->desc = NULL;
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
pm_runtime_put(dmadev->ddev.dev);
dmam_pool_destroy(chan->desc_pool);
chan->desc_pool = NULL;

/* in stm32_mdma_filter_fn() */
if (dmadev->chan_reserved & BIT(chan->id))

/* in stm32_mdma_of_xlate() */
struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
dma_cap_mask_t mask = dmadev->ddev.cap_mask;
if (dma_spec->args_count < 5) {
config.request = dma_spec->args[0];
config.priority_level = dma_spec->args[1];
config.transfer_config = dma_spec->args[2];
config.mask_addr = dma_spec->args[3];
config.mask_data = dma_spec->args[4];
if (config.request >= dmadev->nr_requests) {
c = __dma_request_channel(&mask, stm32_mdma_filter_fn, &config, ofdma->of_node);
chan->chan_config = config;
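
/*
 * Added note: the five cells decoded above match the st,stm32h7-mdma
 * dma-cells layout, e.g. in a client node (values illustrative only):
 *
 *	dmas = <&mdma1 0 0x2 0x10000000 0 0>;
 *
 * i.e. request line, priority level, CTCR transfer configuration,
 * mask address (CMAR) and mask data (CMDR).
 */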

/* in stm32_mdma_of_match[] */
{ .compatible = "st,stm32h7-mdma", },

/* in stm32_mdma_probe() */
of_node = pdev->dev.of_node;
	return -ENODEV;
ret = device_property_read_u32(&pdev->dev, "dma-channels",
	dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n",
ret = device_property_read_u32(&pdev->dev, "dma-requests",
	dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n",
count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
dmadev = devm_kzalloc(&pdev->dev,
	return -ENOMEM;
dmadev->nr_ahb_addr_masks = count;
dmadev->nr_channels = nr_channels;
dmadev->nr_requests = nr_requests;
device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
			       dmadev->ahb_addr_masks,
dmadev->base = devm_platform_ioremap_resource(pdev, 0);
if (IS_ERR(dmadev->base))
	return PTR_ERR(dmadev->base);
dmadev->clk = devm_clk_get(&pdev->dev, NULL);
if (IS_ERR(dmadev->clk))
	return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
ret = clk_prepare_enable(dmadev->clk);
	dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
rst = devm_reset_control_get(&pdev->dev, NULL);
	if (ret == -EPROBE_DEFER)
dd = &dmadev->ddev;
dma_cap_set(DMA_SLAVE, dd->cap_mask);
dma_cap_set(DMA_PRIVATE, dd->cap_mask);
dma_cap_set(DMA_CYCLIC, dd->cap_mask);
dma_cap_set(DMA_MEMCPY, dd->cap_mask);
dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
dd->device_tx_status = stm32_mdma_tx_status;
dd->device_issue_pending = stm32_mdma_issue_pending;
dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
dd->device_config = stm32_mdma_slave_config;
dd->device_pause = stm32_mdma_pause;
dd->device_resume = stm32_mdma_resume;
dd->device_terminate_all = stm32_mdma_terminate_all;
dd->device_synchronize = stm32_mdma_synchronize;
dd->descriptor_reuse = true;
dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
dd->max_burst = STM32_MDMA_MAX_BURST;
dd->dev = &pdev->dev;
INIT_LIST_HEAD(&dd->channels);
for (i = 0; i < dmadev->nr_channels; i++) {
	chan = &dmadev->chan[i];
	chan->id = i;
	dmadev->chan_reserved |= BIT(i);
	chan->vchan.desc_free = stm32_mdma_desc_free;
	vchan_init(&chan->vchan, dd);
dmadev->irq = platform_get_irq(pdev, 0);
if (dmadev->irq < 0) {
	ret = dmadev->irq;
ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
		       0, dev_name(&pdev->dev), dmadev);
	dev_err(&pdev->dev, "failed to request IRQ\n");
	dev_err(&pdev->dev,
		"STM32 MDMA DMA OF registration failed %d\n", ret);
pm_runtime_set_active(&pdev->dev);
pm_runtime_enable(&pdev->dev);
pm_runtime_get_noresume(&pdev->dev);
pm_runtime_put(&pdev->dev);
dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
clk_disable_unprepare(dmadev->clk);

/* in stm32_mdma_runtime_suspend() */
clk_disable_unprepare(dmadev->clk);

/* in stm32_mdma_runtime_resume() */
ret = clk_prepare_enable(dmadev->clk);

/* in stm32_mdma_pm_suspend() */
for (id = 0; id < dmadev->nr_channels; id++) {
	return -EBUSY;

/* in stm32_mdma_driver */
.name = "stm32-mdma",

MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");