Lines matching +full:edma +full:-err in drivers/dma/dw-edma/dw-edma-core.c (Synopsys DesignWare eDMA core driver)
1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (c) 2018-2019 Synopsys, Inc. and/or its affiliates.
4 * Synopsys DesignWare eDMA core driver
13 #include <linux/err.h>
16 #include <linux/dma/edma.h>
17 #include <linux/dma-mapping.h>
20 #include "dw-edma-core.h"
21 #include "dw-edma-v0-core.h"
22 #include "dw-hdma-v0-core.h"
24 #include "../virt-dma.h"
35 struct dw_edma_chip *chip = chan->dw->chip; in dw_edma_get_pci_address()
37 if (chip->ops->pci_address) in dw_edma_get_pci_address()
38 return chip->ops->pci_address(chip->dev, cpu_addr); in dw_edma_get_pci_address()
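Here the core defers CPU-to-PCI address translation to the glue driver through the optional chip->ops->pci_address callback; when the callback is absent the CPU address is used as-is. A minimal sketch of such a callback for a PCIe glue device, along the lines of what the PCIe glue driver does (function name hypothetical):

#include <linux/pci.h>

/* Hypothetical ->pci_address() callback: translate a CPU physical address
 * into the matching PCI bus address as seen by the eDMA engine. */
static u64 my_edma_pci_address(struct device *dev, phys_addr_t cpu_addr)
{
        struct pci_dev *pdev = to_pci_dev(dev);
        struct pci_bus_region region;
        struct resource res = {
                .flags = IORESOURCE_MEM,
                .start = cpu_addr,
                .end = cpu_addr,
        };

        pcibios_resource_to_bus(pdev->bus, &region, &res);
        return region.start;
}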
51 INIT_LIST_HEAD(&burst->list); in dw_edma_alloc_burst()
52 if (chunk->burst) { in dw_edma_alloc_burst()
54 chunk->bursts_alloc++; in dw_edma_alloc_burst()
55 list_add_tail(&burst->list, &chunk->burst->list); in dw_edma_alloc_burst()
58 chunk->bursts_alloc = 0; in dw_edma_alloc_burst()
59 chunk->burst = burst; in dw_edma_alloc_burst()
67 struct dw_edma_chip *chip = desc->chan->dw->chip; in dw_edma_alloc_chunk()
68 struct dw_edma_chan *chan = desc->chan; in dw_edma_alloc_chunk()
75 INIT_LIST_HEAD(&chunk->list); in dw_edma_alloc_chunk()
76 chunk->chan = chan; in dw_edma_alloc_chunk()
77 /* Toggling change bit (CB) in each chunk, this is a mechanism to in dw_edma_alloc_chunk()
78 * inform the eDMA HW block that this is a new linked list ready in dw_edma_alloc_chunk()
79 * to be consumed. in dw_edma_alloc_chunk()
80 * - Odd chunks originate CB equal to 0 in dw_edma_alloc_chunk()
81 * - Even chunks originate CB equal to 1 in dw_edma_alloc_chunk()
83 chunk->cb = !(desc->chunks_alloc % 2); in dw_edma_alloc_chunk()
84 if (chan->dir == EDMA_DIR_WRITE) { in dw_edma_alloc_chunk()
85 chunk->ll_region.paddr = chip->ll_region_wr[chan->id].paddr; in dw_edma_alloc_chunk()
86 chunk->ll_region.vaddr = chip->ll_region_wr[chan->id].vaddr; in dw_edma_alloc_chunk()
88 chunk->ll_region.paddr = chip->ll_region_rd[chan->id].paddr; in dw_edma_alloc_chunk()
89 chunk->ll_region.vaddr = chip->ll_region_rd[chan->id].vaddr; in dw_edma_alloc_chunk()
92 if (desc->chunk) { in dw_edma_alloc_chunk()
98 desc->chunks_alloc++; in dw_edma_alloc_chunk()
99 list_add_tail(&chunk->list, &desc->chunk->list); in dw_edma_alloc_chunk()
102 chunk->burst = NULL; in dw_edma_alloc_chunk()
103 desc->chunks_alloc = 0; in dw_edma_alloc_chunk()
104 desc->chunk = chunk; in dw_edma_alloc_chunk()
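The CB (change bit) parity set above is what lets the hardware tell consecutive linked lists apart. A minimal illustration of the resulting sequence (hypothetical helper, not part of the driver):

/* desc->chunks_alloc counts chunks already allocated for the descriptor:
 * chunk #1 (chunks_alloc == 0) -> CB = 1
 * chunk #2 (chunks_alloc == 1) -> CB = 0
 * chunk #3 (chunks_alloc == 2) -> CB = 1, and so on. */
static bool my_next_cb(u32 chunks_alloc)
{
        return !(chunks_alloc % 2);
}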
118 desc->chan = chan; in dw_edma_alloc_desc()
132 list_for_each_entry_safe(child, _next, &chunk->burst->list, list) { in dw_edma_free_burst()
133 list_del(&child->list); in dw_edma_free_burst()
135 chunk->bursts_alloc--; in dw_edma_free_burst()
140 chunk->burst = NULL; in dw_edma_free_burst()
147 if (!desc->chunk) in dw_edma_free_chunk()
151 list_for_each_entry_safe(child, _next, &desc->chunk->list, list) { in dw_edma_free_chunk()
153 list_del(&child->list); in dw_edma_free_chunk()
155 desc->chunks_alloc--; in dw_edma_free_chunk()
160 desc->chunk = NULL; in dw_edma_free_chunk()
176 struct dw_edma *dw = chan->dw; in dw_edma_start_transfer()
181 vd = vchan_next_desc(&chan->vc); in dw_edma_start_transfer()
189 child = list_first_entry_or_null(&desc->chunk->list, in dw_edma_start_transfer()
194 dw_edma_core_start(dw, child, !desc->xfer_sz); in dw_edma_start_transfer()
195 desc->xfer_sz += child->ll_region.sz; in dw_edma_start_transfer()
197 list_del(&child->list); in dw_edma_start_transfer()
199 desc->chunks_alloc--; in dw_edma_start_transfer()
209 if (chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) { in dw_edma_device_caps()
210 if (chan->dir == EDMA_DIR_READ) in dw_edma_device_caps()
211 caps->directions = BIT(DMA_DEV_TO_MEM); in dw_edma_device_caps()
213 caps->directions = BIT(DMA_MEM_TO_DEV); in dw_edma_device_caps()
215 if (chan->dir == EDMA_DIR_WRITE) in dw_edma_device_caps()
216 caps->directions = BIT(DMA_DEV_TO_MEM); in dw_edma_device_caps()
218 caps->directions = BIT(DMA_MEM_TO_DEV); in dw_edma_device_caps()
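Because each eDMA channel is unidirectional, a consumer can verify the advertised direction before preparing transfers, using the generic dmaengine capability query (helper name hypothetical):

#include <linux/dmaengine.h>

/* Hypothetical check: does @chan support device-to-memory transfers? */
static int my_check_dev_to_mem(struct dma_chan *chan)
{
        struct dma_slave_caps caps;
        int err;

        err = dma_get_slave_caps(chan, &caps);
        if (err)
                return err;

        return (caps.directions & BIT(DMA_DEV_TO_MEM)) ? 0 : -EOPNOTSUPP;
}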
227 memcpy(&chan->config, config, sizeof(*config)); in dw_edma_device_config()
228 chan->configured = true; in dw_edma_device_config()
236 int err = 0; in dw_edma_device_pause() local
238 if (!chan->configured) in dw_edma_device_pause()
239 err = -EPERM; in dw_edma_device_pause()
240 else if (chan->status != EDMA_ST_BUSY) in dw_edma_device_pause()
241 err = -EPERM; in dw_edma_device_pause()
242 else if (chan->request != EDMA_REQ_NONE) in dw_edma_device_pause()
243 err = -EPERM; in dw_edma_device_pause()
245 chan->request = EDMA_REQ_PAUSE; in dw_edma_device_pause()
247 return err; in dw_edma_device_pause()
253 int err = 0; in dw_edma_device_resume() local
255 if (!chan->configured) { in dw_edma_device_resume()
256 err = -EPERM; in dw_edma_device_resume()
257 } else if (chan->status != EDMA_ST_PAUSE) { in dw_edma_device_resume()
258 err = -EPERM; in dw_edma_device_resume()
259 } else if (chan->request != EDMA_REQ_NONE) { in dw_edma_device_resume()
260 err = -EPERM; in dw_edma_device_resume()
262 chan->status = EDMA_ST_BUSY; in dw_edma_device_resume()
266 return err; in dw_edma_device_resume()
272 int err = 0; in dw_edma_device_terminate_all() local
274 if (!chan->configured) { in dw_edma_device_terminate_all()
276 } else if (chan->status == EDMA_ST_PAUSE) { in dw_edma_device_terminate_all()
277 chan->status = EDMA_ST_IDLE; in dw_edma_device_terminate_all()
278 chan->configured = false; in dw_edma_device_terminate_all()
279 } else if (chan->status == EDMA_ST_IDLE) { in dw_edma_device_terminate_all()
280 chan->configured = false; in dw_edma_device_terminate_all()
286 chan->status = EDMA_ST_IDLE; in dw_edma_device_terminate_all()
287 chan->configured = false; in dw_edma_device_terminate_all()
288 } else if (chan->request > EDMA_REQ_PAUSE) { in dw_edma_device_terminate_all()
289 err = -EPERM; in dw_edma_device_terminate_all()
291 chan->request = EDMA_REQ_STOP; in dw_edma_device_terminate_all()
294 return err; in dw_edma_device_terminate_all()
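All three control callbacks above return -EPERM unless the channel is in the one state where the request makes sense (configured and busy for pause, paused for resume, and so on). From a client's point of view they are reached through the standard dmaengine wrappers; a hedged sketch (helper name hypothetical):

/* Hypothetical quiesce helper built on the generic dmaengine API. */
static void my_channel_quiesce(struct dma_chan *chan)
{
        /* Fails with -EPERM unless the channel is configured and busy. */
        if (dmaengine_pause(chan))
                pr_warn("pause rejected\n");

        /* Fails with -EPERM unless the channel is currently paused. */
        if (dmaengine_resume(chan))
                pr_warn("resume rejected\n");

        /* Stop outstanding transfers and wait until the channel is idle. */
        dmaengine_terminate_sync(chan);
}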
302 if (!chan->configured) in dw_edma_device_issue_pending()
305 spin_lock_irqsave(&chan->vc.lock, flags); in dw_edma_device_issue_pending()
306 if (vchan_issue_pending(&chan->vc) && chan->request == EDMA_REQ_NONE && in dw_edma_device_issue_pending()
307 chan->status == EDMA_ST_IDLE) { in dw_edma_device_issue_pending()
308 chan->status = EDMA_ST_BUSY; in dw_edma_device_issue_pending()
311 spin_unlock_irqrestore(&chan->vc.lock, flags); in dw_edma_device_issue_pending()
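issue_pending only kicks dw_edma_start_transfer() when the channel is idle and has no pause/stop request outstanding. The corresponding client-side step, sketched (helper name hypothetical):

/* Hypothetical kick: queue a prepared descriptor @desc and start @chan. */
static int my_submit_and_issue(struct dma_chan *chan,
                               struct dma_async_tx_descriptor *desc)
{
        dma_cookie_t cookie = dmaengine_submit(desc);

        if (dma_submit_error(cookie))
                return -EIO;

        /* Ends up in dw_edma_device_issue_pending(): flips an idle channel
         * to EDMA_ST_BUSY and starts the first chunk. */
        dma_async_issue_pending(chan);
        return 0;
}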
329 if (ret == DMA_IN_PROGRESS && chan->status == EDMA_ST_PAUSE) in dw_edma_device_tx_status()
335 spin_lock_irqsave(&chan->vc.lock, flags); in dw_edma_device_tx_status()
336 vd = vchan_find_desc(&chan->vc, cookie); in dw_edma_device_tx_status()
340 residue = desc->alloc_sz - desc->xfer_sz; in dw_edma_device_tx_status()
342 spin_unlock_irqrestore(&chan->vc.lock, flags); in dw_edma_device_tx_status()
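The residue reported above is simply the descriptor's allocated size minus what has already been handed to the hardware. A client polls it through dmaengine_tx_status(); a minimal sketch (helper name hypothetical):

/* Hypothetical poll: bytes still outstanding for @cookie on @chan. */
static u32 my_bytes_left(struct dma_chan *chan, dma_cookie_t cookie)
{
        struct dma_tx_state state;

        if (dmaengine_tx_status(chan, cookie, &state) == DMA_COMPLETE)
                return 0;

        return state.residue;   /* i.e. desc->alloc_sz - desc->xfer_sz */
}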
353 struct dw_edma_chan *chan = dchan2dw_edma_chan(xfer->dchan); in dw_edma_device_transfer()
354 enum dma_transfer_direction dir = xfer->direction; in dw_edma_device_transfer()
364 if (!chan->configured) in dw_edma_device_transfer()
368 *        Local Root Port/End-point              Remote End-point in dw_edma_device_transfer()
369 * +-----------------------+ PCIe bus +----------------------+ in dw_edma_device_transfer()
370 * |                       |    +-+   |                      | in dw_edma_device_transfer()
371 * |    DEV_TO_MEM   Rx Ch <----+ +---+ Tx Ch  DEV_TO_MEM    | in dw_edma_device_transfer()
372 * |                       |    | |   |                      | in dw_edma_device_transfer()
373 * |    MEM_TO_DEV   Tx Ch +----+ +---> Rx Ch  MEM_TO_DEV    | in dw_edma_device_transfer()
374 * |                       |    +-+   |                      | in dw_edma_device_transfer()
375 * +-----------------------+          +----------------------+ in dw_edma_device_transfer()
377 * 1. Normal logic: in dw_edma_device_transfer()
378 * If eDMA is embedded into the DW PCIe RP/EP and controlled from the in dw_edma_device_transfer()
379 * CPU/Application side, the Rx channel (EDMA_DIR_READ) will be used in dw_edma_device_transfer()
380 * for the device read operations (DEV_TO_MEM) and the Tx channel in dw_edma_device_transfer()
381 * (EDMA_DIR_WRITE) - for the write operations (MEM_TO_DEV). in dw_edma_device_transfer()
383 * 2. Inverted logic: in dw_edma_device_transfer()
384 * If eDMA is embedded into a Remote PCIe EP and is controlled by the in dw_edma_device_transfer()
385 * MWr/MRd TLPs sent from the CPU's PCIe host controller, the Tx in dw_edma_device_transfer()
386 * channel (EDMA_DIR_WRITE) will be used for the device read operations in dw_edma_device_transfer()
387 * (DEV_TO_MEM) and the Rx channel (EDMA_DIR_READ) - for the write in dw_edma_device_transfer()
388 * operations (MEM_TO_DEV). in dw_edma_device_transfer()
393 if (chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) { in dw_edma_device_transfer()
394 if ((chan->dir == EDMA_DIR_READ && dir != DMA_DEV_TO_MEM) || in dw_edma_device_transfer()
395 (chan->dir == EDMA_DIR_WRITE && dir != DMA_MEM_TO_DEV)) in dw_edma_device_transfer()
398 if ((chan->dir == EDMA_DIR_WRITE && dir != DMA_DEV_TO_MEM) || in dw_edma_device_transfer()
399 (chan->dir == EDMA_DIR_READ && dir != DMA_MEM_TO_DEV)) in dw_edma_device_transfer()
403 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
404 if (!xfer->xfer.cyclic.len || !xfer->xfer.cyclic.cnt) in dw_edma_device_transfer()
406 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
407 if (xfer->xfer.sg.len < 1) in dw_edma_device_transfer()
409 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
410 if (!xfer->xfer.il->numf || xfer->xfer.il->frame_size < 1) in dw_edma_device_transfer()
412 if (!xfer->xfer.il->src_inc || !xfer->xfer.il->dst_inc) in dw_edma_device_transfer()
426 if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
427 src_addr = xfer->xfer.il->src_start; in dw_edma_device_transfer()
428 dst_addr = xfer->xfer.il->dst_start; in dw_edma_device_transfer()
430 src_addr = chan->config.src_addr; in dw_edma_device_transfer()
431 dst_addr = chan->config.dst_addr; in dw_edma_device_transfer()
439 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
440 cnt = xfer->xfer.cyclic.cnt; in dw_edma_device_transfer()
441 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
442 cnt = xfer->xfer.sg.len; in dw_edma_device_transfer()
443 sg = xfer->xfer.sg.sgl; in dw_edma_device_transfer()
444 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
445 cnt = xfer->xfer.il->numf * xfer->xfer.il->frame_size; in dw_edma_device_transfer()
446 fsz = xfer->xfer.il->frame_size; in dw_edma_device_transfer()
450 if (xfer->type == EDMA_XFER_SCATTER_GATHER && !sg) in dw_edma_device_transfer()
453 if (chunk->bursts_alloc == chan->ll_max) { in dw_edma_device_transfer()
463 if (xfer->type == EDMA_XFER_CYCLIC) in dw_edma_device_transfer()
464 burst->sz = xfer->xfer.cyclic.len; in dw_edma_device_transfer()
465 else if (xfer->type == EDMA_XFER_SCATTER_GATHER) in dw_edma_device_transfer()
466 burst->sz = sg_dma_len(sg); in dw_edma_device_transfer()
467 else if (xfer->type == EDMA_XFER_INTERLEAVED) in dw_edma_device_transfer()
468 burst->sz = xfer->xfer.il->sgl[i % fsz].size; in dw_edma_device_transfer()
470 chunk->ll_region.sz += burst->sz; in dw_edma_device_transfer()
471 desc->alloc_sz += burst->sz; in dw_edma_device_transfer()
474 burst->sar = src_addr; in dw_edma_device_transfer()
475 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
476 burst->dar = xfer->xfer.cyclic.paddr; in dw_edma_device_transfer()
477 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
479 burst->dar = sg_dma_address(sg); in dw_edma_device_transfer()
487 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
488 burst->dar = dst_addr; in dw_edma_device_transfer()
491 burst->dar = dst_addr; in dw_edma_device_transfer()
492 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
493 burst->sar = xfer->xfer.cyclic.paddr; in dw_edma_device_transfer()
494 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
496 burst->sar = sg_dma_address(sg); in dw_edma_device_transfer()
504 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
505 burst->sar = src_addr; in dw_edma_device_transfer()
509 if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
511 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
512 struct dma_interleaved_template *il = xfer->xfer.il; in dw_edma_device_transfer()
513 struct data_chunk *dc = &il->sgl[i % fsz]; in dw_edma_device_transfer()
515 src_addr += burst->sz; in dw_edma_device_transfer()
516 if (il->src_sgl) in dw_edma_device_transfer()
519 dst_addr += burst->sz; in dw_edma_device_transfer()
520 if (il->dst_sgl) in dw_edma_device_transfer()
525 return vchan_tx_prep(&chan->vc, &desc->vd, xfer->flags); in dw_edma_device_transfer()
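Putting the validation rules above together: with a locally controlled eDMA (DW_EDMA_CHIP_LOCAL), a read from the remote side must use the EDMA_DIR_READ channel with a DMA_DEV_TO_MEM transfer. A hedged client sketch (names and the remote address are hypothetical):

/* Hypothetical DEV_TO_MEM scatter-gather read on a locally controlled eDMA. */
static int my_read_from_remote(struct dma_chan *chan, struct scatterlist *sgl,
                               unsigned int sg_len, dma_addr_t remote_pci_addr,
                               dma_async_tx_callback done, void *done_arg)
{
        struct dma_slave_config cfg = {
                .direction = DMA_DEV_TO_MEM,
                .src_addr = remote_pci_addr,    /* far-side bus address */
        };
        struct dma_async_tx_descriptor *desc;

        if (dmaengine_slave_config(chan, &cfg))
                return -EINVAL;

        desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -ENOMEM;

        desc->callback = done;          /* runs after dw_edma_done_interrupt() */
        desc->callback_param = done_arg;

        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
}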
579 xfer.direction = ilt->dir; in dw_edma_device_prep_interleaved_dma()
593 spin_lock_irqsave(&chan->vc.lock, flags); in dw_edma_done_interrupt()
594 vd = vchan_next_desc(&chan->vc); in dw_edma_done_interrupt()
596 switch (chan->request) { in dw_edma_done_interrupt()
599 if (!desc->chunks_alloc) { in dw_edma_done_interrupt()
600 list_del(&vd->node); in dw_edma_done_interrupt()
606 chan->status = dw_edma_start_transfer(chan) ? EDMA_ST_BUSY : EDMA_ST_IDLE; in dw_edma_done_interrupt()
610 list_del(&vd->node); in dw_edma_done_interrupt()
612 chan->request = EDMA_REQ_NONE; in dw_edma_done_interrupt()
613 chan->status = EDMA_ST_IDLE; in dw_edma_done_interrupt()
617 chan->request = EDMA_REQ_NONE; in dw_edma_done_interrupt()
618 chan->status = EDMA_ST_PAUSE; in dw_edma_done_interrupt()
625 spin_unlock_irqrestore(&chan->vc.lock, flags); in dw_edma_done_interrupt()
633 spin_lock_irqsave(&chan->vc.lock, flags); in dw_edma_abort_interrupt()
634 vd = vchan_next_desc(&chan->vc); in dw_edma_abort_interrupt()
636 list_del(&vd->node); in dw_edma_abort_interrupt()
639 spin_unlock_irqrestore(&chan->vc.lock, flags); in dw_edma_abort_interrupt()
640 chan->request = EDMA_REQ_NONE; in dw_edma_abort_interrupt()
641 chan->status = EDMA_ST_IDLE; in dw_edma_abort_interrupt()
676 if (chan->status != EDMA_ST_IDLE) in dw_edma_alloc_chan_resources()
677 return -EBUSY; in dw_edma_alloc_chan_resources()
701 struct dw_edma_chip *chip = dw->chip; in dw_edma_channel_setup()
702 struct device *dev = chip->dev; in dw_edma_channel_setup()
709 ch_cnt = dw->wr_ch_cnt + dw->rd_ch_cnt; in dw_edma_channel_setup()
710 dma = &dw->dma; in dw_edma_channel_setup()
712 INIT_LIST_HEAD(&dma->channels); in dw_edma_channel_setup()
715 chan = &dw->chan[i]; in dw_edma_channel_setup()
717 chan->dw = dw; in dw_edma_channel_setup()
719 if (i < dw->wr_ch_cnt) { in dw_edma_channel_setup()
720 chan->id = i; in dw_edma_channel_setup()
721 chan->dir = EDMA_DIR_WRITE; in dw_edma_channel_setup()
723 chan->id = i - dw->wr_ch_cnt; in dw_edma_channel_setup()
724 chan->dir = EDMA_DIR_READ; in dw_edma_channel_setup()
727 chan->configured = false; in dw_edma_channel_setup()
728 chan->request = EDMA_REQ_NONE; in dw_edma_channel_setup()
729 chan->status = EDMA_ST_IDLE; in dw_edma_channel_setup()
731 if (chan->dir == EDMA_DIR_WRITE) in dw_edma_channel_setup()
732 chan->ll_max = (chip->ll_region_wr[chan->id].sz / EDMA_LL_SZ); in dw_edma_channel_setup()
734 chan->ll_max = (chip->ll_region_rd[chan->id].sz / EDMA_LL_SZ); in dw_edma_channel_setup()
735 chan->ll_max -= 1; in dw_edma_channel_setup()
738 str_write_read(chan->dir == EDMA_DIR_WRITE), in dw_edma_channel_setup()
739 chan->id, chan->ll_max); in dw_edma_channel_setup()
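ll_max is therefore the number of burst entries that fit in the channel's linked-list window, minus one. A worked example, assuming EDMA_LL_SZ is 24 bytes per linked-list element (see dw-edma-core.h) and a 4 KiB region per channel:

/* Worked example, assuming EDMA_LL_SZ == 24 and a 4 KiB ll region:
 *   4096 / 24 = 170 entries fit; one is held back by the "-= 1" above,
 *   leaving ll_max = 169 bursts per chunk. */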
741 if (dw->nr_irqs == 1) in dw_edma_channel_setup()
743 else if (chan->dir == EDMA_DIR_WRITE) in dw_edma_channel_setup()
744 pos = chan->id % wr_alloc; in dw_edma_channel_setup()
746 pos = wr_alloc + chan->id % rd_alloc; in dw_edma_channel_setup()
748 irq = &dw->irq[pos]; in dw_edma_channel_setup()
750 if (chan->dir == EDMA_DIR_WRITE) in dw_edma_channel_setup()
751 irq->wr_mask |= BIT(chan->id); in dw_edma_channel_setup()
753 irq->rd_mask |= BIT(chan->id); in dw_edma_channel_setup()
755 irq->dw = dw; in dw_edma_channel_setup()
756 memcpy(&chan->msi, &irq->msi, sizeof(chan->msi)); in dw_edma_channel_setup()
759 str_write_read(chan->dir == EDMA_DIR_WRITE), in dw_edma_channel_setup()
760 chan->id, in dw_edma_channel_setup()
761 chan->msi.address_hi, chan->msi.address_lo, in dw_edma_channel_setup()
762 chan->msi.data); in dw_edma_channel_setup()
764 chan->vc.desc_free = vchan_free_desc; in dw_edma_channel_setup()
765 chan->vc.chan.private = chan->dir == EDMA_DIR_WRITE ? in dw_edma_channel_setup()
766 &dw->chip->dt_region_wr[chan->id] : in dw_edma_channel_setup()
767 &dw->chip->dt_region_rd[chan->id]; in dw_edma_channel_setup()
769 vchan_init(&chan->vc, dma); in dw_edma_channel_setup()
775 dma_cap_zero(dma->cap_mask); in dw_edma_channel_setup()
776 dma_cap_set(DMA_SLAVE, dma->cap_mask); in dw_edma_channel_setup()
777 dma_cap_set(DMA_CYCLIC, dma->cap_mask); in dw_edma_channel_setup()
778 dma_cap_set(DMA_PRIVATE, dma->cap_mask); in dw_edma_channel_setup()
779 dma_cap_set(DMA_INTERLEAVE, dma->cap_mask); in dw_edma_channel_setup()
780 dma->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in dw_edma_channel_setup()
781 dma->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in dw_edma_channel_setup()
782 dma->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in dw_edma_channel_setup()
783 dma->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR; in dw_edma_channel_setup()
786 dma->dev = chip->dev; in dw_edma_channel_setup()
787 dma->device_alloc_chan_resources = dw_edma_alloc_chan_resources; in dw_edma_channel_setup()
788 dma->device_free_chan_resources = dw_edma_free_chan_resources; in dw_edma_channel_setup()
789 dma->device_caps = dw_edma_device_caps; in dw_edma_channel_setup()
790 dma->device_config = dw_edma_device_config; in dw_edma_channel_setup()
791 dma->device_pause = dw_edma_device_pause; in dw_edma_channel_setup()
792 dma->device_resume = dw_edma_device_resume; in dw_edma_channel_setup()
793 dma->device_terminate_all = dw_edma_device_terminate_all; in dw_edma_channel_setup()
794 dma->device_issue_pending = dw_edma_device_issue_pending; in dw_edma_channel_setup()
795 dma->device_tx_status = dw_edma_device_tx_status; in dw_edma_channel_setup()
796 dma->device_prep_slave_sg = dw_edma_device_prep_slave_sg; in dw_edma_channel_setup()
797 dma->device_prep_dma_cyclic = dw_edma_device_prep_dma_cyclic; in dw_edma_channel_setup()
798 dma->device_prep_interleaved_dma = dw_edma_device_prep_interleaved_dma; in dw_edma_channel_setup()
800 dma_set_max_seg_size(dma->dev, U32_MAX); in dw_edma_channel_setup()
810 (*nr_irqs)--; in dw_edma_dec_irq_alloc()
823 struct dw_edma_chip *chip = dw->chip; in dw_edma_irq_request()
824 struct device *dev = dw->chip->dev; in dw_edma_irq_request()
827 int i, err = 0; in dw_edma_irq_request() local
831 ch_cnt = dw->wr_ch_cnt + dw->rd_ch_cnt; in dw_edma_irq_request()
833 if (chip->nr_irqs < 1 || !chip->ops->irq_vector) in dw_edma_irq_request()
834 return -EINVAL; in dw_edma_irq_request()
836 dw->irq = devm_kcalloc(dev, chip->nr_irqs, sizeof(*dw->irq), GFP_KERNEL); in dw_edma_irq_request()
837 if (!dw->irq) in dw_edma_irq_request()
838 return -ENOMEM; in dw_edma_irq_request()
840 if (chip->nr_irqs == 1) { in dw_edma_irq_request()
842 irq = chip->ops->irq_vector(dev, 0); in dw_edma_irq_request()
843 err = request_irq(irq, dw_edma_interrupt_common, in dw_edma_irq_request()
844 IRQF_SHARED, dw->name, &dw->irq[0]); in dw_edma_irq_request()
845 if (err) { in dw_edma_irq_request()
846 dw->nr_irqs = 0; in dw_edma_irq_request()
847 return err; in dw_edma_irq_request()
851 get_cached_msi_msg(irq, &dw->irq[0].msi); in dw_edma_irq_request()
853 dw->nr_irqs = 1; in dw_edma_irq_request()
856 int tmp = chip->nr_irqs; in dw_edma_irq_request()
859 dw_edma_dec_irq_alloc(&tmp, wr_alloc, dw->wr_ch_cnt); in dw_edma_irq_request()
860 dw_edma_dec_irq_alloc(&tmp, rd_alloc, dw->rd_ch_cnt); in dw_edma_irq_request()
863 dw_edma_add_irq_mask(&wr_mask, *wr_alloc, dw->wr_ch_cnt); in dw_edma_irq_request()
864 dw_edma_add_irq_mask(&rd_mask, *rd_alloc, dw->rd_ch_cnt); in dw_edma_irq_request()
867 irq = chip->ops->irq_vector(dev, i); in dw_edma_irq_request()
868 err = request_irq(irq, in dw_edma_irq_request()
872 IRQF_SHARED, dw->name, in dw_edma_irq_request()
873 &dw->irq[i]); in dw_edma_irq_request()
874 if (err) in dw_edma_irq_request()
878 get_cached_msi_msg(irq, &dw->irq[i].msi); in dw_edma_irq_request()
881 dw->nr_irqs = i; in dw_edma_irq_request()
887 for (i--; i >= 0; i--) { in dw_edma_irq_request()
888 irq = chip->ops->irq_vector(dev, i); in dw_edma_irq_request()
889 free_irq(irq, &dw->irq[i]); in dw_edma_irq_request()
892 return err; in dw_edma_irq_request()
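The core never parses MSI resources itself; it asks the glue driver for each Linux IRQ number through chip->ops->irq_vector. A sketch of such an ops table for a PCI device, assuming the ops type is struct dw_edma_plat_ops from <linux/dma/edma.h> (glue-side names hypothetical):

#include <linux/pci.h>

/* Hypothetical ->irq_vector(): logical eDMA IRQ index -> MSI/MSI-X vector. */
static int my_edma_irq_vector(struct device *dev, unsigned int nr)
{
        return pci_irq_vector(to_pci_dev(dev), nr);
}

static const struct dw_edma_plat_ops my_edma_ops = {
        .irq_vector = my_edma_irq_vector,
        .pci_address = my_edma_pci_address,     /* from the earlier sketch */
};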
901 int i, err; in dw_edma_probe() local
904 return -EINVAL; in dw_edma_probe()
906 dev = chip->dev; in dw_edma_probe()
907 if (!dev || !chip->ops) in dw_edma_probe()
908 return -EINVAL; in dw_edma_probe()
912 return -ENOMEM; in dw_edma_probe()
914 dw->chip = chip; in dw_edma_probe()
916 if (dw->chip->mf == EDMA_MF_HDMA_NATIVE) in dw_edma_probe()
921 raw_spin_lock_init(&dw->lock); in dw_edma_probe()
923 dw->wr_ch_cnt = min_t(u16, chip->ll_wr_cnt, in dw_edma_probe()
925 dw->wr_ch_cnt = min_t(u16, dw->wr_ch_cnt, EDMA_MAX_WR_CH); in dw_edma_probe()
927 dw->rd_ch_cnt = min_t(u16, chip->ll_rd_cnt, in dw_edma_probe()
929 dw->rd_ch_cnt = min_t(u16, dw->rd_ch_cnt, EDMA_MAX_RD_CH); in dw_edma_probe()
931 if (!dw->wr_ch_cnt && !dw->rd_ch_cnt) in dw_edma_probe()
932 return -EINVAL; in dw_edma_probe()
935 dw->wr_ch_cnt, dw->rd_ch_cnt); in dw_edma_probe()
938 dw->chan = devm_kcalloc(dev, dw->wr_ch_cnt + dw->rd_ch_cnt, in dw_edma_probe()
939 sizeof(*dw->chan), GFP_KERNEL); in dw_edma_probe()
940 if (!dw->chan) in dw_edma_probe()
941 return -ENOMEM; in dw_edma_probe()
943 snprintf(dw->name, sizeof(dw->name), "dw-edma-core:%s", in dw_edma_probe()
944 dev_name(chip->dev)); in dw_edma_probe()
946 /* Disable eDMA, only to establish the ideal initial conditions */ in dw_edma_probe()
950 err = dw_edma_irq_request(dw, &wr_alloc, &rd_alloc); in dw_edma_probe()
951 if (err) in dw_edma_probe()
952 return err; in dw_edma_probe()
955 err = dw_edma_channel_setup(dw, wr_alloc, rd_alloc); in dw_edma_probe()
956 if (err) in dw_edma_probe()
962 chip->dw = dw; in dw_edma_probe()
967 for (i = (dw->nr_irqs - 1); i >= 0; i--) in dw_edma_probe()
968 free_irq(chip->ops->irq_vector(dev, i), &dw->irq[i]); in dw_edma_probe()
970 return err; in dw_edma_probe()
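From a glue driver's point of view, probing boils down to filling a struct dw_edma_chip and calling dw_edma_probe(); dw_edma_remove() undoes it on teardown. A minimal, hedged sketch using only fields referenced in this file (linked-list region setup omitted, mapping-format value an assumption):

static int my_glue_probe(struct device *dev)
{
        struct dw_edma_chip *chip;

        chip = devm_kzalloc(dev, sizeof(*chip), GFP_KERNEL);
        if (!chip)
                return -ENOMEM;

        chip->dev = dev;
        chip->ops = &my_edma_ops;               /* irq_vector/pci_address sketches above */
        chip->flags = DW_EDMA_CHIP_LOCAL;       /* eDMA driven by the local CPU */
        chip->mf = EDMA_MF_EDMA_UNROLL;         /* assumption: unrolled eDMA register map */
        chip->nr_irqs = 1;
        chip->ll_wr_cnt = 1;
        chip->ll_rd_cnt = 1;
        /* chip->ll_region_wr[0]/ll_region_rd[0] (vaddr, paddr, sz) must describe
         * memory the controller can fetch linked-list entries from; omitted here. */

        return dw_edma_probe(chip);
}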
977 struct device *dev = chip->dev; in dw_edma_remove()
978 struct dw_edma *dw = chip->dw; in dw_edma_remove()
983 return -ENODEV; in dw_edma_remove()
985 /* Disable eDMA */ in dw_edma_remove()
989 for (i = (dw->nr_irqs - 1); i >= 0; i--) in dw_edma_remove()
990 free_irq(chip->ops->irq_vector(dev, i), &dw->irq[i]); in dw_edma_remove()
992 /* Deregister eDMA device */ in dw_edma_remove()
993 dma_async_device_unregister(&dw->dma); in dw_edma_remove()
994 list_for_each_entry_safe(chan, _chan, &dw->dma.channels, in dw_edma_remove()
996 tasklet_kill(&chan->vc.task); in dw_edma_remove()
997 list_del(&chan->vc.chan.device_node); in dw_edma_remove()
1005 MODULE_DESCRIPTION("Synopsys DesignWare eDMA controller core driver");