Lines matching "burst-length": drivers/dma/dw-edma/dw-edma-core.c (excerpt; elided code marked "...")
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2018-2019 Synopsys, Inc. and/or its affiliates.
 */

#include <linux/dma-mapping.h>

#include "dw-edma-core.h"
#include "dw-edma-v0-core.h"
#include "dw-hdma-v0-core.h"
#include "../virt-dma.h"
static inline struct device *dchan2dev(struct dma_chan *dchan)
{
	return &dchan->dev->device;
}
static inline struct device *chan2dev(struct dw_edma_chan *chan)
{
	return &chan->vc.chan.dev->device;
}
static u64 dw_edma_get_pci_address(struct dw_edma_chan *chan, phys_addr_t cpu_addr)
{
	struct dw_edma_chip *chip = chan->dw->chip;

	if (chip->ops->pci_address)
		return chip->ops->pci_address(chip->dev, cpu_addr);

	return cpu_addr;
}
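/*
 * Example (not from this file): a chip driver supplies .pci_address when CPU
 * physical addresses and PCI bus addresses differ. A minimal sketch, modeled
 * on the PCIe glue driver (dw-edma-pcie.c); the function name is illustrative:
 */
static u64 my_edma_pci_address(struct device *dev, phys_addr_t cpu_addr)
{
	struct pci_dev *pdev = to_pci_dev(dev);
	struct pci_bus_region region;
	struct resource res = {
		.flags = IORESOURCE_MEM,
		.start = cpu_addr,
		.end = cpu_addr,
	};

	/* Translate the CPU address through the host bridge windows */
	pcibios_resource_to_bus(pdev->bus, &region, &res);
	return region.start;
}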
static struct dw_edma_burst *dw_edma_alloc_burst(struct dw_edma_chunk *chunk)
{
	struct dw_edma_burst *burst;

	burst = kzalloc(sizeof(*burst), GFP_NOWAIT);
	if (unlikely(!burst))
		return NULL;

	INIT_LIST_HEAD(&burst->list);
	if (chunk->burst) {
		chunk->bursts_alloc++;
		list_add_tail(&burst->list, &chunk->burst->list);
	} else {
		chunk->bursts_alloc = 0;
		chunk->burst = burst;
	}

	return burst;
}
static struct dw_edma_chunk *dw_edma_alloc_chunk(struct dw_edma_desc *desc)
{
	struct dw_edma_chip *chip = desc->chan->dw->chip;
	struct dw_edma_chan *chan = desc->chan;
	...
	INIT_LIST_HEAD(&chunk->list);
	chunk->chan = chan;
	/*
	 * Toggle the cycle bit (CB) on each chunk so the eDMA engine can
	 * tell consecutive chunks apart:
	 * - Odd chunks originate CB equal to 0
	 * - Even chunks originate CB equal to 1
	 */
	chunk->cb = !(desc->chunks_alloc % 2);
	if (chan->dir == EDMA_DIR_WRITE) {
		chunk->ll_region.paddr = chip->ll_region_wr[chan->id].paddr;
		chunk->ll_region.vaddr = chip->ll_region_wr[chan->id].vaddr;
	} else {
		chunk->ll_region.paddr = chip->ll_region_rd[chan->id].paddr;
		chunk->ll_region.vaddr = chip->ll_region_rd[chan->id].vaddr;
	}

	if (desc->chunk) {
		...
		desc->chunks_alloc++;
		list_add_tail(&chunk->list, &desc->chunk->list);
	} else {
		chunk->burst = NULL;
		desc->chunks_alloc = 0;
		desc->chunk = chunk;
	}

	return chunk;
}
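/*
 * An assumption worth stating (per the DW eDMA linked-list protocol, not
 * spelled out in this file): the alternating cycle bit is what lets the
 * engine distinguish freshly written linked-list elements from stale ones
 * when the same LL memory region is reused for consecutive chunks.
 */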
/* dw_edma_alloc_desc() */
	desc->chan = chan;
/* dw_edma_free_burst() */
	list_for_each_entry_safe(child, _next, &chunk->burst->list, list) {
		list_del(&child->list);
		kfree(child);
		chunk->bursts_alloc--;
	}
	...
	chunk->burst = NULL;
/* dw_edma_free_chunk() */
	if (!desc->chunk)
		return;

	list_for_each_entry_safe(child, _next, &desc->chunk->list, list) {
		dw_edma_free_burst(child);
		list_del(&child->list);
		kfree(child);
		desc->chunks_alloc--;
	}
	...
	desc->chunk = NULL;
/* dw_edma_start_transfer() */
	struct dw_edma *dw = chan->dw;
	...
	vd = vchan_next_desc(&chan->vc);
	...
	child = list_first_entry_or_null(&desc->chunk->list,
					 struct dw_edma_chunk, list);
	...
	dw_edma_core_start(dw, child, !desc->xfer_sz);
	desc->xfer_sz += child->ll_region.sz;
	dw_edma_free_burst(child);
	list_del(&child->list);
	kfree(child);
	desc->chunks_alloc--;
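/*
 * Only one chunk is programmed into the hardware at a time: each completion
 * interrupt (see dw_edma_done_interrupt() below) re-enters
 * dw_edma_start_transfer() to kick the next queued chunk, if any.
 */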
/* dw_edma_device_caps() */
	if (chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) {
		if (chan->dir == EDMA_DIR_READ)
			caps->directions = BIT(DMA_DEV_TO_MEM);
		else
			caps->directions = BIT(DMA_MEM_TO_DEV);
	} else {
		if (chan->dir == EDMA_DIR_WRITE)
			caps->directions = BIT(DMA_DEV_TO_MEM);
		else
			caps->directions = BIT(DMA_MEM_TO_DEV);
	}
/* dw_edma_device_config() */
	memcpy(&chan->config, config, sizeof(*config));
	chan->configured = true;
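/*
 * Client-side view (not part of this file): the config above arrives through
 * the generic dmaengine API. A minimal sketch of a hypothetical consumer;
 * the function name and the bus address are illustrative:
 */
static int my_client_setup(struct dma_chan *dchan, dma_addr_t dev_bus_addr)
{
	struct dma_slave_config cfg = {
		.direction = DMA_MEM_TO_DEV,	/* push data towards the device */
		.dst_addr = dev_bus_addr,	/* fixed device-side address */
	};

	/* Ends up in dw_edma_device_config(), which only caches the config */
	return dmaengine_slave_config(dchan, &cfg);
}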
/* dw_edma_device_pause() */
	if (!chan->configured)
		err = -EPERM;
	else if (chan->status != EDMA_ST_BUSY)
		err = -EPERM;
	else if (chan->request != EDMA_REQ_NONE)
		err = -EPERM;
	else
		chan->request = EDMA_REQ_PAUSE;
/* dw_edma_device_resume() */
	if (!chan->configured) {
		err = -EPERM;
	} else if (chan->status != EDMA_ST_PAUSE) {
		err = -EPERM;
	} else if (chan->request != EDMA_REQ_NONE) {
		err = -EPERM;
	} else {
		chan->status = EDMA_ST_BUSY;
		dw_edma_start_transfer(chan);
	}
/* dw_edma_device_terminate_all() */
	if (!chan->configured) {
		/* Do nothing */
	} else if (chan->status == EDMA_ST_PAUSE) {
		chan->status = EDMA_ST_IDLE;
		chan->configured = false;
	} else if (chan->status == EDMA_ST_IDLE) {
		chan->configured = false;
	} else if (dw_edma_core_ch_status(chan) == DMA_COMPLETE) {
		/*
		 * The channel is in a false BUSY state, probably didn't
		 * receive or lost an interrupt
		 */
		chan->status = EDMA_ST_IDLE;
		chan->configured = false;
	} else if (chan->request > EDMA_REQ_PAUSE) {
		err = -EPERM;
	} else {
		chan->request = EDMA_REQ_STOP;
	}
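/*
 * Request/status interplay implemented above and in dw_edma_done_interrupt()
 * below:
 *
 *   pause:     configured + BUSY  + request NONE  ->  request = PAUSE
 *   resume:    configured + PAUSE + request NONE  ->  status  = BUSY
 *   terminate: PAUSE/IDLE -> deconfigure; BUSY -> request = STOP
 *
 * The pending request is consumed on the next completion interrupt, where
 * PAUSE moves the channel to EDMA_ST_PAUSE and STOP back to EDMA_ST_IDLE.
 */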
/* dw_edma_device_issue_pending() */
	if (!chan->configured)
		return;

	spin_lock_irqsave(&chan->vc.lock, flags);
	if (vchan_issue_pending(&chan->vc) && chan->request == EDMA_REQ_NONE &&
	    chan->status == EDMA_ST_IDLE) {
		chan->status = EDMA_ST_BUSY;
		dw_edma_start_transfer(chan);
	}
	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* dw_edma_device_tx_status() */
	ret = dma_cookie_status(dchan, cookie, txstate);
	if (ret == DMA_COMPLETE)
		return ret;

	if (ret == DMA_IN_PROGRESS && chan->status == EDMA_ST_PAUSE)
		ret = DMA_PAUSED;
	...
	spin_lock_irqsave(&chan->vc.lock, flags);
	vd = vchan_find_desc(&chan->vc, cookie);
	if (vd) {
		desc = vd2dw_edma_desc(vd);
		if (desc)
			residue = desc->alloc_sz - desc->xfer_sz;
	}
	spin_unlock_irqrestore(&chan->vc.lock, flags);
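/*
 * Client-side continuation of the sketch above (hypothetical consumer): how
 * a descriptor is queued, started and polled through the dmaengine core:
 */
static int my_client_xfer(struct dma_chan *dchan, struct scatterlist *sgl,
			  unsigned int nents)
{
	struct dma_async_tx_descriptor *txd;
	dma_cookie_t cookie;

	txd = dmaengine_prep_slave_sg(dchan, sgl, nents, DMA_MEM_TO_DEV,
				      DMA_PREP_INTERRUPT);
	if (!txd)
		return -EINVAL;

	cookie = dmaengine_submit(txd);		/* queue on the virtual channel */
	dma_async_issue_pending(dchan);		/* -> dw_edma_device_issue_pending() */

	/* Residue above is descriptor-granular: alloc_sz - xfer_sz */
	return dmaengine_tx_status(dchan, cookie, NULL) == DMA_COMPLETE ?
	       0 : -EINPROGRESS;
}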
static struct dma_async_tx_descriptor *
dw_edma_device_transfer(struct dw_edma_transfer *xfer)
{
	struct dw_edma_chan *chan = dchan2dw_edma_chan(xfer->dchan);
	enum dma_transfer_direction dir = xfer->direction;
	struct dw_edma_burst *burst;
	...
	if (!chan->configured)
		return NULL;

	/*
	 * Local Root Port/End-point              Remote End-point
	 * +-----------------------+ PCIe bus +----------------------+
	 * |                       |    +-+   |                      |
	 * |    DEV_TO_MEM   Rx Ch <----+ +---+ Tx Ch  DEV_TO_MEM    |
	 * |                       |    | |   |                      |
	 * |    MEM_TO_DEV   Tx Ch +----+ +---> Rx Ch  MEM_TO_DEV    |
	 * |                       |    +-+   |                      |
	 * +-----------------------+          +----------------------+
	 *
	 * 1. Normal logic: the eDMA is embedded in the local RP/EP, so the
	 * Rx channel (EDMA_DIR_READ) serves the read operations (DEV_TO_MEM)
	 * and the Tx channel (EDMA_DIR_WRITE) - for the write operations
	 * (MEM_TO_DEV).
	 *
	 * 2. Inverted logic: the eDMA is embedded in a remote EP and driven
	 * over the PCIe bus, so the Tx channel (EDMA_DIR_WRITE) serves the
	 * read operations (DEV_TO_MEM) and the Rx channel (EDMA_DIR_READ) -
	 * for the write operations (MEM_TO_DEV).
	 */
	if (chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) {
		if ((chan->dir == EDMA_DIR_READ && dir != DMA_DEV_TO_MEM) ||
		    (chan->dir == EDMA_DIR_WRITE && dir != DMA_MEM_TO_DEV))
			return NULL;
	} else {
		if ((chan->dir == EDMA_DIR_WRITE && dir != DMA_DEV_TO_MEM) ||
		    (chan->dir == EDMA_DIR_READ && dir != DMA_MEM_TO_DEV))
			return NULL;
	}

	if (xfer->type == EDMA_XFER_CYCLIC) {
		if (!xfer->xfer.cyclic.len || !xfer->xfer.cyclic.cnt)
			return NULL;
	} else if (xfer->type == EDMA_XFER_SCATTER_GATHER) {
		if (xfer->xfer.sg.len < 1)
			return NULL;
	} else if (xfer->type == EDMA_XFER_INTERLEAVED) {
		if (!xfer->xfer.il->numf || xfer->xfer.il->frame_size < 1)
			return NULL;
		if (!xfer->xfer.il->src_inc || !xfer->xfer.il->dst_inc)
			return NULL;
	} else {
		return NULL;
	}

	desc = dw_edma_alloc_desc(chan);
	if (unlikely(!desc))
		goto err_alloc;

	chunk = dw_edma_alloc_chunk(desc);
	if (unlikely(!chunk))
		goto err_alloc;

	if (xfer->type == EDMA_XFER_INTERLEAVED) {
		src_addr = xfer->xfer.il->src_start;
		dst_addr = xfer->xfer.il->dst_start;
	} else {
		src_addr = chan->config.src_addr;
		dst_addr = chan->config.dst_addr;
	}

	if (dir == DMA_DEV_TO_MEM)
		src_addr = dw_edma_get_pci_address(chan, (phys_addr_t)src_addr);
	else
		dst_addr = dw_edma_get_pci_address(chan, (phys_addr_t)dst_addr);

	if (xfer->type == EDMA_XFER_CYCLIC) {
		cnt = xfer->xfer.cyclic.cnt;
	} else if (xfer->type == EDMA_XFER_SCATTER_GATHER) {
		cnt = xfer->xfer.sg.len;
		sg = xfer->xfer.sg.sgl;
	} else if (xfer->type == EDMA_XFER_INTERLEAVED) {
		cnt = xfer->xfer.il->numf * xfer->xfer.il->frame_size;
		fsz = xfer->xfer.il->frame_size;
	}

	for (i = 0; i < cnt; i++) {
		if (xfer->type == EDMA_XFER_SCATTER_GATHER && !sg)
			break;

		if (chunk->bursts_alloc == chan->ll_max) {
			chunk = dw_edma_alloc_chunk(desc);
			if (unlikely(!chunk))
				goto err_alloc;
		}

		burst = dw_edma_alloc_burst(chunk);
		if (unlikely(!burst))
			goto err_alloc;

		if (xfer->type == EDMA_XFER_CYCLIC)
			burst->sz = xfer->xfer.cyclic.len;
		else if (xfer->type == EDMA_XFER_SCATTER_GATHER)
			burst->sz = sg_dma_len(sg);
		else if (xfer->type == EDMA_XFER_INTERLEAVED)
			burst->sz = xfer->xfer.il->sgl[i % fsz].size;

		chunk->ll_region.sz += burst->sz;
		desc->alloc_sz += burst->sz;

		if (dir == DMA_DEV_TO_MEM) {
			burst->sar = src_addr;
			if (xfer->type == EDMA_XFER_CYCLIC) {
				burst->dar = xfer->xfer.cyclic.paddr;
			} else if (xfer->type == EDMA_XFER_SCATTER_GATHER) {
				src_addr += sg_dma_len(sg);
				burst->dar = sg_dma_address(sg);
				/*
				 * The device memory is linear, not a FIFO,
				 * so its address is simply advanced
				 * by the same portion (data length)
				 */
			} else if (xfer->type == EDMA_XFER_INTERLEAVED) {
				burst->dar = dst_addr;
			}
		} else {
			burst->dar = dst_addr;
			if (xfer->type == EDMA_XFER_CYCLIC) {
				burst->sar = xfer->xfer.cyclic.paddr;
			} else if (xfer->type == EDMA_XFER_SCATTER_GATHER) {
				dst_addr += sg_dma_len(sg);
				burst->sar = sg_dma_address(sg);
				/*
				 * Same rule as above: linear device memory,
				 * advanced by the same portion (data length)
				 */
			} else if (xfer->type == EDMA_XFER_INTERLEAVED) {
				burst->sar = src_addr;
			}
		}

		if (xfer->type == EDMA_XFER_SCATTER_GATHER) {
			sg = sg_next(sg);
		} else if (xfer->type == EDMA_XFER_INTERLEAVED) {
			struct dma_interleaved_template *il = xfer->xfer.il;
			struct data_chunk *dc = &il->sgl[i % fsz];

			src_addr += burst->sz;
			if (il->src_sgl)
				src_addr += dmaengine_get_src_icg(il, dc);

			dst_addr += burst->sz;
			if (il->dst_sgl)
				dst_addr += dmaengine_get_dst_icg(il, dc);
		}
	}

	return vchan_tx_prep(&chan->vc, &desc->vd, xfer->flags);
/* dw_edma_device_prep_interleaved_dma() */
	xfer.direction = ilt->dir;
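/*
 * The three prep callbacks are thin wrappers that fill a struct
 * dw_edma_transfer and call dw_edma_device_transfer(). A reconstruction of
 * the scatter-gather wrapper following that pattern (the elided lines of
 * this file hold the authoritative version):
 */
static struct dma_async_tx_descriptor *
dw_edma_device_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl,
			     unsigned int len,
			     enum dma_transfer_direction direction,
			     unsigned long flags, void *context)
{
	struct dw_edma_transfer xfer;

	xfer.dchan = dchan;
	xfer.direction = direction;
	xfer.xfer.sg.sgl = sgl;
	xfer.xfer.sg.len = len;
	xfer.flags = flags;
	xfer.type = EDMA_XFER_SCATTER_GATHER;

	return dw_edma_device_transfer(&xfer);
}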
/* dw_edma_done_interrupt() */
	spin_lock_irqsave(&chan->vc.lock, flags);
	vd = vchan_next_desc(&chan->vc);
	if (vd) {
		switch (chan->request) {
		case EDMA_REQ_NONE:
			desc = vd2dw_edma_desc(vd);
			if (!desc->chunks_alloc) {
				list_del(&vd->node);
				vchan_cookie_complete(vd);
			}
			chan->status = dw_edma_start_transfer(chan) ? EDMA_ST_BUSY : EDMA_ST_IDLE;
			break;
		case EDMA_REQ_STOP:
			list_del(&vd->node);
			vchan_cookie_complete(vd);
			chan->request = EDMA_REQ_NONE;
			chan->status = EDMA_ST_IDLE;
			break;
		case EDMA_REQ_PAUSE:
			chan->request = EDMA_REQ_NONE;
			chan->status = EDMA_ST_PAUSE;
			break;
		}
	}
	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* dw_edma_abort_interrupt() */
	spin_lock_irqsave(&chan->vc.lock, flags);
	vd = vchan_next_desc(&chan->vc);
	if (vd) {
		list_del(&vd->node);
		vchan_cookie_complete(vd);
	}
	spin_unlock_irqrestore(&chan->vc.lock, flags);
	chan->request = EDMA_REQ_NONE;
	chan->status = EDMA_ST_IDLE;
/* dw_edma_alloc_chan_resources() */
	if (chan->status != EDMA_ST_IDLE)
		return -EBUSY;
/* dw_edma_channel_setup() */
	struct dw_edma_chip *chip = dw->chip;
	struct device *dev = chip->dev;
	...
	ch_cnt = dw->wr_ch_cnt + dw->rd_ch_cnt;
	dma = &dw->dma;

	INIT_LIST_HEAD(&dma->channels);

	for (i = 0; i < ch_cnt; i++) {
		chan = &dw->chan[i];

		chan->dw = dw;

		if (i < dw->wr_ch_cnt) {
			chan->id = i;
			chan->dir = EDMA_DIR_WRITE;
		} else {
			chan->id = i - dw->wr_ch_cnt;
			chan->dir = EDMA_DIR_READ;
		}

		chan->configured = false;
		chan->request = EDMA_REQ_NONE;
		chan->status = EDMA_ST_IDLE;

		if (chan->dir == EDMA_DIR_WRITE)
			chan->ll_max = (chip->ll_region_wr[chan->id].sz / EDMA_LL_SZ);
		else
			chan->ll_max = (chip->ll_region_rd[chan->id].sz / EDMA_LL_SZ);
		chan->ll_max -= 1;
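		/*
		 * The "- 1" presumably reserves one linked-list slot for the
		 * trailing link element (an assumption; the LL layout is
		 * defined outside this file).
		 */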
		dev_vdbg(dev, ...,
			 chan->dir == EDMA_DIR_WRITE ? "write" : "read",
			 chan->id, chan->ll_max);

		if (dw->nr_irqs == 1)
			pos = 0;
		else if (chan->dir == EDMA_DIR_WRITE)
			pos = chan->id % wr_alloc;
		else
			pos = wr_alloc + chan->id % rd_alloc;

		irq = &dw->irq[pos];

		if (chan->dir == EDMA_DIR_WRITE)
			irq->wr_mask |= BIT(chan->id);
		else
			irq->rd_mask |= BIT(chan->id);

		irq->dw = dw;
		memcpy(&chan->msi, &irq->msi, sizeof(chan->msi));

		dev_vdbg(dev, ...,
			 chan->dir == EDMA_DIR_WRITE ? "write" : "read", chan->id,
			 chan->msi.address_hi, chan->msi.address_lo,
			 chan->msi.data);

		chan->vc.desc_free = vchan_free_desc;
		chan->vc.chan.private = chan->dir == EDMA_DIR_WRITE ?
					&dw->chip->dt_region_wr[chan->id] :
					&dw->chip->dt_region_rd[chan->id];

		vchan_init(&chan->vc, dma);
	}

	/* Set DMA channel capabilities */
	dma_cap_zero(dma->cap_mask);
	dma_cap_set(DMA_SLAVE, dma->cap_mask);
	dma_cap_set(DMA_CYCLIC, dma->cap_mask);
	dma_cap_set(DMA_PRIVATE, dma->cap_mask);
	dma_cap_set(DMA_INTERLEAVE, dma->cap_mask);
	dma->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
	dma->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dma->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dma->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;

	/* Set DMA channel callbacks */
	dma->dev = chip->dev;
	dma->device_alloc_chan_resources = dw_edma_alloc_chan_resources;
	dma->device_free_chan_resources = dw_edma_free_chan_resources;
	dma->device_caps = dw_edma_device_caps;
	dma->device_config = dw_edma_device_config;
	dma->device_pause = dw_edma_device_pause;
	dma->device_resume = dw_edma_device_resume;
	dma->device_terminate_all = dw_edma_device_terminate_all;
	dma->device_issue_pending = dw_edma_device_issue_pending;
	dma->device_tx_status = dw_edma_device_tx_status;
	dma->device_prep_slave_sg = dw_edma_device_prep_slave_sg;
	dma->device_prep_dma_cyclic = dw_edma_device_prep_dma_cyclic;
	dma->device_prep_interleaved_dma = dw_edma_device_prep_interleaved_dma;

	dma_set_max_seg_size(dma->dev, U32_MAX);
/* dw_edma_dec_irq_alloc() */
	(*nr_irqs)--;
/* dw_edma_irq_request() */
	struct dw_edma_chip *chip = dw->chip;
	struct device *dev = dw->chip->dev;
	...
	ch_cnt = dw->wr_ch_cnt + dw->rd_ch_cnt;

	if (chip->nr_irqs < 1 || !chip->ops->irq_vector)
		return -EINVAL;

	dw->irq = devm_kcalloc(dev, chip->nr_irqs, sizeof(*dw->irq), GFP_KERNEL);
	if (!dw->irq)
		return -ENOMEM;

	if (chip->nr_irqs == 1) {
		/* Common IRQ shared among all channels */
		irq = chip->ops->irq_vector(dev, 0);
		err = request_irq(irq, dw_edma_interrupt_common,
				  IRQF_SHARED, dw->name, &dw->irq[0]);
		if (err) {
			dw->nr_irqs = 0;
			return err;
		}

		if (irq_get_msi_desc(irq))
			get_cached_msi_msg(irq, &dw->irq[0].msi);

		dw->nr_irqs = 1;
	} else {
		/* Distribute IRQs equally among all channels */
		int tmp = chip->nr_irqs;

		while (tmp && (*wr_alloc + *rd_alloc) < ch_cnt) {
			dw_edma_dec_irq_alloc(&tmp, wr_alloc, dw->wr_ch_cnt);
			dw_edma_dec_irq_alloc(&tmp, rd_alloc, dw->rd_ch_cnt);
		}

		dw_edma_add_irq_mask(&wr_mask, *wr_alloc, dw->wr_ch_cnt);
		dw_edma_add_irq_mask(&rd_mask, *rd_alloc, dw->rd_ch_cnt);

		for (i = 0; i < (*wr_alloc + *rd_alloc); i++) {
			irq = chip->ops->irq_vector(dev, i);
			err = request_irq(irq,
					  i < *wr_alloc ?
						dw_edma_interrupt_write :
						dw_edma_interrupt_read,
					  IRQF_SHARED, dw->name,
					  &dw->irq[i]);
			if (err)
				goto err_irq_free;

			if (irq_get_msi_desc(irq))
				get_cached_msi_msg(irq, &dw->irq[i].msi);
		}

		dw->nr_irqs = i;
	}
	...
err_irq_free:
	for (i--; i >= 0; i--) {
		irq = chip->ops->irq_vector(dev, i);
		free_irq(irq, &dw->irq[i]);
	}
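/*
 * Worked example of the distribution above (values illustrative): with
 * nr_irqs = 4, wr_ch_cnt = 4 and rd_ch_cnt = 2, the allocation loop
 * converges on wr_alloc = 2 and rd_alloc = 2, so write channels share IRQs
 * 0-1 via "chan->id % wr_alloc" and read channels share IRQs 2-3 via
 * "wr_alloc + chan->id % rd_alloc" (see dw_edma_channel_setup() above).
 */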
/* dw_edma_probe() */
	if (!chip)
		return -EINVAL;

	dev = chip->dev;
	if (!dev || !chip->ops)
		return -EINVAL;

	dw = devm_kzalloc(dev, sizeof(*dw), GFP_KERNEL);
	if (!dw)
		return -ENOMEM;

	dw->chip = chip;

	if (dw->chip->mf == EDMA_MF_HDMA_NATIVE)
		dw_hdma_v0_core_register(dw);
	else
		dw_edma_v0_core_register(dw);

	raw_spin_lock_init(&dw->lock);

	dw->wr_ch_cnt = min_t(u16, chip->ll_wr_cnt,
			      dw_edma_core_ch_count(dw, EDMA_DIR_WRITE));
	dw->wr_ch_cnt = min_t(u16, dw->wr_ch_cnt, EDMA_MAX_WR_CH);

	dw->rd_ch_cnt = min_t(u16, chip->ll_rd_cnt,
			      dw_edma_core_ch_count(dw, EDMA_DIR_READ));
	dw->rd_ch_cnt = min_t(u16, dw->rd_ch_cnt, EDMA_MAX_RD_CH);

	if (!dw->wr_ch_cnt && !dw->rd_ch_cnt)
		return -EINVAL;

	dev_vdbg(dev, ...,
		 dw->wr_ch_cnt, dw->rd_ch_cnt);

	/* Allocate channels */
	dw->chan = devm_kcalloc(dev, dw->wr_ch_cnt + dw->rd_ch_cnt,
				sizeof(*dw->chan), GFP_KERNEL);
	if (!dw->chan)
		return -ENOMEM;

	snprintf(dw->name, sizeof(dw->name), "dw-edma-core:%s",
		 dev_name(chip->dev));
	...
	chip->dw = dw;

	return 0;

err_irq_free:
	for (i = (dw->nr_irqs - 1); i >= 0; i--)
		free_irq(chip->ops->irq_vector(dev, i), &dw->irq[i]);

	return err;
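/*
 * Worked example of the clamping above (values illustrative): if the chip
 * provides ll_wr_cnt = 8 linked-list regions but the controller reports only
 * 4 write channels, wr_ch_cnt = min(8, 4) = 4, further capped at
 * EDMA_MAX_WR_CH; a controller with neither write nor read channels is
 * rejected with -EINVAL.
 */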
/* dw_edma_remove() */
	struct device *dev = chip->dev;
	struct dw_edma *dw = chip->dw;
	...
	if (!dev || !dw)
		return -ENODEV;
	...
	/* Free IRQs */
	for (i = (dw->nr_irqs - 1); i >= 0; i--)
		free_irq(chip->ops->irq_vector(dev, i), &dw->irq[i]);
	...
	/* Deregister the DMA device */
	dma_async_device_unregister(&dw->dma);
	list_for_each_entry_safe(chan, _chan, &dw->dma.channels,
				 vc.chan.device_node) {
		tasklet_kill(&chan->vc.task);
		list_del(&chan->vc.chan.device_node);
	}

	return 0;