Lines matching "burst" / "write" in drivers/dma/dw-edma/dw-edma-core.c (Linux DesignWare eDMA/HDMA core driver). The listing is non-contiguous: the leading number on each line is its line number in the source file, and the trailing "in fn()" names the enclosing function.

1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (c) 2018-2019 Synopsys, Inc. and/or its affiliates.
17 #include <linux/dma-mapping.h>
20 #include "dw-edma-core.h"
21 #include "dw-edma-v0-core.h"
22 #include "dw-hdma-v0-core.h"
24 #include "../virt-dma.h"
35 struct dw_edma_chip *chip = chan->dw->chip; in dw_edma_get_pci_address()
37 if (chip->ops->pci_address) in dw_edma_get_pci_address()
38 return chip->ops->pci_address(chip->dev, cpu_addr); in dw_edma_get_pci_address()
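
The fragment above resolves a CPU address to a PCIe bus address through an optional chip callback; upstream the function simply falls back to returning cpu_addr unchanged when no callback is registered. A standalone sketch of this optional-hook pattern follows (all names below are illustrative, not the kernel's):

/* Standalone model of the optional pci_address() hook. */
#include <stdio.h>
#include <stdint.h>

struct chip_ops {
	/* Optional translation hook; may be NULL. */
	uint64_t (*pci_address)(void *dev, uint64_t cpu_addr);
};

static uint64_t fixed_offset(void *dev, uint64_t cpu_addr)
{
	(void)dev;
	return cpu_addr + 0x80000000ULL;	/* pretend bus-address offset */
}

static uint64_t get_pci_address(const struct chip_ops *ops, uint64_t cpu_addr)
{
	if (ops->pci_address)
		return ops->pci_address(NULL, cpu_addr);
	return cpu_addr;	/* 1:1 mapping when no hook is provided */
}

int main(void)
{
	struct chip_ops with = { .pci_address = fixed_offset };
	struct chip_ops without = { 0 };

	printf("hook:    0x%llx\n", (unsigned long long)get_pci_address(&with, 0x1000));
	printf("no hook: 0x%llx\n", (unsigned long long)get_pci_address(&without, 0x1000));
	return 0;
}

Keeping the 1:1 fallback inside one helper means callers never have to test for the hook themselves.
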
45 struct dw_edma_burst *burst; in dw_edma_alloc_burst() local
47 burst = kzalloc(sizeof(*burst), GFP_NOWAIT); in dw_edma_alloc_burst()
48 if (unlikely(!burst)) in dw_edma_alloc_burst()
51 INIT_LIST_HEAD(&burst->list); in dw_edma_alloc_burst()
52 if (chunk->burst) { in dw_edma_alloc_burst()
54 chunk->bursts_alloc++; in dw_edma_alloc_burst()
55 list_add_tail(&burst->list, &chunk->burst->list); in dw_edma_alloc_burst()
58 chunk->bursts_alloc = 0; in dw_edma_alloc_burst()
59 chunk->burst = burst; in dw_edma_alloc_burst()
62 return burst; in dw_edma_alloc_burst()
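
dw_edma_alloc_burst() treats the first burst specially: it becomes the list head and resets bursts_alloc to 0, while every later burst is appended to the tail and bumps the counter; dw_edma_device_transfer() later compares that counter against chan->ll_max to decide when a chunk is full. A standalone model of the bookkeeping (names illustrative):

/* Head-plus-tail counting: the first burst is the list head with
 * bursts_alloc = 0, every later burst bumps the counter. */
#include <stdio.h>

struct chunk {
	int bursts_alloc;
	int have_head;
};

static void alloc_burst(struct chunk *c)
{
	if (c->have_head) {
		c->bursts_alloc++;	/* append to the existing list */
	} else {
		c->bursts_alloc = 0;	/* first burst becomes the head */
		c->have_head = 1;
	}
}

int main(void)
{
	struct chunk c = { 0 };
	int ll_max = 3;	/* cf. dw_edma_channel_setup(): sz / EDMA_LL_SZ - 1 */

	for (int i = 1; i <= 5; i++) {
		if (c.have_head && c.bursts_alloc == ll_max) {
			printf("burst %d: chunk full, new chunk needed\n", i);
			c.have_head = 0;	/* as if a fresh chunk was allocated */
		}
		alloc_burst(&c);
		printf("burst %d -> bursts_alloc = %d\n", i, c.bursts_alloc);
	}
	return 0;
}
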
67 struct dw_edma_chip *chip = desc->chan->dw->chip; in dw_edma_alloc_chunk()
68 struct dw_edma_chan *chan = desc->chan; in dw_edma_alloc_chunk()
75 INIT_LIST_HEAD(&chunk->list); in dw_edma_alloc_chunk()
76 chunk->chan = chan; in dw_edma_alloc_chunk()
80 * - Odd chunks originate CB equal to 0 in dw_edma_alloc_chunk()
81 * - Even chunks originate CB equal to 1 in dw_edma_alloc_chunk()
83 chunk->cb = !(desc->chunks_alloc % 2); in dw_edma_alloc_chunk()
84 if (chan->dir == EDMA_DIR_WRITE) { in dw_edma_alloc_chunk()
85 chunk->ll_region.paddr = chip->ll_region_wr[chan->id].paddr; in dw_edma_alloc_chunk()
86 chunk->ll_region.vaddr = chip->ll_region_wr[chan->id].vaddr; in dw_edma_alloc_chunk()
88 chunk->ll_region.paddr = chip->ll_region_rd[chan->id].paddr; in dw_edma_alloc_chunk()
89 chunk->ll_region.vaddr = chip->ll_region_rd[chan->id].vaddr; in dw_edma_alloc_chunk()
92 if (desc->chunk) { in dw_edma_alloc_chunk()
98 desc->chunks_alloc++; in dw_edma_alloc_chunk()
99 list_add_tail(&chunk->list, &desc->chunk->list); in dw_edma_alloc_chunk()
102 chunk->burst = NULL; in dw_edma_alloc_chunk()
103 desc->chunks_alloc = 0; in dw_edma_alloc_chunk()
104 desc->chunk = chunk; in dw_edma_alloc_chunk()
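
The change bit (CB) computed above follows directly from the per-descriptor chunk counter, which the tail of the function resets to 0 whenever a fresh chunk list is started. A standalone sketch of the resulting alternation:

/* CB alternates with the per-descriptor chunk counter. */
#include <stdio.h>

int main(void)
{
	for (int chunks_alloc = 0; chunks_alloc < 6; chunks_alloc++)
		printf("chunks_alloc = %d -> cb = %d\n",
		       chunks_alloc, !(chunks_alloc % 2));
	return 0;
}
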
118 desc->chan = chan; in dw_edma_alloc_desc()
132 list_for_each_entry_safe(child, _next, &chunk->burst->list, list) { in dw_edma_free_burst()
133 list_del(&child->list); in dw_edma_free_burst()
135 chunk->bursts_alloc--; in dw_edma_free_burst()
140 chunk->burst = NULL; in dw_edma_free_burst()
147 if (!desc->chunk) in dw_edma_free_chunk()
151 list_for_each_entry_safe(child, _next, &desc->chunk->list, list) { in dw_edma_free_chunk()
153 list_del(&child->list); in dw_edma_free_chunk()
155 desc->chunks_alloc--; in dw_edma_free_chunk()
160 desc->chunk = NULL; in dw_edma_free_chunk()
176 struct dw_edma *dw = chan->dw; in dw_edma_start_transfer()
181 vd = vchan_next_desc(&chan->vc); in dw_edma_start_transfer()
189 child = list_first_entry_or_null(&desc->chunk->list, in dw_edma_start_transfer()
194 dw_edma_core_start(dw, child, !desc->xfer_sz); in dw_edma_start_transfer()
195 desc->xfer_sz += child->ll_region.sz; in dw_edma_start_transfer()
197 list_del(&child->list); in dw_edma_start_transfer()
199 desc->chunks_alloc--; in dw_edma_start_transfer()
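
dw_edma_start_transfer() pops the next chunk off the descriptor, kicks the hardware, and folds the chunk's linked-list size into the running xfer_sz total; note that !desc->xfer_sz is true only for a descriptor's very first chunk, so the core layer can distinguish the initial engine start from a mid-transfer re-arm. A standalone sketch of that accounting (sizes arbitrary):

/* Per-chunk progress accounting; "first" marks the initial chunk. */
#include <stdio.h>

int main(void)
{
	unsigned int chunk_sz[] = { 4096, 4096, 1024 };
	unsigned int xfer_sz = 0;

	for (unsigned int i = 0; i < 3; i++) {
		int first = !xfer_sz;	/* only the very first chunk */
		printf("chunk %u: start(first=%d), sz=%u\n", i, first, chunk_sz[i]);
		xfer_sz += chunk_sz[i];
	}
	printf("total handed to HW: %u\n", xfer_sz);
	return 0;
}
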
209 if (chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) { in dw_edma_device_caps()
210 if (chan->dir == EDMA_DIR_READ) in dw_edma_device_caps()
211 caps->directions = BIT(DMA_DEV_TO_MEM); in dw_edma_device_caps()
213 caps->directions = BIT(DMA_MEM_TO_DEV); in dw_edma_device_caps()
215 if (chan->dir == EDMA_DIR_WRITE) in dw_edma_device_caps()
216 caps->directions = BIT(DMA_DEV_TO_MEM); in dw_edma_device_caps()
218 caps->directions = BIT(DMA_MEM_TO_DEV); in dw_edma_device_caps()
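
The capability report picks exactly one direction per channel, and the mapping inverts between a locally controlled eDMA (DW_EDMA_CHIP_LOCAL) and one driven from across the PCIe bus; the same inversion is enforced when transfers are validated in dw_edma_device_transfer() further below. A standalone truth table of the mapping:

/* Direction mapping: a read channel is DEV_TO_MEM when the controller is
 * local and MEM_TO_DEV when remote; vice versa for a write channel. */
#include <stdio.h>

int main(void)
{
	const char *dirs[] = { "READ ", "WRITE" };

	for (int local = 1; local >= 0; local--)
		for (int write = 0; write < 2; write++) {
			int dev_to_mem = local ? !write : write;
			printf("%s chan, %s: %s\n", dirs[write],
			       local ? "local " : "remote",
			       dev_to_mem ? "DMA_DEV_TO_MEM" : "DMA_MEM_TO_DEV");
		}
	return 0;
}
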
227 memcpy(&chan->config, config, sizeof(*config)); in dw_edma_device_config()
228 chan->configured = true; in dw_edma_device_config()
238 if (!chan->configured) in dw_edma_device_pause()
239 err = -EPERM; in dw_edma_device_pause()
240 else if (chan->status != EDMA_ST_BUSY) in dw_edma_device_pause()
241 err = -EPERM; in dw_edma_device_pause()
242 else if (chan->request != EDMA_REQ_NONE) in dw_edma_device_pause()
243 err = -EPERM; in dw_edma_device_pause()
245 chan->request = EDMA_REQ_PAUSE; in dw_edma_device_pause()
255 if (!chan->configured) { in dw_edma_device_resume()
256 err = -EPERM; in dw_edma_device_resume()
257 } else if (chan->status != EDMA_ST_PAUSE) { in dw_edma_device_resume()
258 err = -EPERM; in dw_edma_device_resume()
259 } else if (chan->request != EDMA_REQ_NONE) { in dw_edma_device_resume()
260 err = -EPERM; in dw_edma_device_resume()
262 chan->status = EDMA_ST_BUSY; in dw_edma_device_resume()
274 if (!chan->configured) { in dw_edma_device_terminate_all()
276 } else if (chan->status == EDMA_ST_PAUSE) { in dw_edma_device_terminate_all()
277 chan->status = EDMA_ST_IDLE; in dw_edma_device_terminate_all()
278 chan->configured = false; in dw_edma_device_terminate_all()
279 } else if (chan->status == EDMA_ST_IDLE) { in dw_edma_device_terminate_all()
280 chan->configured = false; in dw_edma_device_terminate_all()
286 chan->status = EDMA_ST_IDLE; in dw_edma_device_terminate_all()
287 chan->configured = false; in dw_edma_device_terminate_all()
288 } else if (chan->request > EDMA_REQ_PAUSE) { in dw_edma_device_terminate_all()
289 err = -EPERM; in dw_edma_device_terminate_all()
291 chan->request = EDMA_REQ_STOP; in dw_edma_device_terminate_all()
302 if (!chan->configured) in dw_edma_device_issue_pending()
305 spin_lock_irqsave(&chan->vc.lock, flags); in dw_edma_device_issue_pending()
306 if (vchan_issue_pending(&chan->vc) && chan->request == EDMA_REQ_NONE && in dw_edma_device_issue_pending()
307 chan->status == EDMA_ST_IDLE) { in dw_edma_device_issue_pending()
308 chan->status = EDMA_ST_BUSY; in dw_edma_device_issue_pending()
311 spin_unlock_irqrestore(&chan->vc.lock, flags); in dw_edma_device_issue_pending()
329 if (ret == DMA_IN_PROGRESS && chan->status == EDMA_ST_PAUSE) in dw_edma_device_tx_status()
335 spin_lock_irqsave(&chan->vc.lock, flags); in dw_edma_device_tx_status()
336 vd = vchan_find_desc(&chan->vc, cookie); in dw_edma_device_tx_status()
340 residue = desc->alloc_sz - desc->xfer_sz; in dw_edma_device_tx_status()
342 spin_unlock_irqrestore(&chan->vc.lock, flags); in dw_edma_device_tx_status()
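
The residue reported to dmaengine is plain bookkeeping: the descriptor's total allocated size minus the bytes already handed to the hardware, which is why the driver later declares DMA_RESIDUE_GRANULARITY_DESCRIPTOR in dw_edma_channel_setup(). A standalone sketch (sizes arbitrary):

/* residue = allocated bytes - bytes already started on the HW. */
#include <stdio.h>

int main(void)
{
	unsigned int chunk_sz[] = { 8192, 8192, 4096 };
	unsigned int alloc_sz = 0, xfer_sz = 0;

	for (int i = 0; i < 3; i++)
		alloc_sz += chunk_sz[i];	/* accumulated at prepare time */
	for (int i = 0; i < 2; i++)
		xfer_sz += chunk_sz[i];		/* two chunks already started */

	printf("residue = %u of %u bytes\n", alloc_sz - xfer_sz, alloc_sz);
	return 0;
}
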
353 struct dw_edma_chan *chan = dchan2dw_edma_chan(xfer->dchan); in dw_edma_device_transfer()
354 enum dma_transfer_direction dir = xfer->direction; in dw_edma_device_transfer()
357 struct dw_edma_burst *burst; in dw_edma_device_transfer() local
364 if (!chan->configured) in dw_edma_device_transfer()
368  * Local Root Port/End-point              Remote End-point in dw_edma_device_transfer()
369  * +-----------------------+ PCIe bus +----------------------+ in dw_edma_device_transfer()
370  * |                       |    +-+   |                      | in dw_edma_device_transfer()
371  * |    DEV_TO_MEM   Rx Ch <----+ +---+ Tx Ch  DEV_TO_MEM    | in dw_edma_device_transfer()
373  * |    MEM_TO_DEV   Tx Ch +----+ +---> Rx Ch  MEM_TO_DEV    | in dw_edma_device_transfer()
374  * |                       |    +-+   |                      | in dw_edma_device_transfer()
375  * +-----------------------+          +----------------------+ in dw_edma_device_transfer()
381 * (EDMA_DIR_WRITE) - for the write operations (MEM_TO_DEV). in dw_edma_device_transfer()
387 * (DEV_TO_MEM) and the Rx channel (EDMA_DIR_READ) - for the write in dw_edma_device_transfer()
393 if (chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) { in dw_edma_device_transfer()
394 if ((chan->dir == EDMA_DIR_READ && dir != DMA_DEV_TO_MEM) || in dw_edma_device_transfer()
395 (chan->dir == EDMA_DIR_WRITE && dir != DMA_MEM_TO_DEV)) in dw_edma_device_transfer()
398 if ((chan->dir == EDMA_DIR_WRITE && dir != DMA_DEV_TO_MEM) || in dw_edma_device_transfer()
399 (chan->dir == EDMA_DIR_READ && dir != DMA_MEM_TO_DEV)) in dw_edma_device_transfer()
403 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
404 if (!xfer->xfer.cyclic.len || !xfer->xfer.cyclic.cnt) in dw_edma_device_transfer()
406 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
407 if (xfer->xfer.sg.len < 1) in dw_edma_device_transfer()
409 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
410 if (!xfer->xfer.il->numf || xfer->xfer.il->frame_size < 1) in dw_edma_device_transfer()
412 if (!xfer->xfer.il->src_inc || !xfer->xfer.il->dst_inc) in dw_edma_device_transfer()
426 if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
427 src_addr = xfer->xfer.il->src_start; in dw_edma_device_transfer()
428 dst_addr = xfer->xfer.il->dst_start; in dw_edma_device_transfer()
430 src_addr = chan->config.src_addr; in dw_edma_device_transfer()
431 dst_addr = chan->config.dst_addr; in dw_edma_device_transfer()
439 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
440 cnt = xfer->xfer.cyclic.cnt; in dw_edma_device_transfer()
441 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
442 cnt = xfer->xfer.sg.len; in dw_edma_device_transfer()
443 sg = xfer->xfer.sg.sgl; in dw_edma_device_transfer()
444 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
445 cnt = xfer->xfer.il->numf * xfer->xfer.il->frame_size; in dw_edma_device_transfer()
446 fsz = xfer->xfer.il->frame_size; in dw_edma_device_transfer()
450 if (xfer->type == EDMA_XFER_SCATTER_GATHER && !sg) in dw_edma_device_transfer()
453 if (chunk->bursts_alloc == chan->ll_max) { in dw_edma_device_transfer()
459 burst = dw_edma_alloc_burst(chunk); in dw_edma_device_transfer()
460 if (unlikely(!burst)) in dw_edma_device_transfer()
463 if (xfer->type == EDMA_XFER_CYCLIC) in dw_edma_device_transfer()
464 burst->sz = xfer->xfer.cyclic.len; in dw_edma_device_transfer()
465 else if (xfer->type == EDMA_XFER_SCATTER_GATHER) in dw_edma_device_transfer()
466 burst->sz = sg_dma_len(sg); in dw_edma_device_transfer()
467 else if (xfer->type == EDMA_XFER_INTERLEAVED) in dw_edma_device_transfer()
468 burst->sz = xfer->xfer.il->sgl[i % fsz].size; in dw_edma_device_transfer()
470 chunk->ll_region.sz += burst->sz; in dw_edma_device_transfer()
471 desc->alloc_sz += burst->sz; in dw_edma_device_transfer()
474 burst->sar = src_addr; in dw_edma_device_transfer()
475 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
476 burst->dar = xfer->xfer.cyclic.paddr; in dw_edma_device_transfer()
477 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
479 burst->dar = sg_dma_address(sg); in dw_edma_device_transfer()
487 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
488 burst->dar = dst_addr; in dw_edma_device_transfer()
491 burst->dar = dst_addr; in dw_edma_device_transfer()
492 if (xfer->type == EDMA_XFER_CYCLIC) { in dw_edma_device_transfer()
493 burst->sar = xfer->xfer.cyclic.paddr; in dw_edma_device_transfer()
494 } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
496 burst->sar = sg_dma_address(sg); in dw_edma_device_transfer()
504 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
505 burst->sar = src_addr; in dw_edma_device_transfer()
509 if (xfer->type == EDMA_XFER_SCATTER_GATHER) { in dw_edma_device_transfer()
511 } else if (xfer->type == EDMA_XFER_INTERLEAVED) { in dw_edma_device_transfer()
512 struct dma_interleaved_template *il = xfer->xfer.il; in dw_edma_device_transfer()
513 struct data_chunk *dc = &il->sgl[i % fsz]; in dw_edma_device_transfer()
515 src_addr += burst->sz; in dw_edma_device_transfer()
516 if (il->src_sgl) in dw_edma_device_transfer()
519 dst_addr += burst->sz; in dw_edma_device_transfer()
520 if (il->dst_sgl) in dw_edma_device_transfer()
525 return vchan_tx_prep(&chan->vc, &desc->vd, xfer->flags); in dw_edma_device_transfer()
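
For EDMA_XFER_INTERLEAVED the prepare routine flattens numf frames of frame_size data chunks into cnt bursts, sizing burst i from sgl[i % frame_size] and advancing the source/destination addresses by the burst size (plus the inter-chunk gap when src_sgl/dst_sgl is set, elided here). A standalone sketch of the walk (values arbitrary):

/* Interleaved walk: cnt = numf * frame_size bursts, sized per frame slot. */
#include <stdio.h>

int main(void)
{
	unsigned int sgl[] = { 256, 512 };	/* data_chunk sizes per frame */
	unsigned int fsz = 2, numf = 3;
	unsigned long long src = 0x1000, dst = 0x9000;

	for (unsigned int i = 0; i < numf * fsz; i++) {
		unsigned int sz = sgl[i % fsz];
		printf("burst %u: sar=%#llx dar=%#llx sz=%u\n", i, src, dst, sz);
		src += sz;	/* il->src_inc assumed true */
		dst += sz;	/* il->dst_inc assumed true */
	}
	return 0;
}
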
579 xfer.direction = ilt->dir; in dw_edma_device_prep_interleaved_dma()
594 if (!vd->tx.callback_result) in dw_hdma_set_callback_result()
599 residue = desc->alloc_sz - desc->xfer_sz; in dw_hdma_set_callback_result()
601 res = &vd->tx_result; in dw_hdma_set_callback_result()
602 res->result = result; in dw_hdma_set_callback_result()
603 res->residue = residue; in dw_hdma_set_callback_result()
612 spin_lock_irqsave(&chan->vc.lock, flags); in dw_edma_done_interrupt()
613 vd = vchan_next_desc(&chan->vc); in dw_edma_done_interrupt()
615 switch (chan->request) { in dw_edma_done_interrupt()
618 if (!desc->chunks_alloc) { in dw_edma_done_interrupt()
621 list_del(&vd->node); in dw_edma_done_interrupt()
627 chan->status = dw_edma_start_transfer(chan) ? EDMA_ST_BUSY : EDMA_ST_IDLE; in dw_edma_done_interrupt()
631 list_del(&vd->node); in dw_edma_done_interrupt()
633 chan->request = EDMA_REQ_NONE; in dw_edma_done_interrupt()
634 chan->status = EDMA_ST_IDLE; in dw_edma_done_interrupt()
638 chan->request = EDMA_REQ_NONE; in dw_edma_done_interrupt()
639 chan->status = EDMA_ST_PAUSE; in dw_edma_done_interrupt()
646 spin_unlock_irqrestore(&chan->vc.lock, flags); in dw_edma_done_interrupt()
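
The completion interrupt consults the channel's pending request to decide the next state: with no request it either restarts the engine for the remaining chunks or drains to idle, a stop request completes the descriptor and idles the channel, and a pause request parks it in EDMA_ST_PAUSE. A standalone model of that switch (names illustrative):

/* Completion-IRQ state machine sketch. */
#include <stdio.h>

enum req { REQ_NONE, REQ_STOP, REQ_PAUSE };
enum st  { ST_IDLE, ST_BUSY, ST_PAUSE };

static enum st on_done(enum req *request, int more_work)
{
	switch (*request) {
	case REQ_NONE:
		return more_work ? ST_BUSY : ST_IDLE;	/* re-arm or drain */
	case REQ_STOP:
		*request = REQ_NONE;
		return ST_IDLE;
	case REQ_PAUSE:
		*request = REQ_NONE;
		return ST_PAUSE;
	}
	return ST_IDLE;
}

int main(void)
{
	enum req r = REQ_PAUSE;
	enum st s = on_done(&r, 1);
	printf("after pause request: status=%d request=%d\n", s, r);
	r = REQ_NONE;
	s = on_done(&r, 0);
	printf("idle drain:          status=%d request=%d\n", s, r);
	return 0;
}
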
654 spin_lock_irqsave(&chan->vc.lock, flags); in dw_edma_abort_interrupt()
655 vd = vchan_next_desc(&chan->vc); in dw_edma_abort_interrupt()
658 list_del(&vd->node); in dw_edma_abort_interrupt()
661 spin_unlock_irqrestore(&chan->vc.lock, flags); in dw_edma_abort_interrupt()
662 chan->request = EDMA_REQ_NONE; in dw_edma_abort_interrupt()
663 chan->status = EDMA_ST_IDLE; in dw_edma_abort_interrupt()
698 if (chan->status != EDMA_ST_IDLE) in dw_edma_alloc_chan_resources()
699 return -EBUSY; in dw_edma_alloc_chan_resources()
723 struct dw_edma_chip *chip = dw->chip; in dw_edma_channel_setup()
724 struct device *dev = chip->dev; in dw_edma_channel_setup()
731 ch_cnt = dw->wr_ch_cnt + dw->rd_ch_cnt; in dw_edma_channel_setup()
732 dma = &dw->dma; in dw_edma_channel_setup()
734 INIT_LIST_HEAD(&dma->channels); in dw_edma_channel_setup()
737 chan = &dw->chan[i]; in dw_edma_channel_setup()
739 chan->dw = dw; in dw_edma_channel_setup()
741 if (i < dw->wr_ch_cnt) { in dw_edma_channel_setup()
742 chan->id = i; in dw_edma_channel_setup()
743 chan->dir = EDMA_DIR_WRITE; in dw_edma_channel_setup()
745 chan->id = i - dw->wr_ch_cnt; in dw_edma_channel_setup()
746 chan->dir = EDMA_DIR_READ; in dw_edma_channel_setup()
749 chan->configured = false; in dw_edma_channel_setup()
750 chan->request = EDMA_REQ_NONE; in dw_edma_channel_setup()
751 chan->status = EDMA_ST_IDLE; in dw_edma_channel_setup()
753 if (chan->dir == EDMA_DIR_WRITE) in dw_edma_channel_setup()
754 chan->ll_max = (chip->ll_region_wr[chan->id].sz / EDMA_LL_SZ); in dw_edma_channel_setup()
756 chan->ll_max = (chip->ll_region_rd[chan->id].sz / EDMA_LL_SZ); in dw_edma_channel_setup()
757 chan->ll_max -= 1; in dw_edma_channel_setup()
760 str_write_read(chan->dir == EDMA_DIR_WRITE), in dw_edma_channel_setup()
761 chan->id, chan->ll_max); in dw_edma_channel_setup()
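
ll_max above is the number of bursts a chunk may carry: the per-channel linked-list region is divided into fixed-size elements (upstream defines EDMA_LL_SZ as 24 bytes) and one element is subtracted, presumably held back for the list-link entry. A standalone sketch of the arithmetic (region sizes arbitrary):

/* ll_max = elements in the LL region, minus one in reserve. */
#include <stdio.h>

#define EDMA_LL_SZ 24

int main(void)
{
	unsigned int region_sz[] = { 4096, 8192, 65536 };

	for (int i = 0; i < 3; i++) {
		unsigned int ll_max = region_sz[i] / EDMA_LL_SZ - 1;
		printf("region %6u bytes -> ll_max = %u bursts\n",
		       region_sz[i], ll_max);
	}
	return 0;
}
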
763 if (dw->nr_irqs == 1) in dw_edma_channel_setup()
765 else if (chan->dir == EDMA_DIR_WRITE) in dw_edma_channel_setup()
766 pos = chan->id % wr_alloc; in dw_edma_channel_setup()
768 pos = wr_alloc + chan->id % rd_alloc; in dw_edma_channel_setup()
770 irq = &dw->irq[pos]; in dw_edma_channel_setup()
772 if (chan->dir == EDMA_DIR_WRITE) in dw_edma_channel_setup()
773 irq->wr_mask |= BIT(chan->id); in dw_edma_channel_setup()
775 irq->rd_mask |= BIT(chan->id); in dw_edma_channel_setup()
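
Channels are fanned out across the allocated vectors by simple modulo: write channel id maps to vector id % wr_alloc, read channel id to wr_alloc + id % rd_alloc, and each vector's wr_mask/rd_mask accumulates BIT(id) for every channel routed to it so the interrupt handler can demultiplex. A standalone sketch (counts arbitrary):

/* Modulo fan-out of channels onto IRQ vectors, with per-vector masks. */
#include <stdio.h>

int main(void)
{
	unsigned int wr_ch = 4, rd_ch = 3, wr_alloc = 2, rd_alloc = 2;
	unsigned int wr_mask[2] = { 0 }, rd_mask[2] = { 0 };

	for (unsigned int id = 0; id < wr_ch; id++)
		wr_mask[id % wr_alloc] |= 1u << id;
	for (unsigned int id = 0; id < rd_ch; id++)
		rd_mask[id % rd_alloc] |= 1u << id;

	for (unsigned int p = 0; p < wr_alloc; p++)
		printf("irq %u: wr_mask=%#x\n", p, wr_mask[p]);
	for (unsigned int p = 0; p < rd_alloc; p++)
		printf("irq %u: rd_mask=%#x\n", wr_alloc + p, rd_mask[p]);
	return 0;
}
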
777 irq->dw = dw; in dw_edma_channel_setup()
778 memcpy(&chan->msi, &irq->msi, sizeof(chan->msi)); in dw_edma_channel_setup()
781 str_write_read(chan->dir == EDMA_DIR_WRITE), in dw_edma_channel_setup()
782 chan->id, in dw_edma_channel_setup()
783 chan->msi.address_hi, chan->msi.address_lo, in dw_edma_channel_setup()
784 chan->msi.data); in dw_edma_channel_setup()
786 chan->vc.desc_free = vchan_free_desc; in dw_edma_channel_setup()
787 chan->vc.chan.private = chan->dir == EDMA_DIR_WRITE ? in dw_edma_channel_setup()
788 &dw->chip->dt_region_wr[chan->id] : in dw_edma_channel_setup()
789 &dw->chip->dt_region_rd[chan->id]; in dw_edma_channel_setup()
791 vchan_init(&chan->vc, dma); in dw_edma_channel_setup()
797 dma_cap_zero(dma->cap_mask); in dw_edma_channel_setup()
798 dma_cap_set(DMA_SLAVE, dma->cap_mask); in dw_edma_channel_setup()
799 dma_cap_set(DMA_CYCLIC, dma->cap_mask); in dw_edma_channel_setup()
800 dma_cap_set(DMA_PRIVATE, dma->cap_mask); in dw_edma_channel_setup()
801 dma_cap_set(DMA_INTERLEAVE, dma->cap_mask); in dw_edma_channel_setup()
802 dma->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in dw_edma_channel_setup()
803 dma->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in dw_edma_channel_setup()
804 dma->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in dw_edma_channel_setup()
805 dma->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR; in dw_edma_channel_setup()
808 dma->dev = chip->dev; in dw_edma_channel_setup()
809 dma->device_alloc_chan_resources = dw_edma_alloc_chan_resources; in dw_edma_channel_setup()
810 dma->device_free_chan_resources = dw_edma_free_chan_resources; in dw_edma_channel_setup()
811 dma->device_caps = dw_edma_device_caps; in dw_edma_channel_setup()
812 dma->device_config = dw_edma_device_config; in dw_edma_channel_setup()
813 dma->device_pause = dw_edma_device_pause; in dw_edma_channel_setup()
814 dma->device_resume = dw_edma_device_resume; in dw_edma_channel_setup()
815 dma->device_terminate_all = dw_edma_device_terminate_all; in dw_edma_channel_setup()
816 dma->device_issue_pending = dw_edma_device_issue_pending; in dw_edma_channel_setup()
817 dma->device_tx_status = dw_edma_device_tx_status; in dw_edma_channel_setup()
818 dma->device_prep_slave_sg = dw_edma_device_prep_slave_sg; in dw_edma_channel_setup()
819 dma->device_prep_dma_cyclic = dw_edma_device_prep_dma_cyclic; in dw_edma_channel_setup()
820 dma->device_prep_interleaved_dma = dw_edma_device_prep_interleaved_dma; in dw_edma_channel_setup()
822 dma_set_max_seg_size(dma->dev, U32_MAX); in dw_edma_channel_setup()
832 (*nr_irqs)--; in dw_edma_dec_irq_alloc()
845 struct dw_edma_chip *chip = dw->chip; in dw_edma_irq_request()
846 struct device *dev = dw->chip->dev; in dw_edma_irq_request()
853 ch_cnt = dw->wr_ch_cnt + dw->rd_ch_cnt; in dw_edma_irq_request()
855 if (chip->nr_irqs < 1 || !chip->ops->irq_vector) in dw_edma_irq_request()
856 return -EINVAL; in dw_edma_irq_request()
858 dw->irq = devm_kcalloc(dev, chip->nr_irqs, sizeof(*dw->irq), GFP_KERNEL); in dw_edma_irq_request()
859 if (!dw->irq) in dw_edma_irq_request()
860 return -ENOMEM; in dw_edma_irq_request()
862 if (chip->nr_irqs == 1) { in dw_edma_irq_request()
864 irq = chip->ops->irq_vector(dev, 0); in dw_edma_irq_request()
866 IRQF_SHARED, dw->name, &dw->irq[0]); in dw_edma_irq_request()
868 dw->nr_irqs = 0; in dw_edma_irq_request()
873 get_cached_msi_msg(irq, &dw->irq[0].msi); in dw_edma_irq_request()
875 dw->nr_irqs = 1; in dw_edma_irq_request()
878 int tmp = chip->nr_irqs; in dw_edma_irq_request()
881 dw_edma_dec_irq_alloc(&tmp, wr_alloc, dw->wr_ch_cnt); in dw_edma_irq_request()
882 dw_edma_dec_irq_alloc(&tmp, rd_alloc, dw->rd_ch_cnt); in dw_edma_irq_request()
885 dw_edma_add_irq_mask(&wr_mask, *wr_alloc, dw->wr_ch_cnt); in dw_edma_irq_request()
886 dw_edma_add_irq_mask(&rd_mask, *rd_alloc, dw->rd_ch_cnt); in dw_edma_irq_request()
889 irq = chip->ops->irq_vector(dev, i); in dw_edma_irq_request()
894 IRQF_SHARED, dw->name, in dw_edma_irq_request()
895 &dw->irq[i]); in dw_edma_irq_request()
900 get_cached_msi_msg(irq, &dw->irq[i].msi); in dw_edma_irq_request()
903 dw->nr_irqs = i; in dw_edma_irq_request()
909 for (i--; i >= 0; i--) { in dw_edma_irq_request()
910 irq = chip->ops->irq_vector(dev, i); in dw_edma_irq_request()
911 free_irq(irq, &dw->irq[i]); in dw_edma_irq_request()
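
When more than one vector is available, the write/read split is computed by repeatedly handing one remaining vector to each side until the vectors run out or every channel is covered. Only one line of dw_edma_dec_irq_alloc() appears in this listing, so the helper body below is an assumption inferred from its call sites, not the upstream code:

/* Model of the write/read vector split; dec_irq_alloc() body is assumed. */
#include <stdio.h>

static void dec_irq_alloc(int *nr_irqs, unsigned int *alloc, unsigned int cnt)
{
	if (*nr_irqs > 0 && *alloc < cnt) {	/* assumption, cf. (*nr_irqs)-- */
		(*alloc)++;
		(*nr_irqs)--;
	}
}

int main(void)
{
	int tmp = 4;	/* vectors to distribute */
	unsigned int wr_alloc = 0, rd_alloc = 0, wr_ch = 4, rd_ch = 2;

	while (tmp && wr_alloc + rd_alloc < wr_ch + rd_ch) {
		dec_irq_alloc(&tmp, &wr_alloc, wr_ch);
		dec_irq_alloc(&tmp, &rd_alloc, rd_ch);
	}
	printf("wr_alloc=%u rd_alloc=%u (left=%d)\n", wr_alloc, rd_alloc, tmp);
	return 0;
}

With 4 spare vectors, 4 write channels and 2 read channels this yields wr_alloc=2 and rd_alloc=2.
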
926 return -EINVAL; in dw_edma_probe()
928 dev = chip->dev; in dw_edma_probe()
929 if (!dev || !chip->ops) in dw_edma_probe()
930 return -EINVAL; in dw_edma_probe()
934 return -ENOMEM; in dw_edma_probe()
936 dw->chip = chip; in dw_edma_probe()
938 if (dw->chip->mf == EDMA_MF_HDMA_NATIVE) in dw_edma_probe()
943 raw_spin_lock_init(&dw->lock); in dw_edma_probe()
945 dw->wr_ch_cnt = min_t(u16, chip->ll_wr_cnt, in dw_edma_probe()
947 dw->wr_ch_cnt = min_t(u16, dw->wr_ch_cnt, EDMA_MAX_WR_CH); in dw_edma_probe()
949 dw->rd_ch_cnt = min_t(u16, chip->ll_rd_cnt, in dw_edma_probe()
951 dw->rd_ch_cnt = min_t(u16, dw->rd_ch_cnt, EDMA_MAX_RD_CH); in dw_edma_probe()
953 if (!dw->wr_ch_cnt && !dw->rd_ch_cnt) in dw_edma_probe()
954 return -EINVAL; in dw_edma_probe()
957 dw->wr_ch_cnt, dw->rd_ch_cnt); in dw_edma_probe()
960 dw->chan = devm_kcalloc(dev, dw->wr_ch_cnt + dw->rd_ch_cnt, in dw_edma_probe()
961 sizeof(*dw->chan), GFP_KERNEL); in dw_edma_probe()
962 if (!dw->chan) in dw_edma_probe()
963 return -ENOMEM; in dw_edma_probe()
965 snprintf(dw->name, sizeof(dw->name), "dw-edma-core:%s", in dw_edma_probe()
966 dev_name(chip->dev)); in dw_edma_probe()
976 /* Setup write/read channels */ in dw_edma_probe()
984 chip->dw = dw; in dw_edma_probe()
989 for (i = (dw->nr_irqs - 1); i >= 0; i--) in dw_edma_probe()
990 free_irq(chip->ops->irq_vector(dev, i), &dw->irq[i]); in dw_edma_probe()
999 struct device *dev = chip->dev; in dw_edma_remove()
1000 struct dw_edma *dw = chip->dw; in dw_edma_remove()
1005 return -ENODEV; in dw_edma_remove()
1011 for (i = (dw->nr_irqs - 1); i >= 0; i--) in dw_edma_remove()
1012 free_irq(chip->ops->irq_vector(dev, i), &dw->irq[i]); in dw_edma_remove()
1015 dma_async_device_unregister(&dw->dma); in dw_edma_remove()
1016 list_for_each_entry_safe(chan, _chan, &dw->dma.channels, in dw_edma_remove()
1018 tasklet_kill(&chan->vc.task); in dw_edma_remove()
1019 list_del(&chan->vc.chan.device_node); in dw_edma_remove()