Lines Matching refs:dwc

49 static struct dw_desc *dwc_first_active(struct dw_dma_chan *dwc)  in dwc_first_active()  argument
51 return to_dw_desc(dwc->active_list.next); in dwc_first_active()
57 struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan); in dwc_tx_submit() local
61 spin_lock_irqsave(&dwc->lock, flags); in dwc_tx_submit()
70 list_add_tail(&desc->desc_node, &dwc->queue); in dwc_tx_submit()
71 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_tx_submit()
78 static struct dw_desc *dwc_desc_get(struct dw_dma_chan *dwc) in dwc_desc_get() argument
80 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_desc_get()
88 dwc->descs_allocated++; in dwc_desc_get()
90 dma_async_tx_descriptor_init(&desc->txd, &dwc->chan); in dwc_desc_get()
97 static void dwc_desc_put(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_desc_put() argument
99 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_desc_put()
108 dwc->descs_allocated--; in dwc_desc_put()
112 dwc->descs_allocated--; in dwc_desc_put()
115 static void dwc_initialize(struct dw_dma_chan *dwc) in dwc_initialize() argument
117 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_initialize()
119 dw->initialize_chan(dwc); in dwc_initialize()
122 channel_set_bit(dw, MASK.XFER, dwc->mask); in dwc_initialize()
123 channel_set_bit(dw, MASK.ERROR, dwc->mask); in dwc_initialize()
128 static inline void dwc_dump_chan_regs(struct dw_dma_chan *dwc) in dwc_dump_chan_regs() argument
130 dev_err(chan2dev(&dwc->chan), in dwc_dump_chan_regs()
132 channel_readl(dwc, SAR), in dwc_dump_chan_regs()
133 channel_readl(dwc, DAR), in dwc_dump_chan_regs()
134 channel_readl(dwc, LLP), in dwc_dump_chan_regs()
135 channel_readl(dwc, CTL_HI), in dwc_dump_chan_regs()
136 channel_readl(dwc, CTL_LO)); in dwc_dump_chan_regs()
139 static inline void dwc_chan_disable(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_chan_disable() argument
141 channel_clear_bit(dw, CH_EN, dwc->mask); in dwc_chan_disable()
142 while (dma_readl(dw, CH_EN) & dwc->mask) in dwc_chan_disable()
149 static inline void dwc_do_single_block(struct dw_dma_chan *dwc, in dwc_do_single_block() argument
152 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_do_single_block()
161 channel_writel(dwc, SAR, lli_read(desc, sar)); in dwc_do_single_block()
162 channel_writel(dwc, DAR, lli_read(desc, dar)); in dwc_do_single_block()
163 channel_writel(dwc, CTL_LO, ctllo); in dwc_do_single_block()
164 channel_writel(dwc, CTL_HI, lli_read(desc, ctlhi)); in dwc_do_single_block()
165 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_do_single_block()
168 dwc->tx_node_active = dwc->tx_node_active->next; in dwc_do_single_block()
172 static void dwc_dostart(struct dw_dma_chan *dwc, struct dw_desc *first) in dwc_dostart() argument
174 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_dostart()
175 u8 lms = DWC_LLP_LMS(dwc->dws.m_master); in dwc_dostart()
179 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_dostart()
180 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
183 dwc_dump_chan_regs(dwc); in dwc_dostart()
189 if (dwc->nollp) { in dwc_dostart()
191 &dwc->flags); in dwc_dostart()
193 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
198 dwc_initialize(dwc); in dwc_dostart()
201 dwc->tx_node_active = &first->tx_list; in dwc_dostart()
204 dwc_do_single_block(dwc, first); in dwc_dostart()
209 dwc_initialize(dwc); in dwc_dostart()
211 channel_writel(dwc, LLP, first->txd.phys | lms); in dwc_dostart()
212 channel_writel(dwc, CTL_LO, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN); in dwc_dostart()
213 channel_writel(dwc, CTL_HI, 0); in dwc_dostart()
214 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_dostart()
217 static void dwc_dostart_first_queued(struct dw_dma_chan *dwc) in dwc_dostart_first_queued() argument
221 if (list_empty(&dwc->queue)) in dwc_dostart_first_queued()
224 list_move(dwc->queue.next, &dwc->active_list); in dwc_dostart_first_queued()
225 desc = dwc_first_active(dwc); in dwc_dostart_first_queued()
226 dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__, desc->txd.cookie); in dwc_dostart_first_queued()
227 dwc_dostart(dwc, desc); in dwc_dostart_first_queued()
233 dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc, in dwc_descriptor_complete() argument
241 dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie); in dwc_descriptor_complete()
243 spin_lock_irqsave(&dwc->lock, flags); in dwc_descriptor_complete()
254 dwc_desc_put(dwc, desc); in dwc_descriptor_complete()
255 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_descriptor_complete()
260 static void dwc_complete_all(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_complete_all() argument
266 spin_lock_irqsave(&dwc->lock, flags); in dwc_complete_all()
267 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_complete_all()
268 dev_err(chan2dev(&dwc->chan), in dwc_complete_all()
272 dwc_chan_disable(dw, dwc); in dwc_complete_all()
279 list_splice_init(&dwc->active_list, &list); in dwc_complete_all()
280 dwc_dostart_first_queued(dwc); in dwc_complete_all()
282 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_complete_all()
285 dwc_descriptor_complete(dwc, desc, true); in dwc_complete_all()
289 static inline u32 dwc_get_sent(struct dw_dma_chan *dwc) in dwc_get_sent() argument
291 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_get_sent()
292 u32 ctlhi = channel_readl(dwc, CTL_HI); in dwc_get_sent()
293 u32 ctllo = channel_readl(dwc, CTL_LO); in dwc_get_sent()
295 return dw->block2bytes(dwc, ctlhi, ctllo >> 4 & 7); in dwc_get_sent()
298 static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_scan_descriptors() argument
306 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
307 llp = channel_readl(dwc, LLP); in dwc_scan_descriptors()
310 if (status_xfer & dwc->mask) { in dwc_scan_descriptors()
312 dma_writel(dw, CLEAR.XFER, dwc->mask); in dwc_scan_descriptors()
314 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) { in dwc_scan_descriptors()
315 struct list_head *head, *active = dwc->tx_node_active; in dwc_scan_descriptors()
321 desc = dwc_first_active(dwc); in dwc_scan_descriptors()
334 dwc_do_single_block(dwc, child); in dwc_scan_descriptors()
336 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
341 clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); in dwc_scan_descriptors()
344 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
346 dwc_complete_all(dw, dwc); in dwc_scan_descriptors()
350 if (list_empty(&dwc->active_list)) { in dwc_scan_descriptors()
351 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
355 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) { in dwc_scan_descriptors()
356 dev_vdbg(chan2dev(&dwc->chan), "%s: soft LLP mode\n", __func__); in dwc_scan_descriptors()
357 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
361 dev_vdbg(chan2dev(&dwc->chan), "%s: llp=%pad\n", __func__, &llp); in dwc_scan_descriptors()
363 list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) { in dwc_scan_descriptors()
369 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
376 desc->residue -= dwc_get_sent(dwc); in dwc_scan_descriptors()
377 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
385 desc->residue -= dwc_get_sent(dwc); in dwc_scan_descriptors()
386 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
396 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
397 dwc_descriptor_complete(dwc, desc, true); in dwc_scan_descriptors()
398 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
401 dev_err(chan2dev(&dwc->chan), in dwc_scan_descriptors()
405 dwc_chan_disable(dw, dwc); in dwc_scan_descriptors()
407 dwc_dostart_first_queued(dwc); in dwc_scan_descriptors()
408 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
411 static inline void dwc_dump_lli(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_dump_lli() argument
413 dev_crit(chan2dev(&dwc->chan), " desc: s0x%x d0x%x l0x%x c0x%x:%x\n", in dwc_dump_lli()
421 static void dwc_handle_error(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_handle_error() argument
427 dwc_scan_descriptors(dw, dwc); in dwc_handle_error()
429 spin_lock_irqsave(&dwc->lock, flags); in dwc_handle_error()
436 bad_desc = dwc_first_active(dwc); in dwc_handle_error()
438 list_move(dwc->queue.next, dwc->active_list.prev); in dwc_handle_error()
441 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dwc_handle_error()
442 if (!list_empty(&dwc->active_list)) in dwc_handle_error()
443 dwc_dostart(dwc, dwc_first_active(dwc)); in dwc_handle_error()
452 dev_WARN(chan2dev(&dwc->chan), "Bad descriptor submitted for DMA!\n" in dwc_handle_error()
454 dwc_dump_lli(dwc, bad_desc); in dwc_handle_error()
456 dwc_dump_lli(dwc, child); in dwc_handle_error()
458 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_handle_error()
461 dwc_descriptor_complete(dwc, bad_desc, true); in dwc_handle_error()
467 struct dw_dma_chan *dwc; in dw_dma_tasklet() local
478 dwc = &dw->chan[i]; in dw_dma_tasklet()
479 if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) in dw_dma_tasklet()
482 dwc_handle_error(dw, dwc); in dw_dma_tasklet()
484 dwc_scan_descriptors(dw, dwc); in dw_dma_tasklet()
541 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_dma_memcpy() local
548 u8 m_master = dwc->dws.m_master; in dwc_prep_dma_memcpy()
564 dwc->direction = DMA_MEM_TO_MEM; in dwc_prep_dma_memcpy()
568 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_dma_memcpy()
577 desc = dwc_desc_get(dwc); in dwc_prep_dma_memcpy()
581 ctlhi = dw->bytes2block(dwc, len - offset, src_width, &xfer_count); in dwc_prep_dma_memcpy()
610 dwc_desc_put(dwc, first); in dwc_prep_dma_memcpy()
619 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_slave_sg() local
621 struct dma_slave_config *sconfig = &dwc->dma_sconfig; in dwc_prep_slave_sg()
625 u8 lms = DWC_LLP_LMS(dwc->dws.m_master); in dwc_prep_slave_sg()
638 dwc->direction = direction; in dwc_prep_slave_sg()
646 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_slave_sg()
665 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
669 ctlhi = dw->bytes2block(dwc, len, mem_width, &dlen); in dwc_prep_slave_sg()
696 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_slave_sg()
713 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
717 ctlhi = dw->bytes2block(dwc, len, reg_width, &dlen); in dwc_prep_slave_sg()
759 dwc_desc_put(dwc, first); in dwc_prep_slave_sg()
765 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_filter() local
772 if (dws->channels && !(dws->channels & dwc->mask)) in dw_dma_filter()
776 memcpy(&dwc->dws, dws, sizeof(struct dw_dma_slave)); in dw_dma_filter()
784 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_verify_maxburst() local
786 dwc->dma_sconfig.src_maxburst = in dwc_verify_maxburst()
787 clamp(dwc->dma_sconfig.src_maxburst, 1U, dwc->max_burst); in dwc_verify_maxburst()
788 dwc->dma_sconfig.dst_maxburst = in dwc_verify_maxburst()
789 clamp(dwc->dma_sconfig.dst_maxburst, 1U, dwc->max_burst); in dwc_verify_maxburst()
791 dwc->dma_sconfig.src_maxburst = in dwc_verify_maxburst()
792 rounddown_pow_of_two(dwc->dma_sconfig.src_maxburst); in dwc_verify_maxburst()
793 dwc->dma_sconfig.dst_maxburst = in dwc_verify_maxburst()
794 rounddown_pow_of_two(dwc->dma_sconfig.dst_maxburst); in dwc_verify_maxburst()
801 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_verify_p_buswidth() local
805 if (dwc->dma_sconfig.direction == DMA_MEM_TO_DEV) in dwc_verify_p_buswidth()
806 reg_width = dwc->dma_sconfig.dst_addr_width; in dwc_verify_p_buswidth()
807 else if (dwc->dma_sconfig.direction == DMA_DEV_TO_MEM) in dwc_verify_p_buswidth()
808 reg_width = dwc->dma_sconfig.src_addr_width; in dwc_verify_p_buswidth()
812 max_width = dw->pdata->data_width[dwc->dws.p_master]; in dwc_verify_p_buswidth()
823 if (dwc->dma_sconfig.direction == DMA_MEM_TO_DEV) in dwc_verify_p_buswidth()
824 dwc->dma_sconfig.dst_addr_width = reg_width; in dwc_verify_p_buswidth()
826 dwc->dma_sconfig.src_addr_width = reg_width; in dwc_verify_p_buswidth()
833 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_verify_m_buswidth() local
837 mem_width = dw->pdata->data_width[dwc->dws.m_master]; in dwc_verify_m_buswidth()
850 if (dwc->dma_sconfig.direction == DMA_MEM_TO_DEV) { in dwc_verify_m_buswidth()
851 reg_width = dwc->dma_sconfig.dst_addr_width; in dwc_verify_m_buswidth()
855 dwc->dma_sconfig.src_addr_width = mem_width; in dwc_verify_m_buswidth()
856 } else if (dwc->dma_sconfig.direction == DMA_DEV_TO_MEM) { in dwc_verify_m_buswidth()
857 reg_width = dwc->dma_sconfig.src_addr_width; in dwc_verify_m_buswidth()
858 reg_burst = dwc->dma_sconfig.src_maxburst; in dwc_verify_m_buswidth()
860 dwc->dma_sconfig.dst_addr_width = min(mem_width, reg_width * reg_burst); in dwc_verify_m_buswidth()
868 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_config() local
871 memcpy(&dwc->dma_sconfig, sconfig, sizeof(*sconfig)); in dwc_config()
888 static void dwc_chan_pause(struct dw_dma_chan *dwc, bool drain) in dwc_chan_pause() argument
890 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_chan_pause()
893 dw->suspend_chan(dwc, drain); in dwc_chan_pause()
895 while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--) in dwc_chan_pause()
898 set_bit(DW_DMA_IS_PAUSED, &dwc->flags); in dwc_chan_pause()
903 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_pause() local
906 spin_lock_irqsave(&dwc->lock, flags); in dwc_pause()
907 dwc_chan_pause(dwc, false); in dwc_pause()
908 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_pause()
913 static inline void dwc_chan_resume(struct dw_dma_chan *dwc, bool drain) in dwc_chan_resume() argument
915 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_chan_resume()
917 dw->resume_chan(dwc, drain); in dwc_chan_resume()
919 clear_bit(DW_DMA_IS_PAUSED, &dwc->flags); in dwc_chan_resume()
924 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_resume() local
927 spin_lock_irqsave(&dwc->lock, flags); in dwc_resume()
929 if (test_bit(DW_DMA_IS_PAUSED, &dwc->flags)) in dwc_resume()
930 dwc_chan_resume(dwc, false); in dwc_resume()
932 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_resume()
939 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_terminate_all() local
945 spin_lock_irqsave(&dwc->lock, flags); in dwc_terminate_all()
947 clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); in dwc_terminate_all()
949 dwc_chan_pause(dwc, true); in dwc_terminate_all()
951 dwc_chan_disable(dw, dwc); in dwc_terminate_all()
953 dwc_chan_resume(dwc, true); in dwc_terminate_all()
956 list_splice_init(&dwc->queue, &list); in dwc_terminate_all()
957 list_splice_init(&dwc->active_list, &list); in dwc_terminate_all()
959 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_terminate_all()
963 dwc_descriptor_complete(dwc, desc, false); in dwc_terminate_all()
968 static struct dw_desc *dwc_find_desc(struct dw_dma_chan *dwc, dma_cookie_t c) in dwc_find_desc() argument
972 list_for_each_entry(desc, &dwc->active_list, desc_node) in dwc_find_desc()
979 static u32 dwc_get_residue_and_status(struct dw_dma_chan *dwc, dma_cookie_t cookie, in dwc_get_residue_and_status() argument
986 spin_lock_irqsave(&dwc->lock, flags); in dwc_get_residue_and_status()
988 desc = dwc_find_desc(dwc, cookie); in dwc_get_residue_and_status()
990 if (desc == dwc_first_active(dwc)) { in dwc_get_residue_and_status()
992 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags) && residue) in dwc_get_residue_and_status()
993 residue -= dwc_get_sent(dwc); in dwc_get_residue_and_status()
994 if (test_bit(DW_DMA_IS_PAUSED, &dwc->flags)) in dwc_get_residue_and_status()
1003 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_get_residue_and_status()
1012 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_tx_status() local
1019 dwc_scan_descriptors(to_dw_dma(chan->device), dwc); in dwc_tx_status()
1025 dma_set_residue(txstate, dwc_get_residue_and_status(dwc, cookie, &ret)); in dwc_tx_status()
1031 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_issue_pending() local
1034 spin_lock_irqsave(&dwc->lock, flags); in dwc_issue_pending()
1035 if (list_empty(&dwc->active_list)) in dwc_issue_pending()
1036 dwc_dostart_first_queued(dwc); in dwc_issue_pending()
1037 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_issue_pending()
1063 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_alloc_chan_resources() local
1069 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_alloc_chan_resources()
1093 dw->in_use |= dwc->mask; in dwc_alloc_chan_resources()
1100 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_free_chan_resources() local
1105 dwc->descs_allocated); in dwc_free_chan_resources()
1108 BUG_ON(!list_empty(&dwc->active_list)); in dwc_free_chan_resources()
1109 BUG_ON(!list_empty(&dwc->queue)); in dwc_free_chan_resources()
1110 BUG_ON(dma_readl(to_dw_dma(chan->device), CH_EN) & dwc->mask); in dwc_free_chan_resources()
1112 spin_lock_irqsave(&dwc->lock, flags); in dwc_free_chan_resources()
1115 memset(&dwc->dws, 0, sizeof(struct dw_dma_slave)); in dwc_free_chan_resources()
1118 channel_clear_bit(dw, MASK.XFER, dwc->mask); in dwc_free_chan_resources()
1119 channel_clear_bit(dw, MASK.BLOCK, dwc->mask); in dwc_free_chan_resources()
1120 channel_clear_bit(dw, MASK.ERROR, dwc->mask); in dwc_free_chan_resources()
1122 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_free_chan_resources()
1125 dw->in_use &= ~dwc->mask; in dwc_free_chan_resources()
1134 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_caps() local
1136 caps->max_burst = dwc->max_burst; in dwc_caps()
1145 if (dwc->nollp) in dwc_caps()
1237 struct dw_dma_chan *dwc = &dw->chan[i]; in do_dma_probe() local
1239 dwc->chan.device = &dw->dma; in do_dma_probe()
1240 dma_cookie_init(&dwc->chan); in do_dma_probe()
1242 list_add_tail(&dwc->chan.device_node, in do_dma_probe()
1245 list_add(&dwc->chan.device_node, &dw->dma.channels); in do_dma_probe()
1249 dwc->priority = pdata->nr_channels - i - 1; in do_dma_probe()
1251 dwc->priority = i; in do_dma_probe()
1253 dwc->ch_regs = &__dw_regs(dw)->CHAN[i]; in do_dma_probe()
1254 spin_lock_init(&dwc->lock); in do_dma_probe()
1255 dwc->mask = 1 << i; in do_dma_probe()
1257 INIT_LIST_HEAD(&dwc->active_list); in do_dma_probe()
1258 INIT_LIST_HEAD(&dwc->queue); in do_dma_probe()
1260 channel_clear_bit(dw, CH_EN, dwc->mask); in do_dma_probe()
1262 dwc->direction = DMA_TRANS_NONE; in do_dma_probe()
1278 dwc->block_size = in do_dma_probe()
1288 dwc->nollp = in do_dma_probe()
1291 dwc->max_burst = in do_dma_probe()
1294 dwc->block_size = pdata->block_size; in do_dma_probe()
1295 dwc->nollp = !pdata->multi_block[i]; in do_dma_probe()
1296 dwc->max_burst = pdata->max_burst[i] ?: DW_DMA_MAX_BURST; in do_dma_probe()
1365 struct dw_dma_chan *dwc, *_dwc; in do_dma_remove() local
1375 list_for_each_entry_safe(dwc, _dwc, &dw->dma.channels, in do_dma_remove()
1377 list_del(&dwc->chan.device_node); in do_dma_remove()
1378 channel_clear_bit(dw, CH_EN, dwc->mask); in do_dma_remove()
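
Note: the fragments above are the provider-side callbacks of the Linux dmaengine framework. As a rough illustration of how they end up being exercised, below is a minimal, hypothetical client-side sketch (not part of this driver); the helper name, FIFO address, bus width and burst size are assumptions, and the channel is presumed to have been obtained beforehand with dma_request_chan(). dmaengine_slave_config() reaches dwc_config(), dmaengine_prep_slave_sg() reaches dwc_prep_slave_sg(), dmaengine_submit() reaches dwc_tx_submit(), and dma_async_issue_pending() reaches dwc_issue_pending().

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Hypothetical client: start a device-to-memory transfer on @chan. */
static int example_start_rx(struct dma_chan *chan, struct scatterlist *sgl,
			    unsigned int sg_len, dma_addr_t fifo_addr)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= fifo_addr,
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,	/* assumed */
		.src_maxburst	= 8,				/* assumed */
	};
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	int ret;

	/* Lands in dwc_config() -> dwc_verify_maxburst()/dwc_verify_*_buswidth() */
	ret = dmaengine_slave_config(chan, &cfg);
	if (ret)
		return ret;

	/* Lands in dwc_prep_slave_sg(), which builds the descriptor chain */
	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
				       DMA_PREP_INTERRUPT);
	if (!desc)
		return -ENOMEM;

	/* Lands in dwc_tx_submit(), which appends the descriptor to dwc->queue */
	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return cookie;

	/* Lands in dwc_issue_pending() -> dwc_dostart_first_queued() */
	dma_async_issue_pending(chan);

	return 0;
}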