--- core.c (078165779608873e7b6eae1316a39c73af9f3edc)
+++ core.c (69da8be90d5e85e60b5377c47384154b9dabf592)
 /*
  * Core driver for the Synopsys DesignWare DMA Controller
  *
  * Copyright (C) 2007-2008 Atmel Corporation
  * Copyright (C) 2010-2011 ST Microelectronics
  * Copyright (C) 2013 Intel Corporation
  *
  * This program is free software; you can redistribute it and/or modify

--- 124 unchanged lines hidden ---

                 dma_pool_free(dw->desc_pool, child, child->txd.phys);
                 dwc->descs_allocated--;
         }

         dma_pool_free(dw->desc_pool, desc, desc->txd.phys);
         dwc->descs_allocated--;
 }

-static void dwc_initialize_chan_idma32(struct dw_dma_chan *dwc)
-{
-        u32 cfghi = 0;
-        u32 cfglo = 0;
-
-        /* Set default burst alignment */
-        cfglo |= IDMA32C_CFGL_DST_BURST_ALIGN | IDMA32C_CFGL_SRC_BURST_ALIGN;
-
-        /* Low 4 bits of the request lines */
-        cfghi |= IDMA32C_CFGH_DST_PER(dwc->dws.dst_id & 0xf);
-        cfghi |= IDMA32C_CFGH_SRC_PER(dwc->dws.src_id & 0xf);
-
-        /* Request line extension (2 bits) */
-        cfghi |= IDMA32C_CFGH_DST_PER_EXT(dwc->dws.dst_id >> 4 & 0x3);
-        cfghi |= IDMA32C_CFGH_SRC_PER_EXT(dwc->dws.src_id >> 4 & 0x3);
-
-        channel_writel(dwc, CFG_LO, cfglo);
-        channel_writel(dwc, CFG_HI, cfghi);
-}
-
-static void dwc_initialize_chan_dw(struct dw_dma_chan *dwc)
-{
-        struct dw_dma *dw = to_dw_dma(dwc->chan.device);
-        u32 cfghi = DWC_CFGH_FIFO_MODE;
-        u32 cfglo = DWC_CFGL_CH_PRIOR(dwc->priority);
-        bool hs_polarity = dwc->dws.hs_polarity;
-
-        cfghi |= DWC_CFGH_DST_PER(dwc->dws.dst_id);
-        cfghi |= DWC_CFGH_SRC_PER(dwc->dws.src_id);
-        cfghi |= DWC_CFGH_PROTCTL(dw->pdata->protctl);
-
-        /* Set polarity of handshake interface */
-        cfglo |= hs_polarity ? DWC_CFGL_HS_DST_POL | DWC_CFGL_HS_SRC_POL : 0;
-
-        channel_writel(dwc, CFG_LO, cfglo);
-        channel_writel(dwc, CFG_HI, cfghi);
-}
-
 static void dwc_initialize(struct dw_dma_chan *dwc)
 {
         struct dw_dma *dw = to_dw_dma(dwc->chan.device);

         if (test_bit(DW_DMA_IS_INITIALIZED, &dwc->flags))
                 return;

-        if (dw->pdata->is_idma32)
-                dwc_initialize_chan_idma32(dwc);
-        else
-                dwc_initialize_chan_dw(dwc);
+        dw->initialize_chan(dwc);

         /* Enable interrupts */
         channel_set_bit(dw, MASK.XFER, dwc->mask);
         channel_set_bit(dw, MASK.ERROR, dwc->mask);

         set_bit(DW_DMA_IS_INITIALIZED, &dwc->flags);
 }

--- 12 unchanged lines hidden ---

 static inline void dwc_chan_disable(struct dw_dma *dw, struct dw_dma_chan *dwc)
 {
         channel_clear_bit(dw, CH_EN, dwc->mask);
         while (dma_readl(dw, CH_EN) & dwc->mask)
                 cpu_relax();
 }

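The hunk above replaces the per-call is_idma32 branch in dwc_initialize() with an indirect call through the controller descriptor: the two variant-specific setup routines leave the common core, and the core only sees dw->initialize_chan. A minimal sketch of that dispatch pattern follows; the structure layout, the stub callbacks, and main() are assumptions made for illustration, not the driver's real definitions.

#include <stdio.h>

/* Stand-ins for the driver types; illustration only. */
struct dw_dma_chan { int id; };

struct dw_dma {
        /* Bound once per controller variant instead of branching on
         * pdata->is_idma32 at every call site. */
        void (*initialize_chan)(struct dw_dma_chan *dwc);
};

static void initialize_chan_dw(struct dw_dma_chan *dwc)
{
        printf("DW channel %d init\n", dwc->id);
}

static void initialize_chan_idma32(struct dw_dma_chan *dwc)
{
        printf("iDMA 32-bit channel %d init\n", dwc->id);
}

int main(void)
{
        struct dw_dma dw = { .initialize_chan = initialize_chan_dw };
        struct dw_dma_chan dwc = { .id = 0 };

        dw.initialize_chan(&dwc);       /* what dwc_initialize() now does */

        dw.initialize_chan = initialize_chan_idma32;    /* other variant */
        dw.initialize_chan(&dwc);
        return 0;
}
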
-static u32 bytes2block(struct dw_dma_chan *dwc, size_t bytes,
-                       unsigned int width, size_t *len)
-{
-        struct dw_dma *dw = to_dw_dma(dwc->chan.device);
-        u32 block;
-
-        /* Always in bytes for iDMA 32-bit */
-        if (dw->pdata->is_idma32)
-                width = 0;
-
-        if ((bytes >> width) > dwc->block_size) {
-                block = dwc->block_size;
-                *len = block << width;
-        } else {
-                block = bytes >> width;
-                *len = bytes;
-        }
-
-        return block;
-}
-
-static size_t block2bytes(struct dw_dma_chan *dwc, u32 block, u32 width)
-{
-        struct dw_dma *dw = to_dw_dma(dwc->chan.device);
-
-        if (dw->pdata->is_idma32)
-                return IDMA32C_CTLH_BLOCK_TS(block);
-
-        return DWC_CTLH_BLOCK_TS(block) << width;
-}
-
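The two helpers removed above (reached in the new code as dw->bytes2block and dw->block2bytes) convert between a byte count and the hardware block count: blocks are counted in transfer-width units for DesignWare and in plain bytes for iDMA 32-bit (width forced to 0). A small worked example of that arithmetic, using an illustrative block-size limit rather than a real hardware value:

#include <stddef.h>
#include <stdio.h>

int main(void)
{
        size_t bytes = 8000;            /* requested transfer length */
        unsigned int width = 2;         /* log2 of transfer width: 4-byte items */
        unsigned int max_block = 4095;  /* illustrative per-channel limit */
        unsigned int block;
        size_t covered;

        /* bytes2block(): clamp the item count to the channel's block size */
        if ((bytes >> width) > max_block) {
                block = max_block;
                covered = (size_t)block << width;
        } else {
                block = bytes >> width;
                covered = bytes;
        }
        printf("block=%u covers %zu bytes\n", block, covered);

        /* block2bytes(): shift back by the width (width = 0 for iDMA 32-bit,
         * which counts blocks directly in bytes) */
        printf("back to bytes: %zu\n", (size_t)block << width);
        return 0;
}
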
 /*----------------------------------------------------------------------*/

 /* Perform single block transfer */
 static inline void dwc_do_single_block(struct dw_dma_chan *dwc,
                                        struct dw_desc *desc)
 {
         struct dw_dma *dw = to_dw_dma(dwc->chan.device);
         u32 ctllo;

--- 129 unchanged lines hidden ---

         list_for_each_entry_safe(desc, _desc, &list, desc_node)
                 dwc_descriptor_complete(dwc, desc, true);
 }

 /* Returns how many bytes were already received from source */
 static inline u32 dwc_get_sent(struct dw_dma_chan *dwc)
 {
+        struct dw_dma *dw = to_dw_dma(dwc->chan.device);
         u32 ctlhi = channel_readl(dwc, CTL_HI);
         u32 ctllo = channel_readl(dwc, CTL_LO);

-        return block2bytes(dwc, ctlhi, ctllo >> 4 & 7);
+        return dw->block2bytes(dwc, ctlhi, ctllo >> 4 & 7);
 }

 static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc)
 {
         dma_addr_t llp;
         struct dw_desc *desc, *_desc;
         struct dw_desc *child;
         u32 status_xfer;

--- 240 unchanged lines hidden ---

         struct dw_desc *first;
         struct dw_desc *prev;
         size_t xfer_count;
         size_t offset;
         u8 m_master = dwc->dws.m_master;
         unsigned int src_width;
         unsigned int dst_width;
         unsigned int data_width = dw->pdata->data_width[m_master];
-        u32 ctllo;
+        u32 ctllo, ctlhi;
         u8 lms = DWC_LLP_LMS(m_master);

         dev_vdbg(chan2dev(chan),
                         "%s: d%pad s%pad l0x%zx f0x%lx\n", __func__,
                         &dest, &src, len, flags);

         if (unlikely(!len)) {
                 dev_dbg(chan2dev(chan), "%s: length is zero!\n", __func__);

--- 12 unchanged lines hidden ---

                         | DWC_CTLL_FC_M2M;
         prev = first = NULL;

         for (offset = 0; offset < len; offset += xfer_count) {
                 desc = dwc_desc_get(dwc);
                 if (!desc)
                         goto err_desc_get;

+                ctlhi = dw->bytes2block(dwc, len - offset, src_width, &xfer_count);
+
                 lli_write(desc, sar, src + offset);
                 lli_write(desc, dar, dest + offset);
                 lli_write(desc, ctllo, ctllo);
-                lli_write(desc, ctlhi, bytes2block(dwc, len - offset, src_width, &xfer_count));
+                lli_write(desc, ctlhi, ctlhi);
                 desc->len = xfer_count;

                 if (!first) {
                         first = desc;
                 } else {
                         lli_write(prev, llp, desc->txd.phys | lms);
                         list_add_tail(&desc->desc_node, &first->tx_list);
                 }

--- 21 unchanged lines hidden ---

                 unsigned int sg_len, enum dma_transfer_direction direction,
                 unsigned long flags, void *context)
 {
         struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
         struct dw_dma *dw = to_dw_dma(chan->device);
         struct dma_slave_config *sconfig = &dwc->dma_sconfig;
         struct dw_desc *prev;
         struct dw_desc *first;
-        u32 ctllo;
+        u32 ctllo, ctlhi;
         u8 m_master = dwc->dws.m_master;
         u8 lms = DWC_LLP_LMS(m_master);
         dma_addr_t reg;
         unsigned int reg_width;
         unsigned int mem_width;
         unsigned int data_width = dw->pdata->data_width[m_master];
         unsigned int i;
         struct scatterlist *sg;

--- 30 unchanged lines hidden ---

                         mem_width = __ffs(data_width | mem | len);

 slave_sg_todev_fill_desc:
                         desc = dwc_desc_get(dwc);
                         if (!desc)
                                 goto err_desc_get;

+                        ctlhi = dw->bytes2block(dwc, len, mem_width, &dlen);
+
                         lli_write(desc, sar, mem);
                         lli_write(desc, dar, reg);
-                        lli_write(desc, ctlhi, bytes2block(dwc, len, mem_width, &dlen));
+                        lli_write(desc, ctlhi, ctlhi);
                         lli_write(desc, ctllo, ctllo | DWC_CTLL_SRC_WIDTH(mem_width));
                         desc->len = dlen;

                         if (!first) {
                                 first = desc;
                         } else {
                                 lli_write(prev, llp, desc->txd.phys | lms);
                                 list_add_tail(&desc->desc_node, &first->tx_list);

--- 27 unchanged lines hidden ---

                         mem = sg_dma_address(sg);
                         len = sg_dma_len(sg);

 slave_sg_fromdev_fill_desc:
                         desc = dwc_desc_get(dwc);
                         if (!desc)
                                 goto err_desc_get;

+                        ctlhi = dw->bytes2block(dwc, len, reg_width, &dlen);
+
                         lli_write(desc, sar, reg);
                         lli_write(desc, dar, mem);
-                        lli_write(desc, ctlhi, bytes2block(dwc, len, reg_width, &dlen));
+                        lli_write(desc, ctlhi, ctlhi);
                         mem_width = __ffs(data_width | mem | dlen);
                         lli_write(desc, ctllo, ctllo | DWC_CTLL_DST_WIDTH(mem_width));
                         desc->len = dlen;

                         if (!first) {
                                 first = desc;
                         } else {
                                 lli_write(prev, llp, desc->txd.phys | lms);

--- 43 unchanged lines hidden ---

         return true;
 }
 EXPORT_SYMBOL_GPL(dw_dma_filter);

 static int dwc_config(struct dma_chan *chan, struct dma_slave_config *sconfig)
 {
         struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
-        struct dma_slave_config *sc = &dwc->dma_sconfig;
         struct dw_dma *dw = to_dw_dma(chan->device);
-        /*
-         * Fix sconfig's burst size according to dw_dmac. We need to convert
-         * them as:
-         * 1 -> 0, 4 -> 1, 8 -> 2, 16 -> 3.
-         *
-         * NOTE: burst size 2 is not supported by DesignWare controller.
-         *       iDMA 32-bit supports it.
-         */
-        u32 s = dw->pdata->is_idma32 ? 1 : 2;

         memcpy(&dwc->dma_sconfig, sconfig, sizeof(*sconfig));

-        sc->src_maxburst = sc->src_maxburst > 1 ? fls(sc->src_maxburst) - s : 0;
-        sc->dst_maxburst = sc->dst_maxburst > 1 ? fls(sc->dst_maxburst) - s : 0;
+        dw->encode_maxburst(dwc, &dwc->dma_sconfig.src_maxburst);
+        dw->encode_maxburst(dwc, &dwc->dma_sconfig.dst_maxburst);

         return 0;
 }

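The burst-size conversion that dwc_config() used to do inline now sits behind dw->encode_maxburst. The mapping in the removed comment (1 -> 0, 4 -> 1, 8 -> 2, 16 -> 3) falls out of fls(); a worked example follows, with a local fls() stand-in since the kernel helper is not available outside the kernel:

#include <stdio.h>

/* Local stand-in for the kernel's fls(): 1-based index of the highest set bit. */
static unsigned int fls_example(unsigned int x)
{
        unsigned int r = 0;

        while (x) {
                r++;
                x >>= 1;
        }
        return r;
}

int main(void)
{
        unsigned int bursts[] = { 1, 4, 8, 16 };
        unsigned int i;

        /* DesignWare: subtract 2, giving 1->0, 4->1, 8->2, 16->3.
         * iDMA 32-bit subtracts 1 instead, which also makes burst 2 encodable. */
        for (i = 0; i < 4; i++) {
                unsigned int b = bursts[i];

                printf("%2u -> %u\n", b, b > 1 ? fls_example(b) - 2 : 0);
        }
        return 0;
}
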
 static void dwc_chan_pause(struct dw_dma_chan *dwc, bool drain)
 {
         struct dw_dma *dw = to_dw_dma(dwc->chan.device);
         unsigned int count = 20;        /* timeout iterations */
-        u32 cfglo;

-        cfglo = channel_readl(dwc, CFG_LO);
-        if (dw->pdata->is_idma32) {
-                if (drain)
-                        cfglo |= IDMA32C_CFGL_CH_DRAIN;
-                else
-                        cfglo &= ~IDMA32C_CFGL_CH_DRAIN;
-        }
-        channel_writel(dwc, CFG_LO, cfglo | DWC_CFGL_CH_SUSP);
+        dw->suspend_chan(dwc, drain);
+
         while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--)
                 udelay(2);

         set_bit(DW_DMA_IS_PAUSED, &dwc->flags);
 }

 static int dwc_pause(struct dma_chan *chan)
 {

--- 132 unchanged lines hidden ---

         spin_lock_irqsave(&dwc->lock, flags);
         if (list_empty(&dwc->active_list))
                 dwc_dostart_first_queued(dwc);
         spin_unlock_irqrestore(&dwc->lock, flags);
 }

 /*----------------------------------------------------------------------*/

-/*
- * Program FIFO size of channels.
- *
- * By default full FIFO (512 bytes) is assigned to channel 0. Here we
- * slice FIFO on equal parts between channels.
- */
-static void idma32_fifo_partition(struct dw_dma *dw)
+void do_dw_dma_off(struct dw_dma *dw)
 {
-        u64 value = IDMA32C_FP_PSIZE_CH0(64) | IDMA32C_FP_PSIZE_CH1(64) |
-                    IDMA32C_FP_UPDATE;
-        u64 fifo_partition = 0;
-
-        if (!dw->pdata->is_idma32)
-                return;
-
-        /* Fill FIFO_PARTITION low bits (Channels 0..1, 4..5) */
-        fifo_partition |= value << 0;
-
-        /* Fill FIFO_PARTITION high bits (Channels 2..3, 6..7) */
-        fifo_partition |= value << 32;
-
-        /* Program FIFO Partition registers - 64 bytes per channel */
-        idma32_writeq(dw, FIFO_PARTITION1, fifo_partition);
-        idma32_writeq(dw, FIFO_PARTITION0, fifo_partition);
-}
-
-static void dw_dma_off(struct dw_dma *dw)
-{
         unsigned int i;

         dma_writel(dw, CFG, 0);

         channel_clear_bit(dw, MASK.XFER, dw->all_chan_mask);
         channel_clear_bit(dw, MASK.BLOCK, dw->all_chan_mask);
         channel_clear_bit(dw, MASK.SRC_TRAN, dw->all_chan_mask);
         channel_clear_bit(dw, MASK.DST_TRAN, dw->all_chan_mask);
         channel_clear_bit(dw, MASK.ERROR, dw->all_chan_mask);

         while (dma_readl(dw, CFG) & DW_CFG_DMA_EN)
                 cpu_relax();

         for (i = 0; i < dw->dma.chancnt; i++)
                 clear_bit(DW_DMA_IS_INITIALIZED, &dw->chan[i].flags);
 }

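The idma32_fifo_partition() helper removed above is dropped from the common core here (presumably alongside the rest of the iDMA 32-bit specifics). The arithmetic it encoded is simple: the 512-byte FIFO is sliced equally, 512 / 8 = 64 bytes per channel, and the same programming value covers both channel groups via the low and high 32 bits before being written to both partition registers. A sketch of just that arithmetic follows; the field-packing helper is hypothetical, since the real IDMA32C_FP_* bit layout is not shown in this diff.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical field packer standing in for IDMA32C_FP_PSIZE_CHx(). */
static uint64_t fp_psize(unsigned int bytes, unsigned int channel_slot)
{
        return (uint64_t)bytes << (channel_slot * 16);  /* layout assumed */
}

int main(void)
{
        unsigned int fifo_bytes = 512, channels = 8;
        unsigned int slice = fifo_bytes / channels;     /* 64 bytes per channel */
        uint64_t value = fp_psize(slice, 0) | fp_psize(slice, 1);
        uint64_t fifo_partition = 0;

        fifo_partition |= value << 0;   /* channels 0..1, 4..5 */
        fifo_partition |= value << 32;  /* channels 2..3, 6..7 */

        printf("slice=%u bytes, partition value=0x%016llx\n",
               slice, (unsigned long long)fifo_partition);
        return 0;
}
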
-static void dw_dma_on(struct dw_dma *dw)
+void do_dw_dma_on(struct dw_dma *dw)
 {
         dma_writel(dw, CFG, DW_CFG_DMA_EN);
 }

 static int dwc_alloc_chan_resources(struct dma_chan *chan)
 {
         struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
         struct dw_dma *dw = to_dw_dma(chan->device);

--- 19 unchanged lines hidden ---

          */
         if (chan->private && !dw_dma_filter(chan, chan->private)) {
                 dev_warn(chan2dev(chan), "Wrong controller-specific data\n");
                 return -EINVAL;
         }

         /* Enable controller here if needed */
         if (!dw->in_use)
-                dw_dma_on(dw);
+                do_dw_dma_on(dw);
         dw->in_use |= dwc->mask;

         return 0;
 }

 static void dwc_free_chan_resources(struct dma_chan *chan)
 {
         struct dw_dma_chan *dwc = to_dw_dma_chan(chan);

--- 21 unchanged lines hidden ---

         channel_clear_bit(dw, MASK.BLOCK, dwc->mask);
         channel_clear_bit(dw, MASK.ERROR, dwc->mask);

         spin_unlock_irqrestore(&dwc->lock, flags);

         /* Disable controller in case it was a last user */
         dw->in_use &= ~dwc->mask;
         if (!dw->in_use)
-                dw_dma_off(dw);
+                do_dw_dma_off(dw);

         dev_vdbg(chan2dev(chan), "%s: done\n", __func__);
 }

-int dw_dma_probe(struct dw_dma_chip *chip)
+int do_dma_probe(struct dw_dma_chip *chip)
 {
+        struct dw_dma *dw = chip->dw;
         struct dw_dma_platform_data *pdata;
-        struct dw_dma *dw;
         bool autocfg = false;
         unsigned int dw_params;
         unsigned int i;
         int err;

-        dw = devm_kzalloc(chip->dev, sizeof(*dw), GFP_KERNEL);
-        if (!dw)
-                return -ENOMEM;
-
         dw->pdata = devm_kzalloc(chip->dev, sizeof(*dw->pdata), GFP_KERNEL);
         if (!dw->pdata)
                 return -ENOMEM;

         dw->regs = chip->regs;
-        chip->dw = dw;

         pm_runtime_get_sync(chip->dev);

         if (!chip->pdata) {
                 dw_params = dma_readl(dw, DW_PARAMS);
                 dev_dbg(chip->dev, "DW_PARAMS: 0x%08x\n", dw_params);

                 autocfg = dw_params >> DW_PARAMS_EN & 1;

--- 33 unchanged lines hidden ---

                 err = -ENOMEM;
                 goto err_pdata;
         }

         /* Calculate all channel mask before DMA setup */
         dw->all_chan_mask = (1 << pdata->nr_channels) - 1;

         /* Force dma off, just in case */
-        dw_dma_off(dw);
+        dw->disable(dw);

-        idma32_fifo_partition(dw);
-
         /* Device and instance ID for IRQ and DMA pool */
-        if (pdata->is_idma32)
-                snprintf(dw->name, sizeof(dw->name), "idma32:dmac%d", chip->id);
-        else
-                snprintf(dw->name, sizeof(dw->name), "dw:dmac%d", chip->id);
+        dw->set_device_name(dw, chip->id);

         /* Create a pool of consistent memory blocks for hardware descriptors */
         dw->desc_pool = dmam_pool_create(dw->name, chip->dev,
                                          sizeof(struct dw_desc), 4, 0);
         if (!dw->desc_pool) {
                 dev_err(chip->dev, "No memory for descriptors dma pool\n");
                 err = -ENOMEM;
                 goto err_pdata;

--- 105 unchanged lines hidden ---

         return 0;

 err_dma_register:
         free_irq(chip->irq, dw);
 err_pdata:
         pm_runtime_put_sync_suspend(chip->dev);
         return err;
 }
-EXPORT_SYMBOL_GPL(dw_dma_probe);

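With dw_dma_probe() renamed to do_dma_probe() and its EXPORT_SYMBOL_GPL dropped, the caller now allocates struct dw_dma and hands it in via chip->dw before probing. A hedged sketch of how a thin per-variant front end might bind the callbacks seen in this diff and then call into the common core; the stand-in types, the wrapper name, and the exact set of members are assumptions, not the driver's actual code.

#include <stdio.h>

/* Stand-in types; the real dw_dma_chip/dw_dma have many more members. */
struct dw_dma {
        void (*set_device_name)(struct dw_dma *dw, int id);
        void (*disable)(struct dw_dma *dw);
        void (*enable)(struct dw_dma *dw);
};

struct dw_dma_chip {
        int id;
        struct dw_dma *dw;
};

/* Common core, analogous to do_dma_probe(): it only uses whatever callbacks
 * the front end has bound and never asks "is this iDMA 32-bit?". */
static int do_dma_probe_sketch(struct dw_dma_chip *chip)
{
        struct dw_dma *dw = chip->dw;

        dw->disable(dw);
        dw->set_device_name(dw, chip->id);
        dw->enable(dw);
        return 0;
}

static void dw_set_device_name(struct dw_dma *dw, int id)
{
        printf("dw:dmac%d\n", id);      /* name format from the removed snprintf */
}

static void dw_disable(struct dw_dma *dw) { printf("disable\n"); }
static void dw_enable(struct dw_dma *dw)  { printf("enable\n"); }

int main(void)
{
        /* Per-variant front end: allocate, bind callbacks, call the core. */
        struct dw_dma dw = {
                .set_device_name = dw_set_device_name,
                .disable = dw_disable,
                .enable = dw_enable,
        };
        struct dw_dma_chip chip = { .id = 0, .dw = &dw };

        return do_dma_probe_sketch(&chip);
}
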
-int dw_dma_remove(struct dw_dma_chip *chip)
+int do_dma_remove(struct dw_dma_chip *chip)
 {
         struct dw_dma *dw = chip->dw;
         struct dw_dma_chan *dwc, *_dwc;

         pm_runtime_get_sync(chip->dev);

-        dw_dma_off(dw);
+        do_dw_dma_off(dw);
         dma_async_device_unregister(&dw->dma);

         free_irq(chip->irq, dw);
         tasklet_kill(&dw->tasklet);

         list_for_each_entry_safe(dwc, _dwc, &dw->dma.channels,
                         chan.device_node) {
                 list_del(&dwc->chan.device_node);
                 channel_clear_bit(dw, CH_EN, dwc->mask);
         }

         pm_runtime_put_sync_suspend(chip->dev);
         return 0;
 }
-EXPORT_SYMBOL_GPL(dw_dma_remove);

-int dw_dma_disable(struct dw_dma_chip *chip)
+int do_dw_dma_disable(struct dw_dma_chip *chip)
 {
         struct dw_dma *dw = chip->dw;

-        dw_dma_off(dw);
+        dw->disable(dw);
         return 0;
 }
-EXPORT_SYMBOL_GPL(dw_dma_disable);
+EXPORT_SYMBOL_GPL(do_dw_dma_disable);

-int dw_dma_enable(struct dw_dma_chip *chip)
+int do_dw_dma_enable(struct dw_dma_chip *chip)
 {
         struct dw_dma *dw = chip->dw;

-        idma32_fifo_partition(dw);
-
-        dw_dma_on(dw);
+        dw->enable(dw);
         return 0;
 }
-EXPORT_SYMBOL_GPL(dw_dma_enable);
+EXPORT_SYMBOL_GPL(do_dw_dma_enable);

 MODULE_LICENSE("GPL v2");
 MODULE_DESCRIPTION("Synopsys DesignWare DMA Controller core driver");
 MODULE_AUTHOR("Haavard Skinnemoen (Atmel)");
 MODULE_AUTHOR("Viresh Kumar <vireshk@kernel.org>");