Lines Matching +full:dma +full:- +full:drb

1 // SPDX-License-Identifier: GPL-2.0
3 * Lightning Mountain centralized DMA controller driver
5 * Copyright (c) 2016 - 2020 Intel Corporation.
10 #include <linux/dma-mapping.h>
23 #include "../virt-dma.h"
25 #define DRIVER_NAME "lgm-dma"
68 #define DMA_MAX_CLASS (SZ_32 - 1)
107 * If header mode is set in DMA descriptor,
112 * If header mode is not set in DMA descriptor,
131 /* DMA controller capability */
142 #define DMA_ORRC_MAX_CNT (SZ_32 - 1)
149 #define DMA_MAX_DESC_NUM (SZ_8K - 1)
150 #define DMA_CHAN_BOFF_MAX (SZ_256 - 1)
154 #define DMA_HDR_LEN_MAX (SZ_16K - 1)
156 /* DMA flags */
172 #define DMA_MAX_SIZE (BIT(16) - 1)
253 struct ldma_chan *chans; /* channel list on this DMA or port */
283 old_val = readl(d->base + ofs); in ldma_update_bits()
287 writel(new_val, d->base + ofs); in ldma_update_bits()
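
Only the two register accesses of ldma_update_bits() match the query above. For context, the helper is very likely a plain masked read-modify-write on a controller register; the body below is an inferred sketch (struct ldma_dev, d->base and the readl()/writel() lines come from the listing, the masking step is an assumption):

/* Inferred sketch of the masked read-modify-write helper, not copied code. */
static void ldma_update_bits(struct ldma_dev *d, u32 mask, u32 val, u32 ofs)
{
        u32 old_val, new_val;

        old_val = readl(d->base + ofs);
        new_val = (old_val & ~mask) | (val & mask);

        /* Skip the register write when nothing changes. */
        if (new_val != old_val)
                writel(new_val, d->base + ofs);
}
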
307 return !!(c->flags & DMA_TX_CH); in ldma_chan_tx()
312 return !!(c->flags & DMA_HW_DESC); in ldma_chan_is_hw_desc()
320 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_reset()
322 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_reset()
331 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_pkt_arb_cfg()
333 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_pkt_arb_cfg()
342 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_sram_desc_cfg()
344 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_sram_desc_cfg()
352 if (d->inst->type != DMA_TYPE_TX) in ldma_dev_chan_flow_ctl_cfg()
358 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_chan_flow_ctl_cfg()
360 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_chan_flow_ctl_cfg()
369 val |= FIELD_PREP(DMA_CPOLL_CNT, d->pollcnt); in ldma_dev_global_polling_enable()
371 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_global_polling_enable()
373 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_global_polling_enable()
381 if (d->inst->type == DMA_TYPE_MCPY) in ldma_dev_desc_fetch_on_demand_cfg()
387 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_desc_fetch_on_demand_cfg()
389 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_desc_fetch_on_demand_cfg()
398 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_byte_enable_cfg()
400 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_byte_enable_cfg()
409 if (d->inst->type == DMA_TYPE_RX) in ldma_dev_orrc_cfg()
413 if (d->inst->orrc > 0 && d->inst->orrc <= DMA_ORRC_MAX_CNT) in ldma_dev_orrc_cfg()
414 val = DMA_ORRC_EN | FIELD_PREP(DMA_ORRC_ORRCNT, d->inst->orrc); in ldma_dev_orrc_cfg()
416 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_orrc_cfg()
418 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_orrc_cfg()
432 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_df_tout_cfg()
434 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_df_tout_cfg()
442 if (d->inst->type != DMA_TYPE_RX && d->inst->type != DMA_TYPE_MCPY) in ldma_dev_dburst_wr_cfg()
448 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_dburst_wr_cfg()
450 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_dburst_wr_cfg()
458 if (d->inst->type != DMA_TYPE_TX) in ldma_dev_vld_fetch_ack_cfg()
464 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_vld_fetch_ack_cfg()
466 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_vld_fetch_ack_cfg()
475 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_drb_cfg()
477 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_drb_cfg()
487 enable = !!(d->flags & DMA_DFT_DRB); in ldma_dev_cfg()
490 enable = !!(d->flags & DMA_EN_BYTE_EN); in ldma_dev_cfg()
493 enable = !!(d->flags & DMA_CHAN_FLOW_CTL); in ldma_dev_cfg()
496 enable = !!(d->flags & DMA_DESC_FOD); in ldma_dev_cfg()
499 enable = !!(d->flags & DMA_DESC_IN_SRAM); in ldma_dev_cfg()
502 enable = !!(d->flags & DMA_DBURST_WR); in ldma_dev_cfg()
505 enable = !!(d->flags & DMA_VALID_DESC_FETCH_ACK); in ldma_dev_cfg()
508 if (d->ver > DMA_VER22) { in ldma_dev_cfg()
513 dev_dbg(d->dev, "%s Controller 0x%08x configuration done\n", in ldma_dev_cfg()
514 d->inst->name, readl(d->base + DMA_CTRL)); in ldma_dev_cfg()
521 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cctrl_cfg()
526 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_cctrl_cfg()
527 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_cctrl_cfg()
528 reg = readl(d->base + DMA_CCTRL); in ldma_chan_cctrl_cfg()
531 c->flags |= DMA_TX_CH; in ldma_chan_cctrl_cfg()
533 c->flags |= DMA_RX_CH; in ldma_chan_cctrl_cfg()
542 writel(val, d->base + DMA_CCTRL); in ldma_chan_cctrl_cfg()
543 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_cctrl_cfg()
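
Nearly every per-channel helper that follows uses the access pattern visible in ldma_chan_cctrl_cfg(): take the device spinlock, select the channel by writing its number into DMA_CS through ldma_update_bits(), then read or program the selected channel's registers such as DMA_CCTRL. A condensed sketch of that pattern (register names and locking come from the listing; the wrapper itself is hypothetical):

/* Hypothetical condensation of the select-then-access pattern, illustration only. */
static u32 ldma_chan_read_cctrl(struct ldma_dev *d, struct ldma_chan *c)
{
        unsigned long flags;
        u32 val;

        spin_lock_irqsave(&d->dev_lock, flags);
        ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS);  /* select channel c->nr */
        val = readl(d->base + DMA_CCTRL);                 /* per-channel register window */
        spin_unlock_irqrestore(&d->dev_lock, flags);

        return val;
}
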
550 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_init()
555 if (c->nr < MAX_LOWER_CHANS) { in ldma_chan_irq_init()
563 cn_bit = BIT(c->nr & MASK_LOWER_CHANS); in ldma_chan_irq_init()
564 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_irq_init()
565 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_irq_init()
568 writel(0, d->base + DMA_CIE); in ldma_chan_irq_init()
569 writel(DMA_CI_ALL, d->base + DMA_CIS); in ldma_chan_irq_init()
572 writel(cn_bit, d->base + crofs); in ldma_chan_irq_init()
573 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_irq_init()
578 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_set_class()
581 if (d->inst->type == DMA_TYPE_MCPY || val > DMA_MAX_CLASS) in ldma_chan_set_class()
589 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_set_class()
596 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_on()
600 if (WARN_ON(!c->desc_init)) in ldma_chan_on()
601 return -EINVAL; in ldma_chan_on()
603 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_on()
604 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_on()
606 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_on()
608 c->onoff = DMA_CH_ON; in ldma_chan_on()
615 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_off()
620 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_off()
621 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_off()
623 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_off()
625 ret = readl_poll_timeout_atomic(d->base + DMA_CCTRL, val, in ldma_chan_off()
630 c->onoff = DMA_CH_OFF; in ldma_chan_off()
638 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_hw_cfg()
641 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_desc_hw_cfg()
642 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_desc_hw_cfg()
643 writel(lower_32_bits(desc_base), d->base + DMA_CDBA); in ldma_chan_desc_hw_cfg()
652 writel(desc_num, d->base + DMA_CDLEN); in ldma_chan_desc_hw_cfg()
653 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_desc_hw_cfg()
655 c->desc_init = true; in ldma_chan_desc_hw_cfg()
662 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_cfg()
667 dev_err(d->dev, "Channel %d must allocate descriptor first\n", in ldma_chan_desc_cfg()
668 c->nr); in ldma_chan_desc_cfg()
673 dev_err(d->dev, "Channel %d descriptor number out of range %d\n", in ldma_chan_desc_cfg()
674 c->nr, desc_num); in ldma_chan_desc_cfg()
680 c->flags |= DMA_HW_DESC; in ldma_chan_desc_cfg()
681 c->desc_cnt = desc_num; in ldma_chan_desc_cfg()
682 c->desc_phys = desc_base; in ldma_chan_desc_cfg()
688 tx = &ds->vdesc.tx; in ldma_chan_desc_cfg()
696 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_reset()
705 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_reset()
706 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_reset()
708 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_reset()
710 ret = readl_poll_timeout_atomic(d->base + DMA_CCTRL, val, in ldma_chan_reset()
715 c->rst = 1; in ldma_chan_reset()
716 c->desc_init = false; in ldma_chan_reset()
723 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_byte_offset_cfg()
732 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_byte_offset_cfg()
739 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_data_endian_cfg()
748 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_data_endian_cfg()
755 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_endian_cfg()
764 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_desc_endian_cfg()
770 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_hdr_mode_cfg()
783 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_hdr_mode_cfg()
789 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_rxwr_np_cfg()
799 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_rxwr_np_cfg()
805 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_abc_cfg()
808 if (d->ver < DMA_VER32 || ldma_chan_tx(c)) in ldma_chan_abc_cfg()
814 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_abc_cfg()
824 d = p->ldev; in ldma_port_cfg()
825 reg = FIELD_PREP(DMA_PCTRL_TXENDI, p->txendi); in ldma_port_cfg()
826 reg |= FIELD_PREP(DMA_PCTRL_RXENDI, p->rxendi); in ldma_port_cfg()
828 if (d->ver == DMA_VER22) { in ldma_port_cfg()
829 reg |= FIELD_PREP(DMA_PCTRL_TXBL, p->txbl); in ldma_port_cfg()
830 reg |= FIELD_PREP(DMA_PCTRL_RXBL, p->rxbl); in ldma_port_cfg()
832 reg |= FIELD_PREP(DMA_PCTRL_PDEN, p->pkt_drop); in ldma_port_cfg()
834 if (p->txbl == DMA_BURSTL_32DW) in ldma_port_cfg()
836 else if (p->txbl == DMA_BURSTL_16DW) in ldma_port_cfg()
841 if (p->rxbl == DMA_BURSTL_32DW) in ldma_port_cfg()
843 else if (p->rxbl == DMA_BURSTL_16DW) in ldma_port_cfg()
849 spin_lock_irqsave(&d->dev_lock, flags); in ldma_port_cfg()
850 writel(p->portid, d->base + DMA_PS); in ldma_port_cfg()
851 writel(reg, d->base + DMA_PCTRL); in ldma_port_cfg()
852 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_port_cfg()
854 reg = readl(d->base + DMA_PCTRL); /* read back */ in ldma_port_cfg()
855 dev_dbg(d->dev, "Port Control 0x%08x configuration done\n", reg); in ldma_port_cfg()
862 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cfg()
866 reg = c->pden ? DMA_CCTRL_PDEN : 0; in ldma_chan_cfg()
867 reg |= c->onoff ? DMA_CCTRL_ON : 0; in ldma_chan_cfg()
868 reg |= c->rst ? DMA_CCTRL_RST : 0; in ldma_chan_cfg()
873 if (d->ver <= DMA_VER22) in ldma_chan_cfg()
876 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_cfg()
877 ldma_chan_set_class(c, c->nr); in ldma_chan_cfg()
878 ldma_chan_byte_offset_cfg(c, c->boff_len); in ldma_chan_cfg()
879 ldma_chan_data_endian_cfg(c, c->data_endian_en, c->data_endian); in ldma_chan_cfg()
880 ldma_chan_desc_endian_cfg(c, c->desc_endian_en, c->desc_endian); in ldma_chan_cfg()
881 ldma_chan_hdr_mode_cfg(c, c->hdrm_len, c->hdrm_csum); in ldma_chan_cfg()
882 ldma_chan_rxwr_np_cfg(c, c->desc_rx_np); in ldma_chan_cfg()
883 ldma_chan_abc_cfg(c, c->abc_en); in ldma_chan_cfg()
884 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_cfg()
887 ldma_chan_desc_hw_cfg(c, c->desc_phys, c->desc_cnt); in ldma_chan_cfg()
894 unsigned long ch_mask = (unsigned long)d->channels_mask; in ldma_dev_init()
900 spin_lock_init(&d->dev_lock); in ldma_dev_init()
904 /* DMA port initialization */ in ldma_dev_init()
905 for (i = 0; i < d->port_nrs; i++) { in ldma_dev_init()
906 p = &d->ports[i]; in ldma_dev_init()
910 /* DMA channel initialization */ in ldma_dev_init()
911 for_each_set_bit(j, &ch_mask, d->chan_nrs) { in ldma_dev_init()
912 c = &d->chans[j]; in ldma_dev_init()
919 struct fwnode_handle *fwnode = dev_fwnode(d->dev); in ldma_parse_dt()
923 if (fwnode_property_read_bool(fwnode, "intel,dma-byte-en")) in ldma_parse_dt()
924 d->flags |= DMA_EN_BYTE_EN; in ldma_parse_dt()
926 if (fwnode_property_read_bool(fwnode, "intel,dma-dburst-wr")) in ldma_parse_dt()
927 d->flags |= DMA_DBURST_WR; in ldma_parse_dt()
929 if (fwnode_property_read_bool(fwnode, "intel,dma-drb")) in ldma_parse_dt()
930 d->flags |= DMA_DFT_DRB; in ldma_parse_dt()
932 if (fwnode_property_read_u32(fwnode, "intel,dma-poll-cnt", in ldma_parse_dt()
933 &d->pollcnt)) in ldma_parse_dt()
934 d->pollcnt = DMA_DFT_POLL_CNT; in ldma_parse_dt()
936 if (d->inst->chan_fc) in ldma_parse_dt()
937 d->flags |= DMA_CHAN_FLOW_CTL; in ldma_parse_dt()
939 if (d->inst->desc_fod) in ldma_parse_dt()
940 d->flags |= DMA_DESC_FOD; in ldma_parse_dt()
942 if (d->inst->desc_in_sram) in ldma_parse_dt()
943 d->flags |= DMA_DESC_IN_SRAM; in ldma_parse_dt()
945 if (d->inst->valid_desc_fetch_ack) in ldma_parse_dt()
946 d->flags |= DMA_VALID_DESC_FETCH_ACK; in ldma_parse_dt()
948 if (d->ver > DMA_VER22) { in ldma_parse_dt()
949 if (!d->port_nrs) in ldma_parse_dt()
950 return -EINVAL; in ldma_parse_dt()
952 for (i = 0; i < d->port_nrs; i++) { in ldma_parse_dt()
953 p = &d->ports[i]; in ldma_parse_dt()
954 p->rxendi = DMA_DFT_ENDIAN; in ldma_parse_dt()
955 p->txendi = DMA_DFT_ENDIAN; in ldma_parse_dt()
956 p->rxbl = DMA_DFT_BURST; in ldma_parse_dt()
957 p->txbl = DMA_DFT_BURST; in ldma_parse_dt()
958 p->pkt_drop = DMA_PKT_DROP_DIS; in ldma_parse_dt()
968 struct ldma_chan *c = ds->chan; in dma_free_desc_resource()
970 dma_pool_free(c->desc_pool, ds->desc_hw, ds->desc_phys); in dma_free_desc_resource()
977 struct device *dev = c->vchan.chan.device->dev; in dma_alloc_desc_resource()
980 if (num > c->desc_num) { in dma_alloc_desc_resource()
981 dev_err(dev, "sg num %d exceed max %d\n", num, c->desc_num); in dma_alloc_desc_resource()
989 ds->chan = c; in dma_alloc_desc_resource()
990 ds->desc_hw = dma_pool_zalloc(c->desc_pool, GFP_ATOMIC, in dma_alloc_desc_resource()
991 &ds->desc_phys); in dma_alloc_desc_resource()
992 if (!ds->desc_hw) { in dma_alloc_desc_resource()
997 ds->desc_cnt = num; in dma_alloc_desc_resource()
1004 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_en()
1007 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_irq_en()
1008 writel(c->nr, d->base + DMA_CS); in ldma_chan_irq_en()
1009 writel(DMA_CI_EOP, d->base + DMA_CIE); in ldma_chan_irq_en()
1010 writel(BIT(c->nr), d->base + DMA_IRNEN); in ldma_chan_irq_en()
1011 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_irq_en()
1017 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_issue_pending()
1020 if (d->ver == DMA_VER22) { in ldma_issue_pending()
1021 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_issue_pending()
1022 if (vchan_issue_pending(&c->vchan)) { in ldma_issue_pending()
1026 vdesc = vchan_next_desc(&c->vchan); in ldma_issue_pending()
1028 c->ds = NULL; in ldma_issue_pending()
1029 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_issue_pending()
1032 list_del(&vdesc->node); in ldma_issue_pending()
1033 c->ds = to_lgm_dma_desc(vdesc); in ldma_issue_pending()
1034 ldma_chan_desc_hw_cfg(c, c->ds->desc_phys, c->ds->desc_cnt); in ldma_issue_pending()
1037 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_issue_pending()
1050 cancel_work_sync(&c->work); in ldma_synchronize()
1051 vchan_synchronize(&c->vchan); in ldma_synchronize()
1052 if (c->ds) in ldma_synchronize()
1053 dma_free_desc_resource(&c->ds->vdesc); in ldma_synchronize()
1062 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_terminate_all()
1063 vchan_get_all_descriptors(&c->vchan, &head); in ldma_terminate_all()
1064 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_terminate_all()
1065 vchan_dma_desc_free_list(&c->vchan, &head); in ldma_terminate_all()
1091 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_tx_status()
1094 if (d->ver == DMA_VER22) in ldma_tx_status()
1103 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in dma_chan_irq()
1107 writel(c->nr, d->base + DMA_CS); in dma_chan_irq()
1108 stat = readl(d->base + DMA_CIS); in dma_chan_irq()
1112 writel(readl(d->base + DMA_CIE) & ~DMA_CI_ALL, d->base + DMA_CIE); in dma_chan_irq()
1113 writel(stat, d->base + DMA_CIS); in dma_chan_irq()
1114 queue_work(d->wq, &c->work); in dma_chan_irq()
1124 irncr = readl(d->base + DMA_IRNCR); in dma_interrupt()
1126 dev_err(d->dev, "dummy interrupt\n"); in dma_interrupt()
1130 for_each_set_bit(cid, &irncr, d->chan_nrs) { in dma_interrupt()
1132 writel(readl(d->base + DMA_IRNEN) & ~BIT(cid), d->base + DMA_IRNEN); in dma_interrupt()
1134 writel(readl(d->base + DMA_IRNCR) | BIT(cid), d->base + DMA_IRNCR); in dma_interrupt()
1136 c = &d->chans[cid]; in dma_interrupt()
1145 struct ldma_port *p = c->port; in prep_slave_burst_len()
1146 struct dma_slave_config *cfg = &c->config; in prep_slave_burst_len()
1148 if (cfg->dst_maxburst) in prep_slave_burst_len()
1149 cfg->src_maxburst = cfg->dst_maxburst; in prep_slave_burst_len()
1152 p->txbl = ilog2(cfg->src_maxburst); in prep_slave_burst_len()
1153 p->rxbl = p->txbl; in prep_slave_burst_len()
1162 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_prep_slave_sg()
1173 if (d->ver > DMA_VER22) in ldma_prep_slave_sg()
1174 return ldma_chan_desc_cfg(chan, sgl->dma_address, sglen); in ldma_prep_slave_sg()
1179 num += DIV_ROUND_UP(avail, DMA_MAX_SIZE) - 1; in ldma_prep_slave_sg()
1186 c->ds = ds; in ldma_prep_slave_sg()
1198 hw_ds = &ds->desc_hw[num]; in ldma_prep_slave_sg()
1201 hw_ds->field &= ~DESC_SOP; in ldma_prep_slave_sg()
1202 hw_ds->field |= FIELD_PREP(DESC_SOP, 1); in ldma_prep_slave_sg()
1204 hw_ds->field &= ~DESC_EOP; in ldma_prep_slave_sg()
1205 hw_ds->field |= FIELD_PREP(DESC_EOP, 1); in ldma_prep_slave_sg()
1209 hw_ds->field &= ~DESC_SOP; in ldma_prep_slave_sg()
1210 hw_ds->field |= FIELD_PREP(DESC_SOP, 1); in ldma_prep_slave_sg()
1212 hw_ds->field &= ~DESC_EOP; in ldma_prep_slave_sg()
1213 hw_ds->field |= FIELD_PREP(DESC_EOP, 0); in ldma_prep_slave_sg()
1214 } else if (num == (sglen - 1)) { in ldma_prep_slave_sg()
1215 hw_ds->field &= ~DESC_SOP; in ldma_prep_slave_sg()
1216 hw_ds->field |= FIELD_PREP(DESC_SOP, 0); in ldma_prep_slave_sg()
1217 hw_ds->field &= ~DESC_EOP; in ldma_prep_slave_sg()
1218 hw_ds->field |= FIELD_PREP(DESC_EOP, 1); in ldma_prep_slave_sg()
1220 hw_ds->field &= ~DESC_SOP; in ldma_prep_slave_sg()
1221 hw_ds->field |= FIELD_PREP(DESC_SOP, 0); in ldma_prep_slave_sg()
1223 hw_ds->field &= ~DESC_EOP; in ldma_prep_slave_sg()
1224 hw_ds->field |= FIELD_PREP(DESC_EOP, 0); in ldma_prep_slave_sg()
1229 hw_ds->addr = (u32)addr; in ldma_prep_slave_sg()
1231 hw_ds->field &= ~DESC_DATA_LEN; in ldma_prep_slave_sg()
1232 hw_ds->field |= FIELD_PREP(DESC_DATA_LEN, len); in ldma_prep_slave_sg()
1234 hw_ds->field &= ~DESC_C; in ldma_prep_slave_sg()
1235 hw_ds->field |= FIELD_PREP(DESC_C, 0); in ldma_prep_slave_sg()
1237 hw_ds->field &= ~DESC_BYTE_OFF; in ldma_prep_slave_sg()
1238 hw_ds->field |= FIELD_PREP(DESC_BYTE_OFF, addr & 0x3); in ldma_prep_slave_sg()
1242 hw_ds->field &= ~DESC_OWN; in ldma_prep_slave_sg()
1243 hw_ds->field |= FIELD_PREP(DESC_OWN, DMA_OWN); in ldma_prep_slave_sg()
1249 avail -= len; in ldma_prep_slave_sg()
1253 ds->size = total; in ldma_prep_slave_sg()
1256 return vchan_tx_prep(&c->vchan, &ds->vdesc, DMA_CTRL_ACK); in ldma_prep_slave_sg()
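
The SOP/EOP branches above clear and re-set the same descriptor bitfields for the single, first, last and middle positions of the scatterlist. Folded together, the per-segment setup in ldma_prep_slave_sg() amounts to composing one descriptor word per segment; the helper below is an illustrative consolidation, not driver code (the DESC_* masks, DMA_OWN and struct dw2_desc are taken from the listing):

/* Illustrative consolidation of the per-segment descriptor setup. */
static void lgm_fill_hw_desc(struct dw2_desc *hw_ds, dma_addr_t addr,
                             u32 len, bool first, bool last)
{
        u32 field = hw_ds->field;

        field &= ~(DESC_SOP | DESC_EOP | DESC_DATA_LEN | DESC_BYTE_OFF |
                   DESC_C | DESC_OWN);
        field |= FIELD_PREP(DESC_SOP, first ? 1 : 0);   /* start of packet */
        field |= FIELD_PREP(DESC_EOP, last ? 1 : 0);    /* end of packet */
        field |= FIELD_PREP(DESC_DATA_LEN, len);
        field |= FIELD_PREP(DESC_BYTE_OFF, addr & 0x3); /* unaligned start bytes */
        field |= FIELD_PREP(DESC_C, 0);                 /* not completed yet */
        field |= FIELD_PREP(DESC_OWN, DMA_OWN);         /* hand over to hardware */

        hw_ds->addr = (u32)addr;
        hw_ds->field = field;
}
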
1264 memcpy(&c->config, cfg, sizeof(c->config)); in ldma_slave_config()
1272 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_alloc_chan_resources()
1273 struct device *dev = c->vchan.chan.device->dev; in ldma_alloc_chan_resources()
1276 if (d->ver > DMA_VER22) { in ldma_alloc_chan_resources()
1277 c->flags |= CHAN_IN_USE; in ldma_alloc_chan_resources()
1281 if (c->desc_pool) in ldma_alloc_chan_resources()
1282 return c->desc_num; in ldma_alloc_chan_resources()
1284 desc_sz = c->desc_num * sizeof(struct dw2_desc); in ldma_alloc_chan_resources()
1285 c->desc_pool = dma_pool_create(c->name, dev, desc_sz, in ldma_alloc_chan_resources()
1288 if (!c->desc_pool) { in ldma_alloc_chan_resources()
1290 return -ENOMEM; in ldma_alloc_chan_resources()
1293 return c->desc_num; in ldma_alloc_chan_resources()
1299 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_free_chan_resources()
1301 if (d->ver == DMA_VER22) { in ldma_free_chan_resources()
1302 dma_pool_destroy(c->desc_pool); in ldma_free_chan_resources()
1303 c->desc_pool = NULL; in ldma_free_chan_resources()
1307 c->flags &= ~CHAN_IN_USE; in ldma_free_chan_resources()
1314 struct dma_async_tx_descriptor *tx = &c->ds->vdesc.tx; in dma_work()
1315 struct virt_dma_chan *vc = &c->vchan; in dma_work()
1321 spin_lock_irqsave(&c->vchan.lock, flags); in dma_work()
1322 list_splice_tail_init(&vc->desc_completed, &head); in dma_work()
1323 spin_unlock_irqrestore(&c->vchan.lock, flags); in dma_work()
1331 list_del(&vd->node); in dma_work()
1336 c->ds = NULL; in dma_work()
1343 p->txbl = ilog2(burst); in update_burst_len_v22()
1345 p->rxbl = ilog2(burst); in update_burst_len_v22()
1352 p->txbl = burst; in update_burst_len_v3X()
1354 p->rxbl = burst; in update_burst_len_v3X()
1360 struct ldma_dev *d = ofdma->of_dma_data; in update_client_configs()
1361 u32 chan_id = spec->args[0]; in update_client_configs()
1362 u32 port_id = spec->args[1]; in update_client_configs()
1363 u32 burst = spec->args[2]; in update_client_configs()
1367 if (chan_id >= d->chan_nrs || port_id >= d->port_nrs) in update_client_configs()
1370 p = &d->ports[port_id]; in update_client_configs()
1371 c = &d->chans[chan_id]; in update_client_configs()
1372 c->port = p; in update_client_configs()
1374 if (d->ver == DMA_VER22) in update_client_configs()
1387 struct ldma_dev *d = ofdma->of_dma_data; in ldma_xlate()
1388 u32 chan_id = spec->args[0]; in ldma_xlate()
1391 if (!spec->args_count) in ldma_xlate()
1395 if (spec->args_count > 1) { in ldma_xlate()
1401 return dma_get_slave_channel(&d->chans[chan_id].vchan.chan); in ldma_xlate()
1408 c = &d->chans[i]; in ldma_dma_init_v22()
1409 c->nr = i; /* Real channel number */ in ldma_dma_init_v22()
1410 c->rst = DMA_CHAN_RST; in ldma_dma_init_v22()
1411 c->desc_num = DMA_DFT_DESC_NUM; in ldma_dma_init_v22()
1412 snprintf(c->name, sizeof(c->name), "chan%d", c->nr); in ldma_dma_init_v22()
1413 INIT_WORK(&c->work, dma_work); in ldma_dma_init_v22()
1414 c->vchan.desc_free = dma_free_desc_resource; in ldma_dma_init_v22()
1415 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v22()
1422 c = &d->chans[i]; in ldma_dma_init_v3X()
1423 c->data_endian = DMA_DFT_ENDIAN; in ldma_dma_init_v3X()
1424 c->desc_endian = DMA_DFT_ENDIAN; in ldma_dma_init_v3X()
1425 c->data_endian_en = false; in ldma_dma_init_v3X()
1426 c->desc_endian_en = false; in ldma_dma_init_v3X()
1427 c->desc_rx_np = false; in ldma_dma_init_v3X()
1428 c->flags |= DEVICE_ALLOC_DESC; in ldma_dma_init_v3X()
1429 c->onoff = DMA_CH_OFF; in ldma_dma_init_v3X()
1430 c->rst = DMA_CHAN_RST; in ldma_dma_init_v3X()
1431 c->abc_en = true; in ldma_dma_init_v3X()
1432 c->hdrm_csum = false; in ldma_dma_init_v3X()
1433 c->boff_len = 0; in ldma_dma_init_v3X()
1434 c->nr = i; in ldma_dma_init_v3X()
1435 c->vchan.desc_free = dma_free_desc_resource; in ldma_dma_init_v3X()
1436 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v3X()
1443 ret = device_property_read_u32(d->dev, "dma-channels", &d->chan_nrs); in ldma_init_v22()
1445 dev_err(d->dev, "unable to read dma-channels property\n"); in ldma_init_v22()
1449 d->irq = platform_get_irq(pdev, 0); in ldma_init_v22()
1450 if (d->irq < 0) in ldma_init_v22()
1451 return d->irq; in ldma_init_v22()
1453 ret = devm_request_irq(&pdev->dev, d->irq, dma_interrupt, 0, in ldma_init_v22()
1458 d->wq = alloc_ordered_workqueue("dma_wq", WQ_MEM_RECLAIM | in ldma_init_v22()
1460 if (!d->wq) in ldma_init_v22()
1461 return -ENOMEM; in ldma_init_v22()
1470 clk_disable_unprepare(d->core_clk); in ldma_clk_disable()
1471 reset_control_assert(d->rst); in ldma_clk_disable()
1553 { .compatible = "intel,lgm-cdma", .data = &dma0},
1554 { .compatible = "intel,lgm-dma2tx", .data = &dma2tx},
1555 { .compatible = "intel,lgm-dma1rx", .data = &dma1rx},
1556 { .compatible = "intel,lgm-dma1tx", .data = &dma1tx},
1557 { .compatible = "intel,lgm-dma0tx", .data = &dma0tx},
1558 { .compatible = "intel,lgm-dma3", .data = &dma3},
1559 { .compatible = "intel,lgm-toe-dma30", .data = &toe_dma30},
1560 { .compatible = "intel,lgm-toe-dma31", .data = &toe_dma31},
1566 struct device *dev = &pdev->dev; in intel_ldma_probe()
1577 return -ENOMEM; in intel_ldma_probe()
1580 d->dev = &pdev->dev; in intel_ldma_probe()
1582 d->inst = device_get_match_data(dev); in intel_ldma_probe()
1583 if (!d->inst) { in intel_ldma_probe()
1585 return -ENODEV; in intel_ldma_probe()
1588 d->base = devm_platform_ioremap_resource(pdev, 0); in intel_ldma_probe()
1589 if (IS_ERR(d->base)) in intel_ldma_probe()
1590 return PTR_ERR(d->base); in intel_ldma_probe()
1592 /* Power up and reset the dma engine, some DMAs always on?? */ in intel_ldma_probe()
1593 d->core_clk = devm_clk_get_optional(dev, NULL); in intel_ldma_probe()
1594 if (IS_ERR(d->core_clk)) in intel_ldma_probe()
1595 return PTR_ERR(d->core_clk); in intel_ldma_probe()
1597 d->rst = devm_reset_control_get_optional(dev, NULL); in intel_ldma_probe()
1598 if (IS_ERR(d->rst)) in intel_ldma_probe()
1599 return PTR_ERR(d->rst); in intel_ldma_probe()
1601 clk_prepare_enable(d->core_clk); in intel_ldma_probe()
1602 reset_control_deassert(d->rst); in intel_ldma_probe()
1610 id = readl(d->base + DMA_ID); in intel_ldma_probe()
1611 d->chan_nrs = FIELD_GET(DMA_ID_CHNR, id); in intel_ldma_probe()
1612 d->port_nrs = FIELD_GET(DMA_ID_PNR, id); in intel_ldma_probe()
1613 d->ver = FIELD_GET(DMA_ID_REV, id); in intel_ldma_probe()
1616 d->flags |= DMA_ADDR_36BIT; in intel_ldma_probe()
1622 d->flags |= DMA_DATA_128BIT; in intel_ldma_probe()
1626 dev_err(dev, "No usable DMA configuration\n"); in intel_ldma_probe()
1630 if (d->ver == DMA_VER22) { in intel_ldma_probe()
1636 ret = device_property_read_u32(dev, "dma-channel-mask", &d->channels_mask); in intel_ldma_probe()
1638 d->channels_mask = GENMASK(d->chan_nrs - 1, 0); in intel_ldma_probe()
1640 dma_dev = &d->dma_dev; in intel_ldma_probe()
1642 dma_cap_zero(dma_dev->cap_mask); in intel_ldma_probe()
1643 dma_cap_set(DMA_SLAVE, dma_dev->cap_mask); in intel_ldma_probe()
1646 INIT_LIST_HEAD(&dma_dev->channels); in intel_ldma_probe()
1649 d->ports = devm_kcalloc(dev, d->port_nrs, sizeof(*p), GFP_KERNEL); in intel_ldma_probe()
1650 if (!d->ports) in intel_ldma_probe()
1651 return -ENOMEM; in intel_ldma_probe()
1654 d->chans = devm_kcalloc(d->dev, d->chan_nrs, sizeof(*c), GFP_KERNEL); in intel_ldma_probe()
1655 if (!d->chans) in intel_ldma_probe()
1656 return -ENOMEM; in intel_ldma_probe()
1658 for (i = 0; i < d->port_nrs; i++) { in intel_ldma_probe()
1659 p = &d->ports[i]; in intel_ldma_probe()
1660 p->portid = i; in intel_ldma_probe()
1661 p->ldev = d; in intel_ldma_probe()
1664 dma_dev->dev = &pdev->dev; in intel_ldma_probe()
1666 ch_mask = (unsigned long)d->channels_mask; in intel_ldma_probe()
1667 for_each_set_bit(j, &ch_mask, d->chan_nrs) { in intel_ldma_probe()
1668 if (d->ver == DMA_VER22) in intel_ldma_probe()
1678 dma_dev->device_alloc_chan_resources = ldma_alloc_chan_resources; in intel_ldma_probe()
1679 dma_dev->device_free_chan_resources = ldma_free_chan_resources; in intel_ldma_probe()
1680 dma_dev->device_terminate_all = ldma_terminate_all; in intel_ldma_probe()
1681 dma_dev->device_issue_pending = ldma_issue_pending; in intel_ldma_probe()
1682 dma_dev->device_tx_status = ldma_tx_status; in intel_ldma_probe()
1683 dma_dev->device_resume = ldma_resume_chan; in intel_ldma_probe()
1684 dma_dev->device_pause = ldma_pause_chan; in intel_ldma_probe()
1685 dma_dev->device_prep_slave_sg = ldma_prep_slave_sg; in intel_ldma_probe()
1687 if (d->ver == DMA_VER22) { in intel_ldma_probe()
1688 dma_dev->device_config = ldma_slave_config; in intel_ldma_probe()
1689 dma_dev->device_synchronize = ldma_synchronize; in intel_ldma_probe()
1690 dma_dev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in intel_ldma_probe()
1691 dma_dev->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in intel_ldma_probe()
1692 dma_dev->directions = BIT(DMA_MEM_TO_DEV) | in intel_ldma_probe()
1694 dma_dev->residue_granularity = in intel_ldma_probe()
1704 dev_err(dev, "Failed to register slave DMA engine device\n"); in intel_ldma_probe()
1708 ret = of_dma_controller_register(pdev->dev.of_node, ldma_xlate, d); in intel_ldma_probe()
1710 dev_err(dev, "Failed to register of DMA controller\n"); in intel_ldma_probe()
1715 dev_info(dev, "Init done - rev: %x, ports: %d channels: %d\n", d->ver, in intel_ldma_probe()
1716 d->port_nrs, d->chan_nrs); in intel_ldma_probe()
1731 * before its DMA clients, some of which are platform specific, and also to provide
1732 * registered DMA channels and DMA capabilities to clients before their
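
The callbacks wired up in intel_ldma_probe() (device_config, device_prep_slave_sg, device_issue_pending, ...) are reached through the generic dmaengine slave API rather than called directly. A minimal consumer-side sketch under assumed conditions (a client node with a "dmas"/"dma-names" entry called "rx", a DMA_DEV_TO_MEM transfer, error handling trimmed); only the dmaengine calls themselves are standard kernel API, everything else here is hypothetical:

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

/* Hypothetical client of the LGM DMA engine; names and parameters are illustrative. */
static int lgm_client_start_rx(struct device *dev, struct scatterlist *sgl,
                               unsigned int sglen, dma_async_tx_callback done,
                               void *done_arg)
{
        struct dma_slave_config cfg = {
                .src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .src_maxburst = 8,
        };
        struct dma_async_tx_descriptor *tx;
        struct dma_chan *chan;

        chan = dma_request_chan(dev, "rx");             /* resolved through ldma_xlate() */
        if (IS_ERR(chan))
                return PTR_ERR(chan);

        dmaengine_slave_config(chan, &cfg);             /* reaches ldma_slave_config() on v2.2 */

        tx = dmaengine_prep_slave_sg(chan, sgl, sglen, DMA_DEV_TO_MEM,
                                     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tx) {
                dma_release_channel(chan);
                return -EINVAL;
        }

        tx->callback = done;
        tx->callback_param = done_arg;
        dmaengine_submit(tx);                           /* queue the descriptor */
        dma_async_issue_pending(chan);                  /* reaches ldma_issue_pending() */

        return 0;
}
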