
// SPDX-License-Identifier: GPL-2.0+
/* Microchip Sparx5 Switch driver
 * https://github.com/microchip-ung/sparx-5_reginfo
 */

#include <linux/dma-mapping.h>
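/* TX dataptr callback (sparx5_fdma_tx_dataptr_cb): the TX data buffers live
 * in the same DMA region as the DCBs, directly after the DCB array, so the
 * buffer address is computed from the DCB/DB indices rather than allocated.
 */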
        *dataptr = fdma->dma + (sizeof(struct fdma_dcb) * fdma->n_dcbs) +
                   ((dcb * fdma->n_dbs + db) * fdma->db_size);
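/* RX dataptr callback (sparx5_fdma_rx_dataptr_cb): allocate an skb for the
 * data block and hand its physical address to the FDMA channel.
 */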
        struct sparx5 *sparx5 = fdma->priv;
        struct sparx5_rx *rx = &sparx5->rx;
        struct sk_buff *skb;

        skb = __netdev_alloc_skb(rx->ndev, fdma->db_size, GFP_ATOMIC);
        if (unlikely(!skb))
                return -ENOMEM;

        *dataptr = virt_to_phys(skb->data);
        rx->skb[dcb][db] = skb;

        return 0;
static void sparx5_fdma_rx_activate(struct sparx5 *sparx5, struct sparx5_rx *rx)
{
        struct fdma *fdma = &rx->fdma;

        /* Write the buffer address in the LLP and LLP1 */
        spx5_wr(((u64)fdma->dma) & GENMASK(31, 0), sparx5,
                FDMA_DCB_LLP(fdma->channel_id));
        spx5_wr(((u64)fdma->dma) >> 32, sparx5,
                FDMA_DCB_LLP1(fdma->channel_id));

        /* Set the number of RX DBs to be used, and DB end-of-frame interrupt */
        spx5_wr(FDMA_CH_CFG_CH_DCB_DB_CNT_SET(fdma->n_dbs) |
                FDMA_CH_CFG_CH_INTR_DB_EOF_ONLY_SET(1) |
                FDMA_CH_CFG_CH_INJ_PORT_SET(XTR_QUEUE),
                sparx5, FDMA_CH_CFG(fdma->channel_id));

        /* Set the RX Watermark to max */
        spx5_rmw(FDMA_XTR_CFG_XTR_FIFO_WM_SET(31), FDMA_XTR_CFG_XTR_FIFO_WM,
                 sparx5, FDMA_XTR_CFG);

        /* Start RX fdma */
        spx5_rmw(FDMA_PORT_CTRL_XTR_STOP_SET(0), FDMA_PORT_CTRL_XTR_STOP,
                 sparx5, FDMA_PORT_CTRL(0));

        /* Enable RX channel DB interrupt */
        spx5_rmw(BIT(fdma->channel_id),
                 BIT(fdma->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA,
                 sparx5, FDMA_INTR_DB_ENA);

        /* Activate the RX channel */
        spx5_wr(BIT(fdma->channel_id), sparx5, FDMA_CH_ACTIVATE);
}
static void sparx5_fdma_rx_deactivate(struct sparx5 *sparx5, struct sparx5_rx *rx)
{
        struct fdma *fdma = &rx->fdma;

        /* Deactivate the RX channel */
        spx5_rmw(0, BIT(fdma->channel_id) & FDMA_CH_ACTIVATE_CH_ACTIVATE,
                 sparx5, FDMA_CH_ACTIVATE);
        /* Disable RX channel DB interrupt */
        spx5_rmw(0, BIT(fdma->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA,
                 sparx5, FDMA_INTR_DB_ENA);
        /* Stop RX fdma */
        spx5_rmw(FDMA_PORT_CTRL_XTR_STOP_SET(1), FDMA_PORT_CTRL_XTR_STOP,
                 sparx5, FDMA_PORT_CTRL(0));
}
static void sparx5_fdma_tx_activate(struct sparx5 *sparx5, struct sparx5_tx *tx)
{
        struct fdma *fdma = &tx->fdma;

        /* Write the buffer address in the LLP and LLP1 */
        spx5_wr(((u64)fdma->dma) & GENMASK(31, 0), sparx5,
                FDMA_DCB_LLP(fdma->channel_id));
        spx5_wr(((u64)fdma->dma) >> 32, sparx5,
                FDMA_DCB_LLP1(fdma->channel_id));
        /* Set the number of TX DBs to be used, and DB end-of-frame interrupt */
        spx5_wr(FDMA_CH_CFG_CH_DCB_DB_CNT_SET(fdma->n_dbs) |
                FDMA_CH_CFG_CH_INTR_DB_EOF_ONLY_SET(1) |
                FDMA_CH_CFG_CH_INJ_PORT_SET(INJ_QUEUE),
                sparx5, FDMA_CH_CFG(fdma->channel_id));
        /* Start TX fdma */
        spx5_rmw(FDMA_PORT_CTRL_INJ_STOP_SET(0), FDMA_PORT_CTRL_INJ_STOP,
                 sparx5, FDMA_PORT_CTRL(0));
        /* Activate the channel */
        spx5_wr(BIT(fdma->channel_id), sparx5, FDMA_CH_ACTIVATE);
}
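/* sparx5_fdma_tx_deactivate(): disable the TX channel */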
        spx5_rmw(0, BIT(tx->fdma.channel_id) & FDMA_CH_ACTIVATE_CH_ACTIVATE,
                 sparx5, FDMA_CH_ACTIVATE);
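/* sparx5_fdma_reload(): make the channel pick up newly added DCBs */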
        /* Reload the RX channel */
        spx5_wr(BIT(fdma->channel_id), sparx5, FDMA_CH_RELOAD);
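/* Extract one frame: check that the current DB is done, parse the IFH to
 * find the source port, and deliver the skb to that port's netdev.
 */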
static bool sparx5_fdma_rx_get_frame(struct sparx5 *sparx5, struct sparx5_rx *rx)
{
        struct fdma *fdma = &rx->fdma;
        struct sparx5_port *port;
        struct fdma_db *db_hw;
        struct frame_info fi;
        struct sk_buff *skb;

        /* Check if the DCB is done */
        db_hw = fdma_db_next_get(fdma);
        if (unlikely(!fdma_db_is_done(db_hw)))
                return false;
        skb = rx->skb[fdma->dcb_index][fdma->db_index];
        skb_put(skb, fdma_db_len_get(db_hw));
        sparx5_ifh_parse(sparx5, (u32 *)skb->data, &fi);
        /* Map to port netdev */
        port = fi.src_port < sparx5->data->consts->n_ports ?
               sparx5->ports[fi.src_port] :
               NULL;
        if (!port || !port->ndev) {
                dev_err(sparx5->dev, "Data on inactive port %d\n", fi.src_port);
                return false;
        }
        skb->dev = port->ndev;
        skb_pull(skb, IFH_LEN * sizeof(u32));
        if (likely(!(skb->dev->features & NETIF_F_RXFCS)))
                skb_trim(skb, skb->len - ETH_FCS_LEN);
        skb->protocol = eth_type_trans(skb, skb->dev);
        /* Frames from ports in the HW bridge have already been forwarded */
        if (test_bit(port->portno, sparx5->bridge_mask))
                skb->offload_fwd_mark = 1;
        skb->dev->stats.rx_bytes += skb->len;
        skb->dev->stats.rx_packets++;
        rx->packets++;
        netif_receive_skb(skb);
        return true;
}
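/* NAPI poll (sparx5_fdma_napi_callback): receive up to "weight" frames,
 * return the consumed DCBs to the channel and re-enable the DB interrupt
 * once the poll completes below budget.
 */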
static int sparx5_fdma_napi_callback(struct napi_struct *napi, int weight)
{
        struct sparx5_rx *rx = container_of(napi, struct sparx5_rx, napi);
        struct sparx5 *sparx5 = container_of(rx, struct sparx5, rx);
        struct fdma *fdma = &rx->fdma;
        int counter = 0;

        while (counter < weight && sparx5_fdma_rx_get_frame(sparx5, rx)) {
                counter++;
                fdma_dcb_add(fdma, fdma->dcb_index,
                             FDMA_DCB_INFO_DATAL(fdma->db_size),
                             FDMA_DCB_STATUS_INTR);
        }
        if (counter < weight) {
                napi_complete_done(&rx->napi, counter);
                spx5_rmw(BIT(fdma->channel_id),
                         BIT(fdma->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA,
                         sparx5, FDMA_INTR_DB_ENA);
        }
        return counter;
}
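/* sparx5_fdma_xmit(): copy the IFH and the frame into the next free TX data
 * block and queue the DCB for injection.
 */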
int sparx5_fdma_xmit(struct sparx5 *sparx5, u32 *ifh, struct sk_buff *skb)
{
        struct sparx5_tx *tx = &sparx5->tx;
        struct fdma *fdma = &tx->fdma;
        void *virt_addr;

        if (!fdma_db_is_done(fdma_db_get(fdma, fdma->dcb_index, 0)))
                return -EINVAL;

        /* Get the virtual address of the dataptr for the next DB */
        virt_addr = ((u8 *)fdma->dcbs +
                     (sizeof(struct fdma_dcb) * fdma->n_dcbs) +
                     ((fdma->dcb_index * fdma->n_dbs) * fdma->db_size));

        /* Copy the IFH followed by the frame data into the TX buffer */
        memcpy(virt_addr, ifh, IFH_LEN * 4);
        memcpy(virt_addr + IFH_LEN * 4, skb->data, skb->len);

        fdma_dcb_add(fdma, fdma->dcb_index, 0,
                     FDMA_DCB_STATUS_SOF |
                     FDMA_DCB_STATUS_EOF |
                     FDMA_DCB_STATUS_BLOCKO(0) |
                     FDMA_DCB_STATUS_BLOCKL(skb->len + IFH_LEN * 4 + 4));
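/* sparx5_fdma_rx_alloc(): initialise the RX DCBs, register the NAPI poll and
 * activate the RX channel.
 */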
        struct sparx5_rx *rx = &sparx5->rx;
        struct fdma *fdma = &rx->fdma;

        fdma_dcbs_init(fdma, FDMA_DCB_INFO_DATAL(fdma->db_size),
                       FDMA_DCB_STATUS_INTR);

        netif_napi_add_weight(rx->ndev, &rx->napi, sparx5_fdma_napi_callback,
                              FDMA_WEIGHT);
        napi_enable(&rx->napi);
        sparx5_fdma_rx_activate(sparx5, rx);

        return 0;
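/* sparx5_fdma_tx_alloc(): initialise the TX DCBs for the injection channel */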
        struct sparx5_tx *tx = &sparx5->tx;
        struct fdma *fdma = &tx->fdma;

        fdma_dcbs_init(fdma, FDMA_DCB_INFO_DATAL(fdma->db_size),
                       FDMA_DCB_STATUS_DONE);
        return 0;
static void sparx5_fdma_rx_init(struct sparx5 *sparx5,
                                struct sparx5_rx *rx, int channel)
{
        struct fdma *fdma = &rx->fdma;
        int idx;

        fdma->channel_id = channel;
        fdma->n_dcbs = FDMA_DCB_MAX;
        fdma->n_dbs = FDMA_RX_DCB_MAX_DBS;
        fdma->priv = sparx5;
        fdma->db_size = ALIGN(FDMA_XTR_BUFFER_SIZE, PAGE_SIZE);
        fdma->size = fdma_get_size(&sparx5->rx.fdma);
        fdma->ops.dataptr_cb = &sparx5_fdma_rx_dataptr_cb;
        fdma->ops.nextptr_cb = &fdma_nextptr_cb;

        /* Fetch a netdev for SKB and NAPI use, any will do */
        for (idx = 0; idx < sparx5->data->consts->n_ports; ++idx) {
                struct sparx5_port *port = sparx5->ports[idx];

                if (port && port->ndev) {
                        rx->ndev = port->ndev;
                        break;
                }
        }
}
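/* sparx5_fdma_tx_init(): set up the TX channel parameters and callbacks */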
static void sparx5_fdma_tx_init(struct sparx5 *sparx5,
                                struct sparx5_tx *tx, int channel)
{
        struct fdma *fdma = &tx->fdma;

        fdma->channel_id = channel;
        fdma->n_dcbs = FDMA_DCB_MAX;
        fdma->n_dbs = FDMA_TX_DCB_MAX_DBS;
        fdma->priv = sparx5;
        fdma->db_size = ALIGN(FDMA_XTR_BUFFER_SIZE, PAGE_SIZE);
        fdma->size = fdma_get_size_contiguous(&sparx5->tx.fdma);
        fdma->ops.dataptr_cb = &sparx5_fdma_tx_dataptr_cb;
        fdma->ops.nextptr_cb = &fdma_nextptr_cb;
}
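/* FDMA interrupt handler (sparx5_fdma_handler): a data-block interrupt
 * schedules the RX NAPI poll; channel errors are logged rate-limited.
 */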
                napi_schedule(&sparx5->rx.napi);

                dev_err_ratelimited(sparx5->dev,
                                    "ERR: int: %d, type: %d\n", err, err_type);
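/* sparx5_fdma_start(): initialise both channels, then allocate the RX and TX
 * buffers, logging an error if either allocation fails.
 */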
        sparx5_fdma_rx_init(sparx5, &sparx5->rx, FDMA_XTR_CHANNEL);
        sparx5_fdma_tx_init(sparx5, &sparx5->tx, FDMA_INJ_CHANNEL);

        dev_err(sparx5->dev, "Could not allocate RX buffers: %d\n", err);

        dev_err(sparx5->dev, "Could not allocate TX buffers: %d\n", err);
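/* sparx5_fdma_stop(): disable NAPI, deactivate both channels, wait for the
 * RX channel to drain and release the FDMA memory.
 */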
        napi_disable(&sparx5->rx.napi);

        sparx5_fdma_rx_deactivate(sparx5, &sparx5->rx);
        sparx5_fdma_tx_deactivate(sparx5, &sparx5->tx);

        /* Wait for the RX channel to stop */

        fdma_free_phys(&sparx5->rx.fdma);
        fdma_free_phys(&sparx5->tx.fdma);