Lines Matching full:q
15 static dma_addr_t ionic_tx_map_single(struct ionic_queue *q,
18 static dma_addr_t ionic_tx_map_frag(struct ionic_queue *q,
22 static void ionic_tx_desc_unmap_bufs(struct ionic_queue *q,
25 static void ionic_tx_clean(struct ionic_queue *q,
30 static inline void ionic_txq_post(struct ionic_queue *q, bool ring_dbell) in ionic_txq_post() argument
36 ionic_q_post(q, ring_dbell); in ionic_txq_post()
39 static inline void ionic_rxq_post(struct ionic_queue *q, bool ring_dbell) in ionic_rxq_post() argument
41 ionic_q_post(q, ring_dbell); in ionic_rxq_post()
44 bool ionic_txq_poke_doorbell(struct ionic_queue *q) in ionic_txq_poke_doorbell() argument
50 netdev = q->lif->netdev; in ionic_txq_poke_doorbell()
51 netdev_txq = netdev_get_tx_queue(netdev, q->index); in ionic_txq_poke_doorbell()
55 if (q->tail_idx == q->head_idx) { in ionic_txq_poke_doorbell()
61 then = q->dbell_jiffies; in ionic_txq_poke_doorbell()
64 if (dif > q->dbell_deadline) { in ionic_txq_poke_doorbell()
65 ionic_dbell_ring(q->lif->kern_dbpage, q->hw_type, in ionic_txq_poke_doorbell()
66 q->dbval | q->head_idx); in ionic_txq_poke_doorbell()
68 q->dbell_jiffies = now; in ionic_txq_poke_doorbell()
76 bool ionic_rxq_poke_doorbell(struct ionic_queue *q) in ionic_rxq_poke_doorbell() argument
82 if (q->tail_idx == q->head_idx) in ionic_rxq_poke_doorbell()
86 then = q->dbell_jiffies; in ionic_rxq_poke_doorbell()
89 if (dif > q->dbell_deadline) { in ionic_rxq_poke_doorbell()
90 ionic_dbell_ring(q->lif->kern_dbpage, q->hw_type, in ionic_rxq_poke_doorbell()
91 q->dbval | q->head_idx); in ionic_rxq_poke_doorbell()
93 q->dbell_jiffies = now; in ionic_rxq_poke_doorbell()
95 dif = 2 * q->dbell_deadline; in ionic_rxq_poke_doorbell()
99 q->dbell_deadline = dif; in ionic_rxq_poke_doorbell()
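The two poke helpers matched above implement a deadline-based doorbell retry: if the queue still holds unposted work (tail_idx != head_idx) and more than dbell_deadline jiffies have elapsed since the last ring, the doorbell is rung again, and the RX variant additionally doubles its deadline as a backoff (line 95). A minimal user-space sketch of that logic, with jiffies reduced to a plain counter and q_model/poke_doorbell as made-up stand-ins for the driver's types:

#include <stdio.h>

struct q_model {
	unsigned int head_idx, tail_idx;
	unsigned long dbell_jiffies;	/* last time the doorbell rang */
	unsigned long dbell_deadline;	/* max quiet interval before re-ring */
};

static void poke_doorbell(struct q_model *q, unsigned long now)
{
	unsigned long dif;

	if (q->tail_idx == q->head_idx)		/* nothing outstanding */
		return;

	dif = now - q->dbell_jiffies;		/* unsigned math survives wrap */
	if (dif > q->dbell_deadline) {
		printf("t=%lu: ring doorbell (quiet for %lu)\n", now, dif);
		q->dbell_jiffies = now;
		q->dbell_deadline *= 2;		/* the RX-only backoff step */
	}
}

int main(void)
{
	struct q_model q = { .head_idx = 4, .tail_idx = 0,
			     .dbell_jiffies = 0, .dbell_deadline = 5 };

	for (unsigned long now = 1; now <= 40; now++)
		poke_doorbell(&q, now);
	return 0;
}

Running it rings at t=6, t=17, and t=38 as the deadline stretches from 5 to 10 to 20.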
105 static inline struct ionic_txq_sg_elem *ionic_tx_sg_elems(struct ionic_queue *q) in ionic_tx_sg_elems() argument
107 if (likely(q->sg_desc_size == sizeof(struct ionic_txq_sg_desc_v1))) in ionic_tx_sg_elems()
108 return q->txq_sgl_v1[q->head_idx].elems; in ionic_tx_sg_elems()
110 return q->txq_sgl[q->head_idx].elems; in ionic_tx_sg_elems()
114 struct ionic_queue *q) in q_to_ndq() argument
116 return netdev_get_tx_queue(netdev, q->index); in q_to_ndq()
129 static void __ionic_rx_put_buf(struct ionic_queue *q, in __ionic_rx_put_buf() argument
136 page_pool_put_full_page(q->page_pool, buf_info->page, recycle_direct); in __ionic_rx_put_buf()
143 static void ionic_rx_put_buf(struct ionic_queue *q, in ionic_rx_put_buf() argument
146 __ionic_rx_put_buf(q, buf_info, false); in ionic_rx_put_buf()
149 static void ionic_rx_put_buf_direct(struct ionic_queue *q, in ionic_rx_put_buf_direct() argument
152 __ionic_rx_put_buf(q, buf_info, true); in ionic_rx_put_buf_direct()
155 static void ionic_rx_add_skb_frag(struct ionic_queue *q, in ionic_rx_add_skb_frag() argument
162 page_pool_dma_sync_for_cpu(q->page_pool, in ionic_rx_add_skb_frag()
179 static struct sk_buff *ionic_rx_build_skb(struct ionic_queue *q, in ionic_rx_build_skb() argument
194 skb = napi_get_frags(&q_to_qcq(q)->napi); in ionic_rx_build_skb()
197 dev_name(q->dev), q->name); in ionic_rx_build_skb()
198 q_to_rx_stats(q)->alloc_err++; in ionic_rx_build_skb()
211 ionic_rx_add_skb_frag(q, skb, buf_info, headroom, frag_len, synced); in ionic_rx_build_skb()
219 ionic_rx_add_skb_frag(q, skb, buf_info, 0, frag_len, synced); in ionic_rx_build_skb()
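ionic_rx_build_skb() attaches page-pool buffers to the skb as frags, passing the headroom offset only for the first buffer (line 211) and 0 for the rest (line 219). The offset arithmetic reduces to the standalone sketch below; the buffer size and headroom are illustrative values, not the driver's:

#include <stdio.h>

int main(void)
{
	unsigned int buf_len = 2048, headroom = 192;
	unsigned int remain = 5000;	/* total frame length */

	for (int i = 0; remain; i++) {
		unsigned int off = (i == 0) ? headroom : 0;	/* headroom on frag 0 only */
		unsigned int cap = buf_len - off;
		unsigned int frag_len = remain < cap ? remain : cap;

		printf("frag %d: offset %u, len %u\n", i, off, frag_len);
		remain -= frag_len;
	}
	return 0;
}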
231 struct ionic_queue *q, in ionic_rx_copybreak() argument
239 struct device *dev = q->dev; in ionic_rx_copybreak()
245 skb = napi_alloc_skb(&q_to_qcq(q)->napi, len); in ionic_rx_copybreak()
248 dev_name(dev), q->name); in ionic_rx_copybreak()
249 q_to_rx_stats(q)->alloc_err++; in ionic_rx_copybreak()
255 page_pool_dma_sync_for_cpu(q->page_pool, in ionic_rx_copybreak()
266 ionic_rx_put_buf_direct(q, buf_info); in ionic_rx_copybreak()
269 ionic_rx_put_buf_direct(q, buf_info); in ionic_rx_copybreak()
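The copybreak path copies short frames into a freshly allocated skb (napi_alloc_skb at line 245) so the DMA pages can be returned to the page pool immediately via ionic_rx_put_buf_direct(); longer frames keep their pages attached as frags. A user-space model of that decision; RX_COPYBREAK here is an assumed example threshold, where the driver uses the runtime-tunable lif->rx_copybreak (line 646):

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

#define RX_COPYBREAK 256	/* assumed example threshold */

static bool rx_deliver(const unsigned char *page_buf, size_t len)
{
	if (len <= RX_COPYBREAK) {
		unsigned char copy[RX_COPYBREAK];

		memcpy(copy, page_buf, len);	/* small frame: copy out... */
		printf("copybreak: copied %zu bytes, page recycled\n", len);
		return true;			/* ...and recycle the page now */
	}
	printf("large frame (%zu bytes): page stays attached as a frag\n", len);
	return false;				/* page ownership moves to the skb */
}

int main(void)
{
	unsigned char page[2048] = { 0 };

	rx_deliver(page, 128);
	rx_deliver(page, 1500);
	return 0;
}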
274 static void ionic_xdp_tx_desc_clean(struct ionic_queue *q, in ionic_xdp_tx_desc_clean() argument
292 ionic_tx_desc_unmap_bufs(q, desc_info); in ionic_xdp_tx_desc_clean()
304 static int ionic_xdp_post_frame(struct ionic_queue *q, struct xdp_frame *frame, in ionic_xdp_post_frame() argument
316 desc_info = &q->tx_info[q->head_idx]; in ionic_xdp_post_frame()
317 desc = &q->txq[q->head_idx]; in ionic_xdp_post_frame()
319 stats = q_to_tx_stats(q); in ionic_xdp_post_frame()
324 dma_sync_single_for_device(q->dev, dma_addr, in ionic_xdp_post_frame()
327 dma_addr = ionic_tx_map_single(q, frame->data, len); in ionic_xdp_post_frame()
351 elem = ionic_tx_sg_elems(q); in ionic_xdp_post_frame()
358 dma_sync_single_for_device(q->dev, dma_addr, in ionic_xdp_post_frame()
362 dma_addr = ionic_tx_map_frag(q, frag, 0, in ionic_xdp_post_frame()
365 ionic_tx_desc_unmap_bufs(q, desc_info); in ionic_xdp_post_frame()
392 ionic_txq_post(q, ring_doorbell); in ionic_xdp_post_frame()
422 txq = &lif->txqcqs[qi]->q; in ionic_xdp_xmit()
458 static void ionic_xdp_rx_unlink_bufs(struct ionic_queue *q, in ionic_xdp_rx_unlink_bufs() argument
607 static void ionic_rx_clean(struct ionic_queue *q, in ionic_rx_clean() argument
612 struct net_device *netdev = q->lif->netdev; in ionic_rx_clean()
613 struct ionic_qcq *qcq = q_to_qcq(q); in ionic_rx_clean()
621 stats = q_to_rx_stats(q); in ionic_rx_clean()
628 dev_dbg(q->dev, "q%d drop comp->status %d comp->len %d desc->len %d\n", in ionic_rx_clean()
629 q->index, comp->status, comp->len, q->rxq[q->head_idx].len); in ionic_rx_clean()
640 if (ionic_run_xdp(stats, netdev, xdp_prog, q, desc_info->bufs, len)) in ionic_rx_clean()
646 use_copybreak = len <= q->lif->rx_copybreak; in ionic_rx_clean()
648 skb = ionic_rx_copybreak(netdev, q, desc_info, in ionic_rx_clean()
652 skb = ionic_rx_build_skb(q, desc_info, headroom, len, in ionic_rx_clean()
660 skb_record_rx_queue(skb, q->index); in ionic_rx_clean()
700 if (unlikely(q->features & IONIC_RXQ_F_HWSTAMP)) { in ionic_rx_clean()
713 skb_hwtstamps(skb)->hwtstamp = ionic_lif_phc_ktime(q->lif, hwstamp); in ionic_rx_clean()
729 struct ionic_queue *q = cq->bound_q; in __ionic_rx_service() local
738 if (q->tail_idx == q->head_idx) in __ionic_rx_service()
741 if (q->tail_idx != le16_to_cpu(comp->comp_index)) in __ionic_rx_service()
744 desc_info = &q->rx_info[q->tail_idx]; in __ionic_rx_service()
745 q->tail_idx = (q->tail_idx + 1) & (q->num_descs - 1); in __ionic_rx_service()
747 /* clean the related q entry, only one per qc completion */ in __ionic_rx_service()
748 ionic_rx_clean(q, desc_info, comp, xdp_prog); in __ionic_rx_service()
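The tail advance at line 745 is the standard power-of-two ring wrap, (idx + 1) & (num_descs - 1), which requires num_descs to be a power of two. A tiny standalone demonstration:

#include <assert.h>
#include <stdio.h>

int main(void)
{
	unsigned int num_descs = 8;	/* must be a power of two */
	unsigned int idx = 0;

	assert((num_descs & (num_descs - 1)) == 0);

	for (int i = 0; i < 10; i++) {
		printf("%u ", idx);
		idx = (idx + 1) & (num_descs - 1);	/* 7 wraps to 0 */
	}
	printf("\n");	/* 0 1 2 3 4 5 6 7 0 1 */
	return 0;
}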
758 static inline void ionic_write_cmb_desc(struct ionic_queue *q, in ionic_write_cmb_desc() argument
764 if (unlikely(q_to_qcq(q)->flags & IONIC_QCQ_F_CMB_RINGS)) in ionic_write_cmb_desc()
765 memcpy_toio(&q->cmb_txq[q->head_idx], desc, sizeof(q->cmb_txq[0])); in ionic_write_cmb_desc()
768 void ionic_rx_fill(struct ionic_queue *q, struct bpf_prog *xdp_prog) in ionic_rx_fill() argument
770 struct net_device *netdev = q->lif->netdev; in ionic_rx_fill()
787 n_fill = ionic_q_space_avail(q); in ionic_rx_fill()
790 q->num_descs / IONIC_RX_FILL_DIV); in ionic_rx_fill()
815 desc = &q->rxq[q->head_idx]; in ionic_rx_fill()
816 desc_info = &q->rx_info[q->head_idx]; in ionic_rx_fill()
824 buf_info->page = page_pool_alloc(q->page_pool, in ionic_rx_fill()
840 sg_elem = q->rxq_sgl[q->head_idx].elems; in ionic_rx_fill()
841 for (j = 0; remain_len > 0 && j < q->max_sg_elems; j++, sg_elem++) { in ionic_rx_fill()
846 ionic_rx_put_buf_direct(q, buf_info); in ionic_rx_fill()
851 buf_info->page = page_pool_alloc(q->page_pool, in ionic_rx_fill()
869 if (j < q->max_sg_elems) in ionic_rx_fill()
876 ionic_write_cmb_desc(q, desc); in ionic_rx_fill()
878 ionic_rxq_post(q, false); in ionic_rx_fill()
881 ionic_dbell_ring(q->lif->kern_dbpage, q->hw_type, in ionic_rx_fill()
882 q->dbval | q->head_idx); in ionic_rx_fill()
884 q->dbell_deadline = IONIC_RX_MIN_DOORBELL_DEADLINE; in ionic_rx_fill()
885 q->dbell_jiffies = jiffies; in ionic_rx_fill()
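ionic_rx_fill() posts each refilled descriptor without ringing (ionic_rxq_post(q, false) at line 878), rings the doorbell once for the whole batch (lines 881-882), and then re-arms the doorbell deadline. A simplified batch-then-ring model follows; space_avail() is a generic one-slot-reserved ring formula and only an assumption about what ionic_q_space_avail() computes:

#include <stdio.h>

struct fill_q {
	unsigned int head_idx;
	unsigned int num_descs;	/* power of two */
};

/* one-slot-reserved ring capacity; an assumption, not the driver's exact helper */
static unsigned int space_avail(const struct fill_q *q, unsigned int tail)
{
	return (tail - q->head_idx - 1) & (q->num_descs - 1);
}

int main(void)
{
	struct fill_q q = { .head_idx = 2, .num_descs = 16 };
	unsigned int tail = 0;
	unsigned int n_fill = space_avail(&q, tail);

	for (unsigned int i = 0; i < n_fill; i++)	/* post, ring_dbell=false */
		q.head_idx = (q.head_idx + 1) & (q.num_descs - 1);

	printf("rang doorbell once for %u descriptors, head=%u\n",
	       n_fill, q.head_idx);
	return 0;
}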
888 void ionic_rx_empty(struct ionic_queue *q) in ionic_rx_empty() argument
893 for (i = 0; i < q->num_descs; i++) { in ionic_rx_empty()
894 desc_info = &q->rx_info[i]; in ionic_rx_empty()
896 ionic_rx_put_buf(q, &desc_info->bufs[j]); in ionic_rx_empty()
900 q->head_idx = 0; in ionic_rx_empty()
901 q->tail_idx = 0; in ionic_rx_empty()
914 lif = qcq->q.lif; in ionic_dim_update()
964 ionic_txq_poke_doorbell(&qcq->q); in ionic_tx_napi()
980 struct ionic_queue *q = cq->bound_q; in ionic_rx_cq_service() local
987 xdp_prog = READ_ONCE(q->xdp_prog); in ionic_rx_cq_service()
997 ionic_rx_fill(q, xdp_prog); in ionic_rx_cq_service()
1029 ionic_rxq_poke_doorbell(&qcq->q); in ionic_rx_napi()
1071 ionic_rxq_poke_doorbell(&rxqcq->q); in ionic_txrx_napi()
1073 ionic_txq_poke_doorbell(&txqcq->q); in ionic_txrx_napi()
1079 static dma_addr_t ionic_tx_map_single(struct ionic_queue *q, in ionic_tx_map_single() argument
1082 struct device *dev = q->dev; in ionic_tx_map_single()
1088 dev_name(dev), q->name); in ionic_tx_map_single()
1089 q_to_tx_stats(q)->dma_map_err++; in ionic_tx_map_single()
1095 static dma_addr_t ionic_tx_map_frag(struct ionic_queue *q, in ionic_tx_map_frag() argument
1099 struct device *dev = q->dev; in ionic_tx_map_frag()
1105 dev_name(dev), q->name); in ionic_tx_map_frag()
1106 q_to_tx_stats(q)->dma_map_err++; in ionic_tx_map_frag()
1112 static int ionic_tx_map_skb(struct ionic_queue *q, struct sk_buff *skb, in ionic_tx_map_skb() argument
1116 struct device *dev = q->dev; in ionic_tx_map_skb()
1122 dma_addr = ionic_tx_map_single(q, skb->data, skb_headlen(skb)); in ionic_tx_map_skb()
1132 dma_addr = ionic_tx_map_frag(q, frag, 0, skb_frag_size(frag)); in ionic_tx_map_skb()
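ionic_tx_map_skb() maps the linear head (line 1122) and then each page fragment (line 1132), and any mapping failure has to unwind whatever was already mapped before reporting dma_map_err. A user-space model of that map-then-unwind control flow; map_one()/unmap_one() are stand-ins for the DMA mapping calls, with a simulated failure on the third buffer:

#include <stdbool.h>
#include <stdio.h>

#define NR_FRAGS 4

static bool map_one(int i)
{
	return i != 2;			/* simulate a failure on buffer 2 */
}

static void unmap_one(int i)
{
	printf("unmap buffer %d\n", i);
}

static int map_skb(int nr_frags)
{
	int i;

	for (i = 0; i <= nr_frags; i++) {	/* slot 0 is the linear head */
		if (!map_one(i))
			goto err_unwind;
		printf("mapped buffer %d\n", i);
	}
	return 0;

err_unwind:
	while (--i >= 0)		/* release everything mapped so far */
		unmap_one(i);
	return -1;			/* caller bumps dma_map_err */
}

int main(void)
{
	return map_skb(NR_FRAGS) ? 1 : 0;
}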
1157 static void ionic_tx_desc_unmap_bufs(struct ionic_queue *q, in ionic_tx_desc_unmap_bufs() argument
1161 struct device *dev = q->dev; in ionic_tx_desc_unmap_bufs()
1177 static void ionic_tx_clean(struct ionic_queue *q, in ionic_tx_clean() argument
1182 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_clean()
1183 struct ionic_qcq *qcq = q_to_qcq(q); in ionic_tx_clean()
1187 ionic_xdp_tx_desc_clean(q->partner, desc_info, in_napi); in ionic_tx_clean()
1190 if (unlikely(__netif_subqueue_stopped(q->lif->netdev, q->index))) in ionic_tx_clean()
1191 netif_wake_subqueue(q->lif->netdev, q->index); in ionic_tx_clean()
1196 ionic_tx_desc_unmap_bufs(q, desc_info); in ionic_tx_clean()
1202 if (unlikely(ionic_txq_hwstamp_enabled(q))) { in ionic_tx_clean()
1217 hwts.hwtstamp = ionic_lif_phc_ktime(q->lif, hwstamp); in ionic_tx_clean()
1241 struct ionic_queue *q = cq->bound_q; in ionic_tx_service() local
1252 /* clean the related q entries, there could be in ionic_tx_service()
1253 * several q entries completed for each cq completion in ionic_tx_service()
1256 desc_info = &q->tx_info[q->tail_idx]; in ionic_tx_service()
1258 index = q->tail_idx; in ionic_tx_service()
1259 q->tail_idx = (q->tail_idx + 1) & (q->num_descs - 1); in ionic_tx_service()
1260 ionic_tx_clean(q, desc_info, comp, in_napi); in ionic_tx_service()
1295 struct ionic_queue *q = cq->bound_q; in ionic_tx_cq_service() local
1297 if (likely(!ionic_txq_hwstamp_enabled(q))) in ionic_tx_cq_service()
1298 netif_txq_completed_wake(q_to_ndq(q->lif->netdev, q), in ionic_tx_cq_service()
1300 ionic_q_space_avail(q), in ionic_tx_cq_service()
1317 void ionic_tx_empty(struct ionic_queue *q) in ionic_tx_empty() argument
1324 while (q->head_idx != q->tail_idx) { in ionic_tx_empty()
1325 desc_info = &q->tx_info[q->tail_idx]; in ionic_tx_empty()
1327 q->tail_idx = (q->tail_idx + 1) & (q->num_descs - 1); in ionic_tx_empty()
1328 ionic_tx_clean(q, desc_info, NULL, false); in ionic_tx_empty()
1336 if (likely(!ionic_txq_hwstamp_enabled(q))) { in ionic_tx_empty()
1337 struct netdev_queue *ndq = q_to_ndq(q->lif->netdev, q); in ionic_tx_empty()
1389 static void ionic_tx_tso_post(struct net_device *netdev, struct ionic_queue *q, in ionic_tx_tso_post() argument
1413 ionic_write_cmb_desc(q, desc); in ionic_tx_tso_post()
1417 if (likely(!ionic_txq_hwstamp_enabled(q))) in ionic_tx_tso_post()
1418 netdev_tx_sent_queue(q_to_ndq(netdev, q), skb->len); in ionic_tx_tso_post()
1419 ionic_txq_post(q, false); in ionic_tx_tso_post()
1421 ionic_txq_post(q, done); in ionic_tx_tso_post()
1425 static int ionic_tx_tso(struct net_device *netdev, struct ionic_queue *q, in ionic_tx_tso() argument
1428 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_tso()
1467 desc_info = &q->tx_info[q->head_idx]; in ionic_tx_tso()
1468 if (unlikely(ionic_tx_map_skb(q, skb, desc_info))) in ionic_tx_tso()
1512 desc = &q->txq[q->head_idx]; in ionic_tx_tso()
1513 elem = ionic_tx_sg_elems(q); in ionic_tx_tso()
1531 ionic_tx_tso_post(netdev, q, desc, skb, desc_addr, desc_nsge, in ionic_tx_tso()
1536 desc_info = &q->tx_info[q->head_idx]; in ionic_tx_tso()
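The TSO loop (roughly lines 1467-1536) carves the payload into MSS-sized chunks, taking a fresh descriptor and desc_info for each segment. Its length bookkeeping reduces to the arithmetic below; 9000 bytes and an MSS of 1448 are example numbers only:

#include <stdio.h>

int main(void)
{
	unsigned int total_len = 9000;	/* example payload */
	unsigned int mss = 1448;	/* example MSS */
	unsigned int nsegs = 0;

	while (total_len) {
		unsigned int seg_len = total_len < mss ? total_len : mss;

		total_len -= seg_len;
		printf("desc %u: %u bytes\n", ++nsegs, seg_len);
	}
	/* prints six 1448-byte descriptors and a final 312-byte one */
	return 0;
}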
1548 static void ionic_tx_calc_csum(struct ionic_queue *q, struct sk_buff *skb, in ionic_tx_calc_csum() argument
1551 struct ionic_txq_desc *desc = &q->txq[q->head_idx]; in ionic_tx_calc_csum()
1553 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_calc_csum()
1579 ionic_write_cmb_desc(q, desc); in ionic_tx_calc_csum()
1587 static void ionic_tx_calc_no_csum(struct ionic_queue *q, struct sk_buff *skb, in ionic_tx_calc_no_csum() argument
1590 struct ionic_txq_desc *desc = &q->txq[q->head_idx]; in ionic_tx_calc_no_csum()
1592 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_calc_no_csum()
1618 ionic_write_cmb_desc(q, desc); in ionic_tx_calc_no_csum()
1623 static void ionic_tx_skb_frags(struct ionic_queue *q, struct sk_buff *skb, in ionic_tx_skb_frags() argument
1627 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_skb_frags()
1631 elem = ionic_tx_sg_elems(q); in ionic_tx_skb_frags()
1640 static int ionic_tx(struct net_device *netdev, struct ionic_queue *q, in ionic_tx() argument
1643 struct ionic_tx_desc_info *desc_info = &q->tx_info[q->head_idx]; in ionic_tx()
1644 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx()
1647 if (unlikely(ionic_tx_map_skb(q, skb, desc_info))) in ionic_tx()
1654 ionic_tx_calc_csum(q, skb, desc_info); in ionic_tx()
1656 ionic_tx_calc_no_csum(q, skb, desc_info); in ionic_tx()
1659 ionic_tx_skb_frags(q, skb, desc_info); in ionic_tx()
1665 if (likely(!ionic_txq_hwstamp_enabled(q))) { in ionic_tx()
1666 struct netdev_queue *ndq = q_to_ndq(netdev, q); in ionic_tx()
1668 if (unlikely(!ionic_q_has_space(q, MAX_SKB_FRAGS + 1))) in ionic_tx()
1673 ionic_txq_post(q, ring_dbell); in ionic_tx()
1678 static int ionic_tx_descs_needed(struct ionic_queue *q, struct sk_buff *skb) in ionic_tx_descs_needed() argument
1703 if (unlikely(nr_frags > q->max_sg_elems)) { in ionic_tx_descs_needed()
1737 if (desc_bufs > q->max_sg_elems + 1) { in ionic_tx_descs_needed()
1760 q_to_tx_stats(q)->linearize++; in ionic_tx_descs_needed()
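ionic_tx_descs_needed() compares the skb's buffer count against the hardware SG budget: anything beyond q->max_sg_elems + 1 buffers (the base descriptor plus its SG elements) forces a linearize, which is what the stats bump at line 1760 records. A plain C model of the check, with paraphrased names:

#include <stdbool.h>
#include <stdio.h>

struct txq_model {
	unsigned int max_sg_elems;
	unsigned long linearize;	/* stats counter */
};

static bool needs_linearize(struct txq_model *q, unsigned int desc_bufs)
{
	if (desc_bufs > q->max_sg_elems + 1) {	/* base desc + SG list */
		q->linearize++;
		return true;
	}
	return false;
}

int main(void)
{
	struct txq_model q = { .max_sg_elems = 8 };

	printf("%d\n", needs_linearize(&q, 5));		/* 0: fits as-is */
	printf("%d\n", needs_linearize(&q, 12));	/* 1: must linearize */
	printf("linearized %lu time(s)\n", q.linearize);
	return 0;
}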
1770 struct ionic_queue *q; in ionic_start_hwstamp_xmit() local
1778 q = &lif->hwstamp_txq->q; in ionic_start_hwstamp_xmit()
1779 ndescs = ionic_tx_descs_needed(q, skb); in ionic_start_hwstamp_xmit()
1783 if (unlikely(!ionic_q_has_space(q, ndescs))) in ionic_start_hwstamp_xmit()
1788 err = ionic_tx_tso(netdev, q, skb); in ionic_start_hwstamp_xmit()
1790 err = ionic_tx(netdev, q, skb); in ionic_start_hwstamp_xmit()
1798 q->drop++; in ionic_start_hwstamp_xmit()
1807 struct ionic_queue *q; in ionic_start_xmit() local
1822 q = &lif->txqcqs[queue_index]->q; in ionic_start_xmit()
1824 ndescs = ionic_tx_descs_needed(q, skb); in ionic_start_xmit()
1828 if (!netif_txq_maybe_stop(q_to_ndq(netdev, q), in ionic_start_xmit()
1829 ionic_q_space_avail(q), in ionic_start_xmit()
1834 err = ionic_tx_tso(netdev, q, skb); in ionic_start_xmit()
1836 err = ionic_tx(netdev, q, skb); in ionic_start_xmit()
1844 q->drop++; in ionic_start_xmit()
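Taken together, the ionic_start_xmit() matches show the transmit shape: compute the descriptor budget, stop the subqueue when space may run out (netif_txq_maybe_stop, lines 1828-1829), dispatch to the TSO or plain path, and count q->drop on error. A compact control-flow model with stand-in helpers that always succeed:

#include <stdbool.h>
#include <stdio.h>

enum { TX_OK, TX_BUSY };

static int tx_tso(void)   { return 0; }	/* stand-in for ionic_tx_tso() */
static int tx_plain(void) { return 0; }	/* stand-in for ionic_tx() */

static int start_xmit(unsigned int ndescs, unsigned int space, bool is_tso,
		      unsigned long *drops)
{
	if (ndescs > space)
		return TX_BUSY;		/* subqueue stopped; the stack retries */

	if (is_tso ? tx_tso() : tx_plain())
		(*drops)++;		/* q->drop++; the skb was consumed */

	return TX_OK;
}

int main(void)
{
	unsigned long drops = 0;

	printf("%d\n", start_xmit(3, 8, false, &drops));	/* 0: TX_OK */
	printf("%d\n", start_xmit(9, 8, true, &drops));		/* 1: TX_BUSY */
	printf("drops=%lu\n", drops);
	return 0;
}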