Lines Matching refs:rxq
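
These are cross-reference hits for the rxq symbol. The leading number on each match is the line in the source file, and the trailing annotation names the enclosing function, with local/argument marking how rxq is declared there. The matches fall into natural groups, one per function, commented on below.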
1109 struct sge_rxq *rxq; in t4_setup_vi_queues() local
1166 for_each_rxq(vi, i, rxq) { in t4_setup_vi_queues()
1167 rc = alloc_rxq(vi, rxq, i, intr_idx, maxp); in t4_setup_vi_queues()
1217 struct sge_rxq *rxq; in t4_teardown_vi_queues() local
1260 for_each_rxq(vi, i, rxq) { in t4_teardown_vi_queues()
1261 free_rxq(vi, rxq); in t4_teardown_vi_queues()
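
The first two groups show the per-VI queue lifecycle: t4_setup_vi_queues() walks every rx queue with for_each_rxq() and calls alloc_rxq() on each, and t4_teardown_vi_queues() walks the same set calling free_rxq(). A minimal userland sketch of that iterate-allocate / iterate-free pattern follows; the types and the macro body are simplified stand-ins for the driver's, not the real definitions.

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-ins for the driver's structures. */
struct sge_rxq { int allocated; };
struct vi_info { int nrxq; struct sge_rxq *rxq; };

/* Shape of the driver's iterator: an index plus a cursor over the VI's rxqs. */
#define for_each_rxq(vi, iter, q) \
	for ((q) = &(vi)->rxq[0], (iter) = 0; (iter) < (vi)->nrxq; ++(iter), ++(q))

static int
alloc_rxq(struct vi_info *vi, struct sge_rxq *q, int idx)
{
	(void)vi;
	q->allocated = 1;
	printf("alloc rxq %d\n", idx);
	return (0);
}

static void
free_rxq(struct vi_info *vi, struct sge_rxq *q)
{
	(void)vi;
	q->allocated = 0;
}

int
main(void)
{
	struct vi_info vi = { 4, calloc(4, sizeof(struct sge_rxq)) };
	struct sge_rxq *rxq;
	int i, rc;

	/* t4_setup_vi_queues() style: allocate every rx queue, stop on error. */
	for_each_rxq(&vi, i, rxq) {
		rc = alloc_rxq(&vi, rxq, i);
		if (rc != 0)
			break;
	}

	/* t4_teardown_vi_queues() style: release them all. */
	for_each_rxq(&vi, i, rxq)
		free_rxq(&vi, rxq);

	free(vi.rxq);
	return (0);
}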
1374 MPASS(irq->rxq != NULL); in t4_vi_intr()
1375 t4_intr(irq->rxq); in t4_vi_intr()
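
t4_vi_intr() is the VI-level interrupt handler: the MPASS confirms the irq is bound to an rx queue, and the work is simply forwarded to t4_intr() on that queue.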
1572 struct sge_rxq *rxq = iq_to_rxq(iq); in service_iq_fl() local
1583 struct lro_ctrl *lro = &rxq->lro; in service_iq_fl()
1608 fl = &rxq->fl; in service_iq_fl()
1633 if (__predict_true(eth_rx(sc, rxq, d, lq) == 0)) in service_iq_fl()
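
service_iq_fl() never receives a struct sge_rxq directly; it gets the embedded iq and recovers the enclosing queue with iq_to_rxq(), then reaches the LRO state and the freelist through it before handing descriptors to eth_rx(). A compileable sketch of that container-of idiom, with deliberately minimal stand-in types:

#include <assert.h>
#include <stddef.h>

struct sge_iq { int cntxt_id; };
struct sge_fl { int needed; };
struct lro_ctrl { int entries; };

struct sge_rxq {
	struct sge_iq iq;	/* embedded, so the pointer math below works */
	struct sge_fl fl;
	struct lro_ctrl lro;
};

/* Classic container-of: back up from a member to its enclosing struct. */
#define container_of(p, type, member) \
	((type *)((char *)(p) - offsetof(type, member)))

static inline struct sge_rxq *
iq_to_rxq(struct sge_iq *iq)
{
	return (container_of(iq, struct sge_rxq, iq));
}

int
main(void)
{
	struct sge_rxq rxq = {{0}, {0}, {0}};
	struct sge_iq *iq = &rxq.iq;
	struct sge_rxq *r = iq_to_rxq(iq);

	/* service_iq_fl() style: from the iq, reach the queue's fl and lro. */
	assert(&r->fl == &rxq.fl);
	assert(&r->lro == &rxq.lro);
	return (0);
}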
1933 eth_rx(struct adapter *sc, struct sge_rxq *rxq, const struct iq_desc *d, in eth_rx() argument
1937 if_t ifp = rxq->ifp; in eth_rx()
1938 struct sge_fl *fl = &rxq->fl; in eth_rx()
1942 struct lro_ctrl *lro = &rxq->lro; in eth_rx()
2045 rxq->rxcsum++; in eth_rx()
2084 rxq->vxlan_rxcsum++; in eth_rx()
2098 rxq->vlan_extraction++; in eth_rx()
2102 if (rxq->iq.flags & IQ_RX_TIMESTAMP) { in eth_rx()
2117 if (rxq->iq.flags & IQ_LRO_ENABLED && tnl_type == 0 && in eth_rx()
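
Inside eth_rx() the matches fall into two groups: per-queue counters bumped as hardware offloads fire (rxcsum, vxlan_rxcsum, vlan_extraction), and iq flags (IQ_RX_TIMESTAMP, IQ_LRO_ENABLED) that gate optional per-packet work. Note that the match at line 2117 also requires tnl_type == 0, i.e. LRO is bypassed for encapsulated traffic. A small model of that flag-gated accounting, with hypothetical flag values and a boiled-down signature:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical flag values; the driver defines its own. */
#define IQ_LRO_ENABLED	(1 << 0)
#define IQ_RX_TIMESTAMP	(1 << 1)

struct rxq_model {
	uint32_t iq_flags;
	uint64_t rxcsum, vxlan_rxcsum, vlan_extraction;
};

static void
eth_rx_model(struct rxq_model *q, int hw_csum, int tunneled, int vlan_tag)
{
	if (hw_csum) {
		q->rxcsum++;			/* hw verified the checksum */
		if (tunneled)
			q->vxlan_rxcsum++;	/* inner checksum too */
	}
	if (vlan_tag)
		q->vlan_extraction++;		/* hw stripped the 802.1Q tag */
	if (q->iq_flags & IQ_RX_TIMESTAMP) {
		/* attach the hardware rx timestamp to the packet */
	}
	if ((q->iq_flags & IQ_LRO_ENABLED) && !tunneled) {
		/* feed the packet to LRO instead of the stack directly */
	}
}

int
main(void)
{
	struct rxq_model q = { IQ_LRO_ENABLED, 0, 0, 0 };

	eth_rx_model(&q, 1, 0, 1);
	printf("rxcsum %llu vlan_extraction %llu\n",
	    (unsigned long long)q.rxcsum,
	    (unsigned long long)q.vlan_extraction);
	return (0);
}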
2255 struct sge_rxq *rxq; in t4_update_fl_bufsize() local
2263 for_each_rxq(vi, i, rxq) { in t4_update_fl_bufsize()
2264 fl = &rxq->fl; in t4_update_fl_bufsize()
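
t4_update_fl_bufsize() revisits each rx queue's freelist to recompute its buffer size; given that alloc_rxq() takes the same maxp (max payload) parameter, this is presumably the path taken when the maximum frame size changes, e.g. on an MTU update.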
3955 alloc_rxq(struct vi_info *vi, struct sge_rxq *rxq, int idx, int intr_idx, in alloc_rxq() argument
3964 if (!(rxq->iq.flags & IQ_SW_ALLOCATED)) { in alloc_rxq()
3965 MPASS(!(rxq->iq.flags & IQ_HW_ALLOCATED)); in alloc_rxq()
3967 rc = tcp_lro_init_args(&rxq->lro, ifp, lro_entries, lro_mbufs); in alloc_rxq()
3970 MPASS(rxq->lro.ifp == ifp); /* also indicates LRO init'ed */ in alloc_rxq()
3972 rxq->ifp = ifp; in alloc_rxq()
3979 init_iq(&rxq->iq, sc, vi->tmr_idx, vi->pktc_idx, vi->qsize_rxq, in alloc_rxq()
3983 rxq->iq.flags |= IQ_LRO_ENABLED; in alloc_rxq()
3986 rxq->iq.flags |= IQ_RX_TIMESTAMP; in alloc_rxq()
3989 init_fl(sc, &rxq->fl, vi->qsize_rxq / 8, maxp, name); in alloc_rxq()
3990 rc = alloc_iq_fl(vi, &rxq->iq, &rxq->fl, &vi->ctx, oid); in alloc_rxq()
3995 tcp_lro_free(&rxq->lro); in alloc_rxq()
3996 rxq->lro.ifp = NULL; in alloc_rxq()
4000 MPASS(rxq->iq.flags & IQ_SW_ALLOCATED); in alloc_rxq()
4001 add_rxq_sysctls(&vi->ctx, oid, rxq); in alloc_rxq()
4004 if (!(rxq->iq.flags & IQ_HW_ALLOCATED)) { in alloc_rxq()
4005 MPASS(rxq->iq.flags & IQ_SW_ALLOCATED); in alloc_rxq()
4006 rc = alloc_iq_fl_hwq(vi, &rxq->iq, &rxq->fl); in alloc_rxq()
4011 MPASS(rxq->iq.flags & IQ_HW_ALLOCATED); in alloc_rxq()
4014 sc->sge.iq_base = rxq->iq.abs_id - rxq->iq.cntxt_id; in alloc_rxq()
4016 KASSERT(rxq->iq.cntxt_id + sc->sge.iq_base == rxq->iq.abs_id, in alloc_rxq()
4025 FL_LOCK(&rxq->fl); in alloc_rxq()
4026 refill_fl(sc, &rxq->fl, 128); in alloc_rxq()
4027 FL_UNLOCK(&rxq->fl); in alloc_rxq()
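
alloc_rxq() is written to be idempotent across two phases, which the MPASS lines document: software state (the tcp_lro context, init_iq/init_fl, sysctl nodes) is created once and latched with IQ_SW_ALLOCATED, then the hardware queue contexts are created under IQ_HW_ALLOCATED, after which the freelist is primed with refill_fl(sc, &rxq->fl, 128) under FL_LOCK. A userland model of that two-flag state machine, with assert() standing in for MPASS() and the real work elided:

#include <assert.h>
#include <stdint.h>

#define IQ_SW_ALLOCATED	(1 << 0)	/* hypothetical values */
#define IQ_HW_ALLOCATED	(1 << 1)

struct iq_model { uint32_t flags; };

static int
alloc_rxq_model(struct iq_model *iq)
{
	if (!(iq->flags & IQ_SW_ALLOCATED)) {
		/* Phase 1: LRO context, iq/fl software init, sysctls. */
		assert(!(iq->flags & IQ_HW_ALLOCATED));	/* hw never precedes sw */
		iq->flags |= IQ_SW_ALLOCATED;
	}
	if (!(iq->flags & IQ_HW_ALLOCATED)) {
		/* Phase 2: hardware contexts, then prime the freelist. */
		assert(iq->flags & IQ_SW_ALLOCATED);
		iq->flags |= IQ_HW_ALLOCATED;
	}
	return (0);
}

int
main(void)
{
	struct iq_model iq = { 0 };

	assert(alloc_rxq_model(&iq) == 0);
	/* A second call is a no-op: both phases are already latched. */
	assert(alloc_rxq_model(&iq) == 0);
	assert(iq.flags == (IQ_SW_ALLOCATED | IQ_HW_ALLOCATED));
	return (0);
}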
4037 free_rxq(struct vi_info *vi, struct sge_rxq *rxq) in free_rxq() argument
4039 if (rxq->iq.flags & IQ_HW_ALLOCATED) { in free_rxq()
4040 MPASS(rxq->iq.flags & IQ_SW_ALLOCATED); in free_rxq()
4041 free_iq_fl_hwq(vi->adapter, &rxq->iq, &rxq->fl); in free_rxq()
4042 MPASS(!(rxq->iq.flags & IQ_HW_ALLOCATED)); in free_rxq()
4045 if (rxq->iq.flags & IQ_SW_ALLOCATED) { in free_rxq()
4046 MPASS(!(rxq->iq.flags & IQ_HW_ALLOCATED)); in free_rxq()
4048 tcp_lro_free(&rxq->lro); in free_rxq()
4050 free_iq_fl(vi->adapter, &rxq->iq, &rxq->fl); in free_rxq()
4051 MPASS(!(rxq->iq.flags & IQ_SW_ALLOCATED)); in free_rxq()
4052 bzero(rxq, sizeof(*rxq)); in free_rxq()
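
free_rxq() undoes this in reverse: the hardware context goes first (and only if the software state exists, per the MPASS pairing), then the LRO context and iq/fl software state, and finally bzero() wipes the structure so a later alloc_rxq() starts from a clean slate.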
4058 struct sge_rxq *rxq) in add_rxq_sysctls() argument
4068 &rxq->lro.lro_queued, 0, NULL); in add_rxq_sysctls()
4070 &rxq->lro.lro_flushed, 0, NULL); in add_rxq_sysctls()
4073 &rxq->rxcsum, "# of times hardware assisted with checksum"); in add_rxq_sysctls()
4075 &rxq->vlan_extraction, "# of times hardware extracted 802.1Q tag"); in add_rxq_sysctls()
4077 &rxq->vxlan_rxcsum, in add_rxq_sysctls()
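
add_rxq_sysctls() is where the counters incremented in eth_rx() become visible: it publishes rxcsum, vlan_extraction, and vxlan_rxcsum alongside the LRO queued/flushed totals, one sysctl node per rx queue.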
4616 &sc->sge.rxq[iqidx].iq, name); in alloc_txq()
4837 &sc->sge.rxq[iqidx].iq, name); in alloc_ofld_txq()
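
The last two matches are from the tx side: alloc_txq() and alloc_ofld_txq() pass &sc->sge.rxq[iqidx].iq when creating an egress queue, tying it to an rx queue's ingress queue, presumably so the egress queue's update notifications are delivered on that iq; the listing itself only shows the association.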