Lines matching refs:fl: every reference to the free-list pointer fl (struct sge_fl) in this file, listed with its source line number and enclosing function.
1561 move_to_next_rxbuf(struct sge_fl *fl) in move_to_next_rxbuf() argument
1564 fl->rx_offset = 0; in move_to_next_rxbuf()
1565 if (__predict_false((++fl->cidx & 7) == 0)) { in move_to_next_rxbuf()
1566 uint16_t cidx = fl->cidx >> 3; in move_to_next_rxbuf()
1568 if (__predict_false(cidx == fl->sidx)) in move_to_next_rxbuf()
1569 fl->cidx = cidx = 0; in move_to_next_rxbuf()
1570 fl->hw_cidx = cidx; in move_to_next_rxbuf()
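
The fragment above is the consumer side of the free-list ring: fl->cidx counts individual buffers, but the hardware tracks the ring in slots of eight buffer pointers each (a free-list descriptor is a 64-bit bus address, so eight of them fill one 64-byte EQ_ESIZE entry; the sdesc array in alloc_iq_fl further down is sized fl->sidx * 8 for the same reason). fl->hw_cidx therefore advances only when cidx crosses an eight-buffer group (the & 7 / >> 3 arithmetic) and wraps to zero at fl->sidx. A minimal userland sketch of that index arithmetic, with a toy struct and ring size standing in for struct sge_fl:

#include <stdint.h>
#include <stdio.h>

struct toy_fl {			/* hypothetical stand-in for struct sge_fl */
	uint32_t cidx;		/* software index, one per buffer */
	uint16_t hw_cidx;	/* hardware index, one per 8 buffers */
	uint16_t sidx;		/* ring size in hardware descriptors */
};

static void
toy_move_to_next_rxbuf(struct toy_fl *fl)
{
	if ((++fl->cidx & 7) == 0) {		/* crossed an 8-buffer group */
		uint16_t cidx = fl->cidx >> 3;

		if (cidx == fl->sidx)		/* wrap at end of ring */
			fl->cidx = cidx = 0;
		fl->hw_cidx = cidx;
	}
}

int
main(void)
{
	struct toy_fl fl = { .sidx = 4 };	/* 4 hw slots = 32 buffers */

	for (int i = 0; i < 40; i++)
		toy_move_to_next_rxbuf(&fl);
	printf("cidx=%u hw_cidx=%u\n", fl.cidx, fl.hw_cidx);	/* 8, 1 */
	return (0);
}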
1581 struct sge_fl *fl; in service_iq_fl() local
1616 fl = &rxq->fl; in service_iq_fl()
1617 fl_hw_cidx = fl->hw_cidx; /* stable snapshot */ in service_iq_fl()
1629 if (fl->rx_offset > 0) in service_iq_fl()
1630 move_to_next_rxbuf(fl); in service_iq_fl()
1633 if (IDXDIFF(fl->hw_cidx, fl_hw_cidx, fl->sidx) > 4) { in service_iq_fl()
1634 FL_LOCK(fl); in service_iq_fl()
1635 refill_fl(sc, fl, 64); in service_iq_fl()
1636 FL_UNLOCK(fl); in service_iq_fl()
1637 fl_hw_cidx = fl->hw_cidx; in service_iq_fl()
1645 m0 = get_fl_payload(sc, fl, lq); in service_iq_fl()
1722 FL_LOCK(fl); in service_iq_fl()
1723 starved = refill_fl(sc, fl, 64); in service_iq_fl()
1724 FL_UNLOCK(fl); in service_iq_fl()
1726 add_fl_to_sfl(sc, fl); in service_iq_fl()
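
service_iq_fl takes a stable snapshot of fl->hw_cidx before processing and refills as soon as more than four hardware descriptors' worth of buffers have been freed, holding FL_LOCK only for the refill itself; if the final refill reports starvation, the freelist is handed to the adapter's starved list via add_fl_to_sfl. A sketch of the threshold test, assuming IDXDIFF measures forward distance on a ring of sidx slots (the helper below is a re-derivation for illustration, not the driver's header):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative re-derivation of the driver's circular-distance helper. */
#define	IDXDIFF(head, tail, wrap) \
	((head) >= (tail) ? (head) - (tail) : (wrap) - (tail) + (head))

/*
 * True once more than four hardware descriptors' worth of buffers have
 * been consumed since the snapshot: time to take FL_LOCK and refill
 * before the ring runs dry.
 */
static bool
refill_due(uint16_t hw_cidx, uint16_t snapshot, uint16_t sidx)
{
	return (IDXDIFF(hw_cidx, snapshot, sidx) > 4);
}

int
main(void)
{
	/* hw_cidx wrapped past the end of a 1023-slot ring. */
	printf("%d\n", refill_due(3, 1020, 1023));	/* 1: distance is 6 */
	return (0);
}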
1755 get_scatter_segment(struct adapter *sc, struct sge_fl *fl, int fr_offset, in get_scatter_segment() argument
1759 struct fl_sdesc *sd = &fl->sdesc[fl->cidx]; in get_scatter_segment()
1765 if (fl->flags & FL_BUF_PACKING) { in get_scatter_segment()
1768 blen = rxb->size2 - fl->rx_offset; /* max possible in this buf */ in get_scatter_segment()
1770 payload = sd->cl + fl->rx_offset; in get_scatter_segment()
1773 pad = roundup2(l, fl->buf_boundary) - l; in get_scatter_segment()
1774 if (fl->rx_offset + len + pad < rxb->size2) in get_scatter_segment()
1776 MPASS(fl->rx_offset + blen <= rxb->size2); in get_scatter_segment()
1778 MPASS(fl->rx_offset == 0); /* not packing */ in get_scatter_segment()
1800 if (fl->flags & FL_BUF_PACKING) { in get_scatter_segment()
1801 fl->rx_offset += blen; in get_scatter_segment()
1802 MPASS(fl->rx_offset <= rxb->size2); in get_scatter_segment()
1803 if (fl->rx_offset < rxb->size2) in get_scatter_segment()
1806 } else if (fl->flags & FL_BUF_PACKING) { in get_scatter_segment()
1817 fl->rx_offset += blen; in get_scatter_segment()
1818 MPASS(fl->rx_offset <= rxb->size2); in get_scatter_segment()
1819 if (fl->rx_offset < rxb->size2) in get_scatter_segment()
1826 move_to_next_rxbuf(fl); in get_scatter_segment()
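
In packing mode (FL_BUF_PACKING) several frames share one cluster: a segment starts at sd->cl + fl->rx_offset, and the consumed length is padded so the next frame begins on fl->buf_boundary; rx_offset only advances when the padded end still leaves room below rxb->size2, otherwise move_to_next_rxbuf retires the buffer. The same arithmetic drives skip_scatter_segment further down. A sketch of the pad decision, assuming a power-of-two boundary as roundup2 requires (names here are hypothetical):

#include <stdio.h>

#define	roundup2(x, y)	(((x) + ((y) - 1)) & (~((y) - 1)))

/*
 * Bytes consumed in the current cluster for a 'len'-byte segment: pad to
 * the pack boundary so the next frame starts aligned, but only if the
 * padded end still leaves room in the cluster (size2).  Returns the
 * consumed length, or -1 when the buffer must be retired instead
 * (illustrative; the driver keeps the unpadded maximum in that case).
 */
static int
packed_consume(int rx_offset, int fr_offset, int len, int boundary, int size2)
{
	int l = fr_offset + len;
	int pad = roundup2(l, boundary) - l;

	if (rx_offset + len + pad < size2)
		return (len + pad);
	return (-1);
}

int
main(void)
{
	/* 100-byte frame at offset 0, 64-byte boundary, 2048-byte cluster. */
	printf("%d\n", packed_consume(0, 0, 100, 64, 2048));	/* 128 */
	return (0);
}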
1832 get_fl_payload(struct adapter *sc, struct sge_fl *fl, const u_int plen) in get_fl_payload() argument
1837 if (__predict_false(fl->flags & FL_BUF_RESUME)) { in get_fl_payload()
1838 M_ASSERTPKTHDR(fl->m0); in get_fl_payload()
1839 MPASS(fl->m0->m_pkthdr.len == plen); in get_fl_payload()
1840 MPASS(fl->remaining < plen); in get_fl_payload()
1842 m0 = fl->m0; in get_fl_payload()
1843 pnext = fl->pnext; in get_fl_payload()
1844 remaining = fl->remaining; in get_fl_payload()
1845 fl->flags &= ~FL_BUF_RESUME; in get_fl_payload()
1854 m0 = get_scatter_segment(sc, fl, 0, plen); in get_fl_payload()
1861 MPASS(fl->rx_offset == 0); in get_fl_payload()
1862 m = get_scatter_segment(sc, fl, plen - remaining, remaining); in get_fl_payload()
1864 fl->m0 = m0; in get_fl_payload()
1865 fl->pnext = pnext; in get_fl_payload()
1866 fl->remaining = remaining; in get_fl_payload()
1867 fl->flags |= FL_BUF_RESUME; in get_fl_payload()
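
get_fl_payload gathers one frame of plen bytes from consecutive scatter segments; when the hardware has not yet delivered enough buffers, the partially assembled chain is parked in the freelist itself (fl->m0, fl->pnext, fl->remaining) and FL_BUF_RESUME is set so the next call picks up exactly where this one stopped. A condensed sketch of that suspend/resume shape, with a toy segment type standing in for mbufs and a callback standing in for get_scatter_segment (the first segment is assumed available, unlike in the driver):

#include <stdbool.h>
#include <stddef.h>

struct seg { struct seg *next; };

struct toy_fl {
	bool resume;		/* FL_BUF_RESUME analogue */
	struct seg *m0;		/* head of the partial chain */
	struct seg **pnext;	/* where the next segment links in */
	int remaining;		/* payload bytes still to gather */
};

/* Gather plen bytes; NULL means "parked, call again when buffers arrive". */
static struct seg *
toy_get_payload(struct toy_fl *fl, int plen, struct seg *(*next_seg)(int *))
{
	struct seg *m0, **pnext, *m;
	int remaining, len;

	if (fl->resume) {			/* continue a parked frame */
		m0 = fl->m0;
		pnext = fl->pnext;
		remaining = fl->remaining;
		fl->resume = false;
	} else {
		len = plen;
		m0 = next_seg(&len);
		pnext = &m0->next;
		remaining = plen - len;
	}
	while (remaining > 0) {
		len = remaining;
		m = next_seg(&len);
		if (m == NULL) {		/* out of buffers: park */
			fl->m0 = m0;
			fl->pnext = pnext;
			fl->remaining = remaining;
			fl->resume = true;
			return (NULL);
		}
		*pnext = m;
		pnext = &m->next;
		remaining -= len;
	}
	*pnext = NULL;
	return (m0);
}

/* Demo: two segments available at first, the rest arrive later. */
static struct seg pool[8];
static int used, avail = 2;

static struct seg *
fake_seg(int *lenp)
{
	if (used == avail)
		return (NULL);
	if (*lenp > 100)
		*lenp = 100;		/* each toy segment holds 100 bytes */
	return (&pool[used++]);
}

int
main(void)
{
	struct toy_fl fl = { false, NULL, NULL, 0 };

	if (toy_get_payload(&fl, 250, fake_seg) == NULL) {
		avail = 8;				/* buffers arrive */
		(void)toy_get_payload(&fl, 250, fake_seg);	/* completes */
	}
	return (0);
}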
1881 skip_scatter_segment(struct adapter *sc, struct sge_fl *fl, int fr_offset, in skip_scatter_segment() argument
1884 struct fl_sdesc *sd = &fl->sdesc[fl->cidx]; in skip_scatter_segment()
1888 if (fl->flags & FL_BUF_PACKING) { in skip_scatter_segment()
1891 blen = rxb->size2 - fl->rx_offset; /* max possible in this buf */ in skip_scatter_segment()
1895 pad = roundup2(l, fl->buf_boundary) - l; in skip_scatter_segment()
1896 if (fl->rx_offset + len + pad < rxb->size2) in skip_scatter_segment()
1898 fl->rx_offset += blen; in skip_scatter_segment()
1899 MPASS(fl->rx_offset <= rxb->size2); in skip_scatter_segment()
1900 if (fl->rx_offset < rxb->size2) in skip_scatter_segment()
1903 MPASS(fl->rx_offset == 0); /* not packing */ in skip_scatter_segment()
1907 move_to_next_rxbuf(fl); in skip_scatter_segment()
1912 skip_fl_payload(struct adapter *sc, struct sge_fl *fl, int plen) in skip_fl_payload() argument
1919 len = skip_scatter_segment(sc, fl, fr_offset, remaining); in skip_fl_payload()
1926 get_segment_len(struct adapter *sc, struct sge_fl *fl, int plen) in get_segment_len() argument
1929 struct fl_sdesc *sd = &fl->sdesc[fl->cidx]; in get_segment_len()
1932 if (fl->flags & FL_BUF_PACKING) in get_segment_len()
1933 len = rxb->size2 - fl->rx_offset; in get_segment_len()
1946 struct sge_fl *fl = &rxq->fl; in eth_rx() local
1990 __predict_true((fl->flags & FL_BUF_RESUME) == 0)) { in eth_rx()
1991 struct fl_sdesc *sd = &fl->sdesc[fl->cidx]; in eth_rx()
1995 slen = get_segment_len(sc, fl, plen) - in eth_rx()
1997 frame = sd->cl + fl->rx_offset + sc->params.sge.fl_pktshift; in eth_rx()
2002 skip_fl_payload(sc, fl, plen); in eth_rx()
2006 skip_fl_payload(sc, fl, plen); in eth_rx()
2011 m0 = get_fl_payload(sc, fl, plen); in eth_rx()
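
eth_rx appears to try a copy-avoiding fast path first: with no resume pending and the whole frame (past fl_pktshift) sitting in the current cluster, it can hand out the raw pointer sd->cl + fl->rx_offset + fl_pktshift and then merely skip_fl_payload, falling back to get_fl_payload's mbuf assembly otherwise. A guard-then-skip sketch under those assumptions; every hook below is a hypothetical stand-in, not a driver function:

#include <stdbool.h>
#include <stddef.h>

/* Hypothetical hooks standing in for the driver's fast-path consumer. */
static bool fastpath_rx(const char *f, int len) { (void)f; (void)len; return (false); }
static void skip_payload(int plen) { (void)plen; }
static void *build_chain(int plen) { (void)plen; return (NULL); }

/*
 * Hand out a zero-copy pointer when the whole frame sits in the current
 * cluster and no partial chain is parked; otherwise assemble mbufs.
 */
static void *
rx_dispatch(char *cl, int rx_offset, int pktshift, int plen, int seg_len,
    bool resume_pending)
{
	if (!resume_pending && seg_len >= plen - pktshift) {
		if (fastpath_rx(cl + rx_offset + pktshift, plen - pktshift)) {
			skip_payload(plen);	/* retire buffers, no mbufs */
			return (NULL);
		}
	}
	return (build_chain(plen));		/* ordinary assembly */
}

int
main(void)
{
	char cl[2048];

	(void)rx_dispatch(cl, 0, 2, 128, 126, false);
	return (0);
}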
2267 struct sge_fl *fl; in t4_update_fl_bufsize() local
2272 fl = &rxq->fl; in t4_update_fl_bufsize()
2274 FL_LOCK(fl); in t4_update_fl_bufsize()
2275 fl->zidx = find_refill_source(sc, maxp, in t4_update_fl_bufsize()
2276 fl->flags & FL_BUF_PACKING); in t4_update_fl_bufsize()
2277 FL_UNLOCK(fl); in t4_update_fl_bufsize()
2282 fl = &ofld_rxq->fl; in t4_update_fl_bufsize()
2284 FL_LOCK(fl); in t4_update_fl_bufsize()
2285 fl->zidx = find_refill_source(sc, maxp, in t4_update_fl_bufsize()
2286 fl->flags & FL_BUF_PACKING); in t4_update_fl_bufsize()
2287 FL_UNLOCK(fl); in t4_update_fl_bufsize()
3382 init_fl(struct adapter *sc, struct sge_fl *fl, int qsize, int maxp, char *name) in init_fl() argument
3386 fl->qsize = qsize; in init_fl()
3387 fl->sidx = qsize - sc->params.sge.spg_len / EQ_ESIZE; in init_fl()
3388 strlcpy(fl->lockname, name, sizeof(fl->lockname)); in init_fl()
3389 mtx_init(&fl->fl_lock, fl->lockname, NULL, MTX_DEF); in init_fl()
3393 fl->flags |= FL_BUF_PACKING; in init_fl()
3394 fl->zidx = find_refill_source(sc, maxp, fl->flags & FL_BUF_PACKING); in init_fl()
3395 fl->safe_zidx = sc->sge.safe_zidx; in init_fl()
3396 if (fl->flags & FL_BUF_PACKING) { in init_fl()
3397 fl->lowat = roundup2(sp->fl_starve_threshold2, 8); in init_fl()
3398 fl->buf_boundary = sp->pack_boundary; in init_fl()
3400 fl->lowat = roundup2(sp->fl_starve_threshold, 8); in init_fl()
3401 fl->buf_boundary = 16; in init_fl()
3403 if (fl_pad && fl->buf_boundary < sp->pad_boundary) in init_fl()
3404 fl->buf_boundary = sp->pad_boundary; in init_fl()
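
init_fl carves the usable ring out of the requested qsize: the hardware status page occupies the tail, so sidx = qsize - spg_len / EQ_ESIZE hardware slots remain for buffers, and the low-water mark is the starve threshold rounded up to a whole slot of eight buffers. A worked example; EQ_ESIZE is 64 in this driver, while the status-page length and starve threshold below are assumed values for illustration only:

#include <stdio.h>

#define	EQ_ESIZE	64			/* descriptor slot size (bytes) */
#define	roundup2(x, y)	(((x) + ((y) - 1)) & (~((y) - 1)))

int
main(void)
{
	int qsize = 1024;			/* requested ring entries */
	int spg_len = 64;			/* assumed status page size */
	int starve = 18;			/* assumed starve threshold */

	int sidx = qsize - spg_len / EQ_ESIZE;	/* 1023 usable hw slots */
	int nbuf = sidx * 8;			/* 8184 buffer pointers */
	int lowat = roundup2(starve, 8);	/* 24: whole hw slots only */

	printf("sidx=%d buffers=%d lowat=%d\n", sidx, nbuf, lowat);
	return (0);
}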
3476 alloc_iq_fl(struct vi_info *vi, struct sge_iq *iq, struct sge_fl *fl, in alloc_iq_fl() argument
3491 if (fl) { in alloc_iq_fl()
3492 len = fl->qsize * EQ_ESIZE; in alloc_iq_fl()
3493 rc = alloc_ring(sc, len, &fl->desc_tag, &fl->desc_map, in alloc_iq_fl()
3494 &fl->ba, (void **)&fl->desc); in alloc_iq_fl()
3502 fl->sdesc = malloc(fl->sidx * 8 * sizeof(struct fl_sdesc), in alloc_iq_fl()
3505 add_fl_sysctls(sc, ctx, oid, fl); in alloc_iq_fl()
3519 free_iq_fl(struct adapter *sc, struct sge_iq *iq, struct sge_fl *fl) in free_iq_fl() argument
3523 if (fl) { in free_iq_fl()
3525 free_ring(sc, fl->desc_tag, fl->desc_map, fl->ba, fl->desc); in free_iq_fl()
3526 free_fl_buffers(sc, fl); in free_iq_fl()
3527 free(fl->sdesc, M_CXGBE); in free_iq_fl()
3528 mtx_destroy(&fl->fl_lock); in free_iq_fl()
3529 bzero(fl, sizeof(*fl)); in free_iq_fl()
3543 alloc_iq_fl_hwq(struct vi_info *vi, struct sge_iq *iq, struct sge_fl *fl) in alloc_iq_fl_hwq() argument
3592 if (fl) { in alloc_iq_fl_hwq()
3593 bzero(fl->desc, fl->sidx * EQ_ESIZE + sc->params.sge.spg_len); in alloc_iq_fl_hwq()
3598 (fl->flags & FL_BUF_PACKING ? F_FW_IQ_CMD_FL0PACKEN : in alloc_iq_fl_hwq()
3611 c.fl0size = htobe16(fl->qsize); in alloc_iq_fl_hwq()
3612 c.fl0addr = htobe64(fl->ba); in alloc_iq_fl_hwq()
3633 if (fl) { in alloc_iq_fl_hwq()
3638 MPASS(!(fl->flags & FL_BUF_RESUME)); in alloc_iq_fl_hwq()
3639 for (i = 0; i < fl->sidx * 8; i++) in alloc_iq_fl_hwq()
3640 MPASS(fl->sdesc[i].cl == NULL); in alloc_iq_fl_hwq()
3642 fl->cntxt_id = be16toh(c.fl0id); in alloc_iq_fl_hwq()
3643 fl->pidx = fl->cidx = fl->hw_cidx = fl->dbidx = 0; in alloc_iq_fl_hwq()
3644 fl->rx_offset = 0; in alloc_iq_fl_hwq()
3645 fl->flags &= ~(FL_STARVING | FL_DOOMED); in alloc_iq_fl_hwq()
3647 cntxt_id = fl->cntxt_id - sc->sge.eq_start; in alloc_iq_fl_hwq()
3652 sc->sge.eqmap[cntxt_id] = (void *)fl; in alloc_iq_fl_hwq()
3654 qid = fl->cntxt_id; in alloc_iq_fl_hwq()
3667 fl->udb = (volatile void *)udb; in alloc_iq_fl_hwq()
3669 fl->dbval = V_QID(qid) | sc->chip_params->sge_fl_db; in alloc_iq_fl_hwq()
3671 FL_LOCK(fl); in alloc_iq_fl_hwq()
3673 refill_fl(sc, fl, fl->lowat); in alloc_iq_fl_hwq()
3674 FL_UNLOCK(fl); in alloc_iq_fl_hwq()
3694 free_iq_fl_hwq(struct adapter *sc, struct sge_iq *iq, struct sge_fl *fl) in free_iq_fl_hwq() argument
3700 iq->cntxt_id, fl ? fl->cntxt_id : 0xffff, 0xffff); in free_iq_fl_hwq()
3734 struct sysctl_oid *oid, struct sge_fl *fl) in add_fl_sysctls() argument
3747 &fl->ba, "bus address of descriptor ring"); in add_fl_sysctls()
3749 fl->sidx * EQ_ESIZE + sc->params.sge.spg_len, in add_fl_sysctls()
3752 &fl->cntxt_id, 0, "SGE context id of the freelist"); in add_fl_sysctls()
3756 fl->flags & FL_BUF_PACKING ? 1 : 0, "packing enabled"); in add_fl_sysctls()
3757 SYSCTL_ADD_UINT(ctx, children, OID_AUTO, "cidx", CTLFLAG_RD, &fl->cidx, in add_fl_sysctls()
3759 if (fl->flags & FL_BUF_PACKING) { in add_fl_sysctls()
3761 CTLFLAG_RD, &fl->rx_offset, 0, "packing rx offset"); in add_fl_sysctls()
3763 SYSCTL_ADD_UINT(ctx, children, OID_AUTO, "pidx", CTLFLAG_RD, &fl->pidx, in add_fl_sysctls()
3766 CTLFLAG_RD, &fl->cl_allocated, "# of clusters allocated"); in add_fl_sysctls()
3768 CTLFLAG_RD, &fl->cl_recycled, "# of clusters recycled"); in add_fl_sysctls()
3770 CTLFLAG_RD, &fl->cl_fast_recycled, "# of clusters recycled (fast)"); in add_fl_sysctls()
3997 init_fl(sc, &rxq->fl, vi->qsize_rxq / 8, maxp, name); in alloc_rxq()
3998 rc = alloc_iq_fl(vi, &rxq->iq, &rxq->fl, &vi->ctx, oid); in alloc_rxq()
4014 rc = alloc_iq_fl_hwq(vi, &rxq->iq, &rxq->fl); in alloc_rxq()
4033 FL_LOCK(&rxq->fl); in alloc_rxq()
4034 refill_fl(sc, &rxq->fl, 128); in alloc_rxq()
4035 FL_UNLOCK(&rxq->fl); in alloc_rxq()
4049 free_iq_fl_hwq(vi->adapter, &rxq->iq, &rxq->fl); in free_rxq()
4058 free_iq_fl(vi->adapter, &rxq->iq, &rxq->fl); in free_rxq()
4114 init_fl(sc, &ofld_rxq->fl, vi->qsize_rxq / 8, maxp, name); in alloc_ofld_rxq()
4115 rc = alloc_iq_fl(vi, &ofld_rxq->iq, &ofld_rxq->fl, &vi->ctx, in alloc_ofld_rxq()
4135 rc = alloc_iq_fl_hwq(vi, &ofld_rxq->iq, &ofld_rxq->fl); in alloc_ofld_rxq()
4154 free_iq_fl_hwq(vi->adapter, &ofld_rxq->iq, &ofld_rxq->fl); in free_ofld_rxq()
4160 free_iq_fl(vi->adapter, &ofld_rxq->iq, &ofld_rxq->fl); in free_ofld_rxq()
4960 ring_fl_db(struct adapter *sc, struct sge_fl *fl) in ring_fl_db() argument
4964 n = IDXDIFF(fl->pidx >> 3, fl->dbidx, fl->sidx); in ring_fl_db()
4968 v = fl->dbval | V_PIDX(n); in ring_fl_db()
4969 if (fl->udb) in ring_fl_db()
4970 *fl->udb = htole32(v); in ring_fl_db()
4973 IDXINCR(fl->dbidx, n, fl->sidx); in ring_fl_db()
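
ring_fl_db converts the buffer-granular pidx to hardware slots (pidx >> 3), computes how many slots were posted since the last doorbell (IDXDIFF against fl->dbidx), writes that count plus the queue id in a single doorbell, and advances dbidx, so one MMIO write covers an arbitrarily large batch. A sketch with the doorbell write reduced to a printf and the index helpers re-derived for illustration:

#include <stdint.h>
#include <stdio.h>

/* Illustrative equivalents of the driver's circular-index helpers. */
#define	IDXDIFF(head, tail, wrap) \
	((head) >= (tail) ? (head) - (tail) : (wrap) - (tail) + (head))
#define	IDXINCR(idx, incr, wrap)	((idx) = ((idx) + (incr)) % (wrap))

struct toy_fl {
	uint32_t pidx;		/* software producer index (buffers) */
	uint16_t dbidx;		/* last hw index told to the hardware */
	uint16_t sidx;		/* ring size in hw descriptors */
};

static void
toy_ring_db(struct toy_fl *fl)
{
	uint16_t n = IDXDIFF(fl->pidx >> 3, fl->dbidx, fl->sidx);

	if (n == 0)
		return;
	/* Real driver: write fl->dbval | V_PIDX(n) to the doorbell. */
	printf("doorbell: %u new hw descriptors\n", n);
	IDXINCR(fl->dbidx, n, fl->sidx);
}

int
main(void)
{
	struct toy_fl fl = { .pidx = 40, .dbidx = 0, .sidx = 1023 };

	toy_ring_db(&fl);	/* doorbell: 5 new hw descriptors */
	return (0);
}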
4984 refill_fl(struct adapter *sc, struct sge_fl *fl, int n) in refill_fl() argument
4992 uint16_t max_pidx, zidx = fl->zidx; in refill_fl()
4993 uint16_t hw_cidx = fl->hw_cidx; /* stable snapshot */ in refill_fl()
4995 FL_LOCK_ASSERT_OWNED(fl); in refill_fl()
5002 max_pidx = __predict_false(hw_cidx == 0) ? fl->sidx - 1 : hw_cidx - 1; in refill_fl()
5003 if (fl->pidx == max_pidx * 8) in refill_fl()
5006 d = &fl->desc[fl->pidx]; in refill_fl()
5007 sd = &fl->sdesc[fl->pidx]; in refill_fl()
5022 fl->cl_fast_recycled++; in refill_fl()
5035 fl->cl_recycled++; in refill_fl()
5044 if (zidx != fl->safe_zidx) { in refill_fl()
5045 zidx = fl->safe_zidx; in refill_fl()
5052 fl->cl_allocated++; in refill_fl()
5059 if (fl->flags & FL_BUF_PACKING) { in refill_fl()
5070 if (__predict_false((++fl->pidx & 7) == 0)) { in refill_fl()
5071 uint16_t pidx = fl->pidx >> 3; in refill_fl()
5073 if (__predict_false(pidx == fl->sidx)) { in refill_fl()
5074 fl->pidx = 0; in refill_fl()
5076 sd = fl->sdesc; in refill_fl()
5077 d = fl->desc; in refill_fl()
5082 if (IDXDIFF(pidx, fl->dbidx, fl->sidx) >= 4) in refill_fl()
5083 ring_fl_db(sc, fl); in refill_fl()
5087 if ((fl->pidx >> 3) != fl->dbidx) in refill_fl()
5088 ring_fl_db(sc, fl); in refill_fl()
5090 return (FL_RUNNING_LOW(fl) && !(fl->flags & FL_STARVING)); in refill_fl()
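
refill_fl is the producer and deliberately stops one hardware slot short of the consumer: max_pidx is hw_cidx - 1 (wrapping), so a completely full ring and an empty ring never look alike, and the doorbell is rung every four completed slots so the hardware can consume buffers while the batch is still being posted. Its return value tells the caller the list is running low but not yet on the starved list. A sketch of the stop-index computation:

#include <stdint.h>
#include <stdio.h>

/*
 * Producer stop index: one hw slot short of the consumer so that
 * pidx == cidx always means "empty", never "full" (illustrative).
 */
static uint32_t
max_producer_idx(uint16_t hw_cidx, uint16_t sidx)
{
	uint16_t max_pidx = (hw_cidx == 0) ? sidx - 1 : hw_cidx - 1;

	return ((uint32_t)max_pidx * 8);	/* back to buffer units */
}

int
main(void)
{
	/* Consumer at hw slot 0 of a 1023-slot ring: fill up to slot 1022. */
	printf("stop at buffer index %u\n", max_producer_idx(0, 1023));
	return (0);
}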
5100 struct sge_fl *fl, *fl_temp; in refill_sfl() local
5103 TAILQ_FOREACH_SAFE(fl, &sc->sfl, link, fl_temp) { in refill_sfl()
5104 FL_LOCK(fl); in refill_sfl()
5105 refill_fl(sc, fl, 64); in refill_sfl()
5106 if (FL_NOT_RUNNING_LOW(fl) || fl->flags & FL_DOOMED) { in refill_sfl()
5107 TAILQ_REMOVE(&sc->sfl, fl, link); in refill_sfl()
5108 fl->flags &= ~FL_STARVING; in refill_sfl()
5110 FL_UNLOCK(fl); in refill_sfl()
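
refill_sfl services the starved list and unlinks entries mid-iteration, which is exactly what TAILQ_FOREACH_SAFE exists for: the extra temp pointer keeps the walk valid after TAILQ_REMOVE. A self-contained sketch of the idiom, assuming a BSD <sys/queue.h> (older glibc copies lack the _SAFE iterators); the locking is reduced to comments:

#include <sys/queue.h>
#include <stdbool.h>
#include <stdio.h>

struct toy_fl {
	int id;
	bool still_low;
	TAILQ_ENTRY(toy_fl) link;
};

TAILQ_HEAD(toy_sfl, toy_fl);

static void
toy_refill_sfl(struct toy_sfl *sfl)
{
	struct toy_fl *fl, *fl_temp;

	/* _SAFE variant: 'fl' may be unlinked inside the body. */
	TAILQ_FOREACH_SAFE(fl, sfl, link, fl_temp) {
		/* real driver: FL_LOCK(fl); refill_fl(sc, fl, 64); */
		if (!fl->still_low) {
			TAILQ_REMOVE(sfl, fl, link);
			printf("fl %d recovered\n", fl->id);
		}
		/* real driver: FL_UNLOCK(fl); */
	}
}

int
main(void)
{
	struct toy_sfl sfl = TAILQ_HEAD_INITIALIZER(sfl);
	struct toy_fl a = { .id = 1, .still_low = false };
	struct toy_fl b = { .id = 2, .still_low = true };

	TAILQ_INSERT_TAIL(&sfl, &a, link);
	TAILQ_INSERT_TAIL(&sfl, &b, link);
	toy_refill_sfl(&sfl);		/* prints: fl 1 recovered */
	return (0);
}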
5123 free_fl_buffers(struct adapter *sc, struct sge_fl *fl) in free_fl_buffers() argument
5129 sd = fl->sdesc; in free_fl_buffers()
5130 for (i = 0; i < fl->sidx * 8; i++, sd++) { in free_fl_buffers()
5136 else if (fl->flags & FL_BUF_PACKING) { in free_fl_buffers()
5147 if (fl->flags & FL_BUF_RESUME) { in free_fl_buffers()
5148 m_freem(fl->m0); in free_fl_buffers()
5149 fl->flags &= ~FL_BUF_RESUME; in free_fl_buffers()
6343 add_fl_to_sfl(struct adapter *sc, struct sge_fl *fl) in add_fl_to_sfl() argument
6346 FL_LOCK(fl); in add_fl_to_sfl()
6347 if ((fl->flags & FL_DOOMED) == 0) { in add_fl_to_sfl()
6348 fl->flags |= FL_STARVING; in add_fl_to_sfl()
6349 TAILQ_INSERT_TAIL(&sc->sfl, fl, link); in add_fl_to_sfl()
6352 FL_UNLOCK(fl); in add_fl_to_sfl()
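
add_fl_to_sfl is the enqueue half of the starvation protocol: under FL_LOCK it refuses a freelist that is FL_DOOMED (mid-teardown), otherwise sets FL_STARVING and appends it to sc->sfl for refill_sfl to service. Because FL_STARVING is only touched under the freelist lock, refill_fl's "running low and not already starving" return above cannot cause a double insert. A minimal sketch of the guarded enqueue, with toy types and the locks left as comments:

#include <sys/queue.h>
#include <stdbool.h>

struct toy_fl2 {
	bool doomed;		/* FL_DOOMED analogue: being torn down */
	bool starving;		/* FL_STARVING analogue */
	TAILQ_ENTRY(toy_fl2) link;
};

TAILQ_HEAD(toy_sfl2, toy_fl2);

/* Guarded enqueue: a doomed freelist must never be re-armed. */
static void
toy_add_to_sfl(struct toy_sfl2 *sfl, struct toy_fl2 *fl)
{
	/* real driver: FL_LOCK(fl) / FL_UNLOCK(fl) bracket this */
	if (!fl->doomed) {
		fl->starving = true;
		TAILQ_INSERT_TAIL(sfl, fl, link);
	}
}

int
main(void)
{
	struct toy_sfl2 sfl = TAILQ_HEAD_INITIALIZER(sfl);
	struct toy_fl2 fl = { .doomed = false, .starving = false };

	toy_add_to_sfl(&sfl, &fl);
	return ((int)!fl.starving);	/* 0: enqueued and marked */
}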