Lines matching full:bf in sys/dev/ath/if_ath_tx.c (FreeBSD ath(4) transmit path)

128     struct ieee80211_node *ni, struct ath_buf *bf, struct mbuf *m0);
133 struct ath_tid *tid, struct ath_buf *bf);
139 struct ath_buf *bf; in ath_tx_alq_post() local
144 bf = bf_first; in ath_tx_alq_post()
146 while (bf != NULL) { in ath_tx_alq_post()
148 if (bf->bf_nseg == 0) in ath_tx_alq_post()
150 n = ((bf->bf_nseg - 1) / sc->sc_tx_nmaps) + 1; in ath_tx_alq_post()
151 for (i = 0, ds = (const char *) bf->bf_desc; in ath_tx_alq_post()
159 bf = bf->bf_next; in ath_tx_alq_post()
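
The descriptor-count arithmetic at source line 150 is a ceiling division: each buffer needs one descriptor per sc_tx_nmaps segments, rounded up. A minimal standalone sketch of just that computation (function and parameter names here are illustrative, not the driver's):

#include <assert.h>

/* Descriptors needed for nseg segments when each descriptor can point
 * at nmaps buffer segments. Mirrors the driver's
 * n = ((bf->bf_nseg - 1) / sc->sc_tx_nmaps) + 1 for nseg > 0. */
static int
tx_desc_count(int nseg, int nmaps)
{
        assert(nseg > 0 && nmaps > 0);
        return (((nseg - 1) / nmaps) + 1);
}

int
main(void)
{
        assert(tx_desc_count(1, 4) == 1);       /* one segment, one descriptor */
        assert(tx_desc_count(4, 4) == 1);       /* exactly full */
        assert(tx_desc_count(5, 4) == 2);       /* spills into a second */
        return (0);
}
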
196 ath_tx_set_retry(struct ath_softc *sc, struct ath_buf *bf) in ath_tx_set_retry() argument
200 wh = mtod(bf->bf_m, struct ieee80211_frame *); in ath_tx_set_retry()
202 if (bf->bf_state.bfs_isretried == 0) { in ath_tx_set_retry()
204 bus_dmamap_sync(sc->sc_dmat, bf->bf_dmamap, in ath_tx_set_retry()
207 bf->bf_state.bfs_isretried = 1; in ath_tx_set_retry()
208 		bf->bf_state.bfs_retries++; in ath_tx_set_retry()
257 struct ath_buf *bf, *next; in ath_txfrag_cleanup() local
261 TAILQ_FOREACH_SAFE(bf, frags, bf_list, next) { in ath_txfrag_cleanup()
262 /* NB: bf assumed clean */ in ath_txfrag_cleanup()
263 TAILQ_REMOVE(frags, bf, bf_list); in ath_txfrag_cleanup()
264 ath_returnbuf_head(sc, bf); in ath_txfrag_cleanup()
279 struct ath_buf *bf; in ath_txfrag_setup() local
284 bf = _ath_getbuf_locked(sc, ATH_BUFTYPE_NORMAL); in ath_txfrag_setup()
285 if (bf == NULL) { /* out of buffers, cleanup */ in ath_txfrag_setup()
292 TAILQ_INSERT_TAIL(frags, bf, bf_list); in ath_txfrag_setup()
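
ath_txfrag_cleanup() uses the classic safe-iteration idiom from FreeBSD's <sys/queue.h>: TAILQ_FOREACH_SAFE keeps a saved "next" pointer so the current entry can be removed mid-walk. A self-contained sketch of the same pattern (the element type is a hypothetical stand-in for struct ath_buf):

#include <sys/queue.h>
#include <stdio.h>
#include <stdlib.h>

struct buf {                            /* stand-in for struct ath_buf */
        int id;
        TAILQ_ENTRY(buf) link;          /* stand-in for bf_list */
};
TAILQ_HEAD(bufhead, buf);

int
main(void)
{
        struct bufhead frags = TAILQ_HEAD_INITIALIZER(frags);
        struct buf *bf, *next;
        int i;

        for (i = 0; i < 3; i++) {
                bf = calloc(1, sizeof(*bf));
                bf->id = i;
                TAILQ_INSERT_TAIL(&frags, bf, link);
        }
        /* Safe removal while iterating; 'next' survives TAILQ_REMOVE. */
        TAILQ_FOREACH_SAFE(bf, &frags, link, next) {
                TAILQ_REMOVE(&frags, bf, link);
                printf("returning buf %d\n", bf->id);
                free(bf);               /* driver would ath_returnbuf_head() */
        }
        return (0);
}
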
300 ath_tx_dmasetup(struct ath_softc *sc, struct ath_buf *bf, struct mbuf *m0) in ath_tx_dmasetup() argument
309 error = bus_dmamap_load_mbuf_sg(sc->sc_dmat, bf->bf_dmamap, m0, in ath_tx_dmasetup()
310 bf->bf_segs, &bf->bf_nseg, in ath_tx_dmasetup()
314 bf->bf_nseg = ATH_MAX_SCATTER + 1; in ath_tx_dmasetup()
325 if (bf->bf_nseg > ATH_MAX_SCATTER) { /* too many desc's, linearize */ in ath_tx_dmasetup()
334 error = bus_dmamap_load_mbuf_sg(sc->sc_dmat, bf->bf_dmamap, m0, in ath_tx_dmasetup()
335 bf->bf_segs, &bf->bf_nseg, in ath_tx_dmasetup()
342 KASSERT(bf->bf_nseg <= ATH_MAX_SCATTER, in ath_tx_dmasetup()
343 ("too many segments after defrag; nseg %u", bf->bf_nseg)); in ath_tx_dmasetup()
344 } else if (bf->bf_nseg == 0) { /* null packet, discard */ in ath_tx_dmasetup()
351 bus_dmamap_sync(sc->sc_dmat, bf->bf_dmamap, BUS_DMASYNC_PREWRITE); in ath_tx_dmasetup()
352 bf->bf_m = m0; in ath_tx_dmasetup()
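
ath_tx_dmasetup() follows a try-once-then-linearize pattern: if the first bus_dmamap_load_mbuf_sg() fails with EFBIG or yields more than ATH_MAX_SCATTER segments, the mbuf chain is defragmented into fewer clusters and loaded exactly once more, with no second defrag attempt. A hedged, standalone model of that control flow; try_map() and linearize() are invented stubs standing in for the busdma load and mbuf defrag calls:

#include <errno.h>
#include <stdio.h>

#define MAX_SCATTER 4

/* Stubs standing in for bus_dmamap_load_mbuf_sg() and mbuf defrag. */
static int try_map(int frags, int *nseg) { *nseg = frags; return ((frags > 16) ? EFBIG : 0); }
static int linearize(int frags) { return ((frags + 3) / 4); /* pretend 4:1 compaction */ }

static int
dma_setup(int frags)
{
        int error, nseg;

        error = try_map(frags, &nseg);
        if (error == EFBIG)
                nseg = MAX_SCATTER + 1;         /* force the linearize path */
        else if (error != 0)
                return (error);
        if (nseg > MAX_SCATTER) {               /* too many segments: defrag once */
                frags = linearize(frags);
                error = try_map(frags, &nseg);
                if (error != 0 || nseg > MAX_SCATTER)
                        return (EFBIG);         /* give up; no second defrag */
        }
        if (nseg == 0)
                return (EIO);                   /* null packet, discard */
        printf("mapped into %d segments\n", nseg);
        return (0);
}

int
main(void)
{
        return (dma_setup(9) || dma_setup(2));
}
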
364 struct ath_buf *bf, bool is_aggr, int is_first_subframe, in ath_tx_chaindesclist() argument
396 ds = (char *) bf->bf_desc; in ath_tx_chaindesclist()
400 for (i = 0; i < bf->bf_nseg; i++) { in ath_tx_chaindesclist()
401 bufAddrList[bp] = bf->bf_segs[i].ds_addr; in ath_tx_chaindesclist()
402 segLenList[bp] = bf->bf_segs[i].ds_len; in ath_tx_chaindesclist()
409 if ((i != bf->bf_nseg - 1) && (bp < numTxMaps)) in ath_tx_chaindesclist()
417 if (i == bf->bf_nseg - 1) in ath_tx_chaindesclist()
421 bf->bf_daddr + dd->dd_descsize * (dsp + 1)); in ath_tx_chaindesclist()
432 , bf->bf_descid /* XXX desc id */ in ath_tx_chaindesclist()
433 , bf->bf_state.bfs_tx_queue in ath_tx_chaindesclist()
435 , i == bf->bf_nseg - 1 /* last segment */ in ath_tx_chaindesclist()
469 bf->bf_state.bfs_ndelim); in ath_tx_chaindesclist()
472 bf->bf_lastds = (struct ath_desc *) ds; in ath_tx_chaindesclist()
486 bus_dmamap_sync(sc->sc_dmat, bf->bf_dmamap, BUS_DMASYNC_PREWRITE); in ath_tx_chaindesclist()
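
ath_tx_chaindesclist() walks bf_segs[] and packs up to numTxMaps buffer pointers into each hardware descriptor, chaining descriptors through a link field; the last descriptor gets a null link and becomes bf_lastds. A simplified standalone model of that packing loop, with illustrative types (not the HAL's) and NMAPS playing the role of numTxMaps:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NMAPS 2                         /* buffer pointers per descriptor */

struct seg  { uint32_t addr, len; };
struct desc { uint32_t buf[NMAPS], len[NMAPS], link; };

int
main(void)
{
        struct seg segs[5] =
            {{0x1000,64},{0x2000,64},{0x3000,64},{0x4000,64},{0x5000,16}};
        struct desc ds[8];
        int i, bp = 0, dsp = 0;

        memset(ds, 0, sizeof(ds));
        for (i = 0; i < 5; i++) {
                ds[dsp].buf[bp] = segs[i].addr;
                ds[dsp].len[bp] = segs[i].len;
                bp++;
                /* Descriptor full, or out of segments: close it out. */
                if (bp == NMAPS || i == 4) {
                        /* Last segment gets a null link; otherwise point at
                         * the next descriptor, as with
                         * bf_daddr + dd_descsize * (dsp + 1). */
                        ds[dsp].link = (i == 4) ? 0 : 0x8000 + 32 * (dsp + 1);
                        bp = 0;
                        dsp++;
                }
        }
        for (i = 0; i < dsp; i++)
                printf("desc %d -> link 0x%x\n", i, ds[i].link);
        return (0);
}
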
506 struct ath_buf *bf) in ath_tx_set_ratectrl() argument
508 struct ath_rc_series *rc = bf->bf_state.bfs_rc; in ath_tx_set_ratectrl()
511 if (! bf->bf_state.bfs_ismrr) in ath_tx_set_ratectrl()
518 else if (bf->bf_state.bfs_txflags & HAL_TXDESC_NOACK) { in ath_tx_set_ratectrl()
533 ath_buf_set_rate(sc, ni, bf); in ath_tx_set_ratectrl()
535 ath_hal_setupxtxdesc(sc->sc_ah, bf->bf_desc in ath_tx_set_ratectrl()
547 * bf->bf_next.
552 struct ath_buf *bf, *bf_prev = NULL; in ath_tx_setds_11n() local
559 bf = bf_first; in ath_tx_setds_11n()
561 if (bf->bf_state.bfs_txrate0 == 0) in ath_tx_setds_11n()
562 DPRINTF(sc, ATH_DEBUG_SW_TX_AGGR, "%s: bf=%p, txrate0=%d\n", in ath_tx_setds_11n()
563 __func__, bf, 0); in ath_tx_setds_11n()
564 if (bf->bf_state.bfs_rc[0].ratecode == 0) in ath_tx_setds_11n()
565 DPRINTF(sc, ATH_DEBUG_SW_TX_AGGR, "%s: bf=%p, rix0=%d\n", in ath_tx_setds_11n()
566 __func__, bf, 0); in ath_tx_setds_11n()
572 while (bf != NULL) { in ath_tx_setds_11n()
574 "%s: bf=%p, nseg=%d, pktlen=%d, seqno=%d\n", in ath_tx_setds_11n()
575 __func__, bf, bf->bf_nseg, bf->bf_state.bfs_pktlen, in ath_tx_setds_11n()
576 SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_setds_11n()
582 ath_hal_setuptxdesc(sc->sc_ah, bf->bf_desc in ath_tx_setds_11n()
583 , bf->bf_state.bfs_pktlen /* packet length */ in ath_tx_setds_11n()
584 , bf->bf_state.bfs_hdrlen /* header length */ in ath_tx_setds_11n()
585 , bf->bf_state.bfs_atype /* Atheros packet type */ in ath_tx_setds_11n()
586 , bf->bf_state.bfs_txpower /* txpower */ in ath_tx_setds_11n()
587 , bf->bf_state.bfs_txrate0 in ath_tx_setds_11n()
588 , bf->bf_state.bfs_try0 /* series 0 rate/tries */ in ath_tx_setds_11n()
589 , bf->bf_state.bfs_keyix /* key cache index */ in ath_tx_setds_11n()
590 , bf->bf_state.bfs_txantenna /* antenna mode */ in ath_tx_setds_11n()
591 , bf->bf_state.bfs_txflags | HAL_TXDESC_INTREQ /* flags */ in ath_tx_setds_11n()
592 , bf->bf_state.bfs_ctsrate /* rts/cts rate */ in ath_tx_setds_11n()
593 , bf->bf_state.bfs_ctsduration /* rts/cts duration */ in ath_tx_setds_11n()
600 if (bf == bf_first) { in ath_tx_setds_11n()
604 ath_tx_set_ratectrl(sc, bf->bf_node, bf); in ath_tx_setds_11n()
611 ath_tx_chaindesclist(sc, ds0, bf, in ath_tx_setds_11n()
613 !! (bf == bf_first), /* is_first_subframe */ in ath_tx_setds_11n()
614 !! (bf->bf_next == NULL) /* is_last_subframe */ in ath_tx_setds_11n()
617 if (bf == bf_first) { in ath_tx_setds_11n()
624 bf->bf_state.bfs_al, in ath_tx_setds_11n()
625 bf->bf_state.bfs_ndelim); in ath_tx_setds_11n()
634 bf->bf_daddr); in ath_tx_setds_11n()
637 bf_prev = bf; in ath_tx_setds_11n()
638 bf = bf->bf_next; in ath_tx_setds_11n()
698 struct ath_buf *bf) in ath_tx_handoff_mcast() argument
702 KASSERT((bf->bf_flags & ATH_BUF_BUSY) == 0, in ath_tx_handoff_mcast()
703 ("%s: busy status 0x%x", __func__, bf->bf_flags)); in ath_tx_handoff_mcast()
709 if (bf->bf_state.bfs_tx_queue != sc->sc_cabq->axq_qnum) { in ath_tx_handoff_mcast()
711 "%s: bf=%p, bfs_tx_queue=%d, axq_qnum=%d\n", in ath_tx_handoff_mcast()
712 __func__, bf, bf->bf_state.bfs_tx_queue, in ath_tx_handoff_mcast()
730 bf->bf_daddr); in ath_tx_handoff_mcast()
732 ATH_TXQ_INSERT_TAIL(txq, bf, bf_list); in ath_tx_handoff_mcast()
741 struct ath_buf *bf) in ath_tx_handoff_hw() argument
755 KASSERT((bf->bf_flags & ATH_BUF_BUSY) == 0, in ath_tx_handoff_hw()
756 ("%s: busy status 0x%x", __func__, bf->bf_flags)); in ath_tx_handoff_hw()
802 ATH_TXQ_INSERT_TAIL(txq, bf, bf_list); in ath_tx_handoff_hw()
804 "ath_tx_handoff: non-tdma: txq=%u, add bf=%p " in ath_tx_handoff_hw()
807 bf, in ath_tx_handoff_hw()
817 *txq->axq_link = bf->bf_daddr; in ath_tx_handoff_hw()
821 (caddr_t)bf->bf_daddr, bf->bf_desc, in ath_tx_handoff_hw()
827 (caddr_t)bf->bf_daddr, bf->bf_desc, in ath_tx_handoff_hw()
828 bf->bf_lastds); in ath_tx_handoff_hw()
861 * Ensure that the bf TXQ matches this TXQ, so later in ath_tx_handoff_hw()
864 if (bf->bf_state.bfs_tx_queue != txq->axq_qnum) { in ath_tx_handoff_hw()
866 "%s: bf=%p, bfs_tx_queue=%d, axq_qnum=%d\n", in ath_tx_handoff_hw()
867 __func__, bf, bf->bf_state.bfs_tx_queue, in ath_tx_handoff_hw()
874 if (bf->bf_state.bfs_aggr) in ath_tx_handoff_hw()
880 ath_hal_gettxdesclinkptr(ah, bf->bf_lastds, &txq->axq_link); in ath_tx_handoff_hw()
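
ath_tx_handoff_hw() shows the standard Atheros queue-append contract: if the hardware queue is idle, the new chain's physical address is programmed as the queue head (ath_hal_puttxbuf); otherwise the previous tail descriptor's link word is patched (*txq->axq_link = bf->bf_daddr) so DMA runs on into the new chain. Either way axq_link is then re-aimed at the new chain's last descriptor. A toy model of that append logic, simplified to one descriptor per frame (in the driver the head address is the chain's first descriptor while axq_link points into its last):

#include <stdint.h>
#include <stdio.h>

struct desc { uint32_t daddr, link; };  /* link: next chain, 0 = end */

static uint32_t  head_reg;              /* stands in for ath_hal_puttxbuf() */
static uint32_t *axq_link;              /* where the next daddr gets patched */

static void
handoff(struct desc *d)
{
        if (axq_link == NULL)
                head_reg = d->daddr;    /* queue idle: program head */
        else
                *axq_link = d->daddr;   /* append: patch old tail's link */
        axq_link = &d->link;            /* new tail is the next patch point */
}

int
main(void)
{
        struct desc a = { 0x1000, 0 }, b = { 0x2000, 0 };

        handoff(&a);
        handoff(&b);
        printf("head=0x%x a.link=0x%x b.link=0x%x\n",
            head_reg, a.link, b.link);  /* head=0x1000 a.link=0x2000 b.link=0x0 */
        return (0);
}
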
911 struct ath_buf *bf, *bf_last; in ath_legacy_tx_dma_restart() local
916 bf = TAILQ_FIRST(&txq->axq_q); in ath_legacy_tx_dma_restart()
919 if (bf == NULL) in ath_legacy_tx_dma_restart()
923 "%s: Q%d: bf=%p, bf_last=%p, daddr=0x%08x\n", in ath_legacy_tx_dma_restart()
926 bf, in ath_legacy_tx_dma_restart()
928 (uint32_t) bf->bf_daddr); in ath_legacy_tx_dma_restart()
944 ath_hal_puttxbuf(sc->sc_ah, txq->axq_qnum, bf->bf_daddr); in ath_legacy_tx_dma_restart()
959 struct ath_buf *bf) in ath_legacy_xmit_handoff() argument
965 ath_tx_alq_post(sc, bf); in ath_legacy_xmit_handoff()
969 ath_tx_handoff_mcast(sc, txq, bf); in ath_legacy_xmit_handoff()
971 ath_tx_handoff_hw(sc, txq, bf); in ath_legacy_xmit_handoff()
1050 ath_tx_calc_protection(struct ath_softc *sc, struct ath_buf *bf) in ath_tx_calc_protection() argument
1059 flags = bf->bf_state.bfs_txflags; in ath_tx_calc_protection()
1060 rix = bf->bf_state.bfs_rc[0].rix; in ath_tx_calc_protection()
1061 shortPreamble = bf->bf_state.bfs_shpream; in ath_tx_calc_protection()
1062 wh = mtod(bf->bf_m, struct ieee80211_frame *); in ath_tx_calc_protection()
1065 if (bf->bf_flags & ATH_BUF_TOA_PROBE) { in ath_tx_calc_protection()
1068 bf->bf_state.bfs_doprot = 0; in ath_tx_calc_protection()
1080 bf->bf_state.bfs_doprot = 1; in ath_tx_calc_protection()
1113 bf->bf_state.bfs_txflags = flags; in ath_tx_calc_protection()
1123 ath_tx_calc_duration(struct ath_softc *sc, struct ath_buf *bf) in ath_tx_calc_duration() argument
1131 int isfrag = bf->bf_m->m_flags & M_FRAG; in ath_tx_calc_duration()
1133 flags = bf->bf_state.bfs_txflags; in ath_tx_calc_duration()
1134 rix = bf->bf_state.bfs_rc[0].rix; in ath_tx_calc_duration()
1135 shortPreamble = bf->bf_state.bfs_shpream; in ath_tx_calc_duration()
1136 wh = mtod(bf->bf_m, struct ieee80211_frame *); in ath_tx_calc_duration()
1161 bf->bf_nextfraglen, in ath_tx_calc_duration()
1171 bf->bf_state.bfs_ismrr = 0; in ath_tx_calc_duration()
1172 bf->bf_state.bfs_try0 = ATH_TXMGTTRY; in ath_tx_calc_duration()
1262 ath_tx_set_rtscts(struct ath_softc *sc, struct ath_buf *bf) in ath_tx_set_rtscts() argument
1266 uint8_t rix = bf->bf_state.bfs_rc[0].rix; in ath_tx_set_rtscts()
1273 if ((bf->bf_state.bfs_txflags & in ath_tx_set_rtscts()
1276 bf->bf_state.bfs_ctsrate = 0; in ath_tx_set_rtscts()
1277 bf->bf_state.bfs_ctsduration = 0; in ath_tx_set_rtscts()
1285 if (bf->bf_state.bfs_doprot) in ath_tx_set_rtscts()
1288 rix = bf->bf_state.bfs_rc[0].rix; in ath_tx_set_rtscts()
1294 if (bf->bf_state.bfs_ctsrate0 != 0) in ath_tx_set_rtscts()
1295 cix = ath_tx_findrix(sc, bf->bf_state.bfs_ctsrate0); in ath_tx_set_rtscts()
1302 bf->bf_state.bfs_shpream); in ath_tx_set_rtscts()
1307 bf->bf_state.bfs_shpream, bf->bf_state.bfs_pktlen, in ath_tx_set_rtscts()
1308 rt, bf->bf_state.bfs_txflags); in ath_tx_set_rtscts()
1311 bf->bf_state.bfs_ctsrate = ctsrate; in ath_tx_set_rtscts()
1312 bf->bf_state.bfs_ctsduration = ctsduration; in ath_tx_set_rtscts()
1318 bf->bf_state.bfs_ismrr = 0; in ath_tx_set_rtscts()
1319 bf->bf_state.bfs_try0 = in ath_tx_set_rtscts()
1320 bf->bf_state.bfs_rc[0].tries = ATH_TXMGTTRY; /* XXX ew */ in ath_tx_set_rtscts()
1334 ath_tx_setds(struct ath_softc *sc, struct ath_buf *bf) in ath_tx_setds() argument
1336 struct ath_desc *ds = bf->bf_desc; in ath_tx_setds()
1339 if (bf->bf_state.bfs_txrate0 == 0) in ath_tx_setds()
1341 "%s: bf=%p, txrate0=%d\n", __func__, bf, 0); in ath_tx_setds()
1344 , bf->bf_state.bfs_pktlen /* packet length */ in ath_tx_setds()
1345 , bf->bf_state.bfs_hdrlen /* header length */ in ath_tx_setds()
1346 , bf->bf_state.bfs_atype /* Atheros packet type */ in ath_tx_setds()
1347 , bf->bf_state.bfs_txpower /* txpower */ in ath_tx_setds()
1348 , bf->bf_state.bfs_txrate0 in ath_tx_setds()
1349 , bf->bf_state.bfs_try0 /* series 0 rate/tries */ in ath_tx_setds()
1350 , bf->bf_state.bfs_keyix /* key cache index */ in ath_tx_setds()
1351 , bf->bf_state.bfs_txantenna /* antenna mode */ in ath_tx_setds()
1352 , bf->bf_state.bfs_txflags /* flags */ in ath_tx_setds()
1353 , bf->bf_state.bfs_ctsrate /* rts/cts rate */ in ath_tx_setds()
1354 , bf->bf_state.bfs_ctsduration /* rts/cts duration */ in ath_tx_setds()
1360 bf->bf_lastds = ds; in ath_tx_setds()
1361 bf->bf_last = bf; in ath_tx_setds()
1364 ath_tx_set_ratectrl(sc, bf->bf_node, bf); in ath_tx_setds()
1365 ath_tx_chaindesclist(sc, ds, bf, 0, 0, 0); in ath_tx_setds()
1382 ath_tx_do_ratelookup(struct ath_softc *sc, struct ath_buf *bf, int tid, in ath_tx_do_ratelookup() argument
1390 if (! bf->bf_state.bfs_doratelookup) in ath_tx_do_ratelookup()
1394 bzero(bf->bf_state.bfs_rc, sizeof(bf->bf_state.bfs_rc)); in ath_tx_do_ratelookup()
1396 ATH_NODE_LOCK(ATH_NODE(bf->bf_node)); in ath_tx_do_ratelookup()
1397 ath_rate_findrate(sc, ATH_NODE(bf->bf_node), bf->bf_state.bfs_shpream, in ath_tx_do_ratelookup()
1401 bf->bf_state.bfs_rc[0].rix = rix; in ath_tx_do_ratelookup()
1402 bf->bf_state.bfs_rc[0].ratecode = rate; in ath_tx_do_ratelookup()
1403 bf->bf_state.bfs_rc[0].tries = try0; in ath_tx_do_ratelookup()
1405 if (bf->bf_state.bfs_ismrr && try0 != ATH_TXMAXTRY) in ath_tx_do_ratelookup()
1406 ath_rate_getxtxrates(sc, ATH_NODE(bf->bf_node), rix, in ath_tx_do_ratelookup()
1407 is_aggr, bf->bf_state.bfs_rc); in ath_tx_do_ratelookup()
1408 ATH_NODE_UNLOCK(ATH_NODE(bf->bf_node)); in ath_tx_do_ratelookup()
1412 bf->bf_state.bfs_try0 = try0; in ath_tx_do_ratelookup()
1413 bf->bf_state.bfs_txrate0 = rate; in ath_tx_do_ratelookup()
1414 bf->bf_state.bfs_rc_maxpktlen = maxpktlen; in ath_tx_do_ratelookup()
1422 struct ath_buf *bf) in ath_tx_update_clrdmask() argument
1424 struct ath_node *an = ATH_NODE(bf->bf_node); in ath_tx_update_clrdmask()
1429 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK; in ath_tx_update_clrdmask()
1509 struct ath_buf *bf) in ath_tx_xmit_normal() argument
1511 struct ath_node *an = ATH_NODE(bf->bf_node); in ath_tx_xmit_normal()
1512 struct ath_tid *tid = &an->an_tid[bf->bf_state.bfs_tid]; in ath_tx_xmit_normal()
1525 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK; in ath_tx_xmit_normal()
1528 ath_tx_do_ratelookup(sc, bf, tid->tid, bf->bf_state.bfs_pktlen, false); in ath_tx_xmit_normal()
1529 ath_tx_calc_duration(sc, bf); in ath_tx_xmit_normal()
1530 ath_tx_calc_protection(sc, bf); in ath_tx_xmit_normal()
1531 ath_tx_set_rtscts(sc, bf); in ath_tx_xmit_normal()
1532 ath_tx_rate_fill_rcflags(sc, bf); in ath_tx_xmit_normal()
1533 ath_tx_setds(sc, bf); in ath_tx_xmit_normal()
1539 bf->bf_comp = ath_tx_normal_comp; in ath_tx_xmit_normal()
1542 ath_tx_handoff(sc, txq, bf); in ath_tx_xmit_normal()
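
ath_tx_xmit_normal() fixes the per-frame setup order: the rate lookup must run first because duration, protection, and RTS/CTS selection all consume bfs_rc[0].rix, and ath_tx_setds() must come last since it bakes the results into the descriptor. A stub-only sketch of that ordering (stub names loosely mirror the driver's helpers; bodies are placeholders):

#include <stdio.h>

static void rate_lookup(void)     { /* fills rc[0].rix, txrate0, try0 */ }
static void calc_duration(void)   { /* NAV/duration from rix + pktlen */ }
static void calc_protection(void) { /* decide RTS/CTS vs none */ }
static void set_rtscts(void)      { /* ctsrate/ctsduration from rix */ }
static void fill_rcflags(void)    { /* per-series HAL flags */ }
static void setds(void)           { /* write the hardware descriptor */ }

static void
xmit_normal(void)
{
        rate_lookup();          /* 1: everything below reads its output */
        calc_duration();        /* 2 */
        calc_protection();      /* 3 */
        set_rtscts();           /* 4 */
        fill_rcflags();         /* 5 */
        setds();                /* 6: descriptor now reflects all of the above */
}

int
main(void)
{
        xmit_normal();
        printf("setup order complete\n");
        return (0);
}
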
1563 struct ath_buf *bf, struct mbuf *m0, struct ath_txq *txq) in ath_tx_normal_setup() argument
1621 error = ath_tx_dmasetup(sc, bf, m0); in ath_tx_normal_setup()
1625 bf->bf_node = ni; /* NB: held reference */ in ath_tx_normal_setup()
1626 m0 = bf->bf_m; /* NB: may have changed */ in ath_tx_normal_setup()
1630 ds = bf->bf_desc; in ath_tx_normal_setup()
1711 bf->bf_state.bfs_doratelookup = 1; in ath_tx_normal_setup()
1811 bf->bf_flags |= ATH_BUF_TOA_PROBE; in ath_tx_normal_setup()
1872 bzero(&bf->bf_state.bfs_rc, sizeof(bf->bf_state.bfs_rc)); in ath_tx_normal_setup()
1878 bf->bf_state.bfs_rc[0].rix = rix; in ath_tx_normal_setup()
1879 bf->bf_state.bfs_rc[0].tries = try0; in ath_tx_normal_setup()
1880 bf->bf_state.bfs_rc[0].ratecode = txrate; in ath_tx_normal_setup()
1883 bf->bf_state.bfs_pktlen = pktlen; in ath_tx_normal_setup()
1884 bf->bf_state.bfs_hdrlen = hdrlen; in ath_tx_normal_setup()
1885 bf->bf_state.bfs_atype = atype; in ath_tx_normal_setup()
1886 bf->bf_state.bfs_txpower = ieee80211_get_node_txpower(ni); in ath_tx_normal_setup()
1887 bf->bf_state.bfs_txrate0 = txrate; in ath_tx_normal_setup()
1888 bf->bf_state.bfs_try0 = try0; in ath_tx_normal_setup()
1889 bf->bf_state.bfs_keyix = keyix; in ath_tx_normal_setup()
1890 bf->bf_state.bfs_txantenna = sc->sc_txantenna; in ath_tx_normal_setup()
1891 bf->bf_state.bfs_txflags = flags; in ath_tx_normal_setup()
1892 bf->bf_state.bfs_shpream = shortPreamble; in ath_tx_normal_setup()
1895 bf->bf_state.bfs_ctsrate0 = 0; /* ie, no hard-coded ctsrate */ in ath_tx_normal_setup()
1896 bf->bf_state.bfs_ctsrate = 0; /* calculated later */ in ath_tx_normal_setup()
1897 bf->bf_state.bfs_ctsduration = 0; in ath_tx_normal_setup()
1898 bf->bf_state.bfs_ismrr = ismrr; in ath_tx_normal_setup()
1916 struct ath_buf *bf, struct mbuf *m0) in ath_tx_start() argument
2016 bf->bf_state.bfs_tid = tid; in ath_tx_start()
2017 bf->bf_state.bfs_tx_queue = txq->axq_qnum; in ath_tx_start()
2018 bf->bf_state.bfs_pri = pri; in ath_tx_start()
2036 bf->bf_state.bfs_tx_queue = sc->sc_cabq->axq_qnum; in ath_tx_start()
2042 bf->bf_state.bfs_dobaw = 0; in ath_tx_start()
2059 seqno = ath_tx_tid_seqno_assign(sc, ni, bf, m0); in ath_tx_start()
2068 bf->bf_state.bfs_dobaw = 1; in ath_tx_start()
2076 bf->bf_state.bfs_seqno = M_SEQNO_GET(m0) << IEEE80211_SEQ_SEQ_SHIFT; in ath_tx_start()
2085 r = ath_tx_normal_setup(sc, ni, bf, m0, txq); in ath_tx_start()
2090 m0 = bf->bf_m; in ath_tx_start()
2120 "%s: bf=%p: mcastq: TX'ing\n", __func__, bf); in ath_tx_start()
2121 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK; in ath_tx_start()
2122 ath_tx_xmit_normal(sc, txq, bf); in ath_tx_start()
2125 ath_tx_swq(sc, ni, txq, queue_to_head, bf); in ath_tx_start()
2127 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK; in ath_tx_start()
2128 ath_tx_xmit_normal(sc, txq, bf); in ath_tx_start()
2135 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK; in ath_tx_start()
2141 ath_tx_leak_count_update(sc, tid, bf); in ath_tx_start()
2142 ath_tx_xmit_normal(sc, txq, bf); in ath_tx_start()
2149 struct ath_buf *bf, struct mbuf *m0, in ath_tx_raw_start() argument
2185 "ath_tx_raw_start: ni=%p, bf=%p, raw", ni, bf); in ath_tx_raw_start()
2232 bf->bf_state.bfs_dobaw = 0; in ath_tx_raw_start()
2234 error = ath_tx_dmasetup(sc, bf, m0); in ath_tx_raw_start()
2237 m0 = bf->bf_m; /* NB: may have changed */ in ath_tx_raw_start()
2240 bf->bf_node = ni; /* NB: held reference */ in ath_tx_raw_start()
2249 bf->bf_state.bfs_doprot = 1; in ath_tx_raw_start()
2281 bf->bf_flags |= ATH_BUF_TOA_PROBE; in ath_tx_raw_start()
2298 bf->bf_state.bfs_ctsrate0 = params->ibp_ctsrate; in ath_tx_raw_start()
2327 ds = bf->bf_desc; in ath_tx_raw_start()
2331 bf->bf_state.bfs_pktlen = pktlen; in ath_tx_raw_start()
2332 bf->bf_state.bfs_hdrlen = hdrlen; in ath_tx_raw_start()
2333 bf->bf_state.bfs_atype = atype; in ath_tx_raw_start()
2334 bf->bf_state.bfs_txpower = MIN(params->ibp_power, in ath_tx_raw_start()
2336 bf->bf_state.bfs_txrate0 = txrate; in ath_tx_raw_start()
2337 bf->bf_state.bfs_try0 = try0; in ath_tx_raw_start()
2338 bf->bf_state.bfs_keyix = keyix; in ath_tx_raw_start()
2339 bf->bf_state.bfs_txantenna = txantenna; in ath_tx_raw_start()
2340 bf->bf_state.bfs_txflags = flags; in ath_tx_raw_start()
2341 bf->bf_state.bfs_shpream = in ath_tx_raw_start()
2345 bf->bf_state.bfs_tid = WME_AC_TO_TID(pri); in ath_tx_raw_start()
2346 bf->bf_state.bfs_tx_queue = sc->sc_ac2q[pri]->axq_qnum; in ath_tx_raw_start()
2347 bf->bf_state.bfs_pri = pri; in ath_tx_raw_start()
2350 bf->bf_state.bfs_ctsrate = 0; in ath_tx_raw_start()
2351 bf->bf_state.bfs_ctsduration = 0; in ath_tx_raw_start()
2352 bf->bf_state.bfs_ismrr = ismrr; in ath_tx_raw_start()
2355 bzero(&bf->bf_state.bfs_rc, sizeof(bf->bf_state.bfs_rc)); in ath_tx_raw_start()
2357 bf->bf_state.bfs_rc[0].rix = rix; in ath_tx_raw_start()
2358 bf->bf_state.bfs_rc[0].tries = try0; in ath_tx_raw_start()
2359 bf->bf_state.bfs_rc[0].ratecode = txrate; in ath_tx_raw_start()
2365 bf->bf_state.bfs_rc[1].rix = rix; in ath_tx_raw_start()
2366 bf->bf_state.bfs_rc[1].tries = params->ibp_try1; in ath_tx_raw_start()
2369 bf->bf_state.bfs_rc[2].rix = rix; in ath_tx_raw_start()
2370 bf->bf_state.bfs_rc[2].tries = params->ibp_try2; in ath_tx_raw_start()
2373 bf->bf_state.bfs_rc[3].rix = rix; in ath_tx_raw_start()
2374 bf->bf_state.bfs_rc[3].tries = params->ibp_try3; in ath_tx_raw_start()
2380 ath_tx_rate_fill_rcflags(sc, bf); in ath_tx_raw_start()
2397 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK; in ath_tx_raw_start()
2404 ath_tx_xmit_normal(sc, sc->sc_ac2q[pri], bf); in ath_tx_raw_start()
2408 ath_tx_swq(sc, ni, sc->sc_ac2q[pri], queue_to_head, bf); in ath_tx_raw_start()
2410 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK; in ath_tx_raw_start()
2411 ath_tx_xmit_normal(sc, sc->sc_ac2q[pri], bf); in ath_tx_raw_start()
2415 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK; in ath_tx_raw_start()
2421 ath_tx_leak_count_update(sc, tid, bf); in ath_tx_raw_start()
2422 ath_tx_xmit_normal(sc, sc->sc_ac2q[pri], bf); in ath_tx_raw_start()
2438 struct ath_buf *bf; in ath_raw_xmit() local
2489 bf = ath_getbuf(sc, ATH_BUFTYPE_MGMT); in ath_raw_xmit()
2490 if (bf == NULL) { in ath_raw_xmit()
2496 ATH_KTR(sc, ATH_KTR_TX, 3, "ath_raw_xmit: m=%p, params=%p, bf=%p\n", in ath_raw_xmit()
2497 m, params, bf); in ath_raw_xmit()
2504 if (ath_tx_start(sc, ni, bf, m)) { in ath_raw_xmit()
2513 if (ath_tx_raw_start(sc, ni, bf, m, params)) { in ath_raw_xmit()
2543 "bf=%p", in ath_raw_xmit()
2546 bf); in ath_raw_xmit()
2548 ath_returnbuf_head(sc, bf); in ath_raw_xmit()
2652 struct ath_tid *tid, struct ath_buf *bf) in ath_tx_addto_baw() argument
2659 if (bf->bf_state.bfs_isretried) in ath_tx_addto_baw()
2664 if (! bf->bf_state.bfs_dobaw) { in ath_tx_addto_baw()
2667 __func__, SEQNO(bf->bf_state.bfs_seqno), in ath_tx_addto_baw()
2671 if (bf->bf_state.bfs_addedbaw) in ath_tx_addto_baw()
2675 __func__, tid->tid, SEQNO(bf->bf_state.bfs_seqno), in ath_tx_addto_baw()
2684 SEQNO(bf->bf_state.bfs_seqno))) { in ath_tx_addto_baw()
2686 "%s: bf=%p: outside of BAW?? tid=%d, seqno %d; window %d:%d; " in ath_tx_addto_baw()
2688 __func__, bf, tid->tid, SEQNO(bf->bf_state.bfs_seqno), in ath_tx_addto_baw()
2697 index = ATH_BA_INDEX(tap->txa_start, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_addto_baw()
2702 __func__, tid->tid, SEQNO(bf->bf_state.bfs_seqno), in ath_tx_addto_baw()
2715 "%s: BA bf: %p; seqno=%d ; new bf: %p; seqno=%d\n", in ath_tx_addto_baw()
2719 bf, in ath_tx_addto_baw()
2720 SEQNO(bf->bf_state.bfs_seqno) in ath_tx_addto_baw()
2723 tid->tx_buf[cindex] = bf; in ath_tx_addto_baw()
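
ath_tx_addto_baw() maps an 802.11 sequence number into the software BAW ring: ATH_BA_INDEX takes the distance from the window start modulo the 4096-value sequence space, and the slot is that distance offset from baw_head modulo the ring size. A standalone sketch of the index math, with a 64-slot ring assumed for illustration:

#include <assert.h>

#define SEQ_RANGE 4096          /* 12-bit 802.11 sequence space */
#define WIN_SIZE  64            /* assumed size of the tx_buf[] ring */

/* Distance of seqno from the BAW start, mod the sequence space;
 * this is what ATH_BA_INDEX computes. */
static int
ba_index(int txa_start, int seqno)
{
        return ((seqno - txa_start + SEQ_RANGE) % SEQ_RANGE);
}

int
main(void)
{
        int baw_head = 10;
        /* Window starts at 4090; seqno 3 has wrapped past 4095. */
        int index  = ba_index(4090, 3);                 /* 9 */
        int cindex = (baw_head + index) % WIN_SIZE;     /* ring slot 19 */

        assert(index == 9);
        assert(cindex == 19);
        /* The in-window test mirrors BAW_WITHIN(start, wnd, seqno). */
        assert(ba_index(4090, 3) < WIN_SIZE);
        return (0);
}
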
2775 "%s: old bf=%p, new bf=%p\n", __func__, old_bf, new_bf); in ath_tx_switch_baw_buf()
2790 struct ath_tid *tid, const struct ath_buf *bf) in ath_tx_update_baw() argument
2794 int seqno = SEQNO(bf->bf_state.bfs_seqno); in ath_tx_update_baw()
2818 if (tid->tx_buf[cindex] != bf) { in ath_tx_update_baw()
2820 "%s: comp bf=%p, seq=%d; slot bf=%p, seqno=%d\n", in ath_tx_update_baw()
2821 __func__, bf, SEQNO(bf->bf_state.bfs_seqno), in ath_tx_update_baw()
2841 struct ath_buf *bf) in ath_tx_leak_count_update() argument
2848 wh = mtod(bf->bf_m, struct ieee80211_frame *); in ath_tx_leak_count_update()
2872 bus_dmamap_sync(sc->sc_dmat, bf->bf_dmamap, in ath_tx_leak_count_update()
2979 struct ath_buf *bf, struct mbuf *m0) in ath_tx_tid_seqno_assign() argument
3045 struct ath_txq *txq, struct ath_buf *bf) in ath_tx_xmit_aggr() argument
3047 struct ath_tid *tid = &an->an_tid[bf->bf_state.bfs_tid]; in ath_tx_xmit_aggr()
3056 ATH_TID_INSERT_HEAD(tid, bf, bf_list); in ath_tx_xmit_aggr()
3062 if (bf->bf_state.bfs_dobaw && in ath_tx_xmit_aggr()
3064 SEQNO(bf->bf_state.bfs_seqno)))) { in ath_tx_xmit_aggr()
3065 ATH_TID_INSERT_HEAD(tid, bf, bf_list); in ath_tx_xmit_aggr()
3080 if (bf->bf_state.bfs_aggr != 0 || bf->bf_state.bfs_nframes > 1) { in ath_tx_xmit_aggr()
3083 bf->bf_state.bfs_aggr, bf->bf_state.bfs_nframes); in ath_tx_xmit_aggr()
3084 bf->bf_state.bfs_aggr = 0; in ath_tx_xmit_aggr()
3085 bf->bf_state.bfs_nframes = 1; in ath_tx_xmit_aggr()
3089 ath_tx_update_clrdmask(sc, tid, bf); in ath_tx_xmit_aggr()
3092 ath_tx_do_ratelookup(sc, bf, tid->tid, bf->bf_state.bfs_pktlen, in ath_tx_xmit_aggr()
3094 ath_tx_calc_duration(sc, bf); in ath_tx_xmit_aggr()
3095 ath_tx_calc_protection(sc, bf); in ath_tx_xmit_aggr()
3096 ath_tx_set_rtscts(sc, bf); in ath_tx_xmit_aggr()
3097 ath_tx_rate_fill_rcflags(sc, bf); in ath_tx_xmit_aggr()
3098 ath_tx_setds(sc, bf); in ath_tx_xmit_aggr()
3107 if (bf->bf_state.bfs_dobaw) { in ath_tx_xmit_aggr()
3108 ath_tx_addto_baw(sc, an, tid, bf); in ath_tx_xmit_aggr()
3109 bf->bf_state.bfs_addedbaw = 1; in ath_tx_xmit_aggr()
3113 bf->bf_comp = ath_tx_aggr_comp; in ath_tx_xmit_aggr()
3120 ath_tx_leak_count_update(sc, tid, bf); in ath_tx_xmit_aggr()
3123 ath_tx_handoff(sc, txq, bf); in ath_tx_xmit_aggr()
3134 struct ath_txq *txq, int queue_to_head, struct ath_buf *bf) in ath_tx_swq() argument
3140 struct mbuf *m0 = bf->bf_m; in ath_tx_swq()
3150 DPRINTF(sc, ATH_DEBUG_SW_TX, "%s: bf=%p, pri=%d, tid=%d, qos=%d\n", in ath_tx_swq()
3151 __func__, bf, pri, tid, IEEE80211_QOS_HAS_SEQ(wh)); in ath_tx_swq()
3155 bf->bf_state.bfs_tid = tid; in ath_tx_swq()
3156 bf->bf_state.bfs_tx_queue = txq->axq_qnum; in ath_tx_swq()
3157 bf->bf_state.bfs_pri = pri; in ath_tx_swq()
3176 ATH_TID_INSERT_HEAD(atid, bf, bf_list); in ath_tx_swq()
3178 ATH_TID_INSERT_TAIL(atid, bf, bf_list); in ath_tx_swq()
3182 ATH_TID_INSERT_TAIL(atid, bf, bf_list); in ath_tx_swq()
3204 ATH_TID_INSERT_TAIL(atid, bf, bf_list); in ath_tx_swq()
3219 bf = ATH_TID_FIRST(atid); in ath_tx_swq()
3220 ATH_TID_REMOVE(atid, bf, bf_list); in ath_tx_swq()
3227 bf->bf_state.bfs_aggr = 0; in ath_tx_swq()
3228 bf->bf_state.bfs_nframes = 1; in ath_tx_swq()
3231 ath_tx_xmit_aggr(sc, an, txq, bf); in ath_tx_swq()
3261 ath_tx_update_clrdmask(sc, atid, bf); in ath_tx_swq()
3268 ath_tx_leak_count_update(sc, atid, bf); in ath_tx_swq()
3273 ath_tx_xmit_normal(sc, txq, bf); in ath_tx_swq()
3277 ATH_TID_INSERT_TAIL(atid, bf, bf_list); in ath_tx_swq()
3427 struct ath_buf *bf) in ath_tx_tid_filt_addbuf() argument
3436 DPRINTF(sc, ATH_DEBUG_SW_TX_FILT, "%s: bf=%p\n", __func__, bf); in ath_tx_tid_filt_addbuf()
3439 ath_tx_set_retry(sc, bf); in ath_tx_tid_filt_addbuf()
3442 ATH_TID_FILT_INSERT_TAIL(tid, bf, bf_list); in ath_tx_tid_filt_addbuf()
3452 struct ath_buf *bf) in ath_tx_tid_filt_comp_buf() argument
3465 ath_tx_tid_filt_addbuf(sc, tid, bf); in ath_tx_tid_filt_comp_buf()
3478 struct ath_buf *bf; in ath_tx_tid_filt_comp_complete() local
3497 while ((bf = ATH_TID_FILT_LAST(tid, ath_bufhead_s)) != NULL) { in ath_tx_tid_filt_comp_complete()
3498 ATH_TID_FILT_REMOVE(tid, bf, bf_list); in ath_tx_tid_filt_comp_complete()
3499 ATH_TID_INSERT_HEAD(tid, bf, bf_list); in ath_tx_tid_filt_comp_complete()
3515 * since the buffer may be cloned, bf must not be touched after this
3520 struct ath_buf *bf) in ath_tx_tid_filt_comp_single() argument
3530 if (bf->bf_state.bfs_retries > SWMAX_RETRIES) { in ath_tx_tid_filt_comp_single()
3533 "%s: bf=%p, seqno=%d, exceeded retries\n", in ath_tx_tid_filt_comp_single()
3535 bf, in ath_tx_tid_filt_comp_single()
3536 SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_tid_filt_comp_single()
3545 if (bf->bf_flags & ATH_BUF_BUSY) { in ath_tx_tid_filt_comp_single()
3546 nbf = ath_tx_retry_clone(sc, tid->an, tid, bf); in ath_tx_tid_filt_comp_single()
3549 __func__, bf, nbf); in ath_tx_tid_filt_comp_single()
3551 nbf = bf; in ath_tx_tid_filt_comp_single()
3557 __func__, bf); in ath_tx_tid_filt_comp_single()
3573 struct ath_buf *bf, *bf_next, *nbf; in ath_tx_tid_filt_comp_aggr() local
3577 bf = bf_first; in ath_tx_tid_filt_comp_aggr()
3578 while (bf) { in ath_tx_tid_filt_comp_aggr()
3579 bf_next = bf->bf_next; in ath_tx_tid_filt_comp_aggr()
3580 bf->bf_next = NULL; /* Remove it from the aggr list */ in ath_tx_tid_filt_comp_aggr()
3585 if (bf->bf_state.bfs_retries > SWMAX_RETRIES) { in ath_tx_tid_filt_comp_aggr()
3588 "%s: tid=%d, bf=%p, seqno=%d, exceeded retries\n", in ath_tx_tid_filt_comp_aggr()
3591 bf, in ath_tx_tid_filt_comp_aggr()
3592 SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_tid_filt_comp_aggr()
3593 TAILQ_INSERT_TAIL(bf_q, bf, bf_list); in ath_tx_tid_filt_comp_aggr()
3597 if (bf->bf_flags & ATH_BUF_BUSY) { in ath_tx_tid_filt_comp_aggr()
3598 nbf = ath_tx_retry_clone(sc, tid->an, tid, bf); in ath_tx_tid_filt_comp_aggr()
3601 __func__, tid->tid, bf, nbf, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_tid_filt_comp_aggr()
3603 nbf = bf; in ath_tx_tid_filt_comp_aggr()
3613 __func__, tid->tid, bf, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_tid_filt_comp_aggr()
3614 TAILQ_INSERT_TAIL(bf_q, bf, bf_list); in ath_tx_tid_filt_comp_aggr()
3619 bf = bf_next; in ath_tx_tid_filt_comp_aggr()
3805 struct ath_tid *tid, ath_bufhead *bf_cq, struct ath_buf *bf) in ath_tx_tid_drain_pkt() argument
3815 bf->bf_state.bfs_dobaw) { in ath_tx_tid_drain_pkt()
3821 if (bf->bf_state.bfs_retries > 0) { in ath_tx_tid_drain_pkt()
3822 ath_tx_update_baw(sc, an, tid, bf); in ath_tx_tid_drain_pkt()
3823 bf->bf_state.bfs_dobaw = 0; in ath_tx_tid_drain_pkt()
3829 if (! bf->bf_state.bfs_addedbaw) in ath_tx_tid_drain_pkt()
3832 __func__, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_tid_drain_pkt()
3837 bf->bf_next = NULL; in ath_tx_tid_drain_pkt()
3840 TAILQ_INSERT_TAIL(bf_cq, bf, bf_list); in ath_tx_tid_drain_pkt()
3845 const char *pfx, struct ath_tid *tid, struct ath_buf *bf) in ath_tx_tid_drain_print() argument
3855 "%s: %s: %6D: bf=%p: addbaw=%d, dobaw=%d, " in ath_tx_tid_drain_print()
3861 bf, in ath_tx_tid_drain_print()
3862 bf->bf_state.bfs_addedbaw, in ath_tx_tid_drain_print()
3863 bf->bf_state.bfs_dobaw, in ath_tx_tid_drain_print()
3864 SEQNO(bf->bf_state.bfs_seqno), in ath_tx_tid_drain_print()
3865 bf->bf_state.bfs_retries); in ath_tx_tid_drain_print()
3867 "%s: %s: %6D: bf=%p: txq[%d] axq_depth=%d, axq_aggr_depth=%d\n", in ath_tx_tid_drain_print()
3872 bf, in ath_tx_tid_drain_print()
3877 "%s: %s: %6D: bf=%p: tid txq_depth=%d hwq_depth=%d, bar_wait=%d, " in ath_tx_tid_drain_print()
3883 bf, in ath_tx_tid_drain_print()
3906 mtod(bf->bf_m, const uint8_t *), in ath_tx_tid_drain_print()
3907 bf->bf_m->m_len, 0, -1); in ath_tx_tid_drain_print()
3929 struct ath_buf *bf; in ath_tx_tid_drain() local
3941 bf = ATH_TID_FIRST(tid); in ath_tx_tid_drain()
3942 if (bf == NULL) { in ath_tx_tid_drain()
3947 ath_tx_tid_drain_print(sc, an, "norm", tid, bf); in ath_tx_tid_drain()
3951 ATH_TID_REMOVE(tid, bf, bf_list); in ath_tx_tid_drain()
3952 ath_tx_tid_drain_pkt(sc, an, tid, bf_cq, bf); in ath_tx_tid_drain()
3958 bf = ATH_TID_FILT_FIRST(tid); in ath_tx_tid_drain()
3959 if (bf == NULL) in ath_tx_tid_drain()
3963 ath_tx_tid_drain_print(sc, an, "filt", tid, bf); in ath_tx_tid_drain()
3967 ATH_TID_FILT_REMOVE(tid, bf, bf_list); in ath_tx_tid_drain()
3968 ath_tx_tid_drain_pkt(sc, an, tid, bf_cq, bf); in ath_tx_tid_drain()
4089 struct ath_buf *bf; in ath_tx_node_flush() local
4130 while ((bf = TAILQ_FIRST(&bf_cq)) != NULL) { in ath_tx_node_flush()
4131 TAILQ_REMOVE(&bf_cq, bf, bf_list); in ath_tx_node_flush()
4132 ath_tx_default_comp(sc, bf, 0); in ath_tx_node_flush()
4144 struct ath_buf *bf; in ath_tx_txq_drain() local
4161 while ((bf = TAILQ_FIRST(&bf_cq)) != NULL) { in ath_tx_txq_drain()
4162 TAILQ_REMOVE(&bf_cq, bf, bf_list); in ath_tx_txq_drain()
4163 ath_tx_default_comp(sc, bf, 0); in ath_tx_txq_drain()
4184 ath_tx_normal_comp(struct ath_softc *sc, struct ath_buf *bf, int fail) in ath_tx_normal_comp() argument
4186 struct ieee80211_node *ni = bf->bf_node; in ath_tx_normal_comp()
4188 int tid = bf->bf_state.bfs_tid; in ath_tx_normal_comp()
4190 struct ath_tx_status *ts = &bf->bf_status.ds_txstat; in ath_tx_normal_comp()
4195 DPRINTF(sc, ATH_DEBUG_SW_TX, "%s: bf=%p: fail=%d, hwq_depth now %d\n", in ath_tx_normal_comp()
4196 __func__, bf, fail, atid->hwq_depth - 1); in ath_tx_normal_comp()
4212 ath_tx_tid_filt_comp_buf(sc, atid, bf); in ath_tx_normal_comp()
4253 if (fail == 0 && ((bf->bf_state.bfs_txflags & HAL_TXDESC_NOACK) == 0)) in ath_tx_normal_comp()
4254 ath_tx_update_ratectrl(sc, ni, bf->bf_state.bfs_rc, in ath_tx_normal_comp()
4256 bf->bf_state.bfs_pktlen, in ath_tx_normal_comp()
4257 bf->bf_state.bfs_pktlen, in ath_tx_normal_comp()
4260 ath_tx_default_comp(sc, bf, fail); in ath_tx_normal_comp()
4271 ath_tx_comp_cleanup_unaggr(struct ath_softc *sc, struct ath_buf *bf) in ath_tx_comp_cleanup_unaggr() argument
4273 struct ieee80211_node *ni = bf->bf_node; in ath_tx_comp_cleanup_unaggr()
4275 int tid = bf->bf_state.bfs_tid; in ath_tx_comp_cleanup_unaggr()
4285 if (bf->bf_state.bfs_dobaw) { in ath_tx_comp_cleanup_unaggr()
4286 ath_tx_update_baw(sc, an, atid, bf); in ath_tx_comp_cleanup_unaggr()
4287 if (!bf->bf_state.bfs_addedbaw) in ath_tx_comp_cleanup_unaggr()
4290 __func__, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_comp_cleanup_unaggr()
4302 ath_tx_default_comp(sc, bf, 0); in ath_tx_comp_cleanup_unaggr()
4315 struct ath_buf *bf, *bf_next; in ath_tx_tid_cleanup_frame() local
4327 bf = bf_head; in ath_tx_tid_cleanup_frame()
4328 while (bf != NULL) { in ath_tx_tid_cleanup_frame()
4329 bf_next = bf->bf_next; /* next aggregate frame, or NULL */ in ath_tx_tid_cleanup_frame()
4339 if (bf->bf_state.bfs_addedbaw) { in ath_tx_tid_cleanup_frame()
4340 ath_tx_update_baw(sc, an, atid, bf); in ath_tx_tid_cleanup_frame()
4341 bf->bf_state.bfs_dobaw = 0; in ath_tx_tid_cleanup_frame()
4347 bf->bf_comp = ath_tx_normal_comp; in ath_tx_tid_cleanup_frame()
4348 bf->bf_next = NULL; in ath_tx_tid_cleanup_frame()
4353 TAILQ_INSERT_TAIL(bf_cq, bf, bf_list); in ath_tx_tid_cleanup_frame()
4358 bf = bf_next; in ath_tx_tid_cleanup_frame()
4380 struct ath_buf *bf, *bf_next; in ath_tx_tid_cleanup() local
4394 while ((bf = ATH_TID_FILT_LAST(atid, ath_bufhead_s)) != NULL) { in ath_tx_tid_cleanup()
4395 ATH_TID_FILT_REMOVE(atid, bf, bf_list); in ath_tx_tid_cleanup()
4396 ATH_TID_INSERT_HEAD(atid, bf, bf_list); in ath_tx_tid_cleanup()
4405 bf = ATH_TID_FIRST(atid); in ath_tx_tid_cleanup()
4406 while (bf) { in ath_tx_tid_cleanup()
4411 bf_next = TAILQ_NEXT(bf, bf_list); in ath_tx_tid_cleanup()
4416 ath_tx_tid_cleanup_frame(sc, an, tid, bf, bf_cq); in ath_tx_tid_cleanup()
4421 bf = bf_next; in ath_tx_tid_cleanup()
4448 struct ath_tid *tid, struct ath_buf *bf) in ath_tx_retry_clone() argument
4456 * works out, 'bf' will have no DMA mapping, no mbuf in ath_tx_retry_clone()
4459 nbf = ath_buf_clone(sc, bf); in ath_tx_retry_clone()
4493 if (bf->bf_state.bfs_dobaw) in ath_tx_retry_clone()
4494 ath_tx_switch_baw_buf(sc, an, tid, bf, nbf); in ath_tx_retry_clone()
4497 ath_freebuf(sc, bf); in ath_tx_retry_clone()
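
ath_tx_retry_clone() handles a buffer scheduled for software retry while still marked ATH_BUF_BUSY (the hardware may still reference its descriptor): the frame state migrates to a freshly cloned buffer, the BAW slot is switched to track the clone, and the busy original is freed back. A toy sketch of the clone-and-swap idea; all names are hypothetical:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct buf {
        int busy;               /* hardware may still walk this descriptor */
        int seqno, retries;     /* frame state that must survive the swap */
};

/* Clone-on-busy: return the buffer the retry should use from now on. */
static struct buf *
retry_clone(struct buf *bf, struct buf **baw_slot)
{
        struct buf *nbf;

        if (!bf->busy)
                return (bf);            /* safe to reuse in place */
        nbf = malloc(sizeof(*nbf));
        if (nbf == NULL)
                return (NULL);          /* caller must drop the frame */
        memcpy(nbf, bf, sizeof(*nbf));  /* migrate frame state */
        nbf->busy = 0;
        if (*baw_slot == bf)            /* BAW must track the clone */
                *baw_slot = nbf;
        /* the busy original goes back to the free list once idle */
        return (nbf);
}

int
main(void)
{
        struct buf bf = { 1, 42, 1 }, *slot = &bf;
        struct buf *nbf = retry_clone(&bf, &slot);

        printf("retrying seqno %d via %s buffer\n", nbf->seqno,
            nbf == &bf ? "original" : "cloned");
        if (nbf != &bf)
                free(nbf);
        return (0);
}
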
4513 ath_tx_aggr_retry_unaggr(struct ath_softc *sc, struct ath_buf *bf) in ath_tx_aggr_retry_unaggr() argument
4515 struct ieee80211_node *ni = bf->bf_node; in ath_tx_aggr_retry_unaggr()
4517 int tid = bf->bf_state.bfs_tid; in ath_tx_aggr_retry_unaggr()
4533 if ((bf->bf_state.bfs_retries < SWMAX_RETRIES) && in ath_tx_aggr_retry_unaggr()
4534 (bf->bf_flags & ATH_BUF_BUSY)) { in ath_tx_aggr_retry_unaggr()
4536 nbf = ath_tx_retry_clone(sc, an, atid, bf); in ath_tx_aggr_retry_unaggr()
4538 /* bf has been freed at this point */ in ath_tx_aggr_retry_unaggr()
4539 bf = nbf; in ath_tx_aggr_retry_unaggr()
4541 bf->bf_state.bfs_retries = SWMAX_RETRIES + 1; in ath_tx_aggr_retry_unaggr()
4544 if (bf->bf_state.bfs_retries >= SWMAX_RETRIES) { in ath_tx_aggr_retry_unaggr()
4547 __func__, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_aggr_retry_unaggr()
4551 if (bf->bf_state.bfs_dobaw) { in ath_tx_aggr_retry_unaggr()
4552 ath_tx_update_baw(sc, an, atid, bf); in ath_tx_aggr_retry_unaggr()
4553 if (! bf->bf_state.bfs_addedbaw) in ath_tx_aggr_retry_unaggr()
4556 __func__, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_aggr_retry_unaggr()
4558 bf->bf_state.bfs_dobaw = 0; in ath_tx_aggr_retry_unaggr()
4569 /* Free buffer, bf is free after this call */ in ath_tx_aggr_retry_unaggr()
4570 ath_tx_default_comp(sc, bf, 0); in ath_tx_aggr_retry_unaggr()
4579 ath_tx_set_retry(sc, bf); in ath_tx_aggr_retry_unaggr()
4586 ATH_TID_INSERT_HEAD(atid, bf, bf_list); in ath_tx_aggr_retry_unaggr()
4603 ath_tx_retry_subframe(struct ath_softc *sc, struct ath_buf *bf, in ath_tx_retry_subframe() argument
4606 struct ieee80211_node *ni = bf->bf_node; in ath_tx_retry_subframe()
4608 int tid = bf->bf_state.bfs_tid; in ath_tx_retry_subframe()
4614 ath_hal_clr11n_aggr(sc->sc_ah, bf->bf_desc); in ath_tx_retry_subframe()
4615 ath_hal_set11nburstduration(sc->sc_ah, bf->bf_desc, 0); in ath_tx_retry_subframe()
4617 /* ath_hal_set11n_virtualmorefrag(sc->sc_ah, bf->bf_desc, 0); */ in ath_tx_retry_subframe()
4627 if ((bf->bf_state.bfs_retries < SWMAX_RETRIES) && in ath_tx_retry_subframe()
4628 (bf->bf_flags & ATH_BUF_BUSY)) { in ath_tx_retry_subframe()
4630 nbf = ath_tx_retry_clone(sc, an, atid, bf); in ath_tx_retry_subframe()
4632 /* bf has been freed at this point */ in ath_tx_retry_subframe()
4633 bf = nbf; in ath_tx_retry_subframe()
4635 bf->bf_state.bfs_retries = SWMAX_RETRIES + 1; in ath_tx_retry_subframe()
4638 if (bf->bf_state.bfs_retries >= SWMAX_RETRIES) { in ath_tx_retry_subframe()
4642 __func__, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_retry_subframe()
4643 ath_tx_update_baw(sc, an, atid, bf); in ath_tx_retry_subframe()
4644 if (!bf->bf_state.bfs_addedbaw) in ath_tx_retry_subframe()
4647 __func__, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_retry_subframe()
4648 bf->bf_state.bfs_dobaw = 0; in ath_tx_retry_subframe()
4652 ath_tx_set_retry(sc, bf); in ath_tx_retry_subframe()
4654 bf->bf_next = NULL; /* Just to make sure */ in ath_tx_retry_subframe()
4657 bf->bf_state.bfs_aggr = 0; in ath_tx_retry_subframe()
4658 bf->bf_state.bfs_ndelim = 0; /* ??? needed? */ in ath_tx_retry_subframe()
4659 bf->bf_state.bfs_nframes = 1; in ath_tx_retry_subframe()
4661 TAILQ_INSERT_TAIL(bf_q, bf, bf_list); in ath_tx_retry_subframe()
4674 struct ath_buf *bf_next, *bf; in ath_tx_comp_aggr_error() local
4697 bf = bf_first; in ath_tx_comp_aggr_error()
4698 while (bf) { in ath_tx_comp_aggr_error()
4699 bf_next = bf->bf_next; in ath_tx_comp_aggr_error()
4700 bf->bf_next = NULL; /* Remove it from the aggr list */ in ath_tx_comp_aggr_error()
4702 if (ath_tx_retry_subframe(sc, bf, &bf_q)) { in ath_tx_comp_aggr_error()
4704 bf->bf_next = NULL; in ath_tx_comp_aggr_error()
4705 TAILQ_INSERT_TAIL(&bf_cq, bf, bf_list); in ath_tx_comp_aggr_error()
4707 bf = bf_next; in ath_tx_comp_aggr_error()
4711 while ((bf = TAILQ_LAST(&bf_q, ath_bufhead_s)) != NULL) { in ath_tx_comp_aggr_error()
4712 TAILQ_REMOVE(&bf_q, bf, bf_list); in ath_tx_comp_aggr_error()
4713 ATH_TID_INSERT_HEAD(tid, bf, bf_list); in ath_tx_comp_aggr_error()
4742 while ((bf = TAILQ_FIRST(&bf_cq)) != NULL) { in ath_tx_comp_aggr_error()
4743 TAILQ_REMOVE(&bf_cq, bf, bf_list); in ath_tx_comp_aggr_error()
4744 ath_tx_default_comp(sc, bf, 0); in ath_tx_comp_aggr_error()
4757 struct ath_buf *bf, *bf_next; in ath_tx_comp_cleanup_aggr() local
4769 bf = bf_first; in ath_tx_comp_cleanup_aggr()
4770 while (bf) { in ath_tx_comp_cleanup_aggr()
4772 if (bf->bf_state.bfs_dobaw) { in ath_tx_comp_cleanup_aggr()
4773 ath_tx_update_baw(sc, an, atid, bf); in ath_tx_comp_cleanup_aggr()
4774 if (!bf->bf_state.bfs_addedbaw) in ath_tx_comp_cleanup_aggr()
4777 __func__, SEQNO(bf->bf_state.bfs_seqno)); in ath_tx_comp_cleanup_aggr()
4779 bf = bf->bf_next; in ath_tx_comp_cleanup_aggr()
4802 bf = bf_first; in ath_tx_comp_cleanup_aggr()
4803 while (bf) { in ath_tx_comp_cleanup_aggr()
4804 bf_next = bf->bf_next; in ath_tx_comp_cleanup_aggr()
4805 bf->bf_next = NULL; in ath_tx_comp_cleanup_aggr()
4806 ath_tx_default_comp(sc, bf, 1); in ath_tx_comp_cleanup_aggr()
4807 bf = bf_next; in ath_tx_comp_cleanup_aggr()
4821 //struct ath_desc *ds = bf->bf_lastds;
4833 struct ath_buf *bf, *bf_next; in ath_tx_aggr_comp_aggr() local
4903 TAILQ_FOREACH_SAFE(bf, &bf_cq, bf_list, bf_next) {
4904 if (bf->bf_state.bfs_addedbaw)
4906 if (bf->bf_state.bfs_dobaw) {
4907 ath_tx_update_baw(sc, an, atid, bf);
4908 if (!bf->bf_state.bfs_addedbaw)
4912 SEQNO(bf->bf_state.bfs_seqno));
4914 bf->bf_state.bfs_dobaw = 0;
5018 bf = bf_first;
5038 while (bf) {
5041 SEQNO(bf->bf_state.bfs_seqno));
5042 bf_next = bf->bf_next;
5043 bf->bf_next = NULL; /* Remove it from the aggr list */
5046 "%s: checking bf=%p seqno=%d; ack=%d\n",
5047 __func__, bf, SEQNO(bf->bf_state.bfs_seqno),
5052 ath_tx_update_baw(sc, an, atid, bf);
5053 bf->bf_state.bfs_dobaw = 0;
5054 if (!bf->bf_state.bfs_addedbaw)
5057 __func__, SEQNO(bf->bf_state.bfs_seqno));
5058 bf->bf_next = NULL;
5059 TAILQ_INSERT_TAIL(&bf_cq, bf, bf_list);
5062 if (ath_tx_retry_subframe(sc, bf, &bf_q)) {
5064 bf->bf_next = NULL;
5065 TAILQ_INSERT_TAIL(&bf_cq, bf, bf_list);
5069 bf = bf_next;
5085 "%s: num frames seen=%d; bf nframes=%d\n",
5113 while ((bf = TAILQ_LAST(&bf_q, ath_bufhead_s)) != NULL) {
5114 TAILQ_REMOVE(&bf_q, bf, bf_list);
5115 ATH_TID_INSERT_HEAD(atid, bf, bf_list);
5147 while ((bf = TAILQ_FIRST(&bf_cq)) != NULL) {
5148 TAILQ_REMOVE(&bf_cq, bf, bf_list);
5149 ath_tx_default_comp(sc, bf, 0);
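
The aggregate-completion walk starting at source line 5038 tests each subframe's sequence number against the block-ack bitmap: acknowledged subframes are completed and removed from the BAW, unacked ones are queued for software retry. A standalone sketch of the per-subframe decision, with a flat 64-bit bitmap standing in for the HAL's ba[] words (a rough analogue of ATH_BA_ISSET over ATH_BA_INDEX):

#include <stdint.h>
#include <stdio.h>

#define SEQ_RANGE 4096

/* Is the subframe at 'seqno' acked in the BA bitmap anchored at seq_st? */
static int
ba_isset(uint64_t ba, int seq_st, int seqno)
{
        int idx = (seqno - seq_st + SEQ_RANGE) % SEQ_RANGE;

        return (idx < 64 && ((ba >> idx) & 1));
}

int
main(void)
{
        uint64_t ba = 0x5;      /* subframes 0 and 2 acked */
        int seq_st = 100, seqno;

        for (seqno = 100; seqno < 104; seqno++)
                printf("seqno %d: %s\n", seqno,
                    ba_isset(ba, seq_st, seqno) ?
                    "acked; complete and update BAW" :
                    "unacked; queue for software retry");
        return (0);
}
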
5161 ath_tx_aggr_comp_unaggr(struct ath_softc *sc, struct ath_buf *bf, int fail) in ath_tx_aggr_comp_unaggr() argument
5163 struct ieee80211_node *ni = bf->bf_node;
5165 int tid = bf->bf_state.bfs_tid;
5172 * bf pointer.
5174 ts = bf->bf_status.ds_txstat;
5182 if (fail == 0 && ((bf->bf_state.bfs_txflags & HAL_TXDESC_NOACK) == 0))
5183 ath_tx_update_ratectrl(sc, ni, bf->bf_state.bfs_rc,
5184 &bf->bf_status.ds_txstat,
5185 bf->bf_state.bfs_pktlen,
5186 bf->bf_state.bfs_pktlen,
5200 "%s: bf=%p: tid=%d, hwq_depth=%d, seqno=%d\n",
5201 __func__, bf, bf->bf_state.bfs_tid, atid->hwq_depth,
5202 SEQNO(bf->bf_state.bfs_seqno));
5231 ath_tx_comp_cleanup_unaggr(sc, bf);
5255 freeframe = ath_tx_tid_filt_comp_single(sc, atid, bf);
5257 * If freeframe=0 then bf is no longer ours; don't
5262 if (bf->bf_state.bfs_addedbaw)
5264 if (bf->bf_state.bfs_dobaw) {
5265 ath_tx_update_baw(sc, an, atid, bf);
5266 if (!bf->bf_state.bfs_addedbaw)
5269 __func__, SEQNO(bf->bf_state.bfs_seqno));
5271 bf->bf_state.bfs_dobaw = 0;
5290 * cloned and bf is still valid. Just complete/free it.
5293 ath_tx_default_comp(sc, bf, fail);
5308 ath_tx_aggr_retry_unaggr(sc, bf);
5314 __func__, tid, SEQNO(bf->bf_state.bfs_seqno));
5315 if (bf->bf_state.bfs_dobaw) {
5316 ath_tx_update_baw(sc, an, atid, bf);
5317 bf->bf_state.bfs_dobaw = 0;
5318 if (!bf->bf_state.bfs_addedbaw)
5321 __func__, SEQNO(bf->bf_state.bfs_seqno));
5345 ath_tx_default_comp(sc, bf, fail);
5346 /* bf is freed at this point */
5350 ath_tx_aggr_comp(struct ath_softc *sc, struct ath_buf *bf, int fail) in ath_tx_aggr_comp() argument
5352 if (bf->bf_state.bfs_aggr)
5353 ath_tx_aggr_comp_aggr(sc, bf, fail);
5355 ath_tx_aggr_comp_unaggr(sc, bf, fail);
5370 struct ath_buf *bf; in ath_tx_tid_swq_depth_bytes() local
5383 TAILQ_FOREACH(bf, &tid->tid_q, bf_list) {
5391 SEQNO(bf->bf_state.bfs_seqno)))) {
5396 if (! bf->bf_state.bfs_dobaw) {
5400 nbytes += bf->bf_state.bfs_pktlen;
5426 struct ath_buf *bf; in ath_tx_tid_hw_queue_aggr() local
5461 bf = ATH_TID_FIRST(tid);
5462 if (bf == NULL) {
5470 if (! bf->bf_state.bfs_dobaw) {
5474 ATH_TID_REMOVE(tid, bf, bf_list);
5476 if (bf->bf_state.bfs_nframes > 1)
5480 bf->bf_state.bfs_aggr,
5481 bf->bf_state.bfs_nframes);
5489 bf->bf_state.bfs_aggr = 0;
5490 bf->bf_state.bfs_nframes = 1;
5493 ath_tx_update_clrdmask(sc, tid, bf);
5495 ath_tx_do_ratelookup(sc, bf, tid->tid,
5496 bf->bf_state.bfs_pktlen, false);
5497 ath_tx_calc_duration(sc, bf);
5498 ath_tx_calc_protection(sc, bf);
5499 ath_tx_set_rtscts(sc, bf);
5500 ath_tx_rate_fill_rcflags(sc, bf);
5501 ath_tx_setds(sc, bf);
5502 ath_hal_clr11n_aggr(sc->sc_ah, bf->bf_desc);
5518 ath_tx_do_ratelookup(sc, bf, tid->tid, swq_pktbytes, true);
5525 ath_tx_calc_duration(sc, bf);
5526 ath_tx_calc_protection(sc, bf);
5528 ath_tx_set_rtscts(sc, bf);
5529 ath_tx_rate_fill_rcflags(sc, bf);
5546 bf = TAILQ_FIRST(&bf_q);
5556 if (bf->bf_state.bfs_nframes == 1) {
5561 ath_tx_update_clrdmask(sc, tid, bf);
5563 bf->bf_state.bfs_aggr = 0;
5564 bf->bf_state.bfs_ndelim = 0;
5565 ath_tx_setds(sc, bf);
5566 ath_hal_clr11n_aggr(sc->sc_ah, bf->bf_desc);
5575 __func__, bf->bf_state.bfs_nframes,
5576 bf->bf_state.bfs_al);
5577 bf->bf_state.bfs_aggr = 1;
5578 sc->sc_aggr_stats.aggr_pkts[bf->bf_state.bfs_nframes]++;
5582 ath_tx_update_clrdmask(sc, tid, bf);
5587 ath_tx_calc_duration(sc, bf);
5588 ath_tx_calc_protection(sc, bf);
5595 ath_tx_set_rtscts(sc, bf);
5602 ath_tx_setds_11n(sc, bf);
5606 bf->bf_comp = ath_tx_aggr_comp;
5608 if (bf->bf_state.bfs_tid == IEEE80211_NONQOS_TID)
5617 ath_tx_leak_count_update(sc, tid, bf);
5620 ath_tx_handoff(sc, txq, bf);
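
The scheduler loop ending here embodies the aggregate/single split: when only one frame comes out of aggregation, its 11n aggregate bits are cleared (ath_hal_clr11n_aggr) and it goes out as a plain frame; otherwise bfs_aggr is set and the whole chain is handed off once, using the length and delimiter padding computed for the A-MPDU. A stub-only sketch of that branch, under invented names:

#include <stdio.h>

struct txframe { int aggr, ndelim, nframes; };

static void setds_single(struct txframe *f) { f->aggr = 0; f->ndelim = 0; }
static void setds_11n(struct txframe *f)    { f->aggr = 1; }

/* Mirrors the scheduler's bfs_nframes == 1 special case: a one-frame
 * "aggregate" is demoted to a plain frame before handoff. */
static void
dispatch(struct txframe *f)
{
        if (f->nframes == 1)
                setds_single(f);
        else
                setds_11n(f);
}

int
main(void)
{
        struct txframe one = { 1, 4, 1 }, many = { 0, 0, 5 };

        dispatch(&one);
        dispatch(&many);
        printf("one: aggr=%d, many: aggr=%d\n", one.aggr, many.aggr);
        return (0);
}
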
5657 struct ath_buf *bf; in ath_tx_tid_hw_queue_norm() local
5684 bf = ATH_TID_FIRST(tid);
5685 if (bf == NULL) {
5689 ATH_TID_REMOVE(tid, bf, bf_list);
5692 if (tid->tid != bf->bf_state.bfs_tid) {
5694 " tid %d\n", __func__, bf->bf_state.bfs_tid,
5698 bf->bf_comp = ath_tx_normal_comp;
5704 bf->bf_state.bfs_txflags |= HAL_TXDESC_CLRDMASK;
5707 ath_tx_update_clrdmask(sc, tid, bf);
5710 ath_tx_do_ratelookup(sc, bf, tid->tid,
5711 bf->bf_state.bfs_pktlen, false);
5712 ath_tx_calc_duration(sc, bf);
5713 ath_tx_calc_protection(sc, bf);
5714 ath_tx_set_rtscts(sc, bf);
5715 ath_tx_rate_fill_rcflags(sc, bf);
5716 ath_tx_setds(sc, bf);
5723 ath_tx_leak_count_update(sc, tid, bf);
5730 ath_tx_handoff(sc, txq, bf);
5935 * a "bf->bf_state.bfs_dobaw" flag, and this isn't set for these
6046 struct ath_buf *bf; local
6103 while ((bf = TAILQ_FIRST(&bf_cq)) != NULL) {
6104 TAILQ_REMOVE(&bf_cq, bf, bf_list);
6105 ath_tx_default_comp(sc, bf, 1);
6122 struct ath_buf *bf; local
6156 while ((bf = TAILQ_FIRST(&bf_cq)) != NULL) {
6157 TAILQ_REMOVE(&bf_cq, bf, bf_list);
6158 ath_tx_default_comp(sc, bf, 1);