Lines matching full:tid — whole-word hits for the identifier tid in the ath9k transmit path (drivers/net/wireless/ath/ath9k/xmit.c). Each hit shows the source line number, the matched line, and the enclosing function with the kind of use (argument/local).

57 			       struct ath_atx_tid *tid, struct sk_buff *skb);
70 static void ath_tx_update_baw(struct ath_atx_tid *tid, struct ath_buf *bf);
73 struct ath_atx_tid *tid,
121 void ath_tx_queue_tid(struct ath_softc *sc, struct ath_atx_tid *tid) in ath_tx_queue_tid() argument
124 container_of((void *)tid, struct ieee80211_txq, drv_priv); in ath_tx_queue_tid()
133 struct ath_atx_tid *tid = (struct ath_atx_tid *) queue->drv_priv; in ath9k_wake_tx_queue() local
134 struct ath_txq *txq = tid->txq; in ath9k_wake_tx_queue()
138 tid->tidno); in ath9k_wake_tx_queue()
153 static void ath_send_bar(struct ath_atx_tid *tid, u16 seqno) in ath_send_bar() argument
155 if (!tid->an->sta) in ath_send_bar()
158 ieee80211_send_bar(tid->an->vif, tid->an->sta->addr, tid->tidno, in ath_send_bar()
233 ath_tid_pull(struct ath_atx_tid *tid, struct sk_buff **skbuf) in ath_tid_pull() argument
235 struct ieee80211_txq *txq = container_of((void*)tid, struct ieee80211_txq, drv_priv); in ath_tid_pull()
236 struct ath_softc *sc = tid->an->sc; in ath_tid_pull()
239 .txq = tid->txq, in ath_tid_pull()
240 .sta = tid->an->sta, in ath_tid_pull()
257 if (tid->txq == sc->tx.txq_map[q]) { in ath_tid_pull()
260 ++tid->txq->pending_frames; in ath_tid_pull()
267 static int ath_tid_dequeue(struct ath_atx_tid *tid, in ath_tid_dequeue() argument
271 *skb = __skb_dequeue(&tid->retry_q); in ath_tid_dequeue()
273 ret = ath_tid_pull(tid, skb); in ath_tid_dequeue()
278 static void ath_tx_flush_tid(struct ath_softc *sc, struct ath_atx_tid *tid) in ath_tx_flush_tid() argument
280 struct ath_txq *txq = tid->txq; in ath_tx_flush_tid()
292 while ((skb = __skb_dequeue(&tid->retry_q))) { in ath_tx_flush_tid()
302 ath_tx_update_baw(tid, bf); in ath_tx_flush_tid()
312 ath_send_bar(tid, tid->seq_start); in ath_tx_flush_tid()
317 static void ath_tx_update_baw(struct ath_atx_tid *tid, struct ath_buf *bf) in ath_tx_update_baw() argument
326 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_update_baw()
327 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_update_baw()
329 __clear_bit(cindex, tid->tx_buf); in ath_tx_update_baw()
331 while (tid->baw_head != tid->baw_tail && !test_bit(tid->baw_head, tid->tx_buf)) { in ath_tx_update_baw()
332 INCR(tid->seq_start, IEEE80211_SEQ_MAX); in ath_tx_update_baw()
333 INCR(tid->baw_head, ATH_TID_MAX_BUFS); in ath_tx_update_baw()
334 if (tid->bar_index >= 0) in ath_tx_update_baw()
335 tid->bar_index--; in ath_tx_update_baw()
339 static void ath_tx_addto_baw(struct ath_atx_tid *tid, struct ath_buf *bf) in ath_tx_addto_baw() argument
348 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_addto_baw()
349 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_addto_baw()
350 __set_bit(cindex, tid->tx_buf); in ath_tx_addto_baw()
353 if (index >= ((tid->baw_tail - tid->baw_head) & in ath_tx_addto_baw()
355 tid->baw_tail = cindex; in ath_tx_addto_baw()
356 INCR(tid->baw_tail, ATH_TID_MAX_BUFS); in ath_tx_addto_baw()
361 struct ath_atx_tid *tid) in ath_tid_drain() argument
373 while (ath_tid_dequeue(tid, &skb) == 0) { in ath_tid_drain()
486 struct ath_atx_tid *tid, in ath_tx_complete_aggr() argument
531 seq_first = tid->seq_start; in ath_tx_complete_aggr()
535 * The hardware occasionally sends a tx status for the wrong TID. in ath_tx_complete_aggr()
539 * Only BlockAcks have a TID and therefore normal Acks cannot be in ath_tx_complete_aggr()
542 if (isba && tid->tidno != ts->tid) in ath_tx_complete_aggr()
578 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno) || in ath_tx_complete_aggr()
579 !tid->active) { in ath_tx_complete_aggr()
620 ath_tx_update_baw(tid, bf); in ath_tx_complete_aggr()
650 ath_tx_update_baw(tid, bf); in ath_tx_complete_aggr()
676 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_complete_aggr()
678 skb_queue_splice_tail(&bf_pending, &tid->retry_q); in ath_tx_complete_aggr()
680 ath_tx_queue_tid(sc, tid); in ath_tx_complete_aggr()
682 tid->clear_ps_filter = true; in ath_tx_complete_aggr()
689 if (BAW_WITHIN(tid->seq_start, tid->baw_size, bar_seq)) in ath_tx_complete_aggr()
690 tid->bar_index = ATH_BA_INDEX(tid->seq_start, bar_seq); in ath_tx_complete_aggr()
693 ath_send_bar(tid, ATH_BA_INDEX2SEQ(seq_first, bar_index + 1)); in ath_tx_complete_aggr()
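
When completion processing above finds failed subframes, the driver may send a BAR to move the peer's window past the highest failed sequence number (ath_send_bar() at source line 693). Both the window-membership test and the BAR starting-sequence arithmetic work modulo the 12-bit 802.11 sequence space. A hedged, self-contained sketch of the two macros involved (BAW_WITHIN as defined in ath9k.h, and ATH_BA_INDEX2SEQ under the local name BA_INDEX2SEQ):

    /* Hedged sketch of the window-membership test and BAR start-sequence
     * computation; 4095 masks the 12-bit 802.11 sequence space. */
    #include <assert.h>

    #define BAW_WITHIN(start, bawsz, seqno) \
        ((((seqno) - (start)) & 4095) < (bawsz))
    #define BA_INDEX2SEQ(seq, ix) (((seq) + (ix)) & 4095)

    int main(void)
    {
        /* Window of 64 frames starting at 4090: wraps through 0. */
        assert(BAW_WITHIN(4090, 64, 4095));
        assert(BAW_WITHIN(4090, 64, 5));      /* wrapped, still inside  */
        assert(!BAW_WITHIN(4090, 64, 4089));  /* just before the window */

        /* BAR asks the peer to move its window past the last failure. */
        assert(BA_INDEX2SEQ(4090, 7) == 1);   /* 4090 + 7 wraps to 1 */
        return 0;
    }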
711 u8 tid) in ath_tx_count_airtime() argument
722 ieee80211_sta_register_airtime(sta, tid, airtime, 0); in ath_tx_count_airtime()
733 struct ath_atx_tid *tid = NULL; in ath_tx_process_buffer() local
751 tid = ath_get_skb_tid(an, bf->bf_mpdu); in ath_tx_process_buffer()
752 ath_tx_count_airtime(sc, sta, bf, ts, tid->tidno); in ath_tx_process_buffer()
754 tid->clear_ps_filter = true; in ath_tx_process_buffer()
768 ath_tx_complete_aggr(sc, txq, bf, bf_head, sta, tid, ts, txok); in ath_tx_process_buffer()
797 struct ath_atx_tid *tid) in ath_lookup_rate() argument
804 int q = tid->txq->mac80211_qnum; in ath_lookup_rate()
857 if (tid->an->maxampdu) in ath_lookup_rate()
858 aggr_limit = min(aggr_limit, tid->an->maxampdu); in ath_lookup_rate()
867 static int ath_compute_num_delims(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_compute_num_delims() argument
909 if (tid->an->mpdudensity == 0) in ath_compute_num_delims()
918 nsymbols = NUM_SYMBOLS_PER_USEC_HALFGI(tid->an->mpdudensity); in ath_compute_num_delims()
920 nsymbols = NUM_SYMBOLS_PER_USEC(tid->an->mpdudensity); in ath_compute_num_delims()
939 struct ath_atx_tid *tid, struct ath_buf **buf) in ath_tx_get_tid_subframe() argument
949 ret = ath_tid_dequeue(tid, &skb); in ath_tx_get_tid_subframe()
956 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_get_tid_subframe()
978 if (!tid->active) in ath_tx_get_tid_subframe()
990 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno)) { in ath_tx_get_tid_subframe()
991 __skb_queue_tail(&tid->retry_q, skb); in ath_tx_get_tid_subframe()
996 if (!skb_queue_is_first(&tid->retry_q, skb) && in ath_tx_get_tid_subframe()
1005 if (tid->bar_index > ATH_BA_INDEX(tid->seq_start, seqno)) { in ath_tx_get_tid_subframe()
1011 ath_tx_update_baw(tid, bf); in ath_tx_get_tid_subframe()
1017 ath_tx_addto_baw(tid, bf); in ath_tx_get_tid_subframe()
1028 struct ath_atx_tid *tid, struct list_head *bf_q, in ath_tx_form_aggr() argument
1035 al_delta, h_baw = tid->baw_size / 2; in ath_tx_form_aggr()
1042 aggr_limit = ath_lookup_rate(sc, bf, tid); in ath_tx_form_aggr()
1069 ndelim = ath_compute_num_delims(sc, tid, bf_first, fi->framelen, in ath_tx_form_aggr()
1085 ret = ath_tx_get_tid_subframe(sc, txq, tid, &bf); in ath_tx_form_aggr()
1091 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_form_aggr()
1497 struct ath_atx_tid *tid, struct list_head *bf_q, in ath_tx_form_burst() argument
1515 ret = ath_tx_get_tid_subframe(sc, txq, tid, &bf); in ath_tx_form_burst()
1521 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_form_burst()
1525 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_form_burst()
1530 struct ath_atx_tid *tid) in ath_tx_sched_aggr() argument
1540 ret = ath_tx_get_tid_subframe(sc, txq, tid, &bf); in ath_tx_sched_aggr()
1548 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_sched_aggr()
1552 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_sched_aggr()
1554 aggr_len = ath_tx_form_aggr(sc, txq, tid, &bf_q, bf); in ath_tx_sched_aggr()
1556 ath_tx_form_burst(sc, txq, tid, &bf_q, bf); in ath_tx_sched_aggr()
1561 if (tid->clear_ps_filter || tid->an->no_ps_filter) { in ath_tx_sched_aggr()
1562 tid->clear_ps_filter = false; in ath_tx_sched_aggr()
1572 u16 tid, u16 *ssn) in ath_tx_aggr_start() argument
1583 txtid = ATH_AN_2_TID(an, tid); in ath_tx_aggr_start()
1611 void ath_tx_aggr_stop(struct ath_softc *sc, struct ieee80211_sta *sta, u16 tid) in ath_tx_aggr_stop() argument
1615 struct ath_atx_tid *txtid = ATH_AN_2_TID(an, tid); in ath_tx_aggr_stop()
1630 struct ath_atx_tid *tid; in ath_tx_aggr_sleep() local
1636 tid = ath_node_to_tid(an, tidno); in ath_tx_aggr_sleep()
1638 if (!skb_queue_empty(&tid->retry_q)) in ath_tx_aggr_sleep()
1639 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_aggr_sleep()
1647 struct ath_atx_tid *tid; in ath_tx_aggr_wakeup() local
1654 tid = ath_node_to_tid(an, tidno); in ath_tx_aggr_wakeup()
1655 txq = tid->txq; in ath_tx_aggr_wakeup()
1658 tid->clear_ps_filter = true; in ath_tx_aggr_wakeup()
1659 if (!skb_queue_empty(&tid->retry_q)) { in ath_tx_aggr_wakeup()
1660 ath_tx_queue_tid(sc, tid); in ath_tx_aggr_wakeup()
1706 struct ath_atx_tid *tid; in ath9k_release_buffered_frames() local
1711 tid = ATH_AN_2_TID(an, i); in ath9k_release_buffered_frames()
1713 ath_txq_lock(sc, tid->txq); in ath9k_release_buffered_frames()
1716 tid, &bf); in ath9k_release_buffered_frames()
1722 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath9k_release_buffered_frames()
1732 if (an->sta && skb_queue_empty(&tid->retry_q)) in ath9k_release_buffered_frames()
1735 ath_txq_unlock_complete(sc, tid->txq); in ath9k_release_buffered_frames()
1977 /* For each acq entry, for each tid, try to schedule packets
1985 struct ath_atx_tid *tid; in ath_txq_schedule() local
2004 tid = (struct ath_atx_tid *)queue->drv_priv; in ath_txq_schedule()
2006 ret = ath_tx_sched_aggr(sc, txq, tid); in ath_txq_schedule()
2009 force = !skb_queue_empty(&tid->retry_q); in ath_txq_schedule()
2109 struct ath_atx_tid *tid, struct sk_buff *skb) in ath_tx_send_normal() argument
2119 if (tid && (tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_send_normal()
2121 ath_tx_addto_baw(tid, bf); in ath_tx_send_normal()
2215 struct ath_atx_tid *tid, in ath_tx_setup_buffer() argument
2233 if (tid && ieee80211_is_data_present(hdr->frame_control)) { in ath_tx_setup_buffer()
2235 seqno = tid->seq_next; in ath_tx_setup_buffer()
2236 hdr->seq_ctrl = cpu_to_le16(tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT); in ath_tx_setup_buffer()
2242 INCR(tid->seq_next, IEEE80211_SEQ_MAX); in ath_tx_setup_buffer()
2334 struct ath_atx_tid *tid = NULL; in ath_tx_start() local
2358 tid = ath_get_skb_tid(an, skb); in ath_tx_start()
2367 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_start()
2383 ath_tx_send_normal(sc, txq, tid, skb); in ath_tx_start()
2858 struct ath_atx_tid *tid; in ath_tx_node_init() local
2862 tid = ath_node_to_tid(an, tidno); in ath_tx_node_init()
2863 tid->an = an; in ath_tx_node_init()
2864 tid->tidno = tidno; in ath_tx_node_init()
2865 tid->seq_start = tid->seq_next = 0; in ath_tx_node_init()
2866 tid->baw_size = WME_MAX_BA; in ath_tx_node_init()
2867 tid->baw_head = tid->baw_tail = 0; in ath_tx_node_init()
2868 tid->active = false; in ath_tx_node_init()
2869 tid->clear_ps_filter = true; in ath_tx_node_init()
2870 __skb_queue_head_init(&tid->retry_q); in ath_tx_node_init()
2871 INIT_LIST_HEAD(&tid->list); in ath_tx_node_init()
2873 tid->txq = sc->tx.txq_map[acno]; in ath_tx_node_init()
2882 struct ath_atx_tid *tid; in ath_tx_node_cleanup() local
2889 tid = ath_node_to_tid(an, tidno); in ath_tx_node_cleanup()
2890 txq = tid->txq; in ath_tx_node_cleanup()
2894 if (!list_empty(&tid->list)) in ath_tx_node_cleanup()
2895 list_del_init(&tid->list); in ath_tx_node_cleanup()
2897 ath_tid_drain(sc, txq, tid); in ath_tx_node_cleanup()
2898 tid->active = false; in ath_tx_node_cleanup()