Lines Matching +full:txpower +full:-

2  * Copyright (c) 2008-2011 Atheros Communications Inc.
17 #include <linux/dma-mapping.h>
33 #define TIME_SYMBOLS_HALFGI(t) (((t) * 5 - 4) / 18)
35 #define NUM_SYMBOLS_PER_USEC_HALFGI(_usec) (((_usec*5)-4)/18)
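The two HALFGI macros above encode a 3.6 us symbol time in integer math: t / 3.6 == t * 5 / 18, with the -4 biasing the division down so a partial symbol is never counted. A minimal standalone sketch to check the arithmetic (not kernel code):

#include <stdio.h>

#define NUM_SYMBOLS_PER_USEC(_usec)        ((_usec) >> 2)            /* 4.0 us symbols */
#define NUM_SYMBOLS_PER_USEC_HALFGI(_usec) (((_usec) * 5 - 4) / 18)  /* 3.6 us symbols */

int main(void)
{
	/* e.g. an 8 us MPDU density maps to 2 symbols at either guard interval */
	for (int usec = 4; usec <= 20; usec += 4)
		printf("%2d us -> %d full-GI / %d half-GI symbols\n", usec,
		       NUM_SYMBOLS_PER_USEC(usec),
		       NUM_SYMBOLS_PER_USEC_HALFGI(usec));
	return 0;
}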
49 { 104, 216 }, /* 3: 16-QAM 1/2 */
50 { 156, 324 }, /* 4: 16-QAM 3/4 */
51 { 208, 432 }, /* 5: 64-QAM 2/3 */
52 { 234, 486 }, /* 6: 64-QAM 3/4 */
53 { 260, 540 }, /* 7: 64-QAM 5/6 */
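The table pairs are bits per OFDM symbol for each MCS at 20 MHz and 40 MHz. Dividing by the symbol time recovers the familiar single-stream PHY rates; a quick sketch using the MCS 7 row above:

#include <stdio.h>

int main(void)
{
	int bps_ht20 = 260, bps_ht40 = 540;   /* 7: 64-QAM 5/6, from the table */

	printf("HT20 long GI:  %d Mbps\n", bps_ht20 / 4);     /* 65 */
	printf("HT40 long GI:  %d Mbps\n", bps_ht40 / 4);     /* 135 */
	printf("HT20 short GI: %.1f Mbps\n", bps_ht20 / 3.6); /* 72.2 */
	printf("HT40 short GI: %.1f Mbps\n", bps_ht40 / 3.6); /* 150 */
	return 0;
}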
93 struct ieee80211_sta *sta = info->status.status_driver_data[0]; in ath_tx_status()
95 if (info->flags & (IEEE80211_TX_CTL_REQ_TX_STATUS | in ath_tx_status()
108 __releases(&txq->axq_lock) in ath_txq_unlock_complete()
110 struct ieee80211_hw *hw = sc->hw; in ath_txq_unlock_complete()
115 skb_queue_splice_init(&txq->complete_q, &q); in ath_txq_unlock_complete()
116 spin_unlock_bh(&txq->axq_lock); in ath_txq_unlock_complete()
127 ieee80211_schedule_txq(sc->hw, queue); in ath_tx_queue_tid()
132 struct ath_softc *sc = hw->priv; in ath9k_wake_tx_queue()
133 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath9k_wake_tx_queue()
134 struct ath_atx_tid *tid = (struct ath_atx_tid *) queue->drv_priv; in ath9k_wake_tx_queue()
135 struct ath_txq *txq = tid->txq; in ath9k_wake_tx_queue()
138 queue->sta ? queue->sta->addr : queue->vif->addr, in ath9k_wake_tx_queue()
139 tid->tidno); in ath9k_wake_tx_queue()
150 sizeof(tx_info->status.status_driver_data)); in get_frame_info()
151 return (struct ath_frame_info *) &tx_info->status.status_driver_data[0]; in get_frame_info()
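get_frame_info() overlays the driver's per-frame state on mac80211's status_driver_data scratch area instead of allocating anything, and the kernel guards the cast with a BUILD_BUG_ON size check. A generic sketch of the pattern, with hypothetical stand-in types:

#include <stdio.h>

struct status { void *status_driver_data[4]; };   /* stand-in scratch area */
struct frame_info { unsigned short framelen; unsigned char retries, baw_tracked; };

static struct frame_info *get_frame_info(struct status *st)
{
	/* compile-time guard, like the kernel's BUILD_BUG_ON(): the private
	 * struct must fit inside the scratch space it is overlaid on */
	_Static_assert(sizeof(struct frame_info) <=
		       sizeof(((struct status *)0)->status_driver_data),
		       "frame_info too large");
	return (struct frame_info *)&st->status_driver_data[0];
}

int main(void)
{
	struct status st = { { 0 } };
	get_frame_info(&st)->retries = 3;
	printf("retries=%u\n", get_frame_info(&st)->retries);
	return 0;
}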
156 if (!tid->an->sta) in ath_send_bar()
159 ieee80211_send_bar(tid->an->vif, tid->an->sta->addr, tid->tidno, in ath_send_bar()
172 ratetbl = rcu_dereference(sta->rates); in ath_merge_ratetbl()
176 if (tx_info->control.rates[0].idx < 0 || in ath_merge_ratetbl()
177 tx_info->control.rates[0].count == 0) in ath_merge_ratetbl()
181 bf->rates[0] = tx_info->control.rates[0]; in ath_merge_ratetbl()
186 bf->rates[i].idx = ratetbl->rate[i].idx; in ath_merge_ratetbl()
187 bf->rates[i].flags = ratetbl->rate[i].flags; in ath_merge_ratetbl()
188 if (tx_info->control.use_rts) in ath_merge_ratetbl()
189 bf->rates[i].count = ratetbl->rate[i].count_rts; in ath_merge_ratetbl()
190 else if (tx_info->control.use_cts_prot) in ath_merge_ratetbl()
191 bf->rates[i].count = ratetbl->rate[i].count_cts; in ath_merge_ratetbl()
193 bf->rates[i].count = ratetbl->rate[i].count; in ath_merge_ratetbl()
204 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_set_rates()
207 ieee80211_get_tx_rates(vif, sta, bf->bf_mpdu, bf->rates, in ath_set_rates()
208 ARRAY_SIZE(bf->rates)); in ath_set_rates()
215 int q = fi->txq; in ath_txq_skb_done()
220 txq = sc->tx.txq_map[q]; in ath_txq_skb_done()
221 if (WARN_ON(--txq->pending_frames < 0)) in ath_txq_skb_done()
222 txq->pending_frames = 0; in ath_txq_skb_done()
229 u8 tidno = skb->priority & IEEE80211_QOS_CTL_TID_MASK; in ath_get_skb_tid()
237 struct ath_softc *sc = tid->an->sc; in ath_tid_pull()
238 struct ieee80211_hw *hw = sc->hw; in ath_tid_pull()
240 .txq = tid->txq, in ath_tid_pull()
241 .sta = tid->an->sta, in ath_tid_pull()
249 return -ENOENT; in ath_tid_pull()
258 if (tid->txq == sc->tx.txq_map[q]) { in ath_tid_pull()
260 fi->txq = q; in ath_tid_pull()
261 ++tid->txq->pending_frames; in ath_tid_pull()
272 *skb = __skb_dequeue(&tid->retry_q); in ath_tid_dequeue()
281 struct ath_txq *txq = tid->txq; in ath_tx_flush_tid()
293 while ((skb = __skb_dequeue(&tid->retry_q))) { in ath_tx_flush_tid()
295 bf = fi->bf; in ath_tx_flush_tid()
298 ieee80211_free_txskb(sc->hw, skb); in ath_tx_flush_tid()
302 if (fi->baw_tracked) { in ath_tx_flush_tid()
307 list_add_tail(&bf->list, &bf_head); in ath_tx_flush_tid()
313 ath_send_bar(tid, tid->seq_start); in ath_tx_flush_tid()
321 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_tx_update_baw()
322 u16 seqno = bf->bf_state.seqno; in ath_tx_update_baw()
325 if (!fi->baw_tracked) in ath_tx_update_baw()
328 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_update_baw()
329 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_update_baw()
331 __clear_bit(cindex, tid->tx_buf); in ath_tx_update_baw()
333 while (tid->baw_head != tid->baw_tail && !test_bit(tid->baw_head, tid->tx_buf)) { in ath_tx_update_baw()
334 INCR(tid->seq_start, IEEE80211_SEQ_MAX); in ath_tx_update_baw()
335 INCR(tid->baw_head, ATH_TID_MAX_BUFS); in ath_tx_update_baw()
336 if (tid->bar_index >= 0) in ath_tx_update_baw()
337 tid->bar_index--; in ath_tx_update_baw()
344 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_tx_addto_baw()
345 u16 seqno = bf->bf_state.seqno; in ath_tx_addto_baw()
348 if (fi->baw_tracked) in ath_tx_addto_baw()
351 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_addto_baw()
352 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_addto_baw()
353 __set_bit(cindex, tid->tx_buf); in ath_tx_addto_baw()
354 fi->baw_tracked = 1; in ath_tx_addto_baw()
356 if (index >= ((tid->baw_tail - tid->baw_head) & in ath_tx_addto_baw()
357 (ATH_TID_MAX_BUFS - 1))) { in ath_tx_addto_baw()
358 tid->baw_tail = cindex; in ath_tx_addto_baw()
359 INCR(tid->baw_tail, ATH_TID_MAX_BUFS); in ath_tx_addto_baw()
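ath_tx_addto_baw() and ath_tx_update_baw() track the block-ack window as a circular bitmap: ATH_BA_INDEX gives the offset of a sequence number from seq_start, completion clears the bit, and the head slides forward over every contiguous cleared slot. A compact sketch of that sliding-window logic, with assumed small constants standing in for ATH_TID_MAX_BUFS and IEEE80211_SEQ_MAX:

#include <stdio.h>

#define MAX_BUFS 64                              /* stand-in for ATH_TID_MAX_BUFS */
#define SEQ_MAX  4096                            /* IEEE80211_SEQ_MAX */
#define BA_INDEX(first, seq) (((seq) - (first)) & (SEQ_MAX - 1))
#define INCR(v, max) do { (v) = ((v) + 1) % (max); } while (0)

struct tid_baw {
	unsigned seq_start, baw_head, baw_tail;
	unsigned char tx_buf[MAX_BUFS];          /* 1 = frame still in flight */
};

/* mark seqno as sent, like ath_tx_addto_baw() */
static void baw_add(struct tid_baw *t, unsigned seqno)
{
	unsigned index = BA_INDEX(t->seq_start, seqno);
	unsigned cindex = (t->baw_head + index) % MAX_BUFS;

	t->tx_buf[cindex] = 1;
	if (index >= ((t->baw_tail - t->baw_head) & (MAX_BUFS - 1))) {
		t->baw_tail = cindex;
		INCR(t->baw_tail, MAX_BUFS);
	}
}

/* mark seqno as completed and slide the window, like ath_tx_update_baw() */
static void baw_complete(struct tid_baw *t, unsigned seqno)
{
	unsigned cindex = (t->baw_head + BA_INDEX(t->seq_start, seqno)) % MAX_BUFS;

	t->tx_buf[cindex] = 0;
	while (t->baw_head != t->baw_tail && !t->tx_buf[t->baw_head]) {
		INCR(t->seq_start, SEQ_MAX);
		INCR(t->baw_head, MAX_BUFS);
	}
}

int main(void)
{
	struct tid_baw t = { .seq_start = 100 };

	baw_add(&t, 100); baw_add(&t, 101); baw_add(&t, 102);
	baw_complete(&t, 101);                   /* out of order: window holds */
	printf("seq_start=%u\n", t.seq_start);   /* still 100 */
	baw_complete(&t, 100);                   /* slides past 100 and 101 */
	printf("seq_start=%u\n", t.seq_start);   /* 102 */
	return 0;
}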
378 bf = fi->bf; in ath_tid_drain()
385 list_add_tail(&bf->list, &bf_head); in ath_tid_drain()
394 struct ath_buf *bf = fi->bf; in ath_tx_set_retry()
396 int prev = fi->retries; in ath_tx_set_retry()
398 TX_STAT_INC(sc, txq->axq_qnum, a_retries); in ath_tx_set_retry()
399 fi->retries += count; in ath_tx_set_retry()
404 hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_set_retry()
405 hdr->frame_control |= cpu_to_le16(IEEE80211_FCTL_RETRY); in ath_tx_set_retry()
406 dma_sync_single_for_device(sc->dev, bf->bf_buf_addr, in ath_tx_set_retry()
414 spin_lock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
416 if (unlikely(list_empty(&sc->tx.txbuf))) { in ath_tx_get_buffer()
417 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
421 bf = list_first_entry(&sc->tx.txbuf, struct ath_buf, list); in ath_tx_get_buffer()
422 list_del(&bf->list); in ath_tx_get_buffer()
424 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
431 spin_lock_bh(&sc->tx.txbuflock); in ath_tx_return_buffer()
432 list_add_tail(&bf->list, &sc->tx.txbuf); in ath_tx_return_buffer()
433 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_return_buffer()
446 tbf->bf_mpdu = bf->bf_mpdu; in ath_clone_txbuf()
447 tbf->bf_buf_addr = bf->bf_buf_addr; in ath_clone_txbuf()
448 memcpy(tbf->bf_desc, bf->bf_desc, sc->sc_ah->caps.tx_desc_len); in ath_clone_txbuf()
449 tbf->bf_state = bf->bf_state; in ath_clone_txbuf()
450 tbf->bf_state.stale = false; in ath_clone_txbuf()
471 seq_st = ts->ts_seqnum; in ath_tx_count_frames()
472 memcpy(ba, &ts->ba, WME_BA_BMP_SIZE >> 3); in ath_tx_count_frames()
476 ba_index = ATH_BA_INDEX(seq_st, bf->bf_state.seqno); in ath_tx_count_frames()
482 bf = bf->bf_next; in ath_tx_count_frames()
496 struct ath_buf *bf_next, *bf_last = bf->bf_lastbf; in ath_tx_complete_aggr()
506 bool flush = !!(ts->ts_status & ATH9K_TX_FLUSH); in ath_tx_complete_aggr()
508 int bar_index = -1; in ath_tx_complete_aggr()
510 skb = bf->bf_mpdu; in ath_tx_complete_aggr()
513 memcpy(rates, bf->rates, sizeof(rates)); in ath_tx_complete_aggr()
515 retries = ts->ts_longretry + 1; in ath_tx_complete_aggr()
516 for (i = 0; i < ts->ts_rateindex; i++) in ath_tx_complete_aggr()
522 bf_next = bf->bf_next; in ath_tx_complete_aggr()
524 if (!bf->bf_state.stale || bf_next != NULL) in ath_tx_complete_aggr()
525 list_move_tail(&bf->list, &bf_head); in ath_tx_complete_aggr()
534 an = (struct ath_node *)sta->drv_priv; in ath_tx_complete_aggr()
535 seq_first = tid->seq_start; in ath_tx_complete_aggr()
536 isba = ts->ts_flags & ATH9K_TX_BA; in ath_tx_complete_aggr()
546 if (isba && tid->tidno != ts->tid) in ath_tx_complete_aggr()
553 if (ts->ts_flags & ATH9K_TX_BA) { in ath_tx_complete_aggr()
554 seq_st = ts->ts_seqnum; in ath_tx_complete_aggr()
555 memcpy(ba, &ts->ba, WME_BA_BMP_SIZE >> 3); in ath_tx_complete_aggr()
564 if (sc->sc_ah->opmode == NL80211_IFTYPE_STATION) in ath_tx_complete_aggr()
573 u16 seqno = bf->bf_state.seqno; in ath_tx_complete_aggr()
576 bf_next = bf->bf_next; in ath_tx_complete_aggr()
578 skb = bf->bf_mpdu; in ath_tx_complete_aggr()
582 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno) || in ath_tx_complete_aggr()
583 !tid->active) { in ath_tx_complete_aggr()
598 } else if (fi->retries < ATH_MAX_SW_RETRIES) { in ath_tx_complete_aggr()
599 if (txok || !an->sleeping) in ath_tx_complete_aggr()
600 ath_tx_set_retry(sc, txq, bf->bf_mpdu, in ath_tx_complete_aggr()
616 if (bf_next != NULL || !bf_last->bf_state.stale) in ath_tx_complete_aggr()
617 list_move_tail(&bf->list, &bf_head); in ath_tx_complete_aggr()
621 * complete the acked-ones/xretried ones; update in ath_tx_complete_aggr()
622 * block-ack window in ath_tx_complete_aggr()
627 memcpy(tx_info->control.rates, rates, sizeof(rates)); in ath_tx_complete_aggr()
630 if (bf == bf->bf_lastbf) in ath_tx_complete_aggr()
631 ath_dynack_sample_tx_ts(sc->sc_ah, in ath_tx_complete_aggr()
632 bf->bf_mpdu, in ath_tx_complete_aggr()
639 if (tx_info->flags & IEEE80211_TX_STATUS_EOSP) { in ath_tx_complete_aggr()
640 tx_info->flags &= ~IEEE80211_TX_STATUS_EOSP; in ath_tx_complete_aggr()
643 /* retry the un-acked ones */ in ath_tx_complete_aggr()
644 if (bf->bf_next == NULL && bf_last->bf_state.stale) { in ath_tx_complete_aggr()
664 fi->bf = tbf; in ath_tx_complete_aggr()
677 /* prepend un-acked frames to the beginning of the pending frame queue */ in ath_tx_complete_aggr()
679 if (an->sleeping) in ath_tx_complete_aggr()
680 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_complete_aggr()
682 skb_queue_splice_tail(&bf_pending, &tid->retry_q); in ath_tx_complete_aggr()
683 if (!an->sleeping) { in ath_tx_complete_aggr()
685 if (ts->ts_status & (ATH9K_TXERR_FILT | ATH9K_TXERR_XRETRY)) in ath_tx_complete_aggr()
686 tid->clear_ps_filter = true; in ath_tx_complete_aggr()
693 if (BAW_WITHIN(tid->seq_start, tid->baw_size, bar_seq)) in ath_tx_complete_aggr()
694 tid->bar_index = ATH_BA_INDEX(tid->seq_start, bar_seq); in ath_tx_complete_aggr()
707 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(bf->bf_mpdu); in bf_is_ampdu_not_probing()
708 return bf_isampdu(bf) && !(info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE); in bf_is_ampdu_not_probing()
720 airtime += ts->duration * (ts->ts_longretry + 1); in ath_tx_count_airtime()
721 for(i = 0; i < ts->ts_rateindex; i++) { in ath_tx_count_airtime()
722 int rate_dur = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc, i); in ath_tx_count_airtime()
723 airtime += rate_dur * bf->rates[i].count; in ath_tx_count_airtime()
733 struct ieee80211_hw *hw = sc->hw; in ath_tx_process_buffer()
740 txok = !(ts->ts_status & ATH9K_TXERR_MASK); in ath_tx_process_buffer()
741 flush = !!(ts->ts_status & ATH9K_TX_FLUSH); in ath_tx_process_buffer()
742 txq->axq_tx_inprogress = false; in ath_tx_process_buffer()
744 txq->axq_depth--; in ath_tx_process_buffer()
746 txq->axq_ampdu_depth--; in ath_tx_process_buffer()
748 ts->duration = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc, in ath_tx_process_buffer()
749 ts->ts_rateindex); in ath_tx_process_buffer()
751 hdr = (struct ieee80211_hdr *) bf->bf_mpdu->data; in ath_tx_process_buffer()
752 sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr1, hdr->addr2); in ath_tx_process_buffer()
754 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath_tx_process_buffer()
755 tid = ath_get_skb_tid(sc, an, bf->bf_mpdu); in ath_tx_process_buffer()
756 ath_tx_count_airtime(sc, sta, bf, ts, tid->tidno); in ath_tx_process_buffer()
757 if (ts->ts_status & (ATH9K_TXERR_FILT | ATH9K_TXERR_XRETRY)) in ath_tx_process_buffer()
758 tid->clear_ps_filter = true; in ath_tx_process_buffer()
763 info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_process_buffer()
764 memcpy(info->control.rates, bf->rates, in ath_tx_process_buffer()
765 sizeof(info->control.rates)); in ath_tx_process_buffer()
767 ath_dynack_sample_tx_ts(sc->sc_ah, bf->bf_mpdu, ts, in ath_tx_process_buffer()
785 skb = bf->bf_mpdu; in ath_lookup_legacy()
787 rates = tx_info->control.rates; in ath_lookup_legacy()
808 int q = tid->txq->mac80211_qnum; in ath_lookup_rate()
811 skb = bf->bf_mpdu; in ath_lookup_rate()
813 rates = bf->rates; in ath_lookup_rate()
840 frmlen = sc->tx.max_aggr_framelen[q][modeidx][rates[i].idx]; in ath_lookup_rate()
849 if (tx_info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE || legacy) in ath_lookup_rate()
861 if (tid->an->maxampdu) in ath_lookup_rate()
862 aggr_limit = min(aggr_limit, tid->an->maxampdu); in ath_lookup_rate()
880 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_compute_num_delims()
888 * TODO - this could be improved to be dependent on the rate. in ath_compute_num_delims()
891 if ((fi->keyix != ATH9K_TXKEYIX_INVALID) && in ath_compute_num_delims()
892 !(sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)) in ath_compute_num_delims()
899 if (first_subfrm && !AR_SREV_9580_10_OR_LATER(sc->sc_ah) && in ath_compute_num_delims()
900 (sc->sc_ah->ent_mode & AR_ENT_OTP_MIN_PKT_SIZE_DISABLE)) in ath_compute_num_delims()
913 if (tid->an->mpdudensity == 0) in ath_compute_num_delims()
916 rix = bf->rates[0].idx; in ath_compute_num_delims()
917 flags = bf->rates[0].flags; in ath_compute_num_delims()
922 nsymbols = NUM_SYMBOLS_PER_USEC_HALFGI(tid->an->mpdudensity); in ath_compute_num_delims()
924 nsymbols = NUM_SYMBOLS_PER_USEC(tid->an->mpdudensity); in ath_compute_num_delims()
934 mindelim = (minlen - frmlen) / ATH_AGGR_DELIM_SZ; in ath_compute_num_delims()
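ath_compute_num_delims() turns the peer's MPDU density (a minimum time between MPDU starts) into a minimum byte length at the first TX rate, then pads any shortfall with 4-byte A-MPDU delimiters. A worked sketch with illustrative numbers, DELIM_SZ standing in for ATH_AGGR_DELIM_SZ:

#include <stdio.h>

#define DELIM_SZ 4                              /* one A-MPDU delimiter is 4 bytes */

int main(void)
{
	int mpdudensity_us = 8;                 /* peer needs 8 us between MPDU starts */
	int nsymbols = mpdudensity_us >> 2;     /* 4 us symbols, long GI -> 2 */
	int nsymbits = 260;                     /* MCS 7 HT20 bits/symbol */
	int frmlen = 40;                        /* short subframe, in bytes */

	int minlen = (nsymbols * nsymbits) / 8; /* 65 bytes per density window */
	int mindelim = 0;
	if (frmlen < minlen)
		mindelim = (minlen - frmlen) / DELIM_SZ;  /* (65 - 40) / 4 = 6 */

	printf("minlen=%d mindelim=%d\n", minlen, mindelim);
	return 0;
}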
958 bf = fi->bf; in ath_tx_get_tid_subframe()
959 if (!fi->bf) in ath_tx_get_tid_subframe()
962 bf->bf_state.stale = false; in ath_tx_get_tid_subframe()
966 ieee80211_free_txskb(sc->hw, skb); in ath_tx_get_tid_subframe()
970 bf->bf_next = NULL; in ath_tx_get_tid_subframe()
971 bf->bf_lastbf = bf; in ath_tx_get_tid_subframe()
974 tx_info->flags &= ~(IEEE80211_TX_CTL_CLEAR_PS_FILT | in ath_tx_get_tid_subframe()
982 if (!tid->active) in ath_tx_get_tid_subframe()
983 tx_info->flags &= ~IEEE80211_TX_CTL_AMPDU; in ath_tx_get_tid_subframe()
985 if (!(tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_get_tid_subframe()
986 bf->bf_state.bf_type = 0; in ath_tx_get_tid_subframe()
990 bf->bf_state.bf_type = BUF_AMPDU | BUF_AGGR; in ath_tx_get_tid_subframe()
991 seqno = bf->bf_state.seqno; in ath_tx_get_tid_subframe()
993 /* do not step over block-ack window */ in ath_tx_get_tid_subframe()
994 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno)) { in ath_tx_get_tid_subframe()
995 __skb_queue_tail(&tid->retry_q, skb); in ath_tx_get_tid_subframe()
1000 if (!skb_queue_is_first(&tid->retry_q, skb) && in ath_tx_get_tid_subframe()
1006 return -EINPROGRESS; in ath_tx_get_tid_subframe()
1009 if (tid->bar_index > ATH_BA_INDEX(tid->seq_start, seqno)) { in ath_tx_get_tid_subframe()
1014 list_add(&bf->list, &bf_head); in ath_tx_get_tid_subframe()
1035 #define PADBYTES(_len) ((4 - ((_len) % 4)) % 4) in ath_tx_form_aggr()
1039 al_delta, h_baw = tid->baw_size / 2; in ath_tx_form_aggr()
1050 skb = bf->bf_mpdu; in ath_tx_form_aggr()
1054 al_delta = ATH_AGGR_DELIM_SZ + fi->framelen; in ath_tx_form_aggr()
1060 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_form_aggr()
1061 if ((tx_info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE) || in ath_tx_form_aggr()
1062 !(tx_info->flags & IEEE80211_TX_CTL_AMPDU)) in ath_tx_form_aggr()
1073 ndelim = ath_compute_num_delims(sc, tid, bf_first, fi->framelen, in ath_tx_form_aggr()
1078 bf->bf_next = NULL; in ath_tx_form_aggr()
1081 bf->bf_state.ndelim = ndelim; in ath_tx_form_aggr()
1083 list_add_tail(&bf->list, bf_q); in ath_tx_form_aggr()
1085 bf_prev->bf_next = bf; in ath_tx_form_aggr()
1095 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_form_aggr()
1098 bf->bf_lastbf = bf_prev; in ath_tx_form_aggr()
1101 al = get_frame_info(bf->bf_mpdu)->framelen; in ath_tx_form_aggr()
1102 bf->bf_state.bf_type = BUF_AMPDU; in ath_tx_form_aggr()
1104 TX_STAT_INC(sc, txq->axq_qnum, a_aggr); in ath_tx_form_aggr()
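ath_tx_form_aggr() accumulates the aggregate length as delimiter plus subframe, rounding each previous subframe up to a 4-byte boundary with PADBYTES before appending the next one. A standalone sketch of that accounting:

#include <stdio.h>

#define DELIM_SZ 4
#define PADBYTES(_len) ((4 - ((_len) % 4)) % 4)   /* pad to a 4-byte boundary */

int main(void)
{
	int framelens[] = { 1534, 98, 257 };      /* illustrative subframe sizes */
	int al = 0, bpad = 0;

	for (int i = 0; i < 3; i++) {
		/* pad the previous subframe, then delimiter + this subframe */
		al += bpad + DELIM_SZ + framelens[i];
		bpad = PADBYTES(framelens[i]);
	}
	printf("aggregate length: %d bytes\n", al);   /* 1905 */
	return 0;
}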
1112 * rix - rate index
1113 * pktlen - total bytes (delims + data + fcs + pads + pad delims)
1114 * width - 0 for 20 MHz, 1 for 40 MHz
1115 * half_gi - to use 4us v/s 3.6 us for symbol time
1127 nsymbols = (nbits + nsymbits - 1) / nsymbits; in ath_pkt_duration()
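Putting the pieces together, ath_pkt_duration() rounds the bit count up to whole symbols and adds the fixed HT preamble fields. A minimal sketch, assuming the OFDM_PLCP_BITS and L_STF/L_LTF/... constants from this file and single-stream MCS 7:

#include <stdio.h>

#define OFDM_PLCP_BITS 22
#define L_STF 8
#define L_LTF 8
#define L_SIG 4
#define HT_SIG 8
#define HT_STF 4
#define HT_LTF(_ns) (4 * (_ns))

int main(void)
{
	int pktlen = 1538, streams = 1;
	int nsymbits = 260 * streams;                      /* MCS 7, HT20 */
	int nbits = (pktlen << 3) + OFDM_PLCP_BITS;
	int nsymbols = (nbits + nsymbits - 1) / nsymbits;  /* round up: 48 */

	int duration = 4 * nsymbols;       /* long GI; half GI is 3.6 us/symbol */
	duration += L_STF + L_LTF + L_SIG + HT_SIG + HT_STF + HT_LTF(streams);
	printf("duration: %d us\n", duration);             /* 228 */
	return 0;
}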
1146 usec -= L_STF + L_LTF + L_SIG + HT_SIG + HT_STF + HT_LTF(streams); in ath_max_framelen()
1149 bits -= OFDM_PLCP_BITS; in ath_max_framelen()
1166 cur_ht20 = sc->tx.max_aggr_framelen[queue][MCS_HT20]; in ath_update_max_aggr_framelen()
1167 cur_ht20_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT20_SGI]; in ath_update_max_aggr_framelen()
1168 cur_ht40 = sc->tx.max_aggr_framelen[queue][MCS_HT40]; in ath_update_max_aggr_framelen()
1169 cur_ht40_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT40_SGI]; in ath_update_max_aggr_framelen()
1185 struct ath_hw *ah = sc->sc_ah; in ath_get_rate_txpower()
1188 if (sc->tx99_state || !ah->tpc_enabled) in ath_get_rate_txpower()
1191 skb = bf->bf_mpdu; in ath_get_rate_txpower()
1195 is_2ghz = info->band == NL80211_BAND_2GHZ; in ath_get_rate_txpower()
1196 is_5ghz = info->band == NL80211_BAND_5GHZ; in ath_get_rate_txpower()
1197 use_stbc = is_mcs && rateidx < 8 && (info->flags & in ath_get_rate_txpower()
1209 int txpower = fi->tx_power; in ath_get_rate_txpower() local
1213 struct ar5416_eeprom_def *eep = &ah->eeprom.def; in ath_get_rate_txpower()
1214 u16 eeprom_rev = ah->eep_ops->get_eeprom_rev(ah); in ath_get_rate_txpower()
1219 pmodal = &eep->modalHeader[is_2ghz]; in ath_get_rate_txpower()
1220 power_ht40delta = pmodal->ht40PowerIncForPdadc; in ath_get_rate_txpower()
1224 txpower += power_ht40delta; in ath_get_rate_txpower()
1229 txpower -= 2 * AR9287_PWR_TABLE_OFFSET_DB; in ath_get_rate_txpower()
1233 power_offset = ah->eep_ops->get_eeprom(ah, in ath_get_rate_txpower()
1235 txpower -= 2 * power_offset; in ath_get_rate_txpower()
1239 txpower -= 2; in ath_get_rate_txpower()
1241 txpower = max(txpower, 0); in ath_get_rate_txpower()
1242 max_power = min_t(u8, ah->tx_power[rateidx], txpower); in ath_get_rate_txpower()
1250 } else if (!bf->bf_state.bfs_paprd) { in ath_get_rate_txpower()
1252 max_power = min_t(u8, ah->tx_power_stbc[rateidx], in ath_get_rate_txpower()
1253 fi->tx_power); in ath_get_rate_txpower()
1255 max_power = min_t(u8, ah->tx_power[rateidx], in ath_get_rate_txpower()
1256 fi->tx_power); in ath_get_rate_txpower()
1258 max_power = ah->paprd_training_power; in ath_get_rate_txpower()
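A unit convention runs through ath_get_rate_txpower() and setup_frame_info(): mac80211 hands the driver dBm, while fi->tx_power and the per-rate txpower fields are kept in half-dB steps, hence every "2 *" above. A small sketch of the conversion and clamping (the offset value is illustrative):

#include <stdio.h>

int main(void)
{
	int bss_txpower_dbm = 17;
	int txpower = 2 * bss_txpower_dbm;        /* 34 half-dB units */

	int power_offset_db = 5;                  /* illustrative EEPROM offset */
	txpower -= 2 * power_offset_db;           /* subtract in the same units */

	if (txpower < 0)                          /* max(txpower, 0), as above */
		txpower = 0;
	printf("rate txpower: %d half-dB (%.1f dBm)\n", txpower, txpower / 2.0);
	return 0;
}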
1267 struct ath_hw *ah = sc->sc_ah; in ath_buf_set_rate()
1274 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_buf_set_rate()
1275 u32 rts_thresh = sc->hw->wiphy->rts_threshold; in ath_buf_set_rate()
1279 skb = bf->bf_mpdu; in ath_buf_set_rate()
1281 rates = bf->rates; in ath_buf_set_rate()
1282 hdr = (struct ieee80211_hdr *)skb->data; in ath_buf_set_rate()
1284 /* set dur_update_en for l-sig computation except for PS-Poll frames */ in ath_buf_set_rate()
1285 info->dur_update = !ieee80211_is_pspoll(hdr->frame_control); in ath_buf_set_rate()
1286 info->rtscts_rate = fi->rtscts_rate; in ath_buf_set_rate()
1288 for (i = 0; i < ARRAY_SIZE(bf->rates); i++) { in ath_buf_set_rate()
1296 info->rates[i].Tries = rates[i].count; in ath_buf_set_rate()
1303 unlikely(rts_thresh != (u32) -1)) { in ath_buf_set_rate()
1309 info->rates[i].RateFlags |= ATH9K_RATESERIES_RTS_CTS; in ath_buf_set_rate()
1310 info->flags |= ATH9K_TXDESC_RTSENA; in ath_buf_set_rate()
1312 info->rates[i].RateFlags |= ATH9K_RATESERIES_RTS_CTS; in ath_buf_set_rate()
1313 info->flags |= ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
1317 info->rates[i].RateFlags |= ATH9K_RATESERIES_2040; in ath_buf_set_rate()
1319 info->rates[i].RateFlags |= ATH9K_RATESERIES_HALFGI; in ath_buf_set_rate()
1327 info->rates[i].Rate = rix | 0x80; in ath_buf_set_rate()
1328 info->rates[i].ChSel = ath_txchainmask_reduction(sc, in ath_buf_set_rate()
1329 ah->txchainmask, info->rates[i].Rate); in ath_buf_set_rate()
1330 info->rates[i].PktDuration = ath_pkt_duration(sc, rix, len, in ath_buf_set_rate()
1332 if (rix < 8 && (tx_info->flags & IEEE80211_TX_CTL_STBC)) in ath_buf_set_rate()
1333 info->rates[i].RateFlags |= ATH9K_RATESERIES_STBC; in ath_buf_set_rate()
1334 if (rix >= 8 && fi->dyn_smps) { in ath_buf_set_rate()
1335 info->rates[i].RateFlags |= in ath_buf_set_rate()
1337 info->flags |= ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
1340 info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, in ath_buf_set_rate()
1346 rate = &common->sbands[tx_info->band].bitrates[rates[i].idx]; in ath_buf_set_rate()
1347 if ((tx_info->band == NL80211_BAND_2GHZ) && in ath_buf_set_rate()
1348 !(rate->flags & IEEE80211_RATE_ERP_G)) in ath_buf_set_rate()
1353 info->rates[i].Rate = rate->hw_value; in ath_buf_set_rate()
1354 if (rate->hw_value_short) { in ath_buf_set_rate()
1356 info->rates[i].Rate |= rate->hw_value_short; in ath_buf_set_rate()
1361 if (bf->bf_state.bfs_paprd) in ath_buf_set_rate()
1362 info->rates[i].ChSel = ah->txchainmask; in ath_buf_set_rate()
1364 info->rates[i].ChSel = ath_txchainmask_reduction(sc, in ath_buf_set_rate()
1365 ah->txchainmask, info->rates[i].Rate); in ath_buf_set_rate()
1367 info->rates[i].PktDuration = ath9k_hw_computetxtime(sc->sc_ah, in ath_buf_set_rate()
1368 phy, rate->bitrate * 100, len, rix, is_sp); in ath_buf_set_rate()
1370 is_cck = IS_CCK_RATE(info->rates[i].Rate); in ath_buf_set_rate()
1371 info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, false, in ath_buf_set_rate()
1375 /* For AR5416 - RTS cannot be followed by a frame larger than 8K */ in ath_buf_set_rate()
1376 if (bf_isaggr(bf) && (len > sc->sc_ah->caps.rts_aggr_limit)) in ath_buf_set_rate()
1377 info->flags &= ~ATH9K_TXDESC_RTSENA; in ath_buf_set_rate()
1380 if (info->flags & ATH9K_TXDESC_RTSENA) in ath_buf_set_rate()
1381 info->flags &= ~ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
1390 hdr = (struct ieee80211_hdr *)skb->data; in get_hw_packet_type()
1391 fc = hdr->frame_control; in get_hw_packet_type()
1410 struct ath_hw *ah = sc->sc_ah; in ath_tx_fill_desc()
1413 u32 rts_thresh = sc->hw->wiphy->rts_threshold; in ath_tx_fill_desc()
1419 info.qcu = txq->axq_qnum; in ath_tx_fill_desc()
1422 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_fill_desc()
1425 bool aggr = !!(bf->bf_state.bf_type & BUF_AGGR); in ath_tx_fill_desc()
1428 if (bf->bf_next) in ath_tx_fill_desc()
1429 info.link = bf->bf_next->bf_daddr; in ath_tx_fill_desc()
1431 info.link = (sc->tx99_state) ? bf->bf_daddr : 0; in ath_tx_fill_desc()
1436 if (!sc->tx99_state) in ath_tx_fill_desc()
1438 if ((tx_info->flags & IEEE80211_TX_CTL_CLEAR_PS_FILT) || in ath_tx_fill_desc()
1439 txq == sc->tx.uapsdq) in ath_tx_fill_desc()
1442 if (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) in ath_tx_fill_desc()
1444 if (tx_info->flags & IEEE80211_TX_CTL_LDPC) in ath_tx_fill_desc()
1447 if (bf->bf_state.bfs_paprd) in ath_tx_fill_desc()
1448 info.flags |= (u32) bf->bf_state.bfs_paprd << in ath_tx_fill_desc()
1459 unlikely(rts_thresh != (u32) -1)) { in ath_tx_fill_desc()
1468 len = fi->framelen; in ath_tx_fill_desc()
1473 info.buf_addr[0] = bf->bf_buf_addr; in ath_tx_fill_desc()
1474 info.buf_len[0] = skb->len; in ath_tx_fill_desc()
1475 info.pkt_len = fi->framelen; in ath_tx_fill_desc()
1476 info.keyix = fi->keyix; in ath_tx_fill_desc()
1477 info.keytype = fi->keytype; in ath_tx_fill_desc()
1482 else if (bf == bf_first->bf_lastbf) in ath_tx_fill_desc()
1487 info.ndelim = bf->bf_state.ndelim; in ath_tx_fill_desc()
1491 if (bf == bf_first->bf_lastbf) in ath_tx_fill_desc()
1494 ath9k_hw_set_txdesc(ah, bf->bf_desc, &info); in ath_tx_fill_desc()
1495 bf = bf->bf_next; in ath_tx_fill_desc()
1511 list_add_tail(&bf->list, bf_q); in ath_tx_form_burst()
1513 bf_prev->bf_next = bf; in ath_tx_form_burst()
1523 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_form_burst()
1524 if (tx_info->flags & IEEE80211_TX_CTL_AMPDU) { in ath_tx_form_burst()
1525 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_form_burst()
1529 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_form_burst()
1548 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_sched_aggr()
1549 aggr = !!(tx_info->flags & IEEE80211_TX_CTL_AMPDU); in ath_tx_sched_aggr()
1550 if ((aggr && txq->axq_ampdu_depth >= ATH_AGGR_MIN_QDEPTH) || in ath_tx_sched_aggr()
1551 (!aggr && txq->axq_depth >= ATH_NON_AGGR_MIN_QDEPTH)) { in ath_tx_sched_aggr()
1552 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_sched_aggr()
1553 return -EBUSY; in ath_tx_sched_aggr()
1556 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_sched_aggr()
1563 return -EAGAIN; in ath_tx_sched_aggr()
1565 if (tid->clear_ps_filter || tid->an->no_ps_filter) { in ath_tx_sched_aggr()
1566 tid->clear_ps_filter = false; in ath_tx_sched_aggr()
1567 tx_info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT; in ath_tx_sched_aggr()
1578 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_start()
1586 an = (struct ath_node *)sta->drv_priv; in ath_tx_aggr_start()
1588 txq = txtid->txq; in ath_tx_aggr_start()
1593 * in HT IBSS when a beacon with HT-info is received after the station in ath_tx_aggr_start()
1596 if (sta->deflink.ht_cap.ht_supported) { in ath_tx_aggr_start()
1597 an->maxampdu = (1 << (IEEE80211_HT_MAX_AMPDU_FACTOR + in ath_tx_aggr_start()
1598 sta->deflink.ht_cap.ampdu_factor)) - 1; in ath_tx_aggr_start()
1599 density = ath9k_parse_mpdudensity(sta->deflink.ht_cap.ampdu_density); in ath_tx_aggr_start()
1600 an->mpdudensity = density; in ath_tx_aggr_start()
1603 txtid->active = true; in ath_tx_aggr_start()
1604 *ssn = txtid->seq_start = txtid->seq_next; in ath_tx_aggr_start()
1605 txtid->bar_index = -1; in ath_tx_aggr_start()
1607 memset(txtid->tx_buf, 0, sizeof(txtid->tx_buf)); in ath_tx_aggr_start()
1608 txtid->baw_head = txtid->baw_tail = 0; in ath_tx_aggr_start()
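The maxampdu computation above follows 802.11n: the peer's 2-bit ampdu_factor is added to the baseline exponent IEEE80211_HT_MAX_AMPDU_FACTOR (13) to give a power-of-two byte limit. A one-liner check:

#include <stdio.h>

#define IEEE80211_HT_MAX_AMPDU_FACTOR 13

int main(void)
{
	for (int factor = 0; factor <= 3; factor++)   /* from peer HT capabilities */
		printf("ampdu_factor %d -> maxampdu %d bytes\n", factor,
		       (1 << (IEEE80211_HT_MAX_AMPDU_FACTOR + factor)) - 1);
	return 0;                                     /* 8191 ... 65535 */
}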
1617 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_stop()
1618 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath_tx_aggr_stop()
1620 struct ath_txq *txq = txtid->txq; in ath_tx_aggr_stop()
1625 txtid->active = false; in ath_tx_aggr_stop()
1633 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_sleep()
1642 if (!skb_queue_empty(&tid->retry_q)) in ath_tx_aggr_sleep()
1643 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_aggr_sleep()
1650 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_wakeup()
1659 txq = tid->txq; in ath_tx_aggr_wakeup()
1662 tid->clear_ps_filter = true; in ath_tx_aggr_wakeup()
1663 if (!skb_queue_empty(&tid->retry_q)) { in ath_tx_aggr_wakeup()
1686 hdr = (struct ieee80211_hdr *) bf->bf_mpdu->data; in ath9k_set_moredata()
1687 if ((hdr->frame_control & mask) != mask_val) { in ath9k_set_moredata()
1688 hdr->frame_control = (hdr->frame_control & ~mask) | mask_val; in ath9k_set_moredata()
1689 dma_sync_single_for_device(sc->dev, bf->bf_buf_addr, in ath9k_set_moredata()
1700 struct ath_softc *sc = hw->priv; in ath9k_release_buffered_frames()
1701 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath9k_release_buffered_frames()
1702 struct ath_txq *txq = sc->tx.uapsdq; in ath9k_release_buffered_frames()
1717 ath_txq_lock(sc, tid->txq); in ath9k_release_buffered_frames()
1719 ret = ath_tx_get_tid_subframe(sc, sc->tx.uapsdq, in ath9k_release_buffered_frames()
1725 list_add_tail(&bf->list, &bf_q); in ath9k_release_buffered_frames()
1726 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath9k_release_buffered_frames()
1728 bf->bf_state.bf_type &= ~BUF_AGGR; in ath9k_release_buffered_frames()
1730 bf_tail->bf_next = bf; in ath9k_release_buffered_frames()
1733 nframes--; in ath9k_release_buffered_frames()
1734 TX_STAT_INC(sc, txq->axq_qnum, a_queued_hw); in ath9k_release_buffered_frames()
1736 if (an->sta && skb_queue_empty(&tid->retry_q)) in ath9k_release_buffered_frames()
1737 ieee80211_sta_set_buffered(an->sta, i, false); in ath9k_release_buffered_frames()
1739 ath_txq_unlock_complete(sc, tid->txq); in ath9k_release_buffered_frames()
1748 info = IEEE80211_SKB_CB(bf_tail->bf_mpdu); in ath9k_release_buffered_frames()
1749 info->flags |= IEEE80211_TX_STATUS_EOSP; in ath9k_release_buffered_frames()
1764 struct ath_hw *ah = sc->sc_ah; in ath_txq_setup()
1793 * The UAPSD queue is an exception, since we take a desc- in ath_txq_setup()
1796 if (ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) { in ath_txq_setup()
1806 if (axq_qnum == -1) { in ath_txq_setup()
1814 struct ath_txq *txq = &sc->tx.txq[axq_qnum]; in ath_txq_setup()
1816 txq->axq_qnum = axq_qnum; in ath_txq_setup()
1817 txq->mac80211_qnum = -1; in ath_txq_setup()
1818 txq->axq_link = NULL; in ath_txq_setup()
1819 __skb_queue_head_init(&txq->complete_q); in ath_txq_setup()
1820 INIT_LIST_HEAD(&txq->axq_q); in ath_txq_setup()
1821 spin_lock_init(&txq->axq_lock); in ath_txq_setup()
1822 txq->axq_depth = 0; in ath_txq_setup()
1823 txq->axq_ampdu_depth = 0; in ath_txq_setup()
1824 txq->axq_tx_inprogress = false; in ath_txq_setup()
1825 sc->tx.txqsetup |= 1<<axq_qnum; in ath_txq_setup()
1827 txq->txq_headidx = txq->txq_tailidx = 0; in ath_txq_setup()
1829 INIT_LIST_HEAD(&txq->txq_fifo[i]); in ath_txq_setup()
1831 return &sc->tx.txq[axq_qnum]; in ath_txq_setup()
1837 struct ath_hw *ah = sc->sc_ah; in ath_txq_update()
1841 BUG_ON(sc->tx.txq[qnum].axq_qnum != qnum); in ath_txq_update()
1844 qi.tqi_aifs = qinfo->tqi_aifs; in ath_txq_update()
1845 qi.tqi_cwmin = qinfo->tqi_cwmin; in ath_txq_update()
1846 qi.tqi_cwmax = qinfo->tqi_cwmax; in ath_txq_update()
1847 qi.tqi_burstTime = qinfo->tqi_burstTime; in ath_txq_update()
1848 qi.tqi_readyTime = qinfo->tqi_readyTime; in ath_txq_update()
1851 ath_err(ath9k_hw_common(sc->sc_ah), in ath_txq_update()
1853 error = -EIO; in ath_txq_update()
1864 struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon; in ath_cabq_update()
1865 int qnum = sc->beacon.cabq->axq_qnum; in ath_cabq_update()
1867 ath9k_hw_get_txq_props(sc->sc_ah, qnum, &qi); in ath_cabq_update()
1869 qi.tqi_readyTime = (TU_TO_USEC(cur_conf->beacon_interval) * in ath_cabq_update()
1890 if (bf->bf_state.stale) { in ath_drain_txq_list()
1891 list_del(&bf->list); in ath_drain_txq_list()
1897 lastbf = bf->bf_lastbf; in ath_drain_txq_list()
1898 list_cut_position(&bf_head, list, &lastbf->list); in ath_drain_txq_list()
1914 if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) { in ath_draintxq()
1915 int idx = txq->txq_tailidx; in ath_draintxq()
1917 while (!list_empty(&txq->txq_fifo[idx])) { in ath_draintxq()
1918 ath_drain_txq_list(sc, txq, &txq->txq_fifo[idx]); in ath_draintxq()
1922 txq->txq_tailidx = idx; in ath_draintxq()
1925 txq->axq_link = NULL; in ath_draintxq()
1926 txq->axq_tx_inprogress = false; in ath_draintxq()
1927 ath_drain_txq_list(sc, txq, &txq->axq_q); in ath_draintxq()
1935 struct ath_hw *ah = sc->sc_ah; in ath_drain_all_txq()
1936 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_drain_all_txq()
1941 if (test_bit(ATH_OP_INVALID, &common->op_flags)) in ath_drain_all_txq()
1951 if (!sc->tx.txq[i].axq_depth) in ath_drain_all_txq()
1954 if (ath9k_hw_numtxpending(ah, sc->tx.txq[i].axq_qnum)) in ath_drain_all_txq()
1968 txq = &sc->tx.txq[i]; in ath_drain_all_txq()
1977 ath9k_hw_releasetxqueue(sc->sc_ah, txq->axq_qnum); in ath_tx_cleanupq()
1978 sc->tx.txqsetup &= ~(1<<txq->axq_qnum); in ath_tx_cleanupq()
1986 struct ieee80211_hw *hw = sc->hw; in ath_txq_schedule()
1987 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_txq_schedule()
1992 if (txq->mac80211_qnum < 0) in ath_txq_schedule()
1995 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_txq_schedule()
1998 ieee80211_txq_schedule_start(hw, txq->mac80211_qnum); in ath_txq_schedule()
1999 spin_lock_bh(&sc->chan_lock); in ath_txq_schedule()
2002 if (sc->cur_chan->stopped) in ath_txq_schedule()
2005 while ((queue = ieee80211_next_txq(hw, txq->mac80211_qnum))) { in ath_txq_schedule()
2008 tid = (struct ath_atx_tid *)queue->drv_priv; in ath_txq_schedule()
2013 force = !skb_queue_empty(&tid->retry_q); in ath_txq_schedule()
2019 spin_unlock_bh(&sc->chan_lock); in ath_txq_schedule()
2020 ieee80211_txq_schedule_end(hw, txq->mac80211_qnum); in ath_txq_schedule()
2029 txq = sc->tx.txq_map[i]; in ath_txq_schedule_all()
2031 spin_lock_bh(&txq->axq_lock); in ath_txq_schedule_all()
2033 spin_unlock_bh(&txq->axq_lock); in ath_txq_schedule_all()
2048 struct ath_hw *ah = sc->sc_ah; in ath_tx_txqaddbuf()
2062 edma = !!(ah->caps.hw_caps & ATH9K_HW_CAP_EDMA); in ath_tx_txqaddbuf()
2064 bf_last = list_entry(head->prev, struct ath_buf, list); in ath_tx_txqaddbuf()
2067 txq->axq_qnum, txq->axq_depth); in ath_tx_txqaddbuf()
2069 if (edma && list_empty(&txq->txq_fifo[txq->txq_headidx])) { in ath_tx_txqaddbuf()
2070 list_splice_tail_init(head, &txq->txq_fifo[txq->txq_headidx]); in ath_tx_txqaddbuf()
2071 INCR(txq->txq_headidx, ATH_TXFIFO_DEPTH); in ath_tx_txqaddbuf()
2074 list_splice_tail_init(head, &txq->axq_q); in ath_tx_txqaddbuf()
2076 if (txq->axq_link) { in ath_tx_txqaddbuf()
2077 ath9k_hw_set_desc_link(ah, txq->axq_link, bf->bf_daddr); in ath_tx_txqaddbuf()
2079 txq->axq_qnum, txq->axq_link, in ath_tx_txqaddbuf()
2080 ito64(bf->bf_daddr), bf->bf_desc); in ath_tx_txqaddbuf()
2084 txq->axq_link = bf_last->bf_desc; in ath_tx_txqaddbuf()
2088 TX_STAT_INC(sc, txq->axq_qnum, puttxbuf); in ath_tx_txqaddbuf()
2089 ath9k_hw_puttxbuf(ah, txq->axq_qnum, bf->bf_daddr); in ath_tx_txqaddbuf()
2091 txq->axq_qnum, ito64(bf->bf_daddr), bf->bf_desc); in ath_tx_txqaddbuf()
2094 if (!edma || sc->tx99_state) { in ath_tx_txqaddbuf()
2095 TX_STAT_INC(sc, txq->axq_qnum, txstart); in ath_tx_txqaddbuf()
2096 ath9k_hw_txstart(ah, txq->axq_qnum); in ath_tx_txqaddbuf()
2101 txq->axq_depth++; in ath_tx_txqaddbuf()
2103 txq->axq_ampdu_depth++; in ath_tx_txqaddbuf()
2105 bf_last = bf->bf_lastbf; in ath_tx_txqaddbuf()
2106 bf = bf_last->bf_next; in ath_tx_txqaddbuf()
2107 bf_last->bf_next = NULL; in ath_tx_txqaddbuf()
2118 struct ath_buf *bf = fi->bf; in ath_tx_send_normal()
2121 list_add_tail(&bf->list, &bf_head); in ath_tx_send_normal()
2122 bf->bf_state.bf_type = 0; in ath_tx_send_normal()
2123 if (tid && (tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_send_normal()
2124 bf->bf_state.bf_type = BUF_AMPDU; in ath_tx_send_normal()
2128 bf->bf_next = NULL; in ath_tx_send_normal()
2129 bf->bf_lastbf = bf; in ath_tx_send_normal()
2130 ath_tx_fill_desc(sc, bf, txq, fi->framelen); in ath_tx_send_normal()
2132 TX_STAT_INC(sc, txq->axq_qnum, queued); in ath_tx_send_normal()
2141 struct ieee80211_key_conf *hw_key = tx_info->control.hw_key; in setup_frame_info()
2142 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in setup_frame_info()
2148 u8 txpower; in setup_frame_info() local
2155 if (tx_info->control.vif && in setup_frame_info()
2156 tx_info->control.vif->bss_conf.use_short_preamble) in setup_frame_info()
2163 an = (struct ath_node *) sta->drv_priv; in setup_frame_info()
2165 if (tx_info->control.vif) { in setup_frame_info()
2166 struct ieee80211_vif *vif = tx_info->control.vif; in setup_frame_info()
2167 if (vif->bss_conf.txpower == INT_MIN) in setup_frame_info()
2169 txpower = 2 * vif->bss_conf.txpower; in setup_frame_info()
2173 sc = hw->priv; in setup_frame_info()
2175 txpower = sc->cur_chan->cur_txpower; in setup_frame_info()
2179 fi->txq = -1; in setup_frame_info()
2181 fi->keyix = hw_key->hw_key_idx; in setup_frame_info()
2182 else if (an && ieee80211_is_data(hdr->frame_control) && an->ps_key > 0) in setup_frame_info()
2183 fi->keyix = an->ps_key; in setup_frame_info()
2185 fi->keyix = ATH9K_TXKEYIX_INVALID; in setup_frame_info()
2186 fi->dyn_smps = sta && sta->deflink.smps_mode == IEEE80211_SMPS_DYNAMIC; in setup_frame_info()
2187 fi->keytype = keytype; in setup_frame_info()
2188 fi->framelen = framelen; in setup_frame_info()
2189 fi->tx_power = txpower; in setup_frame_info()
2193 fi->rtscts_rate = rate->hw_value; in setup_frame_info()
2195 fi->rtscts_rate |= rate->hw_value_short; in setup_frame_info()
2200 struct ath_hw *ah = sc->sc_ah; in ath_txchainmask_reduction()
2201 struct ath9k_channel *curchan = ah->curchan; in ath_txchainmask_reduction()
2203 if ((ah->caps.hw_caps & ATH9K_HW_CAP_APM) && IS_CHAN_5GHZ(curchan) && in ath_txchainmask_reduction()
2222 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_setup_buffer()
2224 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_setup_buffer()
2237 if (tid && ieee80211_is_data_present(hdr->frame_control)) { in ath_tx_setup_buffer()
2238 fragno = le16_to_cpu(hdr->seq_ctrl) & IEEE80211_SCTL_FRAG; in ath_tx_setup_buffer()
2239 seqno = tid->seq_next; in ath_tx_setup_buffer()
2240 hdr->seq_ctrl = cpu_to_le16(tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT); in ath_tx_setup_buffer()
2243 hdr->seq_ctrl |= cpu_to_le16(fragno); in ath_tx_setup_buffer()
2245 if (!ieee80211_has_morefrags(hdr->frame_control)) in ath_tx_setup_buffer()
2246 INCR(tid->seq_next, IEEE80211_SEQ_MAX); in ath_tx_setup_buffer()
2248 bf->bf_state.seqno = seqno; in ath_tx_setup_buffer()
2251 bf->bf_mpdu = skb; in ath_tx_setup_buffer()
2253 bf->bf_buf_addr = dma_map_single(sc->dev, skb->data, in ath_tx_setup_buffer()
2254 skb->len, DMA_TO_DEVICE); in ath_tx_setup_buffer()
2255 if (unlikely(dma_mapping_error(sc->dev, bf->bf_buf_addr))) { in ath_tx_setup_buffer()
2256 bf->bf_mpdu = NULL; in ath_tx_setup_buffer()
2257 bf->bf_buf_addr = 0; in ath_tx_setup_buffer()
2258 ath_err(ath9k_hw_common(sc->sc_ah), in ath_tx_setup_buffer()
2264 fi->bf = bf; in ath_tx_setup_buffer()
2271 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath_assign_seq()
2273 struct ieee80211_vif *vif = info->control.vif; in ath_assign_seq()
2276 if (!(info->flags & IEEE80211_TX_CTL_ASSIGN_SEQ)) in ath_assign_seq()
2282 avp = (struct ath_vif *)vif->drv_priv; in ath_assign_seq()
2284 if (info->flags & IEEE80211_TX_CTL_FIRST_FRAGMENT) in ath_assign_seq()
2285 avp->seq_no += 0x10; in ath_assign_seq()
2287 hdr->seq_ctrl &= cpu_to_le16(IEEE80211_SCTL_FRAG); in ath_assign_seq()
2288 hdr->seq_ctrl |= cpu_to_le16(avp->seq_no); in ath_assign_seq()
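ath_assign_seq() advances the per-vif sequence counter by 0x10 because the 802.11 sequence-control field keeps the fragment number in its low 4 bits. A sketch of the packing (host byte order, ignoring the le16 handling above):

#include <stdio.h>
#include <stdint.h>

#define IEEE80211_SCTL_FRAG 0x000F
#define IEEE80211_SCTL_SEQ  0xFFF0

int main(void)
{
	uint16_t seq_no = 5 << 4;                 /* sequence number 5 */
	uint16_t seq_ctrl = 0x0002;               /* fragment 2 of the frame */

	seq_ctrl &= IEEE80211_SCTL_FRAG;          /* keep fragment bits */
	seq_ctrl |= seq_no;                       /* install the sequence number */
	seq_no += 0x10;                           /* next MSDU: seq 6 */

	printf("seq_ctrl=0x%04x seq=%u frag=%u\n", seq_ctrl,
	       seq_ctrl >> 4, seq_ctrl & IEEE80211_SCTL_FRAG);
	return 0;
}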
2294 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath_tx_prepare()
2296 struct ieee80211_sta *sta = txctl->sta; in ath_tx_prepare()
2297 struct ieee80211_vif *vif = info->control.vif; in ath_tx_prepare()
2299 struct ath_softc *sc = hw->priv; in ath_tx_prepare()
2300 int frmlen = skb->len + FCS_LEN; in ath_tx_prepare()
2305 txctl->an = (struct ath_node *)sta->drv_priv; in ath_tx_prepare()
2306 else if (vif && ieee80211_is_data(hdr->frame_control)) { in ath_tx_prepare()
2307 avp = (void *)vif->drv_priv; in ath_tx_prepare()
2308 txctl->an = &avp->mcast_node; in ath_tx_prepare()
2311 if (info->control.hw_key) in ath_tx_prepare()
2312 frmlen += info->control.hw_key->icv_len; in ath_tx_prepare()
2314 ath_assign_seq(ath9k_hw_common(sc->sc_ah), skb); in ath_tx_prepare()
2316 if ((vif && vif->type != NL80211_IFTYPE_AP && in ath_tx_prepare()
2317 vif->type != NL80211_IFTYPE_AP_VLAN) || in ath_tx_prepare()
2318 !ieee80211_is_data(hdr->frame_control)) in ath_tx_prepare()
2319 info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT; in ath_tx_prepare()
2322 padpos = ieee80211_hdrlen(hdr->frame_control); in ath_tx_prepare()
2324 if (padsize && skb->len > padpos) { in ath_tx_prepare()
2326 return -ENOMEM; in ath_tx_prepare()
2329 memmove(skb->data, skb->data + padsize, padpos); in ath_tx_prepare()
2342 struct ieee80211_sta *sta = txctl->sta; in ath_tx_start()
2343 struct ieee80211_vif *vif = info->control.vif; in ath_tx_start()
2345 struct ath_softc *sc = hw->priv; in ath_tx_start()
2346 struct ath_txq *txq = txctl->txq; in ath_tx_start()
2353 ps_resp = !!(info->control.flags & IEEE80211_TX_CTRL_PS_RESPONSE); in ath_tx_start()
2367 txq = sc->tx.uapsdq; in ath_tx_start()
2369 if (txctl->sta) { in ath_tx_start()
2370 an = (struct ath_node *) sta->drv_priv; in ath_tx_start()
2375 if (txq == sc->tx.txq_map[q]) { in ath_tx_start()
2376 fi->txq = q; in ath_tx_start()
2377 ++txq->pending_frames; in ath_tx_start()
2383 if (txctl->paprd) in ath_tx_start()
2386 ieee80211_free_txskb(sc->hw, skb); in ath_tx_start()
2390 bf->bf_state.bfs_paprd = txctl->paprd; in ath_tx_start()
2392 if (txctl->paprd) in ath_tx_start()
2393 bf->bf_state.bfs_paprd_timestamp = jiffies; in ath_tx_start()
2407 struct ath_softc *sc = hw->priv; in ath_tx_cabq()
2409 .txq = sc->beacon.cabq in ath_tx_cabq()
2419 sc->cur_chan->beacon.beacon_interval * 1000 * in ath_tx_cabq()
2420 sc->cur_chan->beacon.dtim_period / ATH_BCBUF; in ath_tx_cabq()
2432 bf->bf_lastbf = bf; in ath_tx_cabq()
2434 ath_buf_set_rate(sc, bf, &info, fi->framelen, false); in ath_tx_cabq()
2437 bf_tail->bf_next = bf; in ath_tx_cabq()
2439 list_add_tail(&bf->list, &bf_q); in ath_tx_cabq()
2462 TX_STAT_INC(sc, txctl.txq->axq_qnum, queued); in ath_tx_cabq()
2475 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_complete()
2476 struct ieee80211_hdr * hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_complete()
2482 if (sc->sc_ah->caldata) in ath_tx_complete()
2483 set_bit(PAPRD_PACKET_SENT, &sc->sc_ah->caldata->cal_flags); in ath_tx_complete()
2486 if (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) in ath_tx_complete()
2487 tx_info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in ath_tx_complete()
2489 tx_info->flags |= IEEE80211_TX_STAT_ACK; in ath_tx_complete()
2492 if (tx_info->flags & IEEE80211_TX_CTL_REQ_TX_STATUS) { in ath_tx_complete()
2493 padpos = ieee80211_hdrlen(hdr->frame_control); in ath_tx_complete()
2495 if (padsize && skb->len>padpos+padsize) { in ath_tx_complete()
2500 memmove(skb->data + padsize, skb->data, padpos); in ath_tx_complete()
2505 spin_lock_irqsave(&sc->sc_pm_lock, flags); in ath_tx_complete()
2506 if ((sc->ps_flags & PS_WAIT_FOR_TX_ACK) && !txq->axq_depth) { in ath_tx_complete()
2507 sc->ps_flags &= ~PS_WAIT_FOR_TX_ACK; in ath_tx_complete()
2510 sc->ps_flags & (PS_WAIT_FOR_BEACON | in ath_tx_complete()
2515 spin_unlock_irqrestore(&sc->sc_pm_lock, flags); in ath_tx_complete()
2518 tx_info->status.status_driver_data[0] = sta; in ath_tx_complete()
2519 __skb_queue_tail(&txq->complete_q, skb); in ath_tx_complete()
2527 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_complete_buf()
2535 if (ts->ts_status & ATH9K_TXERR_FILT) in ath_tx_complete_buf()
2536 tx_info->flags |= IEEE80211_TX_STAT_TX_FILTERED; in ath_tx_complete_buf()
2538 dma_unmap_single(sc->dev, bf->bf_buf_addr, skb->len, DMA_TO_DEVICE); in ath_tx_complete_buf()
2539 bf->bf_buf_addr = 0; in ath_tx_complete_buf()
2540 if (sc->tx99_state) in ath_tx_complete_buf()
2543 if (bf->bf_state.bfs_paprd) { in ath_tx_complete_buf()
2545 bf->bf_state.bfs_paprd_timestamp + in ath_tx_complete_buf()
2549 complete(&sc->paprd_complete); in ath_tx_complete_buf()
2555 /* At this point, skb (bf->bf_mpdu) is consumed...make sure we don't in ath_tx_complete_buf()
2558 bf->bf_mpdu = NULL; in ath_tx_complete_buf()
2563 spin_lock_irqsave(&sc->tx.txbuflock, flags); in ath_tx_complete_buf()
2564 list_splice_tail_init(bf_q, &sc->tx.txbuf); in ath_tx_complete_buf()
2565 spin_unlock_irqrestore(&sc->tx.txbuflock, flags); in ath_tx_complete_buf()
2570 void *ptr = &tx_info->status; in ath_clear_tx_status()
2572 memset(ptr + sizeof(tx_info->status.rates), 0, in ath_clear_tx_status()
2573 sizeof(tx_info->status) - in ath_clear_tx_status()
2574 sizeof(tx_info->status.rates) - in ath_clear_tx_status()
2575 sizeof(tx_info->status.status_driver_data)); in ath_clear_tx_status()
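ath_clear_tx_status() zeroes only the middle of the status block: the rates[] array at the front and the status_driver_data[] scratch at the back survive, and everything between is cleared with one offset memset. A generic sketch with a hypothetical struct layout:

#include <stdio.h>
#include <string.h>

struct status {                     /* hypothetical layout, same idea */
	int rates[4];               /* preserved: front of the struct */
	int ack_signal, ampdu_len;  /* cleared */
	void *driver_data[4];       /* preserved: tail of the struct */
};

static void clear_middle(struct status *st)
{
	unsigned char *ptr = (unsigned char *)st;

	memset(ptr + sizeof(st->rates), 0,
	       sizeof(*st) - sizeof(st->rates) - sizeof(st->driver_data));
}

int main(void)
{
	struct status st = { .rates = { 1, 2, 3, 4 }, .ack_signal = -40,
			     .driver_data = { &st } };
	clear_middle(&st);
	printf("rates[0]=%d ack=%d dd0=%p\n", st.rates[0], st.ack_signal,
	       st.driver_data[0]);   /* rates and driver_data untouched */
	return 0;
}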
2582 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_rc_status()
2583 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_rc_status()
2585 struct ieee80211_hw *hw = sc->hw; in ath_tx_rc_status()
2586 struct ath_hw *ah = sc->sc_ah; in ath_tx_rc_status()
2592 tx_info->status.ack_signal = ts->ts_rssi; in ath_tx_rc_status()
2594 tx_rateindex = ts->ts_rateindex; in ath_tx_rc_status()
2595 WARN_ON(tx_rateindex >= hw->max_rates); in ath_tx_rc_status()
2597 if (tx_info->flags & IEEE80211_TX_CTL_AMPDU) { in ath_tx_rc_status()
2598 tx_info->flags |= IEEE80211_TX_STAT_AMPDU; in ath_tx_rc_status()
2602 tx_info->status.ampdu_len = nframes; in ath_tx_rc_status()
2603 tx_info->status.ampdu_ack_len = nframes - nbad; in ath_tx_rc_status()
2605 tx_info->status.rates[tx_rateindex].count = ts->ts_longretry + 1; in ath_tx_rc_status()
2607 for (i = tx_rateindex + 1; i < hw->max_rates; i++) { in ath_tx_rc_status()
2608 tx_info->status.rates[i].count = 0; in ath_tx_rc_status()
2609 tx_info->status.rates[i].idx = -1; in ath_tx_rc_status()
2612 if ((ts->ts_status & ATH9K_TXERR_FILT) == 0 && in ath_tx_rc_status()
2613 (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) == 0) { in ath_tx_rc_status()
2619 * hw->max_rate_tries times to affect how rate control updates in ath_tx_rc_status()
2626 if (unlikely(ts->ts_flags & (ATH9K_TX_DATA_UNDERRUN | in ath_tx_rc_status()
2628 ieee80211_is_data(hdr->frame_control) && in ath_tx_rc_status()
2629 ah->tx_trig_level >= sc->sc_ah->config.max_txtrig_level) in ath_tx_rc_status()
2630 tx_info->status.rates[tx_rateindex].count = in ath_tx_rc_status()
2631 hw->max_rate_tries; in ath_tx_rc_status()
2637 struct ath_hw *ah = sc->sc_ah; in ath_tx_processq()
2646 txq->axq_qnum, ath9k_hw_gettxbuf(sc->sc_ah, txq->axq_qnum), in ath_tx_processq()
2647 txq->axq_link); in ath_tx_processq()
2651 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_tx_processq()
2654 if (list_empty(&txq->axq_q)) { in ath_tx_processq()
2655 txq->axq_link = NULL; in ath_tx_processq()
2659 bf = list_first_entry(&txq->axq_q, struct ath_buf, list); in ath_tx_processq()
2663 * after sw writes TxE and before hw re-load the last in ath_tx_processq()
2666 * holding descriptor - software does so by marking in ath_tx_processq()
2670 if (bf->bf_state.stale) { in ath_tx_processq()
2672 if (list_is_last(&bf_held->list, &txq->axq_q)) in ath_tx_processq()
2675 bf = list_entry(bf_held->list.next, struct ath_buf, in ath_tx_processq()
2679 lastbf = bf->bf_lastbf; in ath_tx_processq()
2680 ds = lastbf->bf_desc; in ath_tx_processq()
2684 if (status == -EINPROGRESS) in ath_tx_processq()
2687 TX_STAT_INC(sc, txq->axq_qnum, txprocdesc); in ath_tx_processq()
2694 lastbf->bf_state.stale = true; in ath_tx_processq()
2696 if (!list_is_singular(&lastbf->list)) in ath_tx_processq()
2698 &txq->axq_q, lastbf->list.prev); in ath_tx_processq()
2701 list_del(&bf_held->list); in ath_tx_processq()
2712 struct ath_hw *ah = sc->sc_ah; in ath_tx_tasklet()
2713 u32 qcumask = ((1 << ATH9K_NUM_TX_QUEUES) - 1) & ah->intr_txqs; in ath_tx_tasklet()
2719 ath_tx_processq(sc, &sc->tx.txq[i]); in ath_tx_tasklet()
2727 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_edma_tasklet()
2728 struct ath_hw *ah = sc->sc_ah; in ath_tx_edma_tasklet()
2737 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_tx_edma_tasklet()
2741 if (status == -EINPROGRESS) in ath_tx_edma_tasklet()
2743 if (status == -EIO) { in ath_tx_edma_tasklet()
2749 if (ts.qid == sc->beacon.beaconq) { in ath_tx_edma_tasklet()
2750 sc->beacon.tx_processed = true; in ath_tx_edma_tasklet()
2751 sc->beacon.tx_last = !(ts.ts_status & ATH9K_TXERR_MASK); in ath_tx_edma_tasklet()
2762 txq = &sc->tx.txq[ts.qid]; in ath_tx_edma_tasklet()
2766 TX_STAT_INC(sc, txq->axq_qnum, txprocdesc); in ath_tx_edma_tasklet()
2768 fifo_list = &txq->txq_fifo[txq->txq_tailidx]; in ath_tx_edma_tasklet()
2775 if (bf->bf_state.stale) { in ath_tx_edma_tasklet()
2776 list_del(&bf->list); in ath_tx_edma_tasklet()
2781 lastbf = bf->bf_lastbf; in ath_tx_edma_tasklet()
2784 if (list_is_last(&lastbf->list, fifo_list)) { in ath_tx_edma_tasklet()
2786 INCR(txq->txq_tailidx, ATH_TXFIFO_DEPTH); in ath_tx_edma_tasklet()
2788 if (!list_empty(&txq->axq_q)) { in ath_tx_edma_tasklet()
2792 txq->axq_link = NULL; in ath_tx_edma_tasklet()
2793 list_splice_tail_init(&txq->axq_q, &bf_q); in ath_tx_edma_tasklet()
2797 lastbf->bf_state.stale = true; in ath_tx_edma_tasklet()
2800 lastbf->list.prev); in ath_tx_edma_tasklet()
2815 struct ath_descdma *dd = &sc->txsdma; in ath_txstatus_setup()
2816 u8 txs_len = sc->sc_ah->caps.txs_len; in ath_txstatus_setup()
2818 dd->dd_desc_len = size * txs_len; in ath_txstatus_setup()
2819 dd->dd_desc = dmam_alloc_coherent(sc->dev, dd->dd_desc_len, in ath_txstatus_setup()
2820 &dd->dd_desc_paddr, GFP_KERNEL); in ath_txstatus_setup()
2821 if (!dd->dd_desc) in ath_txstatus_setup()
2822 return -ENOMEM; in ath_txstatus_setup()
2833 ath9k_hw_setup_statusring(sc->sc_ah, sc->txsdma.dd_desc, in ath_tx_edma_init()
2834 sc->txsdma.dd_desc_paddr, in ath_tx_edma_init()
2842 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_init()
2845 spin_lock_init(&sc->tx.txbuflock); in ath_tx_init()
2847 error = ath_descdma_setup(sc, &sc->tx.txdma, &sc->tx.txbuf, in ath_tx_init()
2855 error = ath_descdma_setup(sc, &sc->beacon.bdma, &sc->beacon.bbuf, in ath_tx_init()
2863 if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) in ath_tx_init()
2876 tid->an = an; in ath_tx_node_init()
2877 tid->tidno = tidno; in ath_tx_node_init()
2878 tid->seq_start = tid->seq_next = 0; in ath_tx_node_init()
2879 tid->baw_size = WME_MAX_BA; in ath_tx_node_init()
2880 tid->baw_head = tid->baw_tail = 0; in ath_tx_node_init()
2881 tid->active = false; in ath_tx_node_init()
2882 tid->clear_ps_filter = true; in ath_tx_node_init()
2883 __skb_queue_head_init(&tid->retry_q); in ath_tx_node_init()
2884 INIT_LIST_HEAD(&tid->list); in ath_tx_node_init()
2886 tid->txq = sc->tx.txq_map[acno]; in ath_tx_node_init()
2888 if (!an->sta) in ath_tx_node_init()
2903 txq = tid->txq; in ath_tx_node_cleanup()
2907 if (!list_empty(&tid->list)) in ath_tx_node_cleanup()
2908 list_del_init(&tid->list); in ath_tx_node_cleanup()
2911 tid->active = false; in ath_tx_node_cleanup()
2915 if (!an->sta) in ath_tx_node_cleanup()
2927 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath9k_tx99_send()
2929 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath9k_tx99_send()
2933 padpos = ieee80211_hdrlen(hdr->frame_control); in ath9k_tx99_send()
2936 if (padsize && skb->len > padpos) { in ath9k_tx99_send()
2940 return -EINVAL; in ath9k_tx99_send()
2944 memmove(skb->data, skb->data + padsize, padpos); in ath9k_tx99_send()
2947 fi->keyix = ATH9K_TXKEYIX_INVALID; in ath9k_tx99_send()
2948 fi->framelen = skb->len + FCS_LEN; in ath9k_tx99_send()
2949 fi->keytype = ATH9K_KEY_TYPE_CLEAR; in ath9k_tx99_send()
2951 bf = ath_tx_setup_buffer(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()
2954 return -EINVAL; in ath9k_tx99_send()
2957 ath_set_rates(sc->tx99_vif, NULL, bf); in ath9k_tx99_send()
2959 ath9k_hw_set_desc_link(sc->sc_ah, bf->bf_desc, bf->bf_daddr); in ath9k_tx99_send()
2960 ath9k_hw_tx99_start(sc->sc_ah, txctl->txq->axq_qnum); in ath9k_tx99_send()
2962 ath_tx_send_normal(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()