Lines Matching +full:rates +full:- +full:mcs

1 // SPDX-License-Identifier: BSD-3-Clause-Clear
3 * Copyright (c) 2018-2019 The Linux Foundation. All rights reserved.
19 struct ath11k_base *ab = arvif->ar->ab; in ath11k_dp_tx_get_encap_type()
21 if (test_bit(ATH11K_FLAG_RAW_MODE, &ab->dev_flags)) in ath11k_dp_tx_get_encap_type()
24 if (tx_info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP) in ath11k_dp_tx_get_encap_type()
32 struct ieee80211_hdr *hdr = (void *)skb->data; in ath11k_dp_tx_encap_nwifi()
35 if (!ieee80211_is_data_qos(hdr->frame_control)) in ath11k_dp_tx_encap_nwifi()
39 memmove(skb->data + IEEE80211_QOS_CTL_LEN, in ath11k_dp_tx_encap_nwifi()
41 skb->data, (void *)qos_ctl - (void *)skb->data); in ath11k_dp_tx_encap_nwifi()
43 skb->data, qos_ctl - (u8 *)skb->data); in ath11k_dp_tx_encap_nwifi()
47 hdr = (void *)skb->data; in ath11k_dp_tx_encap_nwifi()
48 hdr->frame_control &= ~__cpu_to_le16(IEEE80211_STYPE_QOS_DATA); in ath11k_dp_tx_encap_nwifi()
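
The native-wifi encap helper above drops the 2-byte QoS control field by shifting the bytes in front of it forward with memmove() and then pulling the head, after which the QOS_DATA subtype bit is cleared. Below is a minimal self-contained sketch of that shift-and-trim step in plain C; the buffer layout and names are illustrative only, not the driver's skb handling.

/* qos_strip_sketch.c - shift-and-trim pattern used above to drop the
 * 2-byte QoS control field (simplified; plain buffer, no skb).
 */
#include <stdio.h>
#include <string.h>

#define QOS_CTL_LEN 2

int main(void)
{
	/* [ 24-byte 802.11 header ][ 2-byte QoS ctl ][ payload ] */
	char buf[] = "HHHHHHHHHHHHHHHHHHHHHHHHQQpayload";
	size_t hdr_len = 24;
	size_t len = sizeof(buf) - 1;

	/* Shift the header forward over the QoS control field... */
	memmove(buf + QOS_CTL_LEN, buf, hdr_len);

	/* ...then drop the now-stale first two bytes (what skb_pull() does). */
	char *frame = buf + QOS_CTL_LEN;
	size_t new_len = len - QOS_CTL_LEN;

	printf("%.*s\n", (int)new_len, frame); /* header then payload, QoS ctl gone */
	return 0;
}
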
53 struct ieee80211_hdr *hdr = (void *)skb->data; in ath11k_dp_tx_get_tid()
56 if (cb->flags & ATH11K_SKB_HW_80211_ENCAP) in ath11k_dp_tx_get_tid()
57 return skb->priority & IEEE80211_QOS_CTL_TID_MASK; in ath11k_dp_tx_get_tid()
58 else if (!ieee80211_is_data_qos(hdr->frame_control)) in ath11k_dp_tx_get_tid()
61 return skb->priority & IEEE80211_QOS_CTL_TID_MASK; in ath11k_dp_tx_get_tid()
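
ath11k_dp_tx_get_tid() above returns a fixed TID for non-QoS data and otherwise masks skb->priority with IEEE80211_QOS_CTL_TID_MASK. A small userspace sketch of that decision follows; the struct, the NON_QOS_TID sentinel and the helper name are stand-ins for illustration (the mask value 0x000f matches mac80211's definition).

/* tid_sketch.c - TID selection pattern used above (illustrative only). */
#include <stdio.h>
#include <stdint.h>

#define QOS_CTL_TID_MASK 0x000f /* same value as IEEE80211_QOS_CTL_TID_MASK */
#define NON_QOS_TID      16     /* stand-in for the driver's non-QoS TID constant */

struct fake_skb {
	uint32_t priority;  /* mac80211 stores the frame's UP/TID here */
	int is_qos_data;    /* stand-in for ieee80211_is_data_qos() */
};

static unsigned int get_tid(const struct fake_skb *skb)
{
	if (!skb->is_qos_data)
		return NON_QOS_TID;                   /* non-QoS data frame */
	return skb->priority & QOS_CTL_TID_MASK;      /* QoS data: TID from priority */
}

int main(void)
{
	struct fake_skb qos   = { .priority = 6, .is_qos_data = 1 };
	struct fake_skb noqos = { .priority = 6, .is_qos_data = 0 };

	printf("QoS data frame -> TID %u\n", get_tid(&qos));   /* 6 */
	printf("non-QoS frame  -> TID %u\n", get_tid(&noqos)); /* 16 */
	return 0;
}
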
89 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx()
90 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx()
95 struct ieee80211_hdr *hdr = (void *)skb->data; in ath11k_dp_tx()
109 if (unlikely(test_bit(ATH11K_FLAG_CRASH_FLUSH, &ar->ab->dev_flags))) in ath11k_dp_tx()
110 return -ESHUTDOWN; in ath11k_dp_tx()
112 if (unlikely(!(info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP) && in ath11k_dp_tx()
113 !ieee80211_is_data(hdr->frame_control))) in ath11k_dp_tx()
114 return -ENOTSUPP; in ath11k_dp_tx()
116 pool_id = skb_get_queue_mapping(skb) & (ATH11K_HW_MAX_QUEUES - 1); in ath11k_dp_tx()
118 ring_selector = ab->hw_params.hw_ops->get_ring_selector(skb); in ath11k_dp_tx()
123 ti.ring_id = ring_selector % ab->hw_params.max_tx_ring; in ath11k_dp_tx()
124 ti.rbm_id = ab->hw_params.hal_params->tcl2wbm_rbm_map[ti.ring_id].rbm_id; in ath11k_dp_tx()
128 tx_ring = &dp->tx_ring[ti.ring_id]; in ath11k_dp_tx()
130 spin_lock_bh(&tx_ring->tx_idr_lock); in ath11k_dp_tx()
131 ret = idr_alloc(&tx_ring->txbuf_idr, skb, 0, in ath11k_dp_tx()
132 DP_TX_IDR_SIZE - 1, GFP_ATOMIC); in ath11k_dp_tx()
133 spin_unlock_bh(&tx_ring->tx_idr_lock); in ath11k_dp_tx()
136 if (ring_map == (BIT(ab->hw_params.max_tx_ring) - 1) || in ath11k_dp_tx()
137 !ab->hw_params.tcl_ring_retry) { in ath11k_dp_tx()
138 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
139 return -ENOSPC; in ath11k_dp_tx()
147 ti.desc_id = FIELD_PREP(DP_TX_DESC_ID_MAC_ID, ar->pdev_idx) | in ath11k_dp_tx()
152 if (ieee80211_has_a4(hdr->frame_control) && in ath11k_dp_tx()
153 is_multicast_ether_addr(hdr->addr3) && arsta && in ath11k_dp_tx()
154 arsta->use_4addr_set) { in ath11k_dp_tx()
155 ti.meta_data_flags = arsta->tcl_metadata; in ath11k_dp_tx()
158 ti.meta_data_flags = arvif->tcl_metadata; in ath11k_dp_tx()
162 if (skb_cb->flags & ATH11K_SKB_CIPHER_SET) { in ath11k_dp_tx()
164 ath11k_dp_tx_get_encrypt_type(skb_cb->cipher); in ath11k_dp_tx()
166 if (ieee80211_has_protected(hdr->frame_control)) in ath11k_dp_tx()
173 ti.addr_search_flags = arvif->hal_addr_search_flags; in ath11k_dp_tx()
174 ti.search_type = arvif->search_type; in ath11k_dp_tx()
177 ti.lmac_id = ar->lmac_id; in ath11k_dp_tx()
178 ti.bss_ast_hash = arvif->ast_hash; in ath11k_dp_tx()
179 ti.bss_ast_idx = arvif->ast_idx; in ath11k_dp_tx()
182 if (likely(skb->ip_summed == CHECKSUM_PARTIAL && in ath11k_dp_tx()
191 if (ieee80211_vif_is_mesh(arvif->vif)) in ath11k_dp_tx()
203 if (!test_bit(ATH11K_FLAG_RAW_MODE, &ab->dev_flags)) { in ath11k_dp_tx()
204 ret = -EINVAL; in ath11k_dp_tx()
214 ret = -EINVAL; in ath11k_dp_tx()
215 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
219 ti.paddr = dma_map_single(ab->dev, skb->data, skb->len, DMA_TO_DEVICE); in ath11k_dp_tx()
220 if (unlikely(dma_mapping_error(ab->dev, ti.paddr))) { in ath11k_dp_tx()
221 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
223 ret = -ENOMEM; in ath11k_dp_tx()
227 ti.data_len = skb->len; in ath11k_dp_tx()
228 skb_cb->paddr = ti.paddr; in ath11k_dp_tx()
229 skb_cb->vif = arvif->vif; in ath11k_dp_tx()
230 skb_cb->ar = ar; in ath11k_dp_tx()
232 hal_ring_id = tx_ring->tcl_data_ring.ring_id; in ath11k_dp_tx()
233 tcl_ring = &ab->hal.srng_list[hal_ring_id]; in ath11k_dp_tx()
235 spin_lock_bh(&tcl_ring->lock); in ath11k_dp_tx()
245 ab->soc_stats.tx_err.desc_na[ti.ring_id]++; in ath11k_dp_tx()
246 spin_unlock_bh(&tcl_ring->lock); in ath11k_dp_tx()
247 ret = -ENOMEM; in ath11k_dp_tx()
254 if (unlikely(ring_map != (BIT(ab->hw_params.max_tx_ring)) - 1) && in ath11k_dp_tx()
255 ab->hw_params.tcl_ring_retry && ab->hw_params.max_tx_ring > 1) { in ath11k_dp_tx()
268 ath11k_dp_shadow_start_timer(ab, tcl_ring, &dp->tx_ring_timer[ti.ring_id]); in ath11k_dp_tx()
270 spin_unlock_bh(&tcl_ring->lock); in ath11k_dp_tx()
273 skb->data, skb->len); in ath11k_dp_tx()
275 atomic_inc(&ar->dp.num_tx_pending); in ath11k_dp_tx()
280 dma_unmap_single(ab->dev, ti.paddr, ti.data_len, DMA_TO_DEVICE); in ath11k_dp_tx()
283 spin_lock_bh(&tx_ring->tx_idr_lock); in ath11k_dp_tx()
284 idr_remove(&tx_ring->txbuf_idr, in ath11k_dp_tx()
286 spin_unlock_bh(&tx_ring->tx_idr_lock); in ath11k_dp_tx()
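
In ath11k_dp_tx() the pdev index and the IDR-allocated msdu_id are packed into ti.desc_id with FIELD_PREP, and the completion paths later recover them with FIELD_GET. The sketch below shows only that pack/unpack pattern with stand-in macros; the field layout is made up for illustration and is not the driver's DP_TX_DESC_ID_* bit layout.

/* descid_sketch.c - FIELD_PREP/FIELD_GET-style pack and unpack
 * (masks are illustrative, not the driver's).
 */
#include <stdio.h>
#include <stdint.h>

/* Minimal stand-ins for GENMASK()/FIELD_PREP()/FIELD_GET(). */
#define GENMASK32(h, l)    (((~0u) << (l)) & (~0u >> (31 - (h))))
#define FIELD_PREP32(m, v) (((uint32_t)(v) << __builtin_ctz(m)) & (m))
#define FIELD_GET32(m, r)  (((r) & (m)) >> __builtin_ctz(m))

#define DESC_ID_MAC_ID  GENMASK32(31, 30) /* hypothetical field positions */
#define DESC_ID_MSDU_ID GENMASK32(29, 0)

int main(void)
{
	uint32_t pdev_idx = 1, msdu_id = 0x1234;

	/* Pack, as done when filling the TX descriptor. */
	uint32_t desc_id = FIELD_PREP32(DESC_ID_MAC_ID, pdev_idx) |
			   FIELD_PREP32(DESC_ID_MSDU_ID, msdu_id);

	/* Unpack, as a completion handler would. */
	printf("mac_id=%u msdu_id=0x%x\n",
	       FIELD_GET32(DESC_ID_MAC_ID, desc_id),
	       FIELD_GET32(DESC_ID_MSDU_ID, desc_id));
	return 0;
}
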
302 spin_lock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_free_txbuf()
303 msdu = idr_remove(&tx_ring->txbuf_idr, msdu_id); in ath11k_dp_tx_free_txbuf()
304 spin_unlock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_free_txbuf()
314 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_free_txbuf()
317 ar = ab->pdevs[mac_id].ar; in ath11k_dp_tx_free_txbuf()
318 if (atomic_dec_and_test(&ar->dp.num_tx_pending)) in ath11k_dp_tx_free_txbuf()
319 wake_up(&ar->dp.tx_empty_waitq); in ath11k_dp_tx_free_txbuf()
334 spin_lock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_htt_tx_complete_buf()
335 msdu = idr_remove(&tx_ring->txbuf_idr, ts->msdu_id); in ath11k_dp_tx_htt_tx_complete_buf()
336 spin_unlock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_htt_tx_complete_buf()
340 ts->msdu_id); in ath11k_dp_tx_htt_tx_complete_buf()
347 ar = skb_cb->ar; in ath11k_dp_tx_htt_tx_complete_buf()
349 if (atomic_dec_and_test(&ar->dp.num_tx_pending)) in ath11k_dp_tx_htt_tx_complete_buf()
350 wake_up(&ar->dp.tx_empty_waitq); in ath11k_dp_tx_htt_tx_complete_buf()
352 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_htt_tx_complete_buf()
354 if (!skb_cb->vif) { in ath11k_dp_tx_htt_tx_complete_buf()
359 memset(&info->status, 0, sizeof(info->status)); in ath11k_dp_tx_htt_tx_complete_buf()
361 if (ts->acked) { in ath11k_dp_tx_htt_tx_complete_buf()
362 if (!(info->flags & IEEE80211_TX_CTL_NO_ACK)) { in ath11k_dp_tx_htt_tx_complete_buf()
363 info->flags |= IEEE80211_TX_STAT_ACK; in ath11k_dp_tx_htt_tx_complete_buf()
364 info->status.ack_signal = ATH11K_DEFAULT_NOISE_FLOOR + in ath11k_dp_tx_htt_tx_complete_buf()
365 ts->ack_rssi; in ath11k_dp_tx_htt_tx_complete_buf()
366 info->status.flags |= in ath11k_dp_tx_htt_tx_complete_buf()
369 info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in ath11k_dp_tx_htt_tx_complete_buf()
373 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
374 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_htt_tx_complete_buf()
375 if (!peer || !peer->sta) { in ath11k_dp_tx_htt_tx_complete_buf()
378 ts->peer_id); in ath11k_dp_tx_htt_tx_complete_buf()
379 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
383 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
385 status.sta = peer->sta; in ath11k_dp_tx_htt_tx_complete_buf()
389 ieee80211_tx_status_ext(ar->hw, &status); in ath11k_dp_tx_htt_tx_complete_buf()
412 status_desc->info0); in ath11k_dp_tx_process_htt_tx_complete()
420 status_desc->info1); in ath11k_dp_tx_process_htt_tx_complete()
422 if (FIELD_GET(HTT_TX_WBM_COMP_INFO2_VALID, status_desc->info2)) in ath11k_dp_tx_process_htt_tx_complete()
424 status_desc->info2); in ath11k_dp_tx_process_htt_tx_complete()
450 struct ath11k_per_peer_tx_stats *peer_stats = &ar->cached_stats; in ath11k_dp_tx_cache_peer_stats()
452 if (ts->try_cnt > 1) { in ath11k_dp_tx_cache_peer_stats()
453 peer_stats->retry_pkts += ts->try_cnt - 1; in ath11k_dp_tx_cache_peer_stats()
454 peer_stats->retry_bytes += (ts->try_cnt - 1) * msdu->len; in ath11k_dp_tx_cache_peer_stats()
456 if (ts->status != HAL_WBM_TQM_REL_REASON_FRAME_ACKED) { in ath11k_dp_tx_cache_peer_stats()
457 peer_stats->failed_pkts += 1; in ath11k_dp_tx_cache_peer_stats()
458 peer_stats->failed_bytes += msdu->len; in ath11k_dp_tx_cache_peer_stats()
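
Worked numerically from the accounting above (values illustrative): an MSDU of 1500 bytes reported with try_cnt = 3 adds 2 to retry_pkts and 3000 to retry_bytes, and if its release reason is anything other than HAL_WBM_TQM_REL_REASON_FRAME_ACKED it additionally adds 1 to failed_pkts and 1500 to failed_bytes.
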
465 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_update_txcompl()
466 struct ath11k_per_peer_tx_stats *peer_stats = &ar->cached_stats; in ath11k_dp_tx_update_txcompl()
474 u8 mcs, rate_idx = 0, ofdma; in ath11k_dp_tx_update_txcompl() local
477 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_update_txcompl()
478 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_update_txcompl()
479 if (!peer || !peer->sta) { in ath11k_dp_tx_update_txcompl()
481 "failed to find the peer by id %u\n", ts->peer_id); in ath11k_dp_tx_update_txcompl()
485 sta = peer->sta; in ath11k_dp_tx_update_txcompl()
486 arsta = (struct ath11k_sta *)sta->drv_priv; in ath11k_dp_tx_update_txcompl()
488 memset(&arsta->txrate, 0, sizeof(arsta->txrate)); in ath11k_dp_tx_update_txcompl()
490 ts->rate_stats); in ath11k_dp_tx_update_txcompl()
491 mcs = FIELD_GET(HAL_TX_RATE_STATS_INFO0_MCS, in ath11k_dp_tx_update_txcompl()
492 ts->rate_stats); in ath11k_dp_tx_update_txcompl()
494 ts->rate_stats); in ath11k_dp_tx_update_txcompl()
495 bw = FIELD_GET(HAL_TX_RATE_STATS_INFO0_BW, ts->rate_stats); in ath11k_dp_tx_update_txcompl()
496 ru_tones = FIELD_GET(HAL_TX_RATE_STATS_INFO0_TONES_IN_RU, ts->rate_stats); in ath11k_dp_tx_update_txcompl()
497 ofdma = FIELD_GET(HAL_TX_RATE_STATS_INFO0_OFDMA_TX, ts->rate_stats); in ath11k_dp_tx_update_txcompl()
499 /* Prefer the real NSS value arsta->last_txrate.nss when it is valid, in ath11k_dp_tx_update_txcompl()
502 if (arsta->last_txrate.nss) in ath11k_dp_tx_update_txcompl()
503 arsta->txrate.nss = arsta->last_txrate.nss; in ath11k_dp_tx_update_txcompl()
505 arsta->txrate.nss = arsta->peer_nss; in ath11k_dp_tx_update_txcompl()
509 ret = ath11k_mac_hw_ratecode_to_legacy_rate(mcs, in ath11k_dp_tx_update_txcompl()
515 arsta->txrate.legacy = rate; in ath11k_dp_tx_update_txcompl()
517 if (mcs > 7) { in ath11k_dp_tx_update_txcompl()
518 ath11k_warn(ab, "Invalid HT mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
522 if (arsta->txrate.nss != 0) in ath11k_dp_tx_update_txcompl()
523 arsta->txrate.mcs = mcs + 8 * (arsta->txrate.nss - 1); in ath11k_dp_tx_update_txcompl()
524 arsta->txrate.flags = RATE_INFO_FLAGS_MCS; in ath11k_dp_tx_update_txcompl()
526 arsta->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI; in ath11k_dp_tx_update_txcompl()
528 if (mcs > 9) { in ath11k_dp_tx_update_txcompl()
529 ath11k_warn(ab, "Invalid VHT mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
533 arsta->txrate.mcs = mcs; in ath11k_dp_tx_update_txcompl()
534 arsta->txrate.flags = RATE_INFO_FLAGS_VHT_MCS; in ath11k_dp_tx_update_txcompl()
536 arsta->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI; in ath11k_dp_tx_update_txcompl()
538 if (mcs > 11) { in ath11k_dp_tx_update_txcompl()
539 ath11k_warn(ab, "Invalid HE mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
543 arsta->txrate.mcs = mcs; in ath11k_dp_tx_update_txcompl()
544 arsta->txrate.flags = RATE_INFO_FLAGS_HE_MCS; in ath11k_dp_tx_update_txcompl()
545 arsta->txrate.he_gi = ath11k_mac_he_gi_to_nl80211_he_gi(sgi); in ath11k_dp_tx_update_txcompl()
548 arsta->txrate.bw = ath11k_mac_bw_to_mac80211_bw(bw); in ath11k_dp_tx_update_txcompl()
550 arsta->txrate.bw = RATE_INFO_BW_HE_RU; in ath11k_dp_tx_update_txcompl()
551 arsta->txrate.he_ru_alloc = in ath11k_dp_tx_update_txcompl()
559 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_update_txcompl()
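
In the HT branch above, the reported per-stream MCS (0..7) and the stream count are folded into the single flat HT MCS index that cfg80211 expects, i.e. mcs + 8 * (nss - 1). A standalone illustration of that mapping follows; the function name and bounds checks are mine, not the driver's.

/* ht_mcs_sketch.c - illustrates mcs + 8 * (nss - 1), as used above. */
#include <stdio.h>

/* Combine a per-stream HT MCS (0..7) and stream count into the flat
 * HT MCS index (0..31) reported to userspace; -1 on bad input.
 */
static int ht_rate_index(int mcs, int nss)
{
	if (mcs < 0 || mcs > 7 || nss < 1 || nss > 4)
		return -1;
	return mcs + 8 * (nss - 1);
}

int main(void)
{
	/* MCS 7 at 2 spatial streams is HT MCS 15. */
	printf("mcs=7 nss=2 -> HT MCS %d\n", ht_rate_index(7, 2));
	/* MCS 9 is only valid for VHT/HE, so the HT branch rejects it. */
	printf("mcs=9 nss=1 -> %d (rejected)\n", ht_rate_index(9, 1));
	return 0;
}
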
568 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_complete_msdu()
575 if (WARN_ON_ONCE(ts->buf_rel_source != HAL_WBM_REL_SRC_MODULE_TQM)) { in ath11k_dp_tx_complete_msdu()
582 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_complete_msdu()
584 if (unlikely(!rcu_access_pointer(ab->pdevs_active[ar->pdev_idx]))) { in ath11k_dp_tx_complete_msdu()
589 if (unlikely(!skb_cb->vif)) { in ath11k_dp_tx_complete_msdu()
595 memset(&info->status, 0, sizeof(info->status)); in ath11k_dp_tx_complete_msdu()
598 info->status.rates[0].idx = -1; in ath11k_dp_tx_complete_msdu()
600 if (ts->status == HAL_WBM_TQM_REL_REASON_FRAME_ACKED && in ath11k_dp_tx_complete_msdu()
601 !(info->flags & IEEE80211_TX_CTL_NO_ACK)) { in ath11k_dp_tx_complete_msdu()
602 info->flags |= IEEE80211_TX_STAT_ACK; in ath11k_dp_tx_complete_msdu()
603 info->status.ack_signal = ATH11K_DEFAULT_NOISE_FLOOR + in ath11k_dp_tx_complete_msdu()
604 ts->ack_rssi; in ath11k_dp_tx_complete_msdu()
605 info->status.flags |= IEEE80211_TX_STATUS_ACK_SIGNAL_VALID; in ath11k_dp_tx_complete_msdu()
608 if (ts->status == HAL_WBM_TQM_REL_REASON_CMD_REMOVE_TX && in ath11k_dp_tx_complete_msdu()
609 (info->flags & IEEE80211_TX_CTL_NO_ACK)) in ath11k_dp_tx_complete_msdu()
610 info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in ath11k_dp_tx_complete_msdu()
613 ab->hw_params.single_pdev_only) { in ath11k_dp_tx_complete_msdu()
614 if (ts->flags & HAL_TX_STATUS_FLAGS_FIRST_MSDU) { in ath11k_dp_tx_complete_msdu()
615 if (ar->last_ppdu_id == 0) { in ath11k_dp_tx_complete_msdu()
616 ar->last_ppdu_id = ts->ppdu_id; in ath11k_dp_tx_complete_msdu()
617 } else if (ar->last_ppdu_id == ts->ppdu_id || in ath11k_dp_tx_complete_msdu()
618 ar->cached_ppdu_id == ar->last_ppdu_id) { in ath11k_dp_tx_complete_msdu()
619 ar->cached_ppdu_id = ar->last_ppdu_id; in ath11k_dp_tx_complete_msdu()
620 ar->cached_stats.is_ampdu = true; in ath11k_dp_tx_complete_msdu()
622 memset(&ar->cached_stats, 0, in ath11k_dp_tx_complete_msdu()
625 ar->cached_stats.is_ampdu = false; in ath11k_dp_tx_complete_msdu()
627 memset(&ar->cached_stats, 0, in ath11k_dp_tx_complete_msdu()
630 ar->last_ppdu_id = ts->ppdu_id; in ath11k_dp_tx_complete_msdu()
636 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
637 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_complete_msdu()
638 if (!peer || !peer->sta) { in ath11k_dp_tx_complete_msdu()
641 ts->peer_id); in ath11k_dp_tx_complete_msdu()
642 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
646 arsta = (struct ath11k_sta *)peer->sta->drv_priv; in ath11k_dp_tx_complete_msdu()
647 status.sta = peer->sta; in ath11k_dp_tx_complete_msdu()
650 rate = arsta->last_txrate; in ath11k_dp_tx_complete_msdu()
655 status.rates = &status_rate; in ath11k_dp_tx_complete_msdu()
658 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
660 ieee80211_tx_status_ext(ar->hw, &status); in ath11k_dp_tx_complete_msdu()
667 ts->buf_rel_source = in ath11k_dp_tx_status_parse()
668 FIELD_GET(HAL_WBM_RELEASE_INFO0_REL_SRC_MODULE, desc->info0); in ath11k_dp_tx_status_parse()
669 if (unlikely(ts->buf_rel_source != HAL_WBM_REL_SRC_MODULE_FW && in ath11k_dp_tx_status_parse()
670 ts->buf_rel_source != HAL_WBM_REL_SRC_MODULE_TQM)) in ath11k_dp_tx_status_parse()
673 if (unlikely(ts->buf_rel_source == HAL_WBM_REL_SRC_MODULE_FW)) in ath11k_dp_tx_status_parse()
676 ts->status = FIELD_GET(HAL_WBM_RELEASE_INFO0_TQM_RELEASE_REASON, in ath11k_dp_tx_status_parse()
677 desc->info0); in ath11k_dp_tx_status_parse()
678 ts->ppdu_id = FIELD_GET(HAL_WBM_RELEASE_INFO1_TQM_STATUS_NUMBER, in ath11k_dp_tx_status_parse()
679 desc->info1); in ath11k_dp_tx_status_parse()
680 ts->try_cnt = FIELD_GET(HAL_WBM_RELEASE_INFO1_TRANSMIT_COUNT, in ath11k_dp_tx_status_parse()
681 desc->info1); in ath11k_dp_tx_status_parse()
682 ts->ack_rssi = FIELD_GET(HAL_WBM_RELEASE_INFO2_ACK_FRAME_RSSI, in ath11k_dp_tx_status_parse()
683 desc->info2); in ath11k_dp_tx_status_parse()
684 if (desc->info2 & HAL_WBM_RELEASE_INFO2_FIRST_MSDU) in ath11k_dp_tx_status_parse()
685 ts->flags |= HAL_TX_STATUS_FLAGS_FIRST_MSDU; in ath11k_dp_tx_status_parse()
686 ts->peer_id = FIELD_GET(HAL_WBM_RELEASE_INFO3_PEER_ID, desc->info3); in ath11k_dp_tx_status_parse()
687 ts->tid = FIELD_GET(HAL_WBM_RELEASE_INFO3_TID, desc->info3); in ath11k_dp_tx_status_parse()
688 if (desc->rate_stats.info0 & HAL_TX_RATE_STATS_INFO0_VALID) in ath11k_dp_tx_status_parse()
689 ts->rate_stats = desc->rate_stats.info0; in ath11k_dp_tx_status_parse()
691 ts->rate_stats = 0; in ath11k_dp_tx_status_parse()
697 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_completion_handler()
698 int hal_ring_id = dp->tx_ring[ring_id].tcl_comp_ring.ring_id; in ath11k_dp_tx_completion_handler()
699 struct hal_srng *status_ring = &ab->hal.srng_list[hal_ring_id]; in ath11k_dp_tx_completion_handler()
702 struct dp_tx_ring *tx_ring = &dp->tx_ring[ring_id]; in ath11k_dp_tx_completion_handler()
707 spin_lock_bh(&status_ring->lock); in ath11k_dp_tx_completion_handler()
711 while ((ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_head) != in ath11k_dp_tx_completion_handler()
712 tx_ring->tx_status_tail) && in ath11k_dp_tx_completion_handler()
714 memcpy(&tx_ring->tx_status[tx_ring->tx_status_head], in ath11k_dp_tx_completion_handler()
716 tx_ring->tx_status_head = in ath11k_dp_tx_completion_handler()
717 ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_head); in ath11k_dp_tx_completion_handler()
721 (ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_head) == in ath11k_dp_tx_completion_handler()
722 tx_ring->tx_status_tail))) { in ath11k_dp_tx_completion_handler()
729 spin_unlock_bh(&status_ring->lock); in ath11k_dp_tx_completion_handler()
731 while (ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_tail) != tx_ring->tx_status_head) { in ath11k_dp_tx_completion_handler()
735 tx_ring->tx_status_tail = in ath11k_dp_tx_completion_handler()
736 ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_tail); in ath11k_dp_tx_completion_handler()
737 tx_status = &tx_ring->tx_status[tx_ring->tx_status_tail]; in ath11k_dp_tx_completion_handler()
741 tx_status->buf_addr_info.info1); in ath11k_dp_tx_completion_handler()
753 spin_lock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_completion_handler()
754 msdu = idr_remove(&tx_ring->txbuf_idr, msdu_id); in ath11k_dp_tx_completion_handler()
758 spin_unlock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_completion_handler()
762 spin_unlock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_completion_handler()
764 ar = ab->pdevs[mac_id].ar; in ath11k_dp_tx_completion_handler()
766 if (atomic_dec_and_test(&ar->dp.num_tx_pending)) in ath11k_dp_tx_completion_handler()
767 wake_up(&ar->dp.tx_empty_waitq); in ath11k_dp_tx_completion_handler()
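
The completion handler above drains the hardware ring in two phases: it first copies descriptors into the pre-allocated tx_status[] array while advancing a head index with ATH11K_TX_COMPL_NEXT(), then drops the SRNG lock and consumes entries by advancing the tail. The sketch below shows only that head/tail circular-buffer discipline; the size, element type and exact index handling are simplified and not the driver's.

/* compl_ring_sketch.c - head/tail circular-buffer pattern (illustrative). */
#include <stdio.h>

#define RING_SZ 8                        /* must be a power of two */
#define RING_NEXT(i) (((i) + 1) & (RING_SZ - 1))

static int ring[RING_SZ];
static unsigned int head, tail;          /* head: producer, tail: consumer */

static int ring_push(int v)
{
	if (RING_NEXT(head) == tail)     /* full: one slot kept empty */
		return -1;
	ring[head] = v;
	head = RING_NEXT(head);
	return 0;
}

static int ring_pop(int *v)
{
	if (tail == head)                /* empty */
		return -1;
	*v = ring[tail];
	tail = RING_NEXT(tail);
	return 0;
}

int main(void)
{
	int v;

	/* Phase 1: copy "completions" in while the ring lock is held. */
	for (int i = 0; i < 5; i++)
		ring_push(i);

	/* Phase 2: drain them after dropping the lock. */
	while (!ring_pop(&v))
		printf("completed msdu %d\n", v);
	return 0;
}
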
779 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_send_reo_cmd()
784 if (test_bit(ATH11K_FLAG_CRASH_FLUSH, &ab->dev_flags)) in ath11k_dp_tx_send_reo_cmd()
785 return -ESHUTDOWN; in ath11k_dp_tx_send_reo_cmd()
787 cmd_ring = &ab->hal.srng_list[dp->reo_cmd_ring.ring_id]; in ath11k_dp_tx_send_reo_cmd()
796 return -EINVAL; in ath11k_dp_tx_send_reo_cmd()
808 return -ENOMEM; in ath11k_dp_tx_send_reo_cmd()
810 memcpy(&dp_cmd->data, rx_tid, sizeof(struct dp_rx_tid)); in ath11k_dp_tx_send_reo_cmd()
811 dp_cmd->cmd_num = cmd_num; in ath11k_dp_tx_send_reo_cmd()
812 dp_cmd->handler = cb; in ath11k_dp_tx_send_reo_cmd()
814 spin_lock_bh(&dp->reo_cmd_lock); in ath11k_dp_tx_send_reo_cmd()
815 list_add_tail(&dp_cmd->list, &dp->reo_cmd_list); in ath11k_dp_tx_send_reo_cmd()
816 spin_unlock_bh(&dp->reo_cmd_lock); in ath11k_dp_tx_send_reo_cmd()
838 if (!ab->hw_params.rx_mac_buf_ring) { in ath11k_dp_tx_get_ring_id_type()
843 ret = -EINVAL; in ath11k_dp_tx_get_ring_id_type()
879 ret = -EINVAL; in ath11k_dp_tx_get_ring_id_type()
888 struct hal_srng *srng = &ab->hal.srng_list[ring_id]; in ath11k_dp_tx_htt_srng_setup()
900 return -ENOMEM; in ath11k_dp_tx_htt_srng_setup()
915 cmd = (struct htt_srng_setup_cmd *)skb->data; in ath11k_dp_tx_htt_srng_setup()
916 cmd->info0 = FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_MSG_TYPE, in ath11k_dp_tx_htt_srng_setup()
920 cmd->info0 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_PDEV_ID, in ath11k_dp_tx_htt_srng_setup()
923 cmd->info0 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_PDEV_ID, in ath11k_dp_tx_htt_srng_setup()
925 cmd->info0 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_RING_TYPE, in ath11k_dp_tx_htt_srng_setup()
927 cmd->info0 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_RING_ID, htt_ring_id); in ath11k_dp_tx_htt_srng_setup()
929 cmd->ring_base_addr_lo = params.ring_base_paddr & in ath11k_dp_tx_htt_srng_setup()
932 cmd->ring_base_addr_hi = (u64)params.ring_base_paddr >> in ath11k_dp_tx_htt_srng_setup()
942 cmd->info1 = FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO1_RING_ENTRY_SIZE, in ath11k_dp_tx_htt_srng_setup()
944 cmd->info1 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO1_RING_SIZE, in ath11k_dp_tx_htt_srng_setup()
946 cmd->info1 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO1_RING_FLAGS_MSI_SWAP, in ath11k_dp_tx_htt_srng_setup()
948 cmd->info1 |= FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
951 cmd->info1 |= FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
955 cmd->info1 |= HTT_SRNG_SETUP_CMD_INFO1_RING_LOOP_CNT_DIS; in ath11k_dp_tx_htt_srng_setup()
957 cmd->ring_head_off32_remote_addr_lo = hp_addr & HAL_ADDR_LSB_REG_MASK; in ath11k_dp_tx_htt_srng_setup()
958 cmd->ring_head_off32_remote_addr_hi = (u64)hp_addr >> in ath11k_dp_tx_htt_srng_setup()
961 cmd->ring_tail_off32_remote_addr_lo = tp_addr & HAL_ADDR_LSB_REG_MASK; in ath11k_dp_tx_htt_srng_setup()
962 cmd->ring_tail_off32_remote_addr_hi = (u64)tp_addr >> in ath11k_dp_tx_htt_srng_setup()
965 cmd->ring_msi_addr_lo = lower_32_bits(params.msi_addr); in ath11k_dp_tx_htt_srng_setup()
966 cmd->ring_msi_addr_hi = upper_32_bits(params.msi_addr); in ath11k_dp_tx_htt_srng_setup()
967 cmd->msi_data = params.msi_data; in ath11k_dp_tx_htt_srng_setup()
969 cmd->intr_info = FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
972 cmd->intr_info |= FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
976 cmd->info2 = 0; in ath11k_dp_tx_htt_srng_setup()
978 cmd->info2 = FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
985 cmd->ring_msi_addr_lo, cmd->ring_msi_addr_hi, in ath11k_dp_tx_htt_srng_setup()
986 cmd->msi_data, ring_id, ring_type, cmd->intr_info, cmd->info2); in ath11k_dp_tx_htt_srng_setup()
988 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_srng_setup()
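
The HTT SRNG setup command above carries 64-bit ring base, head/tail pointer and MSI addresses as lo/hi 32-bit word pairs (lower_32_bits()/upper_32_bits() plus an explicit shift by the HAL address-MSB shift). A plain-C sketch of that split and its reassembly on the target side; the helper names and example address are illustrative.

/* addr_split_sketch.c - split a 64-bit DMA address into lo/hi command words. */
#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>

static uint32_t lo32(uint64_t a) { return (uint32_t)a; }
static uint32_t hi32(uint64_t a) { return (uint32_t)(a >> 32); }

int main(void)
{
	uint64_t ring_base = 0x0000000fedcba980ULL; /* example DMA address */

	uint32_t lo = lo32(ring_base);
	uint32_t hi = hi32(ring_base);

	/* The target reassembles the same 64-bit address from the two words. */
	uint64_t back = ((uint64_t)hi << 32) | lo;

	printf("lo=0x%08x hi=0x%08x back=0x%016" PRIx64 "\n", lo, hi, back);
	return 0;
}
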
1004 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1010 init_completion(&dp->htt_tgt_version_received); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1014 return -ENOMEM; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1017 cmd = (struct htt_ver_req_cmd *)skb->data; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1018 cmd->ver_reg_info = FIELD_PREP(HTT_VER_REQ_INFO_MSG_ID, in ath11k_dp_tx_htt_h2t_ver_req_msg()
1021 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1027 ret = wait_for_completion_timeout(&dp->htt_tgt_version_received, in ath11k_dp_tx_htt_h2t_ver_req_msg()
1031 return -ETIMEDOUT; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1034 if (dp->htt_tgt_ver_major != HTT_TARGET_VERSION_MAJOR) { in ath11k_dp_tx_htt_h2t_ver_req_msg()
1036 dp->htt_tgt_ver_major, HTT_TARGET_VERSION_MAJOR); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1037 return -ENOTSUPP; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1045 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1046 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1054 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1057 return -ENOMEM; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1060 cmd = (struct htt_ppdu_stats_cfg_cmd *)skb->data; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1061 cmd->msg = FIELD_PREP(HTT_PPDU_STATS_CFG_MSG_TYPE, in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1064 pdev_mask = 1 << (ar->pdev_idx + i); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1065 cmd->msg |= FIELD_PREP(HTT_PPDU_STATS_CFG_PDEV_ID, pdev_mask); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1066 cmd->msg |= FIELD_PREP(HTT_PPDU_STATS_CFG_TLV_TYPE_BITMASK, mask); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1068 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1084 struct hal_srng *srng = &ab->hal.srng_list[ring_id]; in ath11k_dp_tx_htt_rx_filter_setup()
1094 return -ENOMEM; in ath11k_dp_tx_htt_rx_filter_setup()
1106 cmd = (struct htt_rx_ring_selection_cfg_cmd *)skb->data; in ath11k_dp_tx_htt_rx_filter_setup()
1107 cmd->info0 = FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO0_MSG_TYPE, in ath11k_dp_tx_htt_rx_filter_setup()
1111 cmd->info0 |= in ath11k_dp_tx_htt_rx_filter_setup()
1115 cmd->info0 |= in ath11k_dp_tx_htt_rx_filter_setup()
1118 cmd->info0 |= FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO0_RING_ID, in ath11k_dp_tx_htt_rx_filter_setup()
1120 cmd->info0 |= FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO0_SS, in ath11k_dp_tx_htt_rx_filter_setup()
1122 cmd->info0 |= FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO0_PS, in ath11k_dp_tx_htt_rx_filter_setup()
1125 cmd->info1 = FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO1_BUF_SIZE, in ath11k_dp_tx_htt_rx_filter_setup()
1127 cmd->pkt_type_en_flags0 = tlv_filter->pkt_filter_flags0; in ath11k_dp_tx_htt_rx_filter_setup()
1128 cmd->pkt_type_en_flags1 = tlv_filter->pkt_filter_flags1; in ath11k_dp_tx_htt_rx_filter_setup()
1129 cmd->pkt_type_en_flags2 = tlv_filter->pkt_filter_flags2; in ath11k_dp_tx_htt_rx_filter_setup()
1130 cmd->pkt_type_en_flags3 = tlv_filter->pkt_filter_flags3; in ath11k_dp_tx_htt_rx_filter_setup()
1131 cmd->rx_filter_tlv = tlv_filter->rx_filter; in ath11k_dp_tx_htt_rx_filter_setup()
1133 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_rx_filter_setup()
1150 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1151 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1160 return -ENOMEM; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1164 cmd = (struct htt_ext_stats_cfg_cmd *)skb->data; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1166 cmd->hdr.msg_type = HTT_H2T_MSG_TYPE_EXT_STATS_CFG; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1168 if (ab->hw_params.single_pdev_only) in ath11k_dp_tx_htt_h2t_ext_stats_req()
1171 pdev_id = ar->pdev->pdev_id; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1173 cmd->hdr.pdev_mask = 1 << pdev_id; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1175 cmd->hdr.stats_type = type; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1176 cmd->cfg_param0 = cfg_params->cfg0; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1177 cmd->cfg_param1 = cfg_params->cfg1; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1178 cmd->cfg_param2 = cfg_params->cfg2; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1179 cmd->cfg_param3 = cfg_params->cfg3; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1180 cmd->cookie_lsb = lower_32_bits(cookie); in ath11k_dp_tx_htt_h2t_ext_stats_req()
1181 cmd->cookie_msb = upper_32_bits(cookie); in ath11k_dp_tx_htt_h2t_ext_stats_req()
1183 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ext_stats_req()
1196 struct ath11k_pdev_dp *dp = &ar->dp; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1197 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1201 if (ab->hw_params.full_monitor_mode) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1203 dp->mac_id, !reset); in ath11k_dp_tx_htt_monitor_mode_ring_config()
1210 ring_id = dp->rxdma_mon_buf_ring.refill_buf_ring.ring_id; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1230 if (ab->hw_params.rxdma1_enable) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1231 ret = ath11k_dp_tx_htt_rx_filter_setup(ar->ab, ring_id, dp->mac_id, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1237 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1238 ring_id = dp->rx_mac_buf_ring[i].ring_id; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1239 ret = ath11k_dp_tx_htt_rx_filter_setup(ar->ab, ring_id, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1240 dp->mac_id + i, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1250 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1251 ring_id = dp->rx_mon_status_refill_ring[i].refill_buf_ring.ring_id; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1263 dp->mac_id + i, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1269 if (!ar->ab->hw_params.rxdma1_enable) in ath11k_dp_tx_htt_monitor_mode_ring_config()
1270 mod_timer(&ar->ab->mon_reap_timer, jiffies + in ath11k_dp_tx_htt_monitor_mode_ring_config()
1285 return -ENOMEM; in ath11k_dp_tx_htt_rx_full_mon_setup()
1288 cmd = (struct htt_rx_full_monitor_mode_cfg_cmd *)skb->data; in ath11k_dp_tx_htt_rx_full_mon_setup()
1290 cmd->info0 = FIELD_PREP(HTT_RX_FULL_MON_MODE_CFG_CMD_INFO0_MSG_TYPE, in ath11k_dp_tx_htt_rx_full_mon_setup()
1293 cmd->info0 |= FIELD_PREP(HTT_RX_FULL_MON_MODE_CFG_CMD_INFO0_PDEV_ID, mac_id); in ath11k_dp_tx_htt_rx_full_mon_setup()
1295 cmd->cfg = HTT_RX_FULL_MON_MODE_CFG_CMD_CFG_ENABLE | in ath11k_dp_tx_htt_rx_full_mon_setup()
1299 cmd->cfg |= HTT_RX_FULL_MON_MODE_CFG_CMD_CFG_ZERO_MPDUS_END | in ath11k_dp_tx_htt_rx_full_mon_setup()
1303 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_rx_full_mon_setup()