Lines Matching +full:rates +full:- +full:mcs

1 // SPDX-License-Identifier: BSD-3-Clause-Clear
3 * Copyright (c) 2018-2019 The Linux Foundation. All rights reserved.
4 * Copyright (c) 2021-2024 Qualcomm Innovation Center, Inc. All rights reserved.
20 struct ath11k_base *ab = arvif->ar->ab; in ath11k_dp_tx_get_encap_type()
22 if (test_bit(ATH11K_FLAG_RAW_MODE, &ab->dev_flags)) in ath11k_dp_tx_get_encap_type()
25 if (tx_info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP) in ath11k_dp_tx_get_encap_type()
33 struct ieee80211_hdr *hdr = (void *)skb->data; in ath11k_dp_tx_encap_nwifi()
36 if (!ieee80211_is_data_qos(hdr->frame_control)) in ath11k_dp_tx_encap_nwifi()
40 memmove(skb->data + IEEE80211_QOS_CTL_LEN, in ath11k_dp_tx_encap_nwifi()
41 skb->data, (void *)qos_ctl - (void *)skb->data); in ath11k_dp_tx_encap_nwifi()
44 hdr = (void *)skb->data; in ath11k_dp_tx_encap_nwifi()
45 hdr->frame_control &= ~__cpu_to_le16(IEEE80211_STYPE_QOS_DATA); in ath11k_dp_tx_encap_nwifi()
50 struct ieee80211_hdr *hdr = (void *)skb->data; in ath11k_dp_tx_get_tid()
53 if (cb->flags & ATH11K_SKB_HW_80211_ENCAP) in ath11k_dp_tx_get_tid()
54 return skb->priority & IEEE80211_QOS_CTL_TID_MASK; in ath11k_dp_tx_get_tid()
55 else if (!ieee80211_is_data_qos(hdr->frame_control)) in ath11k_dp_tx_get_tid()
58 return skb->priority & IEEE80211_QOS_CTL_TID_MASK; in ath11k_dp_tx_get_tid()
86 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx()
87 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx()
92 struct ieee80211_hdr *hdr = (void *)skb->data; in ath11k_dp_tx()
102 if (unlikely(test_bit(ATH11K_FLAG_CRASH_FLUSH, &ar->ab->dev_flags))) in ath11k_dp_tx()
103 return -ESHUTDOWN; in ath11k_dp_tx()
105 if (unlikely(!(info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP) && in ath11k_dp_tx()
106 !ieee80211_is_data(hdr->frame_control))) in ath11k_dp_tx()
107 return -EOPNOTSUPP; in ath11k_dp_tx()
109 pool_id = skb_get_queue_mapping(skb) & (ATH11K_HW_MAX_QUEUES - 1); in ath11k_dp_tx()
111 ring_selector = ab->hw_params.hw_ops->get_ring_selector(skb); in ath11k_dp_tx()
116 ti.ring_id = ring_selector % ab->hw_params.max_tx_ring; in ath11k_dp_tx()
117 ti.rbm_id = ab->hw_params.hal_params->tcl2wbm_rbm_map[ti.ring_id].rbm_id; in ath11k_dp_tx()
121 tx_ring = &dp->tx_ring[ti.ring_id]; in ath11k_dp_tx()
123 spin_lock_bh(&tx_ring->tx_idr_lock); in ath11k_dp_tx()
124 ret = idr_alloc(&tx_ring->txbuf_idr, skb, 0, in ath11k_dp_tx()
125 DP_TX_IDR_SIZE - 1, GFP_ATOMIC); in ath11k_dp_tx()
126 spin_unlock_bh(&tx_ring->tx_idr_lock); in ath11k_dp_tx()
129 if (ring_map == (BIT(ab->hw_params.max_tx_ring) - 1) || in ath11k_dp_tx()
130 !ab->hw_params.tcl_ring_retry) { in ath11k_dp_tx()
131 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
132 return -ENOSPC; in ath11k_dp_tx()
140 ti.desc_id = FIELD_PREP(DP_TX_DESC_ID_MAC_ID, ar->pdev_idx) | in ath11k_dp_tx()
145 if (ieee80211_has_a4(hdr->frame_control) && in ath11k_dp_tx()
146 is_multicast_ether_addr(hdr->addr3) && arsta && in ath11k_dp_tx()
147 arsta->use_4addr_set) { in ath11k_dp_tx()
148 ti.meta_data_flags = arsta->tcl_metadata; in ath11k_dp_tx()
151 ti.meta_data_flags = arvif->tcl_metadata; in ath11k_dp_tx()
155 if (skb_cb->flags & ATH11K_SKB_CIPHER_SET) { in ath11k_dp_tx()
157 ath11k_dp_tx_get_encrypt_type(skb_cb->cipher); in ath11k_dp_tx()
159 if (ieee80211_has_protected(hdr->frame_control)) in ath11k_dp_tx()
166 ti.addr_search_flags = arvif->hal_addr_search_flags; in ath11k_dp_tx()
167 ti.search_type = arvif->search_type; in ath11k_dp_tx()
170 ti.lmac_id = ar->lmac_id; in ath11k_dp_tx()
171 ti.bss_ast_hash = arvif->ast_hash; in ath11k_dp_tx()
172 ti.bss_ast_idx = arvif->ast_idx; in ath11k_dp_tx()
175 if (likely(skb->ip_summed == CHECKSUM_PARTIAL && in ath11k_dp_tx()
184 if (ieee80211_vif_is_mesh(arvif->vif)) in ath11k_dp_tx()
196 if (!test_bit(ATH11K_FLAG_RAW_MODE, &ab->dev_flags)) { in ath11k_dp_tx()
197 ret = -EINVAL; in ath11k_dp_tx()
207 ret = -EINVAL; in ath11k_dp_tx()
208 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
212 ti.paddr = dma_map_single(ab->dev, skb->data, skb->len, DMA_TO_DEVICE); in ath11k_dp_tx()
213 if (unlikely(dma_mapping_error(ab->dev, ti.paddr))) { in ath11k_dp_tx()
214 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
216 ret = -ENOMEM; in ath11k_dp_tx()
220 ti.data_len = skb->len; in ath11k_dp_tx()
221 skb_cb->paddr = ti.paddr; in ath11k_dp_tx()
222 skb_cb->vif = arvif->vif; in ath11k_dp_tx()
223 skb_cb->ar = ar; in ath11k_dp_tx()
225 hal_ring_id = tx_ring->tcl_data_ring.ring_id; in ath11k_dp_tx()
226 tcl_ring = &ab->hal.srng_list[hal_ring_id]; in ath11k_dp_tx()
228 spin_lock_bh(&tcl_ring->lock); in ath11k_dp_tx()
238 ab->soc_stats.tx_err.desc_na[ti.ring_id]++; in ath11k_dp_tx()
239 spin_unlock_bh(&tcl_ring->lock); in ath11k_dp_tx()
240 ret = -ENOMEM; in ath11k_dp_tx()
247 if (unlikely(ring_map != (BIT(ab->hw_params.max_tx_ring)) - 1) && in ath11k_dp_tx()
248 ab->hw_params.tcl_ring_retry && ab->hw_params.max_tx_ring > 1) { in ath11k_dp_tx()
261 ath11k_dp_shadow_start_timer(ab, tcl_ring, &dp->tx_ring_timer[ti.ring_id]); in ath11k_dp_tx()
263 spin_unlock_bh(&tcl_ring->lock); in ath11k_dp_tx()
266 skb->data, skb->len); in ath11k_dp_tx()
268 atomic_inc(&ar->dp.num_tx_pending); in ath11k_dp_tx()
273 dma_unmap_single(ab->dev, ti.paddr, ti.data_len, DMA_TO_DEVICE); in ath11k_dp_tx()
276 spin_lock_bh(&tx_ring->tx_idr_lock); in ath11k_dp_tx()
277 idr_remove(&tx_ring->txbuf_idr, in ath11k_dp_tx()
279 spin_unlock_bh(&tx_ring->tx_idr_lock); in ath11k_dp_tx()
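
The ath11k_dp_tx() lines above reserve an IDR slot for each msdu handed to the hardware (idr_alloc() under tx_idr_lock) and release it on the error path (idr_remove()); the completion paths further down look the skb up again by the same id. A minimal sketch of that tracking pattern with the generic <linux/idr.h> API, using hypothetical names (demo_tx_ring, demo_tx_track(), demo_tx_untrack()) rather than driver symbols:

#include <linux/idr.h>
#include <linux/skbuff.h>
#include <linux/spinlock.h>

struct demo_tx_ring {
	struct idr buf_idr;		/* small integer id -> in-flight skb */
	spinlock_t idr_lock;		/* serialises allocate/remove */
};

static void demo_tx_ring_init(struct demo_tx_ring *ring)
{
	idr_init(&ring->buf_idr);
	spin_lock_init(&ring->idr_lock);
}

/* Reserve an id for @skb; the id is placed in the TX descriptor and echoed
 * back by the hardware in the completion, so the skb can be found again.
 */
static int demo_tx_track(struct demo_tx_ring *ring, struct sk_buff *skb, int max_id)
{
	int id;

	spin_lock_bh(&ring->idr_lock);
	id = idr_alloc(&ring->buf_idr, skb, 0, max_id, GFP_ATOMIC);
	spin_unlock_bh(&ring->idr_lock);

	return id;	/* negative errno (e.g. -ENOSPC) when no id is free */
}

/* Completion or error path: drop the mapping and get the skb back. */
static struct sk_buff *demo_tx_untrack(struct demo_tx_ring *ring, int id)
{
	struct sk_buff *skb;

	spin_lock_bh(&ring->idr_lock);
	skb = idr_remove(&ring->buf_idr, id);
	spin_unlock_bh(&ring->idr_lock);

	return skb;	/* NULL if @id was not being tracked */
}
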
295 spin_lock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_free_txbuf()
296 msdu = idr_remove(&tx_ring->txbuf_idr, msdu_id); in ath11k_dp_tx_free_txbuf()
297 spin_unlock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_free_txbuf()
307 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_free_txbuf()
310 ar = ab->pdevs[mac_id].ar; in ath11k_dp_tx_free_txbuf()
311 if (atomic_dec_and_test(&ar->dp.num_tx_pending)) in ath11k_dp_tx_free_txbuf()
312 wake_up(&ar->dp.tx_empty_waitq); in ath11k_dp_tx_free_txbuf()
327 spin_lock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_htt_tx_complete_buf()
328 msdu = idr_remove(&tx_ring->txbuf_idr, ts->msdu_id); in ath11k_dp_tx_htt_tx_complete_buf()
329 spin_unlock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_htt_tx_complete_buf()
333 ts->msdu_id); in ath11k_dp_tx_htt_tx_complete_buf()
340 ar = skb_cb->ar; in ath11k_dp_tx_htt_tx_complete_buf()
342 if (atomic_dec_and_test(&ar->dp.num_tx_pending)) in ath11k_dp_tx_htt_tx_complete_buf()
343 wake_up(&ar->dp.tx_empty_waitq); in ath11k_dp_tx_htt_tx_complete_buf()
345 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_htt_tx_complete_buf()
347 if (!skb_cb->vif) { in ath11k_dp_tx_htt_tx_complete_buf()
348 ieee80211_free_txskb(ar->hw, msdu); in ath11k_dp_tx_htt_tx_complete_buf()
352 memset(&info->status, 0, sizeof(info->status)); in ath11k_dp_tx_htt_tx_complete_buf()
354 if (ts->acked) { in ath11k_dp_tx_htt_tx_complete_buf()
355 if (!(info->flags & IEEE80211_TX_CTL_NO_ACK)) { in ath11k_dp_tx_htt_tx_complete_buf()
356 info->flags |= IEEE80211_TX_STAT_ACK; in ath11k_dp_tx_htt_tx_complete_buf()
357 info->status.ack_signal = ts->ack_rssi; in ath11k_dp_tx_htt_tx_complete_buf()
360 ab->wmi_ab.svc_map)) in ath11k_dp_tx_htt_tx_complete_buf()
361 info->status.ack_signal += ATH11K_DEFAULT_NOISE_FLOOR; in ath11k_dp_tx_htt_tx_complete_buf()
363 info->status.flags |= in ath11k_dp_tx_htt_tx_complete_buf()
366 info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in ath11k_dp_tx_htt_tx_complete_buf()
370 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
371 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_htt_tx_complete_buf()
372 if (!peer || !peer->sta) { in ath11k_dp_tx_htt_tx_complete_buf()
375 ts->peer_id); in ath11k_dp_tx_htt_tx_complete_buf()
376 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
377 ieee80211_free_txskb(ar->hw, msdu); in ath11k_dp_tx_htt_tx_complete_buf()
380 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
382 status.sta = peer->sta; in ath11k_dp_tx_htt_tx_complete_buf()
386 ieee80211_tx_status_ext(ar->hw, &status); in ath11k_dp_tx_htt_tx_complete_buf()
401 status_desc->info0); in ath11k_dp_tx_process_htt_tx_complete()
409 status_desc->info1); in ath11k_dp_tx_process_htt_tx_complete()
411 if (FIELD_GET(HTT_TX_WBM_COMP_INFO2_VALID, status_desc->info2)) in ath11k_dp_tx_process_htt_tx_complete()
413 status_desc->info2); in ath11k_dp_tx_process_htt_tx_complete()
439 struct ath11k_per_peer_tx_stats *peer_stats = &ar->cached_stats; in ath11k_dp_tx_cache_peer_stats()
441 if (ts->try_cnt > 1) { in ath11k_dp_tx_cache_peer_stats()
442 peer_stats->retry_pkts += ts->try_cnt - 1; in ath11k_dp_tx_cache_peer_stats()
443 peer_stats->retry_bytes += (ts->try_cnt - 1) * msdu->len; in ath11k_dp_tx_cache_peer_stats()
445 if (ts->status != HAL_WBM_TQM_REL_REASON_FRAME_ACKED) { in ath11k_dp_tx_cache_peer_stats()
446 peer_stats->failed_pkts += 1; in ath11k_dp_tx_cache_peer_stats()
447 peer_stats->failed_bytes += msdu->len; in ath11k_dp_tx_cache_peer_stats()
454 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_update_txcompl()
455 struct ath11k_per_peer_tx_stats *peer_stats = &ar->cached_stats; in ath11k_dp_tx_update_txcompl()
463 u8 mcs, rate_idx = 0, ofdma; in ath11k_dp_tx_update_txcompl() local
466 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_update_txcompl()
467 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_update_txcompl()
468 if (!peer || !peer->sta) { in ath11k_dp_tx_update_txcompl()
470 "failed to find the peer by id %u\n", ts->peer_id); in ath11k_dp_tx_update_txcompl()
474 sta = peer->sta; in ath11k_dp_tx_update_txcompl()
477 memset(&arsta->txrate, 0, sizeof(arsta->txrate)); in ath11k_dp_tx_update_txcompl()
479 ts->rate_stats); in ath11k_dp_tx_update_txcompl()
480 mcs = FIELD_GET(HAL_TX_RATE_STATS_INFO0_MCS, in ath11k_dp_tx_update_txcompl()
481 ts->rate_stats); in ath11k_dp_tx_update_txcompl()
483 ts->rate_stats); in ath11k_dp_tx_update_txcompl()
484 bw = FIELD_GET(HAL_TX_RATE_STATS_INFO0_BW, ts->rate_stats); in ath11k_dp_tx_update_txcompl()
485 ru_tones = FIELD_GET(HAL_TX_RATE_STATS_INFO0_TONES_IN_RU, ts->rate_stats); in ath11k_dp_tx_update_txcompl()
486 ofdma = FIELD_GET(HAL_TX_RATE_STATS_INFO0_OFDMA_TX, ts->rate_stats); in ath11k_dp_tx_update_txcompl()
488 /* Prefer the real NSS value arsta->last_txrate.nss, in ath11k_dp_tx_update_txcompl()
491 if (arsta->last_txrate.nss) in ath11k_dp_tx_update_txcompl()
492 arsta->txrate.nss = arsta->last_txrate.nss; in ath11k_dp_tx_update_txcompl()
494 arsta->txrate.nss = arsta->peer_nss; in ath11k_dp_tx_update_txcompl()
498 ret = ath11k_mac_hw_ratecode_to_legacy_rate(mcs, in ath11k_dp_tx_update_txcompl()
504 arsta->txrate.legacy = rate; in ath11k_dp_tx_update_txcompl()
506 if (mcs > 7) { in ath11k_dp_tx_update_txcompl()
507 ath11k_warn(ab, "Invalid HT mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
511 if (arsta->txrate.nss != 0) in ath11k_dp_tx_update_txcompl()
512 arsta->txrate.mcs = mcs + 8 * (arsta->txrate.nss - 1); in ath11k_dp_tx_update_txcompl()
513 arsta->txrate.flags = RATE_INFO_FLAGS_MCS; in ath11k_dp_tx_update_txcompl()
515 arsta->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI; in ath11k_dp_tx_update_txcompl()
517 if (mcs > 9) { in ath11k_dp_tx_update_txcompl()
518 ath11k_warn(ab, "Invalid VHT mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
522 arsta->txrate.mcs = mcs; in ath11k_dp_tx_update_txcompl()
523 arsta->txrate.flags = RATE_INFO_FLAGS_VHT_MCS; in ath11k_dp_tx_update_txcompl()
525 arsta->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI; in ath11k_dp_tx_update_txcompl()
527 if (mcs > 11) { in ath11k_dp_tx_update_txcompl()
528 ath11k_warn(ab, "Invalid HE mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
532 arsta->txrate.mcs = mcs; in ath11k_dp_tx_update_txcompl()
533 arsta->txrate.flags = RATE_INFO_FLAGS_HE_MCS; in ath11k_dp_tx_update_txcompl()
534 arsta->txrate.he_gi = ath11k_mac_he_gi_to_nl80211_he_gi(sgi); in ath11k_dp_tx_update_txcompl()
537 arsta->txrate.bw = ath11k_mac_bw_to_mac80211_bw(bw); in ath11k_dp_tx_update_txcompl()
539 arsta->txrate.bw = RATE_INFO_BW_HE_RU; in ath11k_dp_tx_update_txcompl()
540 arsta->txrate.he_ru_alloc = in ath11k_dp_tx_update_txcompl()
548 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_update_txcompl()
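
ath11k_dp_tx_update_txcompl() above unpacks the hardware rate word into a struct rate_info: HT folds the spatial-stream count into the MCS index (mcs + 8 * (nss - 1), per-stream MCS 0-7), while VHT (MCS 0-9) and HE (MCS 0-11) keep mcs and nss in separate fields. A small illustrative helper showing the same mapping, under a made-up name (demo_fill_txrate()) and not the driver's actual code path:

#include <linux/errno.h>
#include <net/cfg80211.h>

/* Translate a per-stream MCS plus NSS into a cfg80211 rate_info, mirroring
 * the HT vs. VHT/HE conventions seen in the completion handler above.
 */
static int demo_fill_txrate(struct rate_info *ri, u8 mcs, u8 nss,
			    bool ht, bool vht, bool he, bool sgi)
{
	if (!nss)
		return -EINVAL;

	ri->nss = nss;

	if (ht) {
		if (mcs > 7)
			return -EINVAL;
		/* HT: 8 MCS values per stream, e.g. MCS 5 at 2 NSS -> HT MCS 13 */
		ri->mcs = mcs + 8 * (nss - 1);
		ri->flags = RATE_INFO_FLAGS_MCS;
	} else if (vht) {
		if (mcs > 9)
			return -EINVAL;
		ri->mcs = mcs;
		ri->flags = RATE_INFO_FLAGS_VHT_MCS;
	} else if (he) {
		if (mcs > 11)
			return -EINVAL;
		ri->mcs = mcs;
		ri->flags = RATE_INFO_FLAGS_HE_MCS;
	} else {
		return -EINVAL;	/* legacy CCK/OFDM rates are handled separately */
	}

	/* HE signals its guard interval via he_gi rather than the SGI flag. */
	if (sgi && !he)
		ri->flags |= RATE_INFO_FLAGS_SHORT_GI;

	return 0;
}
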
557 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_complete_msdu()
564 if (WARN_ON_ONCE(ts->buf_rel_source != HAL_WBM_REL_SRC_MODULE_TQM)) { in ath11k_dp_tx_complete_msdu()
571 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_complete_msdu()
573 if (unlikely(!rcu_access_pointer(ab->pdevs_active[ar->pdev_idx]))) { in ath11k_dp_tx_complete_msdu()
574 ieee80211_free_txskb(ar->hw, msdu); in ath11k_dp_tx_complete_msdu()
578 if (unlikely(!skb_cb->vif)) { in ath11k_dp_tx_complete_msdu()
579 ieee80211_free_txskb(ar->hw, msdu); in ath11k_dp_tx_complete_msdu()
584 memset(&info->status, 0, sizeof(info->status)); in ath11k_dp_tx_complete_msdu()
587 info->status.rates[0].idx = -1; in ath11k_dp_tx_complete_msdu()
589 if (ts->status == HAL_WBM_TQM_REL_REASON_FRAME_ACKED && in ath11k_dp_tx_complete_msdu()
590 !(info->flags & IEEE80211_TX_CTL_NO_ACK)) { in ath11k_dp_tx_complete_msdu()
591 info->flags |= IEEE80211_TX_STAT_ACK; in ath11k_dp_tx_complete_msdu()
592 info->status.ack_signal = ts->ack_rssi; in ath11k_dp_tx_complete_msdu()
595 ab->wmi_ab.svc_map)) in ath11k_dp_tx_complete_msdu()
596 info->status.ack_signal += ATH11K_DEFAULT_NOISE_FLOOR; in ath11k_dp_tx_complete_msdu()
598 info->status.flags |= IEEE80211_TX_STATUS_ACK_SIGNAL_VALID; in ath11k_dp_tx_complete_msdu()
601 if (ts->status == HAL_WBM_TQM_REL_REASON_CMD_REMOVE_TX && in ath11k_dp_tx_complete_msdu()
602 (info->flags & IEEE80211_TX_CTL_NO_ACK)) in ath11k_dp_tx_complete_msdu()
603 info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in ath11k_dp_tx_complete_msdu()
606 ab->hw_params.single_pdev_only) { in ath11k_dp_tx_complete_msdu()
607 if (ts->flags & HAL_TX_STATUS_FLAGS_FIRST_MSDU) { in ath11k_dp_tx_complete_msdu()
608 if (ar->last_ppdu_id == 0) { in ath11k_dp_tx_complete_msdu()
609 ar->last_ppdu_id = ts->ppdu_id; in ath11k_dp_tx_complete_msdu()
610 } else if (ar->last_ppdu_id == ts->ppdu_id || in ath11k_dp_tx_complete_msdu()
611 ar->cached_ppdu_id == ar->last_ppdu_id) { in ath11k_dp_tx_complete_msdu()
612 ar->cached_ppdu_id = ar->last_ppdu_id; in ath11k_dp_tx_complete_msdu()
613 ar->cached_stats.is_ampdu = true; in ath11k_dp_tx_complete_msdu()
615 memset(&ar->cached_stats, 0, in ath11k_dp_tx_complete_msdu()
618 ar->cached_stats.is_ampdu = false; in ath11k_dp_tx_complete_msdu()
620 memset(&ar->cached_stats, 0, in ath11k_dp_tx_complete_msdu()
623 ar->last_ppdu_id = ts->ppdu_id; in ath11k_dp_tx_complete_msdu()
629 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
630 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_complete_msdu()
631 if (!peer || !peer->sta) { in ath11k_dp_tx_complete_msdu()
634 ts->peer_id); in ath11k_dp_tx_complete_msdu()
635 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
636 ieee80211_free_txskb(ar->hw, msdu); in ath11k_dp_tx_complete_msdu()
639 arsta = ath11k_sta_to_arsta(peer->sta); in ath11k_dp_tx_complete_msdu()
640 status.sta = peer->sta; in ath11k_dp_tx_complete_msdu()
643 rate = arsta->last_txrate; in ath11k_dp_tx_complete_msdu()
648 status.rates = &status_rate; in ath11k_dp_tx_complete_msdu()
651 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
653 ieee80211_tx_status_ext(ar->hw, &status); in ath11k_dp_tx_complete_msdu()
660 ts->buf_rel_source = in ath11k_dp_tx_status_parse()
661 FIELD_GET(HAL_WBM_RELEASE_INFO0_REL_SRC_MODULE, desc->info0); in ath11k_dp_tx_status_parse()
662 if (unlikely(ts->buf_rel_source != HAL_WBM_REL_SRC_MODULE_FW && in ath11k_dp_tx_status_parse()
663 ts->buf_rel_source != HAL_WBM_REL_SRC_MODULE_TQM)) in ath11k_dp_tx_status_parse()
666 if (unlikely(ts->buf_rel_source == HAL_WBM_REL_SRC_MODULE_FW)) in ath11k_dp_tx_status_parse()
669 ts->status = FIELD_GET(HAL_WBM_RELEASE_INFO0_TQM_RELEASE_REASON, in ath11k_dp_tx_status_parse()
670 desc->info0); in ath11k_dp_tx_status_parse()
671 ts->ppdu_id = FIELD_GET(HAL_WBM_RELEASE_INFO1_TQM_STATUS_NUMBER, in ath11k_dp_tx_status_parse()
672 desc->info1); in ath11k_dp_tx_status_parse()
673 ts->try_cnt = FIELD_GET(HAL_WBM_RELEASE_INFO1_TRANSMIT_COUNT, in ath11k_dp_tx_status_parse()
674 desc->info1); in ath11k_dp_tx_status_parse()
675 ts->ack_rssi = FIELD_GET(HAL_WBM_RELEASE_INFO2_ACK_FRAME_RSSI, in ath11k_dp_tx_status_parse()
676 desc->info2); in ath11k_dp_tx_status_parse()
677 if (desc->info2 & HAL_WBM_RELEASE_INFO2_FIRST_MSDU) in ath11k_dp_tx_status_parse()
678 ts->flags |= HAL_TX_STATUS_FLAGS_FIRST_MSDU; in ath11k_dp_tx_status_parse()
679 ts->peer_id = FIELD_GET(HAL_WBM_RELEASE_INFO3_PEER_ID, desc->info3); in ath11k_dp_tx_status_parse()
680 ts->tid = FIELD_GET(HAL_WBM_RELEASE_INFO3_TID, desc->info3); in ath11k_dp_tx_status_parse()
681 if (desc->rate_stats.info0 & HAL_TX_RATE_STATS_INFO0_VALID) in ath11k_dp_tx_status_parse()
682 ts->rate_stats = desc->rate_stats.info0; in ath11k_dp_tx_status_parse()
684 ts->rate_stats = 0; in ath11k_dp_tx_status_parse()
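
ath11k_dp_tx_status_parse() above pulls each status field out of the WBM release descriptor words with FIELD_GET(), and the HTT setup functions below build command words with FIELD_PREP(). A self-contained sketch of that <linux/bitfield.h> idiom; the DEMO_* masks and the one-word layout are invented for illustration, the real HAL_WBM_* masks live in the driver's HAL headers:

#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/types.h>

/* Hypothetical layout of a single 32-bit completion word. */
#define DEMO_INFO_REL_SRC	GENMASK(2, 0)	/* block that released the buffer */
#define DEMO_INFO_REASON	GENMASK(6, 3)	/* completion/release reason code */
#define DEMO_INFO_PEER_ID	GENMASK(22, 7)	/* peer table index */

struct demo_tx_status {
	u8 rel_src;
	u8 reason;
	u16 peer_id;
};

static void demo_status_parse(struct demo_tx_status *ts, u32 info)
{
	/* FIELD_GET() masks and right-shifts according to the GENMASK() constant. */
	ts->rel_src = FIELD_GET(DEMO_INFO_REL_SRC, info);
	ts->reason = FIELD_GET(DEMO_INFO_REASON, info);
	ts->peer_id = FIELD_GET(DEMO_INFO_PEER_ID, info);
}

static u32 demo_status_pack(const struct demo_tx_status *ts)
{
	/* FIELD_PREP() is the inverse: shift a value into its field position. */
	return FIELD_PREP(DEMO_INFO_REL_SRC, ts->rel_src) |
	       FIELD_PREP(DEMO_INFO_REASON, ts->reason) |
	       FIELD_PREP(DEMO_INFO_PEER_ID, ts->peer_id);
}
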
690 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_completion_handler()
691 int hal_ring_id = dp->tx_ring[ring_id].tcl_comp_ring.ring_id; in ath11k_dp_tx_completion_handler()
692 struct hal_srng *status_ring = &ab->hal.srng_list[hal_ring_id]; in ath11k_dp_tx_completion_handler()
695 struct dp_tx_ring *tx_ring = &dp->tx_ring[ring_id]; in ath11k_dp_tx_completion_handler()
700 spin_lock_bh(&status_ring->lock); in ath11k_dp_tx_completion_handler()
704 while ((ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_head) != in ath11k_dp_tx_completion_handler()
705 tx_ring->tx_status_tail) && in ath11k_dp_tx_completion_handler()
707 memcpy(&tx_ring->tx_status[tx_ring->tx_status_head], in ath11k_dp_tx_completion_handler()
709 tx_ring->tx_status_head = in ath11k_dp_tx_completion_handler()
710 ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_head); in ath11k_dp_tx_completion_handler()
714 (ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_head) == in ath11k_dp_tx_completion_handler()
715 tx_ring->tx_status_tail))) { in ath11k_dp_tx_completion_handler()
722 spin_unlock_bh(&status_ring->lock); in ath11k_dp_tx_completion_handler()
724 while (ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_tail) != tx_ring->tx_status_head) { in ath11k_dp_tx_completion_handler()
728 tx_ring->tx_status_tail = in ath11k_dp_tx_completion_handler()
729 ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_tail); in ath11k_dp_tx_completion_handler()
730 tx_status = &tx_ring->tx_status[tx_ring->tx_status_tail]; in ath11k_dp_tx_completion_handler()
734 tx_status->buf_addr_info.info1); in ath11k_dp_tx_completion_handler()
746 spin_lock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_completion_handler()
747 msdu = idr_remove(&tx_ring->txbuf_idr, msdu_id); in ath11k_dp_tx_completion_handler()
751 spin_unlock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_completion_handler()
755 spin_unlock(&tx_ring->tx_idr_lock); in ath11k_dp_tx_completion_handler()
757 ar = ab->pdevs[mac_id].ar; in ath11k_dp_tx_completion_handler()
759 if (atomic_dec_and_test(&ar->dp.num_tx_pending)) in ath11k_dp_tx_completion_handler()
760 wake_up(&ar->dp.tx_empty_waitq); in ath11k_dp_tx_completion_handler()
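
ath11k_dp_tx_completion_handler() above runs in two phases: with the status ring locked it copies completed descriptors into a software scratch array advanced by tx_status_head, then outside the lock it consumes them by advancing tx_status_tail. A minimal sketch of such a head/tail scratch ring, assuming the same convention that the tail starts one slot behind the head (so NEXT(tail) == head means empty); the demo_* names and the ring size are hypothetical:

#include <linux/types.h>

#define DEMO_COMP_RING_SIZE	32	/* arbitrary demo size */

struct demo_comp_ring {
	u32 slots[DEMO_COMP_RING_SIZE];
	u32 head;	/* next slot the producer will write */
	u32 tail;	/* last slot the consumer has read */
};

static inline u32 demo_comp_next(u32 idx)
{
	return (idx + 1) % DEMO_COMP_RING_SIZE;
}

static void demo_comp_init(struct demo_comp_ring *ring)
{
	ring->head = 0;
	ring->tail = DEMO_COMP_RING_SIZE - 1;	/* empty: NEXT(tail) == head */
}

/* Producer (under the hardware ring lock): stash one completion word. */
static bool demo_comp_push(struct demo_comp_ring *ring, u32 val)
{
	if (demo_comp_next(ring->head) == ring->tail)
		return false;			/* scratch ring full, stop draining */

	ring->slots[ring->head] = val;
	ring->head = demo_comp_next(ring->head);
	return true;
}

/* Consumer (after dropping the lock): process one buffered completion. */
static bool demo_comp_pop(struct demo_comp_ring *ring, u32 *val)
{
	if (demo_comp_next(ring->tail) == ring->head)
		return false;			/* nothing buffered */

	ring->tail = demo_comp_next(ring->tail);
	*val = ring->slots[ring->tail];
	return true;
}
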
772 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_send_reo_cmd()
777 if (test_bit(ATH11K_FLAG_CRASH_FLUSH, &ab->dev_flags)) in ath11k_dp_tx_send_reo_cmd()
778 return -ESHUTDOWN; in ath11k_dp_tx_send_reo_cmd()
780 cmd_ring = &ab->hal.srng_list[dp->reo_cmd_ring.ring_id]; in ath11k_dp_tx_send_reo_cmd()
789 return -EINVAL; in ath11k_dp_tx_send_reo_cmd()
801 return -ENOMEM; in ath11k_dp_tx_send_reo_cmd()
803 memcpy(&dp_cmd->data, rx_tid, sizeof(struct dp_rx_tid)); in ath11k_dp_tx_send_reo_cmd()
804 dp_cmd->cmd_num = cmd_num; in ath11k_dp_tx_send_reo_cmd()
805 dp_cmd->handler = cb; in ath11k_dp_tx_send_reo_cmd()
807 spin_lock_bh(&dp->reo_cmd_lock); in ath11k_dp_tx_send_reo_cmd()
808 list_add_tail(&dp_cmd->list, &dp->reo_cmd_list); in ath11k_dp_tx_send_reo_cmd()
809 spin_unlock_bh(&dp->reo_cmd_lock); in ath11k_dp_tx_send_reo_cmd()
831 if (!ab->hw_params.rx_mac_buf_ring) { in ath11k_dp_tx_get_ring_id_type()
836 ret = -EINVAL; in ath11k_dp_tx_get_ring_id_type()
872 ret = -EINVAL; in ath11k_dp_tx_get_ring_id_type()
881 struct hal_srng *srng = &ab->hal.srng_list[ring_id]; in ath11k_dp_tx_htt_srng_setup()
893 return -ENOMEM; in ath11k_dp_tx_htt_srng_setup()
908 cmd = (struct htt_srng_setup_cmd *)skb->data; in ath11k_dp_tx_htt_srng_setup()
909 cmd->info0 = FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_MSG_TYPE, in ath11k_dp_tx_htt_srng_setup()
913 cmd->info0 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_PDEV_ID, in ath11k_dp_tx_htt_srng_setup()
916 cmd->info0 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_PDEV_ID, in ath11k_dp_tx_htt_srng_setup()
918 cmd->info0 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_RING_TYPE, in ath11k_dp_tx_htt_srng_setup()
920 cmd->info0 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO0_RING_ID, htt_ring_id); in ath11k_dp_tx_htt_srng_setup()
922 cmd->ring_base_addr_lo = params.ring_base_paddr & in ath11k_dp_tx_htt_srng_setup()
925 cmd->ring_base_addr_hi = (u64)params.ring_base_paddr >> in ath11k_dp_tx_htt_srng_setup()
935 cmd->info1 = FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO1_RING_ENTRY_SIZE, in ath11k_dp_tx_htt_srng_setup()
937 cmd->info1 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO1_RING_SIZE, in ath11k_dp_tx_htt_srng_setup()
939 cmd->info1 |= FIELD_PREP(HTT_SRNG_SETUP_CMD_INFO1_RING_FLAGS_MSI_SWAP, in ath11k_dp_tx_htt_srng_setup()
941 cmd->info1 |= FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
944 cmd->info1 |= FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
948 cmd->info1 |= HTT_SRNG_SETUP_CMD_INFO1_RING_LOOP_CNT_DIS; in ath11k_dp_tx_htt_srng_setup()
950 cmd->ring_head_off32_remote_addr_lo = hp_addr & HAL_ADDR_LSB_REG_MASK; in ath11k_dp_tx_htt_srng_setup()
951 cmd->ring_head_off32_remote_addr_hi = (u64)hp_addr >> in ath11k_dp_tx_htt_srng_setup()
954 cmd->ring_tail_off32_remote_addr_lo = tp_addr & HAL_ADDR_LSB_REG_MASK; in ath11k_dp_tx_htt_srng_setup()
955 cmd->ring_tail_off32_remote_addr_hi = (u64)tp_addr >> in ath11k_dp_tx_htt_srng_setup()
958 cmd->ring_msi_addr_lo = lower_32_bits(params.msi_addr); in ath11k_dp_tx_htt_srng_setup()
959 cmd->ring_msi_addr_hi = upper_32_bits(params.msi_addr); in ath11k_dp_tx_htt_srng_setup()
960 cmd->msi_data = params.msi_data; in ath11k_dp_tx_htt_srng_setup()
962 cmd->intr_info = FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
965 cmd->intr_info |= FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
969 cmd->info2 = 0; in ath11k_dp_tx_htt_srng_setup()
971 cmd->info2 = FIELD_PREP( in ath11k_dp_tx_htt_srng_setup()
978 cmd->ring_msi_addr_lo, cmd->ring_msi_addr_hi, in ath11k_dp_tx_htt_srng_setup()
979 cmd->msi_data, ring_id, ring_type, cmd->intr_info, cmd->info2); in ath11k_dp_tx_htt_srng_setup()
981 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_srng_setup()
997 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1003 init_completion(&dp->htt_tgt_version_received); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1007 return -ENOMEM; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1010 cmd = (struct htt_ver_req_cmd *)skb->data; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1011 cmd->ver_reg_info = FIELD_PREP(HTT_VER_REQ_INFO_MSG_ID, in ath11k_dp_tx_htt_h2t_ver_req_msg()
1014 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1020 ret = wait_for_completion_timeout(&dp->htt_tgt_version_received, in ath11k_dp_tx_htt_h2t_ver_req_msg()
1024 return -ETIMEDOUT; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1027 if (dp->htt_tgt_ver_major != HTT_TARGET_VERSION_MAJOR) { in ath11k_dp_tx_htt_h2t_ver_req_msg()
1029 dp->htt_tgt_ver_major, HTT_TARGET_VERSION_MAJOR); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1030 return -EOPNOTSUPP; in ath11k_dp_tx_htt_h2t_ver_req_msg()
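
ath11k_dp_tx_htt_h2t_ver_req_msg() above arms a completion before sending the HTT version request and then blocks in wait_for_completion_timeout() until the version-response handler signals it (or the wait times out). A condensed sketch of that request/response handshake with the generic completion API; demo_fw_ctx and the one-second timeout are assumptions for the example, not values taken from the driver:

#include <linux/completion.h>
#include <linux/errno.h>
#include <linux/jiffies.h>
#include <linux/types.h>

struct demo_fw_ctx {
	struct completion ver_resp_received;	/* signalled by the response handler */
	u32 fw_major;				/* filled in by the response handler */
};

/* Requester: arm the completion, fire off the command, sleep with a timeout. */
static int demo_request_fw_version(struct demo_fw_ctx *ctx)
{
	unsigned long time_left;

	init_completion(&ctx->ver_resp_received);

	/* ... build the version-request message and hand it to the transport ... */

	time_left = wait_for_completion_timeout(&ctx->ver_resp_received, HZ);
	if (!time_left)
		return -ETIMEDOUT;	/* firmware never answered */

	return 0;	/* ctx->fw_major is now valid and can be sanity-checked */
}

/* Response handler (runs from the message-delivery context): wake the waiter. */
static void demo_fw_version_event(struct demo_fw_ctx *ctx, u32 major)
{
	ctx->fw_major = major;
	complete(&ctx->ver_resp_received);
}
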
1038 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1039 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1047 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1050 return -ENOMEM; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1053 cmd = (struct htt_ppdu_stats_cfg_cmd *)skb->data; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1054 cmd->msg = FIELD_PREP(HTT_PPDU_STATS_CFG_MSG_TYPE, in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1057 pdev_mask = 1 << (ar->pdev_idx + i); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1058 cmd->msg |= FIELD_PREP(HTT_PPDU_STATS_CFG_PDEV_ID, pdev_mask); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1059 cmd->msg |= FIELD_PREP(HTT_PPDU_STATS_CFG_TLV_TYPE_BITMASK, mask); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1061 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1077 struct hal_srng *srng = &ab->hal.srng_list[ring_id]; in ath11k_dp_tx_htt_rx_filter_setup()
1087 return -ENOMEM; in ath11k_dp_tx_htt_rx_filter_setup()
1099 cmd = (struct htt_rx_ring_selection_cfg_cmd *)skb->data; in ath11k_dp_tx_htt_rx_filter_setup()
1100 cmd->info0 = FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO0_MSG_TYPE, in ath11k_dp_tx_htt_rx_filter_setup()
1104 cmd->info0 |= in ath11k_dp_tx_htt_rx_filter_setup()
1108 cmd->info0 |= in ath11k_dp_tx_htt_rx_filter_setup()
1111 cmd->info0 |= FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO0_RING_ID, in ath11k_dp_tx_htt_rx_filter_setup()
1113 cmd->info0 |= FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO0_SS, in ath11k_dp_tx_htt_rx_filter_setup()
1115 cmd->info0 |= FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO0_PS, in ath11k_dp_tx_htt_rx_filter_setup()
1118 cmd->info1 = FIELD_PREP(HTT_RX_RING_SELECTION_CFG_CMD_INFO1_BUF_SIZE, in ath11k_dp_tx_htt_rx_filter_setup()
1120 cmd->pkt_type_en_flags0 = tlv_filter->pkt_filter_flags0; in ath11k_dp_tx_htt_rx_filter_setup()
1121 cmd->pkt_type_en_flags1 = tlv_filter->pkt_filter_flags1; in ath11k_dp_tx_htt_rx_filter_setup()
1122 cmd->pkt_type_en_flags2 = tlv_filter->pkt_filter_flags2; in ath11k_dp_tx_htt_rx_filter_setup()
1123 cmd->pkt_type_en_flags3 = tlv_filter->pkt_filter_flags3; in ath11k_dp_tx_htt_rx_filter_setup()
1124 cmd->rx_filter_tlv = tlv_filter->rx_filter; in ath11k_dp_tx_htt_rx_filter_setup()
1126 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_rx_filter_setup()
1143 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1144 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1153 return -ENOMEM; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1157 cmd = (struct htt_ext_stats_cfg_cmd *)skb->data; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1159 cmd->hdr.msg_type = HTT_H2T_MSG_TYPE_EXT_STATS_CFG; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1161 if (ab->hw_params.single_pdev_only) in ath11k_dp_tx_htt_h2t_ext_stats_req()
1164 pdev_id = ar->pdev->pdev_id; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1166 cmd->hdr.pdev_mask = 1 << pdev_id; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1168 cmd->hdr.stats_type = type; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1169 cmd->cfg_param0 = cfg_params->cfg0; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1170 cmd->cfg_param1 = cfg_params->cfg1; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1171 cmd->cfg_param2 = cfg_params->cfg2; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1172 cmd->cfg_param3 = cfg_params->cfg3; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1173 cmd->cookie_lsb = lower_32_bits(cookie); in ath11k_dp_tx_htt_h2t_ext_stats_req()
1174 cmd->cookie_msb = upper_32_bits(cookie); in ath11k_dp_tx_htt_h2t_ext_stats_req()
1176 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ext_stats_req()
1189 struct ath11k_pdev_dp *dp = &ar->dp; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1190 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1194 if (ab->hw_params.full_monitor_mode) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1196 dp->mac_id, !reset); in ath11k_dp_tx_htt_monitor_mode_ring_config()
1203 ring_id = dp->rxdma_mon_buf_ring.refill_buf_ring.ring_id; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1223 if (ab->hw_params.rxdma1_enable) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1224 ret = ath11k_dp_tx_htt_rx_filter_setup(ar->ab, ring_id, dp->mac_id, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1230 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1231 ring_id = dp->rx_mac_buf_ring[i].ring_id; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1232 ret = ath11k_dp_tx_htt_rx_filter_setup(ar->ab, ring_id, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1233 dp->mac_id + i, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1243 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1244 ring_id = dp->rx_mon_status_refill_ring[i].refill_buf_ring.ring_id; in ath11k_dp_tx_htt_monitor_mode_ring_config()
1256 dp->mac_id + i, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1262 if (!ar->ab->hw_params.rxdma1_enable) in ath11k_dp_tx_htt_monitor_mode_ring_config()
1263 mod_timer(&ar->ab->mon_reap_timer, jiffies + in ath11k_dp_tx_htt_monitor_mode_ring_config()
1278 return -ENOMEM; in ath11k_dp_tx_htt_rx_full_mon_setup()
1281 cmd = (struct htt_rx_full_monitor_mode_cfg_cmd *)skb->data; in ath11k_dp_tx_htt_rx_full_mon_setup()
1283 cmd->info0 = FIELD_PREP(HTT_RX_FULL_MON_MODE_CFG_CMD_INFO0_MSG_TYPE, in ath11k_dp_tx_htt_rx_full_mon_setup()
1286 cmd->info0 |= FIELD_PREP(HTT_RX_FULL_MON_MODE_CFG_CMD_INFO0_PDEV_ID, mac_id); in ath11k_dp_tx_htt_rx_full_mon_setup()
1288 cmd->cfg = HTT_RX_FULL_MON_MODE_CFG_CMD_CFG_ENABLE | in ath11k_dp_tx_htt_rx_full_mon_setup()
1292 cmd->cfg |= HTT_RX_FULL_MON_MODE_CFG_CMD_CFG_ZERO_MPDUS_END | in ath11k_dp_tx_htt_rx_full_mon_setup()
1296 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_rx_full_mon_setup()