Lines Matching refs:htt

34 	hash_for_each_possible(ar->htt.rx_ring.skb_table, rxcb, hlist, paddr)  in ath10k_htt_rx_find_skb_paddr()
42 static void ath10k_htt_rx_ring_free(struct ath10k_htt *htt) in ath10k_htt_rx_ring_free() argument
49 if (htt->rx_ring.in_ord_rx) { in ath10k_htt_rx_ring_free()
50 hash_for_each_safe(htt->rx_ring.skb_table, i, n, rxcb, hlist) { in ath10k_htt_rx_ring_free()
52 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
59 for (i = 0; i < htt->rx_ring.size; i++) { in ath10k_htt_rx_ring_free()
60 skb = htt->rx_ring.netbufs_ring[i]; in ath10k_htt_rx_ring_free()
65 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
72 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_ring_free()
73 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_ring_free()
74 memset(htt->rx_ring.netbufs_ring, 0, in ath10k_htt_rx_ring_free()
75 htt->rx_ring.size * sizeof(htt->rx_ring.netbufs_ring[0])); in ath10k_htt_rx_ring_free()
78 static size_t ath10k_htt_get_rx_ring_size_32(struct ath10k_htt *htt) in ath10k_htt_get_rx_ring_size_32() argument
80 return htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring_32); in ath10k_htt_get_rx_ring_size_32()
83 static size_t ath10k_htt_get_rx_ring_size_64(struct ath10k_htt *htt) in ath10k_htt_get_rx_ring_size_64() argument
85 return htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring_64); in ath10k_htt_get_rx_ring_size_64()
88 static void ath10k_htt_config_paddrs_ring_32(struct ath10k_htt *htt, in ath10k_htt_config_paddrs_ring_32() argument
91 htt->rx_ring.paddrs_ring_32 = vaddr; in ath10k_htt_config_paddrs_ring_32()
94 static void ath10k_htt_config_paddrs_ring_64(struct ath10k_htt *htt, in ath10k_htt_config_paddrs_ring_64() argument
97 htt->rx_ring.paddrs_ring_64 = vaddr; in ath10k_htt_config_paddrs_ring_64()
100 static void ath10k_htt_set_paddrs_ring_32(struct ath10k_htt *htt, in ath10k_htt_set_paddrs_ring_32() argument
103 htt->rx_ring.paddrs_ring_32[idx] = __cpu_to_le32(paddr); in ath10k_htt_set_paddrs_ring_32()
106 static void ath10k_htt_set_paddrs_ring_64(struct ath10k_htt *htt, in ath10k_htt_set_paddrs_ring_64() argument
109 htt->rx_ring.paddrs_ring_64[idx] = __cpu_to_le64(paddr); in ath10k_htt_set_paddrs_ring_64()
112 static void ath10k_htt_reset_paddrs_ring_32(struct ath10k_htt *htt, int idx) in ath10k_htt_reset_paddrs_ring_32() argument
114 htt->rx_ring.paddrs_ring_32[idx] = 0; in ath10k_htt_reset_paddrs_ring_32()
117 static void ath10k_htt_reset_paddrs_ring_64(struct ath10k_htt *htt, int idx) in ath10k_htt_reset_paddrs_ring_64() argument
119 htt->rx_ring.paddrs_ring_64[idx] = 0; in ath10k_htt_reset_paddrs_ring_64()
122 static void *ath10k_htt_get_vaddr_ring_32(struct ath10k_htt *htt) in ath10k_htt_get_vaddr_ring_32() argument
124 return (void *)htt->rx_ring.paddrs_ring_32; in ath10k_htt_get_vaddr_ring_32()
127 static void *ath10k_htt_get_vaddr_ring_64(struct ath10k_htt *htt) in ath10k_htt_get_vaddr_ring_64() argument
129 return (void *)htt->rx_ring.paddrs_ring_64; in ath10k_htt_get_vaddr_ring_64()
132 static int __ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in __ath10k_htt_rx_ring_fill_n() argument
134 struct ath10k_hw_params *hw = &htt->ar->hw_params; in __ath10k_htt_rx_ring_fill_n()
148 idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in __ath10k_htt_rx_ring_fill_n()
150 if (idx < 0 || idx >= htt->rx_ring.size) { in __ath10k_htt_rx_ring_fill_n()
151 ath10k_err(htt->ar, "rx ring index is not valid, firmware malfunctioning?\n"); in __ath10k_htt_rx_ring_fill_n()
152 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
173 paddr = dma_map_single(htt->ar->dev, skb->data, in __ath10k_htt_rx_ring_fill_n()
177 if (unlikely(dma_mapping_error(htt->ar->dev, paddr))) { in __ath10k_htt_rx_ring_fill_n()
185 htt->rx_ring.netbufs_ring[idx] = skb; in __ath10k_htt_rx_ring_fill_n()
186 ath10k_htt_set_paddrs_ring(htt, paddr, idx); in __ath10k_htt_rx_ring_fill_n()
187 htt->rx_ring.fill_cnt++; in __ath10k_htt_rx_ring_fill_n()
189 if (htt->rx_ring.in_ord_rx) { in __ath10k_htt_rx_ring_fill_n()
190 hash_add(htt->rx_ring.skb_table, in __ath10k_htt_rx_ring_fill_n()
197 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
206 *htt->rx_ring.alloc_idx.vaddr = __cpu_to_le32(idx); in __ath10k_htt_rx_ring_fill_n()
210 static int ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in ath10k_htt_rx_ring_fill_n() argument
212 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_ring_fill_n()
213 return __ath10k_htt_rx_ring_fill_n(htt, num); in ath10k_htt_rx_ring_fill_n()
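The fill path listed above advances a producer index masked by the power-of-two ring size and counts posted buffers in fill_cnt, publishing the new index only after the loop. A minimal userspace sketch of that pattern follows; the names (rx_ring, ring_fill_n) are hypothetical and plain malloc() stands in for skb allocation plus dma_map_single().

#include <stdlib.h>

#define RING_SIZE 256                    /* must be a power of two */
#define RING_MASK (RING_SIZE - 1)

struct rx_ring {
	void *bufs[RING_SIZE];           /* analogue of netbufs_ring[] */
	unsigned int alloc_idx;          /* analogue of *alloc_idx.vaddr */
	unsigned int fill_cnt;           /* buffers currently posted */
};

/* Post up to 'num' fresh buffers, stopping early on allocation failure.
 * Mirrors the masked-index advance seen in __ath10k_htt_rx_ring_fill_n(). */
static int ring_fill_n(struct rx_ring *r, int num)
{
	unsigned int idx = r->alloc_idx & RING_MASK;
	int posted = 0;

	while (num > 0) {
		void *buf = malloc(2048);        /* stand-in for an skb + DMA map */
		if (!buf)
			break;

		r->bufs[idx] = buf;
		r->fill_cnt++;

		idx = (idx + 1) & RING_MASK;     /* wrap with the size mask */
		num--;
		posted++;
	}

	r->alloc_idx = idx;                      /* publish new producer index */
	return posted;
}

Publishing the index once at the end, rather than per buffer, matches how the driver writes *alloc_idx.vaddr after the loop so the firmware sees a consistent producer position.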
216 static void ath10k_htt_rx_msdu_buff_replenish(struct ath10k_htt *htt) in ath10k_htt_rx_msdu_buff_replenish() argument
236 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
237 num_deficit = htt->rx_ring.fill_level - htt->rx_ring.fill_cnt; in ath10k_htt_rx_msdu_buff_replenish()
240 ret = ath10k_htt_rx_ring_fill_n(htt, num_to_fill); in ath10k_htt_rx_msdu_buff_replenish()
248 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
251 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
254 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
259 struct ath10k_htt *htt = from_timer(htt, t, rx_ring.refill_retry_timer); in ath10k_htt_rx_ring_refill_retry() local
261 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_rx_ring_refill_retry()
266 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_ring_refill() local
272 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
273 ret = ath10k_htt_rx_ring_fill_n(htt, (htt->rx_ring.fill_level - in ath10k_htt_rx_ring_refill()
274 htt->rx_ring.fill_cnt)); in ath10k_htt_rx_ring_refill()
277 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_ring_refill()
279 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
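The replenish path computes the deficit between fill_level and fill_cnt and, when it cannot fully refill (typically out of memory), re-arms a retry timer instead of looping. A hedged sketch of that deficit/retry shape, with a hypothetical post_one_buffer() and an empty schedule_retry() standing in for mod_timer() on rx_ring.refill_retry_timer:

#include <stdbool.h>

struct ring_state {
	int fill_level;    /* target number of posted buffers */
	int fill_cnt;      /* currently posted buffers */
};

/* Stand-in for allocating and posting one rx buffer; returning false
 * models an out-of-memory failure (hypothetical helper). */
static bool post_one_buffer(struct ring_state *r)
{
	r->fill_cnt++;
	return true;
}

/* Stand-in for mod_timer(&rx_ring.refill_retry_timer, ...) in the driver. */
static void schedule_retry(void)
{
}

static void ring_replenish(struct ring_state *r)
{
	int deficit = r->fill_level - r->fill_cnt;

	while (deficit > 0) {
		if (!post_one_buffer(r)) {
			/* Could not fully refill: back off and retry later
			 * rather than busy-looping, as the driver does with a
			 * jiffies-based retry timer. */
			schedule_retry();
			return;
		}
		deficit--;
	}
}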
284 void ath10k_htt_rx_free(struct ath10k_htt *htt) in ath10k_htt_rx_free() argument
286 if (htt->ar->bus_param.dev_type == ATH10K_DEV_TYPE_HL) in ath10k_htt_rx_free()
289 del_timer_sync(&htt->rx_ring.refill_retry_timer); in ath10k_htt_rx_free()
291 skb_queue_purge(&htt->rx_msdus_q); in ath10k_htt_rx_free()
292 skb_queue_purge(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_free()
293 skb_queue_purge(&htt->tx_fetch_ind_q); in ath10k_htt_rx_free()
295 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_free()
296 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_free()
297 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_free()
299 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
300 ath10k_htt_get_rx_ring_size(htt), in ath10k_htt_rx_free()
301 ath10k_htt_get_vaddr_ring(htt), in ath10k_htt_rx_free()
302 htt->rx_ring.base_paddr); in ath10k_htt_rx_free()
304 ath10k_htt_config_paddrs_ring(htt, NULL); in ath10k_htt_rx_free()
306 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
307 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_free()
308 htt->rx_ring.alloc_idx.vaddr, in ath10k_htt_rx_free()
309 htt->rx_ring.alloc_idx.paddr); in ath10k_htt_rx_free()
310 htt->rx_ring.alloc_idx.vaddr = NULL; in ath10k_htt_rx_free()
312 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_free()
313 htt->rx_ring.netbufs_ring = NULL; in ath10k_htt_rx_free()
316 static inline struct sk_buff *ath10k_htt_rx_netbuf_pop(struct ath10k_htt *htt) in ath10k_htt_rx_netbuf_pop() argument
318 struct ath10k *ar = htt->ar; in ath10k_htt_rx_netbuf_pop()
322 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_netbuf_pop()
324 if (htt->rx_ring.fill_cnt == 0) { in ath10k_htt_rx_netbuf_pop()
329 idx = htt->rx_ring.sw_rd_idx.msdu_payld; in ath10k_htt_rx_netbuf_pop()
330 msdu = htt->rx_ring.netbufs_ring[idx]; in ath10k_htt_rx_netbuf_pop()
331 htt->rx_ring.netbufs_ring[idx] = NULL; in ath10k_htt_rx_netbuf_pop()
332 ath10k_htt_reset_paddrs_ring(htt, idx); in ath10k_htt_rx_netbuf_pop()
335 idx &= htt->rx_ring.size_mask; in ath10k_htt_rx_netbuf_pop()
336 htt->rx_ring.sw_rd_idx.msdu_payld = idx; in ath10k_htt_rx_netbuf_pop()
337 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_netbuf_pop()
339 dma_unmap_single(htt->ar->dev, in ath10k_htt_rx_netbuf_pop()
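The consumer side is the mirror image of the fill path: ath10k_htt_rx_netbuf_pop() reads from a software read index, clears the slot, wraps the index with the size mask, and decrements fill_cnt before unmapping the buffer. A simplified sketch under the same hypothetical ring layout as above:

#include <stddef.h>

#define RING_SIZE 256
#define RING_MASK (RING_SIZE - 1)

struct rx_pop_ring {
	void *bufs[RING_SIZE];   /* analogue of netbufs_ring[] */
	unsigned int rd_idx;     /* analogue of sw_rd_idx.msdu_payld */
	unsigned int fill_cnt;   /* buffers currently posted */
};

/* Consume one buffer at the software read index. The returned pointer
 * stands in for the unmapped skb handed back to the caller; in the driver
 * the slot's paddr entry is also reset and dma_unmap_single() is called. */
static void *ring_pop(struct rx_pop_ring *r)
{
	void *buf;

	if (r->fill_cnt == 0)
		return NULL;              /* nothing posted; the driver warns here */

	buf = r->bufs[r->rd_idx];
	r->bufs[r->rd_idx] = NULL;

	r->rd_idx = (r->rd_idx + 1) & RING_MASK;
	r->fill_cnt--;

	return buf;
}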
350 static int ath10k_htt_rx_amsdu_pop(struct ath10k_htt *htt, in ath10k_htt_rx_amsdu_pop() argument
353 struct ath10k *ar = htt->ar; in ath10k_htt_rx_amsdu_pop()
363 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_amsdu_pop()
368 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
419 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
466 static struct sk_buff *ath10k_htt_rx_pop_paddr(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr() argument
469 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr()
473 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr()
481 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_pop_paddr()
483 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_pop_paddr()
501 static int ath10k_htt_rx_handle_amsdu_mon_32(struct ath10k_htt *htt, in ath10k_htt_rx_handle_amsdu_mon_32() argument
505 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu_mon_32()
535 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_32()
550 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_32()
578 ath10k_htt_rx_handle_amsdu_mon_64(struct ath10k_htt *htt, in ath10k_htt_rx_handle_amsdu_mon_64() argument
582 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu_mon_64()
612 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_64()
631 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_64()
663 static int ath10k_htt_rx_pop_paddr32_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr32_list() argument
667 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr32_list()
677 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr32_list()
685 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr32_list()
692 ret = ath10k_htt_rx_handle_amsdu_mon_32(htt, msdu, in ath10k_htt_rx_pop_paddr32_list()
717 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr32_list()
728 static int ath10k_htt_rx_pop_paddr64_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr64_list() argument
732 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr64_list()
742 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr64_list()
749 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr64_list()
756 ret = ath10k_htt_rx_handle_amsdu_mon_64(htt, msdu, in ath10k_htt_rx_pop_paddr64_list()
781 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr64_list()
792 int ath10k_htt_rx_alloc(struct ath10k_htt *htt) in ath10k_htt_rx_alloc() argument
794 struct ath10k *ar = htt->ar; in ath10k_htt_rx_alloc()
798 struct timer_list *timer = &htt->rx_ring.refill_retry_timer; in ath10k_htt_rx_alloc()
803 htt->rx_confused = false; in ath10k_htt_rx_alloc()
808 htt->rx_ring.size = HTT_RX_RING_SIZE; in ath10k_htt_rx_alloc()
809 htt->rx_ring.size_mask = htt->rx_ring.size - 1; in ath10k_htt_rx_alloc()
810 htt->rx_ring.fill_level = ar->hw_params.rx_ring_fill_level; in ath10k_htt_rx_alloc()
812 if (!is_power_of_2(htt->rx_ring.size)) { in ath10k_htt_rx_alloc()
817 htt->rx_ring.netbufs_ring = in ath10k_htt_rx_alloc()
818 kcalloc(htt->rx_ring.size, sizeof(struct sk_buff *), in ath10k_htt_rx_alloc()
820 if (!htt->rx_ring.netbufs_ring) in ath10k_htt_rx_alloc()
823 size = ath10k_htt_get_rx_ring_size(htt); in ath10k_htt_rx_alloc()
825 vaddr_ring = dma_alloc_coherent(htt->ar->dev, size, &paddr, GFP_KERNEL); in ath10k_htt_rx_alloc()
829 ath10k_htt_config_paddrs_ring(htt, vaddr_ring); in ath10k_htt_rx_alloc()
830 htt->rx_ring.base_paddr = paddr; in ath10k_htt_rx_alloc()
832 vaddr = dma_alloc_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
833 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_alloc()
838 htt->rx_ring.alloc_idx.vaddr = vaddr; in ath10k_htt_rx_alloc()
839 htt->rx_ring.alloc_idx.paddr = paddr; in ath10k_htt_rx_alloc()
840 htt->rx_ring.sw_rd_idx.msdu_payld = htt->rx_ring.size_mask; in ath10k_htt_rx_alloc()
841 *htt->rx_ring.alloc_idx.vaddr = 0; in ath10k_htt_rx_alloc()
846 spin_lock_init(&htt->rx_ring.lock); in ath10k_htt_rx_alloc()
848 spin_lock_init(&htt->tx_fetch_ind_q.lock); in ath10k_htt_rx_alloc()
851 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_alloc()
852 htt->rx_ring.sw_rd_idx.msdu_payld = 0; in ath10k_htt_rx_alloc()
853 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_alloc()
855 skb_queue_head_init(&htt->rx_msdus_q); in ath10k_htt_rx_alloc()
856 skb_queue_head_init(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_alloc()
857 skb_queue_head_init(&htt->tx_fetch_ind_q); in ath10k_htt_rx_alloc()
858 atomic_set(&htt->num_mpdus_ready, 0); in ath10k_htt_rx_alloc()
861 htt->rx_ring.size, htt->rx_ring.fill_level); in ath10k_htt_rx_alloc()
865 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
866 ath10k_htt_get_rx_ring_size(htt), in ath10k_htt_rx_alloc()
868 htt->rx_ring.base_paddr); in ath10k_htt_rx_alloc()
869 ath10k_htt_config_paddrs_ring(htt, NULL); in ath10k_htt_rx_alloc()
871 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_alloc()
872 htt->rx_ring.netbufs_ring = NULL; in ath10k_htt_rx_alloc()
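ath10k_htt_rx_alloc() sets up three resources in order (the netbufs array, the DMA-coherent paddrs ring, and the shared alloc index) and unwinds them in reverse with goto labels when a later step fails. A userspace analogue of that ordering and unwinding, with calloc()/free() standing in for kcalloc() and dma_alloc_coherent()/dma_free_coherent(); the struct and helper names are hypothetical:

#include <stdlib.h>

#define RING_SIZE 256   /* HTT_RX_RING_SIZE in the driver; must be a power of two */

struct rx_ring_alloc {
	void **netbufs;          /* analogue of rx_ring.netbufs_ring */
	unsigned int *ring;      /* analogue of the DMA-coherent paddrs ring */
	unsigned int *alloc_idx; /* analogue of rx_ring.alloc_idx.vaddr */
};

static int ring_alloc(struct rx_ring_alloc *r)
{
	if (RING_SIZE & (RING_SIZE - 1))
		return -1;                       /* size must be a power of two */

	r->netbufs = calloc(RING_SIZE, sizeof(*r->netbufs));
	if (!r->netbufs)
		return -1;

	r->ring = calloc(RING_SIZE, sizeof(*r->ring));
	if (!r->ring)
		goto err_netbufs;

	r->alloc_idx = calloc(1, sizeof(*r->alloc_idx));
	if (!r->alloc_idx)
		goto err_ring;

	*r->alloc_idx = 0;                       /* shared producer index starts at 0 */
	return 0;

err_ring:
	free(r->ring);
	r->ring = NULL;
err_netbufs:
	free(r->netbufs);
	r->netbufs = NULL;
	return -1;
}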
1376 skb_queue_tail(&ar->htt.rx_msdus_q, skb); in ath10k_htt_rx_h_queue_msdu()
2459 static int ath10k_htt_rx_handle_amsdu(struct ath10k_htt *htt) in ath10k_htt_rx_handle_amsdu() argument
2461 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu()
2462 struct ieee80211_rx_status *rx_status = &htt->rx_status; in ath10k_htt_rx_handle_amsdu()
2474 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2475 if (htt->rx_confused) { in ath10k_htt_rx_handle_amsdu()
2476 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2479 ret = ath10k_htt_rx_amsdu_pop(htt, &amsdu); in ath10k_htt_rx_handle_amsdu()
2480 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2488 htt->rx_confused = true; in ath10k_htt_rx_handle_amsdu()
2593 static bool ath10k_htt_rx_proc_rx_ind_hl(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_ind_hl() argument
2599 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_ind_hl()
2897 static bool ath10k_htt_rx_proc_rx_frag_ind_hl(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_frag_ind_hl() argument
2901 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_frag_ind_hl()
2954 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
3016 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
3043 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
3055 static void ath10k_htt_rx_proc_rx_ind_ll(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_ind_ll() argument
3058 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_ind_ll()
3078 atomic_add(mpdu_count, &htt->num_mpdus_ready); in ath10k_htt_rx_proc_rx_ind_ll()
3087 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_tx_compl_ind() local
3156 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_tx_compl_ind()
3157 } else if (!kfifo_put(&htt->txdone_fifo, tx_done)) { in ath10k_htt_rx_tx_compl_ind()
3160 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_tx_compl_ind()
3361 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_h_rx_offload() local
3362 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_h_rx_offload()
3410 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_in_ord_ind() local
3412 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_in_ord_ind()
3423 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_in_ord_ind()
3425 if (htt->rx_confused) in ath10k_htt_rx_in_ord_ind()
3453 ret = ath10k_htt_rx_pop_paddr64_list(htt, &resp->rx_in_ord_ind, in ath10k_htt_rx_in_ord_ind()
3456 ret = ath10k_htt_rx_pop_paddr32_list(htt, &resp->rx_in_ord_ind, in ath10k_htt_rx_in_ord_ind()
3461 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
3493 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
3564 if (!ar->htt.tx_q_state.enabled) { in ath10k_htt_rx_tx_fetch_ind()
3569 if (ar->htt.tx_q_state.mode == HTT_TX_MODE_SWITCH_PUSH) { in ath10k_htt_rx_tx_fetch_ind()
3588 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in ath10k_htt_rx_tx_fetch_ind()
3589 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in ath10k_htt_rx_tx_fetch_ind()
3740 ar->htt.tx_q_state.enabled = enable; in ath10k_htt_rx_tx_mode_switch_ind()
3741 ar->htt.tx_q_state.mode = mode; in ath10k_htt_rx_tx_mode_switch_ind()
3742 ar->htt.tx_q_state.num_push_allowed = threshold; in ath10k_htt_rx_tx_mode_switch_ind()
3752 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in ath10k_htt_rx_tx_mode_switch_ind()
3753 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in ath10k_htt_rx_tx_mode_switch_ind()
3773 spin_lock_bh(&ar->htt.tx_lock); in ath10k_htt_rx_tx_mode_switch_ind()
3776 spin_unlock_bh(&ar->htt.tx_lock); in ath10k_htt_rx_tx_mode_switch_ind()
4050 if (ar->htt.disable_tx_comp) { in ath10k_update_per_peer_tx_stats()
4223 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4234 if (resp->hdr.msg_type >= ar->htt.t2h_msg_types_max) { in ath10k_htt_t2h_msg_handler()
4236 resp->hdr.msg_type, ar->htt.t2h_msg_types_max); in ath10k_htt_t2h_msg_handler()
4239 type = ar->htt.t2h_msg_types[resp->hdr.msg_type]; in ath10k_htt_t2h_msg_handler()
4243 htt->target_version_major = resp->ver_resp.major; in ath10k_htt_t2h_msg_handler()
4244 htt->target_version_minor = resp->ver_resp.minor; in ath10k_htt_t2h_msg_handler()
4245 complete(&htt->target_version_received); in ath10k_htt_t2h_msg_handler()
4250 ath10k_htt_rx_proc_rx_ind_ll(htt, &resp->rx_ind); in ath10k_htt_t2h_msg_handler()
4252 skb_queue_tail(&htt->rx_indication_head, skb); in ath10k_htt_t2h_msg_handler()
4262 ath10k_peer_map_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
4269 ath10k_peer_unmap_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
4274 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4276 struct ath10k_htc_ep *ep = &ar->htc.endpoint[htt->eid]; in ath10k_htt_t2h_msg_handler()
4302 if (htt->disable_tx_comp) { in ath10k_htt_t2h_msg_handler()
4308 status = ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_t2h_msg_handler()
4310 spin_lock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
4311 ath10k_htt_tx_mgmt_dec_pending(htt); in ath10k_htt_t2h_msg_handler()
4312 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
4317 ath10k_htt_rx_tx_compl_ind(htt->ar, skb); in ath10k_htt_t2h_msg_handler()
4320 struct ath10k *ar = htt->ar; in ath10k_htt_t2h_msg_handler()
4335 atomic_inc(&htt->num_mpdus_ready); in ath10k_htt_t2h_msg_handler()
4337 return ath10k_htt_rx_proc_rx_frag_ind(htt, in ath10k_htt_t2h_msg_handler()
4378 skb_queue_tail(&htt->rx_in_ord_compl_q, skb); in ath10k_htt_t2h_msg_handler()
4382 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4384 struct ath10k_htc_ep *ep = &ar->htc.endpoint[htt->eid]; in ath10k_htt_t2h_msg_handler()
4396 if (htt->disable_tx_comp) { in ath10k_htt_t2h_msg_handler()
4426 skb_queue_tail(&htt->tx_fetch_ind_q, tx_fetch_ind); in ath10k_htt_t2h_msg_handler()
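The t2h handler lines above show a two-step dispatch: the raw msg_type from the target is range-checked against t2h_msg_types_max, translated through the per-firmware t2h_msg_types[] table, and only then switched on. A compact sketch of that pattern; the enum values and table contents are made up for illustration:

#include <stdio.h>

/* Host-side message types (a small subset, purely illustrative). */
enum t2h_msg {
	T2H_VERSION_CONF,
	T2H_RX_IND,
	T2H_TX_COMPL_IND,
};

/* Per-firmware translation table: raw msg_type is range-checked against the
 * table size, then mapped to the host enum, as in ath10k_htt_t2h_msg_handler(). */
static const enum t2h_msg msg_types[] = {
	[0] = T2H_VERSION_CONF,
	[1] = T2H_RX_IND,
	[2] = T2H_TX_COMPL_IND,
};

static void handle_t2h(unsigned int raw_type)
{
	enum t2h_msg type;

	if (raw_type >= sizeof(msg_types) / sizeof(msg_types[0])) {
		fprintf(stderr, "unsupported msg type %u\n", raw_type);
		return;
	}

	type = msg_types[raw_type];

	switch (type) {
	case T2H_VERSION_CONF:
		/* record target version, signal completion */
		break;
	case T2H_RX_IND:
		/* queue or process the rx indication */
		break;
	case T2H_TX_COMPL_IND:
		/* push tx completions toward the txdone fifo */
		break;
	}
}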
4463 if (skb_queue_empty(&ar->htt.rx_msdus_q)) in ath10k_htt_rx_deliver_msdu()
4466 skb = skb_dequeue(&ar->htt.rx_msdus_q); in ath10k_htt_rx_deliver_msdu()
4479 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_hl_indication() local
4485 skb = skb_dequeue(&htt->rx_indication_head); in ath10k_htt_rx_hl_indication()
4491 release = ath10k_htt_rx_proc_rx_ind_hl(htt, in ath10k_htt_rx_hl_indication()
4501 skb_queue_len(&htt->rx_indication_head)); in ath10k_htt_rx_hl_indication()
4509 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_txrx_compl_task() local
4528 while ((skb = skb_dequeue(&htt->rx_in_ord_compl_q))) { in ath10k_htt_txrx_compl_task()
4529 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
4531 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
4540 while (atomic_read(&htt->num_mpdus_ready)) { in ath10k_htt_txrx_compl_task()
4541 ret = ath10k_htt_rx_handle_amsdu(htt); in ath10k_htt_txrx_compl_task()
4546 atomic_dec(&htt->num_mpdus_ready); in ath10k_htt_txrx_compl_task()
4557 if ((quota < budget) && !kfifo_is_empty(&htt->txdone_fifo)) in ath10k_htt_txrx_compl_task()
4565 while (kfifo_get(&htt->txdone_fifo, &tx_done)) in ath10k_htt_txrx_compl_task()
4566 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_txrx_compl_task()
4570 spin_lock_irqsave(&htt->tx_fetch_ind_q.lock, flags); in ath10k_htt_txrx_compl_task()
4571 skb_queue_splice_init(&htt->tx_fetch_ind_q, &tx_ind_q); in ath10k_htt_txrx_compl_task()
4572 spin_unlock_irqrestore(&htt->tx_fetch_ind_q.lock, flags); in ath10k_htt_txrx_compl_task()
4580 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_txrx_compl_task()
4610 void ath10k_htt_set_rx_ops(struct ath10k_htt *htt) in ath10k_htt_set_rx_ops() argument
4612 struct ath10k *ar = htt->ar; in ath10k_htt_set_rx_ops()
4615 htt->rx_ops = &htt_rx_ops_hl; in ath10k_htt_set_rx_ops()
4617 htt->rx_ops = &htt_rx_ops_64; in ath10k_htt_set_rx_ops()
4619 htt->rx_ops = &htt_rx_ops_32; in ath10k_htt_set_rx_ops()
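The *_32/*_64 helper pairs earlier in the listing are reached through an ops table so the rest of the RX path never branches on the target's DMA address width; ath10k_htt_set_rx_ops() simply picks the table once. A minimal sketch of that dispatch, with hypothetical names and only two callbacks (the driver's htt_rx_ops_32/htt_rx_ops_64/htt_rx_ops_hl carry more):

#include <stdint.h>
#include <stddef.h>

/* Function-pointer table abstracting 32-bit vs 64-bit paddr rings. */
struct rx_ring_ops {
	size_t (*ring_size)(size_t nentries);
	void (*set_paddr)(void *ring, int idx, uint64_t paddr);
};

static size_t ring_size_32(size_t n) { return n * sizeof(uint32_t); }
static size_t ring_size_64(size_t n) { return n * sizeof(uint64_t); }

static void set_paddr_32(void *ring, int idx, uint64_t paddr)
{
	((uint32_t *)ring)[idx] = (uint32_t)paddr;   /* __cpu_to_le32() in the driver */
}

static void set_paddr_64(void *ring, int idx, uint64_t paddr)
{
	((uint64_t *)ring)[idx] = paddr;             /* __cpu_to_le64() in the driver */
}

static const struct rx_ring_ops rx_ops_32 = { ring_size_32, set_paddr_32 };
static const struct rx_ring_ops rx_ops_64 = { ring_size_64, set_paddr_64 };

/* Selection mirrors ath10k_htt_set_rx_ops(): choose once based on the
 * target's address width, then call through the table everywhere else. */
static const struct rx_ring_ops *select_rx_ops(int target_64bit)
{
	return target_64bit ? &rx_ops_64 : &rx_ops_32;
}

Routing every paddrs-ring access through such a table is what lets functions like __ath10k_htt_rx_ring_fill_n() stay identical across 32-bit and 64-bit targets in the listing above.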