Lines matching full:ab (whole-word occurrences of the identifier 'ab' in the ath11k DP Tx path). Each entry gives the source line number, the matching line, and the enclosing function; lines that declare 'ab' are also tagged 'local' or 'argument'.
19 struct ath11k_base *ab = arvif->ar->ab; in ath11k_dp_tx_get_encap_type() local
21 if (test_bit(ATH11K_FLAG_RAW_MODE, &ab->dev_flags)) in ath11k_dp_tx_get_encap_type()
89 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx() local
90 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx()
109 if (unlikely(test_bit(ATH11K_FLAG_CRASH_FLUSH, &ar->ab->dev_flags))) in ath11k_dp_tx()
118 ring_selector = ab->hw_params.hw_ops->get_ring_selector(skb); in ath11k_dp_tx()
123 ti.ring_id = ring_selector % ab->hw_params.max_tx_ring; in ath11k_dp_tx()
124 ti.rbm_id = ab->hw_params.hal_params->tcl2wbm_rbm_map[ti.ring_id].rbm_id; in ath11k_dp_tx()
136 if (ring_map == (BIT(ab->hw_params.max_tx_ring) - 1) || in ath11k_dp_tx()
137 !ab->hw_params.tcl_ring_retry) { in ath11k_dp_tx()
138 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
203 if (!test_bit(ATH11K_FLAG_RAW_MODE, &ab->dev_flags)) { in ath11k_dp_tx()
215 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
219 ti.paddr = dma_map_single(ab->dev, skb->data, skb->len, DMA_TO_DEVICE); in ath11k_dp_tx()
220 if (unlikely(dma_mapping_error(ab->dev, ti.paddr))) { in ath11k_dp_tx()
221 atomic_inc(&ab->soc_stats.tx_err.misc_fail); in ath11k_dp_tx()
222 ath11k_warn(ab, "failed to DMA map data Tx buffer\n"); in ath11k_dp_tx()
233 tcl_ring = &ab->hal.srng_list[hal_ring_id]; in ath11k_dp_tx()
237 ath11k_hal_srng_access_begin(ab, tcl_ring); in ath11k_dp_tx()
239 hal_tcl_desc = (void *)ath11k_hal_srng_src_get_next_entry(ab, tcl_ring); in ath11k_dp_tx()
244 ath11k_hal_srng_access_end(ab, tcl_ring); in ath11k_dp_tx()
245 ab->soc_stats.tx_err.desc_na[ti.ring_id]++; in ath11k_dp_tx()
254 if (unlikely(ring_map != (BIT(ab->hw_params.max_tx_ring)) - 1) && in ath11k_dp_tx()
255 ab->hw_params.tcl_ring_retry && ab->hw_params.max_tx_ring > 1) { in ath11k_dp_tx()
263 ath11k_hal_tx_cmd_desc_setup(ab, hal_tcl_desc + in ath11k_dp_tx()
266 ath11k_hal_srng_access_end(ab, tcl_ring); in ath11k_dp_tx()
268 ath11k_dp_shadow_start_timer(ab, tcl_ring, &dp->tx_ring_timer[ti.ring_id]); in ath11k_dp_tx()
272 ath11k_dbg_dump(ab, ATH11K_DBG_DP_TX, NULL, "dp tx msdu: ", in ath11k_dp_tx()
280 dma_unmap_single(ab->dev, ti.paddr, ti.data_len, DMA_TO_DEVICE); in ath11k_dp_tx()
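The dma_map_single()/dma_mapping_error()/dma_unmap_single() matches above show the Tx enqueue path's buffer handling: map the skb payload for device access, count a mapping failure in the misc_fail stat and abort, and unmap again on the error/unwind path. The following is a minimal sketch of that pattern using the standard kernel DMA API; the my_* types, names and the counter layout are hypothetical, not ath11k symbols.

/*
 * Illustrative only: hypothetical helpers mirroring the map/check/unmap
 * pattern in the matches above.
 */
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>
#include <linux/atomic.h>
#include <linux/errno.h>

struct my_tx_stats {
	atomic_t misc_fail;	/* analogous to soc_stats.tx_err.misc_fail */
};

static int my_tx_map(struct device *dev, struct sk_buff *skb,
		     dma_addr_t *paddr, struct my_tx_stats *stats)
{
	/* Map the frame payload for device (Tx) access. */
	*paddr = dma_map_single(dev, skb->data, skb->len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *paddr)) {
		/* Count the failure and let the caller drop the frame. */
		atomic_inc(&stats->misc_fail);
		return -ENOMEM;
	}
	return 0;
}

static void my_tx_unmap(struct device *dev, dma_addr_t paddr, size_t len)
{
	/* Undo the mapping on error paths and on Tx completion. */
	dma_unmap_single(dev, paddr, len, DMA_TO_DEVICE);
}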
294 static void ath11k_dp_tx_free_txbuf(struct ath11k_base *ab, u8 mac_id, in ath11k_dp_tx_free_txbuf() argument
307 ath11k_warn(ab, "tx completion for unknown msdu_id %d\n", in ath11k_dp_tx_free_txbuf()
314 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_free_txbuf()
317 ar = ab->pdevs[mac_id].ar; in ath11k_dp_tx_free_txbuf()
323 ath11k_dp_tx_htt_tx_complete_buf(struct ath11k_base *ab, in ath11k_dp_tx_htt_tx_complete_buf() argument
339 ath11k_warn(ab, "htt tx completion for unknown msdu_id %d\n", in ath11k_dp_tx_htt_tx_complete_buf()
352 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_htt_tx_complete_buf()
373 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
374 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_htt_tx_complete_buf()
376 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_tx_htt_tx_complete_buf()
379 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
383 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_htt_tx_complete_buf()
393 ath11k_dp_tx_process_htt_tx_complete(struct ath11k_base *ab, in ath11k_dp_tx_process_htt_tx_complete() argument
428 ath11k_dp_tx_htt_tx_complete_buf(ab, tx_ring, &ts); in ath11k_dp_tx_process_htt_tx_complete()
433 ath11k_dp_tx_free_txbuf(ab, mac_id, msdu_id, tx_ring); in ath11k_dp_tx_process_htt_tx_complete()
441 ath11k_warn(ab, "Unknown htt tx status %d\n", wbm_status); in ath11k_dp_tx_process_htt_tx_complete()
465 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_update_txcompl() local
477 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_update_txcompl()
478 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_update_txcompl()
480 ath11k_dbg(ab, ATH11K_DBG_DP_TX, in ath11k_dp_tx_update_txcompl()
518 ath11k_warn(ab, "Invalid HT mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
529 ath11k_warn(ab, "Invalid VHT mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
539 ath11k_warn(ab, "Invalid HE mcs index %d\n", mcs); in ath11k_dp_tx_update_txcompl()
559 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_update_txcompl()
568 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_complete_msdu() local
582 dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE); in ath11k_dp_tx_complete_msdu()
584 if (unlikely(!rcu_access_pointer(ab->pdevs_active[ar->pdev_idx]))) { in ath11k_dp_tx_complete_msdu()
613 ab->hw_params.single_pdev_only) { in ath11k_dp_tx_complete_msdu()
636 spin_lock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
637 peer = ath11k_peer_find_by_id(ab, ts->peer_id); in ath11k_dp_tx_complete_msdu()
639 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_tx_complete_msdu()
642 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
658 spin_unlock_bh(&ab->base_lock); in ath11k_dp_tx_complete_msdu()
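The completion paths above (ath11k_dp_tx_htt_tx_complete_buf, ath11k_dp_tx_update_txcompl, ath11k_dp_tx_complete_msdu) share one locking idiom: take ab->base_lock with spin_lock_bh(), look the peer up by the ID carried in the Tx status, bail out with a debug message if it has gone away, and drop the lock once the per-peer stats are updated. Below is a hedged sketch of that idiom with hypothetical my_* types; the real driver uses ath11k_peer_find_by_id() under ab->base_lock.

#include <linux/spinlock.h>
#include <linux/printk.h>
#include <linux/types.h>

struct my_peer { u16 id; };

struct my_ctx {
	spinlock_t base_lock;		/* protects the (hypothetical) peer table */
	struct my_peer *only_peer;
};

/* Stand-in for a real peer-table lookup such as ath11k_peer_find_by_id(). */
static struct my_peer *my_peer_find(struct my_ctx *ctx, u16 peer_id)
{
	return (ctx->only_peer && ctx->only_peer->id == peer_id) ?
		ctx->only_peer : NULL;
}

static void my_tx_complete_update(struct my_ctx *ctx, u16 peer_id)
{
	struct my_peer *peer;

	/* BH-disabling lock: completions run from softirq context. */
	spin_lock_bh(&ctx->base_lock);
	peer = my_peer_find(ctx, peer_id);
	if (!peer) {
		/* Peer vanished while the frame was in flight; skip the stats. */
		pr_debug("dp_tx: no peer for id %u\n", peer_id);
		spin_unlock_bh(&ctx->base_lock);
		return;
	}
	/* ... update per-peer Tx stats here, still under the lock ... */
	spin_unlock_bh(&ctx->base_lock);
}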
663 static inline void ath11k_dp_tx_status_parse(struct ath11k_base *ab, in ath11k_dp_tx_status_parse() argument
694 void ath11k_dp_tx_completion_handler(struct ath11k_base *ab, int ring_id) in ath11k_dp_tx_completion_handler() argument
697 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_completion_handler()
699 struct hal_srng *status_ring = &ab->hal.srng_list[hal_ring_id]; in ath11k_dp_tx_completion_handler()
709 ath11k_hal_srng_access_begin(ab, status_ring); in ath11k_dp_tx_completion_handler()
713 (desc = ath11k_hal_srng_dst_get_next_entry(ab, status_ring))) { in ath11k_dp_tx_completion_handler()
720 if (unlikely((ath11k_hal_srng_dst_peek(ab, status_ring) != NULL) && in ath11k_dp_tx_completion_handler()
724 ath11k_warn(ab, "Unable to process some of the tx_status ring desc because status_fifo is full\n"); in ath11k_dp_tx_completion_handler()
727 ath11k_hal_srng_access_end(ab, status_ring); in ath11k_dp_tx_completion_handler()
738 ath11k_dp_tx_status_parse(ab, tx_status, &ts); in ath11k_dp_tx_completion_handler()
746 ath11k_dp_tx_process_htt_tx_complete(ab, in ath11k_dp_tx_completion_handler()
756 ath11k_warn(ab, "tx completion for unknown msdu_id %d\n", in ath11k_dp_tx_completion_handler()
764 ar = ab->pdevs[mac_id].ar; in ath11k_dp_tx_completion_handler()
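The completion-handler matches show the usual SRNG consumer loop: ath11k_hal_srng_access_begin() snapshots the ring state, entries are pulled with ath11k_hal_srng_dst_get_next_entry() until the ring (or the local status FIFO) is exhausted, and ath11k_hal_srng_access_end() publishes the updated pointers. The skeleton below only illustrates that loop; it assumes the ath11k HAL declarations from the driver headers and the signatures implied by the matches above, and it is not the driver's actual handler.

/*
 * Skeleton only: assumes struct ath11k_base, struct hal_srng (including its
 * per-ring spinlock) and the ath11k_hal_srng_* helpers are declared by the
 * driver's hal.h, and that dst_get_next_entry() returns NULL once the ring
 * is drained, as the matches above imply.
 */
static void example_drain_status_ring(struct ath11k_base *ab,
				      struct hal_srng *status_ring)
{
	u32 *desc;

	spin_lock_bh(&status_ring->lock);
	ath11k_hal_srng_access_begin(ab, status_ring);

	/* Consume completed descriptors until the destination ring is empty. */
	while ((desc = ath11k_hal_srng_dst_get_next_entry(ab, status_ring))) {
		/* ... copy the descriptor out and process the completion ... */
	}

	/* Publish the new ring pointers back to the hardware. */
	ath11k_hal_srng_access_end(ab, status_ring);
	spin_unlock_bh(&status_ring->lock);
}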
773 int ath11k_dp_tx_send_reo_cmd(struct ath11k_base *ab, struct dp_rx_tid *rx_tid, in ath11k_dp_tx_send_reo_cmd() argument
779 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_send_reo_cmd()
784 if (test_bit(ATH11K_FLAG_CRASH_FLUSH, &ab->dev_flags)) in ath11k_dp_tx_send_reo_cmd()
787 cmd_ring = &ab->hal.srng_list[dp->reo_cmd_ring.ring_id]; in ath11k_dp_tx_send_reo_cmd()
788 cmd_num = ath11k_hal_reo_cmd_send(ab, cmd_ring, type, cmd); in ath11k_dp_tx_send_reo_cmd()
822 ath11k_dp_tx_get_ring_id_type(struct ath11k_base *ab, in ath11k_dp_tx_get_ring_id_type() argument
838 if (!ab->hw_params.rx_mac_buf_ring) { in ath11k_dp_tx_get_ring_id_type()
878 ath11k_warn(ab, "Unsupported ring type in DP :%d\n", ring_type); in ath11k_dp_tx_get_ring_id_type()
884 int ath11k_dp_tx_htt_srng_setup(struct ath11k_base *ab, u32 ring_id, in ath11k_dp_tx_htt_srng_setup() argument
888 struct hal_srng *srng = &ab->hal.srng_list[ring_id]; in ath11k_dp_tx_htt_srng_setup()
898 skb = ath11k_htc_alloc_skb(ab, len); in ath11k_dp_tx_htt_srng_setup()
903 ath11k_hal_srng_get_params(ab, srng, &params); in ath11k_dp_tx_htt_srng_setup()
905 hp_addr = ath11k_hal_srng_get_hp_addr(ab, srng); in ath11k_dp_tx_htt_srng_setup()
906 tp_addr = ath11k_hal_srng_get_tp_addr(ab, srng); in ath11k_dp_tx_htt_srng_setup()
908 ret = ath11k_dp_tx_get_ring_id_type(ab, mac_id, ring_id, in ath11k_dp_tx_htt_srng_setup()
935 ret = ath11k_hal_srng_get_entrysize(ab, ring_type); in ath11k_dp_tx_htt_srng_setup()
983 ath11k_dbg(ab, ATH11K_DBG_DP_TX, in ath11k_dp_tx_htt_srng_setup()
988 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_srng_setup()
1002 int ath11k_dp_tx_htt_h2t_ver_req_msg(struct ath11k_base *ab) in ath11k_dp_tx_htt_h2t_ver_req_msg() argument
1004 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ver_req_msg()
1012 skb = ath11k_htc_alloc_skb(ab, len); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1021 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1030 ath11k_warn(ab, "htt target version request timed out\n"); in ath11k_dp_tx_htt_h2t_ver_req_msg()
1035 ath11k_err(ab, "unsupported htt major version %d supported version is %d\n", in ath11k_dp_tx_htt_h2t_ver_req_msg()
1045 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_h2t_ppdu_stats_req() local
1046 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1054 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1055 skb = ath11k_htc_alloc_skb(ab, len); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1068 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ppdu_stats_req()
1078 int ath11k_dp_tx_htt_rx_filter_setup(struct ath11k_base *ab, u32 ring_id, in ath11k_dp_tx_htt_rx_filter_setup() argument
1084 struct hal_srng *srng = &ab->hal.srng_list[ring_id]; in ath11k_dp_tx_htt_rx_filter_setup()
1092 skb = ath11k_htc_alloc_skb(ab, len); in ath11k_dp_tx_htt_rx_filter_setup()
1097 ath11k_hal_srng_get_params(ab, srng, &params); in ath11k_dp_tx_htt_rx_filter_setup()
1099 ret = ath11k_dp_tx_get_ring_id_type(ab, mac_id, ring_id, in ath11k_dp_tx_htt_rx_filter_setup()
1133 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_rx_filter_setup()
1150 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_h2t_ext_stats_req() local
1151 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_tx_htt_h2t_ext_stats_req()
1158 skb = ath11k_htc_alloc_skb(ab, len); in ath11k_dp_tx_htt_h2t_ext_stats_req()
1168 if (ab->hw_params.single_pdev_only) in ath11k_dp_tx_htt_h2t_ext_stats_req()
1183 ret = ath11k_htc_send(&ab->htc, dp->eid, skb); in ath11k_dp_tx_htt_h2t_ext_stats_req()
1185 ath11k_warn(ab, "failed to send htt type stats request: %d", in ath11k_dp_tx_htt_h2t_ext_stats_req()
1197 struct ath11k_base *ab = ar->ab; in ath11k_dp_tx_htt_monitor_mode_ring_config() local
1201 if (ab->hw_params.full_monitor_mode) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1202 ret = ath11k_dp_tx_htt_rx_full_mon_setup(ab, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1205 ath11k_err(ab, "failed to setup full monitor %d\n", ret); in ath11k_dp_tx_htt_monitor_mode_ring_config()
1230 if (ab->hw_params.rxdma1_enable) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1231 ret = ath11k_dp_tx_htt_rx_filter_setup(ar->ab, ring_id, dp->mac_id, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1237 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1239 ret = ath11k_dp_tx_htt_rx_filter_setup(ar->ab, ring_id, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1250 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_tx_htt_monitor_mode_ring_config()
1262 ret = ath11k_dp_tx_htt_rx_filter_setup(ab, ring_id, in ath11k_dp_tx_htt_monitor_mode_ring_config()
1269 if (!ar->ab->hw_params.rxdma1_enable) in ath11k_dp_tx_htt_monitor_mode_ring_config()
1270 mod_timer(&ar->ab->mon_reap_timer, jiffies + in ath11k_dp_tx_htt_monitor_mode_ring_config()
1276 int ath11k_dp_tx_htt_rx_full_mon_setup(struct ath11k_base *ab, int mac_id, in ath11k_dp_tx_htt_rx_full_mon_setup() argument
1283 skb = ath11k_htc_alloc_skb(ab, len); in ath11k_dp_tx_htt_rx_full_mon_setup()
1303 ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb); in ath11k_dp_tx_htt_rx_full_mon_setup()
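Every HTT configuration helper in the listing (srng_setup, ver_req, ppdu_stats_req, rx_filter_setup, ext_stats_req, full_mon_setup) follows the same shape: allocate a command buffer with ath11k_htc_alloc_skb(), fill in the HTT message, hand it to ath11k_htc_send() on the DP endpoint, and free the skb only when the send fails. A hedged sketch of that shape follows; the message body is elided, the function names are taken from the matches above, and their exact signatures are assumed rather than confirmed here.

/*
 * Sketch only: assumes the ath11k HTC declarations (ath11k_htc_alloc_skb(),
 * ath11k_htc_send(), ab->htc, ab->dp.eid) from the driver headers, plus
 * <linux/skbuff.h> for skb_put() and dev_kfree_skb_any().
 */
static int example_send_htt_cmd(struct ath11k_base *ab, int len)
{
	struct sk_buff *skb;
	int ret;

	skb = ath11k_htc_alloc_skb(ab, len);
	if (!skb)
		return -ENOMEM;

	skb_put(skb, len);
	/* ... fill the HTT command in skb->data here ... */

	/* Queue the command on the DP HTC endpoint. */
	ret = ath11k_htc_send(&ab->htc, ab->dp.eid, skb);
	if (ret) {
		/* On success HTC owns the skb; only free it on failure. */
		dev_kfree_skb_any(skb);
		return ret;
	}

	return 0;
}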