Lines matching +full:shadow +full:- +full:interrupts

Code-search results, grouped below by containing function. All matched lines come from the ath12k Copy Engine (CE) driver, drivers/net/wireless/ath/ath12k/ce.c; the number before each line is its position in that file, so gaps between numbers mark elided lines.

File header:
     1  // SPDX-License-Identifier: BSD-3-Clause-Clear
     3   * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
     4   * Copyright (c) 2021-2022 Qualcomm Innovation Center, Inc. All rights reserved.
Pipe-role comments from the file's first CE configuration table:
    12  /* CE0: host->target HTC control and raw streams */
    20  /* CE1: target->host HTT + HTC control */
    29  /* CE2: target->host WMI */
    38  /* CE3: host->target WMI (mac0) */
    46  /* CE4: host->target HTT */
    54  /* CE5: target->host pktlog */
    71  /* CE7: host->target WMI (mac1) */
   127  /* CE14: target->host dbg log */

Pipe-role comments from the file's second CE configuration table:
   146  /* CE0: host->target HTC control and raw streams */
   154  /* CE1: target->host HTT + HTC control */
   163  /* CE2: target->host WMI */
   172  /* CE3: host->target WMI (mac0) */
   180  /* CE4: host->target HTT */
   188  /* CE5: target->host pktlog */
   204  /* CE7: host->target WMI (mac1) */
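
Each commented entry above pairs a CE pipe with an attribute record in the table. As a hedged sketch of what one row looks like (field names follow the ath11k/ath12k ce_attr convention; the CE0 values are illustrative, not read from this listing):

    /* Sketch only: shape and field names assumed from the ath11k/ath12k
     * ce_attr convention; values illustrative. */
    struct ce_attr_sketch {
            unsigned int flags;         /* e.g. CE_ATTR_DIS_INTR */
            unsigned int src_nentries;  /* host->target ring depth, 0 if unused */
            unsigned int src_sz_max;    /* max source buffer size; reused as rx buf_sz */
            unsigned int dest_nentries; /* target->host ring depth, 0 if unused */
            void (*recv_cb)(struct ath12k_base *ab, struct sk_buff *skb);
    };

    /* CE0: host->target HTC control and raw streams (send-only pipe) */
    static const struct ce_attr_sketch ce0_attr = {
            .flags = 0,
            .src_nentries = 16,
            .src_sz_max = 2048,
            .dest_nentries = 0,         /* no dest ring, hence no recv_cb */
    };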

ath12k_ce_rx_buf_enqueue_pipe():
   225  struct ath12k_base *ab = pipe->ab;
   226  struct ath12k_ce_ring *ring = pipe->dest_ring;
   229  unsigned int nentries_mask = ring->nentries_mask;
   233  lockdep_assert_held(&ab->ce.ce_lock);
   235  write_index = ring->write_index;
   237  srng = &ab->hal.srng_list[ring->hal_ring_id];
   239  spin_lock_bh(&srng->lock);
   244  ret = -ENOSPC;
   250  ret = -ENOSPC;
   256  ring->skb[write_index] = skb;
   258  ring->write_index = write_index;
   260  pipe->rx_buf_needed--;
   266  spin_unlock_bh(&srng->lock);
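
The bookkeeping above relies on ring depths being powers of two so the write index wraps with a simple mask; the driver's CE_RING_IDX_INCR macro (seen below at line 706 in ath12k_ce_send()) is an add-and-mask. A standalone demonstration of the arithmetic:

    #include <stdio.h>

    /* Same shape as the driver's CE_RING_IDX_INCR(nentries_mask, idx). */
    #define CE_RING_IDX_INCR(nentries_mask, idx) (((idx) + 1) & (nentries_mask))

    int main(void)
    {
            unsigned int nentries = 8;              /* ring depths are powers of two */
            unsigned int nentries_mask = nentries - 1;
            unsigned int idx = 6;
            int i;

            for (i = 0; i < 4; i++) {
                    printf("%u ", idx);             /* prints: 6 7 0 1 */
                    idx = CE_RING_IDX_INCR(nentries_mask, idx);
            }
            printf("\n");
            return 0;
    }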

ath12k_ce_rx_post_pipe():
   273  struct ath12k_base *ab = pipe->ab;
   278  if (!(pipe->dest_ring || pipe->status_ring))
   281  spin_lock_bh(&ab->ce.ce_lock);
   282  while (pipe->rx_buf_needed) {
   283  skb = dev_alloc_skb(pipe->buf_sz);
   285  ret = -ENOMEM;
   289  WARN_ON_ONCE(!IS_ALIGNED((unsigned long)skb->data, 4));
   291  paddr = dma_map_single(ab->dev, skb->data,
   292          skb->len + skb_tailroom(skb),
   294  if (unlikely(dma_mapping_error(ab->dev, paddr))) {
   297  ret = -EIO;
   301  ATH12K_SKB_RXCB(skb)->paddr = paddr;
   306  dma_unmap_single(ab->dev, paddr,
   307          skb->len + skb_tailroom(skb),
   315  spin_unlock_bh(&ab->ce.ce_lock);
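
The loop fragments above follow the standard streaming-DMA recipe for posting rx buffers: allocate an skb, map data-plus-tailroom for device writes, verify the mapping, and record the DMA address for the later unmap. A hedged, condensed sketch of the same sequence (a stand-in for the driver function, assuming <linux/skbuff.h> and <linux/dma-mapping.h>):

    /* Sketch of the rx-buffer posting pattern above; not the literal
     * driver function, error handling condensed. */
    static int post_one_rx_buf(struct device *dev, unsigned int buf_sz,
                               struct sk_buff **out)
    {
            struct sk_buff *skb = dev_alloc_skb(buf_sz);
            dma_addr_t paddr;

            if (!skb)
                    return -ENOMEM;

            /* Map the whole usable area: current data plus tailroom. */
            paddr = dma_map_single(dev, skb->data,
                                   skb->len + skb_tailroom(skb), DMA_FROM_DEVICE);
            if (dma_mapping_error(dev, paddr)) {
                    dev_kfree_skb_any(skb);
                    return -EIO;
            }

            /* The driver stashes paddr in the skb control block
             * (ATH12K_SKB_RXCB(skb)->paddr) so it can unmap on completion. */
            *out = skb;
            return 0;
    }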

ath12k_ce_completed_recv_next():
   322  struct ath12k_base *ab = pipe->ab;
   329  spin_lock_bh(&ab->ce.ce_lock);
   331  sw_index = pipe->dest_ring->sw_index;
   332  nentries_mask = pipe->dest_ring->nentries_mask;
   334  srng = &ab->hal.srng_list[pipe->status_ring->hal_ring_id];
   336  spin_lock_bh(&srng->lock);
   342  ret = -EIO;
   348  ret = -EIO;
   352  *skb = pipe->dest_ring->skb[sw_index];
   353  pipe->dest_ring->skb[sw_index] = NULL;
   356  pipe->dest_ring->sw_index = sw_index;
   358  pipe->rx_buf_needed++;
   362  spin_unlock_bh(&srng->lock);
   364  spin_unlock_bh(&ab->ce.ce_lock);
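
Both completion paths in this file nest the per-ring HAL lock inside the global CE lock, always in that order, which keeps them deadlock-free against each other. A minimal sketch of the discipline, with the driver's types assumed:

    /* Sketch: ab->ce.ce_lock (outer, CE bookkeeping) is always taken
     * before the per-ring srng->lock (inner, HAL ring state). */
    static void ce_ring_op(struct ath12k_base *ab, struct hal_srng *srng)
    {
            spin_lock_bh(&ab->ce.ce_lock);
            spin_lock_bh(&srng->lock);

            /* ... move one entry between the skb table and the HAL ring ... */

            spin_unlock_bh(&srng->lock);
            spin_unlock_bh(&ab->ce.ce_lock);
    }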

ath12k_ce_recv_process_cb():
   371  struct ath12k_base *ab = pipe->ab;
   379  max_nbytes = skb->len + skb_tailroom(skb);
   380  dma_unmap_single(ab->dev, ATH12K_SKB_RXCB(skb)->paddr,
   396          pipe->pipe_num, skb->len);
   397  pipe->recv_cb(ab, skb);
   401  if (ret && ret != -ENOSPC) {
   403          pipe->pipe_num, ret);
   404  mod_timer(&ab->rx_replenish_retry,

ath12k_ce_completed_send_next():
   411  struct ath12k_base *ab = pipe->ab;
   418  spin_lock_bh(&ab->ce.ce_lock);
   420  sw_index = pipe->src_ring->sw_index;
   421  nentries_mask = pipe->src_ring->nentries_mask;
   423  srng = &ab->hal.srng_list[pipe->src_ring->hal_ring_id];
   425  spin_lock_bh(&srng->lock);
   431  skb = ERR_PTR(-EIO);
   435  skb = pipe->src_ring->skb[sw_index];
   437  pipe->src_ring->skb[sw_index] = NULL;
   440  pipe->src_ring->sw_index = sw_index;
   443  spin_unlock_bh(&srng->lock);
   445  spin_unlock_bh(&ab->ce.ce_lock);

ath12k_ce_send_done_cb():
   452  struct ath12k_base *ab = pipe->ab;
   459  dma_unmap_single(ab->dev, ATH12K_SKB_CB(skb)->paddr, skb->len,

ath12k_ce_srng_msi_ring_params_setup():
   485  ring_params->msi_addr = addr_lo;
   486  ring_params->msi_addr |= (dma_addr_t)(((uint64_t)addr_hi) << 32);
   487  ring_params->msi_data = (msi_data_idx % msi_data_count) + msi_data_start;
   488  ring_params->flags |= HAL_SRNG_FLAGS_MSI_INTR;
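
These four lines compose a 64-bit MSI target address from the low/high halves and spread the available MSI data values across CE rings round-robin, then flag the srng for MSI interrupts. The arithmetic in isolation, with illustrative values:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t addr_lo = 0xfee00000;          /* illustrative values only */
            uint32_t addr_hi = 0x00000001;
            unsigned int msi_data_start = 32;       /* first vector's data value */
            unsigned int msi_data_count = 3;        /* vectors reserved for CEs */
            unsigned int msi_data_idx;

            /* 64-bit MSI address from the lo/hi halves (lines 485-486). */
            uint64_t msi_addr = (uint64_t)addr_lo | ((uint64_t)addr_hi << 32);

            /* Rings share the available vectors round-robin (line 487). */
            for (msi_data_idx = 0; msi_data_idx < 5; msi_data_idx++)
                    printf("ring %u -> msi_data %u\n", msi_data_idx,
                           msi_data_idx % msi_data_count + msi_data_start);

            printf("msi_addr = 0x%llx\n", (unsigned long long)msi_addr);
            return 0;
    }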

ath12k_ce_init_ring():
   498  params.ring_base_paddr = ce_ring->base_addr_ce_space;
   499  params.ring_base_vaddr = ce_ring->base_addr_owner_space;
   500  params.num_entries = ce_ring->nentries;
   502  if (!(CE_ATTR_DIS_INTR & ab->hw_params->host_ce_config[ce_id].flags))
   507  if (!(CE_ATTR_DIS_INTR & ab->hw_params->host_ce_config[ce_id].flags))
   511  params.max_buffer_len = ab->hw_params->host_ce_config[ce_id].src_sz_max;
   512  if (!(ab->hw_params->host_ce_config[ce_id].flags & CE_ATTR_DIS_INTR)) {
   515  params.low_threshold = ce_ring->nentries - 3;
   519  if (!(ab->hw_params->host_ce_config[ce_id].flags & CE_ATTR_DIS_INTR)) {
   526  return -EINVAL;
   538  ce_ring->hal_ring_id = ret;

ath12k_ce_alloc_ring():
   551  return ERR_PTR(-ENOMEM);
   553  ce_ring->nentries = nentries;
   554  ce_ring->nentries_mask = nentries - 1;
   559  ce_ring->base_addr_owner_space_unaligned =
   560          dma_alloc_coherent(ab->dev,
   563  if (!ce_ring->base_addr_owner_space_unaligned) {
   565  return ERR_PTR(-ENOMEM);
   568  ce_ring->base_addr_ce_space_unaligned = base_addr;
   570  ce_ring->base_addr_owner_space =
   571          PTR_ALIGN(ce_ring->base_addr_owner_space_unaligned,
   574  ce_ring->base_addr_ce_space = ALIGN(ce_ring->base_addr_ce_space_unaligned,
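
The allocation path over-allocates by the ring-alignment slack and then rounds both the CPU pointer (PTR_ALIGN) and the DMA address (ALIGN) up to the descriptor-ring boundary. A standalone equivalent of that rounding (CE_DESC_RING_ALIGN's actual value is not shown in this listing):

    #include <stdint.h>
    #include <stdio.h>

    /* Round x up to the next multiple of a (a must be a power of two);
     * the same arithmetic as the kernel's ALIGN()/PTR_ALIGN(). */
    #define ALIGN_UP(x, a) (((x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

    int main(void)
    {
            uintptr_t base = 0x1004;        /* as if returned by the allocator */
            unsigned int ring_align = 8;    /* stand-in for CE_DESC_RING_ALIGN */

            /* The driver allocates nentries * desc_sz + alignment slack, so
             * the rounded-up base still leaves room for the whole ring. */
            printf("0x%lx -> 0x%lx\n", (unsigned long)base,
                   (unsigned long)ALIGN_UP(base, ring_align));
            return 0;
    }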

ath12k_ce_alloc_pipe():
   582  struct ath12k_ce_pipe *pipe = &ab->ce.ce_pipe[ce_id];
   583  const struct ce_attr *attr = &ab->hw_params->host_ce_config[ce_id];
   588  pipe->attr_flags = attr->flags;
   590  if (attr->src_nentries) {
   591  pipe->send_cb = ath12k_ce_send_done_cb;
   592  nentries = roundup_pow_of_two(attr->src_nentries);
   597  pipe->src_ring = ring;
   600  if (attr->dest_nentries) {
   601  pipe->recv_cb = attr->recv_cb;
   602  nentries = roundup_pow_of_two(attr->dest_nentries);
   607  pipe->dest_ring = ring;
   613  pipe->status_ring = ring;

ath12k_ce_per_engine_service():
   621  struct ath12k_ce_pipe *pipe = &ab->ce.ce_pipe[ce_id];
   623  if (pipe->send_cb)
   624  pipe->send_cb(pipe);
   626  if (pipe->recv_cb)

ath12k_ce_poll_send_completed():
   632  struct ath12k_ce_pipe *pipe = &ab->ce.ce_pipe[pipe_id];
   634  if ((pipe->attr_flags & CE_ATTR_DIS_INTR) && pipe->send_cb)
   635  pipe->send_cb(pipe);

ath12k_ce_send():
   641  struct ath12k_ce_pipe *pipe = &ab->ce.ce_pipe[pipe_id];
   651   * the CE has interrupts disabled and the number of used entries is more than the
   654  if (pipe->attr_flags & CE_ATTR_DIS_INTR) {
   655  spin_lock_bh(&ab->ce.ce_lock);
   656  write_index = pipe->src_ring->write_index;
   658  sw_index = pipe->src_ring->sw_index;
   661  num_used = write_index - sw_index;
   663  num_used = pipe->src_ring->nentries - sw_index +
   666  spin_unlock_bh(&ab->ce.ce_lock);
   669  ath12k_ce_poll_send_completed(ab, pipe->pipe_num);
   672  if (test_bit(ATH12K_FLAG_CRASH_FLUSH, &ab->dev_flags))
   673  return -ESHUTDOWN;
   675  spin_lock_bh(&ab->ce.ce_lock);
   677  write_index = pipe->src_ring->write_index;
   678  nentries_mask = pipe->src_ring->nentries_mask;
   680  srng = &ab->hal.srng_list[pipe->src_ring->hal_ring_id];
   682  spin_lock_bh(&srng->lock);
   688  ret = -ENOBUFS;
   695  ret = -ENOBUFS;
   699  if (pipe->attr_flags & CE_ATTR_BYTE_SWAP_DATA)
   702  ath12k_hal_ce_src_set_desc(desc, ATH12K_SKB_CB(skb)->paddr,
   703          skb->len, transfer_id, byte_swap_data);
   705  pipe->src_ring->skb[write_index] = skb;
   706  pipe->src_ring->write_index = CE_RING_IDX_INCR(nentries_mask,
   712  spin_unlock_bh(&srng->lock);
   714  spin_unlock_bh(&ab->ce.ce_lock);
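
For pipes created with CE_ATTR_DIS_INTR the hardware raises no send-completion interrupt, so ath12k_ce_send() counts in-flight source descriptors and polls completions once the count crosses the low-water mark. The wraparound count from lines 661 and 663, standalone:

    #include <stdio.h>

    /* In-flight descriptor count when write_index may have wrapped past
     * sw_index (lines 661 and 663). */
    static unsigned int num_used(unsigned int nentries,
                                 unsigned int write_index,
                                 unsigned int sw_index)
    {
            if (write_index >= sw_index)
                    return write_index - sw_index;
            return nentries - sw_index + write_index;
    }

    int main(void)
    {
            printf("%u\n", num_used(8, 5, 2));      /* 3 in flight */
            printf("%u\n", num_used(8, 1, 6));      /* wrapped: 8 - 6 + 1 = 3 */
            return 0;
    }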

ath12k_ce_rx_pipe_cleanup():
   721  struct ath12k_base *ab = pipe->ab;
   722  struct ath12k_ce_ring *ring = pipe->dest_ring;
   726  if (!(ring && pipe->buf_sz))
   729  for (i = 0; i < ring->nentries; i++) {
   730  skb = ring->skb[i];
   734  ring->skb[i] = NULL;
   735  dma_unmap_single(ab->dev, ATH12K_SKB_RXCB(skb)->paddr,
   736          skb->len + skb_tailroom(skb), DMA_FROM_DEVICE);

ath12k_ce_cleanup_pipes():
   746  for (pipe_num = 0; pipe_num < ab->hw_params->ce_count; pipe_num++) {
   747  pipe = &ab->ce.ce_pipe[pipe_num];
   750  /* Cleanup any src CEs which have interrupts disabled */

ath12k_ce_rx_post_buf():
   763  for (i = 0; i < ab->hw_params->ce_count; i++) {
   764  pipe = &ab->ce.ce_pipe[i];
   767  if (ret == -ENOSPC)
   772  mod_timer(&ab->rx_replenish_retry,
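
In this posting loop -ENOSPC (ring momentarily full) is tolerated, while other failures arm ab->rx_replenish_retry so posting runs again later instead of spinning. A hedged sketch of that back-off pattern (the interval constant and function names here are illustrative, not the driver's):

    #include <linux/timer.h>
    #include <linux/jiffies.h>

    #define RX_POST_RETRY_JIFFIES msecs_to_jiffies(50)  /* illustrative */

    /* On a transient posting failure: back off and retry later. The real
     * timer is set up elsewhere with timer_setup(), and its callback
     * re-runs the rx-posting loop. */
    static void arm_rx_replenish_retry(struct timer_list *retry_timer)
    {
            mod_timer(retry_timer, jiffies + RX_POST_RETRY_JIFFIES);
    }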

ath12k_ce_shadow_config():
   791  for (i = 0; i < ab->hw_params->ce_count; i++) {
   792  if (ab->hw_params->host_ce_config[i].src_nentries)
   795  if (ab->hw_params->host_ce_config[i].dest_nentries) {

ath12k_ce_get_shadow_config():
   805  if (!ab->hw_params->supports_shadow_regs)
   810  /* shadow is already configured */
   814  /* shadow isn't configured yet, configure now.
   815   * non-CE srngs are configured first, then
   821  /* get the shadow configuration */
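
Shadow registers let the host update ring pointers without an MMIO write straight into target register space, which may require waking the target over the bus; the host writes a shadow location that the hardware propagates. A purely conceptual sketch, not the driver's HAL structures:

    /* Conceptual sketch only. One entry pairs a real ring-pointer register
     * with the shadow address the host writes instead; the table the HAL
     * builds (one entry per configured srng) is handed to firmware via QMI
     * so both sides agree on the mapping. */
    struct shadow_reg_map_sketch {
            u32 ring_reg_offset;    /* real head/tail pointer register */
            u32 shadow_reg_offset;  /* what the host actually writes */
    };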

ath12k_ce_init_pipes():
   831  ath12k_ce_get_shadow_config(ab, &ab->qmi.ce_cfg.shadow_reg_v3,
   832          &ab->qmi.ce_cfg.shadow_reg_v3_len);
   834  for (i = 0; i < ab->hw_params->ce_count; i++) {
   835  pipe = &ab->ce.ce_pipe[i];
   837  if (pipe->src_ring) {
   838  ret = ath12k_ce_init_ring(ab, pipe->src_ring, i,
   847  pipe->src_ring->write_index = 0;
   848  pipe->src_ring->sw_index = 0;
   851  if (pipe->dest_ring) {
   852  ret = ath12k_ce_init_ring(ab, pipe->dest_ring, i,
   861  pipe->rx_buf_needed = pipe->dest_ring->nentries ?
   862          pipe->dest_ring->nentries - 2 : 0;
   864  pipe->dest_ring->write_index = 0;
   865  pipe->dest_ring->sw_index = 0;
   868  if (pipe->status_ring) {
   869  ret = ath12k_ce_init_ring(ab, pipe->status_ring, i,
   878  pipe->status_ring->write_index = 0;
   879  pipe->status_ring->sw_index = 0;
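
Note lines 861-862: the rx pipe never posts more than nentries - 2 buffers, so the destination ring is never completely full. Leaving slack is the classic way to keep a full ring distinguishable from an empty one when only masked indices are compared; the listing does not state the motivation, so treat that reading as background. A standalone illustration of the ambiguity being avoided:

    #include <stdio.h>

    int main(void)
    {
            unsigned int nentries = 4, mask = nentries - 1;
            unsigned int write = 0, read = 0, i;

            for (i = 0; i < nentries; i++)  /* fill every slot */
                    write = (write + 1) & mask;

            /* write == read now, exactly as when the ring is empty. */
            printf("write=%u read=%u\n", write, read);
            return 0;
    }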

ath12k_ce_free_pipes():
   892  for (i = 0; i < ab->hw_params->ce_count; i++) {
   893  pipe = &ab->ce.ce_pipe[i];
   895  if (pipe->src_ring) {
   897  dma_free_coherent(ab->dev,
   898          pipe->src_ring->nentries * desc_sz +
   900          pipe->src_ring->base_addr_owner_space,
   901          pipe->src_ring->base_addr_ce_space);
   902  kfree(pipe->src_ring);
   903  pipe->src_ring = NULL;
   906  if (pipe->dest_ring) {
   908  dma_free_coherent(ab->dev,
   909          pipe->dest_ring->nentries * desc_sz +
   911          pipe->dest_ring->base_addr_owner_space,
   912          pipe->dest_ring->base_addr_ce_space);
   913  kfree(pipe->dest_ring);
   914  pipe->dest_ring = NULL;
   917  if (pipe->status_ring) {
   920  dma_free_coherent(ab->dev,
   921          pipe->status_ring->nentries * desc_sz +
   923          pipe->status_ring->base_addr_owner_space,
   924          pipe->status_ring->base_addr_ce_space);
   925  kfree(pipe->status_ring);
   926  pipe->status_ring = NULL;

ath12k_ce_alloc_pipes():
   938  spin_lock_init(&ab->ce.ce_lock);
   940  for (i = 0; i < ab->hw_params->ce_count; i++) {
   941  attr = &ab->hw_params->host_ce_config[i];
   942  pipe = &ab->ce.ce_pipe[i];
   943  pipe->pipe_num = i;
   944  pipe->ab = ab;
   945  pipe->buf_sz = attr->src_sz_max;

ath12k_ce_get_attr_flags():
   960  if (ce_id >= ab->hw_params->ce_count)
   961  return -EINVAL;
   963  return ab->hw_params->host_ce_config[ce_id].flags;