Lines Matching +full:modem +full:- +full:remoteproc

1 // SPDX-License-Identifier: ISC
15 #include <linux/remoteproc/qcom_rproc.h>
46 "vdd-0.8-cx-mx",
47 "vdd-1.8-xo",
48 "vdd-1.3-rfa",
49 "vdd-3.3-ch0",
50 "vdd-3.3-ch1",
136 /* CE0: host->target HTC control streams */
145 /* CE1: target->host HTT + HTC control */
154 /* CE2: target->host WMI */
163 /* CE3: host->target WMI */
172 /* CE4: host->target HTT */
181 /* CE5: target->host HTT (ipa_uc->target) */
214 /* CE9: target->host HTT */
223 /* CE10: target->host HTT */
232 /* CE11: target->host PKTLOG */
243 /* CE0: host->target HTC control and raw streams */
253 /* CE1: target->host HTT + HTC control */
263 /* CE2: target->host WMI */
273 /* CE3: host->target WMI */
283 /* CE4: host->target HTT */
293 /* CE5: target->host HTT (HIF->HTT) */
333 /* CE9: target->host HTT */
343 /* CE10: target->host HTT */
367 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
372 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
377 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
382 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
387 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
392 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
397 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
402 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
407 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
412 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
417 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
422 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
427 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
432 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
437 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
442 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
450 { /* in = DL = target -> host */
452 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
455 { /* in = DL = target -> host */
457 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
460 { /* in = DL = target -> host pktlog */
462 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
478 iowrite32(value, ar_snoc->mem + offset); in ath10k_snoc_write32()
486 val = ioread32(ar_snoc->mem + offset); in ath10k_snoc_read32()
493 struct ath10k_ce_pipe *ce_pipe = pipe->ce_hdl; in __ath10k_snoc_rx_post_buf()
494 struct ath10k *ar = pipe->hif_ce_state; in __ath10k_snoc_rx_post_buf()
500 skb = dev_alloc_skb(pipe->buf_sz); in __ath10k_snoc_rx_post_buf()
502 return -ENOMEM; in __ath10k_snoc_rx_post_buf()
504 WARN_ONCE((unsigned long)skb->data & 3, "unaligned skb"); in __ath10k_snoc_rx_post_buf()
506 paddr = dma_map_single(ar->dev, skb->data, in __ath10k_snoc_rx_post_buf()
507 skb->len + skb_tailroom(skb), in __ath10k_snoc_rx_post_buf()
509 if (unlikely(dma_mapping_error(ar->dev, paddr))) { in __ath10k_snoc_rx_post_buf()
512 return -EIO; in __ath10k_snoc_rx_post_buf()
515 ATH10K_SKB_RXCB(skb)->paddr = paddr; in __ath10k_snoc_rx_post_buf()
517 spin_lock_bh(&ce->ce_lock); in __ath10k_snoc_rx_post_buf()
518 ret = ce_pipe->ops->ce_rx_post_buf(ce_pipe, skb, paddr); in __ath10k_snoc_rx_post_buf()
519 spin_unlock_bh(&ce->ce_lock); in __ath10k_snoc_rx_post_buf()
521 dma_unmap_single(ar->dev, paddr, skb->len + skb_tailroom(skb), in __ath10k_snoc_rx_post_buf()
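
A minimal consolidated sketch (not verbatim driver code) of the __ath10k_snoc_rx_post_buf() fragments above (lines 493-521): allocate an skb sized to the pipe, DMA-map it toward the device, record the bus address in the skb control block, and hand it to the copy engine under ce->ce_lock, unmapping and freeing again if the ring refuses it. It assumes kernel context and the driver-internal types from core.h/ce.h/snoc.h; the DMA_FROM_DEVICE direction and the function name snoc_rx_post_one() are assumptions not shown in the matched lines.

static int snoc_rx_post_one(struct ath10k *ar, struct ath10k_snoc_pipe *pipe)
{
        struct ath10k_ce *ce = ar->ce_priv;             /* set up at line 1751 */
        struct ath10k_ce_pipe *ce_pipe = pipe->ce_hdl;
        struct sk_buff *skb;
        dma_addr_t paddr;
        int ret;

        skb = dev_alloc_skb(pipe->buf_sz);
        if (!skb)
                return -ENOMEM;

        paddr = dma_map_single(ar->dev, skb->data,
                               skb->len + skb_tailroom(skb), DMA_FROM_DEVICE);
        if (unlikely(dma_mapping_error(ar->dev, paddr))) {
                dev_kfree_skb_any(skb);
                return -EIO;
        }

        ATH10K_SKB_RXCB(skb)->paddr = paddr;            /* remembered for unmap on completion */

        spin_lock_bh(&ce->ce_lock);
        ret = ce_pipe->ops->ce_rx_post_buf(ce_pipe, skb, paddr);
        spin_unlock_bh(&ce->ce_lock);
        if (ret) {                                      /* ring full or stopped: undo */
                dma_unmap_single(ar->dev, paddr,
                                 skb->len + skb_tailroom(skb), DMA_FROM_DEVICE);
                dev_kfree_skb_any(skb);
        }

        return ret;
}

The ath10k_snoc_rx_post_pipe() fragments below repeat this for the free ring entries and fall back to the rx_post_retry timer when posting fails.
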
532 struct ath10k *ar = pipe->hif_ce_state; in ath10k_snoc_rx_post_pipe()
535 struct ath10k_ce_pipe *ce_pipe = pipe->ce_hdl; in ath10k_snoc_rx_post_pipe()
538 if (pipe->buf_sz == 0) in ath10k_snoc_rx_post_pipe()
541 if (!ce_pipe->dest_ring) in ath10k_snoc_rx_post_pipe()
544 spin_lock_bh(&ce->ce_lock); in ath10k_snoc_rx_post_pipe()
546 spin_unlock_bh(&ce->ce_lock); in ath10k_snoc_rx_post_pipe()
547 while (num--) { in ath10k_snoc_rx_post_pipe()
550 if (ret == -ENOSPC) in ath10k_snoc_rx_post_pipe()
553 mod_timer(&ar_snoc->rx_post_retry, jiffies + in ath10k_snoc_rx_post_pipe()
566 ath10k_snoc_rx_post_pipe(&ar_snoc->pipe_info[i]); in ath10k_snoc_rx_post()
573 struct ath10k *ar = ce_state->ar; in ath10k_snoc_process_rx_cb()
575 struct ath10k_snoc_pipe *pipe_info = &ar_snoc->pipe_info[ce_state->id]; in ath10k_snoc_process_rx_cb()
585 max_nbytes = skb->len + skb_tailroom(skb); in ath10k_snoc_process_rx_cb()
586 dma_unmap_single(ar->dev, ATH10K_SKB_RXCB(skb)->paddr, in ath10k_snoc_process_rx_cb()
602 ce_state->id, skb->len); in ath10k_snoc_process_rx_cb()
618 * HTT Rx (target->host) is processed. in ath10k_snoc_htt_htc_rx_cb()
620 ath10k_ce_per_engine_service(ce_state->ar, CE_POLL_PIPE); in ath10k_snoc_htt_htc_rx_cb()
641 ath10k_ce_per_engine_service(ce_state->ar, CE_POLL_PIPE); in ath10k_snoc_htt_rx_cb()
649 struct ath10k *ar = ar_snoc->ar; in ath10k_snoc_rx_replenish_retry()
656 struct ath10k *ar = ce_state->ar; in ath10k_snoc_htc_tx_cb()
674 struct ath10k *ar = ce_state->ar; in ath10k_snoc_htt_tx_cb()
681 dma_unmap_single(ar->dev, ATH10K_SKB_CB(skb)->paddr, in ath10k_snoc_htt_tx_cb()
682 skb->len, DMA_TO_DEVICE); in ath10k_snoc_htt_tx_cb()
696 snoc_pipe = &ar_snoc->pipe_info[pipe_id]; in ath10k_snoc_hif_tx_sg()
697 ce_pipe = snoc_pipe->ce_hdl; in ath10k_snoc_hif_tx_sg()
698 spin_lock_bh(&ce->ce_lock); in ath10k_snoc_hif_tx_sg()
700 for (i = 0; i < n_items - 1; i++) { in ath10k_snoc_hif_tx_sg()
728 spin_unlock_bh(&ce->ce_lock); in ath10k_snoc_hif_tx_sg()
733 for (; i > 0; i--) in ath10k_snoc_hif_tx_sg()
736 spin_unlock_bh(&ce->ce_lock); in ath10k_snoc_hif_tx_sg()
743 target_info->version = ATH10K_HW_WCN3990; in ath10k_snoc_hif_get_target_info()
744 target_info->type = ATH10K_HW_WCN3990; in ath10k_snoc_hif_get_target_info()
755 return ath10k_ce_num_free_src_entries(ar_snoc->pipe_info[pipe].ce_hdl); in ath10k_snoc_hif_get_free_queue_number()
787 if (__le32_to_cpu(entry->service_id) != service_id) in ath10k_snoc_hif_map_service_to_pipe()
790 switch (__le32_to_cpu(entry->pipedir)) { in ath10k_snoc_hif_map_service_to_pipe()
795 *dl_pipe = __le32_to_cpu(entry->pipenum); in ath10k_snoc_hif_map_service_to_pipe()
800 *ul_pipe = __le32_to_cpu(entry->pipenum); in ath10k_snoc_hif_map_service_to_pipe()
806 *dl_pipe = __le32_to_cpu(entry->pipenum); in ath10k_snoc_hif_map_service_to_pipe()
807 *ul_pipe = __le32_to_cpu(entry->pipenum); in ath10k_snoc_hif_map_service_to_pipe()
815 return -ENOENT; in ath10k_snoc_hif_map_service_to_pipe()
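
The map entries at lines 367-462 and the lookup fragments at lines 787-815 together implement HTC service routing: each {service_id, pipedir, pipenum} record assigns a copy-engine pipe to a service for one direction. A sketch of that lookup under a locally defined entry type; the field order and the PIPEDIR_INOUT case (suggested by the dual assignment at lines 806-807) are assumptions, while PIPEDIR_IN/PIPEDIR_OUT appear in the listing.

struct svc_to_pipe {
        __le32 service_id;      /* ATH10K_HTC_SVC_ID_* */
        __le32 pipedir;         /* PIPEDIR_IN / PIPEDIR_OUT / PIPEDIR_INOUT */
        __le32 pipenum;         /* copy-engine pipe number */
};

static int map_service_to_pipe(const struct svc_to_pipe *map, size_t n,
                               u16 service_id, u8 *ul_pipe, u8 *dl_pipe)
{
        bool ul_set = false, dl_set = false;
        size_t i;

        for (i = 0; i < n; i++) {
                const struct svc_to_pipe *entry = &map[i];

                if (__le32_to_cpu(entry->service_id) != service_id)
                        continue;

                switch (__le32_to_cpu(entry->pipedir)) {
                case PIPEDIR_IN:        /* DL = target -> host */
                        *dl_pipe = __le32_to_cpu(entry->pipenum);
                        dl_set = true;
                        break;
                case PIPEDIR_OUT:       /* UL = host -> target */
                        *ul_pipe = __le32_to_cpu(entry->pipenum);
                        ul_set = true;
                        break;
                case PIPEDIR_INOUT:     /* both directions on one pipe */
                        *dl_pipe = __le32_to_cpu(entry->pipenum);
                        *ul_pipe = __le32_to_cpu(entry->pipenum);
                        dl_set = ul_set = true;
                        break;
                }
        }

        return (ul_set || dl_set) ? 0 : -ENOENT;
}
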
836 disable_irq(ar_snoc->ce_irqs[id].irq_line); in ath10k_snoc_irq_disable()
845 enable_irq(ar_snoc->ce_irqs[id].irq_line); in ath10k_snoc_irq_enable()
856 ar = snoc_pipe->hif_ce_state; in ath10k_snoc_rx_pipe_cleanup()
857 ce_pipe = snoc_pipe->ce_hdl; in ath10k_snoc_rx_pipe_cleanup()
858 ce_ring = ce_pipe->dest_ring; in ath10k_snoc_rx_pipe_cleanup()
863 if (!snoc_pipe->buf_sz) in ath10k_snoc_rx_pipe_cleanup()
866 for (i = 0; i < ce_ring->nentries; i++) { in ath10k_snoc_rx_pipe_cleanup()
867 skb = ce_ring->per_transfer_context[i]; in ath10k_snoc_rx_pipe_cleanup()
871 ce_ring->per_transfer_context[i] = NULL; in ath10k_snoc_rx_pipe_cleanup()
873 dma_unmap_single(ar->dev, ATH10K_SKB_RXCB(skb)->paddr, in ath10k_snoc_rx_pipe_cleanup()
874 skb->len + skb_tailroom(skb), in ath10k_snoc_rx_pipe_cleanup()
888 ar = snoc_pipe->hif_ce_state; in ath10k_snoc_tx_pipe_cleanup()
889 ce_pipe = snoc_pipe->ce_hdl; in ath10k_snoc_tx_pipe_cleanup()
890 ce_ring = ce_pipe->src_ring; in ath10k_snoc_tx_pipe_cleanup()
895 if (!snoc_pipe->buf_sz) in ath10k_snoc_tx_pipe_cleanup()
898 for (i = 0; i < ce_ring->nentries; i++) { in ath10k_snoc_tx_pipe_cleanup()
899 skb = ce_ring->per_transfer_context[i]; in ath10k_snoc_tx_pipe_cleanup()
903 ce_ring->per_transfer_context[i] = NULL; in ath10k_snoc_tx_pipe_cleanup()
915 timer_delete_sync(&ar_snoc->rx_post_retry); in ath10k_snoc_buffer_cleanup()
917 pipe_info = &ar_snoc->pipe_info[pipe_num]; in ath10k_snoc_buffer_cleanup()
925 if (!test_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags)) in ath10k_snoc_hif_stop()
937 bitmap_clear(ar_snoc->pending_ce_irqs, 0, CE_COUNT_MAX); in ath10k_snoc_hif_start()
939 dev_set_threaded(ar->napi_dev, true); in ath10k_snoc_hif_start()
942 if (!test_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags)) in ath10k_snoc_hif_start()
946 clear_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags); in ath10k_snoc_hif_start()
1012 return -EINVAL; in ath10k_snoc_wlan_enable()
1026 ret = regulator_bulk_enable(ar_snoc->num_vregs, ar_snoc->vregs); in ath10k_hw_power_on()
1030 ret = clk_bulk_prepare_enable(ar_snoc->num_clks, ar_snoc->clks); in ath10k_hw_power_on()
1037 regulator_bulk_disable(ar_snoc->num_vregs, ar_snoc->vregs); in ath10k_hw_power_on()
1047 clk_bulk_disable_unprepare(ar_snoc->num_clks, ar_snoc->clks); in ath10k_hw_power_off()
1049 return regulator_bulk_disable(ar_snoc->num_vregs, ar_snoc->vregs); in ath10k_hw_power_off()
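
A sketch of the power sequencing visible in the ath10k_hw_power_on()/ath10k_hw_power_off() fragments above: regulators come up before the clocks, a clock failure unwinds the regulators, and power-off releases the resources in reverse order. The function names below are illustrative; the bulk arrays are the devm-managed ones set up in probe (lines 1773-1800).

static int hw_power_on(struct regulator_bulk_data *vregs, int num_vregs,
                       struct clk_bulk_data *clks, int num_clks)
{
        int ret;

        ret = regulator_bulk_enable(num_vregs, vregs);
        if (ret)
                return ret;

        ret = clk_bulk_prepare_enable(num_clks, clks);
        if (ret)
                regulator_bulk_disable(num_vregs, vregs);   /* undo on failure */

        return ret;
}

static int hw_power_off(struct regulator_bulk_data *vregs, int num_vregs,
                        struct clk_bulk_data *clks, int num_clks)
{
        clk_bulk_disable_unprepare(num_clks, clks);
        return regulator_bulk_disable(num_vregs, vregs);
}
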
1062 if (!test_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags) || in ath10k_snoc_wlan_disable()
1063 !test_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags)) in ath10k_snoc_wlan_disable()
1082 __func__, ar->state); in ath10k_snoc_hif_power_up()
1137 if (!device_may_wakeup(ar->dev)) in ath10k_snoc_hif_suspend()
1138 return -EPERM; in ath10k_snoc_hif_suspend()
1140 ret = enable_irq_wake(ar_snoc->ce_irqs[ATH10K_SNOC_WAKE_IRQ].irq_line); in ath10k_snoc_hif_suspend()
1156 if (!device_may_wakeup(ar->dev)) in ath10k_snoc_hif_resume()
1157 return -EPERM; in ath10k_snoc_hif_resume()
1159 ret = disable_irq_wake(ar_snoc->ce_irqs[ATH10K_SNOC_WAKE_IRQ].irq_line); in ath10k_snoc_hif_resume()
1203 if (ar_snoc->ce_irqs[i].irq_line == irq) in ath10k_snoc_get_ce_id_from_irq()
1208 return -EINVAL; in ath10k_snoc_get_ce_id_from_irq()
1217 if (ce_id < 0 || ce_id >= ARRAY_SIZE(ar_snoc->pipe_info)) { in ath10k_snoc_per_engine_handler()
1224 set_bit(ce_id, ar_snoc->pending_ce_irqs); in ath10k_snoc_per_engine_handler()
1226 napi_schedule(&ar->napi); in ath10k_snoc_per_engine_handler()
1238 if (test_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags)) { in ath10k_snoc_napi_poll()
1244 if (test_and_clear_bit(ce_id, ar_snoc->pending_ce_irqs)) { in ath10k_snoc_napi_poll()
1259 netif_napi_add(ar->napi_dev, &ar->napi, ath10k_snoc_napi_poll); in ath10k_snoc_init_napi()
1268 ret = request_irq(ar_snoc->ce_irqs[id].irq_line, in ath10k_snoc_request_irq()
1282 for (id -= 1; id >= 0; id--) in ath10k_snoc_request_irq()
1283 free_irq(ar_snoc->ce_irqs[id].irq_line, ar); in ath10k_snoc_request_irq()
1294 free_irq(ar_snoc->ce_irqs[id].irq_line, ar); in ath10k_snoc_free_irq()
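
A sketch of the per-copy-engine interrupt wiring seen in ath10k_snoc_request_irq()/ath10k_snoc_free_irq(): one IRQ line per CE, requested with ath10k_snoc_per_engine_handler() and released in reverse when a later request fails. The trigger flags and name string are placeholders; only the request_irq()/free_irq() pairing and the unwind loop are taken from the listing.

static int snoc_request_ce_irqs(struct ath10k *ar, struct ath10k_snoc *ar_snoc)
{
        int id, ret;

        for (id = 0; id < CE_COUNT_MAX; id++) {
                ret = request_irq(ar_snoc->ce_irqs[id].irq_line,
                                  ath10k_snoc_per_engine_handler,
                                  IRQF_TRIGGER_RISING /* assumption */,
                                  "ath10k_snoc_ce" /* placeholder name */, ar);
                if (ret) {
                        while (--id >= 0)       /* undo the ones already requested */
                                free_irq(ar_snoc->ce_irqs[id].irq_line, ar);
                        return ret;
                }
        }

        return 0;
}

static void snoc_free_ce_irqs(struct ath10k *ar, struct ath10k_snoc *ar_snoc)
{
        int id;

        for (id = 0; id < CE_COUNT_MAX; id++)
                free_irq(ar_snoc->ce_irqs[id].irq_line, ar);
}
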
1304 pdev = ar_snoc->dev; in ath10k_snoc_resource_init()
1308 return -EINVAL; in ath10k_snoc_resource_init()
1311 ar_snoc->mem_pa = res->start; in ath10k_snoc_resource_init()
1312 ar_snoc->mem = devm_ioremap(&pdev->dev, ar_snoc->mem_pa, in ath10k_snoc_resource_init()
1314 if (!ar_snoc->mem) { in ath10k_snoc_resource_init()
1316 &ar_snoc->mem_pa); in ath10k_snoc_resource_init()
1317 return -EINVAL; in ath10k_snoc_resource_init()
1321 ret = platform_get_irq(ar_snoc->dev, i); in ath10k_snoc_resource_init()
1324 ar_snoc->ce_irqs[i].irq_line = ret; in ath10k_snoc_resource_init()
1327 ret = device_property_read_u32(&pdev->dev, "qcom,xo-cal-data", in ath10k_snoc_resource_init()
1328 &ar_snoc->xo_cal_data); in ath10k_snoc_resource_init()
1329 ath10k_dbg(ar, ATH10K_DBG_SNOC, "snoc xo-cal-data return %d\n", ret); in ath10k_snoc_resource_init()
1331 ar_snoc->xo_cal_supported = true; in ath10k_snoc_resource_init()
1333 ar_snoc->xo_cal_data); in ath10k_snoc_resource_init()
1342 struct device *dev = &ar_snoc->dev->dev; in ath10k_snoc_quirks_init()
1345 of_property_read_string(dev->of_node, "firmware-name", &ar->board_name); in ath10k_snoc_quirks_init()
1347 if (of_property_read_bool(dev->of_node, "qcom,snoc-host-cap-8bit-quirk")) in ath10k_snoc_quirks_init()
1348 set_bit(ATH10K_SNOC_FLAG_8BIT_HOST_CAP_QUIRK, &ar_snoc->flags); in ath10k_snoc_quirks_init()
1357 if (test_bit(ATH10K_SNOC_FLAG_UNREGISTERING, &ar_snoc->flags)) in ath10k_snoc_fw_indication()
1362 if (test_bit(ATH10K_SNOC_FLAG_REGISTERED, &ar_snoc->flags)) { in ath10k_snoc_fw_indication()
1368 bus_params.chip_id = ar_snoc->target_info.soc_version; in ath10k_snoc_fw_indication()
1375 set_bit(ATH10K_SNOC_FLAG_REGISTERED, &ar_snoc->flags); in ath10k_snoc_fw_indication()
1378 set_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags); in ath10k_snoc_fw_indication()
1379 set_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags); in ath10k_snoc_fw_indication()
1383 return -EINVAL; in ath10k_snoc_fw_indication()
1396 timer_setup(&ar_snoc->rx_post_retry, ath10k_snoc_rx_replenish_retry, 0); in ath10k_snoc_setup_resource()
1397 spin_lock_init(&ce->ce_lock); in ath10k_snoc_setup_resource()
1399 pipe = &ar_snoc->pipe_info[i]; in ath10k_snoc_setup_resource()
1400 pipe->ce_hdl = &ce->ce_states[i]; in ath10k_snoc_setup_resource()
1401 pipe->pipe_num = i; in ath10k_snoc_setup_resource()
1402 pipe->hif_ce_state = ar; in ath10k_snoc_setup_resource()
1411 pipe->buf_sz = host_ce_config_wlan[i].src_sz_max; in ath10k_snoc_setup_resource()
1422 netif_napi_del(&ar->napi); in ath10k_snoc_release_resource()
1436 if (!crash_data || !crash_data->ramdump_buf) in ath10k_msa_dump_memory()
1443 current_region = &mem_layout->region_table.regions[0]; in ath10k_msa_dump_memory()
1445 buf = crash_data->ramdump_buf; in ath10k_msa_dump_memory()
1446 buf_len = crash_data->ramdump_buf_len; in ath10k_msa_dump_memory()
1452 buf_len -= sizeof(*hdr); in ath10k_msa_dump_memory()
1454 hdr->region_type = cpu_to_le32(current_region->type); in ath10k_msa_dump_memory()
1455 hdr->start = cpu_to_le32((unsigned long)ar->msa.vaddr); in ath10k_msa_dump_memory()
1456 hdr->length = cpu_to_le32(ar->msa.mem_size); in ath10k_msa_dump_memory()
1458 if (current_region->len < ar->msa.mem_size) { in ath10k_msa_dump_memory()
1459 memcpy(buf, ar->msa.vaddr, current_region->len); in ath10k_msa_dump_memory()
1461 current_region->len, ar->msa.mem_size); in ath10k_msa_dump_memory()
1463 memcpy(buf, ar->msa.vaddr, ar->msa.mem_size); in ath10k_msa_dump_memory()
1472 mutex_lock(&ar->dump_mutex); in ath10k_snoc_fw_crashed_dump()
1474 spin_lock_bh(&ar->data_lock); in ath10k_snoc_fw_crashed_dump()
1475 ar->stats.fw_crash_counter++; in ath10k_snoc_fw_crashed_dump()
1476 spin_unlock_bh(&ar->data_lock); in ath10k_snoc_fw_crashed_dump()
1481 scnprintf(guid, sizeof(guid), "%pUl", &crash_data->guid); in ath10k_snoc_fw_crashed_dump()
1488 mutex_unlock(&ar->dump_mutex); in ath10k_snoc_fw_crashed_dump()
1495 struct ath10k *ar = ar_snoc->ar; in ath10k_snoc_modem_notify()
1500 ath10k_dbg(ar, ATH10K_DBG_SNOC, "received modem starting event\n"); in ath10k_snoc_modem_notify()
1501 clear_bit(ATH10K_SNOC_FLAG_MODEM_STOPPED, &ar_snoc->flags); in ath10k_snoc_modem_notify()
1505 ath10k_dbg(ar, ATH10K_DBG_SNOC, "received modem running event\n"); in ath10k_snoc_modem_notify()
1509 ath10k_dbg(ar, ATH10K_DBG_SNOC, "received modem %s event\n", in ath10k_snoc_modem_notify()
1510 notify_data->crashed ? "crashed" : "stopping"); in ath10k_snoc_modem_notify()
1511 if (!notify_data->crashed) in ath10k_snoc_modem_notify()
1512 set_bit(ATH10K_SNOC_FLAG_MODEM_STOPPED, &ar_snoc->flags); in ath10k_snoc_modem_notify()
1514 clear_bit(ATH10K_SNOC_FLAG_MODEM_STOPPED, &ar_snoc->flags); in ath10k_snoc_modem_notify()
1518 ath10k_dbg(ar, ATH10K_DBG_SNOC, "received modem offline event\n"); in ath10k_snoc_modem_notify()
1535 ar_snoc->nb.notifier_call = ath10k_snoc_modem_notify; in ath10k_modem_init()
1537 notifier = qcom_register_ssr_notifier("mpss", &ar_snoc->nb); in ath10k_modem_init()
1540 ath10k_err(ar, "failed to initialize modem notifier: %d\n", ret); in ath10k_modem_init()
1544 ar_snoc->notifier = notifier; in ath10k_modem_init()
1554 ret = qcom_unregister_ssr_notifier(ar_snoc->notifier, &ar_snoc->nb); in ath10k_modem_deinit()
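
This is the part of the file that actually matches the modem/remoteproc query: ath10k_modem_init()/ath10k_modem_deinit() subscribe to subsystem-restart (SSR) events of the "mpss" (modem) remoteproc, and ath10k_snoc_modem_notify() tracks the modem state via ATH10K_SNOC_FLAG_MODEM_STOPPED. A sketch of the register/unregister pairing, assuming <linux/remoteproc/qcom_rproc.h> from line 15; the wrapper names are illustrative.

static int modem_ssr_init(struct ath10k_snoc *ar_snoc,
                          int (*cb)(struct notifier_block *, unsigned long, void *))
{
        void *notifier;

        ar_snoc->nb.notifier_call = cb;         /* e.g. ath10k_snoc_modem_notify */
        notifier = qcom_register_ssr_notifier("mpss", &ar_snoc->nb);
        if (IS_ERR(notifier))
                return PTR_ERR(notifier);

        ar_snoc->notifier = notifier;           /* handle needed for unregister */
        return 0;
}

static int modem_ssr_deinit(struct ath10k_snoc *ar_snoc)
{
        return qcom_unregister_ssr_notifier(ar_snoc->notifier, &ar_snoc->nb);
}
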
1561 struct device *dev = ar->dev; in ath10k_setup_msa_resources()
1566 node = of_parse_phandle(dev->of_node, "memory-region", 0); in ath10k_setup_msa_resources()
1575 ar->msa.paddr = r.start; in ath10k_setup_msa_resources()
1576 ar->msa.mem_size = resource_size(&r); in ath10k_setup_msa_resources()
1577 ar->msa.vaddr = devm_memremap(dev, ar->msa.paddr, in ath10k_setup_msa_resources()
1578 ar->msa.mem_size, in ath10k_setup_msa_resources()
1580 if (IS_ERR(ar->msa.vaddr)) { in ath10k_setup_msa_resources()
1583 return PTR_ERR(ar->msa.vaddr); in ath10k_setup_msa_resources()
1586 ar->msa.vaddr = dmam_alloc_coherent(dev, msa_size, in ath10k_setup_msa_resources()
1587 &ar->msa.paddr, in ath10k_setup_msa_resources()
1589 if (!ar->msa.vaddr) { in ath10k_setup_msa_resources()
1591 return -ENOMEM; in ath10k_setup_msa_resources()
1593 ar->msa.mem_size = msa_size; in ath10k_setup_msa_resources()
1597 &ar->msa.paddr, in ath10k_setup_msa_resources()
1598 ar->msa.vaddr); in ath10k_setup_msa_resources()
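
A sketch of the ath10k_setup_msa_resources() pattern above: prefer a device-tree "memory-region" reservation and memremap it, otherwise fall back to a coherent DMA allocation of msa_size bytes. of_address_to_resource() and the MEMREMAP_WT flag are assumptions filled in between the matched lines; the function name is illustrative.

static int setup_msa(struct device *dev, size_t msa_size,
                     void **vaddr, dma_addr_t *paddr, size_t *mem_size)
{
        struct device_node *node;
        struct resource r;
        int ret;

        node = of_parse_phandle(dev->of_node, "memory-region", 0);
        if (node) {
                ret = of_address_to_resource(node, 0, &r);      /* assumption */
                of_node_put(node);
                if (ret)
                        return ret;

                *paddr = r.start;
                *mem_size = resource_size(&r);
                *vaddr = devm_memremap(dev, *paddr, *mem_size, MEMREMAP_WT);
                return PTR_ERR_OR_ZERO(*vaddr);
        }

        /* no reserved region: carve the MSA out of coherent DMA memory */
        *vaddr = dmam_alloc_coherent(dev, msa_size, paddr, GFP_KERNEL);
        if (!*vaddr)
                return -ENOMEM;
        *mem_size = msa_size;

        return 0;
}
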
1606 struct device *host_dev = &ar_snoc->dev->dev; in ath10k_fw_init()
1613 node = of_get_child_by_name(host_dev->of_node, "wifi-firmware"); in ath10k_fw_init()
1615 ar_snoc->use_tz = true; in ath10k_fw_init()
1620 info.fwnode = &node->fwnode; in ath10k_fw_init()
1622 info.name = node->name; in ath10k_fw_init()
1631 pdev->dev.of_node = node; in ath10k_fw_init()
1633 ret = of_dma_configure(&pdev->dev, node, true); in ath10k_fw_init()
1639 ar_snoc->fw.dev = &pdev->dev; in ath10k_fw_init()
1641 iommu_dom = iommu_paging_domain_alloc(ar_snoc->fw.dev); in ath10k_fw_init()
1648 ret = iommu_attach_device(iommu_dom, ar_snoc->fw.dev); in ath10k_fw_init()
1654 ar_snoc->fw.iommu_domain = iommu_dom; in ath10k_fw_init()
1655 ar_snoc->fw.fw_start_addr = ar->msa.paddr; in ath10k_fw_init()
1657 ret = iommu_map(iommu_dom, ar_snoc->fw.fw_start_addr, in ath10k_fw_init()
1658 ar->msa.paddr, ar->msa.mem_size, in ath10k_fw_init()
1670 iommu_detach_device(iommu_dom, ar_snoc->fw.dev); in ath10k_fw_init()
1685 const size_t mapped_size = ar_snoc->fw.mapped_mem_size; in ath10k_fw_deinit()
1689 if (ar_snoc->use_tz) in ath10k_fw_deinit()
1692 iommu = ar_snoc->fw.iommu_domain; in ath10k_fw_deinit()
1694 unmapped_size = iommu_unmap(iommu, ar_snoc->fw.fw_start_addr, in ath10k_fw_deinit()
1700 iommu_detach_device(iommu, ar_snoc->fw.dev); in ath10k_fw_deinit()
1703 platform_device_unregister(to_platform_device(ar_snoc->fw.dev)); in ath10k_fw_deinit()
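
A sketch of the firmware IOMMU setup/teardown seen in the ath10k_fw_init()/ath10k_fw_deinit() fragments above. The protection flags and GFP argument are assumptions; the call sequence (allocate a paging domain for the firmware device, attach it, map the MSA region at fw_start_addr, and unmap/detach/free on the way out) follows the listing.

static int fw_iommu_init(struct device *fw_dev, struct iommu_domain **domp,
                         unsigned long iova, phys_addr_t paddr, size_t size)
{
        struct iommu_domain *dom;
        int ret;

        dom = iommu_paging_domain_alloc(fw_dev);
        if (IS_ERR(dom))
                return PTR_ERR(dom);

        ret = iommu_attach_device(dom, fw_dev);
        if (ret)
                goto err_free;

        ret = iommu_map(dom, iova, paddr, size,
                        IOMMU_READ | IOMMU_WRITE /* assumption */, GFP_KERNEL);
        if (ret)
                goto err_detach;

        *domp = dom;
        return 0;

err_detach:
        iommu_detach_device(dom, fw_dev);
err_free:
        iommu_domain_free(dom);
        return ret;
}
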
1709 { .compatible = "qcom,wcn3990-wifi",
1726 dev = &pdev->dev; in ath10k_snoc_probe()
1730 return -EINVAL; in ath10k_snoc_probe()
1733 ret = dma_set_mask_and_coherent(dev, drv_data->dma_mask); in ath10k_snoc_probe()
1740 drv_data->hw_rev, &ath10k_snoc_hif_ops); in ath10k_snoc_probe()
1743 return -ENOMEM; in ath10k_snoc_probe()
1747 ar_snoc->dev = pdev; in ath10k_snoc_probe()
1749 ar_snoc->ar = ar; in ath10k_snoc_probe()
1750 ar_snoc->ce.bus_ops = &ath10k_snoc_bus_ops; in ath10k_snoc_probe()
1751 ar->ce_priv = &ar_snoc->ce; in ath10k_snoc_probe()
1752 msa_size = drv_data->msa_size; in ath10k_snoc_probe()
1773 ar_snoc->num_vregs = ARRAY_SIZE(ath10k_regulators); in ath10k_snoc_probe()
1774 ar_snoc->vregs = devm_kcalloc(&pdev->dev, ar_snoc->num_vregs, in ath10k_snoc_probe()
1775 sizeof(*ar_snoc->vregs), GFP_KERNEL); in ath10k_snoc_probe()
1776 if (!ar_snoc->vregs) { in ath10k_snoc_probe()
1777 ret = -ENOMEM; in ath10k_snoc_probe()
1780 for (i = 0; i < ar_snoc->num_vregs; i++) in ath10k_snoc_probe()
1781 ar_snoc->vregs[i].supply = ath10k_regulators[i]; in ath10k_snoc_probe()
1783 ret = devm_regulator_bulk_get(&pdev->dev, ar_snoc->num_vregs, in ath10k_snoc_probe()
1784 ar_snoc->vregs); in ath10k_snoc_probe()
1788 ar_snoc->num_clks = ARRAY_SIZE(ath10k_clocks); in ath10k_snoc_probe()
1789 ar_snoc->clks = devm_kcalloc(&pdev->dev, ar_snoc->num_clks, in ath10k_snoc_probe()
1790 sizeof(*ar_snoc->clks), GFP_KERNEL); in ath10k_snoc_probe()
1791 if (!ar_snoc->clks) { in ath10k_snoc_probe()
1792 ret = -ENOMEM; in ath10k_snoc_probe()
1796 for (i = 0; i < ar_snoc->num_clks; i++) in ath10k_snoc_probe()
1797 ar_snoc->clks[i].id = ath10k_clocks[i]; in ath10k_snoc_probe()
1799 ret = devm_clk_bulk_get_optional(&pdev->dev, ar_snoc->num_clks, in ath10k_snoc_probe()
1800 ar_snoc->clks); in ath10k_snoc_probe()
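
The probe fragments above (lines 1773-1800) acquire the WCN3990 supplies and clocks as devm-managed bulk resources. Consolidated, the pattern is: allocate the descriptor arrays with devm_kcalloc(), fill in the names from ath10k_regulators[]/ath10k_clocks[], then call the bulk getters (clocks via the _optional variant). A sketch with the original's error labels replaced by plain returns; the helper name is illustrative.

static int get_vregs_and_clks(struct platform_device *pdev,
                              struct ath10k_snoc *ar_snoc)
{
        int i, ret;

        ar_snoc->num_vregs = ARRAY_SIZE(ath10k_regulators);
        ar_snoc->vregs = devm_kcalloc(&pdev->dev, ar_snoc->num_vregs,
                                      sizeof(*ar_snoc->vregs), GFP_KERNEL);
        if (!ar_snoc->vregs)
                return -ENOMEM;
        for (i = 0; i < ar_snoc->num_vregs; i++)
                ar_snoc->vregs[i].supply = ath10k_regulators[i];
        ret = devm_regulator_bulk_get(&pdev->dev, ar_snoc->num_vregs,
                                      ar_snoc->vregs);
        if (ret)
                return ret;

        ar_snoc->num_clks = ARRAY_SIZE(ath10k_clocks);
        ar_snoc->clks = devm_kcalloc(&pdev->dev, ar_snoc->num_clks,
                                     sizeof(*ar_snoc->clks), GFP_KERNEL);
        if (!ar_snoc->clks)
                return -ENOMEM;
        for (i = 0; i < ar_snoc->num_clks; i++)
                ar_snoc->clks[i].id = ath10k_clocks[i];

        return devm_clk_bulk_get_optional(&pdev->dev, ar_snoc->num_clks,
                                          ar_snoc->clks);
}
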
1854 set_bit(ATH10K_SNOC_FLAG_UNREGISTERING, &ar_snoc->flags); in ath10k_snoc_free_resources()
1874 reinit_completion(&ar->driver_recovery); in ath10k_snoc_remove()
1876 if (test_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags)) in ath10k_snoc_remove()
1877 wait_for_completion_timeout(&ar->driver_recovery, 3 * HZ); in ath10k_snoc_remove()