Lines Matching +full:hdr +full:- +full:engine

1 // SPDX-License-Identifier: ISC
46 "vdd-0.8-cx-mx",
47 "vdd-1.8-xo",
48 "vdd-1.3-rfa",
49 "vdd-3.3-ch0",
50 "vdd-3.3-ch1",
136 /* CE0: host->target HTC control streams */
145 /* CE1: target->host HTT + HTC control */
154 /* CE2: target->host WMI */
163 /* CE3: host->target WMI */
172 /* CE4: host->target HTT */
181 /* CE5: target->host HTT (ipa_uc->target) */
214 /* CE9: target->host HTT */
223 /* CE10: target->host HTT */
232 /* CE11: target->host PKTLOG */
243 /* CE0: host->target HTC control and raw streams */
253 /* CE1: target->host HTT + HTC control */
263 /* CE2: target->host WMI */
273 /* CE3: host->target WMI */
283 /* CE4: host->target HTT */
293 /* CE5: target->host HTT (HIF->HTT) */
333 /* CE9: target->host HTT */
343 /* CE10: target->host HTT */
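
The CE0-CE11 comments above come from the host- and target-side copy-engine configuration tables. As a rough sketch only (the struct name and fields below are assumptions modelled on ath10k's ce_attr layout, not quoted from the matches), each host entry pairs one of those direction comments with ring sizes and an optional receive callback:

#include <linux/types.h>

/* Hypothetical copy-engine attribute record; the real tables are
 * host_ce_config_wlan[] / target_ce_config_wlan[] in snoc.c.
 */
struct example_ce_attr {
	unsigned int flags;		/* e.g. interrupt/flow-control flags */
	unsigned int src_nentries;	/* send ring entries (0 = RX-only pipe) */
	unsigned int src_sz_max;	/* largest source buffer */
	unsigned int dest_nentries;	/* receive ring entries (0 = TX-only pipe) */
	void (*recv_cb)(void *ce_state); /* completion handler, if any */
};

static const struct example_ce_attr example_host_ce[] = {
	{ /* CE0: host->target HTC control streams */
		.src_nentries = 16,
		.src_sz_max = 2048,
		.dest_nentries = 0,
	},
	{ /* CE1: target->host HTT + HTC control */
		.src_nentries = 0,
		.src_sz_max = 2048,
		.dest_nentries = 512,
		/* .recv_cb would point at the HTT/HTC rx handler */
	},
};
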
367 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
372 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
377 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
382 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
387 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
392 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
397 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
402 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
407 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
412 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
417 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
422 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
427 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
432 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
437 __cpu_to_le32(PIPEDIR_OUT), /* out = UL = host -> target */
442 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
450 { /* in = DL = target -> host */
452 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
455 { /* in = DL = target -> host */
457 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
460 { /* in = DL = target -> host pktlog */
462 __cpu_to_le32(PIPEDIR_IN), /* in = DL = target -> host */
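
Each PIPEDIR_OUT/PIPEDIR_IN pair above is part of a three-word service-to-pipe record that is handed to the target, which is why every word goes through __cpu_to_le32(). A minimal sketch of that layout, where the struct name, the constants and the WMI service ID are illustrative assumptions rather than quotes from the file:

#include <linux/types.h>
#include <asm/byteorder.h>

/* Assumed record layout: service, direction, copy-engine number. */
struct example_svc_to_pipe {
	__le32 service_id;
	__le32 pipedir;	/* OUT = UL = host->target, IN = DL = target->host */
	__le32 pipenum;
};

#define EX_PIPEDIR_IN		1	/* placeholder values */
#define EX_PIPEDIR_OUT		2
#define EX_SVC_WMI_CONTROL	3

static const struct example_svc_to_pipe example_map[] = {
	{ /* out = UL = host -> target */
		__cpu_to_le32(EX_SVC_WMI_CONTROL),
		__cpu_to_le32(EX_PIPEDIR_OUT),
		__cpu_to_le32(3),	/* CE3: host->target WMI */
	},
	{ /* in = DL = target -> host */
		__cpu_to_le32(EX_SVC_WMI_CONTROL),
		__cpu_to_le32(EX_PIPEDIR_IN),
		__cpu_to_le32(2),	/* CE2: target->host WMI */
	},
};
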
478 iowrite32(value, ar_snoc->mem + offset); in ath10k_snoc_write32()
486 val = ioread32(ar_snoc->mem + offset); in ath10k_snoc_read32()
493 struct ath10k_ce_pipe *ce_pipe = pipe->ce_hdl; in __ath10k_snoc_rx_post_buf()
494 struct ath10k *ar = pipe->hif_ce_state; in __ath10k_snoc_rx_post_buf()
500 skb = dev_alloc_skb(pipe->buf_sz); in __ath10k_snoc_rx_post_buf()
502 return -ENOMEM; in __ath10k_snoc_rx_post_buf()
504 WARN_ONCE((unsigned long)skb->data & 3, "unaligned skb"); in __ath10k_snoc_rx_post_buf()
506 paddr = dma_map_single(ar->dev, skb->data, in __ath10k_snoc_rx_post_buf()
507 skb->len + skb_tailroom(skb), in __ath10k_snoc_rx_post_buf()
509 if (unlikely(dma_mapping_error(ar->dev, paddr))) { in __ath10k_snoc_rx_post_buf()
512 return -EIO; in __ath10k_snoc_rx_post_buf()
515 ATH10K_SKB_RXCB(skb)->paddr = paddr; in __ath10k_snoc_rx_post_buf()
517 spin_lock_bh(&ce->ce_lock); in __ath10k_snoc_rx_post_buf()
518 ret = ce_pipe->ops->ce_rx_post_buf(ce_pipe, skb, paddr); in __ath10k_snoc_rx_post_buf()
519 spin_unlock_bh(&ce->ce_lock); in __ath10k_snoc_rx_post_buf()
521 dma_unmap_single(ar->dev, paddr, skb->len + skb_tailroom(skb), in __ath10k_snoc_rx_post_buf()
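
The __ath10k_snoc_rx_post_buf() matches show the usual receive-refill sequence: allocate an skb, DMA-map its data plus tailroom, hand it to the copy engine under the CE lock, and unwind the mapping if posting fails. A condensed, self-contained sketch of that flow; the post() callback and the parameter names are stand-ins for the driver's ce_rx_post_buf op and pipe fields:

#include <linux/bug.h>
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>
#include <linux/spinlock.h>

/* Sketch: post one receive buffer to a ring, undoing the mapping on error. */
static int example_rx_post_buf(struct device *dev, spinlock_t *ce_lock,
			       unsigned int buf_sz,
			       int (*post)(struct sk_buff *skb, dma_addr_t paddr))
{
	struct sk_buff *skb;
	dma_addr_t paddr;
	int ret;

	skb = dev_alloc_skb(buf_sz);
	if (!skb)
		return -ENOMEM;

	WARN_ONCE((unsigned long)skb->data & 3, "unaligned skb");

	paddr = dma_map_single(dev, skb->data, skb->len + skb_tailroom(skb),
			       DMA_FROM_DEVICE);
	if (unlikely(dma_mapping_error(dev, paddr))) {
		dev_kfree_skb_any(skb);
		return -EIO;
	}

	spin_lock_bh(ce_lock);
	ret = post(skb, paddr);
	spin_unlock_bh(ce_lock);
	if (ret) {
		dma_unmap_single(dev, paddr, skb->len + skb_tailroom(skb),
				 DMA_FROM_DEVICE);
		dev_kfree_skb_any(skb);
	}

	return ret;
}
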
532 struct ath10k *ar = pipe->hif_ce_state; in ath10k_snoc_rx_post_pipe()
535 struct ath10k_ce_pipe *ce_pipe = pipe->ce_hdl; in ath10k_snoc_rx_post_pipe()
538 if (pipe->buf_sz == 0) in ath10k_snoc_rx_post_pipe()
541 if (!ce_pipe->dest_ring) in ath10k_snoc_rx_post_pipe()
544 spin_lock_bh(&ce->ce_lock); in ath10k_snoc_rx_post_pipe()
546 spin_unlock_bh(&ce->ce_lock); in ath10k_snoc_rx_post_pipe()
547 while (num--) { in ath10k_snoc_rx_post_pipe()
550 if (ret == -ENOSPC) in ath10k_snoc_rx_post_pipe()
553 mod_timer(&ar_snoc->rx_post_retry, jiffies + in ath10k_snoc_rx_post_pipe()
566 ath10k_snoc_rx_post_pipe(&ar_snoc->pipe_info[i]); in ath10k_snoc_rx_post()
573 struct ath10k *ar = ce_state->ar; in ath10k_snoc_process_rx_cb()
575 struct ath10k_snoc_pipe *pipe_info = &ar_snoc->pipe_info[ce_state->id]; in ath10k_snoc_process_rx_cb()
585 max_nbytes = skb->len + skb_tailroom(skb); in ath10k_snoc_process_rx_cb()
586 dma_unmap_single(ar->dev, ATH10K_SKB_RXCB(skb)->paddr, in ath10k_snoc_process_rx_cb()
602 ce_state->id, skb->len); in ath10k_snoc_process_rx_cb()
618 * HTT Rx (target->host) is processed. in ath10k_snoc_htt_htc_rx_cb()
620 ath10k_ce_per_engine_service(ce_state->ar, CE_POLL_PIPE); in ath10k_snoc_htt_htc_rx_cb()
641 ath10k_ce_per_engine_service(ce_state->ar, CE_POLL_PIPE); in ath10k_snoc_htt_rx_cb()
648 struct ath10k *ar = ar_snoc->ar; in ath10k_snoc_rx_replenish_retry()
655 struct ath10k *ar = ce_state->ar; in ath10k_snoc_htc_tx_cb()
673 struct ath10k *ar = ce_state->ar; in ath10k_snoc_htt_tx_cb()
680 dma_unmap_single(ar->dev, ATH10K_SKB_CB(skb)->paddr, in ath10k_snoc_htt_tx_cb()
681 skb->len, DMA_TO_DEVICE); in ath10k_snoc_htt_tx_cb()
695 snoc_pipe = &ar_snoc->pipe_info[pipe_id]; in ath10k_snoc_hif_tx_sg()
696 ce_pipe = snoc_pipe->ce_hdl; in ath10k_snoc_hif_tx_sg()
697 spin_lock_bh(&ce->ce_lock); in ath10k_snoc_hif_tx_sg()
699 for (i = 0; i < n_items - 1; i++) { in ath10k_snoc_hif_tx_sg()
727 spin_unlock_bh(&ce->ce_lock); in ath10k_snoc_hif_tx_sg()
732 for (; i > 0; i--) in ath10k_snoc_hif_tx_sg()
735 spin_unlock_bh(&ce->ce_lock); in ath10k_snoc_hif_tx_sg()
742 target_info->version = ATH10K_HW_WCN3990; in ath10k_snoc_hif_get_target_info()
743 target_info->type = ATH10K_HW_WCN3990; in ath10k_snoc_hif_get_target_info()
754 return ath10k_ce_num_free_src_entries(ar_snoc->pipe_info[pipe].ce_hdl); in ath10k_snoc_hif_get_free_queue_number()
786 if (__le32_to_cpu(entry->service_id) != service_id) in ath10k_snoc_hif_map_service_to_pipe()
789 switch (__le32_to_cpu(entry->pipedir)) { in ath10k_snoc_hif_map_service_to_pipe()
794 *dl_pipe = __le32_to_cpu(entry->pipenum); in ath10k_snoc_hif_map_service_to_pipe()
799 *ul_pipe = __le32_to_cpu(entry->pipenum); in ath10k_snoc_hif_map_service_to_pipe()
805 *dl_pipe = __le32_to_cpu(entry->pipenum); in ath10k_snoc_hif_map_service_to_pipe()
806 *ul_pipe = __le32_to_cpu(entry->pipenum); in ath10k_snoc_hif_map_service_to_pipe()
814 return -ENOENT; in ath10k_snoc_hif_map_service_to_pipe()
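
ath10k_snoc_hif_map_service_to_pipe() resolves a service ID to its uplink and downlink pipes by scanning that same table, returning -ENOENT when no usable mapping exists. A simplified lookup over the record layout sketched earlier (struct and constants remain assumptions):

#include <linux/errno.h>
#include <linux/types.h>
#include <asm/byteorder.h>

struct example_svc_to_pipe {	/* as in the earlier sketch */
	__le32 service_id;
	__le32 pipedir;
	__le32 pipenum;
};

enum { EX_PIPEDIR_NONE, EX_PIPEDIR_IN, EX_PIPEDIR_OUT, EX_PIPEDIR_INOUT };

/* Sketch: record the UL (host->target) and DL (target->host) pipe numbers
 * for one service; both directions must be found for the lookup to succeed.
 */
static int example_map_service_to_pipe(const struct example_svc_to_pipe *map,
				       size_t n, u16 service_id,
				       u8 *ul_pipe, u8 *dl_pipe)
{
	bool ul_set = false, dl_set = false;
	size_t i;

	for (i = 0; i < n; i++) {
		if (__le32_to_cpu(map[i].service_id) != service_id)
			continue;

		switch (__le32_to_cpu(map[i].pipedir)) {
		case EX_PIPEDIR_IN:
			*dl_pipe = __le32_to_cpu(map[i].pipenum);
			dl_set = true;
			break;
		case EX_PIPEDIR_OUT:
			*ul_pipe = __le32_to_cpu(map[i].pipenum);
			ul_set = true;
			break;
		case EX_PIPEDIR_INOUT:
			*dl_pipe = __le32_to_cpu(map[i].pipenum);
			*ul_pipe = __le32_to_cpu(map[i].pipenum);
			dl_set = ul_set = true;
			break;
		}
	}

	return (ul_set && dl_set) ? 0 : -ENOENT;
}
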
847 ar = snoc_pipe->hif_ce_state; in ath10k_snoc_rx_pipe_cleanup()
848 ce_pipe = snoc_pipe->ce_hdl; in ath10k_snoc_rx_pipe_cleanup()
849 ce_ring = ce_pipe->dest_ring; in ath10k_snoc_rx_pipe_cleanup()
854 if (!snoc_pipe->buf_sz) in ath10k_snoc_rx_pipe_cleanup()
857 for (i = 0; i < ce_ring->nentries; i++) { in ath10k_snoc_rx_pipe_cleanup()
858 skb = ce_ring->per_transfer_context[i]; in ath10k_snoc_rx_pipe_cleanup()
862 ce_ring->per_transfer_context[i] = NULL; in ath10k_snoc_rx_pipe_cleanup()
864 dma_unmap_single(ar->dev, ATH10K_SKB_RXCB(skb)->paddr, in ath10k_snoc_rx_pipe_cleanup()
865 skb->len + skb_tailroom(skb), in ath10k_snoc_rx_pipe_cleanup()
879 ar = snoc_pipe->hif_ce_state; in ath10k_snoc_tx_pipe_cleanup()
880 ce_pipe = snoc_pipe->ce_hdl; in ath10k_snoc_tx_pipe_cleanup()
881 ce_ring = ce_pipe->src_ring; in ath10k_snoc_tx_pipe_cleanup()
886 if (!snoc_pipe->buf_sz) in ath10k_snoc_tx_pipe_cleanup()
889 for (i = 0; i < ce_ring->nentries; i++) { in ath10k_snoc_tx_pipe_cleanup()
890 skb = ce_ring->per_transfer_context[i]; in ath10k_snoc_tx_pipe_cleanup()
894 ce_ring->per_transfer_context[i] = NULL; in ath10k_snoc_tx_pipe_cleanup()
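
The rx/tx cleanup matches iterate a CE ring's per_transfer_context[] array, unmapping and freeing any skb the ring still owns. A generic sketch of that pattern; for simplicity the DMA addresses are passed in a parallel array here, whereas the driver keeps them in the skb control block:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Sketch: release every skb still parked in a ring's per-transfer context
 * slots. 'rx' selects the unmap direction and whether tailroom was mapped.
 */
static void example_pipe_cleanup(struct device *dev, struct sk_buff **ctx,
				 unsigned int nentries, const dma_addr_t *paddrs,
				 bool rx)
{
	unsigned int i;

	for (i = 0; i < nentries; i++) {
		struct sk_buff *skb = ctx[i];

		if (!skb)
			continue;

		ctx[i] = NULL;
		dma_unmap_single(dev, paddrs[i],
				 rx ? skb->len + skb_tailroom(skb) : skb->len,
				 rx ? DMA_FROM_DEVICE : DMA_TO_DEVICE);
		dev_kfree_skb_any(skb);
	}
}
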
906 del_timer_sync(&ar_snoc->rx_post_retry); in ath10k_snoc_buffer_cleanup()
908 pipe_info = &ar_snoc->pipe_info[pipe_num]; in ath10k_snoc_buffer_cleanup()
916 if (!test_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags)) in ath10k_snoc_hif_stop()
928 bitmap_clear(ar_snoc->pending_ce_irqs, 0, CE_COUNT_MAX); in ath10k_snoc_hif_start()
930 dev_set_threaded(&ar->napi_dev, true); in ath10k_snoc_hif_start()
935 clear_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags); in ath10k_snoc_hif_start()
949 ath10k_err(ar, "failed to initialize copy engine pipe %d: %d\n", in ath10k_snoc_init_pipes()
1001 return -EINVAL; in ath10k_snoc_wlan_enable()
1015 ret = regulator_bulk_enable(ar_snoc->num_vregs, ar_snoc->vregs); in ath10k_hw_power_on()
1019 ret = clk_bulk_prepare_enable(ar_snoc->num_clks, ar_snoc->clks); in ath10k_hw_power_on()
1026 regulator_bulk_disable(ar_snoc->num_vregs, ar_snoc->vregs); in ath10k_hw_power_on()
1036 clk_bulk_disable_unprepare(ar_snoc->num_clks, ar_snoc->clks); in ath10k_hw_power_off()
1038 return regulator_bulk_disable(ar_snoc->num_vregs, ar_snoc->vregs); in ath10k_hw_power_off()
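
ath10k_hw_power_on()/..._off() in the matches follow the usual bring-up order: regulators first, then clocks, with the regulators rolled back if the clocks fail, and the reverse order on power-off. A minimal sketch with the same bulk APIs:

#include <linux/clk.h>
#include <linux/regulator/consumer.h>

/* Sketch: enable supplies before clocks; undo supplies if clocks fail. */
static int example_hw_power_on(int num_vregs, struct regulator_bulk_data *vregs,
			       int num_clks, struct clk_bulk_data *clks)
{
	int ret;

	ret = regulator_bulk_enable(num_vregs, vregs);
	if (ret)
		return ret;

	ret = clk_bulk_prepare_enable(num_clks, clks);
	if (ret)
		regulator_bulk_disable(num_vregs, vregs);

	return ret;
}

/* Power-off reverses the order: clocks first, then regulators. */
static int example_hw_power_off(int num_vregs, struct regulator_bulk_data *vregs,
				int num_clks, struct clk_bulk_data *clks)
{
	clk_bulk_disable_unprepare(num_clks, clks);

	return regulator_bulk_disable(num_vregs, vregs);
}
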
1051 if (!test_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags) || in ath10k_snoc_wlan_disable()
1052 !test_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags)) in ath10k_snoc_wlan_disable()
1071 __func__, ar->state); in ath10k_snoc_hif_power_up()
1124 if (!device_may_wakeup(ar->dev)) in ath10k_snoc_hif_suspend()
1125 return -EPERM; in ath10k_snoc_hif_suspend()
1127 ret = enable_irq_wake(ar_snoc->ce_irqs[ATH10K_SNOC_WAKE_IRQ].irq_line); in ath10k_snoc_hif_suspend()
1143 if (!device_may_wakeup(ar->dev)) in ath10k_snoc_hif_resume()
1144 return -EPERM; in ath10k_snoc_hif_resume()
1146 ret = disable_irq_wake(ar_snoc->ce_irqs[ATH10K_SNOC_WAKE_IRQ].irq_line); in ath10k_snoc_hif_resume()
1190 if (ar_snoc->ce_irqs[i].irq_line == irq) in ath10k_snoc_get_ce_id_from_irq()
1195 return -EINVAL; in ath10k_snoc_get_ce_id_from_irq()
1204 if (ce_id < 0 || ce_id >= ARRAY_SIZE(ar_snoc->pipe_info)) { in ath10k_snoc_per_engine_handler()
1211 set_bit(ce_id, ar_snoc->pending_ce_irqs); in ath10k_snoc_per_engine_handler()
1213 napi_schedule(&ar->napi); in ath10k_snoc_per_engine_handler()
1225 if (test_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags)) { in ath10k_snoc_napi_poll()
1231 if (test_and_clear_bit(ce_id, ar_snoc->pending_ce_irqs)) { in ath10k_snoc_napi_poll()
1246 netif_napi_add(&ar->napi_dev, &ar->napi, ath10k_snoc_napi_poll); in ath10k_snoc_init_napi()
1255 ret = request_irq(ar_snoc->ce_irqs[id].irq_line, in ath10k_snoc_request_irq()
1269 for (id -= 1; id >= 0; id--) in ath10k_snoc_request_irq()
1270 free_irq(ar_snoc->ce_irqs[id].irq_line, ar); in ath10k_snoc_request_irq()
1281 free_irq(ar_snoc->ce_irqs[id].irq_line, ar); in ath10k_snoc_free_irq()
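
ath10k_snoc_request_irq() requests one interrupt line per copy engine and, on failure, walks back down to free the ones already requested; ath10k_snoc_free_irq() releases them all. A condensed sketch of that rollback pattern (the handler name and the trigger flags are assumptions):

#include <linux/interrupt.h>

/* Sketch: request N per-CE interrupt lines, rolling back on failure.
 * example_isr stands in for the per-engine handler.
 */
static irqreturn_t example_isr(int irq, void *arg)
{
	return IRQ_HANDLED;
}

static int example_request_irqs(const int *irq_lines, int count, void *arg)
{
	int id, ret;

	for (id = 0; id < count; id++) {
		ret = request_irq(irq_lines[id], example_isr,
				  IRQF_TRIGGER_RISING, "example-ce", arg);
		if (ret) {
			for (id -= 1; id >= 0; id--)
				free_irq(irq_lines[id], arg);
			return ret;
		}
	}

	return 0;
}
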
1291 pdev = ar_snoc->dev; in ath10k_snoc_resource_init()
1295 return -EINVAL; in ath10k_snoc_resource_init()
1298 ar_snoc->mem_pa = res->start; in ath10k_snoc_resource_init()
1299 ar_snoc->mem = devm_ioremap(&pdev->dev, ar_snoc->mem_pa, in ath10k_snoc_resource_init()
1301 if (!ar_snoc->mem) { in ath10k_snoc_resource_init()
1303 &ar_snoc->mem_pa); in ath10k_snoc_resource_init()
1304 return -EINVAL; in ath10k_snoc_resource_init()
1308 ret = platform_get_irq(ar_snoc->dev, i); in ath10k_snoc_resource_init()
1311 ar_snoc->ce_irqs[i].irq_line = ret; in ath10k_snoc_resource_init()
1314 ret = device_property_read_u32(&pdev->dev, "qcom,xo-cal-data", in ath10k_snoc_resource_init()
1315 &ar_snoc->xo_cal_data); in ath10k_snoc_resource_init()
1316 ath10k_dbg(ar, ATH10K_DBG_SNOC, "snoc xo-cal-data return %d\n", ret); in ath10k_snoc_resource_init()
1318 ar_snoc->xo_cal_supported = true; in ath10k_snoc_resource_init()
1320 ar_snoc->xo_cal_data); in ath10k_snoc_resource_init()
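
ath10k_snoc_resource_init() maps the WCN3990 register space and collects one interrupt per copy engine from the platform device, then reads the optional qcom,xo-cal-data property. A shortened sketch of those probe-time steps; the "membase" resource name and the CE count are assumptions for illustration:

#include <linux/io.h>
#include <linux/ioport.h>
#include <linux/platform_device.h>
#include <linux/property.h>

#define EXAMPLE_CE_COUNT	12	/* placeholder for the driver's CE count */

/* Sketch: ioremap the register resource, then pick up one IRQ per CE. */
static int example_resource_init(struct platform_device *pdev,
				 void __iomem **mem, int *irq_lines,
				 u32 *xo_cal_data)
{
	struct resource *res;
	int i, ret;

	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "membase");
	if (!res)
		return -EINVAL;

	*mem = devm_ioremap(&pdev->dev, res->start, resource_size(res));
	if (!*mem)
		return -EINVAL;

	for (i = 0; i < EXAMPLE_CE_COUNT; i++) {
		ret = platform_get_irq(pdev, i);
		if (ret < 0)
			return ret;
		irq_lines[i] = ret;
	}

	/* Optional calibration data; a missing property is not an error. */
	if (device_property_read_u32(&pdev->dev, "qcom,xo-cal-data", xo_cal_data))
		*xo_cal_data = 0;

	return 0;
}
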
1329 struct device *dev = &ar_snoc->dev->dev; in ath10k_snoc_quirks_init()
1331 if (of_property_read_bool(dev->of_node, "qcom,snoc-host-cap-8bit-quirk")) in ath10k_snoc_quirks_init()
1332 set_bit(ATH10K_SNOC_FLAG_8BIT_HOST_CAP_QUIRK, &ar_snoc->flags); in ath10k_snoc_quirks_init()
1341 if (test_bit(ATH10K_SNOC_FLAG_UNREGISTERING, &ar_snoc->flags)) in ath10k_snoc_fw_indication()
1346 if (test_bit(ATH10K_SNOC_FLAG_REGISTERED, &ar_snoc->flags)) { in ath10k_snoc_fw_indication()
1352 bus_params.chip_id = ar_snoc->target_info.soc_version; in ath10k_snoc_fw_indication()
1359 set_bit(ATH10K_SNOC_FLAG_REGISTERED, &ar_snoc->flags); in ath10k_snoc_fw_indication()
1362 set_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags); in ath10k_snoc_fw_indication()
1363 set_bit(ATH10K_FLAG_CRASH_FLUSH, &ar->dev_flags); in ath10k_snoc_fw_indication()
1367 return -EINVAL; in ath10k_snoc_fw_indication()
1380 timer_setup(&ar_snoc->rx_post_retry, ath10k_snoc_rx_replenish_retry, 0); in ath10k_snoc_setup_resource()
1381 spin_lock_init(&ce->ce_lock); in ath10k_snoc_setup_resource()
1383 pipe = &ar_snoc->pipe_info[i]; in ath10k_snoc_setup_resource()
1384 pipe->ce_hdl = &ce->ce_states[i]; in ath10k_snoc_setup_resource()
1385 pipe->pipe_num = i; in ath10k_snoc_setup_resource()
1386 pipe->hif_ce_state = ar; in ath10k_snoc_setup_resource()
1390 ath10k_err(ar, "failed to allocate copy engine pipe %d: %d\n", in ath10k_snoc_setup_resource()
1395 pipe->buf_sz = host_ce_config_wlan[i].src_sz_max; in ath10k_snoc_setup_resource()
1406 netif_napi_del(&ar->napi); in ath10k_snoc_release_resource()
1416 struct ath10k_dump_ram_data_hdr *hdr; in ath10k_msa_dump_memory() local
1420 if (!crash_data || !crash_data->ramdump_buf) in ath10k_msa_dump_memory()
1427 current_region = &mem_layout->region_table.regions[0]; in ath10k_msa_dump_memory()
1429 buf = crash_data->ramdump_buf; in ath10k_msa_dump_memory()
1430 buf_len = crash_data->ramdump_buf_len; in ath10k_msa_dump_memory()
1434 hdr = (void *)buf; in ath10k_msa_dump_memory()
1435 buf += sizeof(*hdr); in ath10k_msa_dump_memory()
1436 buf_len -= sizeof(*hdr); in ath10k_msa_dump_memory()
1438 hdr->region_type = cpu_to_le32(current_region->type); in ath10k_msa_dump_memory()
1439 hdr->start = cpu_to_le32((unsigned long)ar->msa.vaddr); in ath10k_msa_dump_memory()
1440 hdr->length = cpu_to_le32(ar->msa.mem_size); in ath10k_msa_dump_memory()
1442 if (current_region->len < ar->msa.mem_size) { in ath10k_msa_dump_memory()
1443 memcpy(buf, ar->msa.vaddr, current_region->len); in ath10k_msa_dump_memory()
1445 current_region->len, ar->msa.mem_size); in ath10k_msa_dump_memory()
1447 memcpy(buf, ar->msa.vaddr, ar->msa.mem_size); in ath10k_msa_dump_memory()
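
ath10k_msa_dump_memory() writes a small header (region type, start, length) into the crash-dump buffer and then copies the MSA region, clamping the copy when the region table advertises less than the MSA size. A self-contained sketch of that header-then-payload packing; only the three header fields are taken from the matches, everything else is assumed:

#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>

/* Assumed dump header mirroring the region_type/start/length fields above. */
struct example_dump_hdr {
	__le32 region_type;
	__le32 start;
	__le32 length;
} __packed;

/* Sketch: emit one header + payload record, never copying more than either
 * the advertised region length or the MSA size. Returns bytes written.
 */
static size_t example_dump_msa(u8 *buf, size_t buf_len, u32 region_type,
			       const void *msa_vaddr, u32 msa_size,
			       u32 region_len)
{
	struct example_dump_hdr *hdr = (void *)buf;
	size_t copy_len = min_t(size_t, region_len, msa_size);

	if (buf_len < sizeof(*hdr) + copy_len)
		return 0;

	hdr->region_type = cpu_to_le32(region_type);
	hdr->start = cpu_to_le32((unsigned long)msa_vaddr);
	hdr->length = cpu_to_le32(msa_size);

	memcpy(buf + sizeof(*hdr), msa_vaddr, copy_len);

	return sizeof(*hdr) + copy_len;
}
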
1456 mutex_lock(&ar->dump_mutex); in ath10k_snoc_fw_crashed_dump()
1458 spin_lock_bh(&ar->data_lock); in ath10k_snoc_fw_crashed_dump()
1459 ar->stats.fw_crash_counter++; in ath10k_snoc_fw_crashed_dump()
1460 spin_unlock_bh(&ar->data_lock); in ath10k_snoc_fw_crashed_dump()
1465 scnprintf(guid, sizeof(guid), "%pUl", &crash_data->guid); in ath10k_snoc_fw_crashed_dump()
1472 mutex_unlock(&ar->dump_mutex); in ath10k_snoc_fw_crashed_dump()
1479 struct ath10k *ar = ar_snoc->ar; in ath10k_snoc_modem_notify()
1485 clear_bit(ATH10K_SNOC_FLAG_MODEM_STOPPED, &ar_snoc->flags); in ath10k_snoc_modem_notify()
1494 notify_data->crashed ? "crashed" : "stopping"); in ath10k_snoc_modem_notify()
1495 if (!notify_data->crashed) in ath10k_snoc_modem_notify()
1496 set_bit(ATH10K_SNOC_FLAG_MODEM_STOPPED, &ar_snoc->flags); in ath10k_snoc_modem_notify()
1498 clear_bit(ATH10K_SNOC_FLAG_MODEM_STOPPED, &ar_snoc->flags); in ath10k_snoc_modem_notify()
1519 ar_snoc->nb.notifier_call = ath10k_snoc_modem_notify; in ath10k_modem_init()
1521 notifier = qcom_register_ssr_notifier("mpss", &ar_snoc->nb); in ath10k_modem_init()
1528 ar_snoc->notifier = notifier; in ath10k_modem_init()
1538 ret = qcom_unregister_ssr_notifier(ar_snoc->notifier, &ar_snoc->nb); in ath10k_modem_deinit()
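
ath10k_modem_init()/..._deinit() hook the driver into the Qualcomm modem subsystem-restart (SSR) notifier chain so WLAN can react when the "mpss" remote processor crashes or stops. A minimal registration sketch using the same remoteproc API; the callback body is a placeholder:

#include <linux/notifier.h>
#include <linux/remoteproc/qcom_rproc.h>

/* Sketch: subscribe to modem SSR events. */
static int example_ssr_notify(struct notifier_block *nb, unsigned long action,
			      void *data)
{
	return NOTIFY_OK;
}

static struct notifier_block example_nb = {
	.notifier_call = example_ssr_notify,
};

static void *example_modem_init(void)
{
	/* Returns an opaque handle (or ERR_PTR) to pass back at unregister. */
	return qcom_register_ssr_notifier("mpss", &example_nb);
}

static int example_modem_deinit(void *notifier)
{
	return qcom_unregister_ssr_notifier(notifier, &example_nb);
}
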
1545 struct device *dev = ar->dev; in ath10k_setup_msa_resources()
1550 node = of_parse_phandle(dev->of_node, "memory-region", 0); in ath10k_setup_msa_resources()
1559 ar->msa.paddr = r.start; in ath10k_setup_msa_resources()
1560 ar->msa.mem_size = resource_size(&r); in ath10k_setup_msa_resources()
1561 ar->msa.vaddr = devm_memremap(dev, ar->msa.paddr, in ath10k_setup_msa_resources()
1562 ar->msa.mem_size, in ath10k_setup_msa_resources()
1564 if (IS_ERR(ar->msa.vaddr)) { in ath10k_setup_msa_resources()
1567 return PTR_ERR(ar->msa.vaddr); in ath10k_setup_msa_resources()
1570 ar->msa.vaddr = dmam_alloc_coherent(dev, msa_size, in ath10k_setup_msa_resources()
1571 &ar->msa.paddr, in ath10k_setup_msa_resources()
1573 if (!ar->msa.vaddr) { in ath10k_setup_msa_resources()
1575 return -ENOMEM; in ath10k_setup_msa_resources()
1577 ar->msa.mem_size = msa_size; in ath10k_setup_msa_resources()
1581 &ar->msa.paddr, in ath10k_setup_msa_resources()
1582 ar->msa.vaddr); in ath10k_setup_msa_resources()
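
ath10k_setup_msa_resources() prefers a reserved "memory-region" described in DT and remaps it; without one it falls back to a coherent DMA allocation of msa_size. A compressed sketch of that either/or setup, with parameter names chosen for illustration:

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/io.h>
#include <linux/ioport.h>
#include <linux/of.h>
#include <linux/of_address.h>

/* Sketch: use the DT-reserved MSA region when present, else allocate one. */
static int example_setup_msa(struct device *dev, size_t msa_size,
			     void **vaddr, dma_addr_t *paddr, size_t *mem_size)
{
	struct device_node *node;
	struct resource r;
	int ret;

	node = of_parse_phandle(dev->of_node, "memory-region", 0);
	if (node) {
		ret = of_address_to_resource(node, 0, &r);
		of_node_put(node);
		if (ret)
			return ret;

		*paddr = r.start;
		*mem_size = resource_size(&r);
		*vaddr = devm_memremap(dev, r.start, resource_size(&r),
				       MEMREMAP_WT);
		return IS_ERR(*vaddr) ? PTR_ERR(*vaddr) : 0;
	}

	*vaddr = dmam_alloc_coherent(dev, msa_size, paddr, GFP_KERNEL);
	if (!*vaddr)
		return -ENOMEM;

	*mem_size = msa_size;
	return 0;
}
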
1590 struct device *host_dev = &ar_snoc->dev->dev; in ath10k_fw_init()
1597 node = of_get_child_by_name(host_dev->of_node, "wifi-firmware"); in ath10k_fw_init()
1599 ar_snoc->use_tz = true; in ath10k_fw_init()
1604 info.fwnode = &node->fwnode; in ath10k_fw_init()
1606 info.name = node->name; in ath10k_fw_init()
1615 pdev->dev.of_node = node; in ath10k_fw_init()
1617 ret = of_dma_configure(&pdev->dev, node, true); in ath10k_fw_init()
1623 ar_snoc->fw.dev = &pdev->dev; in ath10k_fw_init()
1628 ret = -ENOMEM; in ath10k_fw_init()
1632 ret = iommu_attach_device(iommu_dom, ar_snoc->fw.dev); in ath10k_fw_init()
1638 ar_snoc->fw.iommu_domain = iommu_dom; in ath10k_fw_init()
1639 ar_snoc->fw.fw_start_addr = ar->msa.paddr; in ath10k_fw_init()
1641 ret = iommu_map(iommu_dom, ar_snoc->fw.fw_start_addr, in ath10k_fw_init()
1642 ar->msa.paddr, ar->msa.mem_size, in ath10k_fw_init()
1654 iommu_detach_device(iommu_dom, ar_snoc->fw.dev); in ath10k_fw_init()
1669 const size_t mapped_size = ar_snoc->fw.mapped_mem_size; in ath10k_fw_deinit()
1673 if (ar_snoc->use_tz) in ath10k_fw_deinit()
1676 iommu = ar_snoc->fw.iommu_domain; in ath10k_fw_deinit()
1678 unmapped_size = iommu_unmap(iommu, ar_snoc->fw.fw_start_addr, in ath10k_fw_deinit()
1684 iommu_detach_device(iommu, ar_snoc->fw.dev); in ath10k_fw_deinit()
1687 platform_device_unregister(to_platform_device(ar_snoc->fw.dev)); in ath10k_fw_deinit()
1693 { .compatible = "qcom,wcn3990-wifi",
1710 dev = &pdev->dev; in ath10k_snoc_probe()
1714 return -EINVAL; in ath10k_snoc_probe()
1717 ret = dma_set_mask_and_coherent(dev, drv_data->dma_mask); in ath10k_snoc_probe()
1724 drv_data->hw_rev, &ath10k_snoc_hif_ops); in ath10k_snoc_probe()
1727 return -ENOMEM; in ath10k_snoc_probe()
1731 ar_snoc->dev = pdev; in ath10k_snoc_probe()
1733 ar_snoc->ar = ar; in ath10k_snoc_probe()
1734 ar_snoc->ce.bus_ops = &ath10k_snoc_bus_ops; in ath10k_snoc_probe()
1735 ar->ce_priv = &ar_snoc->ce; in ath10k_snoc_probe()
1736 msa_size = drv_data->msa_size; in ath10k_snoc_probe()
1757 ar_snoc->num_vregs = ARRAY_SIZE(ath10k_regulators); in ath10k_snoc_probe()
1758 ar_snoc->vregs = devm_kcalloc(&pdev->dev, ar_snoc->num_vregs, in ath10k_snoc_probe()
1759 sizeof(*ar_snoc->vregs), GFP_KERNEL); in ath10k_snoc_probe()
1760 if (!ar_snoc->vregs) { in ath10k_snoc_probe()
1761 ret = -ENOMEM; in ath10k_snoc_probe()
1764 for (i = 0; i < ar_snoc->num_vregs; i++) in ath10k_snoc_probe()
1765 ar_snoc->vregs[i].supply = ath10k_regulators[i]; in ath10k_snoc_probe()
1767 ret = devm_regulator_bulk_get(&pdev->dev, ar_snoc->num_vregs, in ath10k_snoc_probe()
1768 ar_snoc->vregs); in ath10k_snoc_probe()
1772 ar_snoc->num_clks = ARRAY_SIZE(ath10k_clocks); in ath10k_snoc_probe()
1773 ar_snoc->clks = devm_kcalloc(&pdev->dev, ar_snoc->num_clks, in ath10k_snoc_probe()
1774 sizeof(*ar_snoc->clks), GFP_KERNEL); in ath10k_snoc_probe()
1775 if (!ar_snoc->clks) { in ath10k_snoc_probe()
1776 ret = -ENOMEM; in ath10k_snoc_probe()
1780 for (i = 0; i < ar_snoc->num_clks; i++) in ath10k_snoc_probe()
1781 ar_snoc->clks[i].id = ath10k_clocks[i]; in ath10k_snoc_probe()
1783 ret = devm_clk_bulk_get_optional(&pdev->dev, ar_snoc->num_clks, in ath10k_snoc_probe()
1784 ar_snoc->clks); in ath10k_snoc_probe()
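
In ath10k_snoc_probe() the WCN3990 supplies named at the top of the file (vdd-0.8-cx-mx, vdd-1.8-xo, ...) are gathered with the devm bulk helpers, and the clocks are fetched as optional so boards without a dedicated WLAN clock still probe. A short sketch of that acquisition step; the clock name is an assumption, the supply names come from the matches:

#include <linux/clk.h>
#include <linux/kernel.h>
#include <linux/platform_device.h>
#include <linux/regulator/consumer.h>
#include <linux/slab.h>

static const char * const example_regulators[] = {
	"vdd-0.8-cx-mx", "vdd-1.8-xo", "vdd-1.3-rfa", "vdd-3.3-ch0", "vdd-3.3-ch1",
};

static const char * const example_clocks[] = { "cxo_ref_clk_pin" }; /* assumed */

/* Sketch: build the bulk descriptors and fetch supplies/clocks managed. */
static int example_get_power_resources(struct platform_device *pdev,
					struct regulator_bulk_data **vregs,
					struct clk_bulk_data **clks)
{
	int i, ret;

	*vregs = devm_kcalloc(&pdev->dev, ARRAY_SIZE(example_regulators),
			      sizeof(**vregs), GFP_KERNEL);
	if (!*vregs)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(example_regulators); i++)
		(*vregs)[i].supply = example_regulators[i];

	ret = devm_regulator_bulk_get(&pdev->dev, ARRAY_SIZE(example_regulators),
				      *vregs);
	if (ret)
		return ret;

	*clks = devm_kcalloc(&pdev->dev, ARRAY_SIZE(example_clocks),
			     sizeof(**clks), GFP_KERNEL);
	if (!*clks)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(example_clocks); i++)
		(*clks)[i].id = example_clocks[i];

	return devm_clk_bulk_get_optional(&pdev->dev, ARRAY_SIZE(example_clocks),
					  *clks);
}
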
1838 set_bit(ATH10K_SNOC_FLAG_UNREGISTERING, &ar_snoc->flags); in ath10k_snoc_free_resources()
1858 reinit_completion(&ar->driver_recovery); in ath10k_snoc_remove()
1860 if (test_bit(ATH10K_SNOC_FLAG_RECOVERY, &ar_snoc->flags)) in ath10k_snoc_remove()
1861 wait_for_completion_timeout(&ar->driver_recovery, 3 * HZ); in ath10k_snoc_remove()