91 #include "fw-iw/iw4965.ucode.hex"
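The microcode image is compiled into the driver rather than loaded from the filesystem: the generated hex file expands to the raw bytes of the 4965 firmware, which the driver later parses through iwk_fw_bin (see the iwk_firmware_hdr_t cast in iwk_attach() and the section copies in iwk_alloc_fw_dma() below). A minimal sketch of the usual compile-in pattern, assuming the surrounding array declaration that this excerpt elides; the byte values are placeholders:

/* Sketch only: the hex file expands to comma-separated byte values. */
static uint8_t iwk_fw_bin[] = {
#include "fw-iw/iw4965.ucode.hex"	/* 0x.., 0x.., ... (placeholders) */
};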
493 mutex_enter(&sc->sc_glock); in iwk_attach()
494 sc->sc_flags &= ~IWK_F_SUSPEND; in iwk_attach()
495 mutex_exit(&sc->sc_glock); in iwk_attach()
497 if (sc->sc_flags & IWK_F_RUNNING) in iwk_attach()
500 mutex_enter(&sc->sc_glock); in iwk_attach()
501 sc->sc_flags |= IWK_F_LAZY_RESUME; in iwk_attach()
502 mutex_exit(&sc->sc_glock); in iwk_attach()
519 sc->sc_dip = dip; in iwk_attach()
521 err = ddi_regs_map_setup(dip, 0, &sc->sc_cfg_base, 0, 0, in iwk_attach()
522 &iwk_reg_accattr, &sc->sc_cfg_handle); in iwk_attach()
528 sc->sc_rev = ddi_get8(sc->sc_cfg_handle, in iwk_attach()
529 (uint8_t *)(sc->sc_cfg_base + PCI_CONF_REVID)); in iwk_attach()
530 ddi_put8(sc->sc_cfg_handle, (uint8_t *)(sc->sc_cfg_base + 0x41), 0); in iwk_attach()
531 sc->sc_clsz = ddi_get16(sc->sc_cfg_handle, in iwk_attach()
532 (uint16_t *)(sc->sc_cfg_base + PCI_CONF_CACHE_LINESZ)); in iwk_attach()
533 if (!sc->sc_clsz) in iwk_attach()
534 sc->sc_clsz = 16; in iwk_attach()
535 sc->sc_clsz = (sc->sc_clsz << 2); in iwk_attach()
536 sc->sc_dmabuf_sz = roundup(0x1000 + sizeof (struct ieee80211_frame) + in iwk_attach()
539 IEEE80211_WEP_CRCLEN), sc->sc_clsz); in iwk_attach()
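PCI_CONF_CACHE_LINESZ reports the cache line size in 32-bit words, so the value read above is shifted left by two to get bytes before it becomes the rounding unit for the shared RX/TX buffer size. A standalone illustration of that arithmetic; the register value and the per-frame overhead terms are example numbers standing in for the elided parts of the roundup() expression:

#include <stdio.h>

/* Illustration of the sc_clsz / sc_dmabuf_sz arithmetic above. */
int
main(void)
{
	unsigned int clsz_reg = 16;		/* PCI cache line size, in dwords */
	unsigned int clsz = clsz_reg << 2;	/* 16 dwords -> 64 bytes */
	unsigned int need = 0x1000 + 24 + 8;	/* payload + frame hdr + crypto (example) */
	unsigned int bufsz = ((need + clsz - 1) / clsz) * clsz;

	(void) printf("cache line = %u bytes, dma buffer = %u bytes\n", clsz, bufsz);
	return (0);
}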
543 err = ddi_regs_map_setup(dip, 1, &sc->sc_base, in iwk_attach()
544 0, 0, &iwk_reg_accattr, &sc->sc_handle); in iwk_attach()
565 sc->sc_intr_htable = kmem_zalloc(sizeof (ddi_intr_handle_t), KM_SLEEP); in iwk_attach()
567 err = ddi_intr_alloc(dip, sc->sc_intr_htable, DDI_INTR_TYPE_FIXED, 0, in iwk_attach()
575 err = ddi_intr_get_pri(sc->sc_intr_htable[0], &sc->sc_intr_pri); in iwk_attach()
582 mutex_init(&sc->sc_glock, NULL, MUTEX_DRIVER, in iwk_attach()
583 DDI_INTR_PRI(sc->sc_intr_pri)); in iwk_attach()
584 mutex_init(&sc->sc_tx_lock, NULL, MUTEX_DRIVER, in iwk_attach()
585 DDI_INTR_PRI(sc->sc_intr_pri)); in iwk_attach()
586 mutex_init(&sc->sc_mt_lock, NULL, MUTEX_DRIVER, in iwk_attach()
587 DDI_INTR_PRI(sc->sc_intr_pri)); in iwk_attach()
588 mutex_init(&sc->sc_ibss.node_tb_lock, NULL, MUTEX_DRIVER, in iwk_attach()
589 DDI_INTR_PRI(sc->sc_intr_pri)); in iwk_attach()
591 cv_init(&sc->sc_fw_cv, NULL, CV_DRIVER, NULL); in iwk_attach()
592 cv_init(&sc->sc_cmd_cv, NULL, CV_DRIVER, NULL); in iwk_attach()
593 cv_init(&sc->sc_tx_cv, "tx-ring", CV_DRIVER, NULL); in iwk_attach()
597 cv_init(&sc->sc_mt_cv, NULL, CV_DRIVER, NULL); in iwk_attach()
598 sc->sc_mf_thread = NULL; in iwk_attach()
599 sc->sc_mf_thread_switch = 0; in iwk_attach()
638 if (LE_16(sc->sc_eep_map.calib_version) < EEP_TX_POWER_VERSION_NEW) { in iwk_attach()
652 sc->sc_hdr = (iwk_firmware_hdr_t *)iwk_fw_bin; in iwk_attach()
665 ic = &sc->sc_ic; in iwk_attach()
666 ic->ic_phytype = IEEE80211_T_OFDM; in iwk_attach()
667 ic->ic_opmode = IEEE80211_M_STA; /* default to BSS mode */ in iwk_attach()
668 ic->ic_state = IEEE80211_S_INIT; in iwk_attach()
669 ic->ic_maxrssi = 100; /* experimental number */ in iwk_attach()
670 ic->ic_caps = IEEE80211_C_SHPREAMBLE | IEEE80211_C_TXPMGT | in iwk_attach()
675 ic->ic_caps |= IEEE80211_C_AES_CCM; in iwk_attach()
679 ic->ic_caps |= IEEE80211_C_WPA; in iwk_attach()
683 ic->ic_caps |= IEEE80211_C_IBSS; in iwk_attach()
686 ic->ic_sup_rates[IEEE80211_MODE_11B] = iwk_rateset_11b; in iwk_attach()
687 ic->ic_sup_rates[IEEE80211_MODE_11G] = iwk_rateset_11g; in iwk_attach()
691 ic->ic_sup_channels[i].ich_freq = in iwk_attach()
693 ic->ic_sup_channels[i].ich_flags = in iwk_attach()
698 ic->ic_ibss_chan = &ic->ic_sup_channels[0]; in iwk_attach()
700 ic->ic_xmit = iwk_send; in iwk_attach()
709 (void) snprintf(ic->ic_wpadoor, MAX_IEEE80211STR, "%s_%s%d", WPA_DOOR, in iwk_attach()
716 sc->sc_newstate = ic->ic_newstate; in iwk_attach()
717 ic->ic_newstate = iwk_newstate; in iwk_attach()
718 ic->ic_watchdog = iwk_watchdog; in iwk_attach()
719 sc->sc_recv_mgmt = ic->ic_recv_mgmt; in iwk_attach()
720 ic->ic_recv_mgmt = iwk_recv_mgmt; in iwk_attach()
721 ic->ic_node_alloc = iwk_node_alloc; in iwk_attach()
722 ic->ic_node_free = iwk_node_free; in iwk_attach()
723 ic->ic_crypto.cs_key_set = iwk_key_set; in iwk_attach()
728 ic->ic_def_txkey = 0; in iwk_attach()
729 err = ddi_intr_add_softint(dip, &sc->sc_soft_hdl, DDI_INTR_SOFTPRI_MAX, in iwk_attach()
740 err = ddi_intr_add_handler(sc->sc_intr_htable[0], iwk_intr, in iwk_attach()
748 err = ddi_intr_enable(sc->sc_intr_htable[0]); in iwk_attach()
759 wd.wd_opmode = ic->ic_opmode; in iwk_attach()
760 IEEE80211_ADDR_COPY(wd.wd_bssid, ic->ic_macaddr); in iwk_attach()
769 macp->m_type_ident = MAC_PLUGIN_IDENT_WIFI; in iwk_attach()
770 macp->m_driver = sc; in iwk_attach()
771 macp->m_dip = dip; in iwk_attach()
772 macp->m_src_addr = ic->ic_macaddr; in iwk_attach()
773 macp->m_callbacks = &iwk_m_callbacks; in iwk_attach()
774 macp->m_min_sdu = 0; in iwk_attach()
775 macp->m_max_sdu = IEEE80211_MTU; in iwk_attach()
776 macp->m_pdata = &wd; in iwk_attach()
777 macp->m_pdata_size = sizeof (wd); in iwk_attach()
782 err = mac_register(macp, &ic->ic_mach); in iwk_attach()
803 mac_link_update(ic->ic_mach, LINK_STATE_DOWN); in iwk_attach()
809 sc->sc_mf_thread_switch = 1; in iwk_attach()
810 if (sc->sc_mf_thread == NULL) in iwk_attach()
811 sc->sc_mf_thread = thread_create((caddr_t)NULL, 0, in iwk_attach()
814 sc->sc_flags |= IWK_F_ATTACHED; in iwk_attach()
818 (void) ddi_intr_disable(sc->sc_intr_htable[0]); in iwk_attach()
820 (void) ddi_intr_remove_handler(sc->sc_intr_htable[0]); in iwk_attach()
823 (void) ddi_intr_remove_softint(sc->sc_soft_hdl); in iwk_attach()
824 sc->sc_soft_hdl = NULL; in iwk_attach()
838 (void) ddi_intr_free(sc->sc_intr_htable[0]); in iwk_attach()
840 kmem_free(sc->sc_intr_htable, sizeof (ddi_intr_handle_t)); in iwk_attach()
842 ddi_regs_map_free(&sc->sc_handle); in iwk_attach()
844 ddi_regs_map_free(&sc->sc_cfg_handle); in iwk_attach()
864 mutex_enter(&sc->sc_glock); in iwk_detach()
865 sc->sc_flags |= IWK_F_SUSPEND; in iwk_detach()
866 mutex_exit(&sc->sc_glock); in iwk_detach()
867 if (sc->sc_flags & IWK_F_RUNNING) { in iwk_detach()
877 if (!(sc->sc_flags & IWK_F_ATTACHED)) in iwk_detach()
880 err = mac_disable(sc->sc_ic.ic_mach); in iwk_detach()
887 mutex_enter(&sc->sc_mt_lock); in iwk_detach()
888 sc->sc_mf_thread_switch = 0; in iwk_detach()
889 while (sc->sc_mf_thread != NULL) { in iwk_detach()
890 if (cv_wait_sig(&sc->sc_mt_cv, &sc->sc_mt_lock) == 0) in iwk_detach()
893 mutex_exit(&sc->sc_mt_lock); in iwk_detach()
901 (void) mac_unregister(sc->sc_ic.ic_mach); in iwk_detach()
903 mutex_enter(&sc->sc_glock); in iwk_detach()
908 mutex_exit(&sc->sc_glock); in iwk_detach()
910 (void) ddi_intr_disable(sc->sc_intr_htable[0]); in iwk_detach()
911 (void) ddi_intr_remove_handler(sc->sc_intr_htable[0]); in iwk_detach()
912 (void) ddi_intr_free(sc->sc_intr_htable[0]); in iwk_detach()
913 kmem_free(sc->sc_intr_htable, sizeof (ddi_intr_handle_t)); in iwk_detach()
915 (void) ddi_intr_remove_softint(sc->sc_soft_hdl); in iwk_detach()
916 sc->sc_soft_hdl = NULL; in iwk_detach()
921 ieee80211_detach(&sc->sc_ic); in iwk_detach()
925 ddi_regs_map_free(&sc->sc_handle); in iwk_detach()
926 ddi_regs_map_free(&sc->sc_cfg_handle); in iwk_detach()
936 * This function is called when the system is single-threaded at high
955 sc->sc_flags |= IWK_F_QUIESCED; in iwk_quiesce()
965 cv_destroy(&sc->sc_mt_cv); in iwk_destroy_locks()
966 mutex_destroy(&sc->sc_mt_lock); in iwk_destroy_locks()
967 cv_destroy(&sc->sc_tx_cv); in iwk_destroy_locks()
968 cv_destroy(&sc->sc_cmd_cv); in iwk_destroy_locks()
969 cv_destroy(&sc->sc_fw_cv); in iwk_destroy_locks()
970 mutex_destroy(&sc->sc_tx_lock); in iwk_destroy_locks()
971 mutex_destroy(&sc->sc_glock); in iwk_destroy_locks()
988 err = ddi_dma_alloc_handle(sc->sc_dip, dma_attr_p, in iwk_alloc_dma_mem()
989 DDI_DMA_SLEEP, NULL, &dma_p->dma_hdl); in iwk_alloc_dma_mem()
991 dma_p->dma_hdl = NULL; in iwk_alloc_dma_mem()
998 err = ddi_dma_mem_alloc(dma_p->dma_hdl, memsize, acc_attr_p, in iwk_alloc_dma_mem()
1000 DDI_DMA_SLEEP, NULL, &vaddr, &dma_p->alength, &dma_p->acc_hdl); in iwk_alloc_dma_mem()
1002 ddi_dma_free_handle(&dma_p->dma_hdl); in iwk_alloc_dma_mem()
1003 dma_p->dma_hdl = NULL; in iwk_alloc_dma_mem()
1004 dma_p->acc_hdl = NULL; in iwk_alloc_dma_mem()
1011 dma_p->mem_va = vaddr; in iwk_alloc_dma_mem()
1012 err = ddi_dma_addr_bind_handle(dma_p->dma_hdl, NULL, in iwk_alloc_dma_mem()
1013 vaddr, dma_p->alength, dma_flags, DDI_DMA_SLEEP, NULL, in iwk_alloc_dma_mem()
1014 &dma_p->cookie, &dma_p->ncookies); in iwk_alloc_dma_mem()
1016 ddi_dma_mem_free(&dma_p->acc_hdl); in iwk_alloc_dma_mem()
1017 ddi_dma_free_handle(&dma_p->dma_hdl); in iwk_alloc_dma_mem()
1018 dma_p->acc_hdl = NULL; in iwk_alloc_dma_mem()
1019 dma_p->dma_hdl = NULL; in iwk_alloc_dma_mem()
1023 dma_p->nslots = ~0U; in iwk_alloc_dma_mem()
1024 dma_p->size = ~0U; in iwk_alloc_dma_mem()
1025 dma_p->token = ~0U; in iwk_alloc_dma_mem()
1026 dma_p->offset = 0; in iwk_alloc_dma_mem()
1036 if (dma_p->dma_hdl != NULL) { in iwk_free_dma_mem()
1037 if (dma_p->ncookies) { in iwk_free_dma_mem()
1038 (void) ddi_dma_unbind_handle(dma_p->dma_hdl); in iwk_free_dma_mem()
1039 dma_p->ncookies = 0; in iwk_free_dma_mem()
1041 ddi_dma_free_handle(&dma_p->dma_hdl); in iwk_free_dma_mem()
1042 dma_p->dma_hdl = NULL; in iwk_free_dma_mem()
1045 if (dma_p->acc_hdl != NULL) { in iwk_free_dma_mem()
1046 ddi_dma_mem_free(&dma_p->acc_hdl); in iwk_free_dma_mem()
1047 dma_p->acc_hdl = NULL; in iwk_free_dma_mem()
1063 * |HDR|<-TEXT->|<-DATA->|<-INIT_TEXT->|<-INIT_DATA->|<-BOOT->| in iwk_alloc_fw_dma()
1065 t = (char *)(sc->sc_hdr + 1); in iwk_alloc_fw_dma()
1066 err = iwk_alloc_dma_mem(sc, LE_32(sc->sc_hdr->textsz), in iwk_alloc_fw_dma()
1069 &sc->sc_dma_fw_text); in iwk_alloc_fw_dma()
1070 dma_p = &sc->sc_dma_fw_text; in iwk_alloc_fw_dma()
1072 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_fw_dma()
1073 dma_p->cookie.dmac_size)); in iwk_alloc_fw_dma()
1079 (void) memcpy(dma_p->mem_va, t, LE_32(sc->sc_hdr->textsz)); in iwk_alloc_fw_dma()
1081 t += LE_32(sc->sc_hdr->textsz); in iwk_alloc_fw_dma()
1082 err = iwk_alloc_dma_mem(sc, LE_32(sc->sc_hdr->datasz), in iwk_alloc_fw_dma()
1085 &sc->sc_dma_fw_data); in iwk_alloc_fw_dma()
1086 dma_p = &sc->sc_dma_fw_data; in iwk_alloc_fw_dma()
1088 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_fw_dma()
1089 dma_p->cookie.dmac_size)); in iwk_alloc_fw_dma()
1095 (void) memcpy(dma_p->mem_va, t, LE_32(sc->sc_hdr->datasz)); in iwk_alloc_fw_dma()
1097 err = iwk_alloc_dma_mem(sc, LE_32(sc->sc_hdr->datasz), in iwk_alloc_fw_dma()
1100 &sc->sc_dma_fw_data_bak); in iwk_alloc_fw_dma()
1101 dma_p = &sc->sc_dma_fw_data_bak; in iwk_alloc_fw_dma()
1104 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_fw_dma()
1105 dma_p->cookie.dmac_size)); in iwk_alloc_fw_dma()
1111 (void) memcpy(dma_p->mem_va, t, LE_32(sc->sc_hdr->datasz)); in iwk_alloc_fw_dma()
1113 t += LE_32(sc->sc_hdr->datasz); in iwk_alloc_fw_dma()
1114 err = iwk_alloc_dma_mem(sc, LE_32(sc->sc_hdr->init_textsz), in iwk_alloc_fw_dma()
1117 &sc->sc_dma_fw_init_text); in iwk_alloc_fw_dma()
1118 dma_p = &sc->sc_dma_fw_init_text; in iwk_alloc_fw_dma()
1121 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_fw_dma()
1122 dma_p->cookie.dmac_size)); in iwk_alloc_fw_dma()
1128 (void) memcpy(dma_p->mem_va, t, LE_32(sc->sc_hdr->init_textsz)); in iwk_alloc_fw_dma()
1130 t += LE_32(sc->sc_hdr->init_textsz); in iwk_alloc_fw_dma()
1131 err = iwk_alloc_dma_mem(sc, LE_32(sc->sc_hdr->init_datasz), in iwk_alloc_fw_dma()
1134 &sc->sc_dma_fw_init_data); in iwk_alloc_fw_dma()
1135 dma_p = &sc->sc_dma_fw_init_data; in iwk_alloc_fw_dma()
1138 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_fw_dma()
1139 dma_p->cookie.dmac_size)); in iwk_alloc_fw_dma()
1145 (void) memcpy(dma_p->mem_va, t, LE_32(sc->sc_hdr->init_datasz)); in iwk_alloc_fw_dma()
1147 sc->sc_boot = t + LE_32(sc->sc_hdr->init_datasz); in iwk_alloc_fw_dma()
1155 iwk_free_dma_mem(&sc->sc_dma_fw_text); in iwk_free_fw_dma()
1156 iwk_free_dma_mem(&sc->sc_dma_fw_data); in iwk_free_fw_dma()
1157 iwk_free_dma_mem(&sc->sc_dma_fw_data_bak); in iwk_free_fw_dma()
1158 iwk_free_dma_mem(&sc->sc_dma_fw_init_text); in iwk_free_fw_dma()
1159 iwk_free_dma_mem(&sc->sc_dma_fw_init_data); in iwk_free_fw_dma()
1171 /* must be aligned on a 4K-page boundary */ in iwk_alloc_shared()
1175 &sc->sc_dma_sh); in iwk_alloc_shared()
1178 sc->sc_shared = (iwk_shared_t *)sc->sc_dma_sh.mem_va; in iwk_alloc_shared()
1180 dma_p = &sc->sc_dma_sh; in iwk_alloc_shared()
1182 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_shared()
1183 dma_p->cookie.dmac_size)); in iwk_alloc_shared()
1194 iwk_free_dma_mem(&sc->sc_dma_sh); in iwk_free_shared()
1206 /* must be aligned on a 4K-page boundary */ in iwk_alloc_kw()
1210 &sc->sc_dma_kw); in iwk_alloc_kw()
1214 dma_p = &sc->sc_dma_kw; in iwk_alloc_kw()
1216 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_kw()
1217 dma_p->cookie.dmac_size)); in iwk_alloc_kw()
1228 iwk_free_dma_mem(&sc->sc_dma_kw); in iwk_free_kw()
1239 ring = &sc->sc_rxq; in iwk_alloc_rx_ring()
1240 ring->cur = 0; in iwk_alloc_rx_ring()
1245 &ring->dma_desc); in iwk_alloc_rx_ring()
1250 ring->desc = (uint32_t *)ring->dma_desc.mem_va; in iwk_alloc_rx_ring()
1251 dma_p = &ring->dma_desc; in iwk_alloc_rx_ring()
1253 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_rx_ring()
1254 dma_p->cookie.dmac_size)); in iwk_alloc_rx_ring()
1260 data = &ring->data[i]; in iwk_alloc_rx_ring()
1261 err = iwk_alloc_dma_mem(sc, sc->sc_dmabuf_sz, in iwk_alloc_rx_ring()
1264 &data->dma_data); in iwk_alloc_rx_ring()
1271 * the physical address bits [8-36] are used, in iwk_alloc_rx_ring()
1272 * instead of bits [0-31] as in the 3945. in iwk_alloc_rx_ring()
1274 ring->desc[i] = (uint32_t) in iwk_alloc_rx_ring()
1275 (data->dma_data.cookie.dmac_address >> 8); in iwk_alloc_rx_ring()
1277 dma_p = &ring->data[0].dma_data; in iwk_alloc_rx_ring()
1280 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_rx_ring()
1281 dma_p->cookie.dmac_size)); in iwk_alloc_rx_ring()
1283 IWK_DMA_SYNC(ring->dma_desc, DDI_DMA_SYNC_FORDEV); in iwk_alloc_rx_ring()
1310 sc->sc_rxq.cur = 0; in iwk_reset_rx_ring()
1319 if (sc->sc_rxq.data[i].dma_data.dma_hdl) in iwk_free_rx_ring()
1320 IWK_DMA_SYNC(sc->sc_rxq.data[i].dma_data, in iwk_free_rx_ring()
1322 iwk_free_dma_mem(&sc->sc_rxq.data[i].dma_data); in iwk_free_rx_ring()
1325 if (sc->sc_rxq.dma_desc.dma_hdl) in iwk_free_rx_ring()
1326 IWK_DMA_SYNC(sc->sc_rxq.dma_desc, DDI_DMA_SYNC_FORDEV); in iwk_free_rx_ring()
1327 iwk_free_dma_mem(&sc->sc_rxq.dma_desc); in iwk_free_rx_ring()
1342 ring->qid = qid; in iwk_alloc_tx_ring()
1343 ring->count = TFD_QUEUE_SIZE_MAX; in iwk_alloc_tx_ring()
1344 ring->window = slots; in iwk_alloc_tx_ring()
1345 ring->queued = 0; in iwk_alloc_tx_ring()
1346 ring->cur = 0; in iwk_alloc_tx_ring()
1352 &ring->dma_desc); in iwk_alloc_tx_ring()
1358 dma_p = &ring->dma_desc; in iwk_alloc_tx_ring()
1360 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_tx_ring()
1361 dma_p->cookie.dmac_size)); in iwk_alloc_tx_ring()
1363 desc_h = (iwk_tx_desc_t *)ring->dma_desc.mem_va; in iwk_alloc_tx_ring()
1364 paddr_desc_h = ring->dma_desc.cookie.dmac_address; in iwk_alloc_tx_ring()
1370 &ring->dma_cmd); in iwk_alloc_tx_ring()
1376 dma_p = &ring->dma_cmd; in iwk_alloc_tx_ring()
1378 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_tx_ring()
1379 dma_p->cookie.dmac_size)); in iwk_alloc_tx_ring()
1381 cmd_h = (iwk_cmd_t *)ring->dma_cmd.mem_va; in iwk_alloc_tx_ring()
1382 paddr_cmd_h = ring->dma_cmd.cookie.dmac_address; in iwk_alloc_tx_ring()
1387 ring->data = kmem_zalloc(sizeof (iwk_tx_data_t) * TFD_QUEUE_SIZE_MAX, in iwk_alloc_tx_ring()
1389 if (ring->data == NULL) { in iwk_alloc_tx_ring()
1395 data = &ring->data[i]; in iwk_alloc_tx_ring()
1396 err = iwk_alloc_dma_mem(sc, sc->sc_dmabuf_sz, in iwk_alloc_tx_ring()
1399 &data->dma_data); in iwk_alloc_tx_ring()
1406 data->desc = desc_h + i; in iwk_alloc_tx_ring()
1407 data->paddr_desc = paddr_desc_h + in iwk_alloc_tx_ring()
1408 _PTRDIFF(data->desc, desc_h); in iwk_alloc_tx_ring()
1409 data->cmd = cmd_h + i; /* (i % slots); */ in iwk_alloc_tx_ring()
1411 data->paddr_cmd = paddr_cmd_h + in iwk_alloc_tx_ring()
1412 _PTRDIFF(data->cmd, cmd_h); in iwk_alloc_tx_ring()
1414 dma_p = &ring->data[0].dma_data; in iwk_alloc_tx_ring()
1417 dma_p->ncookies, dma_p->cookie.dmac_address, in iwk_alloc_tx_ring()
1418 dma_p->cookie.dmac_size)); in iwk_alloc_tx_ring()
1423 if (ring->data) in iwk_alloc_tx_ring()
1424 kmem_free(ring->data, in iwk_alloc_tx_ring()
1438 IWK_WRITE(sc, IWK_FH_TCSR_CHNL_TX_CONFIG_REG(ring->qid), 0); in iwk_reset_tx_ring()
1441 IWK_FH_TSSR_TX_STATUS_REG_MSK_CHNL_IDLE(ring->qid)) in iwk_reset_tx_ring()
1447 ring->qid)); in iwk_reset_tx_ring()
1451 for (i = 0; i < ring->count; i++) { in iwk_reset_tx_ring()
1452 data = &ring->data[i]; in iwk_reset_tx_ring()
1453 IWK_DMA_SYNC(data->dma_data, DDI_DMA_SYNC_FORDEV); in iwk_reset_tx_ring()
1456 ring->queued = 0; in iwk_reset_tx_ring()
1457 ring->cur = 0; in iwk_reset_tx_ring()
1466 if (ring->dma_desc.dma_hdl != NULL) in iwk_free_tx_ring()
1467 IWK_DMA_SYNC(ring->dma_desc, DDI_DMA_SYNC_FORDEV); in iwk_free_tx_ring()
1468 iwk_free_dma_mem(&ring->dma_desc); in iwk_free_tx_ring()
1470 if (ring->dma_cmd.dma_hdl != NULL) in iwk_free_tx_ring()
1471 IWK_DMA_SYNC(ring->dma_cmd, DDI_DMA_SYNC_FORDEV); in iwk_free_tx_ring()
1472 iwk_free_dma_mem(&ring->dma_cmd); in iwk_free_tx_ring()
1474 if (ring->data != NULL) { in iwk_free_tx_ring()
1475 for (i = 0; i < ring->count; i++) { in iwk_free_tx_ring()
1476 if (ring->data[i].dma_data.dma_hdl) in iwk_free_tx_ring()
1477 IWK_DMA_SYNC(ring->data[i].dma_data, in iwk_free_tx_ring()
1479 iwk_free_dma_mem(&ring->data[i].dma_data); in iwk_free_tx_ring()
1481 kmem_free(ring->data, ring->count * sizeof (iwk_tx_data_t)); in iwk_free_tx_ring()
1493 err = iwk_alloc_tx_ring(sc, &sc->sc_txq[i], TFD_TX_CMD_SLOTS, in iwk_ring_init()
1498 err = iwk_alloc_tx_ring(sc, &sc->sc_txq[IWK_CMD_QUEUE_NUM], in iwk_ring_init()
1517 while (--i >= 0) { in iwk_ring_free()
1518 iwk_free_tx_ring(sc, &sc->sc_txq[i]); in iwk_ring_free()
1531 return (&amrr->in); in iwk_node_alloc()
1537 ieee80211com_t *ic = in->in_ic; in iwk_node_free()
1539 ic->ic_node_cleanup(in); in iwk_node_free()
1540 if (in->in_wpa_ie != NULL) in iwk_node_free()
1541 ieee80211_free(in->in_wpa_ie); in iwk_node_free()
1550 ieee80211_node_t *in = ic->ic_bss; in iwk_newstate()
1551 enum ieee80211_state ostate = ic->ic_state; in iwk_newstate()
1554 mutex_enter(&sc->sc_glock); in iwk_newstate()
1562 sc->sc_flags |= IWK_F_SCANNING; in iwk_newstate()
1563 sc->sc_scan_pending = 0; in iwk_newstate()
1570 sc->sc_config.assoc_id = 0; in iwk_newstate()
1571 sc->sc_config.filter_flags &= in iwk_newstate()
1575 "flags %x filter_flags %x\n", sc->sc_config.chan, in iwk_newstate()
1576 sc->sc_config.flags, sc->sc_config.filter_flags)); in iwk_newstate()
1578 err = iwk_cmd(sc, REPLY_RXON, &sc->sc_config, in iwk_newstate()
1583 sc->sc_flags &= ~IWK_F_SCANNING; in iwk_newstate()
1584 mutex_exit(&sc->sc_glock); in iwk_newstate()
1597 sc->sc_flags &= ~IWK_F_SCANNING; in iwk_newstate()
1598 mutex_exit(&sc->sc_glock); in iwk_newstate()
1607 sc->sc_flags |= IWK_F_SCANNING; in iwk_newstate()
1608 sc->sc_scan_pending = 0; in iwk_newstate()
1613 mutex_exit(&sc->sc_glock); in iwk_newstate()
1615 err = sc->sc_newstate(ic, nstate, arg); in iwk_newstate()
1616 mutex_enter(&sc->sc_glock); in iwk_newstate()
1620 sc->sc_flags &= ~IWK_F_SCANNING; in iwk_newstate()
1623 mutex_exit(&sc->sc_glock); in iwk_newstate()
1629 sc->sc_clk = 0; in iwk_newstate()
1634 sc->sc_flags &= ~IWK_F_SCANNING; in iwk_newstate()
1638 sc->sc_config.assoc_id = 0; in iwk_newstate()
1639 sc->sc_config.filter_flags &= ~LE_32(RXON_FILTER_ASSOC_MSK); in iwk_newstate()
1649 mutex_exit(&sc->sc_glock); in iwk_newstate()
1656 sc->sc_flags &= ~IWK_F_SCANNING; in iwk_newstate()
1659 if (ic->ic_opmode == IEEE80211_M_MONITOR) { in iwk_newstate()
1667 if (ic->ic_opmode == IEEE80211_M_IBSS) { in iwk_newstate()
1677 mutex_exit(&sc->sc_glock); in iwk_newstate()
1683 if (ic->ic_opmode != IEEE80211_M_IBSS) { in iwk_newstate()
1690 mutex_exit(&sc->sc_glock); in iwk_newstate()
1696 sc->sc_tempera = iwk_curr_tempera(sc); in iwk_newstate()
1706 mutex_exit(&sc->sc_glock); in iwk_newstate()
1710 if (ic->ic_opmode == IEEE80211_M_IBSS) { in iwk_newstate()
1719 mutex_exit(&sc->sc_glock); in iwk_newstate()
1725 mutex_enter(&sc->sc_mt_lock); in iwk_newstate()
1726 if (ic->ic_fixed_rate == IEEE80211_FIXED_RATE_NONE) { in iwk_newstate()
1727 sc->sc_flags |= IWK_F_RATE_AUTO_CTL; in iwk_newstate()
1729 i = in->in_rates.ir_nrates - 1; in iwk_newstate()
1731 i--; in iwk_newstate()
1732 in->in_txrate = i; in iwk_newstate()
1734 sc->sc_flags &= ~IWK_F_RATE_AUTO_CTL; in iwk_newstate()
1736 mutex_exit(&sc->sc_mt_lock); in iwk_newstate()
1744 sc->sc_flags &= ~IWK_F_SCANNING; in iwk_newstate()
1752 sc->sc_flags &= ~IWK_F_SCANNING; in iwk_newstate()
1758 mutex_exit(&sc->sc_glock); in iwk_newstate()
1760 err = sc->sc_newstate(ic, nstate, arg); in iwk_newstate()
1764 mutex_enter(&sc->sc_glock); in iwk_newstate()
1774 mutex_exit(&sc->sc_glock); in iwk_newstate()
1783 mutex_exit(&sc->sc_glock); in iwk_newstate()
1787 mutex_exit(&sc->sc_glock); in iwk_newstate()
1798 struct ieee80211com *ic = &sc->sc_ic; in iwk_watchdog()
1800 timeout_id_t timeout_id = ic->ic_watchdog_timer; in iwk_watchdog()
1805 if ((ic->ic_state != IEEE80211_S_AUTH) && in iwk_watchdog()
1806 (ic->ic_state != IEEE80211_S_ASSOC)) in iwk_watchdog()
1809 if (ic->ic_bss->in_fails > 0) { in iwk_watchdog()
1811 "node (0x%x)\n", timeout_id, &ic->ic_bss)); in iwk_watchdog()
1812 ieee80211_new_state(ic, IEEE80211_S_INIT, -1); in iwk_watchdog()
1816 timeout_id, &ic->ic_bss, ic->ic_bss->in_fails + 1)); in iwk_watchdog()
1830 switch (k->wk_cipher->ic_cipher) { in iwk_key_set()
1839 sc->sc_config.filter_flags &= ~LE_32(RXON_FILTER_DIS_DECRYPT_MSK | in iwk_key_set()
1842 mutex_enter(&sc->sc_glock); in iwk_key_set()
1849 } else if (ic->ic_opmode == IEEE80211_M_IBSS) { in iwk_key_set()
1850 mutex_exit(&sc->sc_glock); in iwk_key_set()
1851 mutex_enter(&sc->sc_ibss.node_tb_lock); in iwk_key_set()
1858 if (sc->sc_ibss.ibss_node_tb[index1].used && in iwk_key_set()
1859 IEEE80211_ADDR_EQ(sc->sc_ibss. in iwk_key_set()
1868 mutex_exit(&sc->sc_ibss.node_tb_lock); in iwk_key_set()
1874 if (k->wk_flags & IEEE80211_KEY_XMIT) { in iwk_key_set()
1875 sc->sc_ibss.ibss_node_tb[index1]. in iwk_key_set()
1877 sc->sc_ibss.ibss_node_tb[index1]. in iwk_key_set()
1878 node.keyp = k->wk_keyix; in iwk_key_set()
1880 sc->sc_ibss.ibss_node_tb[index1]. in iwk_key_set()
1882 sc->sc_ibss.ibss_node_tb[index1]. in iwk_key_set()
1883 node.keyp = k->wk_keyix + 4; in iwk_key_set()
1886 (void) memcpy(sc->sc_ibss.ibss_node_tb[index1].node.key, in iwk_key_set()
1887 k->wk_key, k->wk_keylen); in iwk_key_set()
1888 sc->sc_ibss.ibss_node_tb[index1].node.key_flags |= in iwk_key_set()
1889 (STA_KEY_FLG_CCMP | (1 << 3) | (k->wk_keyix << 8)); in iwk_key_set()
1890 sc->sc_ibss.ibss_node_tb[index1].node.key_flags = in iwk_key_set()
1891 LE_16(sc->sc_ibss.ibss_node_tb[index1]. in iwk_key_set()
1893 sc->sc_ibss.ibss_node_tb[index1].node.sta_mask = in iwk_key_set()
1895 sc->sc_ibss.ibss_node_tb[index1].node.control = 1; in iwk_key_set()
1897 mutex_enter(&sc->sc_glock); in iwk_key_set()
1899 &sc->sc_ibss.ibss_node_tb[index1].node, in iwk_key_set()
1904 mutex_exit(&sc->sc_glock); in iwk_key_set()
1905 mutex_exit(&sc->sc_ibss.node_tb_lock); in iwk_key_set()
1908 mutex_exit(&sc->sc_glock); in iwk_key_set()
1910 mutex_exit(&sc->sc_ibss.node_tb_lock); in iwk_key_set()
1913 IEEE80211_ADDR_COPY(node.bssid, ic->ic_bss->in_bssid); in iwk_key_set()
1916 if (k->wk_flags & IEEE80211_KEY_XMIT) { in iwk_key_set()
1918 node.keyp = k->wk_keyix; in iwk_key_set()
1921 node.keyp = k->wk_keyix + 4; in iwk_key_set()
1923 (void) memcpy(node.key, k->wk_key, k->wk_keylen); in iwk_key_set()
1924 node.key_flags |= (STA_KEY_FLG_CCMP | (1 << 3) | (k->wk_keyix << 8)); in iwk_key_set()
1932 mutex_exit(&sc->sc_glock); in iwk_key_set()
1935 mutex_exit(&sc->sc_glock); in iwk_key_set()
2008 for (; wlen > 0; wlen--, data++, addr += 4) in iwk_reg_write_region_4()
2017 * retain the bootstrap program even when the card is in a power-saving
2018 * power-down state. The BSM loads the small program into the ARC processor's
2019 * instruction memory when triggered by power-up.
2029 uint32_t *boot_fw = (uint32_t *)sc->sc_boot; in iwk_load_firmware()
2030 uint32_t size = LE_32(sc->sc_hdr->bootsz); in iwk_load_firmware()
2034 * The physical address bits [4-35] of the initialize uCode. in iwk_load_firmware()
2041 sc->sc_dma_fw_init_text.cookie.dmac_address >> 4); in iwk_load_firmware()
2043 sc->sc_dma_fw_init_data.cookie.dmac_address >> 4); in iwk_load_firmware()
2045 sc->sc_dma_fw_init_text.cookie.dmac_size); in iwk_load_firmware()
2047 sc->sc_dma_fw_init_data.cookie.dmac_size); in iwk_load_firmware()
2075 /* for future power-save mode use */ in iwk_load_firmware()
2087 ieee80211com_t *ic = &sc->sc_ic; in iwk_rx_intr()
2088 iwk_rx_ring_t *ring = &sc->sc_rxq; in iwk_rx_intr()
2100 /* assuming no 11n here; cope with 11n in phase-II */ in iwk_rx_intr()
2102 if (stat->cfg_phy_cnt > 20) { in iwk_rx_intr()
2107 stat->non_cfg_phy[i] = LE_16(stat->non_cfg_phy[i]); in iwk_rx_intr()
2109 phyinfo = (struct iwk_rx_non_cfg_phy *)stat->non_cfg_phy; in iwk_rx_intr()
2110 agc = (phyinfo->agc_info & IWK_AGC_DB_MASK) >> IWK_AGC_DB_POS; in iwk_rx_intr()
2112 ants = (LE_16(stat->phy_flags) & RX_PHY_FLAGS_ANTENNAE_MASK) >> in iwk_rx_intr()
2116 mrssi = MAX(mrssi, phyinfo->rssi_info[i << 1]); in iwk_rx_intr()
2118 t = mrssi - agc - 44; /* t is the dBM value */ in iwk_rx_intr()
2122 rssi = (100 * 75 * 75 - (-20 - t) * (15 * 75 + 62 * (-20 - t))) / in iwk_rx_intr()
2128 len = LE_16(stat->byte_count); in iwk_rx_intr()
2129 tail = (uint32_t *)((caddr_t)(stat + 1) + stat->cfg_phy_cnt + len); in iwk_rx_intr()
2134 "cfg_phy_count=%x tail=%x", ring->cur, sizeof (*stat), in iwk_rx_intr()
2135 len, stat->rate.r.s.rate, LE_16(stat->channel), in iwk_rx_intr()
2136 LE_32(stat->timestampl), stat->non_cfg_phy_cnt, in iwk_rx_intr()
2137 stat->cfg_phy_cnt, LE_32(crc))); in iwk_rx_intr()
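The quadratic a few lines above maps the estimated signal level t (in dBm) onto the 0-100 scale implied by ic_maxrssi = 100; its divisor sits on a continuation line that this excerpt elides and is assumed to be (75 * 75) below. A worked version of the mapping under that assumption (the helper name is illustrative):

/*
 * Worked example of the dBm-to-percentage mapping above,
 * assuming the elided divisor is (75 * 75).
 */
static int
dbm_to_percent(int t)
{
	return ((100 * 75 * 75 -
	    (-20 - t) * (15 * 75 + 62 * (-20 - t))) / (75 * 75));
}
/* dbm_to_percent(-20) == 100, dbm_to_percent(-60) == 74, dbm_to_percent(-95) == 23 */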
2139 if ((len < 16) || (len > sc->sc_dmabuf_sz)) { in iwk_rx_intr()
2152 sc->sc_rx_err++; in iwk_rx_intr()
2157 ((uint8_t *)(stat + 1)+ stat->cfg_phy_cnt); in iwk_rx_intr()
2159 sc->sc_assoc_id = *((uint16_t *)(wh + 1) + 2); in iwk_rx_intr()
2161 sc->sc_assoc_id)); in iwk_rx_intr()
2170 (void) memcpy(mp->b_wptr, wh, len); in iwk_rx_intr()
2171 mp->b_wptr += len; in iwk_rx_intr()
2176 sc->sc_rx_nobuf++; in iwk_rx_intr()
2188 ieee80211com_t *ic = &sc->sc_ic; in iwk_tx_intr()
2189 iwk_tx_ring_t *ring = &sc->sc_txq[desc->hdr.qid & 0x3]; in iwk_tx_intr()
2191 iwk_amrr_t *amrr = (iwk_amrr_t *)ic->ic_bss; in iwk_tx_intr()
2196 desc->hdr.qid, desc->hdr.idx, stat->ntries, stat->frame_count, in iwk_tx_intr()
2197 stat->bt_kill_count, stat->rate.r.s.rate, in iwk_tx_intr()
2198 LE_16(stat->duration), LE_32(stat->status))); in iwk_tx_intr()
2200 amrr->txcnt++; in iwk_tx_intr()
2201 IWK_DBG((IWK_DEBUG_RATECTL, "tx: %d cnt\n", amrr->txcnt)); in iwk_tx_intr()
2202 if (stat->ntries > 0) { in iwk_tx_intr()
2203 amrr->retrycnt++; in iwk_tx_intr()
2204 sc->sc_tx_retries++; in iwk_tx_intr()
2206 sc->sc_tx_retries)); in iwk_tx_intr()
2209 sc->sc_tx_timer = 0; in iwk_tx_intr()
2211 mutex_enter(&sc->sc_tx_lock); in iwk_tx_intr()
2212 ring->queued--; in iwk_tx_intr()
2213 if (ring->queued < 0) in iwk_tx_intr()
2214 ring->queued = 0; in iwk_tx_intr()
2215 if ((sc->sc_need_reschedule) && (ring->queued <= (ring->count << 3))) { in iwk_tx_intr()
2216 sc->sc_need_reschedule = 0; in iwk_tx_intr()
2217 mutex_exit(&sc->sc_tx_lock); in iwk_tx_intr()
2218 mac_tx_update(ic->ic_mach); in iwk_tx_intr()
2219 mutex_enter(&sc->sc_tx_lock); in iwk_tx_intr()
2221 mutex_exit(&sc->sc_tx_lock); in iwk_tx_intr()
2227 if ((desc->hdr.qid & 7) != 4) { in iwk_cmd_intr()
2230 mutex_enter(&sc->sc_glock); in iwk_cmd_intr()
2231 sc->sc_flags |= IWK_F_CMD_DONE; in iwk_cmd_intr()
2232 cv_signal(&sc->sc_cmd_cv); in iwk_cmd_intr()
2233 mutex_exit(&sc->sc_glock); in iwk_cmd_intr()
2236 desc->hdr.qid, desc->hdr.idx, desc->hdr.flags, in iwk_cmd_intr()
2237 desc->hdr.type)); in iwk_cmd_intr()
2251 ar->ucode_major, ar->ucode_minor, ar->ver_type, ar->ver_subtype)); in iwk_ucode_alive()
2253 if (LE_32(ar->is_valid) != UCODE_VALID_OK) { in iwk_ucode_alive()
2257 if (ar->ver_subtype == INITIALIZE_SUBTYPE) { in iwk_ucode_alive()
2260 (void) memcpy(&sc->sc_card_alive_init, ar, in iwk_ucode_alive()
2265 sc->sc_dma_fw_text.cookie.dmac_address >> 4); in iwk_ucode_alive()
2267 sc->sc_dma_fw_data_bak.cookie.dmac_address >> 4); in iwk_ucode_alive()
2269 sc->sc_dma_fw_data.cookie.dmac_size); in iwk_ucode_alive()
2271 sc->sc_dma_fw_text.cookie.dmac_size | 0x80000000); in iwk_ucode_alive()
2275 (void) memcpy(&sc->sc_card_alive_run, ar, in iwk_ucode_alive()
2284 sc->sc_scd_base = iwk_reg_read(sc, SCD_SRAM_BASE_ADDR); in iwk_ucode_alive()
2287 for (base = sc->sc_scd_base + SCD_CONTEXT_DATA_OFFSET, i = 0; in iwk_ucode_alive()
2292 for (base = sc->sc_scd_base + SCD_TX_STTS_BITMAP_OFFSET; in iwk_ucode_alive()
2297 for (base = sc->sc_scd_base + SCD_TRANSLATE_TBL_OFFSET; in iwk_ucode_alive()
2302 sc->sc_dma_sh.cookie.dmac_address >> 10); in iwk_ucode_alive()
2309 iwk_mem_write(sc, sc->sc_scd_base + in iwk_ucode_alive()
2312 iwk_mem_write(sc, sc->sc_scd_base + in iwk_ucode_alive()
2316 /* interrupt enable on each queue 0-7 */ in iwk_ucode_alive()
2318 (1 << IWK_NUM_QUEUES) - 1); in iwk_ucode_alive()
2319 /* enable each channel 0-7 */ in iwk_ucode_alive()
2323 * queues 0-7 map to FIFOs 0-7 and in iwk_ucode_alive()
2324 * all queues work under FIFO mode (non-scheduler-ack) in iwk_ucode_alive()
2335 sc->sc_flags |= IWK_F_FW_INIT; in iwk_ucode_alive()
2336 cv_signal(&sc->sc_fw_cv); in iwk_ucode_alive()
2346 ieee80211com_t *ic = &sc->sc_ic; in iwk_rx_softintr()
2351 mutex_enter(&sc->sc_glock); in iwk_rx_softintr()
2352 if (sc->sc_rx_softint_pending != 1) { in iwk_rx_softintr()
2353 mutex_exit(&sc->sc_glock); in iwk_rx_softintr()
2358 mutex_exit(&sc->sc_glock); in iwk_rx_softintr()
2364 index = sc->sc_shared->val0 & 0xfff; in iwk_rx_softintr()
2366 while (sc->sc_rxq.cur != index) { in iwk_rx_softintr()
2367 data = &sc->sc_rxq.data[sc->sc_rxq.cur]; in iwk_rx_softintr()
2368 desc = (iwk_rx_desc_t *)data->dma_data.mem_va; in iwk_rx_softintr()
2372 index, sc->sc_rxq.cur, desc->hdr.qid, desc->hdr.idx, in iwk_rx_softintr()
2373 desc->hdr.flags, desc->hdr.type, LE_32(desc->len))); in iwk_rx_softintr()
2376 if (!(desc->hdr.qid & 0x80) && in iwk_rx_softintr()
2377 (desc->hdr.type != REPLY_RX_PHY_CMD) && in iwk_rx_softintr()
2378 (desc->hdr.type != REPLY_TX) && in iwk_rx_softintr()
2379 (desc->hdr.type != REPLY_TX_PWR_TABLE_CMD) && in iwk_rx_softintr()
2380 (desc->hdr.type != REPLY_PHY_CALIBRATION_CMD) && in iwk_rx_softintr()
2381 (desc->hdr.type != SENSITIVITY_CMD)) in iwk_rx_softintr()
2384 switch (desc->hdr.type) { in iwk_rx_softintr()
2414 sc->sc_ostate = sc->sc_ic.ic_state; in iwk_rx_softintr()
2415 ieee80211_new_state(&sc->sc_ic, in iwk_rx_softintr()
2416 IEEE80211_S_INIT, -1); in iwk_rx_softintr()
2417 sc->sc_flags |= in iwk_rx_softintr()
2429 scan->chan, LE_32(scan->status))); in iwk_rx_softintr()
2431 ic->ic_curchan = &ic->ic_sup_channels[scan->chan]; in iwk_rx_softintr()
2441 scan->chan, scan->nchan, scan->status)); in iwk_rx_softintr()
2443 sc->sc_scan_pending++; in iwk_rx_softintr()
2452 sc->sc_rxq.cur = (sc->sc_rxq.cur + 1) % RX_QUEUE_SIZE; in iwk_rx_softintr()
2459 index = (index == 0) ? RX_QUEUE_SIZE - 1 : index - 1; in iwk_rx_softintr()
2462 mutex_enter(&sc->sc_glock); in iwk_rx_softintr()
2463 /* re-enable interrupts */ in iwk_rx_softintr()
2465 sc->sc_rx_softint_pending = 0; in iwk_rx_softintr()
2466 mutex_exit(&sc->sc_glock); in iwk_rx_softintr()
2478 mutex_enter(&sc->sc_glock); in iwk_intr()
2480 if (sc->sc_flags & IWK_F_SUSPEND) { in iwk_intr()
2481 mutex_exit(&sc->sc_glock); in iwk_intr()
2487 mutex_exit(&sc->sc_glock); in iwk_intr()
2501 if (sc->sc_soft_hdl == NULL) { in iwk_intr()
2502 mutex_exit(&sc->sc_glock); in iwk_intr()
2507 mutex_exit(&sc->sc_glock); in iwk_intr()
2514 sc->sc_ostate = sc->sc_ic.ic_state; in iwk_intr()
2516 /* not capable of fast recovery */ in iwk_intr()
2518 ieee80211_new_state(&sc->sc_ic, IEEE80211_S_INIT, -1); in iwk_intr()
2520 sc->sc_flags |= IWK_F_HW_ERR_RECOVER; in iwk_intr()
2532 sc->sc_rx_softint_pending = 1; in iwk_intr()
2533 (void) ddi_intr_trigger_softint(sc->sc_soft_hdl, NULL); in iwk_intr()
2540 /* re-enable interrupts */ in iwk_intr()
2542 mutex_exit(&sc->sc_glock); in iwk_intr()
2602 ieee80211com_t *ic = &sc->sc_ic; in iwk_m_tx()
2605 if (sc->sc_flags & IWK_F_SUSPEND) { in iwk_m_tx()
2610 if (ic->ic_state != IEEE80211_S_RUN) { in iwk_m_tx()
2615 if ((sc->sc_flags & IWK_F_HW_ERR_RECOVER) && in iwk_m_tx()
2622 next = mp->b_next; in iwk_m_tx()
2623 mp->b_next = NULL; in iwk_m_tx()
2625 mp->b_next = next; in iwk_m_tx()
2651 ring = &sc->sc_txq[0]; in iwk_send()
2652 data = &ring->data[ring->cur]; in iwk_send()
2653 desc = data->desc; in iwk_send()
2654 cmd = data->cmd; in iwk_send()
2658 mutex_enter(&sc->sc_tx_lock); in iwk_send()
2659 if (sc->sc_flags & IWK_F_SUSPEND) { in iwk_send()
2660 mutex_exit(&sc->sc_tx_lock); in iwk_send()
2669 if (ring->queued > ring->count - 64) { in iwk_send()
2671 sc->sc_need_reschedule = 1; in iwk_send()
2672 mutex_exit(&sc->sc_tx_lock); in iwk_send()
2677 sc->sc_tx_nobuf++; in iwk_send()
2681 mutex_exit(&sc->sc_tx_lock); in iwk_send()
2693 for (off = 0, m0 = mp; m0 != NULL; m0 = m0->b_cont) { in iwk_send()
2695 (void) memcpy(m->b_rptr + off, m0->b_rptr, mblen); in iwk_send()
2698 m->b_wptr += off; in iwk_send()
2701 wh = (struct ieee80211_frame *)m->b_rptr; in iwk_send()
2703 if (ic->ic_opmode == IEEE80211_M_IBSS && in iwk_send()
2704 (!(IEEE80211_IS_MULTICAST(wh->i_addr1)))) { in iwk_send()
2705 mutex_enter(&sc->sc_glock); in iwk_send()
2706 mutex_enter(&sc->sc_ibss.node_tb_lock); in iwk_send()
2713 if (sc->sc_ibss.ibss_node_tb[index1].used && in iwk_send()
2714 IEEE80211_ADDR_EQ(sc->sc_ibss. in iwk_send()
2716 wh->i_addr1)) { in iwk_send()
2726 wh->i_addr1, &index2); in iwk_send()
2731 mutex_exit(&sc->sc_ibss.node_tb_lock); in iwk_send()
2732 mutex_exit(&sc->sc_glock); in iwk_send()
2734 sc->sc_tx_err++; in iwk_send()
2742 mutex_exit(&sc->sc_ibss.node_tb_lock); in iwk_send()
2743 mutex_exit(&sc->sc_glock); in iwk_send()
2746 in = ieee80211_find_txnode(ic, wh->i_addr1); in iwk_send()
2750 sc->sc_tx_err++; in iwk_send()
2756 cmd->hdr.type = REPLY_TX; in iwk_send()
2757 cmd->hdr.flags = 0; in iwk_send()
2758 cmd->hdr.qid = ring->qid; in iwk_send()
2759 cmd->hdr.idx = ring->cur; in iwk_send()
2761 tx = (iwk_tx_cmd_t *)cmd->data; in iwk_send()
2762 tx->tx_flags = 0; in iwk_send()
2764 if (IEEE80211_IS_MULTICAST(wh->i_addr1)) { in iwk_send()
2765 tx->tx_flags &= ~(LE_32(TX_CMD_FLG_ACK_MSK)); in iwk_send()
2767 tx->tx_flags |= LE_32(TX_CMD_FLG_ACK_MSK); in iwk_send()
2770 if (wh->i_fc[1] & IEEE80211_FC1_WEP) { in iwk_send()
2774 sc->sc_tx_err++; in iwk_send()
2779 if (k->wk_cipher->ic_cipher == IEEE80211_CIPHER_AES_CCM) { in iwk_send()
2780 tx->sec_ctl = 2; /* for CCMP */ in iwk_send()
2781 tx->tx_flags |= LE_32(TX_CMD_FLG_ACK_MSK); in iwk_send()
2782 (void) memcpy(&tx->key, k->wk_key, k->wk_keylen); in iwk_send()
2786 wh = (struct ieee80211_frame *)m->b_rptr; in iwk_send()
2797 if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) == in iwk_send()
2800 rate = in->in_rates.ir_rates[0]; in iwk_send()
2806 * tx->initial_rate_index = LINK_QUAL_MAX_RETRY_NUM - 1; in iwk_send()
2808 * tx->tx_flags |= (LE_32(TX_CMD_FLG_STA_RATE_MSK)); in iwk_send()
2809 * rate = in->in_rates.ir_rates[in->in_txrate]; in iwk_send()
2810 * tx->initial_rate_index = 1; in iwk_send()
2816 if (ic->ic_fixed_rate != IEEE80211_FIXED_RATE_NONE) { in iwk_send()
2817 rate = ic->ic_fixed_rate; in iwk_send()
2819 rate = in->in_rates.ir_rates[in->in_txrate]; in iwk_send()
2824 in->in_txrate, in->in_rates.ir_nrates, rate)); in iwk_send()
2826 tx->tx_flags |= (LE_32(TX_CMD_FLG_SEQ_CTL_MSK)); in iwk_send()
2830 tx->tx_flags |= LE_32(TX_CMD_FLG_MH_PAD_MSK); in iwk_send()
2833 if (IEEE80211_IS_MULTICAST(wh->i_addr1)) { in iwk_send()
2834 tx->sta_id = IWK_BROADCAST_ID; in iwk_send()
2836 if (ic->ic_opmode == IEEE80211_M_IBSS) in iwk_send()
2837 tx->sta_id = index; in iwk_send()
2839 tx->sta_id = IWK_AP_ID; in iwk_send()
2842 if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) == in iwk_send()
2845 if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) == in iwk_send()
2847 tx->tx_flags |= LE_32(TX_CMD_FLG_TSF_MSK); in iwk_send()
2849 if (((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) == in iwk_send()
2851 ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) == in iwk_send()
2853 tx->timeout.pm_frame_timeout = LE_16(3); in iwk_send()
2855 tx->timeout.pm_frame_timeout = LE_16(2); in iwk_send()
2857 tx->timeout.pm_frame_timeout = 0; in iwk_send()
2862 tx->rate.r.rate_n_flags = LE_32(iwk_rate_to_plcp(rate) | masks); in iwk_send()
2865 LE_32(tx->tx_flags))); in iwk_send()
2867 tx->rts_retry_limit = 60; in iwk_send()
2868 tx->data_retry_limit = 15; in iwk_send()
2870 tx->stop_time.life_time = LE_32(0xffffffff); in iwk_send()
2872 tx->len = LE_16(len); in iwk_send()
2874 tx->dram_lsb_ptr = in iwk_send()
2875 LE_32(data->paddr_cmd + 4 + offsetof(iwk_tx_cmd_t, scratch)); in iwk_send()
2876 tx->dram_msb_ptr = 0; in iwk_send()
2877 tx->driver_txop = 0; in iwk_send()
2878 tx->next_frame_len = 0; in iwk_send()
2880 (void) memcpy(tx + 1, m->b_rptr, hdrlen); in iwk_send()
2881 m->b_rptr += hdrlen; in iwk_send()
2882 (void) memcpy(data->dma_data.mem_va, m->b_rptr, len - hdrlen); in iwk_send()
2885 ring->qid, ring->cur, len)); in iwk_send()
2891 desc->val0 = 2 << 24; in iwk_send()
2892 desc->pa[0].tb1_addr = data->paddr_cmd; in iwk_send()
2893 desc->pa[0].val1 = ((len0 << 4) & 0xfff0) | in iwk_send()
2894 ((data->dma_data.cookie.dmac_address & 0xffff) << 16); in iwk_send()
2895 desc->pa[0].val2 = in iwk_send()
2896 ((data->dma_data.cookie.dmac_address & 0xffff0000) >> 16) | in iwk_send()
2897 ((len - hdrlen) << 20); in iwk_send()
2900 data->paddr_cmd, data->dma_data.cookie.dmac_address, in iwk_send()
2901 len0, len - hdrlen, LE_32(desc->pa[0].val1), in iwk_send()
2902 LE_32(desc->pa[0].val2))); in iwk_send()
2904 mutex_enter(&sc->sc_tx_lock); in iwk_send()
2905 ring->queued++; in iwk_send()
2906 mutex_exit(&sc->sc_tx_lock); in iwk_send()
2909 sc->sc_shared->queues_byte_cnt_tbls[ring->qid]. in iwk_send()
2910 tfd_offset[ring->cur].val = 8 + len; in iwk_send()
2911 if (ring->cur < IWK_MAX_WIN_SIZE) { in iwk_send()
2912 sc->sc_shared->queues_byte_cnt_tbls[ring->qid]. in iwk_send()
2913 tfd_offset[IWK_QUEUE_SIZE + ring->cur].val = 8 + len; in iwk_send()
2916 IWK_DMA_SYNC(data->dma_data, DDI_DMA_SYNC_FORDEV); in iwk_send()
2917 IWK_DMA_SYNC(ring->dma_desc, DDI_DMA_SYNC_FORDEV); in iwk_send()
2919 ring->cur = (ring->cur + 1) % ring->count; in iwk_send()
2920 IWK_WRITE(sc, HBUS_TARG_WRPTR, ring->qid << 8 | ring->cur); in iwk_send()
2925 ic->ic_stats.is_tx_bytes += len; in iwk_send()
2926 ic->ic_stats.is_tx_frags++; in iwk_send()
2928 if (sc->sc_tx_timer == 0) in iwk_send()
2929 sc->sc_tx_timer = 4; in iwk_send()
2939 ieee80211com_t *ic = &sc->sc_ic; in iwk_m_ioctl()
2948 oldmod = ic->ic_opmode; in iwk_m_ioctl()
2955 if ((0 == err || ENETRESET == err) && (oldmod != ic->ic_opmode) && in iwk_m_ioctl()
2956 (ic->ic_opmode == IEEE80211_M_STA)) { in iwk_m_ioctl()
2958 (void) memset(&sc->sc_config, 0, sizeof (iwk_rxon_cmd_t)); in iwk_m_ioctl()
2959 IEEE80211_ADDR_COPY(sc->sc_config.node_addr, ic->ic_macaddr); in iwk_m_ioctl()
2960 IEEE80211_ADDR_COPY(sc->sc_config.wlap_bssid, ic->ic_macaddr); in iwk_m_ioctl()
2961 sc->sc_config.chan = in iwk_m_ioctl()
2962 LE_16(ieee80211_chan2ieee(ic, ic->ic_curchan)); in iwk_m_ioctl()
2963 sc->sc_config.flags = LE_32(RXON_FLG_TSF2HOST_MSK | in iwk_m_ioctl()
2966 sc->sc_config.flags &= LE_32(~RXON_FLG_CCK_MSK); in iwk_m_ioctl()
2967 switch (ic->ic_opmode) { in iwk_m_ioctl()
2969 sc->sc_config.dev_type = RXON_DEV_TYPE_ESS; in iwk_m_ioctl()
2970 sc->sc_config.filter_flags |= in iwk_m_ioctl()
2977 sc->sc_config.dev_type = RXON_DEV_TYPE_IBSS; in iwk_m_ioctl()
2978 sc->sc_config.flags |= in iwk_m_ioctl()
2980 sc->sc_config.filter_flags = in iwk_m_ioctl()
2986 sc->sc_config.dev_type = RXON_DEV_TYPE_AP; in iwk_m_ioctl()
2989 sc->sc_config.dev_type = RXON_DEV_TYPE_SNIFFER; in iwk_m_ioctl()
2990 sc->sc_config.filter_flags |= in iwk_m_ioctl()
2996 sc->sc_config.cck_basic_rates = 0x0f; in iwk_m_ioctl()
2997 sc->sc_config.ofdm_basic_rates = 0xff; in iwk_m_ioctl()
2998 sc->sc_config.ofdm_ht_single_stream_basic_rates = 0xff; in iwk_m_ioctl()
2999 sc->sc_config.ofdm_ht_dual_stream_basic_rates = 0xff; in iwk_m_ioctl()
3001 mutex_enter(&sc->sc_glock); in iwk_m_ioctl()
3002 sc->sc_config.rx_chain = LE_16(RXON_RX_CHAIN_DRIVER_FORCE_MSK | in iwk_m_ioctl()
3006 err1 = iwk_cmd(sc, REPLY_RXON, &sc->sc_config, in iwk_m_ioctl()
3020 txpower.channel = sc->sc_config.chan; in iwk_m_ioctl()
3069 mutex_exit(&sc->sc_glock); in iwk_m_ioctl()
3070 ieee80211_new_state(ic, IEEE80211_S_INIT, -1); in iwk_m_ioctl()
3082 if (ic->ic_des_esslen) { in iwk_m_ioctl()
3083 if (sc->sc_flags & IWK_F_RUNNING) { in iwk_m_ioctl()
3087 IEEE80211_S_SCAN, -1); in iwk_m_ioctl()
3104 err = ieee80211_getprop(&sc->sc_ic, pr_name, wldp_pr_num, in iwk_m_getprop()
3116 ieee80211com_t *ic = &sc->sc_ic; in iwk_m_setprop()
3122 if (ic->ic_des_esslen) { in iwk_m_setprop()
3123 if (sc->sc_flags & IWK_F_RUNNING) { in iwk_m_setprop()
3127 IEEE80211_S_SCAN, -1); in iwk_m_setprop()
3141 ieee80211com_t *ic = &sc->sc_ic; in iwk_m_propinfo()
3151 ieee80211com_t *ic = &sc->sc_ic; in iwk_m_stat()
3154 mutex_enter(&sc->sc_glock); in iwk_m_stat()
3157 in = ic->ic_bss; in iwk_m_stat()
3158 *val = ((ic->ic_fixed_rate == IEEE80211_FIXED_RATE_NONE) ? in iwk_m_stat()
3159 IEEE80211_RATE(in->in_txrate) : in iwk_m_stat()
3160 ic->ic_fixed_rate) / 2 * 1000000; in iwk_m_stat()
3163 *val = sc->sc_tx_nobuf; in iwk_m_stat()
3166 *val = sc->sc_rx_nobuf; in iwk_m_stat()
3169 *val = sc->sc_rx_err; in iwk_m_stat()
3172 *val = ic->ic_stats.is_rx_bytes; in iwk_m_stat()
3175 *val = ic->ic_stats.is_rx_frags; in iwk_m_stat()
3178 *val = ic->ic_stats.is_tx_bytes; in iwk_m_stat()
3181 *val = ic->ic_stats.is_tx_frags; in iwk_m_stat()
3185 *val = sc->sc_tx_err; in iwk_m_stat()
3188 *val = sc->sc_tx_retries; in iwk_m_stat()
3200 mutex_exit(&sc->sc_glock); in iwk_m_stat()
3203 mutex_exit(&sc->sc_glock); in iwk_m_stat()
3206 mutex_exit(&sc->sc_glock); in iwk_m_stat()
3216 ieee80211com_t *ic = &sc->sc_ic; in iwk_m_start()
3224 * the 'plumb' succeed. The iwk_thread() tries to re-init in iwk_m_start()
3227 mutex_enter(&sc->sc_glock); in iwk_m_start()
3228 sc->sc_flags |= IWK_F_HW_ERR_RECOVER; in iwk_m_start()
3229 mutex_exit(&sc->sc_glock); in iwk_m_start()
3233 ieee80211_new_state(ic, IEEE80211_S_INIT, -1); in iwk_m_start()
3235 mutex_enter(&sc->sc_glock); in iwk_m_start()
3236 sc->sc_flags |= IWK_F_RUNNING; in iwk_m_start()
3237 mutex_exit(&sc->sc_glock); in iwk_m_start()
3246 ieee80211com_t *ic = &sc->sc_ic; in iwk_m_stop()
3249 ieee80211_new_state(ic, IEEE80211_S_INIT, -1); in iwk_m_stop()
3251 mutex_enter(&sc->sc_mt_lock); in iwk_m_stop()
3252 sc->sc_flags &= ~IWK_F_HW_ERR_RECOVER; in iwk_m_stop()
3253 sc->sc_flags &= ~IWK_F_RATE_AUTO_CTL; in iwk_m_stop()
3254 mutex_exit(&sc->sc_mt_lock); in iwk_m_stop()
3255 mutex_enter(&sc->sc_glock); in iwk_m_stop()
3256 sc->sc_flags &= ~IWK_F_RUNNING; in iwk_m_stop()
3257 mutex_exit(&sc->sc_glock); in iwk_m_stop()
3265 ieee80211com_t *ic = &sc->sc_ic; in iwk_m_unicst()
3268 if (!IEEE80211_ADDR_EQ(ic->ic_macaddr, macaddr)) { in iwk_m_unicst()
3269 IEEE80211_ADDR_COPY(ic->ic_macaddr, macaddr); in iwk_m_unicst()
3270 mutex_enter(&sc->sc_glock); in iwk_m_unicst()
3272 mutex_exit(&sc->sc_glock); in iwk_m_unicst()
3302 ieee80211com_t *ic = &sc->sc_ic; in iwk_thread()
3303 clock_t clk; in iwk_thread() local
3307 mutex_enter(&sc->sc_mt_lock); in iwk_thread()
3308 while (sc->sc_mf_thread_switch) { in iwk_thread()
3311 sc->sc_flags &= ~IWK_F_RADIO_OFF; in iwk_thread()
3313 sc->sc_flags |= IWK_F_RADIO_OFF; in iwk_thread()
3318 if ((sc->sc_flags & IWK_F_SUSPEND) || in iwk_thread()
3319 (sc->sc_flags & IWK_F_RADIO_OFF)) { in iwk_thread()
3320 mutex_exit(&sc->sc_mt_lock); in iwk_thread()
3322 mutex_enter(&sc->sc_mt_lock); in iwk_thread()
3329 if (ic->ic_mach && in iwk_thread()
3330 (sc->sc_flags & IWK_F_HW_ERR_RECOVER)) { in iwk_thread()
3340 bcopy(&sc->sc_config, &sc->sc_config_save, in iwk_thread()
3341 sizeof (sc->sc_config)); in iwk_thread()
3343 mutex_exit(&sc->sc_mt_lock); in iwk_thread()
3344 ieee80211_new_state(ic, IEEE80211_S_INIT, -1); in iwk_thread()
3346 mutex_enter(&sc->sc_mt_lock); in iwk_thread()
3357 sc->sc_flags |= IWK_F_RUNNING; in iwk_thread()
3361 sc->sc_flags &= ~IWK_F_HW_ERR_RECOVER; in iwk_thread()
3363 mutex_exit(&sc->sc_mt_lock); in iwk_thread()
3365 if (sc->sc_ostate != IEEE80211_S_INIT) in iwk_thread()
3368 mutex_enter(&sc->sc_mt_lock); in iwk_thread()
3372 if (ic->ic_mach && (sc->sc_flags & IWK_F_LAZY_RESUME)) { in iwk_thread()
3376 sc->sc_flags &= ~IWK_F_LAZY_RESUME; in iwk_thread()
3377 mutex_exit(&sc->sc_mt_lock); in iwk_thread()
3384 ieee80211_new_state(&sc->sc_ic, IEEE80211_S_INIT, -1); in iwk_thread()
3385 mutex_enter(&sc->sc_mt_lock); in iwk_thread()
3388 if (ic->ic_mach && in iwk_thread()
3389 (sc->sc_flags & IWK_F_SCANNING) && sc->sc_scan_pending) { in iwk_thread()
3393 sc->sc_scan_pending--; in iwk_thread()
3394 mutex_exit(&sc->sc_mt_lock); in iwk_thread()
3396 if (sc->sc_flags & IWK_F_SCANNING) in iwk_thread()
3398 mutex_enter(&sc->sc_mt_lock); in iwk_thread()
3404 if (ic->ic_mach && in iwk_thread()
3405 (sc->sc_flags & IWK_F_RATE_AUTO_CTL)) { in iwk_thread()
3406 clk = ddi_get_lbolt(); in iwk_thread()
3407 if (clk > sc->sc_clk + drv_usectohz(500000)) { in iwk_thread()
3412 if ((ic->ic_state == IEEE80211_S_RUN) && in iwk_thread()
3413 (ic->ic_beaconmiss++ > 50)) { /* 5 seconds */ in iwk_thread()
3416 IEEE80211_S_INIT, -1); in iwk_thread()
3419 mutex_exit(&sc->sc_mt_lock); in iwk_thread()
3421 mutex_enter(&sc->sc_mt_lock); in iwk_thread()
3423 if (sc->sc_tx_timer) { in iwk_thread()
3426 sc->sc_tx_timer--; in iwk_thread()
3427 if (sc->sc_tx_timer == 0) { in iwk_thread()
3428 sc->sc_flags |= IWK_F_HW_ERR_RECOVER; in iwk_thread()
3429 sc->sc_ostate = IEEE80211_S_RUN; in iwk_thread()
3439 sc->sc_mf_thread = NULL; in iwk_thread()
3440 cv_signal(&sc->sc_mt_cv); in iwk_thread()
3441 mutex_exit(&sc->sc_mt_lock); in iwk_thread()
3451 iwk_tx_ring_t *ring = &sc->sc_txq[IWK_CMD_QUEUE_NUM]; in iwk_cmd()
3454 clock_t clk; in iwk_cmd() local
3456 ASSERT(size <= sizeof (cmd->data)); in iwk_cmd()
3457 ASSERT(mutex_owned(&sc->sc_glock)); in iwk_cmd()
3460 desc = ring->data[ring->cur].desc; in iwk_cmd()
3461 cmd = ring->data[ring->cur].cmd; in iwk_cmd()
3463 cmd->hdr.type = (uint8_t)code; in iwk_cmd()
3464 cmd->hdr.flags = 0; in iwk_cmd()
3465 cmd->hdr.qid = ring->qid; in iwk_cmd()
3466 cmd->hdr.idx = ring->cur; in iwk_cmd()
3467 (void) memcpy(cmd->data, buf, size); in iwk_cmd()
3470 desc->val0 = 1 << 24; in iwk_cmd()
3471 desc->pa[0].tb1_addr = in iwk_cmd()
3472 (uint32_t)(ring->data[ring->cur].paddr_cmd & 0xffffffff); in iwk_cmd()
3473 desc->pa[0].val1 = ((4 + size) << 4) & 0xfff0; in iwk_cmd()
3476 sc->sc_shared->queues_byte_cnt_tbls[ring->qid]. in iwk_cmd()
3477 tfd_offset[ring->cur].val = 8; in iwk_cmd()
3478 if (ring->cur < IWK_MAX_WIN_SIZE) { in iwk_cmd()
3479 sc->sc_shared->queues_byte_cnt_tbls[ring->qid]. in iwk_cmd()
3480 tfd_offset[IWK_QUEUE_SIZE + ring->cur].val = 8; in iwk_cmd()
3482 ring->cur = (ring->cur + 1) % ring->count; in iwk_cmd()
3483 IWK_WRITE(sc, HBUS_TARG_WRPTR, ring->qid << 8 | ring->cur); in iwk_cmd()
3488 sc->sc_flags &= ~IWK_F_CMD_DONE; in iwk_cmd()
3489 clk = ddi_get_lbolt() + drv_usectohz(2000000); in iwk_cmd()
3490 while (!(sc->sc_flags & IWK_F_CMD_DONE)) { in iwk_cmd()
3491 if (cv_timedwait(&sc->sc_cmd_cv, &sc->sc_glock, clk) < in iwk_cmd()
3495 if (sc->sc_flags & IWK_F_CMD_DONE) in iwk_cmd()
3518 ieee80211com_t *ic = &sc->sc_ic; in iwk_hw_set_before_auth()
3519 ieee80211_node_t *in = ic->ic_bss; in iwk_hw_set_before_auth()
3526 if (in->in_chan == IEEE80211_CHAN_ANYC) { in iwk_hw_set_before_auth()
3529 LE_16(ieee80211_chan2ieee(ic, in->in_chan))); in iwk_hw_set_before_auth()
3534 IEEE80211_ADDR_COPY(sc->sc_config.bssid, in->in_bssid); in iwk_hw_set_before_auth()
3535 sc->sc_config.chan = LE_16(ieee80211_chan2ieee(ic, in->in_chan)); in iwk_hw_set_before_auth()
3536 if (ic->ic_curmode == IEEE80211_MODE_11B) { in iwk_hw_set_before_auth()
3537 sc->sc_config.cck_basic_rates = 0x03; in iwk_hw_set_before_auth()
3538 sc->sc_config.ofdm_basic_rates = 0; in iwk_hw_set_before_auth()
3539 } else if ((in->in_chan != IEEE80211_CHAN_ANYC) && in iwk_hw_set_before_auth()
3540 (IEEE80211_IS_CHAN_5GHZ(in->in_chan))) { in iwk_hw_set_before_auth()
3541 sc->sc_config.cck_basic_rates = 0; in iwk_hw_set_before_auth()
3542 sc->sc_config.ofdm_basic_rates = 0x15; in iwk_hw_set_before_auth()
3544 sc->sc_config.cck_basic_rates = 0x0f; in iwk_hw_set_before_auth()
3545 sc->sc_config.ofdm_basic_rates = 0xff; in iwk_hw_set_before_auth()
3548 sc->sc_config.flags &= ~LE_32(RXON_FLG_SHORT_PREAMBLE_MSK | in iwk_hw_set_before_auth()
3551 if (ic->ic_flags & IEEE80211_F_SHSLOT) in iwk_hw_set_before_auth()
3552 sc->sc_config.flags |= LE_32(RXON_FLG_SHORT_SLOT_MSK); in iwk_hw_set_before_auth()
3554 sc->sc_config.flags &= LE_32(~RXON_FLG_SHORT_SLOT_MSK); in iwk_hw_set_before_auth()
3556 if (ic->ic_flags & IEEE80211_F_SHPREAMBLE) in iwk_hw_set_before_auth()
3557 sc->sc_config.flags |= LE_32(RXON_FLG_SHORT_PREAMBLE_MSK); in iwk_hw_set_before_auth()
3559 sc->sc_config.flags &= LE_32(~RXON_FLG_SHORT_PREAMBLE_MSK); in iwk_hw_set_before_auth()
3564 LE_16(sc->sc_config.chan), LE_32(sc->sc_config.flags), in iwk_hw_set_before_auth()
3565 LE_32(sc->sc_config.filter_flags), in iwk_hw_set_before_auth()
3566 sc->sc_config.cck_basic_rates, sc->sc_config.ofdm_basic_rates, in iwk_hw_set_before_auth()
3567 sc->sc_config.bssid[0], sc->sc_config.bssid[1], in iwk_hw_set_before_auth()
3568 sc->sc_config.bssid[2], sc->sc_config.bssid[3], in iwk_hw_set_before_auth()
3569 sc->sc_config.bssid[4], sc->sc_config.bssid[5])); in iwk_hw_set_before_auth()
3570 err = iwk_cmd(sc, REPLY_RXON, &sc->sc_config, in iwk_hw_set_before_auth()
3575 sc->sc_config.chan); in iwk_hw_set_before_auth()
3580 sc->sc_tempera = iwk_curr_tempera(sc); in iwk_hw_set_before_auth()
3592 IEEE80211_ADDR_COPY(node.bssid, in->in_bssid); in iwk_hw_set_before_auth()
3603 rs = ic->ic_sup_rates[ieee80211_chan2mode(ic, ic->ic_curchan)]; in iwk_hw_set_before_auth()
3606 rate = rs.ir_rates[rs.ir_nrates - i]; in iwk_hw_set_before_auth()
3639 ieee80211com_t *ic = &sc->sc_ic; in iwk_scan()
3640 iwk_tx_ring_t *ring = &sc->sc_txq[IWK_CMD_QUEUE_NUM]; in iwk_scan()
3647 ieee80211_node_t *in = ic->ic_bss; in iwk_scan()
3654 data = &ring->data[ring->cur]; in iwk_scan()
3655 desc = data->desc; in iwk_scan()
3656 cmd = (iwk_cmd_t *)data->dma_data.mem_va; in iwk_scan()
3658 cmd->hdr.type = REPLY_SCAN_CMD; in iwk_scan()
3659 cmd->hdr.flags = 0; in iwk_scan()
3660 cmd->hdr.qid = ring->qid; in iwk_scan()
3661 cmd->hdr.idx = ring->cur | 0x40; in iwk_scan()
3663 hdr = (iwk_scan_hdr_t *)cmd->data; in iwk_scan()
3665 hdr->nchan = 1; in iwk_scan()
3666 hdr->quiet_time = LE_16(50); in iwk_scan()
3667 hdr->quiet_plcp_th = LE_16(1); in iwk_scan()
3669 hdr->flags = LE_32(RXON_FLG_BAND_24G_MSK | RXON_FLG_AUTO_DETECT_MSK); in iwk_scan()
3670 hdr->rx_chain = LE_16(RXON_RX_CHAIN_DRIVER_FORCE_MSK | in iwk_scan()
3675 hdr->tx_cmd.tx_flags = LE_32(TX_CMD_FLG_SEQ_CTL_MSK); in iwk_scan()
3676 hdr->tx_cmd.sta_id = IWK_BROADCAST_ID; in iwk_scan()
3677 hdr->tx_cmd.stop_time.life_time = LE_32(0xffffffff); in iwk_scan()
3678 hdr->tx_cmd.tx_flags |= LE_32(0x200); in iwk_scan()
3679 hdr->tx_cmd.rate.r.rate_n_flags = LE_32(iwk_rate_to_plcp(2)); in iwk_scan()
3680 hdr->tx_cmd.rate.r.rate_n_flags |= in iwk_scan()
3682 hdr->direct_scan[0].len = ic->ic_des_esslen; in iwk_scan()
3683 hdr->direct_scan[0].id = IEEE80211_ELEMID_SSID; in iwk_scan()
3685 if (ic->ic_des_esslen) { in iwk_scan()
3686 bcopy(ic->ic_des_essid, essid, ic->ic_des_esslen); in iwk_scan()
3687 essid[ic->ic_des_esslen] = '\0'; in iwk_scan()
3690 bcopy(ic->ic_des_essid, hdr->direct_scan[0].ssid, in iwk_scan()
3691 ic->ic_des_esslen); in iwk_scan()
3693 bzero(hdr->direct_scan[0].ssid, in iwk_scan()
3694 sizeof (hdr->direct_scan[0].ssid)); in iwk_scan()
3700 wh->i_fc[0] = IEEE80211_FC0_VERSION_0 | IEEE80211_FC0_TYPE_MGT | in iwk_scan()
3702 wh->i_fc[1] = IEEE80211_FC1_DIR_NODS; in iwk_scan()
3703 (void) memset(wh->i_addr1, 0xff, 6); in iwk_scan()
3704 IEEE80211_ADDR_COPY(wh->i_addr2, ic->ic_macaddr); in iwk_scan()
3705 (void) memset(wh->i_addr3, 0xff, 6); in iwk_scan()
3706 *(uint16_t *)&wh->i_dur[0] = 0; in iwk_scan()
3707 *(uint16_t *)&wh->i_seq[0] = 0; in iwk_scan()
3712 if (in->in_esslen) { in iwk_scan()
3713 bcopy(in->in_essid, essid, in->in_esslen); in iwk_scan()
3714 essid[in->in_esslen] = '\0'; in iwk_scan()
3719 *frm++ = in->in_esslen; in iwk_scan()
3720 (void) memcpy(frm, in->in_essid, in->in_esslen); in iwk_scan()
3721 frm += in->in_esslen; in iwk_scan()
3723 mode = ieee80211_chan2mode(ic, ic->ic_curchan); in iwk_scan()
3724 rs = &ic->ic_sup_rates[mode]; in iwk_scan()
3728 nrates = rs->ir_nrates; in iwk_scan()
3732 (void) memcpy(frm, rs->ir_rates, nrates); in iwk_scan()
3736 if (rs->ir_nrates > IEEE80211_RATE_SIZE) { in iwk_scan()
3737 nrates = rs->ir_nrates - IEEE80211_RATE_SIZE; in iwk_scan()
3740 (void) memcpy(frm, rs->ir_rates + IEEE80211_RATE_SIZE, nrates); in iwk_scan()
3745 if (ic->ic_opt_ie != NULL) { in iwk_scan()
3746 (void) memcpy(frm, ic->ic_opt_ie, ic->ic_opt_ie_len); in iwk_scan()
3747 frm += ic->ic_opt_ie_len; in iwk_scan()
3751 hdr->tx_cmd.len = LE_16(_PTRDIFF(frm, wh)); in iwk_scan()
3752 hdr->len = LE_16(hdr->nchan * sizeof (iwk_scan_chan_t) + in iwk_scan()
3753 LE_16(hdr->tx_cmd.len) + sizeof (iwk_scan_hdr_t)); in iwk_scan()
3760 for (i = 1; i <= hdr->nchan; i++, chan++) { in iwk_scan()
3761 if (ic->ic_des_esslen) { in iwk_scan()
3762 chan->type = 3; in iwk_scan()
3764 chan->type = 1; in iwk_scan()
3767 chan->chan = ieee80211_chan2ieee(ic, ic->ic_curchan); in iwk_scan()
3768 chan->tpc.tx_gain = 0x3f; in iwk_scan()
3769 chan->tpc.dsp_atten = 110; in iwk_scan()
3770 chan->active_dwell = LE_16(50); in iwk_scan()
3771 chan->passive_dwell = LE_16(120); in iwk_scan()
3779 desc->val0 = 1 << 24; in iwk_scan()
3780 desc->pa[0].tb1_addr = in iwk_scan()
3781 (uint32_t)(data->dma_data.cookie.dmac_address & 0xffffffff); in iwk_scan()
3782 desc->pa[0].val1 = (pktlen << 4) & 0xfff0; in iwk_scan()
3788 sc->sc_shared->queues_byte_cnt_tbls[ring->qid]. in iwk_scan()
3789 tfd_offset[ring->cur].val = 8; in iwk_scan()
3790 if (ring->cur < IWK_MAX_WIN_SIZE) { in iwk_scan()
3791 sc->sc_shared->queues_byte_cnt_tbls[ring->qid]. in iwk_scan()
3792 tfd_offset[IWK_QUEUE_SIZE + ring->cur].val = 8; in iwk_scan()
3796 ring->cur = (ring->cur + 1) % ring->count; in iwk_scan()
3797 IWK_WRITE(sc, HBUS_TARG_WRPTR, ring->qid << 8 | ring->cur); in iwk_scan()
3805 ieee80211com_t *ic = &sc->sc_ic; in iwk_config()
3840 (void) memset(&sc->sc_config, 0, sizeof (iwk_rxon_cmd_t)); in iwk_config()
3841 IEEE80211_ADDR_COPY(sc->sc_config.node_addr, ic->ic_macaddr); in iwk_config()
3842 IEEE80211_ADDR_COPY(sc->sc_config.wlap_bssid, ic->ic_macaddr); in iwk_config()
3843 sc->sc_config.chan = LE_16(ieee80211_chan2ieee(ic, ic->ic_curchan)); in iwk_config()
3844 sc->sc_config.flags = LE_32(RXON_FLG_TSF2HOST_MSK | in iwk_config()
3846 sc->sc_config.flags &= LE_32(~RXON_FLG_CCK_MSK); in iwk_config()
3847 switch (ic->ic_opmode) { in iwk_config()
3849 sc->sc_config.dev_type = RXON_DEV_TYPE_ESS; in iwk_config()
3850 sc->sc_config.filter_flags |= LE_32(RXON_FILTER_ACCEPT_GRP_MSK | in iwk_config()
3856 sc->sc_config.dev_type = RXON_DEV_TYPE_IBSS; in iwk_config()
3857 sc->sc_config.flags |= LE_32(RXON_FLG_SHORT_PREAMBLE_MSK); in iwk_config()
3858 sc->sc_config.filter_flags = LE_32(RXON_FILTER_ACCEPT_GRP_MSK | in iwk_config()
3863 sc->sc_config.dev_type = RXON_DEV_TYPE_AP; in iwk_config()
3866 sc->sc_config.dev_type = RXON_DEV_TYPE_SNIFFER; in iwk_config()
3867 sc->sc_config.filter_flags |= LE_32(RXON_FILTER_ACCEPT_GRP_MSK | in iwk_config()
3871 sc->sc_config.cck_basic_rates = 0x0f; in iwk_config()
3872 sc->sc_config.ofdm_basic_rates = 0xff; in iwk_config()
3874 sc->sc_config.ofdm_ht_single_stream_basic_rates = 0xff; in iwk_config()
3875 sc->sc_config.ofdm_ht_dual_stream_basic_rates = 0xff; in iwk_config()
3879 sc->sc_config.rx_chain = LE_16(RXON_RX_CHAIN_DRIVER_FORCE_MSK | in iwk_config()
3884 err = iwk_cmd(sc, REPLY_RXON, &sc->sc_config, in iwk_config()
3892 sc->sc_tempera = iwk_curr_tempera(sc); in iwk_config()
4024 if ((sc->sc_rev & 0x80) == 0x80 && (sc->sc_rev & 0x7f) < 8) { in iwk_preinit()
4025 tmp = ddi_get32(sc->sc_cfg_handle, in iwk_preinit()
4026 (uint32_t *)(sc->sc_cfg_base + 0xe8)); in iwk_preinit()
4027 ddi_put32(sc->sc_cfg_handle, in iwk_preinit()
4028 (uint32_t *)(sc->sc_cfg_base + 0xe8), in iwk_preinit()
4033 vlink = ddi_get8(sc->sc_cfg_handle, in iwk_preinit()
4034 (uint8_t *)(sc->sc_cfg_base + 0xf0)); in iwk_preinit()
4035 ddi_put8(sc->sc_cfg_handle, (uint8_t *)(sc->sc_cfg_base + 0xf0), in iwk_preinit()
4100 uint16_t addr, eep_sz = sizeof (sc->sc_eep_map); in iwk_eep_load()
4101 uint16_t *eep_p = (uint16_t *)&sc->sc_eep_map; in iwk_eep_load()
4147 ieee80211com_t *ic = &sc->sc_ic; in iwk_get_mac_from_eep()
4148 struct iwk_eep *ep = &sc->sc_eep_map; in iwk_get_mac_from_eep()
4150 IEEE80211_ADDR_COPY(ic->ic_macaddr, ep->mac_address); in iwk_get_mac_from_eep()
4153 ic->ic_macaddr[0], ic->ic_macaddr[1], ic->ic_macaddr[2], in iwk_get_mac_from_eep()
4154 ic->ic_macaddr[3], ic->ic_macaddr[4], ic->ic_macaddr[5])); in iwk_get_mac_from_eep()
4161 clock_t clk; in iwk_init() local
4164 mutex_enter(&sc->sc_glock); in iwk_init()
4165 sc->sc_flags &= ~IWK_F_FW_INIT; in iwk_init()
4181 sc->sc_rxq.dma_desc.cookie.dmac_address >> 8); in iwk_init()
4184 ((uint32_t)(sc->sc_dma_sh.cookie.dmac_address + in iwk_init()
4195 (RX_QUEUE_SIZE - 1) & ~0x7); in iwk_init()
4203 sc->sc_dma_kw.cookie.dmac_address >> 4); in iwk_init()
4207 sc->sc_txq[qid].dma_desc.cookie.dmac_address >> 8); in iwk_init()
4231 (void) memcpy(sc->sc_dma_fw_data_bak.mem_va, in iwk_init()
4232 sc->sc_dma_fw_data.mem_va, in iwk_init()
4233 sc->sc_dma_fw_data.alength); in iwk_init()
4253 clk = ddi_get_lbolt() + drv_usectohz(2000000); in iwk_init()
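/*
 * The loop below waits for IWK_F_FW_INIT to be set, giving up once the
 * deadline computed above (~2 seconds, expressed in lbolt ticks) passes.
 */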
4254 while (!(sc->sc_flags & IWK_F_FW_INIT)) { in iwk_init()
4255 if (cv_timedwait(&sc->sc_fw_cv, &sc->sc_glock, clk) < 0) in iwk_init()
4258 if (!(sc->sc_flags & IWK_F_FW_INIT)) { in iwk_init()
4275 mutex_exit(&sc->sc_glock); in iwk_init()
4280 mutex_exit(&sc->sc_glock); in iwk_init()
4290 if (!(sc->sc_flags & IWK_F_QUIESCED)) in iwk_stop()
4291 mutex_enter(&sc->sc_glock); in iwk_stop()
4301 iwk_reset_tx_ring(sc, &sc->sc_txq[i]); in iwk_stop()
4314 sc->sc_tx_timer = 0; in iwk_stop()
4315 sc->sc_flags &= ~IWK_F_SCANNING; in iwk_stop()
4316 sc->sc_scan_pending = 0; in iwk_stop()
4321 if (!(sc->sc_flags & IWK_F_QUIESCED)) in iwk_stop()
4322 mutex_exit(&sc->sc_glock); in iwk_stop()
4329 * INRIA Sophia - Projet Planete
4330 * http://www-sop.inria.fr/rapports/sophia/RR-5208.html
4333 ((amrr)->retrycnt < (amrr)->txcnt / 10)
4335 ((amrr)->retrycnt > (amrr)->txcnt / 3)
4337 ((amrr)->txcnt > 100)
4339 ((in)->in_txrate == 0)
4341 ((in)->in_txrate == (in)->in_rates.ir_nrates - 1)
4343 ((in)->in_txrate++)
4345 ((in)->in_txrate--)
4347 { (amrr)->txcnt = (amrr)->retrycnt = 0; }
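/*
 * Illustrative restatement of the AMRR thresholds above (the struct and
 * function names here are assumptions, not the driver's identifiers): an
 * interval counts as a success when fewer than 10% of the transmitted
 * frames needed retries, as a failure when more than a third did, and at
 * least 100 transmissions are required before a result is acted upon.
 */
typedef struct amrr_sample {
	int	txcnt;		/* frames transmitted in the interval */
	int	retrycnt;	/* frames that needed at least one retry */
} amrr_sample_t;

static int
amrr_is_success(const amrr_sample_t *s)
{
	return (s->retrycnt < s->txcnt / 10);
}

static int
amrr_is_failure(const amrr_sample_t *s)
{
	return (s->retrycnt > s->txcnt / 3);
}

static int
amrr_is_enough(const amrr_sample_t *s)
{
	return (s->txcnt > 100);
}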
4355 amrr->success = 0; in iwk_amrr_init()
4356 amrr->recovery = 0; in iwk_amrr_init()
4357 amrr->txcnt = amrr->retrycnt = 0; in iwk_amrr_init()
4358 amrr->success_threshold = IWK_AMRR_MIN_SUCCESS_THRESHOLD; in iwk_amrr_init()
4364 ieee80211com_t *ic = &sc->sc_ic; in iwk_amrr_timeout()
4367 if (ic->ic_opmode == IEEE80211_M_STA) in iwk_amrr_timeout()
4368 iwk_amrr_ratectl(NULL, ic->ic_bss); in iwk_amrr_timeout()
4370 ieee80211_iterate_nodes(&ic->ic_sta, iwk_amrr_ratectl, NULL); in iwk_amrr_timeout()
4371 sc->sc_clk = ddi_get_lbolt(); in iwk_amrr_timeout()
4382 amrr->success++; in iwk_amrr_ratectl()
4383 if (amrr->success >= amrr->success_threshold && in iwk_amrr_ratectl()
4385 amrr->recovery = 1; in iwk_amrr_ratectl()
4386 amrr->success = 0; in iwk_amrr_ratectl()
4390 in->in_txrate, amrr->txcnt, amrr->retrycnt)); in iwk_amrr_ratectl()
4393 amrr->recovery = 0; in iwk_amrr_ratectl()
4396 amrr->success = 0; in iwk_amrr_ratectl()
4398 if (amrr->recovery) { in iwk_amrr_ratectl()
4399 amrr->success_threshold++; in iwk_amrr_ratectl()
4400 if (amrr->success_threshold > in iwk_amrr_ratectl()
4402 amrr->success_threshold = in iwk_amrr_ratectl()
4405 amrr->success_threshold = in iwk_amrr_ratectl()
4411 in->in_txrate, amrr->txcnt, amrr->retrycnt)); in iwk_amrr_ratectl()
4414 amrr->recovery = 0; /* paper is incorrect */ in iwk_amrr_ratectl()
4434 r1 = (int32_t)LE_32(sc->sc_card_alive_init.therm_r1[1]); in iwk_curr_tempera()
4435 r2 = (int32_t)LE_32(sc->sc_card_alive_init.therm_r2[1]); in iwk_curr_tempera()
4436 r3 = (int32_t)LE_32(sc->sc_card_alive_init.therm_r3[1]); in iwk_curr_tempera()
4437 r4_u = LE_32(sc->sc_card_alive_init.therm_r4[1]); in iwk_curr_tempera()
4439 r1 = (int32_t)LE_32(sc->sc_card_alive_init.therm_r1[0]); in iwk_curr_tempera()
4440 r2 = (int32_t)LE_32(sc->sc_card_alive_init.therm_r2[0]); in iwk_curr_tempera()
4441 r3 = (int32_t)LE_32(sc->sc_card_alive_init.therm_r3[0]); in iwk_curr_tempera()
4442 r4_u = LE_32(sc->sc_card_alive_init.therm_r4[0]); in iwk_curr_tempera()
4445 if (sc->sc_flags & IWK_F_STATISTICS) { in iwk_curr_tempera()
4446 r4_s = (int32_t)(LE_32(sc->sc_statistics.general.temperature) << in iwk_curr_tempera()
4447 (31-23)) >> (31-23); in iwk_curr_tempera()
4449 r4_s = (int32_t)(r4_u << (31-23)) >> (31-23); in iwk_curr_tempera()
4452 IWK_DBG((IWK_DEBUG_CALIBRATION, "temperature R[1-4]: %d %d %d %d\n", in iwk_curr_tempera()
4462 tempera = TEMPERATURE_CALIB_A_VAL * (r4_s - r2); in iwk_curr_tempera()
4463 tempera /= (r3 - r1); in iwk_curr_tempera()
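/*
 * Sketch of the computation above (the helper name is assumed): r1..r3
 * come from the uCode "alive" calibration data in sc_card_alive_init,
 * r4_s is the current reading sign-extended from 24 bits by the shift
 * pair above, and the raw temperature follows the linear mapping
 *	tempera = TEMPERATURE_CALIB_A_VAL * (r4_s - r2) / (r3 - r1)
 */
static int32_t
iwk_raw_tempera_sketch(int32_t r1, int32_t r2, int32_t r3, int32_t r4_s)
{
	if (r3 == r1)
		return (0);	/* guard against degenerate calibration data */
	return (TEMPERATURE_CALIB_A_VAL * (r4_s - r2) / (r3 - r1));
}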
4475 return (LE_32(sc->sc_config.flags) & RXON_FLG_BAND_24G_MSK); in iwk_is_24G_band()
4481 return ((LE_32(sc->sc_config.flags) & in iwk_is_fat_channel()
4483 (LE_32(sc->sc_config.flags) & RXON_FLG_CHANNEL_MODE_MIXED_MSK)); in iwk_is_fat_channel()
4565 chan = channel - 4; in iwk_get_eep_channel()
4572 return (&sc->sc_eep_map.band_24_channels[i]); in iwk_get_eep_channel()
4577 return (&sc->sc_eep_map.band_52_channels[i]); in iwk_get_eep_channel()
4583 return (&sc->sc_eep_map.band_1_channels[i]); in iwk_get_eep_channel()
4589 return (&sc->sc_eep_map.band_2_channels[i]); in iwk_get_eep_channel()
4594 return (&sc->sc_eep_map.band_3_channels[i]); in iwk_get_eep_channel()
4599 return (&sc->sc_eep_map.band_4_channels[i]); in iwk_get_eep_channel()
4604 return (&sc->sc_eep_map.band_5_channels[i]); in iwk_get_eep_channel()
4618 int32_t b_n = -1; in iwk_band_number()
4621 if (0 == sc->sc_eep_map.calib_info.band_info_tbl[b_n].ch_from) { in iwk_band_number()
4626 (uint16_t)sc->sc_eep_map.calib_info. in iwk_band_number()
4629 (uint16_t)sc->sc_eep_map.calib_info. in iwk_band_number()
4644 sign = -sign; in iwk_division()
4645 num = -num; in iwk_division()
4649 sign = -sign; in iwk_division()
4650 denom = -denom; in iwk_division()
4667 (void) iwk_division((x2-x)*(y1-y2), (x2-x1), &val); in iwk_interpolate_value()
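/*
 * The division above computes the slope term of a plain linear
 * interpolation between the calibration points (x1, y1) and (x2, y2),
 * i.e. the interpolated value is y = y2 + (x2 - x) * (y1 - y2) / (x2 - x1).
 * A self-contained sketch (assumed helper, not the driver's code):
 */
static int32_t
interpolate_sketch(int32_t x, int32_t x1, int32_t y1, int32_t x2, int32_t y2)
{
	if (x2 == x1)
		return (y1);	/* degenerate span: both points coincide */
	return (y2 + (x2 - x) * (y1 - y2) / (x2 - x1));
}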
4688 (uint32_t)sc->sc_eep_map.calib_info.band_info_tbl[ban_n].ch1.ch_num; in iwk_channel_interpolate()
4690 (uint32_t)sc->sc_eep_map.calib_info.band_info_tbl[ban_n].ch2.ch_num; in iwk_channel_interpolate()
4692 chan_info->ch_num = (uint8_t)channel; /* given channel number */ in iwk_channel_interpolate()
4703 &(sc->sc_eep_map.calib_info. in iwk_channel_interpolate()
4706 &(sc->sc_eep_map.calib_info.band_info_tbl[ban_n]. in iwk_channel_interpolate()
4708 m_p = &(chan_info->measure[c][m]); in iwk_channel_interpolate()
4714 m_p->actual_pow = iwk_interpolate_value(channel, in iwk_channel_interpolate()
4715 ch1_n, m1_p->actual_pow, in iwk_channel_interpolate()
4716 ch2_n, m2_p->actual_pow); in iwk_channel_interpolate()
4719 m_p->gain_idx = iwk_interpolate_value(channel, in iwk_channel_interpolate()
4720 ch1_n, m1_p->gain_idx, in iwk_channel_interpolate()
4721 ch2_n, m2_p->gain_idx); in iwk_channel_interpolate()
4724 m_p->temperature = iwk_interpolate_value(channel, in iwk_channel_interpolate()
4725 ch1_n, m1_p->temperature, in iwk_channel_interpolate()
4726 ch2_n, m2_p->temperature); in iwk_channel_interpolate()
4732 m_p->pa_det = iwk_interpolate_value(channel, ch1_n, in iwk_channel_interpolate()
4733 m1_p->pa_det, in iwk_channel_interpolate()
4734 ch2_n, m2_p->pa_det); in iwk_channel_interpolate()
4755 (void) iwk_division(curr_voltage-eep_voltage, in iwk_voltage_compensation()
4761 if ((vol_comp < -2) || (vol_comp > 2)) { in iwk_voltage_compensation()
4770 * ratios from 3:1 to 4.5:1 of degrees (Celsius) per half-dB gain adjust
4776 {9, 2}, /* group 0 5.2, ch 34-43 */
4777 {4, 1}, /* group 1 5.2, ch 44-70 */
4778 {4, 1}, /* group 2 5.2, ch 71-124 */
4779 {4, 1}, /* group 3 5.2, ch 125-200 */
4784 * bit-rate-dependent table to prevent Tx distortion, in half-dB units,
4837 channel = LE_16(sc->sc_config.chan); in iwk_txpower_table_cmd_init()
4849 if (is_fat && (LE_32(sc->sc_config.flags) & in iwk_txpower_table_cmd_init()
4872 sc->sc_user_txpower = (int32_t)eep_chan_p->max_power_avg; in iwk_txpower_table_cmd_init()
4873 if (sc->sc_user_txpower < IWK_TX_POWER_TARGET_POWER_MIN) { in iwk_txpower_table_cmd_init()
4877 } else if (sc->sc_user_txpower > IWK_TX_POWER_TARGET_POWER_MAX) { in iwk_txpower_table_cmd_init()
4883 target_power = 2 * sc->sc_user_txpower; in iwk_txpower_table_cmd_init()
4894 channel -= 2; in iwk_txpower_table_cmd_init()
4903 sc->sc_eep_map.calib_info.saturation_power24; in iwk_txpower_table_cmd_init()
4906 sc->sc_eep_map.calib_info.saturation_power52; in iwk_txpower_table_cmd_init()
4919 regu_power = (int32_t)eep_chan_p->max_power_avg * 2; in iwk_txpower_table_cmd_init()
4935 eep_voltage = (int32_t)LE_16(sc->sc_eep_map.calib_info.voltage); in iwk_txpower_table_cmd_init()
4936 init_voltage = (int32_t)LE_32(sc->sc_card_alive_init.voltage); in iwk_txpower_table_cmd_init()
4942 if (sc->sc_tempera >= IWK_TX_POWER_TEMPERATURE_MIN) { in iwk_txpower_table_cmd_init()
4943 temperature = sc->sc_tempera; in iwk_txpower_table_cmd_init()
4947 if (sc->sc_tempera <= IWK_TX_POWER_TEMPERATURE_MAX) { in iwk_txpower_table_cmd_init()
4948 temperature = sc->sc_tempera; in iwk_txpower_table_cmd_init()
4961 interpo_temp = measure_p->temperature; in iwk_txpower_table_cmd_init()
4965 (temperature-interpo_temp)*degrees_per_05db_denom, in iwk_txpower_table_cmd_init()
4968 interpo_gain_idx[c] = measure_p->gain_idx; in iwk_txpower_table_cmd_init()
4969 interpo_actual_pow[c] = measure_p->actual_pow; in iwk_txpower_table_cmd_init()
4978 curr_regu_power = regu_power - in iwk_txpower_table_cmd_init()
4986 power_limit = saturation_power - back_off_table[r]; in iwk_txpower_table_cmd_init()
4999 LE_32(sc->sc_card_alive_init. in iwk_txpower_table_cmd_init()
5009 txpower_gains_idx = interpo_gain_idx[c] - in iwk_txpower_table_cmd_init()
5010 (target_power - interpo_actual_pow[c]) - in iwk_txpower_table_cmd_init()
5011 tempera_comp[c] - voltage_compensation + in iwk_txpower_table_cmd_init()
5054 tp_db->ht_ofdm_power[r].dw = LE_32(txpower_gains.dw); in iwk_txpower_table_cmd_init()
5056 tp_db->legacy_cck_power.dw = LE_32(txpower_gains.dw); in iwk_txpower_table_cmd_init()
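/*
 * Illustrative composition of the gain index computed above (helper and
 * parameter names are assumptions; the trailing rate-dependent term of
 * the original expression is not visible in this excerpt and is omitted):
 * start from the factory-calibrated index, then correct for the gap
 * between requested and factory-measured power, and for the current
 * temperature and voltage.
 */
static int32_t
gain_idx_base_sketch(int32_t calib_gain_idx, int32_t calib_actual_pow,
    int32_t target_power, int32_t tempera_comp, int32_t voltage_comp)
{
	return (calib_gain_idx - (target_power - calib_actual_pow) -
	    tempera_comp - voltage_comp);
}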
5072 if (sc->sc_flags & IWK_F_SCANNING) { in iwk_tx_power_calibration()
5078 cmd.channel = sc->sc_config.chan; in iwk_tx_power_calibration()
5095 sc->sc_last_tempera = sc->sc_tempera; in iwk_tx_power_calibration()
5107 mutex_enter(&sc->sc_glock); in iwk_statistics_notify()
5109 is_diff = (sc->sc_statistics.general.temperature != in iwk_statistics_notify()
5110 statistics_p->general.temperature) || in iwk_statistics_notify()
5111 (LE_32(sc->sc_statistics.flag) & in iwk_statistics_notify()
5113 (LE_32(statistics_p->flag) & STATISTICS_REPLY_FLG_FAT_MODE_MSK); in iwk_statistics_notify()
5116 (void) memcpy(&sc->sc_statistics, statistics_p, in iwk_statistics_notify()
5119 sc->sc_flags |= IWK_F_STATISTICS; in iwk_statistics_notify()
5121 if (!(sc->sc_flags & IWK_F_SCANNING)) { in iwk_statistics_notify()
5131 mutex_exit(&sc->sc_glock); in iwk_statistics_notify()
5136 sc->sc_tempera = iwk_curr_tempera(sc); in iwk_statistics_notify()
5139 if (((sc->sc_tempera - sc->sc_last_tempera) >= 3) || in iwk_statistics_notify()
5140 ((sc->sc_last_tempera - sc->sc_tempera) >= 3)) { in iwk_statistics_notify()
5145 mutex_exit(&sc->sc_glock); in iwk_statistics_notify()
5151 return (LE_32(sc->sc_config.filter_flags) & RXON_FILTER_ASSOC_MSK); in iwk_is_associated()
5161 gain_diff_p = &sc->sc_rxgain_diff; in iwk_rxgain_diff_init()
5167 gain_diff_p->gain_diff_chain[i] = CHAIN_GAIN_DIFF_INIT_VAL; in iwk_rxgain_diff_init()
5183 gain_diff_p->state = IWK_GAIN_DIFF_ACCUMULATE; in iwk_rxgain_diff_init()
5207 &sc->sc_statistics.rx.general; in iwk_rxgain_diff()
5208 struct iwk_rx_gain_diff *gain_diff_p = &sc->sc_rxgain_diff; in iwk_rxgain_diff()
5211 LE_32(rx_general_p->interference_data_flag)) { in iwk_rxgain_diff()
5215 if (IWK_GAIN_DIFF_ACCUMULATE != gain_diff_p->state) { in iwk_rxgain_diff()
5220 channel_n = sc->sc_config.chan; /* channel number */ in iwk_rxgain_diff()
5222 if ((channel_n != (LE_32(sc->sc_statistics.flag) >> 16)) || in iwk_rxgain_diff()
5224 (LE_32(sc->sc_statistics.flag) & in iwk_rxgain_diff()
5231 noise_chain_a = LE_32(rx_general_p->beacon_silence_rssi_a) & 0xFF; in iwk_rxgain_diff()
5232 noise_chain_b = LE_32(rx_general_p->beacon_silence_rssi_b) & 0xFF; in iwk_rxgain_diff()
5233 noise_chain_c = LE_32(rx_general_p->beacon_silence_rssi_c) & 0xFF; in iwk_rxgain_diff()
5236 beacon_chain_a = LE_32(rx_general_p->beacon_rssi_a) & 0xFF; in iwk_rxgain_diff()
5237 beacon_chain_b = LE_32(rx_general_p->beacon_rssi_b) & 0xFF; in iwk_rxgain_diff()
5238 beacon_chain_c = LE_32(rx_general_p->beacon_rssi_c) & 0xFF; in iwk_rxgain_diff()
5240 gain_diff_p->beacon_count++; in iwk_rxgain_diff()
5243 gain_diff_p->noise_stren_a += noise_chain_a; in iwk_rxgain_diff()
5244 gain_diff_p->noise_stren_b += noise_chain_b; in iwk_rxgain_diff()
5245 gain_diff_p->noise_stren_c += noise_chain_c; in iwk_rxgain_diff()
5248 gain_diff_p->beacon_stren_a += beacon_chain_a; in iwk_rxgain_diff()
5249 gain_diff_p->beacon_stren_b += beacon_chain_b; in iwk_rxgain_diff()
5250 gain_diff_p->beacon_stren_c += beacon_chain_c; in iwk_rxgain_diff()
5252 if (BEACON_NUM_20 == gain_diff_p->beacon_count) { in iwk_rxgain_diff()
5254 beacon_aver[0] = (gain_diff_p->beacon_stren_a) / BEACON_NUM_20; in iwk_rxgain_diff()
5255 beacon_aver[1] = (gain_diff_p->beacon_stren_b) / BEACON_NUM_20; in iwk_rxgain_diff()
5256 beacon_aver[2] = (gain_diff_p->beacon_stren_c) / BEACON_NUM_20; in iwk_rxgain_diff()
5259 noise_aver[0] = (gain_diff_p->noise_stren_a) / BEACON_NUM_20; in iwk_rxgain_diff()
5260 noise_aver[1] = (gain_diff_p->noise_stren_b) / BEACON_NUM_20; in iwk_rxgain_diff()
5261 noise_aver[2] = (gain_diff_p->noise_stren_c) / BEACON_NUM_20; in iwk_rxgain_diff()
5267 gain_diff_p->connected_chains = 1 << 0; in iwk_rxgain_diff()
5270 gain_diff_p->connected_chains = 1 << 1; in iwk_rxgain_diff()
5273 gain_diff_p->connected_chains = 1 << 2; in iwk_rxgain_diff()
5279 beacon_diff = beacon_aver[max_beacon_chain_n] - in iwk_rxgain_diff()
5282 gain_diff_p->disconnect_chain[i] = 1; in iwk_rxgain_diff()
5284 gain_diff_p->connected_chains |= in iwk_rxgain_diff()
5294 if (gain_diff_p->disconnect_chain[0] && in iwk_rxgain_diff()
5295 gain_diff_p->disconnect_chain[1]) { in iwk_rxgain_diff()
5297 gain_diff_p->disconnect_chain[0] = 0; in iwk_rxgain_diff()
5298 gain_diff_p->connected_chains |= (1 << 0); in iwk_rxgain_diff()
5300 gain_diff_p->disconnect_chain[1] = 0; in iwk_rxgain_diff()
5301 gain_diff_p->connected_chains |= (1 << 1); in iwk_rxgain_diff()
5306 if (!gain_diff_p->disconnect_chain[0]) { in iwk_rxgain_diff()
5310 if (!gain_diff_p->disconnect_chain[i] && in iwk_rxgain_diff()
5321 if (!gain_diff_p->disconnect_chain[i] && in iwk_rxgain_diff()
5329 gain_diff_p->gain_diff_chain[min_noise_chain_n] = 0; in iwk_rxgain_diff()
5333 if (!gain_diff_p->disconnect_chain[i] && in iwk_rxgain_diff()
5335 gain_diff_p->gain_diff_chain[i])) { in iwk_rxgain_diff()
5337 noise_diff = noise_aver[i] - in iwk_rxgain_diff()
5339 gain_diff_p->gain_diff_chain[i] = in iwk_rxgain_diff()
5342 if (gain_diff_p->gain_diff_chain[i] > 3) { in iwk_rxgain_diff()
5343 gain_diff_p->gain_diff_chain[i] = 3; in iwk_rxgain_diff()
5346 gain_diff_p->gain_diff_chain[i] |= (1 << 2); in iwk_rxgain_diff()
5348 gain_diff_p->gain_diff_chain[i] = 0; in iwk_rxgain_diff()
5352 if (!gain_diff_p->gain_diff_send) { in iwk_rxgain_diff()
5353 gain_diff_p->gain_diff_send = 1; in iwk_rxgain_diff()
5358 cmd.diff_gain_a = gain_diff_p->gain_diff_chain[0]; in iwk_rxgain_diff()
5359 cmd.diff_gain_b = gain_diff_p->gain_diff_chain[1]; in iwk_rxgain_diff()
5360 cmd.diff_gain_c = gain_diff_p->gain_diff_chain[2]; in iwk_rxgain_diff()
5372 gain_diff_p->state = IWK_GAIN_DIFF_CALIBRATED; in iwk_rxgain_diff()
5375 gain_diff_p->beacon_stren_a = 0; in iwk_rxgain_diff()
5376 gain_diff_p->beacon_stren_b = 0; in iwk_rxgain_diff()
5377 gain_diff_p->beacon_stren_c = 0; in iwk_rxgain_diff()
5379 gain_diff_p->noise_stren_a = 0; in iwk_rxgain_diff()
5380 gain_diff_p->noise_stren_b = 0; in iwk_rxgain_diff()
5381 gain_diff_p->noise_stren_c = 0; in iwk_rxgain_diff()
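/*
 * Recap of the visible flow in iwk_rxgain_diff(): beacon and silence
 * (noise) strengths are accumulated per RX chain; after BEACON_NUM_20
 * beacons the averages decide which chains are physically connected
 * (chains whose beacon average falls far below the strongest one are
 * marked disconnected), the quietest connected chain becomes the gain
 * reference, and the per-chain differences are clamped and sent once to
 * the firmware as diff_gain_a/b/c before the accumulators are reset.
 */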
5392 struct iwk_rx_sensitivity *rx_sens_p = &sc->sc_rx_sens; in iwk_rx_sens_init()
5397 rx_sens_p->auto_corr_ofdm_x4 = 90; in iwk_rx_sens_init()
5398 rx_sens_p->auto_corr_mrc_ofdm_x4 = 170; in iwk_rx_sens_init()
5399 rx_sens_p->auto_corr_ofdm_x1 = 105; in iwk_rx_sens_init()
5400 rx_sens_p->auto_corr_mrc_ofdm_x1 = 220; in iwk_rx_sens_init()
5402 rx_sens_p->auto_corr_cck_x4 = 125; in iwk_rx_sens_init()
5403 rx_sens_p->auto_corr_mrc_cck_x4 = 200; in iwk_rx_sens_init()
5404 rx_sens_p->min_energy_det_cck = 100; in iwk_rx_sens_init()
5406 rx_sens_p->flags &= (~IWK_SENSITIVITY_CALIB_ALLOW_MSK); in iwk_rx_sens_init()
5407 rx_sens_p->flags &= (~IWK_SENSITIVITY_OFDM_UPDATE_MSK); in iwk_rx_sens_init()
5408 rx_sens_p->flags &= (~IWK_SENSITIVITY_CCK_UPDATE_MSK); in iwk_rx_sens_init()
5410 rx_sens_p->last_bad_plcp_cnt_ofdm = 0; in iwk_rx_sens_init()
5411 rx_sens_p->last_false_alarm_cnt_ofdm = 0; in iwk_rx_sens_init()
5412 rx_sens_p->last_bad_plcp_cnt_cck = 0; in iwk_rx_sens_init()
5413 rx_sens_p->last_false_alarm_cnt_cck = 0; in iwk_rx_sens_init()
5415 rx_sens_p->cck_curr_state = IWK_TOO_MANY_FALSE_ALARM; in iwk_rx_sens_init()
5416 rx_sens_p->cck_prev_state = IWK_TOO_MANY_FALSE_ALARM; in iwk_rx_sens_init()
5417 rx_sens_p->cck_no_false_alarm_num = 0; in iwk_rx_sens_init()
5418 rx_sens_p->cck_beacon_idx = 0; in iwk_rx_sens_init()
5421 rx_sens_p->cck_beacon_min[i] = 0; in iwk_rx_sens_init()
5424 rx_sens_p->cck_noise_idx = 0; in iwk_rx_sens_init()
5425 rx_sens_p->cck_noise_ref = 0; in iwk_rx_sens_init()
5428 rx_sens_p->cck_noise_max[i] = 0; in iwk_rx_sens_init()
5431 rx_sens_p->cck_noise_diff = 0; in iwk_rx_sens_init()
5432 rx_sens_p->cck_no_false_alarm_num = 0; in iwk_rx_sens_init()
5437 LE_16(rx_sens_p->auto_corr_ofdm_x4); in iwk_rx_sens_init()
5439 LE_16(rx_sens_p->auto_corr_mrc_ofdm_x4); in iwk_rx_sens_init()
5441 LE_16(rx_sens_p->auto_corr_ofdm_x1); in iwk_rx_sens_init()
5443 LE_16(rx_sens_p->auto_corr_mrc_ofdm_x1); in iwk_rx_sens_init()
5446 LE_16(rx_sens_p->auto_corr_cck_x4); in iwk_rx_sens_init()
5448 LE_16(rx_sens_p->auto_corr_mrc_cck_x4); in iwk_rx_sens_init()
5450 LE_16(rx_sens_p->min_energy_det_cck); in iwk_rx_sens_init()
5466 rx_sens_p->flags |= IWK_SENSITIVITY_CALIB_ALLOW_MSK; in iwk_rx_sens_init()
5480 &sc->sc_statistics.rx.general; in iwk_rx_sens()
5481 struct iwk_rx_sensitivity *rx_sens_p = &sc->sc_rx_sens; in iwk_rx_sens()
5484 if (!(rx_sens_p->flags & IWK_SENSITIVITY_CALIB_ALLOW_MSK)) { in iwk_rx_sens()
5491 LE_32(rx_general_p->interference_data_flag)) { in iwk_rx_sens()
5498 actual_rx_time = LE_32(rx_general_p->channel_load); in iwk_rx_sens()
5521 if ((!(rx_sens_p->flags & IWK_SENSITIVITY_OFDM_UPDATE_MSK)) && in iwk_rx_sens()
5522 (!(rx_sens_p->flags & IWK_SENSITIVITY_CCK_UPDATE_MSK))) { in iwk_rx_sens()
5529 rx_sens_p->auto_corr_ofdm_x4; in iwk_rx_sens()
5531 rx_sens_p->auto_corr_mrc_ofdm_x4; in iwk_rx_sens()
5533 rx_sens_p->auto_corr_ofdm_x1; in iwk_rx_sens()
5535 rx_sens_p->auto_corr_mrc_ofdm_x1; in iwk_rx_sens()
5538 rx_sens_p->auto_corr_cck_x4; in iwk_rx_sens()
5540 rx_sens_p->auto_corr_mrc_cck_x4; in iwk_rx_sens()
5542 rx_sens_p->min_energy_det_cck; in iwk_rx_sens()
5578 &sc->sc_statistics.rx.general; in iwk_cck_sens()
5579 struct iwk_rx_sensitivity *rx_sens_p = &sc->sc_rx_sens; in iwk_cck_sens()
5581 cck_fa = LE_32(sc->sc_statistics.rx.cck.false_alarm_cnt); in iwk_cck_sens()
5582 cck_bp = LE_32(sc->sc_statistics.rx.cck.plcp_err); in iwk_cck_sens()
5585 if (rx_sens_p->last_false_alarm_cnt_cck > cck_fa) { in iwk_cck_sens()
5586 temp = rx_sens_p->last_false_alarm_cnt_cck; in iwk_cck_sens()
5587 rx_sens_p->last_false_alarm_cnt_cck = cck_fa; in iwk_cck_sens()
5588 cck_fa += (0xFFFFFFFF - temp); in iwk_cck_sens()
5590 cck_fa -= rx_sens_p->last_false_alarm_cnt_cck; in iwk_cck_sens()
5591 rx_sens_p->last_false_alarm_cnt_cck += cck_fa; in iwk_cck_sens()
5595 if (rx_sens_p->last_bad_plcp_cnt_cck > cck_bp) { in iwk_cck_sens()
5596 temp = rx_sens_p->last_bad_plcp_cnt_cck; in iwk_cck_sens()
5597 rx_sens_p->last_bad_plcp_cnt_cck = cck_bp; in iwk_cck_sens()
5598 cck_bp += (0xFFFFFFFF - temp); in iwk_cck_sens()
5600 cck_bp -= rx_sens_p->last_bad_plcp_cnt_cck; in iwk_cck_sens()
5601 rx_sens_p->last_bad_plcp_cnt_cck += cck_bp; in iwk_cck_sens()
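/*
 * Sketch (assumed helper): the false-alarm and bad-PLCP statistics handled
 * above are free-running 32-bit uCode counters, so the per-interval delta
 * has to tolerate wraparound.  iwk_ofdm_sens() repeats the same pattern.
 */
static uint32_t
counter_delta_sketch(uint32_t *last, uint32_t curr)
{
	uint32_t delta;

	if (*last > curr)			/* counter wrapped */
		delta = curr + (0xFFFFFFFF - *last);
	else
		delta = curr - *last;
	*last = curr;
	return (delta);
}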
5608 rx_sens_p->cck_noise_diff = 0; in iwk_cck_sens()
5611 (uint8_t)((LE_32(rx_general_p->beacon_silence_rssi_a) & 0xFF00) >> in iwk_cck_sens()
5614 (uint8_t)((LE_32(rx_general_p->beacon_silence_rssi_b) & 0xFF00) >> in iwk_cck_sens()
5617 (uint8_t)((LE_32(rx_general_p->beacon_silence_rssi_c) & 0xFF00) >> in iwk_cck_sens()
5620 beacon_a = LE_32(rx_general_p->beacon_energy_a); in iwk_cck_sens()
5621 beacon_b = LE_32(rx_general_p->beacon_energy_b); in iwk_cck_sens()
5622 beacon_c = LE_32(rx_general_p->beacon_energy_c); in iwk_cck_sens()
5634 rx_sens_p->cck_noise_max[rx_sens_p->cck_noise_idx] = max_noise_abc; in iwk_cck_sens()
5635 rx_sens_p->cck_noise_idx++; in iwk_cck_sens()
5636 if (rx_sens_p->cck_noise_idx >= 20) { in iwk_cck_sens()
5637 rx_sens_p->cck_noise_idx = 0; in iwk_cck_sens()
5641 max_noise_20 = rx_sens_p->cck_noise_max[0]; in iwk_cck_sens()
5643 if (rx_sens_p->cck_noise_max[i] >= max_noise_20) { in iwk_cck_sens()
5644 max_noise_20 = rx_sens_p->cck_noise_max[i]; in iwk_cck_sens()
5658 rx_sens_p->cck_beacon_min[rx_sens_p->cck_beacon_idx] = min_beacon_abc; in iwk_cck_sens()
5659 rx_sens_p->cck_beacon_idx++; in iwk_cck_sens()
5660 if (rx_sens_p->cck_beacon_idx >= 10) { in iwk_cck_sens()
5661 rx_sens_p->cck_beacon_idx = 0; in iwk_cck_sens()
5665 max_beacon_10 = rx_sens_p->cck_beacon_min[0]; in iwk_cck_sens()
5667 if (rx_sens_p->cck_beacon_min[i] >= max_beacon_10) { in iwk_cck_sens()
5668 max_beacon_10 = rx_sens_p->cck_beacon_min[i]; in iwk_cck_sens()
5677 rx_sens_p->cck_no_false_alarm_num++; in iwk_cck_sens()
5679 rx_sens_p->cck_no_false_alarm_num = 0; in iwk_cck_sens()
5688 rx_sens_p->cck_curr_state = IWK_TOO_MANY_FALSE_ALARM; in iwk_cck_sens()
5690 if (rx_sens_p->auto_corr_cck_x4 > 160) { in iwk_cck_sens()
5691 rx_sens_p->cck_noise_ref = max_noise_20; in iwk_cck_sens()
5693 if (rx_sens_p->min_energy_det_cck > 2) { in iwk_cck_sens()
5694 rx_sens_p->min_energy_det_cck -= 2; in iwk_cck_sens()
5698 if (rx_sens_p->auto_corr_cck_x4 < 160) { in iwk_cck_sens()
5699 rx_sens_p->auto_corr_cck_x4 = 160 + 1; in iwk_cck_sens()
5701 if ((rx_sens_p->auto_corr_cck_x4 + 3) < 200) { in iwk_cck_sens()
5702 rx_sens_p->auto_corr_cck_x4 += 3; in iwk_cck_sens()
5704 rx_sens_p->auto_corr_cck_x4 = 200; in iwk_cck_sens()
5708 if ((rx_sens_p->auto_corr_mrc_cck_x4 + 3) < 400) { in iwk_cck_sens()
5709 rx_sens_p->auto_corr_mrc_cck_x4 += 3; in iwk_cck_sens()
5711 rx_sens_p->auto_corr_mrc_cck_x4 = 400; in iwk_cck_sens()
5714 rx_sens_p->flags |= IWK_SENSITIVITY_CCK_UPDATE_MSK; in iwk_cck_sens()
5717 rx_sens_p->cck_curr_state = IWK_TOO_FEW_FALSE_ALARM; in iwk_cck_sens()
5719 rx_sens_p->cck_noise_diff = (int32_t)rx_sens_p->cck_noise_ref - in iwk_cck_sens()
5722 if ((rx_sens_p->cck_prev_state != IWK_TOO_MANY_FALSE_ALARM) && in iwk_cck_sens()
5723 ((rx_sens_p->cck_noise_diff > 2) || in iwk_cck_sens()
5724 (rx_sens_p->cck_no_false_alarm_num > 100))) { in iwk_cck_sens()
5725 if ((rx_sens_p->min_energy_det_cck + 2) < 97) { in iwk_cck_sens()
5726 rx_sens_p->min_energy_det_cck += 2; in iwk_cck_sens()
5728 rx_sens_p->min_energy_det_cck = 97; in iwk_cck_sens()
5731 if ((rx_sens_p->auto_corr_cck_x4 - 3) > 125) { in iwk_cck_sens()
5732 rx_sens_p->auto_corr_cck_x4 -= 3; in iwk_cck_sens()
5734 rx_sens_p->auto_corr_cck_x4 = 125; in iwk_cck_sens()
5737 if ((rx_sens_p->auto_corr_mrc_cck_x4 - 3) > 200) { in iwk_cck_sens()
5738 rx_sens_p->auto_corr_mrc_cck_x4 -= 3; in iwk_cck_sens()
5740 rx_sens_p->auto_corr_mrc_cck_x4 = 200; in iwk_cck_sens()
5743 rx_sens_p->flags |= IWK_SENSITIVITY_CCK_UPDATE_MSK; in iwk_cck_sens()
5745 rx_sens_p->flags &= (~IWK_SENSITIVITY_CCK_UPDATE_MSK); in iwk_cck_sens()
5748 rx_sens_p->cck_curr_state = IWK_GOOD_RANGE_FALSE_ALARM; in iwk_cck_sens()
5750 rx_sens_p->cck_noise_ref = max_noise_20; in iwk_cck_sens()
5752 if (IWK_TOO_MANY_FALSE_ALARM == rx_sens_p->cck_prev_state) { in iwk_cck_sens()
5753 rx_sens_p->min_energy_det_cck -= 8; in iwk_cck_sens()
5756 rx_sens_p->flags &= (~IWK_SENSITIVITY_CCK_UPDATE_MSK); in iwk_cck_sens()
5759 if (rx_sens_p->min_energy_det_cck < max_beacon_10) { in iwk_cck_sens()
5760 rx_sens_p->min_energy_det_cck = (uint16_t)max_beacon_10; in iwk_cck_sens()
5763 rx_sens_p->cck_prev_state = rx_sens_p->cck_curr_state; in iwk_cck_sens()
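/*
 * The CCK threshold updates above (and the OFDM ones in iwk_ofdm_sens()
 * below) all follow one pattern: step the auto-correlation or energy
 * threshold by a small amount, saturating at a fixed bound.  A minimal
 * sketch of that pattern, with an assumed helper name:
 */
static uint16_t
step_clamped_sketch(uint16_t val, int32_t step, uint16_t lo, uint16_t hi)
{
	int32_t next = (int32_t)val + step;

	if (next < (int32_t)lo)
		return (lo);
	if (next > (int32_t)hi)
		return (hi);
	return ((uint16_t)next);
}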
5778 struct iwk_rx_sensitivity *rx_sens_p = &sc->sc_rx_sens; in iwk_ofdm_sens()
5780 ofdm_fa = LE_32(sc->sc_statistics.rx.ofdm.false_alarm_cnt); in iwk_ofdm_sens()
5781 ofdm_bp = LE_32(sc->sc_statistics.rx.ofdm.plcp_err); in iwk_ofdm_sens()
5784 if (rx_sens_p->last_false_alarm_cnt_ofdm > ofdm_fa) { in iwk_ofdm_sens()
5785 temp = rx_sens_p->last_false_alarm_cnt_ofdm; in iwk_ofdm_sens()
5786 rx_sens_p->last_false_alarm_cnt_ofdm = ofdm_fa; in iwk_ofdm_sens()
5787 ofdm_fa += (0xFFFFFFFF - temp); in iwk_ofdm_sens()
5789 ofdm_fa -= rx_sens_p->last_false_alarm_cnt_ofdm; in iwk_ofdm_sens()
5790 rx_sens_p->last_false_alarm_cnt_ofdm += ofdm_fa; in iwk_ofdm_sens()
5794 if (rx_sens_p->last_bad_plcp_cnt_ofdm > ofdm_bp) { in iwk_ofdm_sens()
5795 temp = rx_sens_p->last_bad_plcp_cnt_ofdm; in iwk_ofdm_sens()
5796 rx_sens_p->last_bad_plcp_cnt_ofdm = ofdm_bp; in iwk_ofdm_sens()
5797 ofdm_bp += (0xFFFFFFFF - temp); in iwk_ofdm_sens()
5799 ofdm_bp -= rx_sens_p->last_bad_plcp_cnt_ofdm; in iwk_ofdm_sens()
5800 rx_sens_p->last_bad_plcp_cnt_ofdm += ofdm_bp; in iwk_ofdm_sens()
5809 temp1 = rx_sens_p->auto_corr_ofdm_x4 + 1; in iwk_ofdm_sens()
5810 rx_sens_p->auto_corr_ofdm_x4 = (temp1 <= 120) ? temp1 : 120; in iwk_ofdm_sens()
5812 temp1 = rx_sens_p->auto_corr_mrc_ofdm_x4 + 1; in iwk_ofdm_sens()
5813 rx_sens_p->auto_corr_mrc_ofdm_x4 = in iwk_ofdm_sens()
5816 temp1 = rx_sens_p->auto_corr_ofdm_x1 + 1; in iwk_ofdm_sens()
5817 rx_sens_p->auto_corr_ofdm_x1 = (temp1 <= 140) ? temp1 : 140; in iwk_ofdm_sens()
5819 temp1 = rx_sens_p->auto_corr_mrc_ofdm_x1 + 1; in iwk_ofdm_sens()
5820 rx_sens_p->auto_corr_mrc_ofdm_x1 = in iwk_ofdm_sens()
5823 rx_sens_p->flags |= IWK_SENSITIVITY_OFDM_UPDATE_MSK; in iwk_ofdm_sens()
5826 temp1 = rx_sens_p->auto_corr_ofdm_x4 - 1; in iwk_ofdm_sens()
5827 rx_sens_p->auto_corr_ofdm_x4 = (temp1 >= 85) ? temp1 : 85; in iwk_ofdm_sens()
5829 temp1 = rx_sens_p->auto_corr_mrc_ofdm_x4 - 1; in iwk_ofdm_sens()
5830 rx_sens_p->auto_corr_mrc_ofdm_x4 = in iwk_ofdm_sens()
5833 temp1 = rx_sens_p->auto_corr_ofdm_x1 - 1; in iwk_ofdm_sens()
5834 rx_sens_p->auto_corr_ofdm_x1 = (temp1 >= 105) ? temp1 : 105; in iwk_ofdm_sens()
5836 temp1 = rx_sens_p->auto_corr_mrc_ofdm_x1 - 1; in iwk_ofdm_sens()
5837 rx_sens_p->auto_corr_mrc_ofdm_x1 = in iwk_ofdm_sens()
5840 rx_sens_p->flags |= IWK_SENSITIVITY_OFDM_UPDATE_MSK; in iwk_ofdm_sens()
5843 rx_sens_p->flags &= (~IWK_SENSITIVITY_OFDM_UPDATE_MSK); in iwk_ofdm_sens()
5861 sc->sc_recv_mgmt(ic, mp, in, subtype, rssi, rstamp); in iwk_recv_mgmt()
5863 mutex_enter(&sc->sc_glock); in iwk_recv_mgmt()
5866 if (sc->sc_ibss.ibss_beacon.syncbeacon && in == ic->ic_bss && in iwk_recv_mgmt()
5867 ic->ic_state == IEEE80211_S_RUN) { in iwk_recv_mgmt()
5869 &sc->sc_ibss.ibss_beacon.iwk_boff, in iwk_recv_mgmt()
5870 sc->sc_ibss.ibss_beacon.mp, 0)) { in iwk_recv_mgmt()
5871 bcopy(sc->sc_ibss.ibss_beacon.mp->b_rptr, in iwk_recv_mgmt()
5872 sc->sc_ibss.ibss_beacon.beacon_cmd. in iwk_recv_mgmt()
5874 MBLKL(sc->sc_ibss.ibss_beacon.mp)); in iwk_recv_mgmt()
5877 &sc->sc_ibss.ibss_beacon.beacon_cmd, in iwk_recv_mgmt()
5878 sc->sc_ibss.ibss_beacon.beacon_cmd_len, 1); in iwk_recv_mgmt()
5883 sc->sc_ibss.ibss_beacon.syncbeacon = 0; in iwk_recv_mgmt()
5885 if (ic->ic_opmode == IEEE80211_M_IBSS && in iwk_recv_mgmt()
5886 ic->ic_state == IEEE80211_S_RUN) { in iwk_recv_mgmt()
5887 wh = (struct ieee80211_frame *)mp->b_rptr; in iwk_recv_mgmt()
5888 mutex_enter(&sc->sc_ibss.node_tb_lock); in iwk_recv_mgmt()
5894 if (sc->sc_ibss.ibss_node_tb[index1].used && in iwk_recv_mgmt()
5895 IEEE80211_ADDR_EQ(sc->sc_ibss. in iwk_recv_mgmt()
5897 wh->i_addr2)) { in iwk_recv_mgmt()
5906 wh->i_addr2, &index2); in iwk_recv_mgmt()
5913 mutex_exit(&sc->sc_ibss.node_tb_lock); in iwk_recv_mgmt()
5919 mutex_exit(&sc->sc_glock); in iwk_recv_mgmt()
5925 * a 256-entry history of uCode execution within a circular buffer.
5936 * uint32_t event_id; range 0 - 1500
5938 * uint32_t data; event_id-specific data value
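/*
 * Illustrative record layout only, taken from the field comments above;
 * the on-device entry may carry additional words (such as a timestamp)
 * that are not visible in this excerpt.
 */
typedef struct iwk_ucode_event_sketch {
	uint32_t	event_id;	/* range 0 - 1500 */
	uint32_t	data;		/* event_id-specific data value */
} iwk_ucode_event_sketch_t;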
5944 * iwk_write_event_log - Write event log to dmesg
5960 log_event_table_ptr = LE_32(sc->sc_card_alive_run.log_event_table_ptr); in iwk_write_event_log()
5988 num_events = log_size - idx; in iwk_write_event_log()
6047 * uint32_t data1; error-specific data
6048 * uint32_t data2; error-specific data
6055 * iwk_write_error_log - Write error log to dmesg
6062 err_ptr = LE_32(sc->sc_card_alive_run.error_event_table_ptr); in iwk_write_error_log()
6113 ieee80211_node_t *in = ic->ic_bss; in iwk_run_state_config_ibss()
6116 mutex_enter(&sc->sc_ibss.node_tb_lock); in iwk_run_state_config_ibss()
6123 sc->sc_ibss.ibss_node_tb[i].used = 0; in iwk_run_state_config_ibss()
6124 (void) memset(&sc->sc_ibss.ibss_node_tb[i].node, in iwk_run_state_config_ibss()
6129 sc->sc_ibss.node_number = 0; in iwk_run_state_config_ibss()
6131 mutex_exit(&sc->sc_ibss.node_tb_lock); in iwk_run_state_config_ibss()
6136 sc->sc_config.dev_type = RXON_DEV_TYPE_IBSS; in iwk_run_state_config_ibss()
6138 sc->sc_config.flags |= LE_32(RXON_FLG_SHORT_PREAMBLE_MSK); in iwk_run_state_config_ibss()
6139 sc->sc_config.filter_flags = in iwk_run_state_config_ibss()
6144 sc->sc_config.assoc_id = 0; in iwk_run_state_config_ibss()
6146 IEEE80211_ADDR_COPY(sc->sc_config.bssid, in->in_bssid); in iwk_run_state_config_ibss()
6147 sc->sc_config.chan = LE_16(ieee80211_chan2ieee(ic, in iwk_run_state_config_ibss()
6148 in->in_chan)); in iwk_run_state_config_ibss()
6150 if (ic->ic_curmode == IEEE80211_MODE_11B) { in iwk_run_state_config_ibss()
6151 sc->sc_config.cck_basic_rates = 0x03; in iwk_run_state_config_ibss()
6152 sc->sc_config.ofdm_basic_rates = 0; in iwk_run_state_config_ibss()
6153 } else if ((in->in_chan != IEEE80211_CHAN_ANYC) && in iwk_run_state_config_ibss()
6154 (IEEE80211_IS_CHAN_5GHZ(in->in_chan))) { in iwk_run_state_config_ibss()
6155 sc->sc_config.cck_basic_rates = 0; in iwk_run_state_config_ibss()
6156 sc->sc_config.ofdm_basic_rates = 0x15; in iwk_run_state_config_ibss()
6159 sc->sc_config.cck_basic_rates = 0x0f; in iwk_run_state_config_ibss()
6160 sc->sc_config.ofdm_basic_rates = 0xff; in iwk_run_state_config_ibss()
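/*
 * Note on the bitmaps above (interpretation, not stated in the source):
 * each bit selects one basic rate; 0x03 covers the 1/2 Mbps CCK rates,
 * 0x0f adds 5.5/11 Mbps, 0x15 selects the mandatory 6/12/24 Mbps OFDM
 * rates for the 5 GHz case, and 0xff enables all eight OFDM rates.
 */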
6163 sc->sc_config.flags &= in iwk_run_state_config_ibss()
6167 if (ic->ic_flags & IEEE80211_F_SHSLOT) { in iwk_run_state_config_ibss()
6168 sc->sc_config.flags |= in iwk_run_state_config_ibss()
6172 if (ic->ic_flags & IEEE80211_F_SHPREAMBLE) { in iwk_run_state_config_ibss()
6173 sc->sc_config.flags |= in iwk_run_state_config_ibss()
6177 sc->sc_config.filter_flags |= in iwk_run_state_config_ibss()
6180 err = iwk_cmd(sc, REPLY_RXON, &sc->sc_config, in iwk_run_state_config_ibss()
6196 ieee80211_node_t *in = ic->ic_bss; in iwk_run_state_config_sta()
6200 if (sc->sc_assoc_id != in->in_associd) { in iwk_run_state_config_sta()
6204 in->in_associd, sc->sc_assoc_id); in iwk_run_state_config_sta()
6206 sc->sc_config.assoc_id = LE_16(in->in_associd & 0x3fff); in iwk_run_state_config_sta()
6212 sc->sc_config.flags &= in iwk_run_state_config_sta()
6216 if (ic->ic_flags & IEEE80211_F_SHSLOT) in iwk_run_state_config_sta()
6217 sc->sc_config.flags |= in iwk_run_state_config_sta()
6220 if (ic->ic_flags & IEEE80211_F_SHPREAMBLE) in iwk_run_state_config_sta()
6221 sc->sc_config.flags |= in iwk_run_state_config_sta()
6224 sc->sc_config.filter_flags |= in iwk_run_state_config_sta()
6227 if (ic->ic_opmode != IEEE80211_M_STA) in iwk_run_state_config_sta()
6228 sc->sc_config.filter_flags |= in iwk_run_state_config_sta()
6233 sc->sc_config.chan, sc->sc_config.flags, in iwk_run_state_config_sta()
6234 sc->sc_config.filter_flags)); in iwk_run_state_config_sta()
6236 err = iwk_cmd(sc, REPLY_RXON, &sc->sc_config, in iwk_run_state_config_sta()
6250 ieee80211com_t *ic = &sc->sc_ic; in iwk_fast_recover()
6253 mutex_enter(&sc->sc_glock); in iwk_fast_recover()
6256 bcopy(&sc->sc_config_save, &sc->sc_config, in iwk_fast_recover()
6257 sizeof (sc->sc_config)); in iwk_fast_recover()
6260 sc->sc_config.assoc_id = 0; in iwk_fast_recover()
6261 sc->sc_config.filter_flags &= in iwk_fast_recover()
6267 mutex_exit(&sc->sc_glock); in iwk_fast_recover()
6271 bcopy(&sc->sc_config_save, &sc->sc_config, in iwk_fast_recover()
6272 sizeof (sc->sc_config)); in iwk_fast_recover()
6279 mutex_exit(&sc->sc_glock); in iwk_fast_recover()
6284 sc->sc_tempera = iwk_curr_tempera(sc); in iwk_fast_recover()
6294 mutex_exit(&sc->sc_glock); in iwk_fast_recover()
6306 mutex_exit(&sc->sc_glock); in iwk_fast_recover()
6315 mutex_exit(&sc->sc_glock); in iwk_fast_recover()
6322 mutex_exit(&sc->sc_glock); in iwk_fast_recover()
6325 if (ic->ic_flags & IEEE80211_F_PRIVACY) { in iwk_fast_recover()
6327 if (ic->ic_nw_keys[i].wk_keyix == IEEE80211_KEYIX_NONE) in iwk_fast_recover()
6329 err = iwk_key_set(ic, &ic->ic_nw_keys[i], in iwk_fast_recover()
6330 ic->ic_bss->in_macaddr); in iwk_fast_recover()
6340 sc->sc_flags &= ~IWK_F_HW_ERR_RECOVER; in iwk_fast_recover()
6344 mac_tx_update(ic->ic_mach); in iwk_fast_recover()
6354 ieee80211_node_t *in = ic->ic_bss; in iwk_start_tx_beacon()
6364 tx_beacon_p = &sc->sc_ibss.ibss_beacon.beacon_cmd; in iwk_start_tx_beacon()
6371 tx_beacon_p->config.sta_id = IWK_BROADCAST_ID; in iwk_start_tx_beacon()
6372 tx_beacon_p->config.stop_time.life_time = in iwk_start_tx_beacon()
6375 if (sc->sc_ibss.ibss_beacon.mp != NULL) { in iwk_start_tx_beacon()
6376 freemsg(sc->sc_ibss.ibss_beacon.mp); in iwk_start_tx_beacon()
6377 sc->sc_ibss.ibss_beacon.mp = NULL; in iwk_start_tx_beacon()
6380 sc->sc_ibss.ibss_beacon.mp = in iwk_start_tx_beacon()
6382 &sc->sc_ibss.ibss_beacon.iwk_boff); in iwk_start_tx_beacon()
6383 if (sc->sc_ibss.ibss_beacon.mp == NULL) { in iwk_start_tx_beacon()
6389 mp = sc->sc_ibss.ibss_beacon.mp; in iwk_start_tx_beacon()
6391 ASSERT(mp->b_cont == NULL); in iwk_start_tx_beacon()
6393 bcopy(mp->b_rptr, tx_beacon_p->bcon_frame, MBLKL(mp)); in iwk_start_tx_beacon()
6395 tx_beacon_p->config.len = LE_16((uint16_t)(MBLKL(mp))); in iwk_start_tx_beacon()
6396 sc->sc_ibss.ibss_beacon.beacon_cmd_len = in iwk_start_tx_beacon()
6398 4 + LE_16(tx_beacon_p->config.len); in iwk_start_tx_beacon()
6403 rate = in->in_rates.ir_rates[0]; in iwk_start_tx_beacon()
6413 tx_beacon_p->config.rate.r.rate_n_flags = in iwk_start_tx_beacon()
6417 tx_beacon_p->config.tx_flags = in iwk_start_tx_beacon()
6420 if (ic->ic_bss->in_tstamp.tsf != 0) { in iwk_start_tx_beacon()
6421 sc->sc_ibss.ibss_beacon.syncbeacon = 1; in iwk_start_tx_beacon()
6424 &sc->sc_ibss.ibss_beacon.iwk_boff, in iwk_start_tx_beacon()
6426 bcopy(mp->b_rptr, in iwk_start_tx_beacon()
6427 tx_beacon_p->bcon_frame, in iwk_start_tx_beacon()
6433 sc->sc_ibss.ibss_beacon.beacon_cmd_len, in iwk_start_tx_beacon()
6441 sc->sc_ibss.ibss_beacon.syncbeacon = 0; in iwk_start_tx_beacon()
6469 if (!sc->sc_ibss.ibss_node_tb[index].used) { in iwk_clean_add_node_ibss()
6478 sc->sc_ibss.node_number >= 25) { in iwk_clean_add_node_ibss()
6487 sc->sc_ibss.ibss_node_tb[i].used = 0; in iwk_clean_add_node_ibss()
6488 (void) memset(&sc->sc_ibss.ibss_node_tb[i].node, in iwk_clean_add_node_ibss()
6492 sc->sc_ibss.node_number = 0; in iwk_clean_add_node_ibss()
6543 ibss_node_p = &sc->sc_ibss.ibss_node_tb[index]; in iwk_clean_add_node_ibss()
6545 ibss_node_p->used = 1; in iwk_clean_add_node_ibss()
6547 (void) memset(&ibss_node_p->node, 0, in iwk_clean_add_node_ibss()
6550 IEEE80211_ADDR_COPY(ibss_node_p->node.bssid, addr); in iwk_clean_add_node_ibss()
6551 ibss_node_p->node.id = index; in iwk_clean_add_node_ibss()
6552 ibss_node_p->node.control = 0; in iwk_clean_add_node_ibss()
6553 ibss_node_p->node.flags = 0; in iwk_clean_add_node_ibss()
6555 err = iwk_cmd(sc, REPLY_ADD_STA, &ibss_node_p->node, in iwk_clean_add_node_ibss()
6560 ibss_node_p->used = 0; in iwk_clean_add_node_ibss()
6561 (void) memset(&ibss_node_p->node, 0, in iwk_clean_add_node_ibss()
6566 sc->sc_ibss.node_number++; in iwk_clean_add_node_ibss()
6570 rs = ic->ic_sup_rates[ieee80211_chan2mode(ic, in iwk_clean_add_node_ibss()
6571 ic->ic_curchan)]; in iwk_clean_add_node_ibss()
6576 ir_rates[rs.ir_nrates - i]; in iwk_clean_add_node_ibss()
6597 link_quality.sta_id = ibss_node_p->node.id; in iwk_clean_add_node_ibss()
6604 ibss_node_p->used = 0; in iwk_clean_add_node_ibss()
6605 (void) memset(ibss_node_p->node.bssid, 0, 6); in iwk_clean_add_node_ibss()