Lines matching "out-of-band" (query tokens: +full:out +full:- +full:of +full:- +full:band)

1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2019-2020 Realtek Corporation
19 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
21 return phy->phy0_phy1_offset(rtwdev, addr);
27 u32 bit_rate = report->bit_rate;
34 if (report->might_fallback_legacy)
49 return rtwdev->chip->max_amsdu_limit;
65 ra_mask |= GENMASK_ULL(highest_mcs - gap, 0) << nss;
68 ra_mask |= GENMASK_ULL(highest_mcs - gap * 2, 0) << nss;
80 struct ieee80211_sta_he_cap cap = sta->deflink.he_cap;
83 switch (sta->deflink.bandwidth) {
123 struct ieee80211_sta_eht_cap *eht_cap = &sta->deflink.eht_cap;
126 u8 *he_phy_cap = sta->deflink.he_cap.he_cap_elem.phy_cap_info;
128 switch (sta->deflink.bandwidth) {
130 mcs_nss = &eht_cap->eht_mcs_nss_supp.bw._320;
132 return get_eht_mcs_ra_mask(mcs_nss->rx_tx_max_nss, 9, 3);
134 mcs_nss = &eht_cap->eht_mcs_nss_supp.bw._160;
136 return get_eht_mcs_ra_mask(mcs_nss->rx_tx_max_nss, 9, 3);
140 mcs_nss_20mhz = &eht_cap->eht_mcs_nss_supp.only_20mhz;
142 return get_eht_mcs_ra_mask(mcs_nss_20mhz->rx_tx_max_nss, 7, 4);
147 mcs_nss = &eht_cap->eht_mcs_nss_supp.bw._80;
149 return get_eht_mcs_ra_mask(mcs_nss->rx_tx_max_nss, 9, 3);
202 struct cfg80211_bitrate_mask *mask = &rtwsta->mask;
203 enum nl80211_band band;
206 if (!rtwsta->use_cfg_mask)
207 return -1;
209 switch (chan->band_type) {
211 band = NL80211_BAND_2GHZ;
212 cfg_mask = u64_encode_bits(mask->control[NL80211_BAND_2GHZ].legacy,
216 band = NL80211_BAND_5GHZ;
217 cfg_mask = u64_encode_bits(mask->control[NL80211_BAND_5GHZ].legacy,
221 band = NL80211_BAND_6GHZ;
222 cfg_mask = u64_encode_bits(mask->control[NL80211_BAND_6GHZ].legacy,
226 rtw89_warn(rtwdev, "unhandled band type %d\n", chan->band_type);
227 return -1;
230 if (sta->deflink.he_cap.has_he) {
231 cfg_mask |= u64_encode_bits(mask->control[band].he_mcs[0],
233 cfg_mask |= u64_encode_bits(mask->control[band].he_mcs[1],
235 } else if (sta->deflink.vht_cap.vht_supported) {
236 cfg_mask |= u64_encode_bits(mask->control[band].vht_mcs[0],
238 cfg_mask |= u64_encode_bits(mask->control[band].vht_mcs[1],
240 } else if (sta->deflink.ht_cap.ht_supported) {
241 cfg_mask |= u64_encode_bits(mask->control[band].ht_mcs[0],
243 cfg_mask |= u64_encode_bits(mask->control[band].ht_mcs[1],
268 struct cfg80211_bitrate_mask *mask = &rtwsta->mask;
269 u8 band = chan->band_type;
270 enum nl80211_band nl_band = rtw89_hw_to_nl80211_band(band);
271 u8 he_gi = mask->control[nl_band].he_gi;
272 u8 he_ltf = mask->control[nl_band].he_ltf;
274 if (!rtwsta->use_cfg_mask)
300 struct rtw89_sta *rtwsta = (struct rtw89_sta *)sta->drv_priv;
301 struct rtw89_vif *rtwvif = rtwsta->rtwvif;
302 struct rtw89_phy_rate_pattern *rate_pattern = &rtwvif->rate_pattern;
303 struct rtw89_ra_info *ra = &rtwsta->ra;
305 rtwvif->sub_entity_idx);
306 struct ieee80211_vif *vif = rtwvif_to_vif(rtwsta->rtwvif);
308 u8 rssi = ewma_rssi_read(&rtwsta->avg_rssi);
323 if (sta->deflink.eht_cap.has_eht) {
327 } else if (sta->deflink.he_cap.has_he) {
332 if (sta->deflink.he_cap.he_cap_elem.phy_cap_info[2] &
335 if (sta->deflink.he_cap.he_cap_elem.phy_cap_info[1] &
339 } else if (sta->deflink.vht_cap.vht_supported) {
340 u16 mcs_map = le16_to_cpu(sta->deflink.vht_cap.vht_mcs.rx_mcs_map);
347 if (sta->deflink.vht_cap.cap & IEEE80211_VHT_CAP_RXSTBC_MASK)
349 if (sta->deflink.vht_cap.cap & IEEE80211_VHT_CAP_RXLDPC)
351 } else if (sta->deflink.ht_cap.ht_supported) {
354 ra_mask |= ((u64)sta->deflink.ht_cap.mcs.rx_mask[3] << 48) |
355 ((u64)sta->deflink.ht_cap.mcs.rx_mask[2] << 36) |
356 (sta->deflink.ht_cap.mcs.rx_mask[1] << 24) |
357 (sta->deflink.ht_cap.mcs.rx_mask[0] << 12);
359 if (sta->deflink.ht_cap.cap & IEEE80211_HT_CAP_RX_STBC)
361 if (sta->deflink.ht_cap.cap & IEEE80211_HT_CAP_LDPC_CODING)
365 switch (chan->band_type) {
367 ra_mask |= sta->deflink.supp_rates[NL80211_BAND_2GHZ];
368 if (sta->deflink.supp_rates[NL80211_BAND_2GHZ] & 0xf)
370 if (sta->deflink.supp_rates[NL80211_BAND_2GHZ] & 0xff0)
374 ra_mask |= (u64)sta->deflink.supp_rates[NL80211_BAND_5GHZ] << 4;
378 ra_mask |= (u64)sta->deflink.supp_rates[NL80211_BAND_6GHZ] << 4;
382 rtw89_err(rtwdev, "Unknown band type\n");
390 for (i = 0; i < rtwdev->hal.tx_nss; i++)
407 switch (sta->deflink.bandwidth) {
410 sgi = sta->deflink.vht_cap.vht_supported &&
411 (sta->deflink.vht_cap.cap & IEEE80211_VHT_CAP_SHORT_GI_160);
415 sgi = sta->deflink.vht_cap.vht_supported &&
416 (sta->deflink.vht_cap.cap & IEEE80211_VHT_CAP_SHORT_GI_80);
420 sgi = sta->deflink.ht_cap.ht_supported &&
421 (sta->deflink.ht_cap.cap & IEEE80211_HT_CAP_SGI_40);
425 sgi = sta->deflink.ht_cap.ht_supported &&
426 (sta->deflink.ht_cap.cap & IEEE80211_HT_CAP_SGI_20);
430 if (sta->deflink.he_cap.he_cap_elem.phy_cap_info[3] &
432 ra->dcm_cap = 1;
434 if (rate_pattern->enable && !vif->p2p) {
436 ra_mask &= rate_pattern->ra_mask;
437 mode = rate_pattern->ra_mode;
440 ra->bw_cap = bw_mode;
441 ra->er_cap = rtwsta->er_cap;
442 ra->mode_ctrl = mode;
443 ra->macid = rtwsta->mac_id;
444 ra->stbc_cap = stbc_en;
445 ra->ldpc_cap = ldpc_en;
446 ra->ss_num = min(sta->deflink.rx_nss, rtwdev->hal.tx_nss) - 1;
447 ra->en_sgi = sgi;
448 ra->ra_mask = ra_mask;
449 ra->fix_giltf_en = fix_giltf_en;
450 ra->fix_giltf = fix_giltf;
455 ra->fixed_csi_rate_en = false;
456 ra->ra_csi_rate_en = true;
457 ra->cr_tbl_sel = false;
458 ra->band_num = rtwvif->phy_idx;
459 ra->csi_bw = bw_mode;
460 ra->csi_gi_ltf = RTW89_GILTF_LGI_4XHE32;
461 ra->csi_mcs_ss_idx = 5;
462 ra->csi_mode = csi_mode;
468 struct rtw89_sta *rtwsta = (struct rtw89_sta *)sta->drv_priv;
469 struct rtw89_ra_info *ra = &rtwsta->ra;
474 ra->upd_mask = 1;
476 ra->upd_bw_nss_mask = 1;
480 ra->macid,
481 ra->bw_cap,
482 ra->ss_num,
483 ra->en_sgi,
484 ra->giltf);
505 if (next->enable)
509 next->rate = rate_base + c;
510 next->ra_mode = ra_mode;
511 next->ra_mask = ra_mask;
512 next->enable = true;
528 struct rtw89_vif *rtwvif = (struct rtw89_vif *)vif->drv_priv;
531 rtwvif->sub_entity_idx);
550 u8 band = chan->band_type;
551 enum nl80211_band nl_band = rtw89_hw_to_nl80211_band(band);
552 enum rtw89_chip_gen chip_gen = rtwdev->chip->chip_gen;
553 u8 tx_nss = rtwdev->hal.tx_nss;
559 mask->control[nl_band].he_mcs[i],
561 goto out;
566 mask->control[nl_band].vht_mcs[i],
568 goto out;
573 mask->control[nl_band].ht_mcs[i],
575 goto out;
581 sband = rtwdev->hw->wiphy->bands[nl_band];
582 if (band == RTW89_BAND_2G) {
586 mask->control[nl_band].legacy,
587 BIT(sband->n_bitrates) - 1, false))
588 goto out;
592 mask->control[nl_band].legacy,
593 BIT(sband->n_bitrates) - 1, false))
594 goto out;
598 goto out;
600 rtwvif->rate_pattern = next_pattern;
615 out:
616 rtwvif->rate_pattern.enable = false;
629 ieee80211_iterate_stations_atomic(rtwdev->hw,
636 struct rtw89_sta *rtwsta = (struct rtw89_sta *)sta->drv_priv;
637 struct rtw89_ra_info *ra = &rtwsta->ra;
638 u8 rssi = ewma_rssi_read(&rtwsta->avg_rssi) >> RSSI_FACTOR;
644 ra->init_rate_lv = 1;
646 ra->init_rate_lv = 2;
648 ra->init_rate_lv = 3;
650 ra->init_rate_lv = 0;
651 ra->upd_all = 1;
654 ra->macid,
655 ra->mode_ctrl,
656 ra->bw_cap,
657 ra->ss_num,
658 ra->init_rate_lv);
661 ra->dcm_cap,
662 ra->er_cap,
663 ra->ldpc_cap,
664 ra->stbc_cap,
665 ra->en_sgi,
666 ra->giltf);
675 enum rtw89_bandwidth cbw = chan->band_width;
676 u8 pri_ch = chan->primary_channel;
677 u8 central_ch = chan->channel;
691 txsc_idx = (pri_ch - central_ch) >> 1;
693 txsc_idx = ((central_ch - pri_ch) >> 1) + 1;
700 tmp = (pri_ch - central_ch) >> 1;
702 tmp = ((central_ch - pri_ch) >> 1) + 1;
724 txsc_idx = (10 - (pri_ch - central_ch)) >> 1;
726 txsc_idx = ((central_ch - pri_ch) >> 1) + 5;
744 enum rtw89_bandwidth cbw = chan->band_width;
745 u8 pri_ch = chan->primary_channel;
746 u8 central_ch = chan->channel;
758 txsb_idx = (pri_ch - central_ch + 6) / 4;
764 txsb_idx = (pri_ch - central_ch + 14) / 4;
766 txsb_idx = (pri_ch - central_ch + 12) / 8;
772 txsb_idx = (pri_ch - central_ch + 30) / 4;
774 txsb_idx = (pri_ch - central_ch + 28) / 8;
776 txsb_idx = (pri_ch - central_ch + 24) / 16;
797 const struct rtw89_chip_info *chip = rtwdev->chip;
798 const u32 *base_addr = chip->rf_base_addr;
801 if (rf_path >= rtwdev->chip->rf_path_num) {
854 if (rf_path >= rtwdev->chip->rf_path_num) {
894 goto out;
898 out:
919 if (rf_path >= rtwdev->chip->rf_path_num) {
934 const struct rtw89_chip_info *chip = rtwdev->chip;
935 const u32 *base_addr = chip->rf_base_addr;
938 if (rf_path >= rtwdev->chip->rf_path_num) {
998 if (rf_path >= rtwdev->chip->rf_path_num) {
1058 if (rf_path >= rtwdev->chip->rf_path_num) {
1072 return rtwdev->chip->ops->write_rf == rtw89_phy_write_rf_v1;
1078 const struct rtw89_chip_info *chip = rtwdev->chip;
1080 chip->ops->bb_reset(rtwdev, phy_idx);
1090 if (reg->addr == 0xfe) {
1092 } else if (reg->addr == 0xfd) {
1094 } else if (reg->addr == 0xfc) {
1096 } else if (reg->addr == 0xfb) {
1098 } else if (reg->addr == 0xfa) {
1100 } else if (reg->addr == 0xf9) {
1102 } else if (reg->data == BYPASS_CR_DATA) {
1103 rtw89_debug(rtwdev, RTW89_DBG_PHY_TRACK, "Bypass CR 0x%x\n", reg->addr);
1105 addr = reg->addr;
1108 addr += rtw89_phy0_phy1_offset(rtwdev, reg->addr);
1110 rtw89_phy_write32(rtwdev, addr, reg->data);
1134 struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain.ax;
1143 gain->lna_gain[gband][path][i] = data & 0xff;
1147 gain->lna_gain[gband][path][i] = data & 0xff;
1151 gain->tia_gain[gband][path][i] = data & 0xff;
1173 struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain.ax;
1184 gain->rpl_ofst_20[gband][path] = (s8)data;
1188 gain->rpl_ofst_40[gband][path][0] = (s8)data;
1193 gain->rpl_ofst_40[gband][path][rxsc] = ofst;
1199 gain->rpl_ofst_80[gband][path][0] = (s8)data;
1204 gain->rpl_ofst_80[gband][path][rxsc] = ofst;
1210 gain->rpl_ofst_80[gband][path][rxsc] = ofst;
1216 gain->rpl_ofst_160[gband][path][0] = (s8)data;
1221 gain->rpl_ofst_160[gband][path][rxsc] = ofst;
1227 gain->rpl_ofst_160[gband][path][rxsc] = ofst;
1233 gain->rpl_ofst_160[gband][path][rxsc] = ofst;
1239 gain->rpl_ofst_160[gband][path][rxsc] = ofst;
1255 struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain.ax;
1264 gain->lna_gain_bypass[gband][path][i] = data & 0xff;
1268 gain->lna_gain_bypass[gband][path][i] = data & 0xff;
1282 struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain.ax;
1291 gain->lna_op1db[gband][path][i] = data & 0xff;
1295 gain->lna_op1db[gband][path][i] = data & 0xff;
1299 gain->tia_lna_op1db[gband][path][i] = data & 0xff;
1303 gain->tia_lna_op1db[gband][path][i] = data & 0xff;
1318 const struct rtw89_chip_info *chip = rtwdev->chip;
1319 union rtw89_phy_bb_gain_arg arg = { .addr = reg->addr };
1320 struct rtw89_efuse *efuse = &rtwdev->efuse;
1325 if (arg.path >= chip->rf_path_num)
1335 rtw89_phy_cfg_bb_gain_error(rtwdev, arg, reg->data);
1338 rtw89_phy_cfg_bb_rpl_ofst(rtwdev, arg, reg->data);
1341 rtw89_phy_cfg_bb_gain_bypass(rtwdev, arg, reg->data);
1344 rtw89_phy_cfg_bb_gain_op1db(rtwdev, arg, reg->data);
1348 if (efuse->rfe_type < 50)
1354 arg.addr, reg->data, arg.cfg_type);
1365 u16 idx = info->curr_idx % RTW89_H2C_RF_PAGE_SIZE;
1366 u8 page = info->curr_idx / RTW89_H2C_RF_PAGE_SIZE;
1370 rf_path, info->curr_idx);
1374 info->rtw89_phy_config_rf_h2c[page][idx] =
1375 cpu_to_le32((reg->addr << 20) | reg->data);
1376 info->curr_idx++;
1382 u16 remain = info->curr_idx;
1391 ret = -EINVAL;
1392 goto out;
1395 for (i = 0; i < RTW89_H2C_RF_PAGE_NUM && remain; i++, remain -= len) {
1399 goto out;
1401 out:
1402 info->curr_idx = 0;
1412 u32 addr = reg->addr;
1430 if (reg->addr == 0xfe) {
1432 } else if (reg->addr == 0xfd) {
1434 } else if (reg->addr == 0xfc) {
1436 } else if (reg->addr == 0xfb) {
1438 } else if (reg->addr == 0xfa) {
1440 } else if (reg->addr == 0xf9) {
1443 rtw89_write_rf(rtwdev, rf_path, reg->addr, 0xfffff, reg->data);
1454 rtw89_write_rf(rtwdev, rf_path, reg->addr, RFREG_MASK, reg->data);
1456 if (reg->addr < 0x100)
1477 for (i = 0; i < table->n_regs; i++) {
1478 reg = &table->regs[i];
1479 headline = get_phy_headline(reg->addr);
1490 reg = &table->regs[i];
1491 target = get_phy_target(reg->addr);
1501 reg = &table->regs[i];
1502 target = get_phy_target(reg->addr);
1511 reg = &table->regs[i];
1512 rfe_para = get_phy_cond_rfe(reg->addr);
1513 cv_para = get_phy_cond_cv(reg->addr);
1528 reg = &table->regs[i];
1529 rfe_para = get_phy_cond_rfe(reg->addr);
1530 cv_para = get_phy_cond_cv(reg->addr);
1543 return -EINVAL;
1555 enum rtw89_rf_path rf_path = table->rf_path;
1556 u8 rfe = rtwdev->efuse.rfe_type;
1557 u8 cv = rtwdev->hal.cv;
1573 cfg_target = get_phy_target(table->regs[headline_idx].addr);
1574 for (i = headline_size; i < table->n_regs; i++) {
1575 reg = &table->regs[i];
1576 cond = get_phy_cond(reg->addr);
1580 target = get_phy_target(reg->addr);
1586 reg->addr, reg->data);
1618 struct rtw89_fw_elm_info *elm_info = &rtwdev->fw.elm_info;
1619 const struct rtw89_chip_info *chip = rtwdev->chip;
1623 bb_table = elm_info->bb_tbl ? elm_info->bb_tbl : chip->bb_table;
1625 if (rtwdev->dbcc_en)
1630 bb_gain_table = elm_info->bb_gain ? elm_info->bb_gain : chip->bb_gain_table;
1633 chip->phy_def->config_bb_gain, NULL);
1648 struct rtw89_fw_elm_info *elm_info = &rtwdev->fw.elm_info;
1649 const struct rtw89_chip_info *chip = rtwdev->chip;
1658 for (path = RF_PATH_A; path < chip->rf_path_num; path++) {
1659 rf_table = elm_info->rf_radio[path] ?
1660 elm_info->rf_radio[path] : chip->rf_table[path];
1661 rf_reg_info->rf_path = rf_table->rf_path;
1665 config = rf_table->config ? rf_table->config :
1670 rf_reg_info->rf_path);
1677 const struct rtw89_chip_info *chip = rtwdev->chip;
1685 if (chip->chip_id != RTL8851B)
1687 if (chip->chip_id == RTL8852B || chip->chip_id == RTL8852BT)
1705 struct rtw89_fw_elm_info *elm_info = &rtwdev->fw.elm_info;
1706 const struct rtw89_chip_info *chip = rtwdev->chip;
1711 nctl_table = elm_info->rf_nctl ? elm_info->rf_nctl : chip->nctl_table;
1714 if (chip->nctl_post_table)
1715 rtw89_rfk_parser(rtwdev, chip->nctl_post_table);
1752 if (rtwdev->dbcc_en && phy_idx == RTW89_PHY_1)
1761 if (rtwdev->dbcc_en && phy_idx == RTW89_PHY_1)
1772 if (!rtwdev->dbcc_en)
1785 for (i = 0; i < tbl->size; i++) {
1786 reg3 = &tbl->reg3[i];
1787 rtw89_phy_write32_mask(rtwdev, reg3->addr, reg3->mask, reg3->data);
1812 switch (desc->rs) {
1814 return &head->cck[desc->idx];
1816 return &head->ofdm[desc->idx];
1818 return &head->mcs[desc->ofdma][desc->nss][desc->idx];
1820 return &head->hedcm[desc->ofdma][desc->nss][desc->idx];
1822 return &head->offset[desc->idx];
1824 rtw89_warn(rtwdev, "unrecognized byr rs: %d\n", desc->rs);
1825 return &head->trap;
1832 const struct rtw89_txpwr_byrate_cfg *cfg = tbl->data;
1833 const struct rtw89_txpwr_byrate_cfg *end = cfg + tbl->size;
1841 byr_head = &rtwdev->byr[cfg->band][0];
1842 desc.rs = cfg->rs;
1843 desc.nss = cfg->nss;
1844 data = cfg->data;
1846 for (i = 0; i < cfg->len; i++, data >>= 8) {
1847 desc.idx = cfg->shf + i;
1857 const struct rtw89_chip_info *chip = rtwdev->chip;
1859 return txpwr_rf >> (chip->txpwr_factor_rf - chip->txpwr_factor_mac);
1864 const struct rtw89_chip_info *chip = rtwdev->chip;
1866 return clamp_t(s16, dbm << chip->txpwr_factor_mac, -64, 63);
1875 dbm -= tssi_max_deviation;
1880 static s8 rtw89_phy_get_tpe_constraint(struct rtw89_dev *rtwdev, u8 band)
1882 struct rtw89_regulatory_info *regulatory = &rtwdev->regulatory;
1883 const struct rtw89_reg_6ghz_tpe *tpe = &regulatory->reg_6ghz_tpe;
1886 if (band == RTW89_BAND_6G && tpe->valid)
1887 cstr = rtw89_phy_txpwr_dbm_without_tolerance(tpe->constraint);
1892 s8 rtw89_phy_read_txpwr_byrate(struct rtw89_dev *rtwdev, u8 band, u8 bw,
1898 if (rate_desc->rs == RTW89_RS_CCK)
1899 band = RTW89_BAND_2G;
1901 byr_head = &rtwdev->byr[band][bw];
1911 return (channel_6g - 1) / 2;
1913 return (channel_6g - 3) / 2;
1915 return (channel_6g - 5) / 2;
1917 return (channel_6g - 7) / 2;
1919 return (channel_6g - 9) / 2;
1921 return (channel_6g - 11) / 2;
1923 return (channel_6g - 13) / 2;
1925 return (channel_6g - 15) / 2;
1932 static u8 rtw89_channel_to_idx(struct rtw89_dev *rtwdev, u8 band, u8 channel)
1934 if (band == RTW89_BAND_6G)
1939 return channel - 1;
1941 return (channel - 36) / 2;
1943 return ((channel - 100) / 2) + 15;
1945 return ((channel - 149) / 2) + 38;
1952 s8 rtw89_phy_read_txpwr_limit(struct rtw89_dev *rtwdev, u8 band,
1955 const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms;
1956 const struct rtw89_txpwr_rule_2ghz *rule_2ghz = &rfe_parms->rule_2ghz;
1957 const struct rtw89_txpwr_rule_5ghz *rule_5ghz = &rfe_parms->rule_5ghz;
1958 const struct rtw89_txpwr_rule_6ghz *rule_6ghz = &rfe_parms->rule_6ghz;
1959 struct rtw89_regulatory_info *regulatory = &rtwdev->regulatory;
1960 enum nl80211_band nl_band = rtw89_hw_to_nl80211_band(band);
1962 u8 ch_idx = rtw89_channel_to_idx(rtwdev, band, ch);
1963 u8 regd = rtw89_regd_get(rtwdev, band);
1964 u8 reg6 = regulatory->reg_6ghz_power;
1968 switch (band) {
1970 lmt = (*rule_2ghz->lmt)[bw][ntx][rs][bf][regd][ch_idx];
1974 lmt = (*rule_2ghz->lmt)[bw][ntx][rs][bf][RTW89_WW][ch_idx];
1977 lmt = (*rule_5ghz->lmt)[bw][ntx][rs][bf][regd][ch_idx];
1981 lmt = (*rule_5ghz->lmt)[bw][ntx][rs][bf][RTW89_WW][ch_idx];
1984 lmt = (*rule_6ghz->lmt)[bw][ntx][rs][bf][regd][reg6][ch_idx];
1988 lmt = (*rule_6ghz->lmt)[bw][ntx][rs][bf][RTW89_WW]
1993 rtw89_warn(rtwdev, "unknown band type: %d\n", band);
1999 cstr = rtw89_phy_get_tpe_constraint(rtwdev, band);
2005 #define __fill_txpwr_limit_nonbf_bf(ptr, band, bw, ntx, rs, ch) \
2010 band, \
2018 u8 band, u8 ntx, u8 ch)
2020 __fill_txpwr_limit_nonbf_bf(lmt->cck_20m, band, RTW89_CHANNEL_WIDTH_20,
2022 __fill_txpwr_limit_nonbf_bf(lmt->cck_40m, band, RTW89_CHANNEL_WIDTH_40,
2024 __fill_txpwr_limit_nonbf_bf(lmt->ofdm, band, RTW89_CHANNEL_WIDTH_20,
2026 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[0], band,
2033 u8 band, u8 ntx, u8 ch, u8 pri_ch)
2035 __fill_txpwr_limit_nonbf_bf(lmt->cck_20m, band, RTW89_CHANNEL_WIDTH_20,
2036 ntx, RTW89_RS_CCK, ch - 2);
2037 __fill_txpwr_limit_nonbf_bf(lmt->cck_40m, band, RTW89_CHANNEL_WIDTH_40,
2039 __fill_txpwr_limit_nonbf_bf(lmt->ofdm, band, RTW89_CHANNEL_WIDTH_20,
2041 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[0], band,
2043 ntx, RTW89_RS_MCS, ch - 2);
2044 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[1], band,
2047 __fill_txpwr_limit_nonbf_bf(lmt->mcs_40m[0], band,
2054 u8 band, u8 ntx, u8 ch, u8 pri_ch)
2060 __fill_txpwr_limit_nonbf_bf(lmt->ofdm, band, RTW89_CHANNEL_WIDTH_20,
2062 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[0], band,
2064 ntx, RTW89_RS_MCS, ch - 6);
2065 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[1], band,
2067 ntx, RTW89_RS_MCS, ch - 2);
2068 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[2], band,
2071 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[3], band,
2074 __fill_txpwr_limit_nonbf_bf(lmt->mcs_40m[0], band,
2076 ntx, RTW89_RS_MCS, ch - 4);
2077 __fill_txpwr_limit_nonbf_bf(lmt->mcs_40m[1], band,
2080 __fill_txpwr_limit_nonbf_bf(lmt->mcs_80m[0], band,
2084 __fill_txpwr_limit_nonbf_bf(val_0p5_n, band, RTW89_CHANNEL_WIDTH_40,
2085 ntx, RTW89_RS_MCS, ch - 4);
2086 __fill_txpwr_limit_nonbf_bf(val_0p5_p, band, RTW89_CHANNEL_WIDTH_40,
2090 lmt->mcs_40m_0p5[i] = min_t(s8, val_0p5_n[i], val_0p5_p[i]);
2095 u8 band, u8 ntx, u8 ch, u8 pri_ch)
2104 __fill_txpwr_limit_nonbf_bf(lmt->ofdm, band, RTW89_CHANNEL_WIDTH_20,
2108 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[0], band,
2110 ntx, RTW89_RS_MCS, ch - 14);
2111 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[1], band,
2113 ntx, RTW89_RS_MCS, ch - 10);
2114 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[2], band,
2116 ntx, RTW89_RS_MCS, ch - 6);
2117 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[3], band,
2119 ntx, RTW89_RS_MCS, ch - 2);
2120 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[4], band,
2123 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[5], band,
2126 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[6], band,
2129 __fill_txpwr_limit_nonbf_bf(lmt->mcs_20m[7], band,
2134 __fill_txpwr_limit_nonbf_bf(lmt->mcs_40m[0], band,
2136 ntx, RTW89_RS_MCS, ch - 12);
2137 __fill_txpwr_limit_nonbf_bf(lmt->mcs_40m[1], band,
2139 ntx, RTW89_RS_MCS, ch - 4);
2140 __fill_txpwr_limit_nonbf_bf(lmt->mcs_40m[2], band,
2143 __fill_txpwr_limit_nonbf_bf(lmt->mcs_40m[3], band,
2148 __fill_txpwr_limit_nonbf_bf(lmt->mcs_80m[0], band,
2150 ntx, RTW89_RS_MCS, ch - 8);
2151 __fill_txpwr_limit_nonbf_bf(lmt->mcs_80m[1], band,
2156 __fill_txpwr_limit_nonbf_bf(lmt->mcs_160m, band,
2161 __fill_txpwr_limit_nonbf_bf(val_0p5_n, band, RTW89_CHANNEL_WIDTH_40,
2162 ntx, RTW89_RS_MCS, ch - 4);
2163 __fill_txpwr_limit_nonbf_bf(val_0p5_p, band, RTW89_CHANNEL_WIDTH_40,
2167 lmt->mcs_40m_0p5[i] = min_t(s8, val_0p5_n[i], val_0p5_p[i]);
2170 __fill_txpwr_limit_nonbf_bf(val_2p5_n, band, RTW89_CHANNEL_WIDTH_40,
2171 ntx, RTW89_RS_MCS, ch - 8);
2172 __fill_txpwr_limit_nonbf_bf(val_2p5_p, band, RTW89_CHANNEL_WIDTH_40,
2176 lmt->mcs_40m_2p5[i] = min_t(s8, val_2p5_n[i], val_2p5_p[i]);
2185 u8 band = chan->band_type;
2186 u8 pri_ch = chan->primary_channel;
2187 u8 ch = chan->channel;
2188 u8 bw = chan->band_width;
2194 rtw89_phy_fill_txpwr_limit_20m_ax(rtwdev, lmt, band, ntx, ch);
2197 rtw89_phy_fill_txpwr_limit_40m_ax(rtwdev, lmt, band, ntx, ch,
2201 rtw89_phy_fill_txpwr_limit_80m_ax(rtwdev, lmt, band, ntx, ch,
2205 rtw89_phy_fill_txpwr_limit_160m_ax(rtwdev, lmt, band, ntx, ch,
2211 s8 rtw89_phy_read_txpwr_limit_ru(struct rtw89_dev *rtwdev, u8 band,
2214 const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms;
2215 const struct rtw89_txpwr_rule_2ghz *rule_2ghz = &rfe_parms->rule_2ghz;
2216 const struct rtw89_txpwr_rule_5ghz *rule_5ghz = &rfe_parms->rule_5ghz;
2217 const struct rtw89_txpwr_rule_6ghz *rule_6ghz = &rfe_parms->rule_6ghz;
2218 struct rtw89_regulatory_info *regulatory = &rtwdev->regulatory;
2219 enum nl80211_band nl_band = rtw89_hw_to_nl80211_band(band);
2221 u8 ch_idx = rtw89_channel_to_idx(rtwdev, band, ch);
2222 u8 regd = rtw89_regd_get(rtwdev, band);
2223 u8 reg6 = regulatory->reg_6ghz_power;
2227 switch (band) {
2229 lmt_ru = (*rule_2ghz->lmt_ru)[ru][ntx][regd][ch_idx];
2233 lmt_ru = (*rule_2ghz->lmt_ru)[ru][ntx][RTW89_WW][ch_idx];
2236 lmt_ru = (*rule_5ghz->lmt_ru)[ru][ntx][regd][ch_idx];
2240 lmt_ru = (*rule_5ghz->lmt_ru)[ru][ntx][RTW89_WW][ch_idx];
2243 lmt_ru = (*rule_6ghz->lmt_ru)[ru][ntx][regd][reg6][ch_idx];
2247 lmt_ru = (*rule_6ghz->lmt_ru)[ru][ntx][RTW89_WW]
2252 rtw89_warn(rtwdev, "unknown band type: %d\n", band);
2258 cstr = rtw89_phy_get_tpe_constraint(rtwdev, band);
2266 u8 band, u8 ntx, u8 ch)
2268 lmt_ru->ru26[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2271 lmt_ru->ru52[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2274 lmt_ru->ru106[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2282 u8 band, u8 ntx, u8 ch)
2284 lmt_ru->ru26[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2286 ntx, ch - 2);
2287 lmt_ru->ru26[1] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2290 lmt_ru->ru52[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2292 ntx, ch - 2);
2293 lmt_ru->ru52[1] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2296 lmt_ru->ru106[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2298 ntx, ch - 2);
2299 lmt_ru->ru106[1] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2307 u8 band, u8 ntx, u8 ch)
2309 lmt_ru->ru26[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2311 ntx, ch - 6);
2312 lmt_ru->ru26[1] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2314 ntx, ch - 2);
2315 lmt_ru->ru26[2] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2318 lmt_ru->ru26[3] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2321 lmt_ru->ru52[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2323 ntx, ch - 6);
2324 lmt_ru->ru52[1] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2326 ntx, ch - 2);
2327 lmt_ru->ru52[2] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2330 lmt_ru->ru52[3] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2333 lmt_ru->ru106[0] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2335 ntx, ch - 6);
2336 lmt_ru->ru106[1] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2338 ntx, ch - 2);
2339 lmt_ru->ru106[2] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2342 lmt_ru->ru106[3] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2350 u8 band, u8 ntx, u8 ch)
2352 static const int ofst[] = { -14, -10, -6, -2, 2, 6, 10, 14 };
2361 lmt_ru->ru26[i] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2365 lmt_ru->ru52[i] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2369 lmt_ru->ru106[i] = rtw89_phy_read_txpwr_limit_ru(rtwdev, band,
2382 u8 band = chan->band_type;
2383 u8 ch = chan->channel;
2384 u8 bw = chan->band_width;
2390 rtw89_phy_fill_txpwr_limit_ru_20m_ax(rtwdev, lmt_ru, band, ntx,
2394 rtw89_phy_fill_txpwr_limit_ru_40m_ax(rtwdev, lmt_ru, band, ntx,
2398 rtw89_phy_fill_txpwr_limit_ru_80m_ax(rtwdev, lmt_ru, band, ntx,
2402 rtw89_phy_fill_txpwr_limit_ru_160m_ax(rtwdev, lmt_ru, band, ntx,
2412 u8 max_nss_num = rtwdev->chip->rf_path_num;
2420 u8 band = chan->band_type;
2421 u8 ch = chan->channel;
2445 band, 0,
2473 u8 band = chan->band_type;
2480 v[desc.idx] = rtw89_phy_read_txpwr_byrate(rtwdev, band, 0, &desc);
2497 u8 max_ntx_num = rtwdev->chip->rf_path_num;
2499 u8 ch = chan->channel;
2500 u8 bw = chan->band_width;
2532 u8 max_ntx_num = rtwdev->chip->rf_path_num;
2534 u8 ch = chan->channel;
2535 u8 bw = chan->band_width;
2571 struct rtw89_dev *rtwdev = ra_data->rtwdev;
2572 struct rtw89_sta *rtwsta = (struct rtw89_sta *)sta->drv_priv;
2574 (const struct rtw89_c2h_ra_rpt *)ra_data->c2h->data;
2575 struct rtw89_ra_report *ra_report = &rtwsta->ra_report;
2576 const struct rtw89_chip_info *chip = rtwdev->chip;
2577 bool format_v1 = chip->chip_gen == RTW89_CHIP_BE;
2584 mac_id = le32_get_bits(c2h->w2, RTW89_C2H_RA_RPT_W2_MACID);
2585 if (mac_id != rtwsta->mac_id)
2588 rate = le32_get_bits(c2h->w3, RTW89_C2H_RA_RPT_W3_MCSNSS);
2589 bw = le32_get_bits(c2h->w3, RTW89_C2H_RA_RPT_W3_BW);
2590 giltf = le32_get_bits(c2h->w3, RTW89_C2H_RA_RPT_W3_GILTF);
2591 mode = le32_get_bits(c2h->w3, RTW89_C2H_RA_RPT_W3_MD_SEL);
2594 t = le32_get_bits(c2h->w2, RTW89_C2H_RA_RPT_W2_MCSNSS_B7);
2596 t = le32_get_bits(c2h->w3, RTW89_C2H_RA_RPT_W3_BW_B2);
2598 t = le32_get_bits(c2h->w3, RTW89_C2H_RA_RPT_W3_MD_SEL_B2);
2608 memset(&ra_report->txrate, 0, sizeof(ra_report->txrate));
2612 ra_report->txrate.legacy = legacy_bitrate;
2615 ra_report->txrate.flags |= RATE_INFO_FLAGS_MCS;
2616 if (RTW89_CHK_FW_FEATURE(OLD_HT_RA_FORMAT, &rtwdev->fw))
2621 ra_report->txrate.mcs = rate;
2623 ra_report->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI;
2624 mcs = ra_report->txrate.mcs & 0x07;
2627 ra_report->txrate.flags |= RATE_INFO_FLAGS_VHT_MCS;
2628 ra_report->txrate.mcs = format_v1 ?
2631 ra_report->txrate.nss = format_v1 ?
2635 ra_report->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI;
2636 mcs = ra_report->txrate.mcs;
2639 ra_report->txrate.flags |= RATE_INFO_FLAGS_HE_MCS;
2640 ra_report->txrate.mcs = format_v1 ?
2643 ra_report->txrate.nss = format_v1 ?
2647 ra_report->txrate.he_gi = NL80211_RATE_INFO_HE_GI_0_8;
2649 ra_report->txrate.he_gi = NL80211_RATE_INFO_HE_GI_1_6;
2651 ra_report->txrate.he_gi = NL80211_RATE_INFO_HE_GI_3_2;
2652 mcs = ra_report->txrate.mcs;
2655 ra_report->txrate.flags |= RATE_INFO_FLAGS_EHT_MCS;
2656 ra_report->txrate.mcs = u8_get_bits(rate, RTW89_RA_RATE_MASK_MCS_V1);
2657 ra_report->txrate.nss = u8_get_bits(rate, RTW89_RA_RATE_MASK_NSS_V1) + 1;
2659 ra_report->txrate.eht_gi = NL80211_RATE_INFO_EHT_GI_0_8;
2661 ra_report->txrate.eht_gi = NL80211_RATE_INFO_EHT_GI_1_6;
2663 ra_report->txrate.eht_gi = NL80211_RATE_INFO_EHT_GI_3_2;
2664 mcs = ra_report->txrate.mcs;
2668 ra_report->txrate.bw = rtw89_hw_to_rate_info_bw(bw);
2669 ra_report->bit_rate = cfg80211_calculate_bitrate(&ra_report->txrate);
2670 ra_report->hw_rate = format_v1 ?
2675 ra_report->might_fallback_legacy = mcs <= 2;
2676 sta->deflink.agg.max_rc_amsdu_len = get_max_amsdu_len(rtwdev, ra_report);
2677 rtwsta->max_agg_wait = sta->deflink.agg.max_rc_amsdu_len / 1500 - 1;
2687 ieee80211_iterate_stations_atomic(rtwdev->hw,
2712 goto out;
2716 "DPK ver:%d idx:%2ph band:%2ph bw:%2ph ch:%2ph path:%2ph\n",
2717 dpk->ver, dpk->idx, dpk->band, dpk->bw, dpk->ch, dpk->path_ok);
2720 dpk->txagc, dpk->ther, dpk->gs, dpk->dc_i, dpk->dc_q);
2723 dpk->corr_val, dpk->corr_idx, dpk->is_timeout, dpk->rxbb_ov);
2727 goto out;
2732 dack->fwdack_ver, dack->fwdack_rpt_ver);
2734 dack->cdack_d[0][0][0], dack->cdack_d[0][0][1]);
2736 dack->cdack_d[0][1][0], dack->cdack_d[0][1][1]);
2738 dack->cdack_d[1][0][0], dack->cdack_d[1][0][1]);
2740 dack->cdack_d[1][1][0], dack->cdack_d[1][1][1]);
2743 dack->addck2_d[0][0][0], dack->addck2_d[0][0][1]);
2745 dack->addck2_d[0][1][0], dack->addck2_d[0][1][1]);
2747 dack->addck2_d[1][0][0], dack->addck2_d[1][0][1]);
2749 dack->addck2_d[1][1][0], dack->addck2_d[1][1][1]);
2752 dack->adgaink_d[0][0], dack->adgaink_d[0][1]);
2754 dack->adgaink_d[1][0], dack->adgaink_d[1][1]);
2757 dack->dadck_d[0][0], dack->dadck_d[0][1]);
2759 dack->dadck_d[1][0], dack->dadck_d[1][1]);
2762 dack->biask_d[0][0]);
2764 dack->biask_d[1][0]);
2767 (int)sizeof(dack->msbk_d[0][0]), dack->msbk_d[0][0]);
2769 (int)sizeof(dack->msbk_d[0][1]), dack->msbk_d[0][1]);
2771 (int)sizeof(dack->msbk_d[1][0]), dack->msbk_d[1][0]);
2773 (int)sizeof(dack->msbk_d[1][1]), dack->msbk_d[1][1]);
2777 goto out;
2781 "RXDCK ver:%d band:%2ph bw:%2ph ch:%2ph to:%2ph\n",
2782 rxdck->ver, rxdck->band, rxdck->bw, rxdck->ch,
2783 rxdck->timeout);
2787 goto out;
2792 le32_to_cpu(txgapk->r0x8010[0]),
2793 le32_to_cpu(txgapk->r0x8010[1]));
2795 txgapk->chk_id);
2797 le32_to_cpu(txgapk->chk_cnt));
2799 txgapk->ver);
2801 txgapk->rsv1);
2804 (int)sizeof(txgapk->track_d[0]), txgapk->track_d[0]);
2806 (int)sizeof(txgapk->power_d[0]), txgapk->power_d[0]);
2808 (int)sizeof(txgapk->track_d[1]), txgapk->track_d[1]);
2810 (int)sizeof(txgapk->power_d[1]), txgapk->power_d[1]);
2816 out:
2825 struct rtw89_fw_elm_info *elm_info = &rtwdev->fw.elm_info;
2834 if (!elm_info->rfk_log_fmt)
2837 elm = elm_info->rfk_log_fmt->elm[func];
2838 fmt_idx = le32_to_cpu(log->fmt_idx);
2839 if (!elm || fmt_idx >= elm->u.rfk_log_fmt.nr)
2842 offset = le16_to_cpu(elm->u.rfk_log_fmt.offset[fmt_idx]);
2846 rtw89_debug(rtwdev, RTW89_DBG_RFK, &elm->u.common.contents[offset],
2847 le32_to_cpu(log->arg[0]), le32_to_cpu(log->arg[1]),
2848 le32_to_cpu(log->arg[2]), le32_to_cpu(log->arg[3]));
2857 struct rtw89_c2h_hdr *c2h_hdr = (struct rtw89_c2h_hdr *)c2h->data;
2872 len -= sizeof(*c2h_hdr);
2880 content_len = le16_to_cpu(log_hdr->len);
2886 switch (log_hdr->type) {
2889 log_hdr->content, content_len);
2894 rfk_name, content_len, log_hdr->content);
2898 log_hdr->content, content_len);
2905 len -= chunk_len;
2965 struct rtw89_rfk_wait_info *wait = &rtwdev->rfk_wait;
2967 wait->state = RTW89_RFK_STATE_START;
2968 wait->start_time = ktime_get();
2969 reinit_completion(&wait->completion);
2976 struct rtw89_rfk_wait_info *wait = &rtwdev->rfk_wait;
2980 if (test_bit(RTW89_FLAG_SER_HANDLING, rtwdev->flags)) {
2982 goto out;
2985 time_left = wait_for_completion_timeout(&wait->completion,
2989 return -ETIMEDOUT;
2990 } else if (wait->state != RTW89_RFK_STATE_OK) {
2992 rfk_name, wait->state);
2993 return -EFAULT;
2996 out:
2999 rfk_name, ktime_ms_delta(ktime_get(), wait->start_time));
3012 (const struct rtw89_c2h_rfk_report *)c2h->data;
3013 struct rtw89_rfk_wait_info *wait = &rtwdev->rfk_wait;
3015 wait->state = report->state;
3016 wait->version = report->version;
3018 complete(&wait->completion);
3022 wait->state, wait->version,
3023 (int)(len - sizeof(report->hdr)), &report->state);
3512 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3513 enum rtw89_band band = chan->band_type;
3514 u8 ch = chan->channel;
3522 if (band == RTW89_BAND_6G)
3534 de_1st = tssi_info->tssi_mcs[path][gidx_1st];
3535 de_2nd = tssi_info->tssi_mcs[path][gidx_2nd];
3542 val = tssi_info->tssi_mcs[path][gidx];
3560 de_1st = tssi_info->tssi_6g_mcs[path][gidx_1st];
3561 de_2nd = tssi_info->tssi_6g_mcs[path][gidx_2nd];
3568 val = tssi_info->tssi_6g_mcs[path][gidx];
3582 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3583 enum rtw89_band band = chan->band_type;
3584 u8 ch = chan->channel;
3592 if (band == RTW89_BAND_6G)
3604 tde_1st = tssi_info->tssi_trim[path][tgidx_1st];
3605 tde_2nd = tssi_info->tssi_trim[path][tgidx_2nd];
3612 val = tssi_info->tssi_trim[path][tgidx];
3631 tde_1st = tssi_info->tssi_trim_6g[path][tgidx_1st];
3632 tde_2nd = tssi_info->tssi_trim_6g[path][tgidx_2nd];
3639 val = tssi_info->tssi_trim_6g[path][tgidx];
3654 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3655 u8 ch = chan->channel;
3668 h2c->curr_tssi_trim_de[i] = trim_de;
3674 cck_de = tssi_info->tssi_cck[i][gidx];
3677 h2c->curr_tssi_cck_de[i] = 0x0;
3678 h2c->curr_tssi_cck_de_20m[i] = val;
3679 h2c->curr_tssi_cck_de_40m[i] = val;
3680 h2c->curr_tssi_efuse_cck_de[i] = cck_de;
3688 h2c->curr_tssi_ofdm_de[i] = 0x0;
3689 h2c->curr_tssi_ofdm_de_20m[i] = val;
3690 h2c->curr_tssi_ofdm_de_40m[i] = val;
3691 h2c->curr_tssi_ofdm_de_80m[i] = val;
3692 h2c->curr_tssi_ofdm_de_160m[i] = val;
3693 h2c->curr_tssi_ofdm_de_320m[i] = val;
3694 h2c->curr_tssi_efuse_ofdm_de[i] = ofdm_de;
3706 struct rtw89_fw_txpwr_track_cfg *trk = rtwdev->fw.elm_info.txpwr_trk;
3707 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3710 u8 subband = chan->subband_type;
3719 thm_up[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_2GA_P][0];
3720 thm_down[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_2GA_N][0];
3721 thm_up[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_2GB_P][0];
3722 thm_down[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_2GB_N][0];
3725 thm_up[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GA_P][0];
3726 thm_down[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GA_N][0];
3727 thm_up[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GB_P][0];
3728 thm_down[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GB_N][0];
3731 thm_up[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GA_P][1];
3732 thm_down[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GA_N][1];
3733 thm_up[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GB_P][1];
3734 thm_down[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GB_N][1];
3737 thm_up[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GA_P][2];
3738 thm_down[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GA_N][2];
3739 thm_up[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GB_P][2];
3740 thm_down[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_5GB_N][2];
3744 thm_up[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GA_P][0];
3745 thm_down[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GA_N][0];
3746 thm_up[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GB_P][0];
3747 thm_down[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GB_N][0];
3751 thm_up[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GA_P][1];
3752 thm_down[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GA_N][1];
3753 thm_up[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GB_P][1];
3754 thm_down[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GB_N][1];
3758 thm_up[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GA_P][2];
3759 thm_down[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GA_N][2];
3760 thm_up[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GB_P][2];
3761 thm_down[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GB_N][2];
3765 thm_up[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GA_P][3];
3766 thm_down[RF_PATH_A] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GA_N][3];
3767 thm_up[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GB_P][3];
3768 thm_down[RF_PATH_B] = trk->delta[RTW89_FW_TXPWR_TRK_TYPE_6GB_N][3];
3776 thermal = tssi_info->thermal[path];
3781 h2c->pg_thermal[path] = 0x38;
3782 memset(h2c->ftable[path], 0, sizeof(h2c->ftable[path]));
3786 h2c->pg_thermal[path] = thermal;
3792 thm_up[path][DELTA_SWINGIDX_SIZE - 1];
3795 for (j = 127; j >= 64; j--)
3797 -thm_down[path][i++] :
3798 -thm_down[path][DELTA_SWINGIDX_SIZE - 1];
3801 h2c->ftable[path][i + 0] = thm_ofst[i + 3];
3802 h2c->ftable[path][i + 1] = thm_ofst[i + 2];
3803 h2c->ftable[path][i + 2] = thm_ofst[i + 1];
3804 h2c->ftable[path][i + 3] = thm_ofst[i + 0];
3816 const struct rtw89_xtal_info *xtal = rtwdev->chip->xtal_info;
3820 reg_mask = xtal->sc_xo_mask;
3822 reg_mask = xtal->sc_xi_mask;
3824 return (u8)rtw89_read32_mask(rtwdev, xtal->xcap_reg, reg_mask);
3830 const struct rtw89_xtal_info *xtal = rtwdev->chip->xtal_info;
3834 reg_mask = xtal->sc_xo_mask;
3836 reg_mask = xtal->sc_xi_mask;
3838 rtw89_write32_mask(rtwdev, xtal->xcap_reg, reg_mask, val);
3844 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
3845 const struct rtw89_chip_info *chip = rtwdev->chip;
3848 if (!force && cfo->crystal_cap == crystal_cap)
3851 if (chip->chip_id == RTL8852A || chip->chip_id == RTL8851B) {
3864 cfo->crystal_cap = sc_xi_val;
3865 cfo->x_cap_ofst = (s8)((int)cfo->crystal_cap - cfo->def_x_cap);
3870 cfo->x_cap_ofst);
3876 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
3879 cfo->def_x_cap = cfo->crystal_cap_default & B_AX_XTAL_SC_MASK;
3880 cfo->is_adjust = false;
3881 if (cfo->crystal_cap == cfo->def_x_cap)
3883 cap = cfo->crystal_cap;
3884 cap += (cap > cfo->def_x_cap ? -1 : 1);
3887 "(0x%x) approach to dflt_val=(0x%x)\n", cfo->crystal_cap,
3888 cfo->def_x_cap);
3893 const struct rtw89_reg_def *dcfo_comp = rtwdev->chip->dcfo_comp;
3894 bool is_linked = rtwdev->total_sta_assoc > 0;
3899 if (rtwdev->chip->chip_id == RTL8922A)
3911 sign = curr_cfo > 0 ? 1 : -1;
3914 if (rtwdev->chip->chip_id == RTL8852A && rtwdev->hal.cv == CHIP_CBV)
3915 cfo_avg_312 = -cfo_avg_312;
3916 rtw89_phy_set_phy_regs(rtwdev, dcfo_comp->addr, dcfo_comp->mask,
3922 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
3923 const struct rtw89_chip_info *chip = rtwdev->chip;
3924 const struct rtw89_cfo_regs *cfo = phy->cfo;
3926 rtw89_phy_set_phy_regs(rtwdev, cfo->comp_seg0, cfo->valid_0_mask, 1);
3927 rtw89_phy_set_phy_regs(rtwdev, cfo->comp, cfo->weighting_mask, 8);
3929 if (chip->chip_gen == RTW89_CHIP_AX) {
3930 if (chip->cfo_hw_comp) {
3943 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
3944 struct rtw89_efuse *efuse = &rtwdev->efuse;
3946 cfo->crystal_cap_default = efuse->xtal_cap & B_AX_XTAL_SC_MASK;
3947 cfo->crystal_cap = cfo->crystal_cap_default;
3948 cfo->def_x_cap = cfo->crystal_cap;
3949 cfo->x_cap_ub = min_t(int, cfo->def_x_cap + CFO_BOUND, 0x7f);
3950 cfo->x_cap_lb = max_t(int, cfo->def_x_cap - CFO_BOUND, 0x1);
3951 cfo->is_adjust = false;
3952 cfo->divergence_lock_en = false;
3953 cfo->x_cap_ofst = 0;
3954 cfo->lock_cnt = 0;
3955 cfo->rtw89_multi_cfo_mode = RTW89_TP_BASED_AVG_MODE;
3956 cfo->apply_compensation = false;
3957 cfo->residual_cfo_acc = 0;
3959 cfo->crystal_cap_default);
3960 rtw89_phy_cfo_set_crystal_cap(rtwdev, cfo->crystal_cap_default, true);
3962 cfo->cfo_timer_ms = 2000;
3963 cfo->cfo_trig_by_timer_en = false;
3964 cfo->phy_cfo_trk_cnt = 0;
3965 cfo->phy_cfo_status = RTW89_PHY_DCFO_STATE_NORMAL;
3966 cfo->cfo_ul_ofdma_acc_mode = RTW89_CFO_UL_OFDMA_ACC_ENABLE;
3972 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
3973 s8 crystal_cap = cfo->crystal_cap;
3981 if (!cfo->is_adjust) {
3983 cfo->is_adjust = true;
3986 cfo->is_adjust = false;
3988 if (!cfo->is_adjust) {
3992 sign = curr_cfo > 0 ? 1 : -1;
4006 cfo->crystal_cap, cfo->def_x_cap);
4011 const struct rtw89_chip_info *chip = rtwdev->chip;
4012 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
4018 if (rtwdev->total_sta_assoc != 1)
4022 if (cfo->cfo_cnt[i] == 0)
4024 cfo_khz_all += cfo->cfo_tail[i];
4025 cfo_cnt_all += cfo->cfo_cnt[i];
4027 cfo->pre_cfo_avg[i] = cfo->cfo_avg[i];
4028 cfo->dcfo_avg = phy_div(cfo_khz_all << chip->dcfo_comp_sft,
4041 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
4042 struct rtw89_traffic_stats *stats = &rtwdev->stats;
4057 if (cfo->rtw89_multi_cfo_mode == RTW89_PKT_BASED_AVG_MODE) {
4060 if (cfo->cfo_cnt[i] == 0)
4062 cfo_khz_all += cfo->cfo_tail[i];
4063 cfo_cnt_all += cfo->cfo_cnt[i];
4070 } else if (cfo->rtw89_multi_cfo_mode == RTW89_ENTRY_BASED_AVG_MODE) {
4073 if (cfo->cfo_cnt[i] == 0)
4075 cfo->cfo_avg[i] = phy_div(cfo->cfo_tail[i],
4076 (s32)cfo->cfo_cnt[i]);
4077 cfo_khz_all += cfo->cfo_avg[i];
4080 cfo->cfo_avg[i]);
4082 sta_cnt = rtwdev->total_sta_assoc;
4088 } else if (cfo->rtw89_multi_cfo_mode == RTW89_TP_BASED_AVG_MODE) {
4090 cfo_tol = cfo->sta_cfo_tolerance;
4093 if (cfo->cfo_cnt[i] != 0) {
4094 cfo->cfo_avg[i] = phy_div(cfo->cfo_tail[i],
4095 (s32)cfo->cfo_cnt[i]);
4098 cfo->cfo_avg[i] = cfo->pre_cfo_avg[i];
4100 max_cfo_lb = max(cfo->cfo_avg[i] - cfo_tol, max_cfo_lb);
4101 min_cfo_ub = min(cfo->cfo_avg[i] + cfo_tol, min_cfo_ub);
4102 cfo_khz_all += cfo->cfo_avg[i];
4106 i, cfo->cfo_avg[i]);
4107 if (sta_cnt >= rtwdev->total_sta_assoc)
4110 tp_all = stats->rx_throughput; /* need tp for each entry */
4125 min_cfo_ub - max_cfo_lb);
4129 "No intersection of cfo tolerance windows\n");
4133 cfo->pre_cfo_avg[i] = cfo->cfo_avg[i];
4141 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
4143 memset(&cfo->cfo_tail, 0, sizeof(cfo->cfo_tail));
4144 memset(&cfo->cfo_cnt, 0, sizeof(cfo->cfo_cnt));
4145 cfo->packet_count = 0;
4146 cfo->packet_count_pre = 0;
4147 cfo->cfo_avg_pre = 0;
4152 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
4155 u8 pre_x_cap = cfo->crystal_cap;
4156 u8 dcfo_comp_sft = rtwdev->chip->dcfo_comp_sft;
4158 cfo->dcfo_avg = 0;
4160 rtwdev->total_sta_assoc);
4161 if (rtwdev->total_sta_assoc == 0) {
4165 if (cfo->packet_count == 0) {
4169 if (cfo->packet_count == cfo->packet_count_pre) {
4173 if (rtwdev->total_sta_assoc == 1)
4177 if (cfo->divergence_lock_en) {
4178 cfo->lock_cnt++;
4179 if (cfo->lock_cnt > CFO_PERIOD_CNT) {
4180 cfo->divergence_lock_en = false;
4181 cfo->lock_cnt = 0;
4187 if (cfo->crystal_cap >= cfo->x_cap_ub ||
4188 cfo->crystal_cap <= cfo->x_cap_lb) {
4189 cfo->divergence_lock_en = true;
4195 cfo->cfo_avg_pre = new_cfo;
4196 cfo->dcfo_avg_pre = cfo->dcfo_avg;
4197 x_cap_update = cfo->crystal_cap != pre_x_cap;
4199 rtw89_debug(rtwdev, RTW89_DBG_CFO, "Xcap: D:%x C:%x->%x, ofst=%d\n",
4200 cfo->def_x_cap, pre_x_cap, cfo->crystal_cap,
4201 cfo->x_cap_ofst);
4203 if (cfo->dcfo_avg > 0)
4204 cfo->dcfo_avg -= CFO_SW_COMP_FINE_TUNE << dcfo_comp_sft;
4206 cfo->dcfo_avg += CFO_SW_COMP_FINE_TUNE << dcfo_comp_sft;
4208 rtw89_dcfo_comp(rtwdev, cfo->dcfo_avg);
4216 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
4218 mutex_lock(&rtwdev->mutex);
4219 if (!cfo->cfo_trig_by_timer_en)
4220 goto out;
4223 ieee80211_queue_delayed_work(rtwdev->hw, &rtwdev->cfo_track_work,
4224 msecs_to_jiffies(cfo->cfo_timer_ms));
4225 out:
4226 mutex_unlock(&rtwdev->mutex);
4231 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
4233 ieee80211_queue_delayed_work(rtwdev->hw, &rtwdev->cfo_track_work,
4234 msecs_to_jiffies(cfo->cfo_timer_ms));
4239 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
4240 struct rtw89_traffic_stats *stats = &rtwdev->stats;
4243 if (stats->rx_tf_periodic > CFO_TF_CNT_TH)
4245 if (cfo->cfo_ul_ofdma_acc_mode == RTW89_CFO_UL_OFDMA_ACC_ENABLE &&
4249 switch (cfo->phy_cfo_status) {
4251 if (stats->tx_throughput >= CFO_TP_UPPER) {
4252 cfo->phy_cfo_status = RTW89_PHY_DCFO_STATE_ENHANCE;
4253 cfo->cfo_trig_by_timer_en = true;
4254 cfo->cfo_timer_ms = CFO_COMP_PERIOD;
4259 if (stats->tx_throughput <= CFO_TP_LOWER)
4260 cfo->phy_cfo_status = RTW89_PHY_DCFO_STATE_NORMAL;
4262 cfo->phy_cfo_trk_cnt >= CFO_PERIOD_CNT)
4263 cfo->phy_cfo_status = RTW89_PHY_DCFO_STATE_HOLD;
4265 cfo->phy_cfo_trk_cnt++;
4267 if (cfo->phy_cfo_status == RTW89_PHY_DCFO_STATE_NORMAL) {
4268 cfo->phy_cfo_trk_cnt = 0;
4269 cfo->cfo_trig_by_timer_en = false;
4273 if (stats->tx_throughput <= CFO_TP_LOWER) {
4274 cfo->phy_cfo_status = RTW89_PHY_DCFO_STATE_NORMAL;
4275 cfo->phy_cfo_trk_cnt = 0;
4276 cfo->cfo_trig_by_timer_en = false;
4278 cfo->phy_cfo_trk_cnt++;
4282 cfo->phy_cfo_status = RTW89_PHY_DCFO_STATE_NORMAL;
4283 cfo->phy_cfo_trk_cnt = 0;
4288 stats->tx_throughput, cfo->phy_cfo_status,
4289 cfo->cfo_trig_by_timer_en, cfo->phy_cfo_trk_cnt,
4290 ewma_thermal_read(&rtwdev->phystat.avg_thermal[0]));
4291 if (cfo->cfo_trig_by_timer_en)
4299 struct rtw89_cfo_tracking_info *cfo = &rtwdev->cfo_tracking;
4300 u8 macid = phy_ppdu->mac_id;
4303 rtw89_warn(rtwdev, "mac_id %d is out of range\n", macid);
4307 cfo->cfo_tail[macid] += cfo_val;
4308 cfo->cfo_cnt[macid]++;
4309 cfo->packet_count++;
4314 const struct rtw89_chip_info *chip = rtwdev->chip;
4316 rtwvif->sub_entity_idx);
4317 struct rtw89_phy_ul_tb_info *ul_tb_info = &rtwdev->ul_tb_info;
4319 if (!chip->ul_tb_waveform_ctrl)
4322 rtwvif->def_tri_idx =
4325 if (chip->chip_id == RTL8852B && rtwdev->hal.cv > CHIP_CBV)
4326 rtwvif->dyn_tb_bedge_en = false;
4327 else if (chan->band_type >= RTW89_BAND_5G &&
4328 chan->band_width >= RTW89_CHANNEL_WIDTH_40)
4329 rtwvif->dyn_tb_bedge_en = true;
4331 rtwvif->dyn_tb_bedge_en = false;
4335 ul_tb_info->def_if_bandedge, rtwvif->def_tri_idx);
4338 rtwvif->dyn_tb_bedge_en, ul_tb_info->dyn_tb_tri_en);
4369 if (!rtwdev->chip->ul_tb_pwr_diff)
4372 if (rtwvif->pwr_diff_en == rtwvif->pre_pwr_diff_en) {
4373 rtwvif->pwr_diff_en = false;
4377 rtwvif->pre_pwr_diff_en = rtwvif->pwr_diff_en;
4378 param = &table[rtwvif->pwr_diff_en];
4381 param->q_00);
4383 param->q_11);
4385 B_CUSTOMIZE_Q_MATRIX_EN, param->q_matrix_en);
4387 reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PWR_UL_TB_1T, rtwvif->mac_idx);
4389 param->ultb_1t_norm_160);
4391 reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PWR_UL_TB_2T, rtwvif->mac_idx);
4393 param->ultb_2t_norm_160);
4395 reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PATH_COM1, rtwvif->mac_idx);
4397 param->com1_norm_1sts);
4399 reg = rtw89_mac_reg_by_idx(rtwdev, R_AX_PATH_COM2, rtwvif->mac_idx);
4401 param->com2_resp_1sts_path);
4409 struct rtw89_traffic_stats *stats = &rtwdev->stats;
4412 if (rtwvif->wifi_role != RTW89_WIFI_ROLE_STATION)
4415 if (!vif->cfg.assoc)
4418 if (rtwdev->chip->ul_tb_waveform_ctrl) {
4419 if (stats->rx_tf_periodic > UL_TB_TF_CNT_L2H_TH)
4420 ul_tb_data->high_tf_client = true;
4421 else if (stats->rx_tf_periodic < UL_TB_TF_CNT_H2L_TH)
4422 ul_tb_data->low_tf_client = true;
4424 ul_tb_data->valid = true;
4425 ul_tb_data->def_tri_idx = rtwvif->def_tri_idx;
4426 ul_tb_data->dyn_tb_bedge_en = rtwvif->dyn_tb_bedge_en;
4435 struct rtw89_phy_ul_tb_info *ul_tb_info = &rtwdev->ul_tb_info;
4437 if (!rtwdev->chip->ul_tb_waveform_ctrl)
4440 if (ul_tb_data->dyn_tb_bedge_en) {
4441 if (ul_tb_data->high_tf_client) {
4445 } else if (ul_tb_data->low_tf_client) {
4447 ul_tb_info->def_if_bandedge);
4450 ul_tb_info->def_if_bandedge);
4454 if (ul_tb_info->dyn_tb_tri_en) {
4455 if (ul_tb_data->high_tf_client) {
4460 } else if (ul_tb_data->low_tf_client) {
4463 ul_tb_data->def_tri_idx);
4466 ul_tb_data->def_tri_idx);
4473 const struct rtw89_chip_info *chip = rtwdev->chip;
4477 if (!chip->ul_tb_waveform_ctrl && !chip->ul_tb_pwr_diff)
4480 if (rtwdev->total_sta_assoc != 1)
4494 const struct rtw89_chip_info *chip = rtwdev->chip;
4495 struct rtw89_phy_ul_tb_info *ul_tb_info = &rtwdev->ul_tb_info;
4497 if (!chip->ul_tb_waveform_ctrl)
4500 ul_tb_info->dyn_tb_tri_en = true;
4501 ul_tb_info->def_if_bandedge =
4508 ewma_rssi_init(&antdiv_sts->cck_rssi_avg);
4509 ewma_rssi_init(&antdiv_sts->ofdm_rssi_avg);
4510 ewma_rssi_init(&antdiv_sts->non_legacy_rssi_avg);
4511 antdiv_sts->pkt_cnt_cck = 0;
4512 antdiv_sts->pkt_cnt_ofdm = 0;
4513 antdiv_sts->pkt_cnt_non_legacy = 0;
4514 antdiv_sts->evm = 0;
4521 if (rtw89_get_data_rate_mode(rtwdev, phy_ppdu->rate) == DATA_RATE_MODE_NON_HT) {
4522 if (phy_ppdu->rate < RTW89_HW_RATE_OFDM6) {
4523 ewma_rssi_add(&stats->cck_rssi_avg, phy_ppdu->rssi_avg);
4524 stats->pkt_cnt_cck++;
4526 ewma_rssi_add(&stats->ofdm_rssi_avg, phy_ppdu->rssi_avg);
4527 stats->pkt_cnt_ofdm++;
4528 stats->evm += phy_ppdu->ofdm.evm_min;
4531 ewma_rssi_add(&stats->non_legacy_rssi_avg, phy_ppdu->rssi_avg);
4532 stats->pkt_cnt_non_legacy++;
4533 stats->evm += phy_ppdu->ofdm.evm_min;
4539 if (stats->pkt_cnt_non_legacy >= stats->pkt_cnt_cck &&
4540 stats->pkt_cnt_non_legacy >= stats->pkt_cnt_ofdm)
4541 return ewma_rssi_read(&stats->non_legacy_rssi_avg);
4542 else if (stats->pkt_cnt_ofdm >= stats->pkt_cnt_cck &&
4543 stats->pkt_cnt_ofdm >= stats->pkt_cnt_non_legacy)
4544 return ewma_rssi_read(&stats->ofdm_rssi_avg);
4546 return ewma_rssi_read(&stats->cck_rssi_avg);
4551 return phy_div(stats->evm, stats->pkt_cnt_non_legacy + stats->pkt_cnt_ofdm);
4557 struct rtw89_antdiv_info *antdiv = &rtwdev->antdiv;
4558 struct rtw89_hal *hal = &rtwdev->hal;
4560 if (!hal->ant_diversity || hal->ant_diversity_fixed)
4563 rtw89_phy_antdiv_sts_instance_add(rtwdev, phy_ppdu, &antdiv->target_stats);
4565 if (!antdiv->get_stats)
4568 if (hal->antenna_rx == RF_A)
4569 rtw89_phy_antdiv_sts_instance_add(rtwdev, phy_ppdu, &antdiv->main_stats);
4570 else if (hal->antenna_rx == RF_B)
4571 rtw89_phy_antdiv_sts_instance_add(rtwdev, phy_ppdu, &antdiv->aux_stats);
4604 struct rtw89_antdiv_info *antdiv = &rtwdev->antdiv;
4606 rtw89_phy_antdiv_sts_instance_reset(&antdiv->target_stats);
4607 rtw89_phy_antdiv_sts_instance_reset(&antdiv->main_stats);
4608 rtw89_phy_antdiv_sts_instance_reset(&antdiv->aux_stats);
4613 struct rtw89_antdiv_info *antdiv = &rtwdev->antdiv;
4614 struct rtw89_hal *hal = &rtwdev->hal;
4616 if (!hal->ant_diversity)
4619 antdiv->get_stats = false;
4620 antdiv->rssi_pre = 0;
4627 struct rtw89_phy_stat *phystat = &rtwdev->phystat;
4631 for (i = 0; i < rtwdev->chip->rf_path_num; i++) {
4634 ewma_thermal_add(&phystat->avg_thermal[i], th);
4638 ewma_thermal_read(&phystat->avg_thermal[i]));
4651 struct rtw89_sta *rtwsta = (struct rtw89_sta *)sta->drv_priv;
4654 struct rtw89_phy_ch_info *ch_info = rssi_data->ch_info;
4657 rssi_curr = ewma_rssi_read(&rtwsta->avg_rssi);
4659 if (rssi_curr < ch_info->rssi_min) {
4660 ch_info->rssi_min = rssi_curr;
4661 ch_info->rssi_min_macid = rtwsta->mac_id;
4664 if (rtwsta->prev_rssi == 0) {
4665 rtwsta->prev_rssi = rssi_curr;
4666 } else if (abs((int)rtwsta->prev_rssi - (int)rssi_curr) > (3 << RSSI_FACTOR)) {
4667 rtwsta->prev_rssi = rssi_curr;
4668 rssi_data->rssi_changed = true;
4677 rssi_data.ch_info = &rtwdev->ch_info;
4678 rssi_data.ch_info->rssi_min = U8_MAX;
4679 ieee80211_iterate_stations_atomic(rtwdev->hw,
4688 struct rtw89_phy_stat *phystat = &rtwdev->phystat;
4691 for (i = 0; i < rtwdev->chip->rf_path_num; i++)
4692 ewma_thermal_init(&phystat->avg_thermal[i]);
4696 memset(&phystat->cur_pkt_stat, 0, sizeof(phystat->cur_pkt_stat));
4697 memset(&phystat->last_pkt_stat, 0, sizeof(phystat->last_pkt_stat));
4702 struct rtw89_phy_stat *phystat = &rtwdev->phystat;
4707 phystat->last_pkt_stat = phystat->cur_pkt_stat;
4708 memset(&phystat->cur_pkt_stat, 0, sizeof(phystat->cur_pkt_stat));
4713 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4715 return time_us >> (ilog2(CCX_US_BASE_RATIO) + env->ccx_unit_idx);
4720 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4722 return idx << (ilog2(CCX_US_BASE_RATIO) + env->ccx_unit_idx);
4727 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
4728 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4729 const struct rtw89_ccx_regs *ccx = phy->ccx;
4731 env->ccx_manual_ctrl = false;
4732 env->ccx_ongoing = false;
4733 env->ccx_rac_lv = RTW89_RAC_RELEASE;
4734 env->ccx_period = 0;
4735 env->ccx_unit_idx = RTW89_CCX_32_US;
4737 rtw89_phy_set_phy_regs(rtwdev, ccx->setting_addr, ccx->en_mask, 1);
4738 rtw89_phy_set_phy_regs(rtwdev, ccx->setting_addr, ccx->trig_opt_mask, 1);
4739 rtw89_phy_set_phy_regs(rtwdev, ccx->setting_addr, ccx->measurement_trig_mask, 1);
4740 rtw89_phy_set_phy_regs(rtwdev, ccx->setting_addr, ccx->edcca_opt_mask,
4747 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4751 numer = report * score + (env->ccx_period >> 1);
4752 if (env->ccx_period)
4753 ret = numer / env->ccx_period;
4755 return ret >= score ? score - 1 : ret;
4789 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4792 "lv:(%d)->(0)\n", env->ccx_rac_lv);
4794 env->ccx_ongoing = false;
4795 env->ccx_rac_lv = RTW89_RAC_RELEASE;
4796 env->ifs_clm_app = RTW89_IFS_CLM_BACKGROUND;
4802 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4803 bool is_update = env->ifs_clm_app != para->ifs_clm_app;
4805 u16 *ifs_th_l = env->ifs_clm_th_l;
4806 u16 *ifs_th_h = env->ifs_clm_th_h;
4813 switch (para->ifs_clm_app) {
4824 ifs_th0_us = para->ifs_clm_manual_th0;
4825 ifs_th_times = para->ifs_clm_manual_th_times;
4832 * low[i] = high[i-1] + 1
4833 * high[i] = high[i-1] * ifs_th_times
4840 ifs_th_l[i] = ifs_th_h[i - 1] + 1;
4841 ifs_th_h_us[i] = ifs_th_h_us[i - 1] * ifs_th_times;
4855 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
4856 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4857 const struct rtw89_ccx_regs *ccx = phy->ccx;
4860 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t1_addr, ccx->ifs_t1_th_l_mask,
4861 env->ifs_clm_th_l[0]);
4862 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t2_addr, ccx->ifs_t2_th_l_mask,
4863 env->ifs_clm_th_l[1]);
4864 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t3_addr, ccx->ifs_t3_th_l_mask,
4865 env->ifs_clm_th_l[2]);
4866 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t4_addr, ccx->ifs_t4_th_l_mask,
4867 env->ifs_clm_th_l[3]);
4869 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t1_addr, ccx->ifs_t1_th_h_mask,
4870 env->ifs_clm_th_h[0]);
4871 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t2_addr, ccx->ifs_t2_th_h_mask,
4872 env->ifs_clm_th_h[1]);
4873 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t3_addr, ccx->ifs_t3_th_h_mask,
4874 env->ifs_clm_th_h[2]);
4875 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t4_addr, ccx->ifs_t4_th_h_mask,
4876 env->ifs_clm_th_h[3]);
4881 i + 1, env->ifs_clm_th_l[i], env->ifs_clm_th_h[i]);
4886 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
4887 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4888 const struct rtw89_ccx_regs *ccx = phy->ccx;
4891 env->ifs_clm_app = RTW89_IFS_CLM_BACKGROUND;
4892 env->ifs_clm_mntr_time = 0;
4898 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_cnt_addr, ccx->ifs_collect_en_mask, true);
4899 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t1_addr, ccx->ifs_t1_en_mask, true);
4900 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t2_addr, ccx->ifs_t2_en_mask, true);
4901 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t3_addr, ccx->ifs_t3_en_mask, true);
4902 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_t4_addr, ccx->ifs_t4_en_mask, true);
4908 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4914 return -EINVAL;
4918 "ccx_ongoing=%d, level:(%d)->(%d)\n", env->ccx_ongoing,
4919 env->ccx_rac_lv, level);
4921 if (env->ccx_ongoing) {
4922 if (level <= env->ccx_rac_lv)
4923 ret = -EINVAL;
4925 env->ccx_ongoing = false;
4929 env->ccx_rac_lv = level;
4939 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
4940 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4941 const struct rtw89_ccx_regs *ccx = phy->ccx;
4943 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_cnt_addr, ccx->ifs_clm_cnt_clear_mask, 0);
4944 rtw89_phy_set_phy_regs(rtwdev, ccx->setting_addr, ccx->measurement_trig_mask, 0);
4945 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_cnt_addr, ccx->ifs_clm_cnt_clear_mask, 1);
4946 rtw89_phy_set_phy_regs(rtwdev, ccx->setting_addr, ccx->measurement_trig_mask, 1);
4948 env->ccx_ongoing = true;
4953 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
4957 env->ifs_clm_tx_ratio =
4958 rtw89_phy_ccx_get_report(rtwdev, env->ifs_clm_tx, PERCENT);
4959 env->ifs_clm_edcca_excl_cca_ratio =
4960 rtw89_phy_ccx_get_report(rtwdev, env->ifs_clm_edcca_excl_cca,
4962 env->ifs_clm_cck_fa_ratio =
4963 rtw89_phy_ccx_get_report(rtwdev, env->ifs_clm_cckfa, PERCENT);
4964 env->ifs_clm_ofdm_fa_ratio =
4965 rtw89_phy_ccx_get_report(rtwdev, env->ifs_clm_ofdmfa, PERCENT);
4966 env->ifs_clm_cck_cca_excl_fa_ratio =
4967 rtw89_phy_ccx_get_report(rtwdev, env->ifs_clm_cckcca_excl_fa,
4969 env->ifs_clm_ofdm_cca_excl_fa_ratio =
4970 rtw89_phy_ccx_get_report(rtwdev, env->ifs_clm_ofdmcca_excl_fa,
4972 env->ifs_clm_cck_fa_permil =
4973 rtw89_phy_ccx_get_report(rtwdev, env->ifs_clm_cckfa, PERMIL);
4974 env->ifs_clm_ofdm_fa_permil =
4975 rtw89_phy_ccx_get_report(rtwdev, env->ifs_clm_ofdmfa, PERMIL);
4978 if (env->ifs_clm_his[i] > ENV_MNTR_IFSCLM_HIS_MAX) {
4979 env->ifs_clm_ifs_avg[i] = ENV_MNTR_FAIL_DWORD;
4981 env->ifs_clm_ifs_avg[i] =
4983 env->ifs_clm_avg[i]);
4986 res = rtw89_phy_ccx_idx_to_us(rtwdev, env->ifs_clm_cca[i]);
4987 res += env->ifs_clm_his[i] >> 1;
4988 if (env->ifs_clm_his[i])
4989 res /= env->ifs_clm_his[i];
4992 env->ifs_clm_cca_avg[i] = res;
4996 "IFS-CLM ratio {Tx, EDCCA_exclu_cca} = {%d, %d}\n",
4997 env->ifs_clm_tx_ratio, env->ifs_clm_edcca_excl_cca_ratio);
4999 "IFS-CLM FA ratio {CCK, OFDM} = {%d, %d}\n",
5000 env->ifs_clm_cck_fa_ratio, env->ifs_clm_ofdm_fa_ratio);
5002 "IFS-CLM FA permil {CCK, OFDM} = {%d, %d}\n",
5003 env->ifs_clm_cck_fa_permil, env->ifs_clm_ofdm_fa_permil);
5005 "IFS-CLM CCA_exclu_FA ratio {CCK, OFDM} = {%d, %d}\n",
5006 env->ifs_clm_cck_cca_excl_fa_ratio,
5007 env->ifs_clm_ofdm_cca_excl_fa_ratio);
5012 i + 1, env->ifs_clm_his[i], env->ifs_clm_ifs_avg[i],
5013 env->ifs_clm_cca_avg[i]);
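/*
 * Raw report readback: after checking the counter-done bit, the tx,
 * EDCCA-excluding-CCA, CCA-excluding-FA, and FA counters are read out,
 * followed by the per-bin T1-T4 histogram, average, and CCA registers and
 * the total IFS count, all dumped to the PHY_TRACK debug log.
 */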
5018 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
5019 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
5020 const struct rtw89_ccx_regs *ccx = phy->ccx;
5023 if (rtw89_phy_read32_mask(rtwdev, ccx->ifs_total_addr,
5024 ccx->ifs_cnt_done_mask) == 0) {
5030 env->ifs_clm_tx =
5031 rtw89_phy_read32_mask(rtwdev, ccx->ifs_clm_tx_cnt_addr,
5032 ccx->ifs_clm_tx_cnt_msk);
5033 env->ifs_clm_edcca_excl_cca =
5034 rtw89_phy_read32_mask(rtwdev, ccx->ifs_clm_tx_cnt_addr,
5035 ccx->ifs_clm_edcca_excl_cca_fa_mask);
5036 env->ifs_clm_cckcca_excl_fa =
5037 rtw89_phy_read32_mask(rtwdev, ccx->ifs_clm_cca_addr,
5038 ccx->ifs_clm_cckcca_excl_fa_mask);
5039 env->ifs_clm_ofdmcca_excl_fa =
5040 rtw89_phy_read32_mask(rtwdev, ccx->ifs_clm_cca_addr,
5041 ccx->ifs_clm_ofdmcca_excl_fa_mask);
5042 env->ifs_clm_cckfa =
5043 rtw89_phy_read32_mask(rtwdev, ccx->ifs_clm_fa_addr,
5044 ccx->ifs_clm_cck_fa_mask);
5045 env->ifs_clm_ofdmfa =
5046 rtw89_phy_read32_mask(rtwdev, ccx->ifs_clm_fa_addr,
5047 ccx->ifs_clm_ofdm_fa_mask);
5049 env->ifs_clm_his[0] =
5050 rtw89_phy_read32_mask(rtwdev, ccx->ifs_his_addr,
5051 ccx->ifs_t1_his_mask);
5052 env->ifs_clm_his[1] =
5053 rtw89_phy_read32_mask(rtwdev, ccx->ifs_his_addr,
5054 ccx->ifs_t2_his_mask);
5055 env->ifs_clm_his[2] =
5056 rtw89_phy_read32_mask(rtwdev, ccx->ifs_his_addr,
5057 ccx->ifs_t3_his_mask);
5058 env->ifs_clm_his[3] =
5059 rtw89_phy_read32_mask(rtwdev, ccx->ifs_his_addr,
5060 ccx->ifs_t4_his_mask);
5062 env->ifs_clm_avg[0] =
5063 rtw89_phy_read32_mask(rtwdev, ccx->ifs_avg_l_addr,
5064 ccx->ifs_t1_avg_mask);
5065 env->ifs_clm_avg[1] =
5066 rtw89_phy_read32_mask(rtwdev, ccx->ifs_avg_l_addr,
5067 ccx->ifs_t2_avg_mask);
5068 env->ifs_clm_avg[2] =
5069 rtw89_phy_read32_mask(rtwdev, ccx->ifs_avg_h_addr,
5070 ccx->ifs_t3_avg_mask);
5071 env->ifs_clm_avg[3] =
5072 rtw89_phy_read32_mask(rtwdev, ccx->ifs_avg_h_addr,
5073 ccx->ifs_t4_avg_mask);
5075 env->ifs_clm_cca[0] =
5076 rtw89_phy_read32_mask(rtwdev, ccx->ifs_cca_l_addr,
5077 ccx->ifs_t1_cca_mask);
5078 env->ifs_clm_cca[1] =
5079 rtw89_phy_read32_mask(rtwdev, ccx->ifs_cca_l_addr,
5080 ccx->ifs_t2_cca_mask);
5081 env->ifs_clm_cca[2] =
5082 rtw89_phy_read32_mask(rtwdev, ccx->ifs_cca_h_addr,
5083 ccx->ifs_t3_cca_mask);
5084 env->ifs_clm_cca[3] =
5085 rtw89_phy_read32_mask(rtwdev, ccx->ifs_cca_h_addr,
5086 ccx->ifs_t4_cca_mask);
5088 env->ifs_clm_total_ifs =
5089 rtw89_phy_read32_mask(rtwdev, ccx->ifs_total_addr,
5090 ccx->ifs_total_mask);
5092 rtw89_debug(rtwdev, RTW89_DBG_PHY_TRACK, "IFS-CLM total_ifs = %d\n",
5093 env->ifs_clm_total_ifs);
5096 env->ifs_clm_tx, env->ifs_clm_edcca_excl_cca);
5098 "IFS-CLM FA{CCK, OFDM} = {%d, %d}\n",
5099 env->ifs_clm_cckfa, env->ifs_clm_ofdmfa);
5101 "IFS-CLM CCA_exclu_FA{CCK, OFDM} = {%d, %d}\n",
5102 env->ifs_clm_cckcca_excl_fa, env->ifs_clm_ofdmcca_excl_fa);
5107 "T%d:[%d, %d, %d]\n", i + 1, env->ifs_clm_his[i],
5108 env->ifs_clm_avg[i], env->ifs_clm_cca[i]);
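/*
 * IFS-CLM configuration: a zero monitor time is rejected, the racing-level
 * arbiter is consulted first, and the period/unit registers are only
 * reprogrammed when the requested monitor time differs from the cached one.
 */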
5118 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
5119 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
5120 const struct rtw89_ccx_regs *ccx = phy->ccx;
5124 if (para->mntr_time == 0) {
5127 return -EINVAL;
5130 if (rtw89_phy_ccx_racing_ctrl(rtwdev, para->rac_lv))
5131 return -EINVAL;
5133 if (para->mntr_time != env->ifs_clm_mntr_time) {
5134 rtw89_phy_ccx_ms_to_period_unit(rtwdev, para->mntr_time,
5136 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_cnt_addr,
5137 ccx->ifs_clm_period_mask, period);
5138 rtw89_phy_set_phy_regs(rtwdev, ccx->ifs_cnt_addr,
5139 ccx->ifs_clm_cnt_unit_mask,
5143 "Update IFS-CLM time ((%d)) -> ((%d))\n",
5144 env->ifs_clm_mntr_time, para->mntr_time);
5146 env->ifs_clm_mntr_time = para->mntr_time;
5147 env->ccx_period = (u16)period;
5148 env->ccx_unit_idx = (u8)unit_idx;
5152 env->ifs_clm_app = para->ifs_clm_app;
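/*
 * Environment-monitor watchdog fragment: the result bitmap starts out as
 * RTW89_PHY_ENV_MON_CCX_FAIL, manual control short-circuits the update, and
 * a completed IFS-CLM round appears to set RTW89_PHY_ENV_MON_IFS_CLM.
 */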
5161 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
5165 env->ccx_watchdog_result = RTW89_PHY_ENV_MON_CCX_FAIL;
5166 if (env->ccx_manual_ctrl) {
5174 env->ccx_watchdog_result |= RTW89_PHY_ENV_MON_IFS_CLM;
5188 env->ccx_watchdog_result, chk_result);
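/*
 * PHY status (physts) fragments: the IE page index appears to be adjusted
 * for a chip-specific quirk (RTL8852A), and fail/break report triggering is
 * controlled by clearing the dis_trigger_* bits to enable reporting or
 * setting them to disable it.
 */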
5197 *ie_page -= 1;
5226 const struct rtw89_chip_info *chip = rtwdev->chip;
5232 if (chip->chip_id == RTL8852A)
5258 const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
5259 const struct rtw89_physts_regs *physts = phy->physts;
5262 rtw89_phy_write32_clr(rtwdev, physts->setting_addr,
5263 physts->dis_trigger_fail_mask);
5264 rtw89_phy_write32_clr(rtwdev, physts->setting_addr,
5265 physts->dis_trigger_brk_mask);
5267 rtw89_phy_write32_set(rtwdev, physts->setting_addr,
5268 physts->dis_trigger_fail_mask);
5269 rtw89_phy_write32_set(rtwdev, physts->setting_addr,
5270 physts->dis_trigger_brk_mask);
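/*
 * DIG gain-table readback: the per-band LNA/TIA gain arrays are filled from
 * the chip's dig_table register list, and (when IGI is supported) the
 * sign-extended ib_pkpwr and ib_pbk calibration values are read back and
 * logged.
 */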
5304 const struct rtw89_chip_info *chip = rtwdev->chip;
5305 struct rtw89_dig_info *dig = &rtwdev->dig;
5315 gain_arr = dig->lna_gain_g;
5317 cfg = chip->dig_table->cfg_lna_g;
5321 gain_arr = dig->tia_gain_g;
5323 cfg = chip->dig_table->cfg_tia_g;
5327 gain_arr = dig->lna_gain_a;
5329 cfg = chip->dig_table->cfg_lna_a;
5333 gain_arr = dig->tia_gain_a;
5335 cfg = chip->dig_table->cfg_tia_a;
5342 for (i = 0; i < cfg->size; i++) {
5343 tmp = rtw89_phy_read32_mask(rtwdev, cfg->table[i].addr,
5344 cfg->table[i].mask);
5356 struct rtw89_dig_info *dig = &rtwdev->dig;
5360 if (!rtwdev->hal.support_igi)
5365 dig->ib_pkpwr = sign_extend32(tmp >> DIG_GAIN_SHIFT, U8_MAX_BIT);
5366 dig->ib_pbk = rtw89_phy_read32_mask(rtwdev, R_PATH0_IB_PBK,
5369 dig->ib_pkpwr, dig->ib_pbk);
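/*
 * DIG RSSI/parameter update: when a station is associated, igi_rssi tracks
 * half of the channel's minimum RSSI, otherwise a no-link default is used;
 * the band then selects the 2 GHz or 5/6 GHz LNA/TIA gain tables (the
 * latter forcing the gain-code index) and seeds the FA and IGI-RSSI
 * threshold arrays.
 */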
5383 struct rtw89_phy_ch_info *ch_info = &rtwdev->ch_info;
5384 struct rtw89_dig_info *dig = &rtwdev->dig;
5385 bool is_linked = rtwdev->total_sta_assoc > 0;
5388 dig->igi_rssi = ch_info->rssi_min >> 1;
5391 dig->igi_rssi = rssi_nolink;
5397 struct rtw89_dig_info *dig = &rtwdev->dig;
5399 bool is_linked = rtwdev->total_sta_assoc > 0;
5402 switch (chan->band_type) {
5404 dig->lna_gain = dig->lna_gain_g;
5405 dig->tia_gain = dig->tia_gain_g;
5407 dig->force_gaincode_idx_en = false;
5408 dig->dyn_pd_th_en = true;
5412 dig->lna_gain = dig->lna_gain_a;
5413 dig->tia_gain = dig->tia_gain_a;
5415 dig->force_gaincode_idx_en = true;
5416 dig->dyn_pd_th_en = true;
5419 memcpy(dig->fa_th, fa_th_src, sizeof(dig->fa_th));
5420 memcpy(dig->igi_rssi_th, igi_rssi_th, sizeof(dig->igi_rssi_th));
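/*
 * DIG defaults: both the current and the forced gain codes are reset to
 * their maximum LNA/TIA/RXB indices, and the dynamic IGI/PD bounds and
 * link state are re-seeded.
 */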
5429 struct rtw89_dig_info *dig = &rtwdev->dig;
5431 dig->cur_gaincode.lna_idx = LNA_IDX_MAX;
5432 dig->cur_gaincode.tia_idx = TIA_IDX_MAX;
5433 dig->cur_gaincode.rxb_idx = RXB_IDX_MAX;
5434 dig->force_gaincode.lna_idx = LNA_IDX_MAX;
5435 dig->force_gaincode.tia_idx = TIA_IDX_MAX;
5436 dig->force_gaincode.rxb_idx = RXB_IDX_MAX;
5438 dig->dyn_igi_max = igi_max_performance_mode;
5439 dig->dyn_igi_min = dynamic_igi_min;
5440 dig->dyn_pd_th_max = dynamic_pd_threshold_max;
5441 dig->pd_low_th_ofst = pd_low_th_offset;
5442 dig->is_linked_pre = false;
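/*
 * Gain-code selection by RSSI: the LNA and TIA indices are chosen by
 * comparing RSSI against the igi_rssi_th[] thresholds, and the RXB index is
 * derived from the residual after accounting for the selected LNA/TIA gains
 * and the ib_pkpwr/ib_pbk calibration terms.
 */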
5453 struct rtw89_dig_info *dig = &rtwdev->dig;
5456 if (rssi < dig->igi_rssi_th[0])
5458 else if (rssi < dig->igi_rssi_th[1])
5460 else if (rssi < dig->igi_rssi_th[2])
5462 else if (rssi < dig->igi_rssi_th[3])
5464 else if (rssi < dig->igi_rssi_th[4])
5474 struct rtw89_dig_info *dig = &rtwdev->dig;
5477 if (rssi < dig->igi_rssi_th[0])
5490 struct rtw89_dig_info *dig = &rtwdev->dig;
5491 s8 lna_gain = dig->lna_gain[set->lna_idx];
5492 s8 tia_gain = dig->tia_gain[set->tia_idx];
5497 rxb_idx_tmp += dig->ib_pkpwr - dig->ib_pbk - wb_rssi;
5509 set->lna_idx = rtw89_phy_dig_lna_idx_by_rssi(rtwdev, rssi);
5510 set->tia_idx = rtw89_phy_dig_tia_idx_by_rssi(rtwdev, rssi);
5511 set->rxb_idx = rtw89_phy_dig_rxb_idx_by_rssi(rtwdev, rssi, set);
5515 rssi, set->lna_idx, set->tia_idx, set->rxb_idx);
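/*
 * FA-based IGI offset: the CCK and OFDM false-alarm permil values are
 * summed and mapped through the fa_th[] thresholds to an IGI offset, which
 * is cached in fa_rssi_ofst and dumped to the debug log.
 */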
5522 struct rtw89_dig_info *dig = &rtwdev->dig;
5523 struct rtw89_env_monitor_info *env = &rtwdev->env_monitor;
5525 u8 igi_offset = dig->fa_rssi_ofst;
5528 fa_ratio = env->ifs_clm_cck_fa_permil + env->ifs_clm_ofdm_fa_permil;
5530 if (fa_ratio < dig->fa_th[0])
5532 else if (fa_ratio < dig->fa_th[1])
5534 else if (fa_ratio < dig->fa_th[2])
5536 else if (fa_ratio < dig->fa_th[3])
5547 dig->fa_rssi_ofst = igi_offset;
5550 "fa_th: [+6 (%d) +4 (%d) +2 (%d) 0 (%d) -2 ]\n",
5551 dig->fa_th[3], dig->fa_th[2], dig->fa_th[1], dig->fa_th[0]);
5555 env->ifs_clm_cck_fa_permil, env->ifs_clm_ofdm_fa_permil,
5556 env->ifs_clm_cck_fa_permil + env->ifs_clm_ofdm_fa_permil,
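/*
 * IGI register writes: the LNA/TIA/RXB init values are programmed on both
 * RF paths, and the pagcugc fragment appears to enable (or disable)
 * SDAGC-follow-PAGC on the primary and secondary 20 MHz segments of both
 * paths.
 */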
5562 const struct rtw89_dig_regs *dig_regs = rtwdev->chip->dig_regs;
5564 rtw89_phy_write32_mask(rtwdev, dig_regs->p0_lna_init.addr,
5565 dig_regs->p0_lna_init.mask, lna_idx);
5566 rtw89_phy_write32_mask(rtwdev, dig_regs->p1_lna_init.addr,
5567 dig_regs->p1_lna_init.mask, lna_idx);
5572 const struct rtw89_dig_regs *dig_regs = rtwdev->chip->dig_regs;
5574 rtw89_phy_write32_mask(rtwdev, dig_regs->p0_tia_init.addr,
5575 dig_regs->p0_tia_init.mask, tia_idx);
5576 rtw89_phy_write32_mask(rtwdev, dig_regs->p1_tia_init.addr,
5577 dig_regs->p1_tia_init.mask, tia_idx);
5582 const struct rtw89_dig_regs *dig_regs = rtwdev->chip->dig_regs;
5584 rtw89_phy_write32_mask(rtwdev, dig_regs->p0_rxb_init.addr,
5585 dig_regs->p0_rxb_init.mask, rxb_idx);
5586 rtw89_phy_write32_mask(rtwdev, dig_regs->p1_rxb_init.addr,
5587 dig_regs->p1_rxb_init.mask, rxb_idx);
5593 if (!rtwdev->hal.support_igi)
5607 const struct rtw89_dig_regs *dig_regs = rtwdev->chip->dig_regs;
5609 rtw89_phy_write32_mask(rtwdev, dig_regs->p0_p20_pagcugc_en.addr,
5610 dig_regs->p0_p20_pagcugc_en.mask, enable);
5611 rtw89_phy_write32_mask(rtwdev, dig_regs->p0_s20_pagcugc_en.addr,
5612 dig_regs->p0_s20_pagcugc_en.mask, enable);
5613 rtw89_phy_write32_mask(rtwdev, dig_regs->p1_p20_pagcugc_en.addr,
5614 dig_regs->p1_p20_pagcugc_en.mask, enable);
5615 rtw89_phy_write32_mask(rtwdev, dig_regs->p1_s20_pagcugc_en.addr,
5616 dig_regs->p1_s20_pagcugc_en.mask, enable);
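/*
 * IGI configuration: when force_gaincode_idx_en is set the forced gain code
 * is written as-is; otherwise a gain code is derived from igi_fa_rssi and
 * programmed as the current one.
 */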
5623 struct rtw89_dig_info *dig = &rtwdev->dig;
5625 if (!rtwdev->hal.support_igi)
5628 if (dig->force_gaincode_idx_en) {
5629 rtw89_phy_dig_set_igi_cr(rtwdev, dig->force_gaincode);
5633 rtw89_phy_dig_gaincode_by_rssi(rtwdev, dig->igi_fa_rssi,
5634 &dig->cur_gaincode);
5635 rtw89_phy_dig_set_igi_cr(rtwdev, dig->cur_gaincode);
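/*
 * Dynamic packet-detection threshold: the RSSI is clamped to igi_rssi, an
 * OFDM CCA threshold is turned into a PD lower bound and written together
 * with the spatial-reuse enable bit, and on CCK-capable chips the b-mode
 * RSSI limit registers are programmed as well.
 */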
5643 const struct rtw89_dig_regs *dig_regs = rtwdev->chip->dig_regs;
5644 enum rtw89_bandwidth cbw = chan->band_width;
5645 struct rtw89_dig_info *dig = &rtwdev->dig;
5646 u8 final_rssi = 0, under_region = dig->pd_low_th_ofst;
5651 if (rtwdev->chip->chip_gen == RTW89_CHIP_AX)
5671 dig->dyn_pd_th_max = dig->igi_rssi;
5673 final_rssi = min_t(u8, rssi, dig->igi_rssi);
5678 pd_val = (ofdm_cca_th - under_region - PD_TH_MIN_RSSI) >> 1;
5687 rtw89_phy_write32_mask(rtwdev, dig_regs->seg0_pd_reg,
5688 dig_regs->pd_lower_bound_mask, pd_val);
5689 rtw89_phy_write32_mask(rtwdev, dig_regs->seg0_pd_reg,
5690 dig_regs->pd_spatial_reuse_en, enable);
5692 if (!rtwdev->hal.support_cckpd)
5695 cck_cca_th = max_t(s8, final_rssi - under_region, CCKPD_TH_MIN_RSSI);
5696 pd_val = (u32)(cck_cca_th - IGI_RSSI_MAX);
5702 rtw89_phy_write32_mask(rtwdev, dig_regs->bmode_pd_reg,
5703 dig_regs->bmode_cca_rssi_limit_en, enable);
5704 rtw89_phy_write32_mask(rtwdev, dig_regs->bmode_pd_lower_bound_reg,
5705 dig_regs->bmode_rssi_nocca_low_th_mask, pd_val);
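/*
 * DIG reset and watchdog: the reset path clears the bypass flag and
 * restores the forced gain code, while the periodic track recomputes the
 * dynamic IGI window from igi_rssi, clamps igi_fa_rssi into it, reprograms
 * the IGI and PD threshold, and handles link up/down transitions.
 */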
5710 struct rtw89_dig_info *dig = &rtwdev->dig;
5712 dig->bypass_dig = false;
5714 rtw89_phy_dig_set_igi_cr(rtwdev, dig->force_gaincode);
5723 struct rtw89_dig_info *dig = &rtwdev->dig;
5724 bool is_linked = rtwdev->total_sta_assoc > 0;
5726 if (unlikely(dig->bypass_dig)) {
5727 dig->bypass_dig = false;
5731 if (!dig->is_linked_pre && is_linked) {
5734 } else if (dig->is_linked_pre && !is_linked) {
5738 dig->is_linked_pre = is_linked;
5743 dig->dyn_igi_min = (dig->igi_rssi > IGI_RSSI_MIN) ?
5744 dig->igi_rssi - IGI_RSSI_MIN : 0;
5745 dig->dyn_igi_max = dig->dyn_igi_min + IGI_OFFSET_MAX;
5746 dig->igi_fa_rssi = dig->dyn_igi_min + dig->fa_rssi_ofst;
5748 dig->igi_fa_rssi = clamp(dig->igi_fa_rssi, dig->dyn_igi_min,
5749 dig->dyn_igi_max);
5753 dig->igi_rssi, dig->dyn_igi_max, dig->dyn_igi_min,
5754 dig->igi_fa_rssi);
5758 rtw89_phy_dig_dyn_pd_th(rtwdev, dig->igi_fa_rssi, dig->dyn_pd_th_en);
5760 if (dig->dyn_pd_th_en && dig->igi_fa_rssi > dig->dyn_pd_th_max)
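/*
 * TX path diversity: a per-station iterator (station role only, no TDLS)
 * compares the averaged RSSI of RF paths A and B and, when the better path
 * changes, updates hal->antenna_tx and the corresponding TX path selection.
 */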
5768 struct rtw89_sta *rtwsta = (struct rtw89_sta *)sta->drv_priv;
5769 struct rtw89_dev *rtwdev = rtwsta->rtwdev;
5770 struct rtw89_vif *rtwvif = rtwsta->rtwvif;
5771 struct rtw89_hal *hal = &rtwdev->hal;
5776 if (rtwvif->wifi_role != RTW89_WIFI_ROLE_STATION || sta->tdls)
5784 rssi_a = ewma_rssi_read(&rtwsta->rssi[RF_PATH_A]);
5785 rssi_b = ewma_rssi_read(&rtwsta->rssi[RF_PATH_B]);
5794 if (hal->antenna_tx == candidate)
5797 hal->antenna_tx = candidate;
5800 if (hal->antenna_tx == RF_A) {
5803 } else if (hal->antenna_tx == RF_B) {
5811 struct rtw89_hal *hal = &rtwdev->hal;
5814 if (!hal->tx_path_diversity)
5817 ieee80211_iterate_stations_atomic(rtwdev->hw,
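/*
 * Antenna diversity: the decision step compares RSSI/EVM statistics gathered
 * on the main and aux antennas, the training step alternates statistics
 * collection across ANTDIV_TRAINNING_CNT rounds via delayed work, and the
 * watchdog restarts training when the target RSSI moves by more than
 * ANTDIV_RSSI_DIFF_TH.
 */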
5827 struct rtw89_hal *hal = &rtwdev->hal;
5830 if (!hal->ant_diversity || hal->antenna_tx == 0)
5833 if (hal->antenna_tx == RF_B) {
5853 struct rtw89_hal *hal = &rtwdev->hal;
5855 hal->antenna_rx = hal->antenna_rx == RF_A ? RF_B : RF_A;
5856 hal->antenna_tx = hal->antenna_rx;
5861 struct rtw89_antdiv_info *antdiv = &rtwdev->antdiv;
5862 struct rtw89_hal *hal = &rtwdev->hal;
5868 antdiv->get_stats = false;
5869 antdiv->training_count = 0;
5871 main_rssi = rtw89_phy_antdiv_sts_instance_get_rssi(&antdiv->main_stats);
5872 main_evm = rtw89_phy_antdiv_sts_instance_get_evm(&antdiv->main_stats);
5873 aux_rssi = rtw89_phy_antdiv_sts_instance_get_rssi(&antdiv->aux_stats);
5874 aux_evm = rtw89_phy_antdiv_sts_instance_get_evm(&antdiv->aux_stats);
5893 hal->antenna_tx = candidate;
5894 hal->antenna_rx = candidate;
5899 struct rtw89_antdiv_info *antdiv = &rtwdev->antdiv;
5902 if (antdiv->training_count % 2 == 0) {
5903 if (antdiv->training_count == 0)
5906 antdiv->get_stats = true;
5909 antdiv->get_stats = false;
5916 antdiv->training_count++;
5917 ieee80211_queue_delayed_work(rtwdev->hw, &rtwdev->antdiv_work,
5925 struct rtw89_antdiv_info *antdiv = &rtwdev->antdiv;
5927 mutex_lock(&rtwdev->mutex);
5929 if (antdiv->training_count <= ANTDIV_TRAINNING_CNT) {
5936 mutex_unlock(&rtwdev->mutex);
5941 struct rtw89_antdiv_info *antdiv = &rtwdev->antdiv;
5942 struct rtw89_hal *hal = &rtwdev->hal;
5945 if (!hal->ant_diversity || hal->ant_diversity_fixed)
5948 rssi = rtw89_phy_antdiv_sts_instance_get_rssi(&antdiv->target_stats);
5949 rssi_pre = antdiv->rssi_pre;
5950 antdiv->rssi_pre = rssi;
5951 rtw89_phy_antdiv_sts_instance_reset(&antdiv->target_stats);
5953 if (abs((int)rssi - (int)rssi_pre) < ANTDIV_RSSI_DIFF_TH)
5956 antdiv->training_count = 0;
5957 ieee80211_queue_delayed_work(rtwdev->hw, &rtwdev->antdiv_work, 0);
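/*
 * The fragment below appears to be EDCCA init: it applies an RTL8922A
 * CAV-specific workaround and programs the TX collision T2R state to 0x29.
 */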
5968 const struct rtw89_edcca_regs *edcca_regs = rtwdev->chip->edcca_regs;
5969 struct rtw89_edcca_bak *edcca_bak = &rtwdev->hal.edcca_bak;
5973 if (rtwdev->chip->chip_id == RTL8922A && rtwdev->hal.cv == CHIP_CAV) {
5985 rtw89_phy_write32_mask(rtwdev, edcca_regs->tx_collision_t2r_st,
5986 edcca_regs->tx_collision_t2r_st_mask, 0x29);
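/*
 * BSS color configuration: for an associated HE interface, the BSS color
 * valid bit, the color itself, and the station AID are written into the
 * BSS color map registers of the given PHY.
 */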
6017 const struct rtw89_chip_info *chip = rtwdev->chip;
6018 const struct rtw89_reg_def *bss_clr_vld = &chip->bss_clr_vld;
6022 if (!vif->bss_conf.he_support || !vif->cfg.assoc)
6025 bss_color = vif->bss_conf.he_bss_color.color;
6027 rtw89_phy_write32_idx(rtwdev, bss_clr_vld->addr, bss_clr_vld->mask, 0x1,
6029 rtw89_phy_write32_idx(rtwdev, chip->bss_clr_map_reg, B_BSS_CLR_MAP_TGT,
6031 rtw89_phy_write32_idx(rtwdev, chip->bss_clr_map_reg, B_BSS_CLR_MAP_STAID,
6032 vif->cfg.aid, phy_idx);
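/*
 * RFK helpers: a channel descriptor matches when its channel (and, when
 * present, band and bandwidth) equal the current channel; an iterator counts
 * equivalent entries and falls back when none is found (sel == -1). The
 * _rfk_handler table then dispatches RF writes, BB mask writes, set/clear
 * operations, and delays while walking an RFK parameter table.
 */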
6037 return desc->ch != 0;
6046 if (desc->ch != chan->channel)
6049 if (desc->has_band && desc->band != chan->band_type)
6052 if (desc->has_bw && desc->bw != chan->band_width)
6067 if (rfk_chan_is_equivalent(&iter_data->desc, chan))
6068 iter_data->found++;
6077 int sel = -1;
6089 if (!iter_data.found && sel == -1)
6093 if (sel == -1) {
6106 rtw89_write_rf(rtwdev, def->path, def->addr, def->mask, def->data);
6112 rtw89_phy_write32_mask(rtwdev, def->addr, def->mask, def->data);
6118 rtw89_phy_write32_set(rtwdev, def->addr, def->mask);
6124 rtw89_phy_write32_clr(rtwdev, def->addr, def->mask);
6130 udelay(def->data);
6151 const struct rtw89_reg5_def *p = tbl->defs;
6152 const struct rtw89_reg5_def *end = tbl->defs + tbl->size;
6155 _rfk_handler[p->flag](rtwdev, p);
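/*
 * The two lines below appear to come from a TSSI bandedge helper that looks
 * up per-bandwidth configuration data from the chip's tssi_dbw_table.
 */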
6236 const struct rtw89_chip_info *chip = rtwdev->chip;
6250 data = chip->tssi_dbw_table->data[bandedge_cfg];
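/*
 * Channel index encode/decode: (band, central channel) is packed into a
 * compact index using a per-band base-channel table, with the half-channel
 * offset ((central_ch - base) >> 1) in the low bits; the decode helper maps
 * the index back to a channel number and nl80211 band.
 */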
6278 u8 rtw89_encode_chan_idx(struct rtw89_dev *rtwdev, u8 central_ch, u8 band)
6284 switch (band) {
6298 rtw89_warn(rtwdev, "Unsupported band %d\n", band);
6302 for (idx = last; idx >= first; idx--)
6307 rtw89_warn(rtwdev, "Unknown band %d channel %d\n", band, central_ch);
6313 (central_ch - rtw89_ch_base_table[idx]) >> 1);
6319 u8 *ch, enum nl80211_band *band)
6327 *band = NL80211_BAND_2GHZ;
6332 *band = idx <= RTW89_CH_BASE_IDX_5G_LAST ? NL80211_BAND_5GHZ : NL80211_BAND_6GHZ;
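/*
 * EDCCA backup/restore: the current EDCCA, EDCCA-p, and PPDU level fields
 * are saved, then either all three are forced to EDCCA_MAX or the saved
 * values are written back, depending on the caller's request.
 */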
6339 const struct rtw89_edcca_regs *edcca_regs = rtwdev->chip->edcca_regs;
6340 struct rtw89_edcca_bak *edcca_bak = &rtwdev->hal.edcca_bak;
6343 edcca_bak->a =
6344 rtw89_phy_read32_mask(rtwdev, edcca_regs->edcca_level,
6345 edcca_regs->edcca_mask);
6346 edcca_bak->p =
6347 rtw89_phy_read32_mask(rtwdev, edcca_regs->edcca_level,
6348 edcca_regs->edcca_p_mask);
6349 edcca_bak->ppdu =
6350 rtw89_phy_read32_mask(rtwdev, edcca_regs->ppdu_level,
6351 edcca_regs->ppdu_mask);
6353 rtw89_phy_write32_mask(rtwdev, edcca_regs->edcca_level,
6354 edcca_regs->edcca_mask, EDCCA_MAX);
6355 rtw89_phy_write32_mask(rtwdev, edcca_regs->edcca_level,
6356 edcca_regs->edcca_p_mask, EDCCA_MAX);
6357 rtw89_phy_write32_mask(rtwdev, edcca_regs->ppdu_level,
6358 edcca_regs->ppdu_mask, EDCCA_MAX);
6360 rtw89_phy_write32_mask(rtwdev, edcca_regs->edcca_level,
6361 edcca_regs->edcca_mask,
6362 edcca_bak->a);
6363 rtw89_phy_write32_mask(rtwdev, edcca_regs->edcca_level,
6364 edcca_regs->edcca_p_mask,
6365 edcca_bak->p);
6366 rtw89_phy_write32_mask(rtwdev, edcca_regs->ppdu_level,
6367 edcca_regs->ppdu_mask,
6368 edcca_bak->ppdu);
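/*
 * EDCCA report readback (debug): report pages are selected through rpt_sel
 * (and rpt_sel_be on RTL8922A) and the flag/level words plus the per-20 MHz
 * busy bitmap are read out of rpt_a/rpt_b for logging.
 */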
6374 const struct rtw89_edcca_regs *edcca_regs = rtwdev->chip->edcca_regs;
6384 if (rtwdev->chip->chip_id == RTL8922A)
6385 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel_be,
6386 edcca_regs->rpt_sel_be_mask, 0);
6388 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel,
6389 edcca_regs->rpt_sel_mask, 0);
6390 tmp = rtw89_phy_read32(rtwdev, edcca_regs->rpt_b);
6401 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel,
6402 edcca_regs->rpt_sel_mask, 4);
6403 tmp = rtw89_phy_read32(rtwdev, edcca_regs->rpt_b);
6407 per20_bitmap = rtw89_phy_read32_mask(rtwdev, edcca_regs->rpt_a,
6410 if (rtwdev->chip->chip_id == RTL8922A) {
6411 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel_be,
6412 edcca_regs->rpt_sel_be_mask, 4);
6413 tmp = rtw89_phy_read32(rtwdev, edcca_regs->rpt_b);
6419 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel_be,
6420 edcca_regs->rpt_sel_be_mask, 5);
6421 tmp = rtw89_phy_read32(rtwdev, edcca_regs->rpt_b);
6427 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel,
6428 edcca_regs->rpt_sel_mask, 0);
6429 tmp = rtw89_phy_read32(rtwdev, edcca_regs->rpt_a);
6433 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel,
6434 edcca_regs->rpt_sel_mask, 1);
6435 tmp = rtw89_phy_read32(rtwdev, edcca_regs->rpt_a);
6439 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel,
6440 edcca_regs->rpt_sel_mask, 2);
6441 tmp = rtw89_phy_read32(rtwdev, edcca_regs->rpt_a);
6445 rtw89_phy_write32_mask(rtwdev, edcca_regs->rpt_sel,
6446 edcca_regs->rpt_sel_mask, 3);
6447 tmp = rtw89_phy_read32(rtwdev, edcca_regs->rpt_a);
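/*
 * EDCCA threshold tracking: when a station is linked, the threshold is
 * derived from the minimum RSSI using the RSSI/EDCCA unit-conversion
 * constants; it is written to the EDCCA, EDCCA-p, and PPDU level fields only
 * when it differs from the cached value, and the track hook bails out when
 * dynamic EDCCA is disabled in disabled_dm_bitmap.
 */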
6471 struct rtw89_phy_ch_info *ch_info = &rtwdev->ch_info;
6472 bool is_linked = rtwdev->total_sta_assoc > 0;
6473 u8 rssi_min = ch_info->rssi_min >> 1;
6479 edcca_thre = rssi_min - RSSI_UNIT_CONVER + EDCCA_UNIT_CONVER -
6489 const struct rtw89_edcca_regs *edcca_regs = rtwdev->chip->edcca_regs;
6490 struct rtw89_edcca_bak *edcca_bak = &rtwdev->hal.edcca_bak;
6494 if (th == edcca_bak->th_old)
6497 edcca_bak->th_old = th;
6502 rtw89_phy_write32_mask(rtwdev, edcca_regs->edcca_level,
6503 edcca_regs->edcca_mask, th);
6504 rtw89_phy_write32_mask(rtwdev, edcca_regs->edcca_level,
6505 edcca_regs->edcca_p_mask, th);
6506 rtw89_phy_write32_mask(rtwdev, edcca_regs->ppdu_level,
6507 edcca_regs->ppdu_mask, th);
6512 struct rtw89_hal *hal = &rtwdev->hal;
6514 if (hal->disabled_dm_bitmap & BIT(RTW89_DM_DYNAMIC_EDCCA))
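/*
 * The trailing fragments appear to map the current MLO DBCC mode and PHY
 * index to RF path / synthesizer selections used by the RF calibration code.
 */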
6526 rtwdev->dbcc_en, rtwdev->mlo_dbcc_mode, phy_idx);
6528 switch (rtwdev->mlo_dbcc_mode) {
6560 rtwdev->dbcc_en, rtwdev->mlo_dbcc_mode, phy_idx);
6562 switch (rtwdev->mlo_dbcc_mode) {