Lines matching +full:0 +full:xfb000000 (a search for the constant 0xfb000000, tokenized into "0" and "xfb000000")

49  *	at the time it indicated completion is stored there.  Returns 0 if the
61 return 0; in t3_wait_op_done_val()
63 if (--attempts == 0) in t3_wait_op_done_val()
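The three fragments above (file lines 49-63) come from t3_wait_op_done_val(), the register-polling helper behind the many t3_wait_op_done() calls further down in this listing. A minimal sketch of that poll-with-retry pattern follows; the name poll_reg() is made up for illustration, and it assumes the driver's struct adapter and t3_read_reg() plus the kernel's udelay() and -EAGAIN are in scope:

/* Poll `reg` until (value & mask) has the requested polarity, retrying
 * `attempts` times with `delay_us` microseconds between reads.
 * Returns 0 on success, -EAGAIN if the condition never became true. */
static int poll_reg(struct adapter *adap, int reg, u32 mask, int polarity,
                    int attempts, int delay_us)
{
        while (1) {
                u32 val = t3_read_reg(adap, reg);

                if (!!(val & mask) == polarity)
                        return 0;               /* condition met */
                if (--attempts == 0)
                        return -EAGAIN;         /* gave up */
                if (delay_us)
                        udelay(delay_us);
        }
}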
144 static int shift[] = { 0, 0, 16, 24 }; in t3_mc7_bd_read()
145 static int step[] = { 0, 32, 16, 8 }; in t3_mc7_bd_read()
156 u64 val64 = 0; in t3_mc7_bd_read()
158 for (i = (1 << mc7->width) - 1; i >= 0; --i) { in t3_mc7_bd_read()
164 t3_write_reg(adap, mc7->offset + A_MC7_BD_OP, 0); in t3_mc7_bd_read()
173 if (mc7->width == 0) { in t3_mc7_bd_read()
186 return 0; in t3_mc7_bd_read()
203 * as a 0/1 value.
211 F_I2C_READ | (chained ? F_I2C_CONT : 0)); in t3_i2c_read8()
212 ret = t3_wait_op_done_val(adapter, A_I2C_OP, F_I2C_BUSY, 0, in t3_i2c_read8()
214 if (ret >= 0) { in t3_i2c_read8()
227 * 0/1 value.
236 F_I2C_WRITE | (chained ? F_I2C_CONT : 0)); in t3_i2c_write8()
237 ret = t3_wait_op_done_val(adapter, A_I2C_OP, F_I2C_BUSY, 0, in t3_i2c_write8()
239 if (ret >= 0) in t3_i2c_write8()
274 ret = t3_wait_op_done(adapter, A_MI1_OP, F_BUSY, 0, MDIO_ATTEMPTS, 10); in t3_mi1_read()
295 ret = t3_wait_op_done(adapter, A_MI1_OP, F_BUSY, 0, MDIO_ATTEMPTS, 10); in t3_mi1_write()
315 t3_set_reg_field(adapter, A_MI1_CFG, V_ST(M_ST), 0); in mi1_ext_read()
318 t3_write_reg(adapter, A_MI1_OP, V_MDI_OP(0)); in mi1_ext_read()
319 ret = t3_wait_op_done(adapter, A_MI1_OP, F_BUSY, 0, MDIO_ATTEMPTS, 10); in mi1_ext_read()
322 ret = t3_wait_op_done(adapter, A_MI1_OP, F_BUSY, 0, in mi1_ext_read()
338 t3_set_reg_field(adapter, A_MI1_CFG, V_ST(M_ST), 0); in mi1_ext_write()
341 t3_write_reg(adapter, A_MI1_OP, V_MDI_OP(0)); in mi1_ext_write()
342 ret = t3_wait_op_done(adapter, A_MI1_OP, F_BUSY, 0, MDIO_ATTEMPTS, 10); in mi1_ext_write()
346 ret = t3_wait_op_done(adapter, A_MI1_OP, F_BUSY, 0, in mi1_ext_write()
390 * @mmd should be 0 for 10/100/1000 PHYs and the device address to reset
411 return ctl ? -1 : 0; in t3_phy_reset()
425 unsigned int val = 0; in t3_phy_advertise()
427 err = mdio_read(phy, 0, MII_CTRL1000, &val); in t3_phy_advertise()
437 err = mdio_write(phy, 0, MII_CTRL1000, val); in t3_phy_advertise()
454 return mdio_write(phy, 0, MII_ADVERTISE, val); in t3_phy_advertise()
467 unsigned int val = 0; in t3_phy_advertise_fiber()
477 return mdio_write(phy, 0, MII_ADVERTISE, val); in t3_phy_advertise_fiber()
494 err = mdio_read(phy, 0, MII_BMCR, &ctl); in t3_set_phy_speed_duplex()
498 if (speed >= 0) { in t3_set_phy_speed_duplex()
505 if (duplex >= 0) { in t3_set_phy_speed_duplex()
512 return mdio_write(phy, 0, MII_BMCR, ctl); in t3_set_phy_speed_duplex()
522 return mdio_write(phy, MDIO_DEV_PMA_PMD, LASI_CTRL, 0); in t3_phy_lasi_intr_disable()
539 return (status & 1) ? cphy_cause_link_change : 0; in t3_phy_lasi_intr_handler()
543 { 1, 1, 0,
545 F_GPIO2_OUT_VAL | F_GPIO4_OUT_VAL, { S_GPIO3, S_GPIO5 }, 0,
547 { 1, 1, 0,
549 F_GPIO2_OUT_VAL | F_GPIO4_OUT_VAL, { S_GPIO3, S_GPIO5 }, 0,
551 { 1, 0, 0,
554 { 0 }, SUPPORTED_10000baseT_Full | SUPPORTED_AUI,
556 { 1, 1, 0,
562 { 4, 0, 0,
567 { 0 },
568 { 1, 0, 0,
573 { 1, 0, 0,
641 #define EEPROM_STAT_ADDR 0x4000
642 #define VPD_BASE 0xc00
671 CH_ERR(adapter, "reading EEPROM address 0x%x failed\n", addr); in t3_seeprom_read()
676 return 0; in t3_seeprom_read()
707 CH_ERR(adapter, "write to EEPROM address 0x%x failed\n", addr); in t3_seeprom_write()
710 return 0; in t3_seeprom_write()
716 * @enable: 1 to enable write protection, 0 to disable it
722 return t3_seeprom_write(adapter, EEPROM_STAT_ADDR, enable ? 0xc : 0); in t3_seeprom_wp()
730 return isdigit(c) ? c - '0' : toupper(c) - 'A' + 10; in hex2int()
743 u32 read_offset, tmp, shift, len = 0; in get_desc_len()
747 read_offset = offset & 0xfffffffc; in get_desc_len()
748 shift = offset & 0x03; in get_desc_len()
751 if (ret < 0) in get_desc_len()
757 if (tag & 0x80) { in get_desc_len()
759 if (ret < 0) in get_desc_len()
763 len = (buf[shift + 1] & 0xff) + in get_desc_len()
764 ((buf[shift+2] << 8) & 0xff00) + 3; in get_desc_len()
766 len = (tag & 0x07) + 1; in get_desc_len()
783 read_offset = offset & 0xfffffffc; in is_end_tag()
784 shift = offset & 0x03; in is_end_tag()
791 if (buf[shift] == 0x78) in is_end_tag()
794 return 0; in is_end_tag()
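The get_desc_len() and is_end_tag() fragments above (file lines 743-794) step through standard PCI VPD resource descriptors: a tag byte with bit 7 set introduces a large descriptor whose 16-bit length follows in the next two bytes (plus the 3 header bytes), a clear bit 7 marks a small descriptor of (tag & 0x07) + 1 bytes, and 0x78 is the end tag. A standalone sketch of that length rule, independent of the driver's EEPROM accessors:

#include <stdint.h>
#include <stddef.h>

/* Size in bytes of the VPD resource descriptor whose tag byte is buf[0],
 * using the same rules as get_desc_len() above. */
static size_t vpd_desc_len(const uint8_t *buf)
{
        uint8_t tag = buf[0];

        if (tag & 0x80)                 /* large resource: 16-bit length field */
                return (buf[1] | (buf[2] << 8)) + 3;
        return (tag & 0x07) + 1;        /* small resource (0x78 is the end tag) */
}

The driver walks descriptors with this rule from the VPD base until is_end_tag() sees the 0x78 byte.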
807 u32 len=0, offset; in t3_get_vpd_len()
814 if (ret < 0) in t3_get_vpd_len()
820 if (inc < 0) in t3_get_vpd_len()
841 for (i = 0; i < vpd->len; i += 4) { in t3_read_vpd()
848 return 0; in t3_read_vpd()
866 * it at 0. in get_vpd_params()
871 addr = vpd.id_tag == 0x82 ? VPD_BASE : 0; in get_vpd_params()
873 for (i = 0; i < sizeof(vpd); i += 4) { in get_vpd_params()
889 if (adapter->params.rev == 0 && !vpd.port0_data[0]) { in get_vpd_params()
890 p->port_type[0] = uses_xaui(adapter) ? 1 : 2; in get_vpd_params()
893 p->port_type[0] = (u8)hex2int(vpd.port0_data[0]); in get_vpd_params()
894 p->port_type[1] = (u8)hex2int(vpd.port1_data[0]); in get_vpd_params()
895 p->port_type[2] = (u8)hex2int(vpd.port2_data[0]); in get_vpd_params()
896 p->port_type[3] = (u8)hex2int(vpd.port3_data[0]); in get_vpd_params()
897 p->xauicfg[0] = simple_strtoul(vpd.xaui0cfg_data, NULL, 16); in get_vpd_params()
901 for (i = 0; i < 6; i++) in get_vpd_params()
904 return 0; in get_vpd_params()
927 SF_RD_DATA_FAST = 0xb, /* read flash */
928 SF_ERASE_SECTOR = 0xd8, /* erase sector */
930 FW_FLASH_BOOT_ADDR = 0x70000, /* start address of FW in flash */
931 FW_VERS_ADDR = 0x7fffc, /* flash address holding FW version */
932 FW_VERS_ADDR_PRE8 = 0x77ffc,/* flash address holding FW version pre8 */
937 BOOT_FLASH_BOOT_ADDR = 0x0,/* start address of boot image in flash */
938 BOOT_SIGNATURE = 0xaa55, /* signature of BIOS boot ROM */
965 ret = t3_wait_op_done(adapter, A_SF_OP, F_BUSY, 0, SF_ATTEMPTS, 10); in sf1_read()
992 return t3_wait_op_done(adapter, A_SF_OP, F_BUSY, 0, SF_ATTEMPTS, 10); in sf1_write()
1009 if ((ret = sf1_write(adapter, 1, 1, SF_RD_STATUS)) != 0 || in flash_wait_op()
1010 (ret = sf1_read(adapter, 1, 0, &status)) != 0) in flash_wait_op()
1013 return 0; in flash_wait_op()
1014 if (--attempts == 0) in flash_wait_op()
1044 if ((ret = sf1_write(adapter, 4, 1, addr)) != 0 || in t3_read_flash()
1045 (ret = sf1_read(adapter, 1, 1, data)) != 0) in t3_read_flash()
1055 return 0; in t3_read_flash()
1078 unsigned int c, left, val, offset = addr & 0xff; in t3_write_flash()
1085 if ((ret = sf1_write(adapter, 1, 0, SF_WR_ENABLE)) != 0 || in t3_write_flash()
1086 (ret = sf1_write(adapter, 4, 1, val)) != 0) in t3_write_flash()
1100 if ((ret = flash_wait_op(adapter, 5, 1)) != 0) in t3_write_flash()
1104 ret = t3_read_flash(adapter, addr & ~0xff, ARRAY_SIZE(buf), buf, in t3_write_flash()
1111 return 0; in t3_write_flash()
1126 t3_write_reg(adapter, A_TP_EMBED_OP_FIELD0, 0); in t3_get_tp_version()
1134 return 0; in t3_get_tp_version()
1149 return 0; in t3_check_tpsram_version()
1162 return 0; in t3_check_tpsram_version()
1179 * Returns 0 if the versions are compatible, a negative error otherwise.
1188 for (csum = 0, i = 0; i < size / sizeof(csum); i++) in t3_check_tpsram()
1190 if (csum != 0xffffffff) { in t3_check_tpsram()
1196 return 0; in t3_check_tpsram()
1215 int ret = t3_read_flash(adapter, FW_VERS_ADDR, 1, vers, 0); in t3_get_fw_version()
1216 if (!ret && *vers != 0xffffffff) in t3_get_fw_version()
1217 return 0; in t3_get_fw_version()
1219 return t3_read_flash(adapter, FW_VERS_ADDR_PRE8, 1, vers, 0); in t3_get_fw_version()
1226 * Checks if an adapter's FW is compatible with the driver. Returns 0
1245 return 0; in t3_check_fw_version()
1255 return 0; in t3_check_fw_version()
1273 if ((ret = sf1_write(adapter, 1, 0, SF_WR_ENABLE)) != 0 || in t3_flash_erase_sectors()
1274 (ret = sf1_write(adapter, 4, 0, in t3_flash_erase_sectors()
1275 SF_ERASE_SECTOR | (start << 8))) != 0 || in t3_flash_erase_sectors()
1276 (ret = flash_wait_op(adapter, 5, 500)) != 0) in t3_flash_erase_sectors()
1280 return 0; in t3_flash_erase_sectors()
1316 for (csum = 0, i = 0; i < size / sizeof(csum); i++) in t3_load_fw()
1318 if (csum != 0xffffffff) { in t3_load_fw()
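t3_check_tpsram() (file lines 1188-1196) and t3_load_fw() (file lines 1316-1318) apply the same integrity rule before accepting an image: the image's 32-bit words, trailing checksum word included, must sum to 0xffffffff. A standalone sketch of that check, assuming the words have already been converted to host byte order:

#include <stdint.h>
#include <stddef.h>

/* Returns 1 if the 32-bit words of the image (checksum word included)
 * sum to 0xffffffff modulo 2^32, as in the fragments above. */
static int image_checksum_ok(const uint32_t *words, size_t size_bytes)
{
        uint32_t csum = 0;
        size_t i;

        for (i = 0; i < size_bytes / sizeof(uint32_t); i++)
                csum += words[i];

        return csum == 0xffffffffu;
}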
1391 ret = t3_write_flash(adapter, addr, chunk_size, boot_data, 0); in t3_load_boot()
1406 #define CIM_CTL_BASE 0x2000
1420 int ret = 0; in t3_cim_ctl_blk_read()
1428 0, 5, 2); in t3_cim_ctl_blk_read()
1449 t3_write_reg(mac->adapter, A_XGM_RX_HASH_HIGH + mac->offset, 0); in t3_gate_rx_traffic()
1453 t3_write_reg(mac->adapter, A_XGM_RX_HASH_LOW + mac->offset, 0); in t3_gate_rx_traffic()
1481 t3_write_reg(adapter, A_XGM_RX_CTRL + mac->offset, 0); in t3_detect_link_fault()
1492 return (link_fault & F_LINKFAULTCHANGE ? 1 : 0); in t3_detect_link_fault()
1531 link_fault = 0; in t3_link_changed()
1536 phy->rst = 0; in t3_link_changed()
1538 phy->ops->reset(phy, 0); in t3_link_changed()
1539 phy->rst = 0; in t3_link_changed()
1542 if (link_ok == 0) in t3_link_changed()
1551 if (link_ok && speed >= 0 && lc->autoneg == AUTONEG_ENABLE && in t3_link_changed()
1578 link_ok = 0; in t3_link_changed()
1594 lc->speed = speed < 0 ? SPEED_INVALID : speed; in t3_link_changed()
1595 lc->duplex = duplex < 0 ? DUPLEX_INVALID : duplex; in t3_link_changed()
1602 if (adapter->params.rev > 0 && uses_xaui(adapter)) { in t3_link_changed()
1615 F_ENDROPPKT, 0); in t3_link_changed()
1626 if (adapter->params.rev > 0 && uses_xaui(adapter)) { in t3_link_changed()
1628 A_XGM_XAUI_ACT_CTRL + mac->offset, 0); in t3_link_changed()
1635 F_XGM_INT, 0); in t3_link_changed()
1644 A_XGM_TXFIFO_CFG + mac->offset, 0, F_ENDROPPKT); in t3_link_changed()
1645 t3_write_reg(adapter, A_XGM_RX_CTRL + mac->offset, 0); in t3_link_changed()
1655 mac->was_reset = 0; in t3_link_changed()
1675 lc->link_ok = 0; in t3_link_start()
1696 phy->ops->power_down(phy, 0); in t3_link_start()
1702 phy->ops->reset(phy, 0); in t3_link_start()
1704 return 0; in t3_link_start()
1711 * @on: enable (1) or disable (0) HW VLAN extraction
1719 on ? (ports << S_VLANEXTRACTIONENABLE) : 0); in t3_set_vlan_accel()
1742 * specifying mask 0. Returns the number of fatal interrupt conditions.
1749 int fatal = 0; in t3_handle_intr_status()
1756 CH_ALERT(adapter, "%s (0x%x)\n", in t3_handle_intr_status()
1760 CH_WARN(adapter, "%s (0x%x)\n", in t3_handle_intr_status()
1762 if (acts->stat_idx >= 0) in t3_handle_intr_status()
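t3_handle_intr_status() (file lines 1742-1762) walks a { 0 }-terminated table of cause-bit descriptors, logging fatal bits with CH_ALERT and the rest with CH_WARN, and returns the number of fatal conditions; the per-block handlers below (pci_intr_handler(), tp_intr_handler(), ulptx_intr_handler(), ...) each feed it such a table. A sketch of a hypothetical handler in the same shape; the descriptor field order (mask, message, statistics index, fatal flag) and the trailing statistics argument are inferred from the entries visible in this listing, and the F_EXAMPLE_*/STAT_EXAMPLE_*/A_EXAMPLE_* names are placeholders, not real driver symbols:

static void example_intr_handler(struct adapter *adapter)
{
        static const struct intr_info example_intr_info[] = {
                { F_EXAMPLE_PARERR, "example parity error", -1, 1 },
                { F_EXAMPLE_FIFO_OVFL, "example FIFO overflow",
                  STAT_EXAMPLE_OVFL, 0 },
                { 0 }                           /* table terminator */
        };

        if (t3_handle_intr_status(adapter, A_EXAMPLE_INT_CAUSE, 0xffffffff,
                                  example_intr_info, adapter->irq_stats))
                t3_fatal_err(adapter);          /* at least one fatal cause */
}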
1798 #define ULPTX_INTR_MASK 0xfc
1842 STAT_PCI_CORR_ECC, 0 }, in pci_intr_handler()
1853 { 0 } in pci_intr_handler()
1883 { 0 } in pcie_intr_handler()
1887 CH_ALERT(adapter, "PEX error code 0x%x\n", in pcie_intr_handler()
1901 { 0xffffff, "TP parity error", -1, 1 }, in tp_intr_handler()
1902 { 0x1000000, "TP out of Rx pages", -1, 1 }, in tp_intr_handler()
1903 { 0x2000000, "TP out of Tx pages", -1, 1 }, in tp_intr_handler()
1904 { 0 } in tp_intr_handler()
1907 { 0x1fffffff, "TP parity error", -1, 1 }, in tp_intr_handler()
1910 { 0 } in tp_intr_handler()
1913 if (t3_handle_intr_status(adapter, A_TP_INT_CAUSE, 0xffffffff, in tp_intr_handler()
1949 { 0 } in cim_intr_handler()
1971 { 0 } in ulprx_intr_handler()
1974 if (t3_handle_intr_status(adapter, A_ULPRX_INT_CAUSE, 0xffffffff, in ulprx_intr_handler()
1985 { F_PBL_BOUND_ERR_CH0, "ULP TX channel 0 PBL out of bounds", in ulptx_intr_handler()
1986 STAT_ULP_CH0_PBL_OOB, 0 }, in ulptx_intr_handler()
1988 STAT_ULP_CH1_PBL_OOB, 0 }, in ulptx_intr_handler()
1989 { 0xfc, "ULP TX parity error", -1, 1 }, in ulptx_intr_handler()
1990 { 0 } in ulptx_intr_handler()
1993 if (t3_handle_intr_status(adapter, A_ULPTX_INT_CAUSE, 0xffffffff, in ulptx_intr_handler()
2013 { F_ZERO_C_CMD_ERROR, "PMTX 0-length pcmd", -1, 1 }, in pmtx_intr_handler()
2020 { 0 } in pmtx_intr_handler()
2023 if (t3_handle_intr_status(adapter, A_PM1_TX_INT_CAUSE, 0xffffffff, in pmtx_intr_handler()
2043 { F_ZERO_E_CMD_ERROR, "PMRX 0-length pcmd", -1, 1 }, in pmrx_intr_handler()
2050 { 0 } in pmrx_intr_handler()
2053 if (t3_handle_intr_status(adapter, A_PM1_RX_INT_CAUSE, 0xffffffff, in pmrx_intr_handler()
2070 { 0 } in cplsw_intr_handler()
2073 if (t3_handle_intr_status(adapter, A_CPL_INTR_CAUSE, 0xffffffff, in cplsw_intr_handler()
2084 { 0x1ff, "MPS parity error", -1, 1 }, in mps_intr_handler()
2085 { 0 } in mps_intr_handler()
2088 if (t3_handle_intr_status(adapter, A_MPS_INT_CAUSE, 0xffffffff, in mps_intr_handler()
2105 CH_WARN(adapter, "%s MC7 correctable error at addr 0x%x, " in mc7_intr_handler()
2106 "data 0x%x 0x%x 0x%x\n", mc7->name, in mc7_intr_handler()
2115 CH_ALERT(adapter, "%s MC7 uncorrectable error at addr 0x%x, " in mc7_intr_handler()
2116 "data 0x%x 0x%x 0x%x\n", mc7->name, in mc7_intr_handler()
2125 CH_ALERT(adapter, "%s MC7 parity error 0x%x\n", in mc7_intr_handler()
2130 u32 addr = 0; in mc7_intr_handler()
2132 if (adapter->params.rev > 0) in mc7_intr_handler()
2136 CH_ALERT(adapter, "%s MC7 address error: 0x%x\n", in mc7_intr_handler()
2157 idx = idx == 0 ? 0 : adapter_info(adap)->nports0; /* MAC idx -> port */ in mac_intr_handler()
2191 F_XGM_INT, 0); in mac_intr_handler()
2202 return cause != 0; in mac_intr_handler()
2235 return 0; in phy_intr_handler()
2252 return 0; in t3_slow_intr_handler()
2286 mac_intr_handler(adapter, 0); in t3_slow_intr_handler()
2300 unsigned int i, gpi_intr = 0; in calc_gpio_intr()
2335 t3_write_regs(adapter, intr_en_avp, ARRAY_SIZE(intr_en_avp), 0); in t3_intr_enable()
2337 adapter->params.rev >= T3_REV_C ? 0x2bfffff : 0x3bfffff); in t3_intr_enable()
2340 if (adapter->params.rev > 0) { in t3_intr_enable()
2370 t3_write_reg(adapter, A_PL_INT_ENABLE0, 0); in t3_intr_disable()
2372 adapter->slow_intr_mask = 0; in t3_intr_disable()
2407 for (i = 0; i < ARRAY_SIZE(cause_reg_addr); ++i) in t3_intr_clear()
2408 t3_write_reg(adapter, cause_reg_addr[i], 0xffffffff); in t3_intr_clear()
2411 t3_write_reg(adapter, A_PCIE_PEX_ERR, 0xffffffff); in t3_intr_clear()
2412 t3_write_reg(adapter, A_PL_INT_CAUSE0, 0xffffffff); in t3_intr_clear()
2429 0x7ff); in t3_xgm_intr_disable()
2460 t3_write_reg(adapter, A_XGM_INT_ENABLE + pi->mac.offset, 0); in t3_port_intr_disable()
2476 t3_write_reg(adapter, A_XGM_INT_CAUSE + pi->mac.offset, 0xffffffff); in t3_port_intr_clear()
2501 t3_write_reg(adapter, A_SG_CONTEXT_MASK0, 0xffffffff); in t3_sge_write_context()
2502 t3_write_reg(adapter, A_SG_CONTEXT_MASK1, 0xffffffff); in t3_sge_write_context()
2503 t3_write_reg(adapter, A_SG_CONTEXT_MASK2, 0x17ffffff); in t3_sge_write_context()
2504 t3_write_reg(adapter, A_SG_CONTEXT_MASK3, 0xffffffff); in t3_sge_write_context()
2506 t3_write_reg(adapter, A_SG_CONTEXT_MASK0, 0xffffffff); in t3_sge_write_context()
2507 t3_write_reg(adapter, A_SG_CONTEXT_MASK1, 0xffffffff); in t3_sge_write_context()
2508 t3_write_reg(adapter, A_SG_CONTEXT_MASK2, 0xffffffff); in t3_sge_write_context()
2509 t3_write_reg(adapter, A_SG_CONTEXT_MASK3, 0xffffffff); in t3_sge_write_context()
2514 0, SG_CONTEXT_CMD_ATTEMPTS, 1); in t3_sge_write_context()
2530 t3_write_reg(adap, A_SG_CONTEXT_DATA0, 0); in clear_sge_ctxt()
2531 t3_write_reg(adap, A_SG_CONTEXT_DATA1, 0); in clear_sge_ctxt()
2532 t3_write_reg(adap, A_SG_CONTEXT_DATA2, 0); in clear_sge_ctxt()
2533 t3_write_reg(adap, A_SG_CONTEXT_DATA3, 0); in clear_sge_ctxt()
2534 t3_write_reg(adap, A_SG_CONTEXT_MASK0, 0xffffffff); in clear_sge_ctxt()
2535 t3_write_reg(adap, A_SG_CONTEXT_MASK1, 0xffffffff); in clear_sge_ctxt()
2536 t3_write_reg(adap, A_SG_CONTEXT_MASK2, 0xffffffff); in clear_sge_ctxt()
2537 t3_write_reg(adap, A_SG_CONTEXT_MASK3, 0xffffffff); in clear_sge_ctxt()
2541 0, SG_CONTEXT_CMD_ATTEMPTS, 1); in clear_sge_ctxt()
2566 unsigned int credits = type == SGE_CNTXT_OFLD ? 0 : FW_WR_NUM; in t3_sge_init_ecntxt()
2568 if (base_addr & 0xfff) /* must be 4K aligned */ in t3_sge_init_ecntxt()
2577 V_EC_BASE_LO((u32)base_addr & 0xffff)); in t3_sge_init_ecntxt()
2582 V_EC_BASE_HI((u32)base_addr & 0xf) | V_EC_RESPQ(respq) | in t3_sge_init_ecntxt()
2608 if (base_addr & 0xfff) /* must be 4K aligned */ in t3_sge_init_flcntxt()
2632 * @irq_vec_idx: MSI-X interrupt vector index, 0 if no MSI-X, -1 if no IRQ
2647 unsigned int ctrl, intr = 0; in t3_sge_init_rspcntxt()
2649 if (base_addr & 0xfff) /* must be 4K aligned */ in t3_sge_init_rspcntxt()
2660 if ((irq_vec_idx > 0) || in t3_sge_init_rspcntxt()
2661 ((irq_vec_idx == 0) && !(ctrl & F_ONEINTMULTQ))) in t3_sge_init_rspcntxt()
2663 if (irq_vec_idx >= 0) in t3_sge_init_rspcntxt()
2690 if (base_addr & 0xfff) /* must be 4K aligned */ in t3_sge_init_cqcntxt()
2712 * @enable: enable (1) or disable (0) the context
2722 t3_write_reg(adapter, A_SG_CONTEXT_MASK0, 0); in t3_sge_enable_ecntxt()
2723 t3_write_reg(adapter, A_SG_CONTEXT_MASK1, 0); in t3_sge_enable_ecntxt()
2724 t3_write_reg(adapter, A_SG_CONTEXT_MASK2, 0); in t3_sge_enable_ecntxt()
2730 0, SG_CONTEXT_CMD_ATTEMPTS, 1); in t3_sge_enable_ecntxt()
2746 t3_write_reg(adapter, A_SG_CONTEXT_MASK0, 0); in t3_sge_disable_fl()
2747 t3_write_reg(adapter, A_SG_CONTEXT_MASK1, 0); in t3_sge_disable_fl()
2749 t3_write_reg(adapter, A_SG_CONTEXT_MASK3, 0); in t3_sge_disable_fl()
2750 t3_write_reg(adapter, A_SG_CONTEXT_DATA2, 0); in t3_sge_disable_fl()
2754 0, SG_CONTEXT_CMD_ATTEMPTS, 1); in t3_sge_disable_fl()
2771 t3_write_reg(adapter, A_SG_CONTEXT_MASK1, 0); in t3_sge_disable_rspcntxt()
2772 t3_write_reg(adapter, A_SG_CONTEXT_MASK2, 0); in t3_sge_disable_rspcntxt()
2773 t3_write_reg(adapter, A_SG_CONTEXT_MASK3, 0); in t3_sge_disable_rspcntxt()
2774 t3_write_reg(adapter, A_SG_CONTEXT_DATA0, 0); in t3_sge_disable_rspcntxt()
2778 0, SG_CONTEXT_CMD_ATTEMPTS, 1); in t3_sge_disable_rspcntxt()
2795 t3_write_reg(adapter, A_SG_CONTEXT_MASK1, 0); in t3_sge_disable_cqcntxt()
2796 t3_write_reg(adapter, A_SG_CONTEXT_MASK2, 0); in t3_sge_disable_cqcntxt()
2797 t3_write_reg(adapter, A_SG_CONTEXT_MASK3, 0); in t3_sge_disable_cqcntxt()
2798 t3_write_reg(adapter, A_SG_CONTEXT_DATA0, 0); in t3_sge_disable_cqcntxt()
2802 0, SG_CONTEXT_CMD_ATTEMPTS, 1); in t3_sge_disable_cqcntxt()
2831 0, SG_CONTEXT_CMD_ATTEMPTS, 1, &val)) in t3_sge_cqcntxt_op()
2835 if (adapter->params.rev > 0) in t3_sge_cqcntxt_op()
2839 V_CONTEXT_CMD_OPCODE(0) | F_CQ | V_CONTEXT(id)); in t3_sge_cqcntxt_op()
2841 F_CONTEXT_CMD_BUSY, 0, in t3_sge_cqcntxt_op()
2846 return 0; in t3_sge_cqcntxt_op()
2866 V_CONTEXT_CMD_OPCODE(0) | type | V_CONTEXT(id)); in t3_sge_read_context()
2867 if (t3_wait_op_done(adapter, A_SG_CONTEXT_CMD, F_CONTEXT_CMD_BUSY, 0, in t3_sge_read_context()
2870 data[0] = t3_read_reg(adapter, A_SG_CONTEXT_DATA0); in t3_sge_read_context()
2874 return 0; in t3_sge_read_context()
2945 * @cpus: values for the CPU lookup table (0xff terminated)
2946 * @rspq: values for the response queue lookup table (0xffff terminated)
2956 int i, j, cpu_idx = 0, q_idx = 0; in t3_config_rss()
2959 for (i = 0; i < RSS_TABLE_SIZE; ++i) { in t3_config_rss()
2962 for (j = 0; j < 2; ++j) { in t3_config_rss()
2963 val |= (cpus[cpu_idx++] & 0x3f) << (8 * j); in t3_config_rss()
2964 if (cpus[cpu_idx] == 0xff) in t3_config_rss()
2965 cpu_idx = 0; in t3_config_rss()
2971 for (i = 0; i < RSS_TABLE_SIZE; ++i) { in t3_config_rss()
2974 if (rspq[q_idx] == 0xffff) in t3_config_rss()
2975 q_idx = 0; in t3_config_rss()
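The t3_config_rss() fragments above (file lines 2945-2975) fill the RSS CPU and response-queue lookup tables from caller-supplied arrays, masking each CPU entry to 6 bits and wrapping whenever the 0xff / 0xffff terminators are hit. A minimal caller sketch, assuming the driver's SGE_QSETS constant and the t3_config_rss() prototype are in scope and leaving the RSS mode bits to the caller:

/* Build 0xff/0xffff-terminated lookup tables covering nqsets queue sets
 * (nqsets <= SGE_QSETS assumed) and hand them to t3_config_rss(). */
static void example_setup_rss(struct adapter *adap, unsigned int rss_mode,
                              unsigned int nqsets)
{
        u8 cpus[SGE_QSETS + 1];
        u16 rspq[SGE_QSETS + 1];
        unsigned int i;

        for (i = 0; i < nqsets; i++) {
                cpus[i] = i;            /* CPU lookup entry (low 6 bits used) */
                rspq[i] = i;            /* response-queue lookup entry */
        }
        cpus[nqsets] = 0xff;            /* terminator tested at file line 2964 */
        rspq[nqsets] = 0xffff;          /* terminator tested at file line 2974 */

        t3_config_rss(adap, rss_mode, cpus, rspq);
}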
2995 for (i = 0; i < RSS_TABLE_SIZE; ++i) { in t3_read_rss()
2997 0xffff0000 | i); in t3_read_rss()
2999 if (!(val & 0x80000000)) in t3_read_rss()
3006 for (i = 0; i < RSS_TABLE_SIZE; ++i) { in t3_read_rss()
3008 0xffff0000 | i); in t3_read_rss()
3010 if (!(val & 0x80000000)) in t3_read_rss()
3014 return 0; in t3_read_rss()
3020 * @enable: 1 to select offload mode, 0 for regular NIC
3056 t3_set_reg_field(adap, A_TP_IN_CONFIG, F_NICMODE, 0); in t3_enable_filters()
3057 t3_set_reg_field(adap, A_MC5_DB_CONFIG, 0, F_FILTEREN); in t3_enable_filters()
3058 t3_set_reg_field(adap, A_TP_GLOBAL_CONFIG, 0, V_FIVETUPLELOOKUP(3)); in t3_enable_filters()
3059 tp_wr_bits_indirect(adap, A_TP_INGRESS_CONFIG, 0, F_LOOKUPEVERYPKT); in t3_enable_filters()
3071 t3_set_reg_field(adap, A_MC5_DB_CONFIG, F_FILTEREN, 0); in t3_disable_filters()
3073 V_FIVETUPLELOOKUP(M_FIVETUPLELOOKUP), 0); in t3_disable_filters()
3074 tp_wr_bits_indirect(adap, A_TP_INGRESS_CONFIG, F_LOOKUPEVERYPKT, 0); in t3_disable_filters()
3109 unsigned int timers = 0, timers_shift = 22; in partition_mem()
3111 if (adap->params.rev > 0) { in partition_mem()
3127 t3_write_reg(adap, A_TP_PMM_TX_BASE, 0); in partition_mem()
3133 t3_write_reg(adap, A_TP_PMM_RX_BASE, 0); in partition_mem()
3153 m = (m + 4095) & ~0xfff; in partition_mem()
3185 V_AUTOSTATE2(1) | V_AUTOSTATE1(0) | in tp_config()
3190 t3_write_reg(adap, A_TP_TX_RESOURCE_LIMIT, 0x18141814); in tp_config()
3191 t3_write_reg(adap, A_TP_PARA_REG4, 0x5050105); in tp_config()
3192 t3_set_reg_field(adap, A_TP_PARA_REG6, 0, in tp_config()
3193 adap->params.rev > 0 ? F_ENABLEESND : in tp_config()
3205 if (adap->params.rev > 0) { in tp_config()
3207 t3_set_reg_field(adap, A_TP_PARA_REG3, 0, in tp_config()
3210 tp_wr_indirect(adap, A_TP_VLAN_PRI_MAP, 0xfa50); in tp_config()
3211 tp_wr_indirect(adap, A_TP_MAC_MATCH_MAP0, 0xfac688); in tp_config()
3212 tp_wr_indirect(adap, A_TP_MAC_MATCH_MAP1, 0xfac688); in tp_config()
3214 t3_set_reg_field(adap, A_TP_PARA_REG3, 0, F_TXPACEFIXED); in tp_config()
3221 t3_write_reg(adap, A_TP_TX_MOD_QUEUE_WEIGHT1, 0); in tp_config()
3222 t3_write_reg(adap, A_TP_TX_MOD_QUEUE_WEIGHT0, 0); in tp_config()
3223 t3_write_reg(adap, A_TP_MOD_CHANNEL_WEIGHT, 0); in tp_config()
3224 t3_write_reg(adap, A_TP_MOD_RATE_LIMIT, 0xf2200000); in tp_config()
3227 t3_set_reg_field(adap, A_TP_PC_CONFIG2, 0, in tp_config()
3231 V_RXMAPMODE(M_RXMAPMODE), 0); in tp_config()
3236 tp_wr_indirect(adap, A_TP_PREAMBLE_MSB, 0xfb000000); in tp_config()
3237 tp_wr_indirect(adap, A_TP_PREAMBLE_LSB, 0xd5); in tp_config()
3266 t3_write_reg(adap, A_TP_TCP_BACKOFF_REG0, 0x3020100); in tp_set_timers()
3267 t3_write_reg(adap, A_TP_TCP_BACKOFF_REG1, 0x7060504); in tp_set_timers()
3268 t3_write_reg(adap, A_TP_TCP_BACKOFF_REG2, 0xb0a0908); in tp_set_timers()
3269 t3_write_reg(adap, A_TP_TCP_BACKOFF_REG3, 0xf0e0d0c); in tp_set_timers()
3278 adap->params.rev > 0 ? 0 : 2 SECONDS); in tp_set_timers()
3318 return 0; in t3_tp_set_coalescing_size()
3342 mtus[0] = 88; in init_mtus()
3369 a[0] = a[1] = a[2] = a[3] = a[4] = a[5] = a[6] = a[7] = a[8] = 1; in init_cong_ctrl()
3394 b[0] = b[1] = b[2] = b[3] = b[4] = b[5] = b[6] = b[7] = b[8] = 0; in init_cong_ctrl()
3430 for (i = 0; i < NMTUS; ++i) { in t3_load_mtus()
3439 for (w = 0; w < NCCTRL_WIN; ++w) { in t3_load_mtus()
3462 for (i = 0; i < NMTUS; ++i) { in t3_read_hw_mtus()
3465 t3_write_reg(adap, A_TP_MTU_TABLE, 0xff000000 | i); in t3_read_hw_mtus()
3467 mtus[i] = val & 0x3fff; in t3_read_hw_mtus()
3484 for (mtu = 0; mtu < NMTUS; ++mtu) in t3_get_cong_cntl_tab()
3485 for (w = 0; w < NCCTRL_WIN; ++w) { in t3_get_cong_cntl_tab()
3487 0xffff0000 | (mtu << 5) | w); in t3_get_cong_cntl_tab()
3489 A_TP_CCTRL_TABLE) & 0x1fff; in t3_get_cong_cntl_tab()
3503 sizeof(*tps) / sizeof(u32), 0); in t3_tp_get_mib_stats()
3517 for (i = 0; i < NTX_SCHED; i++) { in t3_read_pace_tbl()
3518 t3_write_reg(adap, A_TP_PACE_TABLE, 0xffff0000 + i); in t3_read_pace_tbl()
3564 t3_write_reg(adap, A_ULPRX_TDDP_TAGMASK, 0xffffffff); in ulp_config()
3580 for (i = 0; i < PROTO_SRAM_LINES; i++) { in t3_set_proto_sram()
3591 return 0; in t3_set_proto_sram()
3609 key[0] = tp->sport | (tp->sip << 16); in t3_config_trace_filter()
3614 mask[0] = tp->sport_mask | (tp->sip_mask << 16); in t3_config_trace_filter()
3625 tp_wr_indirect(adapter, addr++, key[0]); in t3_config_trace_filter()
3626 tp_wr_indirect(adapter, addr++, mask[0]); in t3_config_trace_filter()
3652 key[0] = tp_rd_indirect(adapter, addr++); in t3_query_trace_filter()
3653 mask[0] = tp_rd_indirect(adapter, addr++); in t3_query_trace_filter()
3661 tp->sport = key[0] & 0xffff; in t3_query_trace_filter()
3662 tp->sip = (key[0] >> 16) | ((key[1] & 0xffff) << 16); in t3_query_trace_filter()
3665 tp->proto = key[3] & 0xff; in t3_query_trace_filter()
3669 tp->sport_mask = mask[0] & 0xffff; in t3_query_trace_filter()
3670 tp->sip_mask = (mask[0] >> 16) | ((mask[1] & 0xffff) << 16); in t3_query_trace_filter()
3673 tp->proto_mask = mask[3] & 0xff; in t3_query_trace_filter()
3691 unsigned int v, tps, cpt, bpt, delta, mindelta = ~0; in t3_config_sched()
3693 unsigned int selected_cpt = 0, selected_bpt = 0; in t3_config_sched()
3695 if (kbps > 0) { in t3_config_sched()
3700 if (bpt > 0 && bpt <= 255) { in t3_config_sched()
3718 v = (v & 0xffff) | (selected_cpt << 16) | (selected_bpt << 24); in t3_config_sched()
3720 v = (v & 0xffff0000) | selected_cpt | (selected_bpt << 8); in t3_config_sched()
3722 return 0; in t3_config_sched()
3740 if (ipg > 0xffff) in t3_set_sched_ipg()
3746 v = (v & 0xffff) | (ipg << 16); in t3_set_sched_ipg()
3748 v = (v & 0xffff0000) | ipg; in t3_set_sched_ipg()
3751 return 0; in t3_set_sched_ipg()
3774 bpt = (v >> 8) & 0xff; in t3_get_tx_sched()
3775 cpt = v & 0xff; in t3_get_tx_sched()
3777 *kbps = 0; /* scheduler disabled */ in t3_get_tx_sched()
3789 v &= 0xffff; in t3_get_tx_sched()
3803 int busy = 0; in tp_init()
3806 t3_set_vlan_accel(adap, 3, 0); in tp_init()
3812 0, 1000, 5); in tp_init()
3835 return 0; in t3_mps_set_active_ports()
3851 t3_set_reg_field(adap, A_ULPRX_CTL, F_ROUND_ROBIN, 0); in chan_init_hw()
3852 t3_set_reg_field(adap, A_ULPTX_CONFIG, F_CFG_RR_ARB, 0); in chan_init_hw()
3857 chan_map == 1 ? 0xffffffff : 0); in chan_init_hw()
3860 V_TX_MOD_QUEUE_REQ_MAP(0xff)); in chan_init_hw()
3861 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE, (12 << 16) | 0xd9c8); in chan_init_hw()
3862 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE, (13 << 16) | 0xfbea); in chan_init_hw()
3864 t3_set_reg_field(adap, A_ULPRX_CTL, 0, F_ROUND_ROBIN); in chan_init_hw()
3865 t3_set_reg_field(adap, A_ULPTX_CONFIG, 0, F_CFG_RR_ARB); in chan_init_hw()
3871 t3_write_reg(adap, A_PM1_TX_CFG, 0x80008000); in chan_init_hw()
3872 t3_set_reg_field(adap, A_TP_PC_CONFIG, 0, F_TXTOSQUEUEMAPMODE); in chan_init_hw()
3874 V_TX_MOD_QUEUE_REQ_MAP(0xaa)); in chan_init_hw()
3875 for (i = 0; i < 16; i++) in chan_init_hw()
3877 (i << 16) | 0x1010); in chan_init_hw()
3878 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE, (12 << 16) | 0xba98); in chan_init_hw()
3879 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE, (13 << 16) | 0xfedc); in chan_init_hw()
3888 for (i = 0; i < 5; ++i) { in calibrate_xgm()
3889 t3_write_reg(adapter, A_XGM_XAUI_IMP, 0); in calibrate_xgm()
3896 return 0; in calibrate_xgm()
3907 return 0; in calibrate_xgm()
3915 t3_set_reg_field(adapter, A_XGM_RGMII_IMP, F_CALRESET, 0); in calibrate_xgm_t3b()
3916 t3_set_reg_field(adapter, A_XGM_RGMII_IMP, 0, in calibrate_xgm_t3b()
3919 0); in calibrate_xgm_t3b()
3920 t3_set_reg_field(adapter, A_XGM_RGMII_IMP, F_CALUPDATE, 0); in calibrate_xgm_t3b()
3921 t3_set_reg_field(adapter, A_XGM_RGMII_IMP, 0, F_CALUPDATE); in calibrate_xgm_t3b()
3945 return 0; in wrreg_wait()
3946 CH_ERR(adapter, "write to MC7 register 0x%x timed out\n", addr); in wrreg_wait()
3953 0x632, 0x642, 0x652, 0x432, 0x442 in mc7_init()
3956 { 12, 3, 4, { 20, 28, 34, 52, 0 }, 15, 6, 4 }, in mc7_init()
3957 { 12, 4, 5, { 20, 28, 34, 52, 0 }, 16, 7, 4 }, in mc7_init()
3958 { 12, 5, 6, { 20, 28, 34, 52, 0 }, 17, 8, 4 }, in mc7_init()
3959 { 9, 3, 4, { 15, 21, 26, 39, 0 }, 12, 6, 4 }, in mc7_init()
3960 { 9, 4, 5, { 15, 21, 26, 39, 0 }, 13, 7, 4 } in mc7_init()
3969 return 0; in mc7_init()
4008 if (wrreg_wait(adapter, mc7->offset + A_MC7_PRE, 0) || in mc7_init()
4009 wrreg_wait(adapter, mc7->offset + A_MC7_EXT_MODE2, 0) || in mc7_init()
4010 wrreg_wait(adapter, mc7->offset + A_MC7_EXT_MODE3, 0) || in mc7_init()
4015 t3_write_reg(adapter, mc7->offset + A_MC7_MODE, 0x100); in mc7_init()
4017 F_DLLRST, 0); in mc7_init()
4021 if (wrreg_wait(adapter, mc7->offset + A_MC7_PRE, 0) || in mc7_init()
4022 wrreg_wait(adapter, mc7->offset + A_MC7_REF, 0) || in mc7_init()
4023 wrreg_wait(adapter, mc7->offset + A_MC7_REF, 0) || in mc7_init()
4026 wrreg_wait(adapter, mc7->offset + A_MC7_EXT_MODE1, val | 0x380) || in mc7_init()
4040 t3_write_reg(adapter, mc7->offset + A_MC7_BIST_DATA, 0); in mc7_init()
4041 t3_write_reg(adapter, mc7->offset + A_MC7_BIST_ADDR_BEG, 0); in mc7_init()
4058 t3_set_reg_field(adapter, mc7->offset + A_MC7_CFG, 0, F_RDY); in mc7_init()
4059 return 0; in mc7_init()
4093 t3_os_pci_read_config_2(adap, 0x2, &devid); in config_pcie()
4094 if (devid == 0x37) { in config_pcie()
4098 pldsize = 0; in config_pcie()
4106 fst_trn_rx = adap->params.rev == 0 ? fst_trn_tx : in config_pcie()
4114 if (adap->params.rev == 0) in config_pcie()
4125 t3_write_reg(adap, A_PCIE_PEX_ERR, 0xffffffff); in config_pcie()
4126 t3_set_reg_field(adap, A_PCIE_CFG, 0, in config_pcie()
4142 * Only the top 8 bits are available for use, the rest must be 0.
4149 if (adapter->params.rev > 0) in t3_init_hw()
4155 t3_mac_init(&adap2pinfo(adapter, 0)->mac); in t3_init_hw()
4168 for (i = 0; i < 32; i++) in t3_init_hw()
4185 t3_set_reg_field(adapter, A_PCIX_CFG, 0, in t3_init_hw()
4189 t3_set_reg_field(adapter, A_ULPTX_CONFIG, 0, in t3_init_hw()
4192 t3_write_reg(adapter, A_PM1_RX_CFG, 0xffffffff); in t3_init_hw()
4193 t3_write_reg(adapter, A_PM1_RX_MODE, 0); in t3_init_hw()
4194 t3_write_reg(adapter, A_PM1_TX_MODE, 0); in t3_init_hw()
4197 t3_set_reg_field(adapter, A_PL_RST, 0, F_FATALPERREN); in t3_init_hw()
4215 err = 0; in t3_init_hw()
4241 p->width = (val >> 4) & 0x3f; in get_pci_mode()
4249 if (pci_mode == 0) in get_pci_mode()
4280 lc->advertising = 0; in init_link_config()
4312 mc7->size = G_DEN(cfg) == M_DEN ? 0 : mc7_calc_size(cfg); in mc7_prep()
4331 t3_os_pci_read_config_2(adapter, 0x2, &devid); in mac_prep()
4334 (!adapter->params.vpd.xauicfg[1] && (devid==0x37))) in mac_prep()
4335 index = 0; in mac_prep()
4339 if (adapter->params.rev == 0 && uses_xaui(adapter)) { in mac_prep()
4341 is_10G(adapter) ? 0x2901c04 : 0x2301c04); in mac_prep()
4343 F_ENRGMII, 0); in mac_prep()
4367 t3_write_reg(adapter, A_MC5_DB_SERVER_INDEX, 0); in early_hw_init()
4368 t3_write_reg(adapter, A_SG_OCO_BASE, V_BASE1(0xfff)); in early_hw_init()
4370 if (adapter->params.rev == 0 || !uses_xaui(adapter)) in early_hw_init()
4394 uint16_t devid = 0; in t3_reset_adapter()
4404 for (i = 0; i < 10; i++) { in t3_reset_adapter()
4406 t3_os_pci_read_config_2(adapter, 0x00, &devid); in t3_reset_adapter()
4407 if (devid == 0x1425) in t3_reset_adapter()
4411 if (devid != 0x1425) in t3_reset_adapter()
4416 return 0; in t3_reset_adapter()
4426 for (err = i = 0; !err && i < 16; i++) in init_parity()
4428 for (i = 0xfff0; !err && i <= 0xffff; i++) in init_parity()
4430 for (i = 0; !err && i < SGE_QSETS; i++) in init_parity()
4435 t3_write_reg(adap, A_CIM_IBQ_DBG_DATA, 0); in init_parity()
4436 for (i = 0; i < 4; i++) in init_parity()
4437 for (addr = 0; addr <= M_IBQDBGADDR; addr++) { in init_parity()
4442 F_IBQDBGBUSY, 0, 2, 1); in init_parity()
4446 return 0; in init_parity()
4462 unsigned int i, j = 0; in t3_prep_adapter()
4490 if (ret < 0) in t3_prep_adapter()
4514 adapter->params.rev > 0 ? 12 : 6; in t3_prep_adapter()
4529 adapter->params.mc5.nfilters = 0; in t3_prep_adapter()
4530 adapter->params.mc5.nroutes = 0; in t3_prep_adapter()
4593 return 0; in t3_prep_adapter()
4608 int ret, j = 0; in t3_reinit_adapter()
4641 return 0; in t3_reinit_adapter()
4677 F_HOSTBUSY, 0, 10, 10, &v)) in t3_cim_hac_read()
4682 return 0; in t3_cim_hac_read()
4695 F_HOSTBUSY, 0, 10, 5, &v)) in t3_cim_hac_write()
4697 return 0; in t3_cim_hac_write()
4717 ret = t3_cim_hac_write(adapter, LA_CTRL, 0); in t3_get_up_la()
4722 for (i = 0; i < LA_ENTRIES; i++) { in t3_get_up_la()
4771 for (i = 0; i < 4; i++) { in t3_get_up_ioqs()
4779 for (i = 0; i < IOQ_ENTRIES; i++) { in t3_get_up_ioqs()
4780 u32 base_addr = 0x10 * (i + 1); in t3_get_up_ioqs()
4782 for (j = 0; j < 4; j++) { in t3_get_up_ioqs()