Lines Matching +full:prefetch +full:- +full:dma
16 * are provided to you under the BSD-type license terms provided
21 * - Redistributions of source code must retain the above copyright
23 * - Redistributions in binary form must reproduce the above
27 * - Neither the name of Marvell nor the names of its contributors
61 * 4. Neither the name of the author nor the names of any co-contributors
277 yge_dev_t *dev = port->p_dev; in yge_mii_readreg()
278 int pnum = port->p_port; in yge_mii_readreg()
306 yge_dev_t *dev = port->p_dev; in yge_mii_writereg()
307 int pnum = port->p_port; in yge_mii_writereg()
328 PHY_LOCK(port->p_dev); in yge_mii_read()
330 PHY_UNLOCK(port->p_dev); in yge_mii_read()
339 PHY_LOCK(port->p_dev); in yge_mii_write()
341 PHY_UNLOCK(port->p_dev); in yge_mii_write()
352 yge_dev_t *dev = port->p_dev; in yge_mii_notify()
359 fc = mii_get_flowctrl(port->p_mii); in yge_mii_notify()
360 duplex = mii_get_duplex(port->p_mii); in yge_mii_notify()
361 speed = mii_get_speed(port->p_mii); in yge_mii_notify()
368 CSR_WRITE_1(dev, MR_ADDR(port->p_port, GMAC_IRQ_MSK), in yge_mii_notify()
418 GMAC_WRITE_2(dev, port->p_port, GM_GP_CTRL, gpcr); in yge_mii_notify()
421 (void) GMAC_READ_2(dev, port->p_port, GM_GP_CTRL); in yge_mii_notify()
424 CSR_WRITE_4(dev, MR_ADDR(port->p_port, GMAC_CTRL), gmac); in yge_mii_notify()
428 gpcr = GMAC_READ_2(dev, port->p_port, GM_GP_CTRL); in yge_mii_notify()
430 GMAC_WRITE_2(dev, port->p_port, GM_GP_CTRL, gpcr); in yge_mii_notify()
433 (void) GMAC_READ_2(dev, port->p_port, GM_GP_CTRL); in yge_mii_notify()
438 mac_link_update(port->p_mh, link); in yge_mii_notify()
440 if (port->p_running && (link == LINK_STATE_UP)) { in yge_mii_notify()
441 mac_tx_update(port->p_mh); in yge_mii_notify()
454 dev = port->p_dev; in yge_setrxfilt()
455 pnum = port->p_port; in yge_setrxfilt()
456 ea = port->p_curraddr; in yge_setrxfilt()
457 mchash = port->p_mchash; in yge_setrxfilt()
459 if (dev->d_suspended) in yge_setrxfilt()
474 if (port->p_promisc) { in yge_setrxfilt()
495 port->p_rx_cons = 0; in yge_init_rx_ring()
496 port->p_rx_putwm = YGE_PUT_WM; in yge_init_rx_ring()
497 ring = &port->p_rx_ring; in yge_init_rx_ring()
504 rxb = &port->p_rx_buf[prod]; in yge_init_rx_ring()
506 PUTADDR(ring, prod, rxb->b_paddr); in yge_init_rx_ring()
507 PUTCTRL(ring, prod, port->p_framesize | OP_PACKET | HW_OWNER); in yge_init_rx_ring()
512 yge_set_prefetch(port->p_dev, port->p_rxq, ring); in yge_init_rx_ring()
514 /* Update prefetch unit. */ in yge_init_rx_ring()
515 CSR_WRITE_2(port->p_dev, in yge_init_rx_ring()
516 Y2_PREF_Q_ADDR(port->p_rxq, PREF_UNIT_PUT_IDX_REG), in yge_init_rx_ring()
517 YGE_RX_RING_CNT - 1); in yge_init_rx_ring()
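The yge_init_rx_ring() hits above show the receive ownership handshake: each descriptor gets its buffer's DMA address, is stamped with the frame size plus OP_PACKET | HW_OWNER so the chip may fill it, and then the prefetch unit's put-index register is written with YGE_RX_RING_CNT - 1 to hand all but one slot to hardware. A minimal standalone sketch of the same pattern follows; the bit values and the 16-bit register write are hypothetical stand-ins for the driver's PUTADDR/PUTCTRL macros and CSR_WRITE_2().

#include <stdint.h>
#include <stdio.h>

#define RX_RING_CNT   8            /* small ring, just for the sketch */
#define OP_PACKET     0x40000000u  /* hypothetical opcode bit */
#define HW_OWNER      0x80000000u  /* hypothetical ownership bit */

struct rxdesc { uint32_t addr; uint32_t ctrl; };

/* Stand-in for CSR_WRITE_2() on the prefetch unit's put-index register. */
static void put_idx_write(uint16_t idx) { printf("PUT_IDX <- %u\n", idx); }

static void init_rx_ring(struct rxdesc *ring, const uint32_t *buf_paddr,
    uint32_t framesize)
{
	for (int prod = 0; prod < RX_RING_CNT; prod++) {
		ring[prod].addr = buf_paddr[prod];                  /* PUTADDR */
		ring[prod].ctrl = framesize | OP_PACKET | HW_OWNER; /* PUTCTRL */
	}
	/* Hand all but one slot to the chip, as the driver does. */
	put_idx_write(RX_RING_CNT - 1);
}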
523 yge_ring_t *ring = &port->p_tx_ring; in yge_init_tx_ring()
525 port->p_tx_prod = 0; in yge_init_tx_ring()
526 port->p_tx_cons = 0; in yge_init_tx_ring()
527 port->p_tx_cnt = 0; in yge_init_tx_ring()
532 yge_set_prefetch(port->p_dev, port->p_txq, ring); in yge_init_tx_ring()
542 dev->d_ramsize = CSR_READ_1(dev, B2_E_0) * 4; in yge_setup_rambuffer()
543 if (dev->d_ramsize == 0) in yge_setup_rambuffer()
546 dev->d_pflags |= PORT_FLAG_RAMBUF; in yge_setup_rambuffer()
552 dev->d_rxqsize = (((dev->d_ramsize * 1024 * 2) / 3) & ~(1024 - 1)); in yge_setup_rambuffer()
553 dev->d_txqsize = (dev->d_ramsize * 1024) - dev->d_rxqsize; in yge_setup_rambuffer()
555 for (i = 0, next = 0; i < dev->d_num_port; i++) { in yge_setup_rambuffer()
556 dev->d_rxqstart[i] = next; in yge_setup_rambuffer()
557 dev->d_rxqend[i] = next + dev->d_rxqsize - 1; in yge_setup_rambuffer()
558 next = dev->d_rxqend[i] + 1; in yge_setup_rambuffer()
559 dev->d_txqstart[i] = next; in yge_setup_rambuffer()
560 dev->d_txqend[i] = next + dev->d_txqsize - 1; in yge_setup_rambuffer()
561 next = dev->d_txqend[i] + 1; in yge_setup_rambuffer()
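yge_setup_rambuffer() above reads the on-chip RAM size in 4 KB units from B2_E_0, assigns roughly two thirds of it (rounded down to a 1 KB boundary) to the receive queue and the remainder to transmit, then lays each port's RX and TX regions out back to back. A standalone sketch of that arithmetic, with the register read replaced by a plain parameter:

#include <stdio.h>

/* ramsize_kb corresponds to CSR_READ_1(dev, B2_E_0) * 4 in the driver. */
static void setup_rambuffer(int ramsize_kb, int num_port)
{
	int rxqsize, txqsize, next;

	if (ramsize_kb == 0)
		return;                    /* no RAM buffer fitted */

	rxqsize = ((ramsize_kb * 1024 * 2) / 3) & ~(1024 - 1);
	txqsize = (ramsize_kb * 1024) - rxqsize;

	next = 0;
	for (int i = 0; i < num_port; i++) {
		int rxstart = next, rxend = next + rxqsize - 1;
		next = rxend + 1;
		int txstart = next, txend = next + txqsize - 1;
		next = txend + 1;
		printf("port %d: rx %d-%d  tx %d-%d\n",
		    i, rxstart, rxend, txstart, txend);
	}
}

/* e.g. setup_rambuffer(48, 1) prints: port 0: rx 0-32767  tx 32768-49151 */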
579 if (dev->d_hw_id == CHIP_ID_YUKON_XL && in yge_phy_power()
580 dev->d_hw_rev > CHIP_REV_YU_XL_A1) { in yge_phy_power()
591 val = pci_config_get32(dev->d_pcih, PCI_OUR_REG_1); in yge_phy_power()
593 if (dev->d_hw_id == CHIP_ID_YUKON_XL && in yge_phy_power()
594 dev->d_hw_rev > CHIP_REV_YU_XL_A1) { in yge_phy_power()
597 if (dev->d_num_port > 1) in yge_phy_power()
602 pci_config_put32(dev->d_pcih, PCI_OUR_REG_1, val); in yge_phy_power()
604 switch (dev->d_hw_id) { in yge_phy_power()
613 pci_config_put32(dev->d_pcih, PCI_OUR_REG_3, 0); in yge_phy_power()
615 our = pci_config_get32(dev->d_pcih, PCI_OUR_REG_4); in yge_phy_power()
619 pci_config_put32(dev->d_pcih, PCI_OUR_REG_4, our); in yge_phy_power()
622 our = pci_config_get32(dev->d_pcih, PCI_OUR_REG_5); in yge_phy_power()
624 pci_config_put32(dev->d_pcih, PCI_OUR_REG_5, our); in yge_phy_power()
626 pci_config_put32(dev->d_pcih, PCI_OUR_REG_1, 0); in yge_phy_power()
629 * Enable workaround for dev 4.107 on Yukon-Ultra in yge_phy_power()
643 for (i = 0; i < dev->d_num_port; i++) { in yge_phy_power()
650 val = pci_config_get32(dev->d_pcih, PCI_OUR_REG_1); in yge_phy_power()
651 if (dev->d_hw_id == CHIP_ID_YUKON_XL && in yge_phy_power()
652 dev->d_hw_rev > CHIP_REV_YU_XL_A1) { in yge_phy_power()
654 if (dev->d_num_port > 1) in yge_phy_power()
660 pci_config_put32(dev->d_pcih, PCI_OUR_REG_1, val); in yge_phy_power()
665 if (dev->d_hw_id == CHIP_ID_YUKON_XL && in yge_phy_power()
666 dev->d_hw_rev > CHIP_REV_YU_XL_A1) { in yge_phy_power()
687 ddi_acc_handle_t pcih = dev->d_pcih; in yge_reset()
690 if (dev->d_hw_id == CHIP_ID_YUKON_EX) { in yge_reset()
722 switch (dev->d_bustype) { in yge_reset()
730 dev->d_intrmask &= ~Y2_IS_HW_ERR; in yge_reset()
731 dev->d_intrhwemask &= ~Y2_IS_PCI_EXP; in yge_reset()
755 for (i = 0; i < dev->d_num_port; i++) { in yge_reset()
762 if (dev->d_hw_id == CHIP_ID_YUKON_EX || in yge_reset()
763 dev->d_hw_id == CHIP_ID_YUKON_SUPR) { in yge_reset()
794 for (i = 0; i < dev->d_num_port; i++) in yge_reset()
798 for (i = 0; i < dev->d_num_port; i++) { in yge_reset()
822 * On a dual port PCI-X card, there is a problem where status in yge_reset()
825 if (dev->d_bustype == PCIX_BUS && dev->d_num_port > 1) { in yge_reset()
838 if (dev->d_bustype == PEX_BUS) { in yge_reset()
857 CLEARRING(&dev->d_status_ring); in yge_reset()
858 SYNCRING(&dev->d_status_ring, DDI_DMA_SYNC_FORDEV); in yge_reset()
860 dev->d_stat_cons = 0; in yge_reset()
866 addr = dev->d_status_ring.r_paddr; in yge_reset()
871 CSR_WRITE_2(dev, STAT_LAST_IDX, YGE_STAT_RING_CNT - 1); in yge_reset()
874 if (dev->d_hw_id == CHIP_ID_YUKON_EC && in yge_reset()
875 dev->d_hw_rev == CHIP_REV_YU_EC_A1) { in yge_reset()
886 if (dev->d_hw_id == CHIP_ID_YUKON_XL && in yge_reset()
887 dev->d_hw_rev == CHIP_REV_YU_XL_A0) in yge_reset()
911 yge_dev_t *dev = port->p_dev; in yge_init_port()
915 port->p_flags = dev->d_pflags; in yge_init_port()
916 port->p_ppa = ddi_get_instance(dev->d_dip) + (port->p_port * 100); in yge_init_port()
918 port->p_tx_buf = kmem_zalloc(sizeof (yge_buf_t) * YGE_TX_RING_CNT, in yge_init_port()
920 port->p_rx_buf = kmem_zalloc(sizeof (yge_buf_t) * YGE_RX_RING_CNT, in yge_init_port()
924 if (port->p_port == YGE_PORT_A) { in yge_init_port()
925 port->p_txq = Q_XA1; in yge_init_port()
926 port->p_txsq = Q_XS1; in yge_init_port()
927 port->p_rxq = Q_R1; in yge_init_port()
929 port->p_txq = Q_XA2; in yge_init_port()
930 port->p_txsq = Q_XS2; in yge_init_port()
931 port->p_rxq = Q_R2; in yge_init_port()
935 if (dev->d_hw_id == CHIP_ID_YUKON_FE) in yge_init_port()
936 port->p_flags |= PORT_FLAG_NOJUMBO; in yge_init_port()
945 port->p_mtu = ETHERMTU; in yge_init_port()
947 port->p_mii = mii_alloc(port, dev->d_dip, &yge_mii_ops); in yge_init_port()
948 if (port->p_mii == NULL) { in yge_init_port()
953 mii_set_pauseable(port->p_mii, B_TRUE, B_TRUE); in yge_init_port()
965 port->p_curraddr[i] = in yge_init_port()
966 CSR_READ_1(dev, B2_MAC_1 + (port->p_port * 8) + i); in yge_init_port()
974 macp->m_type_ident = MAC_PLUGIN_IDENT_ETHER; in yge_init_port()
975 macp->m_driver = port; in yge_init_port()
976 macp->m_dip = dev->d_dip; in yge_init_port()
977 macp->m_src_addr = port->p_curraddr; in yge_init_port()
978 macp->m_callbacks = &yge_m_callbacks; in yge_init_port()
979 macp->m_min_sdu = 0; in yge_init_port()
980 macp->m_max_sdu = port->p_mtu; in yge_init_port()
981 macp->m_instance = port->p_ppa; in yge_init_port()
982 macp->m_margin = VLAN_TAGSZ; in yge_init_port()
984 port->p_mreg = macp; in yge_init_port()
998 dip = dev->d_dip; in yge_add_intr()
1014 dev->d_intrcnt = 1; in yge_add_intr()
1016 dev->d_intrsize = count * sizeof (ddi_intr_handle_t); in yge_add_intr()
1017 dev->d_intrh = kmem_zalloc(dev->d_intrsize, KM_SLEEP); in yge_add_intr()
1018 if (dev->d_intrh == NULL) { in yge_add_intr()
1023 rv = ddi_intr_alloc(dip, dev->d_intrh, intr_type, 0, dev->d_intrcnt, in yge_add_intr()
1029 kmem_free(dev->d_intrh, dev->d_intrsize); in yge_add_intr()
1033 if ((rv = ddi_intr_get_pri(dev->d_intrh[0], &dev->d_intrpri)) != in yge_add_intr()
1035 for (i = 0; i < dev->d_intrcnt; i++) in yge_add_intr()
1036 (void) ddi_intr_free(dev->d_intrh[i]); in yge_add_intr()
1039 kmem_free(dev->d_intrh, dev->d_intrsize); in yge_add_intr()
1043 if ((rv = ddi_intr_get_cap(dev->d_intrh[0], &dev->d_intrcap)) != in yge_add_intr()
1047 for (i = 0; i < dev->d_intrcnt; i++) in yge_add_intr()
1048 (void) ddi_intr_free(dev->d_intrh[i]); in yge_add_intr()
1049 kmem_free(dev->d_intrh, dev->d_intrsize); in yge_add_intr()
1054 for (i = 0; i < dev->d_intrcnt; i++) { in yge_add_intr()
1055 if ((rv = ddi_intr_add_handler(dev->d_intrh[i], yge_intr, in yge_add_intr()
1060 (void) ddi_intr_remove_handler(dev->d_intrh[j]); in yge_add_intr()
1061 for (i = 0; i < dev->d_intrcnt; i++) in yge_add_intr()
1062 (void) ddi_intr_free(dev->d_intrh[i]); in yge_add_intr()
1063 kmem_free(dev->d_intrh, dev->d_intrsize); in yge_add_intr()
1068 mutex_init(&dev->d_rxlock, NULL, MUTEX_DRIVER, in yge_add_intr()
1069 DDI_INTR_PRI(dev->d_intrpri)); in yge_add_intr()
1070 mutex_init(&dev->d_txlock, NULL, MUTEX_DRIVER, in yge_add_intr()
1071 DDI_INTR_PRI(dev->d_intrpri)); in yge_add_intr()
1072 mutex_init(&dev->d_phylock, NULL, MUTEX_DRIVER, in yge_add_intr()
1073 DDI_INTR_PRI(dev->d_intrpri)); in yge_add_intr()
1074 mutex_init(&dev->d_task_mtx, NULL, MUTEX_DRIVER, in yge_add_intr()
1075 DDI_INTR_PRI(dev->d_intrpri)); in yge_add_intr()
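The yge_add_intr() hits trace the usual DDI interrupt bring-up order: allocate handles with ddi_intr_alloc(), fetch the priority with ddi_intr_get_pri(), initialize the driver mutexes at DDI_INTR_PRI() of that priority, then add and enable the handlers. Below is a condensed sketch of that order for a single fixed interrupt, assuming the illumos DDI headers; error unwinding is reduced and yge_intr stands in for the driver's real handler, so this is not a drop-in replacement.

#include <sys/types.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

extern uint_t yge_intr(caddr_t, caddr_t);	/* driver's handler */

static int
add_intr_sketch(dev_info_t *dip, void *dev, kmutex_t *rxlock,
    ddi_intr_handle_t *hdlp)
{
	uint_t pri;
	int actual;

	if (ddi_intr_alloc(dip, hdlp, DDI_INTR_TYPE_FIXED, 0, 1, &actual,
	    DDI_INTR_ALLOC_STRICT) != DDI_SUCCESS)
		return (DDI_FAILURE);

	if (ddi_intr_get_pri(*hdlp, &pri) != DDI_SUCCESS) {
		(void) ddi_intr_free(*hdlp);
		return (DDI_FAILURE);
	}

	/* Locks taken from the handler are initialized at its priority. */
	mutex_init(rxlock, NULL, MUTEX_DRIVER, DDI_INTR_PRI(pri));

	if (ddi_intr_add_handler(*hdlp, yge_intr, dev, NULL) != DDI_SUCCESS ||
	    ddi_intr_enable(*hdlp) != DDI_SUCCESS) {
		mutex_destroy(rxlock);
		(void) ddi_intr_free(*hdlp);
		return (DDI_FAILURE);
	}
	return (DDI_SUCCESS);
}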
1083 dev_info_t *dip = dev->d_dip; in yge_attach_intr()
1133 if (dev->d_intrcap & DDI_INTR_FLAG_BLOCK) { in yge_intr_enable()
1135 (void) ddi_intr_block_enable(dev->d_intrh, dev->d_intrcnt); in yge_intr_enable()
1138 for (i = 0; i < dev->d_intrcnt; i++) in yge_intr_enable()
1139 (void) ddi_intr_enable(dev->d_intrh[i]); in yge_intr_enable()
1148 if (dev->d_intrcap & DDI_INTR_FLAG_BLOCK) { in yge_intr_disable()
1149 (void) ddi_intr_block_disable(dev->d_intrh, dev->d_intrcnt); in yge_intr_disable()
1151 for (i = 0; i < dev->d_intrcnt; i++) in yge_intr_disable()
1152 (void) ddi_intr_disable(dev->d_intrh[i]); in yge_intr_disable()
1161 ddi_acc_handle_t pcih = dev->d_pcih; in yge_find_capability()
1181 dev_info_t *dip = dev->d_dip; in yge_attach()
1186 if (pci_config_setup(dip, &dev->d_pcih) != DDI_SUCCESS) { in yge_attach()
1199 pmcsr = pci_config_get16(dev->d_pcih, pm_cap + PCI_PMCSR); in yge_attach()
1201 pci_config_put16(dev->d_pcih, pm_cap + PCI_PMCSR, in yge_attach()
1206 pci_config_put16(dev->d_pcih, PCI_CONF_COMM, in yge_attach()
1207 pci_config_get16(dev->d_pcih, PCI_CONF_COMM) | in yge_attach()
1212 rv = ddi_regs_map_setup(dip, 1, &dev->d_regs, 0, 0, &yge_regs_attr, in yge_attach()
1213 &dev->d_regsh); in yge_attach()
1222 pci_config_put32(dev->d_pcih, PCI_OUR_REG_3, 0); in yge_attach()
1226 dev->d_hw_id = CSR_READ_1(dev, B2_CHIP_ID); in yge_attach()
1227 dev->d_hw_rev = (CSR_READ_1(dev, B2_MAC_CFG) >> 4) & 0x0f; in yge_attach()
1232 * this in production builds. The Ultra-2 (88e8057) has a problem in yge_attach()
1236 if (dev->d_hw_id < CHIP_ID_YUKON_XL || in yge_attach()
1237 dev->d_hw_id >= CHIP_ID_YUKON_UL_2) { in yge_attach()
1239 dev->d_hw_id, dev->d_hw_rev); in yge_attach()
1248 dev->d_pmd = CSR_READ_1(dev, B2_PMD_TYP); in yge_attach()
1249 if (dev->d_pmd == 'L' || dev->d_pmd == 'S' || dev->d_pmd == 'P') in yge_attach()
1250 dev->d_coppertype = 0; in yge_attach()
1252 dev->d_coppertype = 1; in yge_attach()
1254 dev->d_num_port = 1; in yge_attach()
1258 dev->d_num_port++; in yge_attach()
1263 dev->d_bustype = PEX_BUS; in yge_attach()
1265 dev->d_bustype = PCIX_BUS; in yge_attach()
1267 dev->d_bustype = PCI_BUS; in yge_attach()
1270 switch (dev->d_hw_id) { in yge_attach()
1272 dev->d_clock = 125; /* 125 MHz */ in yge_attach()
1275 dev->d_clock = 125; /* 125 MHz */ in yge_attach()
1278 dev->d_clock = 125; /* 125 MHz */ in yge_attach()
1281 dev->d_clock = 125; /* 125 MHz */ in yge_attach()
1284 dev->d_clock = 125; /* 125 MHz */ in yge_attach()
1287 dev->d_clock = 100; /* 100 MHz */ in yge_attach()
1290 dev->d_clock = 50; /* 50 MHz */ in yge_attach()
1293 dev->d_clock = 156; /* 156 MHz */ in yge_attach()
1296 dev->d_clock = 156; /* 156 MHz */ in yge_attach()

1300 dev->d_process_limit = YGE_RX_RING_CNT/2; in yge_attach()
1302 rv = yge_alloc_ring(NULL, dev, &dev->d_status_ring, YGE_STAT_RING_CNT); in yge_attach()
1307 dev->d_task_q = ddi_taskq_create(dip, "tq", 1, TASKQ_DEFAULTPRI, 0); in yge_attach()
1308 if (dev->d_task_q == NULL) { in yge_attach()
1314 cv_init(&dev->d_task_cv, NULL, CV_DRIVER, NULL); in yge_attach()
1322 dev->d_intrmask = Y2_IS_HW_ERR | Y2_IS_STAT_BMU; in yge_attach()
1323 dev->d_intrhwemask = Y2_IS_TIST_OV | Y2_IS_MST_ERR | in yge_attach()
1332 for (int i = 0; i < dev->d_num_port; i++) { in yge_attach()
1333 yge_port_t *port = dev->d_port[i]; in yge_attach()
1342 dev->d_periodic = ddi_periodic_add(yge_tick, dev, 1000000000, 0); in yge_attach()
1344 for (int i = 0; i < dev->d_num_port; i++) { in yge_attach()
1345 yge_port_t *port = dev->d_port[i]; in yge_attach()
1356 if (ddi_taskq_dispatch(dev->d_task_q, yge_task, dev, DDI_SLEEP) != in yge_attach()
1374 if (mac_register(port->p_mreg, &port->p_mh) != DDI_SUCCESS) { in yge_register_port()
1389 ASSERT(!port->p_running); in yge_uninit_port()
1391 if (port->p_mreg) in yge_uninit_port()
1392 mac_free(port->p_mreg); in yge_uninit_port()
1394 if (port->p_mii) in yge_uninit_port()
1395 mii_free(port->p_mii); in yge_uninit_port()
1399 if (port->p_tx_buf) in yge_uninit_port()
1400 kmem_free(port->p_tx_buf, in yge_uninit_port()
1402 if (port->p_rx_buf) in yge_uninit_port()
1403 kmem_free(port->p_rx_buf, in yge_uninit_port()
1413 if (dev->d_periodic) in yge_detach()
1414 ddi_periodic_delete(dev->d_periodic); in yge_detach()
1416 for (int i = 0; i < dev->d_num_port; i++) { in yge_detach()
1417 yge_uninit_port(dev->d_port[i]); in yge_detach()
1434 yge_free_ring(&dev->d_status_ring); in yge_detach()
1436 if (dev->d_task_q != NULL) { in yge_detach()
1438 ddi_taskq_destroy(dev->d_task_q); in yge_detach()
1439 dev->d_task_q = NULL; in yge_detach()
1442 cv_destroy(&dev->d_task_cv); in yge_detach()
1446 if (dev->d_intrh != NULL) { in yge_detach()
1447 for (int i = 0; i < dev->d_intrcnt; i++) { in yge_detach()
1448 (void) ddi_intr_remove_handler(dev->d_intrh[i]); in yge_detach()
1449 (void) ddi_intr_free(dev->d_intrh[i]); in yge_detach()
1451 kmem_free(dev->d_intrh, dev->d_intrsize); in yge_detach()
1452 mutex_destroy(&dev->d_phylock); in yge_detach()
1453 mutex_destroy(&dev->d_txlock); in yge_detach()
1454 mutex_destroy(&dev->d_rxlock); in yge_detach()
1455 mutex_destroy(&dev->d_task_mtx); in yge_detach()
1457 if (dev->d_regsh != NULL) in yge_detach()
1458 ddi_regs_map_free(&dev->d_regsh); in yge_detach()
1460 if (dev->d_pcih != NULL) in yge_detach()
1461 pci_config_teardown(&dev->d_pcih); in yge_detach()
1475 dev = port->p_dev; in yge_alloc_ring()
1476 dip = dev->d_dip; in yge_alloc_ring()
1478 ring->r_num = num; in yge_alloc_ring()
1481 NULL, &ring->r_dmah); in yge_alloc_ring()
1483 yge_error(dev, port, "Unable to allocate ring DMA handle"); in yge_alloc_ring()
1487 rv = ddi_dma_mem_alloc(ring->r_dmah, num * sizeof (yge_desc_t), in yge_alloc_ring()
1489 &kaddr, &len, &ring->r_acch); in yge_alloc_ring()
1491 yge_error(dev, port, "Unable to allocate ring DMA memory"); in yge_alloc_ring()
1494 ring->r_size = len; in yge_alloc_ring()
1495 ring->r_kaddr = (void *)kaddr; in yge_alloc_ring()
1499 rv = ddi_dma_addr_bind_handle(ring->r_dmah, NULL, kaddr, in yge_alloc_ring()
1503 yge_error(dev, port, "Unable to bind ring DMA handle"); in yge_alloc_ring()
1507 ring->r_paddr = dmac.dmac_address; in yge_alloc_ring()
1515 if (ring->r_paddr) in yge_free_ring()
1516 (void) ddi_dma_unbind_handle(ring->r_dmah); in yge_free_ring()
1517 ring->r_paddr = 0; in yge_free_ring()
1518 if (ring->r_acch) in yge_free_ring()
1519 ddi_dma_mem_free(&ring->r_acch); in yge_free_ring()
1520 ring->r_kaddr = NULL; in yge_free_ring()
1521 ring->r_acch = NULL; in yge_free_ring()
1522 if (ring->r_dmah) in yge_free_ring()
1523 ddi_dma_free_handle(&ring->r_dmah); in yge_free_ring()
1524 ring->r_dmah = NULL; in yge_free_ring()
1530 yge_dev_t *dev = port->p_dev; in yge_alloc_buf()
1540 rv = ddi_dma_alloc_handle(dev->d_dip, &yge_buf_dma_attr, in yge_alloc_buf()
1541 DDI_DMA_DONTWAIT, NULL, &b->b_dmah); in yge_alloc_buf()
1543 yge_error(NULL, port, "Unable to alloc DMA handle for buffer"); in yge_alloc_buf()
1547 rv = ddi_dma_mem_alloc(b->b_dmah, bufsz, &yge_buf_attr, in yge_alloc_buf()
1548 sflag, DDI_DMA_DONTWAIT, NULL, &b->b_buf, &l, &b->b_acch); in yge_alloc_buf()
1550 yge_error(NULL, port, "Unable to alloc DMA memory for buffer"); in yge_alloc_buf()
1554 rv = ddi_dma_addr_bind_handle(b->b_dmah, NULL, b->b_buf, l, flag, in yge_alloc_buf()
1557 yge_error(NULL, port, "Unable to bind DMA handle for buffer"); in yge_alloc_buf()
1561 b->b_paddr = dmac.dmac_address; in yge_alloc_buf()
1568 if (b->b_paddr) in yge_free_buf()
1569 (void) ddi_dma_unbind_handle(b->b_dmah); in yge_free_buf()
1570 b->b_paddr = 0; in yge_free_buf()
1571 if (b->b_acch) in yge_free_buf()
1572 ddi_dma_mem_free(&b->b_acch); in yge_free_buf()
1573 b->b_buf = NULL; in yge_free_buf()
1574 b->b_acch = NULL; in yge_free_buf()
1575 if (b->b_dmah) in yge_free_buf()
1576 ddi_dma_free_handle(&b->b_dmah); in yge_free_buf()
1577 b->b_dmah = NULL; in yge_free_buf()
1589 * It seems that Yukon II supports full 64 bit DMA operations. in yge_txrx_dma_alloc()
1596 * and re-examine it. in yge_txrx_dma_alloc()
1600 bufsz = port->p_mtu; in yge_txrx_dma_alloc()
1603 rv = yge_alloc_ring(port, NULL, &port->p_tx_ring, YGE_TX_RING_CNT); in yge_txrx_dma_alloc()
1609 b = port->p_tx_buf; in yge_txrx_dma_alloc()
1620 rv = yge_alloc_ring(port, NULL, &port->p_rx_ring, YGE_RX_RING_CNT); in yge_txrx_dma_alloc()
1626 b = port->p_rx_buf; in yge_txrx_dma_alloc()
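The comment fragments at lines 1589 and 1596 note that the Yukon II apparently supports full 64-bit DMA but that the driver may re-examine how much of that to use. The matched lines do not show the driver's actual yge_buf_dma_attr, so purely for illustration, a DDI DMA attribute set restricted to 32-bit addressing with a single cookie might look roughly like this (all values hypothetical):

#include <sys/sunddi.h>

/* Hypothetical example only; not the driver's real yge_buf_dma_attr. */
static ddi_dma_attr_t example_buf_dma_attr = {
	DMA_ATTR_V0,			/* dma_attr_version */
	0x0000000000000000ull,		/* dma_attr_addr_lo */
	0x00000000ffffffffull,		/* dma_attr_addr_hi: 32-bit only */
	0x00000000ffffffffull,		/* dma_attr_count_max */
	8,				/* dma_attr_align */
	0x7ff,				/* dma_attr_burstsizes */
	1,				/* dma_attr_minxfer */
	0x00000000ffffffffull,		/* dma_attr_maxxfer */
	0x00000000ffffffffull,		/* dma_attr_seg */
	1,				/* dma_attr_sgllen: single cookie */
	1,				/* dma_attr_granular */
	0				/* dma_attr_flags */
};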
1645 yge_free_ring(&port->p_tx_ring); in yge_txrx_dma_free()
1648 yge_free_ring(&port->p_rx_ring); in yge_txrx_dma_free()
1651 b = port->p_tx_buf; in yge_txrx_dma_free()
1656 b = port->p_rx_buf; in yge_txrx_dma_free()
1665 yge_ring_t *ring = &port->p_tx_ring; in yge_send()
1675 if (len > port->p_framesize) { in yge_send()
1682 if (port->p_tx_cnt + 1 >= in yge_send()
1683 (YGE_TX_RING_CNT - YGE_RESERVED_TX_DESC_CNT)) { in yge_send()
1684 port->p_wantw = B_TRUE; in yge_send()
1688 prod = port->p_tx_prod; in yge_send()
1690 txb = &port->p_tx_buf[prod]; in yge_send()
1691 mcopymsg(mp, txb->b_buf); in yge_send()
1694 PUTADDR(ring, prod, txb->b_paddr); in yge_send()
1697 port->p_tx_cnt++; in yge_send()
1702 port->p_tx_prod = prod; in yge_send()
1710 for (int i = 0; i < dev->d_num_port; i++) { in yge_suspend()
1711 yge_port_t *port = dev->d_port[i]; in yge_suspend()
1712 mii_suspend(port->p_mii); in yge_suspend()
1718 for (int i = 0; i < dev->d_num_port; i++) { in yge_suspend()
1719 yge_port_t *port = dev->d_port[i]; in yge_suspend()
1721 if (port->p_running) { in yge_suspend()
1736 dev->d_suspended = B_TRUE; in yge_suspend()
1755 pmcsr = pci_config_get16(dev->d_pcih, pm_cap + PCI_PMCSR); in yge_resume()
1757 pci_config_put16(dev->d_pcih, pm_cap + PCI_PMCSR, in yge_resume()
1762 pci_config_put16(dev->d_pcih, PCI_CONF_COMM, in yge_resume()
1763 pci_config_get16(dev->d_pcih, PCI_CONF_COMM) | in yge_resume()
1767 switch (dev->d_hw_id) { in yge_resume()
1771 pci_config_put32(dev->d_pcih, PCI_OUR_REG_3, 0); in yge_resume()
1786 for (int i = 0; i < dev->d_num_port; i++) { in yge_resume()
1787 yge_port_t *port = dev->d_port[i]; in yge_resume()
1789 if (port != NULL && port->p_running) { in yge_resume()
1793 dev->d_suspended = B_FALSE; in yge_resume()
1798 for (int i = 0; i < dev->d_num_port; i++) { in yge_resume()
1799 yge_port_t *port = dev->d_port[i]; in yge_resume()
1801 if (port->p_running) { in yge_resume()
1802 mii_resume(port->p_mii); in yge_resume()
1803 mac_tx_update(port->p_mh); in yge_resume()
1813 yge_dev_t *dev = port->p_dev; in yge_rxeof()
1819 ASSERT(mutex_owned(&dev->d_rxlock)); in yge_rxeof()
1821 if (!port->p_running) in yge_rxeof()
1824 ring = &port->p_rx_ring; in yge_rxeof()
1825 cons = port->p_rx_cons; in yge_rxeof()
1827 rxb = &port->p_rx_buf[cons]; in yge_rxeof()
1831 if ((dev->d_hw_id == CHIP_ID_YUKON_FE_P) && in yge_rxeof()
1832 (dev->d_hw_rev == CHIP_REV_YU_FE2_A0)) { in yge_rxeof()
1838 if ((len > port->p_framesize) || (rxlen != len)) { in yge_rxeof()
1842 if ((len > port->p_framesize) || (rxlen != len) || in yge_rxeof()
1851 /* good packet - yay */ in yge_rxeof()
1852 mp->b_rptr += YGE_HEADROOM; in yge_rxeof()
1854 bcopy(rxb->b_buf, mp->b_rptr, len); in yge_rxeof()
1855 mp->b_wptr = mp->b_rptr + len; in yge_rxeof()
1857 port->p_stats.rx_nobuf++; in yge_rxeof()
1862 PUTCTRL(ring, cons, port->p_framesize | OP_PACKET | HW_OWNER); in yge_rxeof()
1866 Y2_PREF_Q_ADDR(port->p_rxq, PREF_UNIT_PUT_IDX_REG), in yge_rxeof()
1869 YGE_INC(port->p_rx_cons, YGE_RX_RING_CNT); in yge_rxeof()
1881 if (!port->p_running) { in yge_txeof_locked()
1885 cons = port->p_tx_cons; in yge_txeof_locked()
1888 if (port->p_tx_cnt <= 0) in yge_txeof_locked()
1891 port->p_tx_cnt--; in yge_txeof_locked()
1895 port->p_tx_cons = cons; in yge_txeof_locked()
1898 resched = port->p_wantw; in yge_txeof_locked()
1899 port->p_tx_wdog = 0; in yge_txeof_locked()
1900 port->p_wantw = B_FALSE; in yge_txeof_locked()
1912 TX_LOCK(port->p_dev); in yge_txeof()
1916 TX_UNLOCK(port->p_dev); in yge_txeof()
1918 if (resched && port->p_running) { in yge_txeof()
1919 mac_tx_update(port->p_mh); in yge_txeof()
1931 for (int i = 0; i < dev->d_num_port; i++) { in yge_restart_task()
1932 port = dev->d_port[i]; in yge_restart_task()
1933 if (port->p_running) in yge_restart_task()
1934 yge_stop_port(dev->d_port[i]); in yge_restart_task()
1937 for (int i = 0; i < dev->d_num_port; i++) { in yge_restart_task()
1938 port = dev->d_port[i]; in yge_restart_task()
1940 if (port->p_running) in yge_restart_task()
1946 for (int i = 0; i < dev->d_num_port; i++) { in yge_restart_task()
1947 port = dev->d_port[i]; in yge_restart_task()
1949 mii_reset(port->p_mii); in yge_restart_task()
1950 if (port->p_running) in yge_restart_task()
1951 mac_tx_update(port->p_mh); in yge_restart_task()
1966 if (dev->d_suspended) { in yge_tick()
1971 for (int i = 0; i < dev->d_num_port; i++) { in yge_tick()
1972 port = dev->d_port[i]; in yge_tick()
1974 if (!port->p_running) in yge_tick()
1977 if (port->p_tx_cnt) { in yge_tick()
1984 ridx = port->p_port == YGE_PORT_A ? in yge_tick()
1987 if (port->p_tx_cons != idx) { in yge_tick()
1993 port->p_tx_wdog++; in yge_tick()
1994 if (port->p_tx_wdog > YGE_TX_TIMEOUT) { in yge_tick()
1995 port->p_tx_wdog = 0; in yge_tick()
2009 for (int i = 0; i < dev->d_num_port; i++) { in yge_tick()
2010 port = dev->d_port[i]; in yge_tick()
2012 if (port->p_running) in yge_tick()
2013 mac_tx_update(port->p_mh); in yge_tick()
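yge_tick() runs once a second (the 1000000000 ns ddi_periodic_add() call at line 1342) and acts as a TX watchdog: when descriptors are outstanding it compares the software consumer index with the chip's reported completion index (STAT_TXA1_RIDX or STAT_TXA2_RIDX depending on the port); progress resets the counter, and no progress for more than YGE_TX_TIMEOUT ticks forces a restart. A generic sketch of that pattern, with the reclaim details reduced to advancing the consumer index:

#include <stdint.h>

#define TX_TIMEOUT_TICKS 5	/* stand-in for YGE_TX_TIMEOUT */

struct txq {
	uint16_t cons;		/* software consumer index */
	int	 outstanding;	/* descriptors owned by hardware */
	int	 wdog;		/* ticks without progress */
};

/* hw_idx corresponds to CSR_READ_2(dev, STAT_TXA?_RIDX) in the driver. */
static void tick_watchdog(struct txq *q, uint16_t hw_idx)
{
	if (q->outstanding == 0) {
		q->wdog = 0;
		return;
	}
	if (q->cons != hw_idx) {
		q->cons = hw_idx;	/* hardware made progress: reclaim */
		q->wdog = 0;
	} else if (++q->wdog > TX_TIMEOUT_TICKS) {
		q->wdog = 0;
		/* the driver dispatches its restart task here */
	}
}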
2022 yge_dev_t *dev = port->p_dev; in yge_intr_gmac()
2023 int pnum = port->p_port; in yge_intr_gmac()
2055 yge_dev_t *dev = port->p_dev; in yge_handle_hwerr()
2060 CSR_WRITE_2(dev, SELECT_RAM_BUFFER(port->p_port, B3_RI_CTRL), in yge_handle_hwerr()
2066 CSR_WRITE_2(dev, SELECT_RAM_BUFFER(port->p_port, B3_RI_CTRL), in yge_handle_hwerr()
2072 CSR_WRITE_4(dev, MR_ADDR(port->p_port, TX_GMF_CTRL_T), in yge_handle_hwerr()
2078 CSR_WRITE_4(dev, Q_ADDR(port->p_rxq, Q_CSR), BMU_CLR_IRQ_PAR); in yge_handle_hwerr()
2083 CSR_WRITE_4(dev, Q_ADDR(port->p_txq, Q_CSR), BMU_CLR_IRQ_TCP); in yge_handle_hwerr()
2116 v16 = pci_config_get16(dev->d_pcih, PCI_CONF_STAT); in yge_intr_hwerr()
2118 pci_config_put16(dev->d_pcih, PCI_CONF_STAT, v16 | in yge_intr_hwerr()
2151 dev->d_intrhwemask &= ~Y2_IS_PCI_EXP; in yge_intr_hwerr()
2153 dev->d_intrhwemask); in yge_intr_hwerr()
2163 if ((status & Y2_HWE_L1_MASK) != 0 && dev->d_port[YGE_PORT_A] != NULL) in yge_intr_hwerr()
2164 yge_handle_hwerr(dev->d_port[YGE_PORT_A], status); in yge_intr_hwerr()
2165 if ((status & Y2_HWE_L2_MASK) != 0 && dev->d_port[YGE_PORT_B] != NULL) in yge_intr_hwerr()
2166 yge_handle_hwerr(dev->d_port[YGE_PORT_B], status >> 8); in yge_intr_hwerr()
2185 if (idx == dev->d_stat_cons) { in yge_handle_events()
2189 ring = &dev->d_status_ring; in yge_handle_events()
2191 for (cons = dev->d_stat_cons; cons != idx; ) { in yge_handle_events()
2206 port = dev->d_port[pnum]; in yge_handle_events()
2220 tails[pnum]->b_next = mp; in yge_handle_events()
2245 if (rxprogs[pnum] > dev->d_process_limit) { in yge_handle_events()
2250 dev->d_stat_cons = cons; in yge_handle_events()
2251 if (dev->d_stat_cons != CSR_READ_2(dev, STAT_PUT_IDX)) in yge_handle_events()
2273 txindex[0] = txindex[1] = -1; in yge_intr()
2276 port1 = dev->d_port[YGE_PORT_A]; in yge_intr()
2277 port2 = dev->d_port[YGE_PORT_B]; in yge_intr()
2281 if (dev->d_suspended) { in yge_intr()
2289 (status & dev->d_intrmask) == 0) { /* Stray interrupt ? */ in yge_intr()
2310 dev->d_intrmask &= ~(Y2_IS_CHK_RX1 | Y2_IS_CHK_RX2); in yge_intr()
2311 CSR_WRITE_4(dev, B0_IMSK, dev->d_intrmask); in yge_intr()
2317 dev->d_intrmask &= ~(Y2_IS_CHK_TXA1 | Y2_IS_CHK_TXA2); in yge_intr()
2318 CSR_WRITE_4(dev, B0_IMSK, dev->d_intrmask); in yge_intr()
2340 if (port1->p_running) { in yge_intr()
2345 mac_rx(port1->p_mh, NULL, heads[0]); in yge_intr()
2350 heads[0] = mp->b_next; in yge_intr()
2356 if (port2->p_running) { in yge_intr()
2361 mac_rx(port2->p_mh, NULL, heads[1]); in yge_intr()
2366 heads[1] = mp->b_next; in yge_intr()
2378 yge_dev_t *dev = port->p_dev; in yge_set_tx_stfwd()
2379 int pnum = port->p_port; in yge_set_tx_stfwd()
2381 switch (dev->d_hw_id) { in yge_set_tx_stfwd()
2383 if (dev->d_hw_rev == CHIP_REV_YU_EX_A0) in yge_set_tx_stfwd()
2386 if (port->p_mtu > ETHERMTU) in yge_set_tx_stfwd()
2395 if (port->p_mtu > ETHERMTU) { in yge_set_tx_stfwd()
2414 yge_dev_t *dev = port->p_dev; in yge_start_port()
2421 pnum = port->p_port; in yge_start_port()
2422 txq = port->p_txq; in yge_start_port()
2423 rxq = port->p_rxq; in yge_start_port()
2425 if (port->p_mtu < ETHERMTU) in yge_start_port()
2426 port->p_framesize = ETHERMTU; in yge_start_port()
2428 port->p_framesize = port->p_mtu; in yge_start_port()
2429 port->p_framesize += sizeof (struct ether_vlan_header); in yge_start_port()
2441 if (dev->d_hw_id == CHIP_ID_YUKON_EX) in yge_start_port()
2446 * Initialize GMAC first such that speed/duplex/flow-control in yge_start_port()
2474 if (port->p_mtu > ETHERMTU) in yge_start_port()
2487 if (dev->d_hw_id == CHIP_ID_YUKON_FE_P || in yge_start_port()
2488 dev->d_hw_id == CHIP_ID_YUKON_EX) in yge_start_port()
2504 if ((dev->d_hw_id == CHIP_ID_YUKON_FE_P) && in yge_start_port()
2505 (dev->d_hw_rev == CHIP_REV_YU_FE2_A0)) in yge_start_port()
2519 if ((port->p_flags & PORT_FLAG_RAMBUF) == 0) { in yge_start_port()
2521 if ((dev->d_hw_id == CHIP_ID_YUKON_FE_P) && in yge_start_port()
2522 (dev->d_hw_rev == CHIP_REV_YU_FE2_A0)) { in yge_start_port()
2533 /* Configure store-and-forward for TX */ in yge_start_port()
2537 if ((dev->d_hw_id == CHIP_ID_YUKON_FE_P) && in yge_start_port()
2538 (dev->d_hw_rev == CHIP_REV_YU_FE2_A0)) { in yge_start_port()
2558 CSR_WRITE_1(dev, RB_ADDR(port->p_txsq, RB_CTRL), RB_RST_SET); in yge_start_port()
2566 switch (dev->d_hw_id) { in yge_start_port()
2568 if (dev->d_hw_rev == CHIP_REV_YU_EC_U_A0) { in yge_start_port()
2569 /* Fix for Yukon-EC Ultra: set BMU FIFO level */ in yge_start_port()
2578 if (dev->d_hw_rev == CHIP_REV_YU_EX_B0) in yge_start_port()
2587 if (dev->d_bustype == PEX_BUS) { in yge_start_port()
2592 if (dev->d_hw_id == CHIP_ID_YUKON_EC_U && in yge_start_port()
2593 dev->d_hw_rev >= CHIP_REV_YU_EC_U_A1) { in yge_start_port()
2607 if (port == dev->d_port[YGE_PORT_A]) { in yge_start_port()
2608 dev->d_intrmask |= Y2_IS_PORT_A; in yge_start_port()
2609 dev->d_intrhwemask |= Y2_HWE_L1_MASK; in yge_start_port()
2610 } else if (port == dev->d_port[YGE_PORT_B]) { in yge_start_port()
2611 dev->d_intrmask |= Y2_IS_PORT_B; in yge_start_port()
2612 dev->d_intrhwemask |= Y2_HWE_L2_MASK; in yge_start_port()
2614 CSR_WRITE_4(dev, B0_HWE_IMSK, dev->d_intrhwemask); in yge_start_port()
2616 CSR_WRITE_4(dev, B0_IMSK, dev->d_intrmask); in yge_start_port()
2622 GMAC_WRITE_2(port->p_dev, port->p_port, GM_GP_CTRL, gmac); in yge_start_port()
2627 port->p_tx_wdog = 0; in yge_start_port()
2639 dev = port->p_dev; in yge_set_rambuffer()
2640 pnum = port->p_port; in yge_set_rambuffer()
2641 rxq = port->p_rxq; in yge_set_rambuffer()
2642 txq = port->p_txq; in yge_set_rambuffer()
2644 if ((port->p_flags & PORT_FLAG_RAMBUF) == 0) in yge_set_rambuffer()
2649 CSR_WRITE_4(dev, RB_ADDR(rxq, RB_START), dev->d_rxqstart[pnum] / 8); in yge_set_rambuffer()
2650 CSR_WRITE_4(dev, RB_ADDR(rxq, RB_END), dev->d_rxqend[pnum] / 8); in yge_set_rambuffer()
2651 CSR_WRITE_4(dev, RB_ADDR(rxq, RB_WP), dev->d_rxqstart[pnum] / 8); in yge_set_rambuffer()
2652 CSR_WRITE_4(dev, RB_ADDR(rxq, RB_RP), dev->d_rxqstart[pnum] / 8); in yge_set_rambuffer()
2655 (dev->d_rxqend[pnum] + 1 - dev->d_rxqstart[pnum] - RB_ULPP) / 8; in yge_set_rambuffer()
2657 (dev->d_rxqend[pnum] + 1 - dev->d_rxqstart[pnum] - RB_LLPP_B) / 8; in yge_set_rambuffer()
2659 if (dev->d_rxqsize < MSK_MIN_RXQ_SIZE) in yge_set_rambuffer()
2660 ltpp += (RB_LLPP_B - RB_LLPP_S) / 8; in yge_set_rambuffer()
2671 CSR_WRITE_4(dev, RB_ADDR(txq, RB_START), dev->d_txqstart[pnum] / 8); in yge_set_rambuffer()
2672 CSR_WRITE_4(dev, RB_ADDR(txq, RB_END), dev->d_txqend[pnum] / 8); in yge_set_rambuffer()
2673 CSR_WRITE_4(dev, RB_ADDR(txq, RB_WP), dev->d_txqstart[pnum] / 8); in yge_set_rambuffer()
2674 CSR_WRITE_4(dev, RB_ADDR(txq, RB_RP), dev->d_txqstart[pnum] / 8); in yge_set_rambuffer()
2684 /* Reset the prefetch unit. */ in yge_set_prefetch()
2691 YGE_ADDR_LO(ring->r_paddr)); in yge_set_prefetch()
2693 YGE_ADDR_HI(ring->r_paddr)); in yge_set_prefetch()
2696 ring->r_num - 1); in yge_set_prefetch()
2697 /* Turn on prefetch unit. */ in yge_set_prefetch()
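yge_set_prefetch() programs one of the chip's descriptor prefetch units: reset it, load the ring's DMA base address split into low and high halves, load the last valid index (r_num - 1), then switch the unit on; afterwards the driver only touches the unit's put-index register to hand over new descriptors. A hedged sketch of that sequence with a stubbed register write; the offsets and control bits are placeholders, not the real Y2_PREF_Q_ADDR() values.

#include <stdint.h>
#include <stdio.h>

/* Placeholder offsets/bits within one prefetch unit's register block. */
enum { PREF_CTRL = 0x00, PREF_LAST_IDX = 0x04, PREF_ADDR_LO = 0x08,
       PREF_ADDR_HI = 0x0c, PREF_PUT_IDX = 0x10 };
enum { PREF_RST_SET = 1, PREF_RST_CLR = 2, PREF_OP_ON = 8 };

static void pref_write(uint32_t off, uint32_t val)	/* CSR_WRITE_* stand-in */
{
	printf("pref[%02x] <- %08x\n", off, val);
}

static void set_prefetch(uint64_t ring_paddr, uint16_t ring_cnt)
{
	pref_write(PREF_CTRL, PREF_RST_SET);		/* reset the unit */
	pref_write(PREF_CTRL, PREF_RST_CLR);
	pref_write(PREF_ADDR_LO, (uint32_t)ring_paddr);		/* YGE_ADDR_LO() */
	pref_write(PREF_ADDR_HI, (uint32_t)(ring_paddr >> 32));	/* YGE_ADDR_HI() */
	pref_write(PREF_LAST_IDX, (uint32_t)(ring_cnt - 1));	/* last index */
	pref_write(PREF_CTRL, PREF_OP_ON);		/* turn the unit on */
}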
2707 yge_dev_t *dev = port->p_dev; in yge_stop_port()
2708 int pnum = port->p_port; in yge_stop_port()
2709 uint32_t txq = port->p_txq; in yge_stop_port()
2710 uint32_t rxq = port->p_rxq; in yge_stop_port()
2714 dev = port->p_dev; in yge_stop_port()
2719 port->p_tx_wdog = 0; in yge_stop_port()
2723 dev->d_intrmask &= ~Y2_IS_PORT_A; in yge_stop_port()
2724 dev->d_intrhwemask &= ~Y2_HWE_L1_MASK; in yge_stop_port()
2726 dev->d_intrmask &= ~Y2_IS_PORT_B; in yge_stop_port()
2727 dev->d_intrhwemask &= ~Y2_HWE_L2_MASK; in yge_stop_port()
2729 CSR_WRITE_4(dev, B0_HWE_IMSK, dev->d_intrhwemask); in yge_stop_port()
2731 CSR_WRITE_4(dev, B0_IMSK, dev->d_intrmask); in yge_stop_port()
2770 /* Reset the Tx prefetch units. */ in yge_stop_port()
2783 * The Rx Stop command will not work for Yukon-2 if the BMU does not in yge_stop_port()
2785 * incoming data, we must reset the BMU while it is not in the middle of a DMA in yge_stop_port()
2788 * data will not trigger a DMA. After the RAM buffer is stopped, the in yge_stop_port()
2789 * BMU is polled until any DMA in progress is ended and only then it in yge_stop_port()
2806 /* Reset the Rx prefetch unit. */ in yge_stop_port()
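The yge_stop_port() comment at lines 2783-2789 describes the shutdown technique: the Rx Stop command is unreliable on Yukon-2 unless the BMU has reached a packet boundary, so the RAM buffer is stopped first (its drained data does not trigger DMA) and the BMU is polled until any in-flight DMA finishes before it is reset. A generic bounded poll-then-reset sketch, assuming a hypothetical idle bit and reset command in a single control/status register:

#include <stdint.h>
#include <stdbool.h>

/*
 * Poll *csr until the (hypothetical) idle bit is set, then write the reset
 * command; returns false on timeout so the caller can log an error.
 */
static bool stop_bmu(volatile uint32_t *csr, uint32_t idle_bit,
    uint32_t reset_cmd, int tries)
{
	while (tries-- > 0) {
		if (*csr & idle_bit) {
			*csr = reset_cmd;
			return (true);
		}
		/* the driver would drv_usecwait()/delay() between polls */
	}
	return (false);
}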
2834 pnum = port->p_port; in yge_stats_clear()
2835 dev = port->p_dev; in yge_stats_clear()
2856 dev = port->p_dev; in yge_stats_update()
2857 pnum = port->p_port; in yge_stats_update()
2859 if (dev->d_suspended || !port->p_running) { in yge_stats_update()
2862 stats = &port->p_stats; in yge_stats_update()
2868 stats->rx_ucast_frames += YGE_READ_MIB32(pnum, GM_RXF_UC_OK); in yge_stats_update()
2869 stats->rx_bcast_frames += YGE_READ_MIB32(pnum, GM_RXF_BC_OK); in yge_stats_update()
2870 stats->rx_pause_frames += YGE_READ_MIB32(pnum, GM_RXF_MPAUSE); in yge_stats_update()
2871 stats->rx_mcast_frames += YGE_READ_MIB32(pnum, GM_RXF_MC_OK); in yge_stats_update()
2872 stats->rx_crc_errs += YGE_READ_MIB32(pnum, GM_RXF_FCS_ERR); in yge_stats_update()
2874 stats->rx_good_octets += YGE_READ_MIB64(pnum, GM_RXO_OK_LO); in yge_stats_update()
2875 stats->rx_bad_octets += YGE_READ_MIB64(pnum, GM_RXO_ERR_LO); in yge_stats_update()
2876 stats->rx_runts += YGE_READ_MIB32(pnum, GM_RXF_SHT); in yge_stats_update()
2877 stats->rx_runt_errs += YGE_READ_MIB32(pnum, GM_RXE_FRAG); in yge_stats_update()
2878 stats->rx_pkts_64 += YGE_READ_MIB32(pnum, GM_RXF_64B); in yge_stats_update()
2879 stats->rx_pkts_65_127 += YGE_READ_MIB32(pnum, GM_RXF_127B); in yge_stats_update()
2880 stats->rx_pkts_128_255 += YGE_READ_MIB32(pnum, GM_RXF_255B); in yge_stats_update()
2881 stats->rx_pkts_256_511 += YGE_READ_MIB32(pnum, GM_RXF_511B); in yge_stats_update()
2882 stats->rx_pkts_512_1023 += YGE_READ_MIB32(pnum, GM_RXF_1023B); in yge_stats_update()
2883 stats->rx_pkts_1024_1518 += YGE_READ_MIB32(pnum, GM_RXF_1518B); in yge_stats_update()
2884 stats->rx_pkts_1519_max += YGE_READ_MIB32(pnum, GM_RXF_MAX_SZ); in yge_stats_update()
2885 stats->rx_pkts_too_long += YGE_READ_MIB32(pnum, GM_RXF_LNG_ERR); in yge_stats_update()
2886 stats->rx_pkts_jabbers += YGE_READ_MIB32(pnum, GM_RXF_JAB_PKT); in yge_stats_update()
2888 stats->rx_fifo_oflows += YGE_READ_MIB32(pnum, GM_RXE_FIFO_OV); in yge_stats_update()
2892 stats->tx_ucast_frames += YGE_READ_MIB32(pnum, GM_TXF_UC_OK); in yge_stats_update()
2893 stats->tx_bcast_frames += YGE_READ_MIB32(pnum, GM_TXF_BC_OK); in yge_stats_update()
2894 stats->tx_pause_frames += YGE_READ_MIB32(pnum, GM_TXF_MPAUSE); in yge_stats_update()
2895 stats->tx_mcast_frames += YGE_READ_MIB32(pnum, GM_TXF_MC_OK); in yge_stats_update()
2896 stats->tx_octets += YGE_READ_MIB64(pnum, GM_TXO_OK_LO); in yge_stats_update()
2897 stats->tx_pkts_64 += YGE_READ_MIB32(pnum, GM_TXF_64B); in yge_stats_update()
2898 stats->tx_pkts_65_127 += YGE_READ_MIB32(pnum, GM_TXF_127B); in yge_stats_update()
2899 stats->tx_pkts_128_255 += YGE_READ_MIB32(pnum, GM_TXF_255B); in yge_stats_update()
2900 stats->tx_pkts_256_511 += YGE_READ_MIB32(pnum, GM_TXF_511B); in yge_stats_update()
2901 stats->tx_pkts_512_1023 += YGE_READ_MIB32(pnum, GM_TXF_1023B); in yge_stats_update()
2902 stats->tx_pkts_1024_1518 += YGE_READ_MIB32(pnum, GM_TXF_1518B); in yge_stats_update()
2903 stats->tx_pkts_1519_max += YGE_READ_MIB32(pnum, GM_TXF_MAX_SZ); in yge_stats_update()
2905 stats->tx_colls += YGE_READ_MIB32(pnum, GM_TXF_COL); in yge_stats_update()
2906 stats->tx_late_colls += YGE_READ_MIB32(pnum, GM_TXF_LAT_COL); in yge_stats_update()
2907 stats->tx_excess_colls += YGE_READ_MIB32(pnum, GM_TXF_ABO_COL); in yge_stats_update()
2908 stats->tx_multi_colls += YGE_READ_MIB32(pnum, GM_TXF_MUL_COL); in yge_stats_update()
2909 stats->tx_single_colls += YGE_READ_MIB32(pnum, GM_TXF_SNG_COL); in yge_stats_update()
2910 stats->tx_underflows += YGE_READ_MIB32(pnum, GM_TXE_FIFO_UR); in yge_stats_update()
2944 struct yge_hw_stats *stats = &port->p_stats; in yge_m_stat()
2952 DEV_LOCK(port->p_dev); in yge_m_stat()
2954 DEV_UNLOCK(port->p_dev); in yge_m_stat()
2957 if (mii_m_getstat(port->p_mii, stat, val) == 0) { in yge_m_stat()
2963 *val = stats->rx_mcast_frames; in yge_m_stat()
2967 *val = stats->rx_bcast_frames; in yge_m_stat()
2971 *val = stats->tx_mcast_frames; in yge_m_stat()
2975 *val = stats->tx_bcast_frames; in yge_m_stat()
2979 *val = stats->rx_ucast_frames; in yge_m_stat()
2983 *val = stats->rx_good_octets; in yge_m_stat()
2987 *val = stats->tx_ucast_frames; in yge_m_stat()
2991 *val = stats->tx_octets; in yge_m_stat()
2995 *val = stats->rx_nobuf; in yge_m_stat()
2999 *val = stats->tx_colls; in yge_m_stat()
3003 *val = stats->rx_runt_errs; in yge_m_stat()
3007 *val = stats->rx_crc_errs; in yge_m_stat()
3011 *val = stats->tx_single_colls; in yge_m_stat()
3015 *val = stats->tx_multi_colls; in yge_m_stat()
3019 *val = stats->tx_late_colls; in yge_m_stat()
3023 *val = stats->tx_excess_colls; in yge_m_stat()
3027 *val = stats->rx_pkts_too_long; in yge_m_stat()
3031 *val = stats->rx_fifo_oflows; in yge_m_stat()
3035 *val = stats->tx_underflows; in yge_m_stat()
3039 *val = stats->rx_runts; in yge_m_stat()
3043 *val = stats->rx_pkts_jabbers; in yge_m_stat()
3057 DEV_LOCK(port->p_dev); in yge_m_start()
3061 * don't want to waste DMA resources that might better be used in yge_m_start()
3070 DEV_UNLOCK(port->p_dev); in yge_m_start()
3074 if (!port->p_dev->d_suspended) in yge_m_start()
3076 port->p_running = B_TRUE; in yge_m_start()
3077 DEV_UNLOCK(port->p_dev); in yge_m_start()
3079 mii_start(port->p_mii); in yge_m_start()
3088 yge_dev_t *dev = port->p_dev; in yge_m_stop()
3091 if (!dev->d_suspended) in yge_m_stop()
3094 port->p_running = B_FALSE; in yge_m_stop()
3106 DEV_LOCK(port->p_dev); in yge_m_promisc()
3109 port->p_promisc = on; in yge_m_promisc()
3112 DEV_UNLOCK(port->p_dev); in yge_m_promisc()
3127 DEV_LOCK(port->p_dev); in yge_m_multicst()
3129 if (port->p_mccount[bit] == 0) { in yge_m_multicst()
3131 port->p_mchash[bit / 32] |= (1 << (bit % 32)); in yge_m_multicst()
3134 port->p_mccount[bit]++; in yge_m_multicst()
3136 ASSERT(port->p_mccount[bit] > 0); in yge_m_multicst()
3137 port->p_mccount[bit]--; in yge_m_multicst()
3138 if (port->p_mccount[bit] == 0) { in yge_m_multicst()
3139 port->p_mchash[bit / 32] &= ~(1 << (bit % 32)); in yge_m_multicst()
3147 DEV_UNLOCK(port->p_dev); in yge_m_multicst()
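yge_m_multicst() keeps a per-bit reference count so that the 64-bit GMAC multicast hash (stored as two 32-bit words in p_mchash[]) only loses a bit when the last group mapping to it is removed. A standalone sketch of that bookkeeping; the CRC-based choice of the hash bit is assumed, since it is not among the matched lines.

#include <stdint.h>
#include <stdbool.h>
#include <assert.h>

#define HASH_BITS 64

struct mcast_filter {
	uint32_t mchash[HASH_BITS / 32];	/* hardware hash words */
	uint16_t mccount[HASH_BITS];		/* references per hash bit */
};

/* 'bit' would come from a CRC of the MAC address in the real driver. */
static bool mcast_update(struct mcast_filter *f, unsigned bit, bool add)
{
	bool hw_change = false;

	if (add) {
		if (f->mccount[bit] == 0) {
			f->mchash[bit / 32] |= (1u << (bit % 32));
			hw_change = true;	/* reprogram filter registers */
		}
		f->mccount[bit]++;
	} else {
		assert(f->mccount[bit] > 0);
		if (--f->mccount[bit] == 0) {
			f->mchash[bit / 32] &= ~(1u << (bit % 32));
			hw_change = true;
		}
	}
	return (hw_change);
}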
3156 DEV_LOCK(port->p_dev); in yge_m_unicst()
3158 bcopy(macaddr, port->p_curraddr, ETHERADDRL); in yge_m_unicst()
3161 DEV_UNLOCK(port->p_dev); in yge_m_unicst()
3176 TX_LOCK(port->p_dev); in yge_m_tx()
3178 if (port->p_dev->d_suspended) { in yge_m_tx()
3180 TX_UNLOCK(port->p_dev); in yge_m_tx()
3184 mp = mp->b_next; in yge_m_tx()
3191 ridx = port->p_port == YGE_PORT_A ? in yge_m_tx()
3193 idx = CSR_READ_2(port->p_dev, ridx); in yge_m_tx()
3194 if (port->p_tx_cons != idx) in yge_m_tx()
3198 nmp = mp->b_next; in yge_m_tx()
3199 mp->b_next = NULL; in yge_m_tx()
3202 mp->b_next = nmp; in yge_m_tx()
3211 CSR_WRITE_2(port->p_dev, in yge_m_tx()
3212 Y2_PREF_Q_ADDR(port->p_txq, PREF_UNIT_PUT_IDX_REG), in yge_m_tx()
3213 port->p_tx_prod); in yge_m_tx()
3216 TX_UNLOCK(port->p_dev); in yge_m_tx()
3219 mac_tx_update(port->p_mh); in yge_m_tx()
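yge_send() and yge_m_tx() together show the copy-based transmit path: a frame is refused if it exceeds p_framesize or would leave fewer than YGE_RESERVED_TX_DESC_CNT free descriptors, otherwise it is copied into a pre-bound buffer with mcopymsg(), the descriptor is filled, and after the batch the producer index is written to the TX queue's prefetch put-index register as the doorbell. A compact sketch of that enqueue-then-doorbell pattern, with the buffer copy and descriptor fields reduced to the essentials:

#include <stdint.h>
#include <stdbool.h>
#include <string.h>

#define TX_RING_CNT	16
#define TX_RESERVED	2	/* stand-in for YGE_RESERVED_TX_DESC_CNT */
#define BUF_SZ		1536

struct txdesc { uint32_t addr; uint32_t ctrl; };

struct txring {
	struct txdesc	desc[TX_RING_CNT];
	uint8_t		buf[TX_RING_CNT][BUF_SZ];   /* pre-bound DMA buffers */
	uint32_t	buf_paddr[TX_RING_CNT];     /* their DMA addresses */
	uint16_t	prod;
	uint16_t	cnt;
};

static bool tx_enqueue(struct txring *r, const void *frame, size_t len)
{
	if (len > BUF_SZ || r->cnt + 1 >= TX_RING_CNT - TX_RESERVED)
		return (false);		/* caller sets its "want tx" flag */

	memcpy(r->buf[r->prod], frame, len);		/* mcopymsg() role */
	r->desc[r->prod].addr = r->buf_paddr[r->prod];
	r->desc[r->prod].ctrl = (uint32_t)len;		/* opcode bits omitted */
	r->cnt++;
	r->prod = (r->prod + 1) % TX_RING_CNT;
	return (true);
}

/*
 * After queueing a batch, the driver writes r->prod to the TX prefetch
 * unit's PUT_IDX register (the CSR_WRITE_2 in yge_m_tx()) as the doorbell.
 */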
3236 if (mii_m_loop_ioctl(port->p_mii, wq, mp)) in yge_m_ioctl()
3253 err = mii_m_setprop(port->p_mii, pr_name, pr_num, pr_valsize, pr_val); in yge_m_setprop()
3258 DEV_LOCK(port->p_dev); in yge_m_setprop()
3267 if (new_mtu == port->p_mtu) { in yge_m_setprop()
3278 if (new_mtu > (port->p_flags & PORT_FLAG_NOJUMBO ? in yge_m_setprop()
3285 if (port->p_running) { in yge_m_setprop()
3319 err = mac_maxsdu_update(port->p_mh, new_mtu); in yge_m_setprop()
3325 port->p_mtu = new_mtu; in yge_m_setprop()
3335 DEV_UNLOCK(port->p_dev); in yge_m_setprop()
3346 return (mii_m_getprop(port->p_mii, pr_name, pr_num, pr_valsize, in yge_m_getprop()
3359 port->p_flags & PORT_FLAG_NOJUMBO ? in yge_m_propinfo()
3363 mii_m_propinfo(port->p_mii, pr_name, pr_num, prh); in yge_m_propinfo()
3372 dev->d_task_flags |= flag; in yge_dispatch()
3386 while ((flags = dev->d_task_flags) == 0) in yge_task()
3389 dev->d_task_flags = 0; in yge_task()
3421 ppa = port->p_ppa; in yge_error()
3423 ppa = ddi_get_instance(dev->d_dip); in yge_error()
3437 dev->d_port[0] = kmem_zalloc(sizeof (yge_port_t), KM_SLEEP); in yge_ddi_attach()
3438 dev->d_port[1] = kmem_zalloc(sizeof (yge_port_t), KM_SLEEP); in yge_ddi_attach()
3439 dev->d_dip = dip; in yge_ddi_attach()
3442 dev->d_port[0]->p_port = 0; in yge_ddi_attach()
3443 dev->d_port[0]->p_dev = dev; in yge_ddi_attach()
3444 dev->d_port[1]->p_port = 0; in yge_ddi_attach()
3445 dev->d_port[1]->p_dev = dev; in yge_ddi_attach()
3450 kmem_free(dev->d_port[1], sizeof (yge_port_t)); in yge_ddi_attach()
3451 kmem_free(dev->d_port[0], sizeof (yge_port_t)); in yge_ddi_attach()
3478 for (int i = 0; i < dev->d_num_port; i++) { in yge_ddi_detach()
3480 if (((mh = dev->d_port[i]->p_mh) != NULL) && in yge_ddi_detach()
3491 ASSERT(dip == dev->d_dip); in yge_ddi_detach()
3494 for (int i = 0; i < dev->d_num_port; i++) { in yge_ddi_detach()
3495 if ((mh = dev->d_port[i]->p_mh) != NULL) { in yge_ddi_detach()
3500 kmem_free(dev->d_port[1], sizeof (yge_port_t)); in yge_ddi_detach()
3501 kmem_free(dev->d_port[0], sizeof (yge_port_t)); in yge_ddi_detach()
3524 for (int i = 0; i < dev->d_num_port; i++) { in yge_quiesce()
3525 yge_port_t *port = dev->d_port[i]; in yge_quiesce()
3526 if (port->p_running) in yge_quiesce()