Lines Matching +full:mac +full:- +full:wol

1 /*-
2 * SPDX-License-Identifier: BSD-2-Clause
91 { -1, 0, 0 }
95 { -1, 0, 0 }
99 { -1, 0, 0 }
189 bus_read_4((sc)->mem[0], (reg))
191 bus_read_2((sc)->mem[0], (reg))
193 bus_read_1((sc)->mem[0], (reg))
195 bus_write_4((sc)->mem[0], (reg), (val))
197 bus_write_2((sc)->mem[0], (reg), (val))
199 bus_write_1((sc)->mem[0], (reg), (val))
201 ae_miibus_readreg(sc->dev, 0, reg)
203 ae_miibus_writereg(sc->dev, 0, reg, val)
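The accessor bodies above (original lines 189-203) have their macro names cut off by the match column; judging from call sites later in this listing (AE_WRITE_2, AE_READ_4 and so on), they are the register and PHY access wrappers from the driver's private header. A minimal sketch of how such wrappers are commonly written, with the macro names assumed rather than taken from the matched lines:

/* Sketch only: the macro names below are assumptions based on later call sites. */
#define	AE_READ_4(sc, reg)		bus_read_4((sc)->mem[0], (reg))
#define	AE_READ_2(sc, reg)		bus_read_2((sc)->mem[0], (reg))
#define	AE_READ_1(sc, reg)		bus_read_1((sc)->mem[0], (reg))
#define	AE_WRITE_4(sc, reg, val)	bus_write_4((sc)->mem[0], (reg), (val))
#define	AE_WRITE_2(sc, reg, val)	bus_write_2((sc)->mem[0], (reg), (val))
#define	AE_WRITE_1(sc, reg, val)	bus_write_1((sc)->mem[0], (reg), (val))
/* PHY registers are reached through the MII routines on PHY address 0. */
#define	AE_PHY_READ(sc, reg)		ae_miibus_readreg(sc->dev, 0, reg)
#define	AE_PHY_WRITE(sc, reg, val)	ae_miibus_writereg(sc->dev, 0, reg, val)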
247 sc->dev = dev; in ae_attach()
252 mtx_init(&sc->mtx, device_get_nameunit(dev), MTX_NETWORK_LOCK, MTX_DEF); in ae_attach()
253 callout_init_mtx(&sc->tick_ch, &sc->mtx, 0); in ae_attach()
254 TASK_INIT(&sc->int_task, 0, ae_int_task, sc); in ae_attach()
255 TASK_INIT(&sc->link_task, 0, ae_link_task, sc); in ae_attach()
259 sc->spec_mem = ae_res_spec_mem; in ae_attach()
262 * Allocate memory-mapped registers. in ae_attach()
264 error = bus_alloc_resources(dev, sc->spec_mem, sc->mem); in ae_attach()
267 sc->spec_mem = NULL; in ae_attach()
292 sc->spec_irq = ae_res_spec_msi; in ae_attach()
293 error = bus_alloc_resources(dev, sc->spec_irq, sc->irq); in ae_attach()
296 sc->spec_irq = NULL; in ae_attach()
299 sc->flags |= AE_FLAG_MSI; in ae_attach()
303 if (sc->spec_irq == NULL) { in ae_attach()
304 sc->spec_irq = ae_res_spec_irq; in ae_attach()
305 error = bus_alloc_resources(dev, sc->spec_irq, sc->irq); in ae_attach()
308 sc->spec_irq = NULL; in ae_attach()
322 ae_retrieve_address(sc); /* Load MAC address. */ in ae_attach()
328 ifp = sc->ifp = if_alloc(IFT_ETHER); in ae_attach()
341 sc->flags |= AE_FLAG_PMG; in ae_attach()
348 error = mii_attach(dev, &sc->miibus, ifp, ae_mediachange, in ae_attach()
356 ether_ifattach(ifp, sc->eaddr); in ae_attach()
363 sc->tq = taskqueue_create_fast("ae_taskq", M_WAITOK, in ae_attach()
364 taskqueue_thread_enqueue, &sc->tq); in ae_attach()
365 taskqueue_start_threads(&sc->tq, 1, PI_NET, "%s taskq", in ae_attach()
366 device_get_nameunit(sc->dev)); in ae_attach()
371 error = bus_setup_intr(dev, sc->irq[0], INTR_TYPE_NET | INTR_MPSAFE, in ae_attach()
372 ae_intr, NULL, sc, &sc->intrhand); in ae_attach()
375 taskqueue_free(sc->tq); in ae_attach()
376 sc->tq = NULL; in ae_attach()
399 ae_stats = &sc->stats; in ae_init_tunables()
401 ctx = device_get_sysctl_ctx(sc->dev); in ae_init_tunables()
402 root = device_get_sysctl_tree(sc->dev); in ae_init_tunables()
410 CTLFLAG_RD | CTLFLAG_MPSAFE, NULL, "Rx MAC statistics"); in ae_init_tunables()
412 "broadcast frames", &ae_stats->rx_bcast); in ae_init_tunables()
414 "multicast frames", &ae_stats->rx_mcast); in ae_init_tunables()
416 "PAUSE frames", &ae_stats->rx_pause); in ae_init_tunables()
418 "control frames", &ae_stats->rx_ctrl); in ae_init_tunables()
420 "frames with CRC errors", &ae_stats->rx_crcerr); in ae_init_tunables()
422 "frames with invalid opcode", &ae_stats->rx_codeerr); in ae_init_tunables()
424 "runt frames", &ae_stats->rx_runt); in ae_init_tunables()
426 "fragmented frames", &ae_stats->rx_frag); in ae_init_tunables()
428 "frames with alignment errors", &ae_stats->rx_align); in ae_init_tunables()
430 "frames truncated due to Rx FIFO inderrun", &ae_stats->rx_trunc); in ae_init_tunables()
436 CTLFLAG_RD | CTLFLAG_MPSAFE, NULL, "Tx MAC statistics"); in ae_init_tunables()
438 "broadcast frames", &ae_stats->tx_bcast); in ae_init_tunables()
440 "multicast frames", &ae_stats->tx_mcast); in ae_init_tunables()
442 "PAUSE frames", &ae_stats->tx_pause); in ae_init_tunables()
444 "control frames", &ae_stats->tx_ctrl); in ae_init_tunables()
446 "deferrals occuried", &ae_stats->tx_defer); in ae_init_tunables()
448 "excessive deferrals occuried", &ae_stats->tx_excdefer); in ae_init_tunables()
450 "single collisions occuried", &ae_stats->tx_singlecol); in ae_init_tunables()
452 "multiple collisions occuried", &ae_stats->tx_multicol); in ae_init_tunables()
454 "late collisions occuried", &ae_stats->tx_latecol); in ae_init_tunables()
456 "transmit aborts due collisions", &ae_stats->tx_abortcol); in ae_init_tunables()
458 "Tx FIFO underruns", &ae_stats->tx_underrun); in ae_init_tunables()
486 bus_barrier(sc->mem[0], AE_MASTER_REG, 4, in ae_reset()
498 device_printf(sc->dev, "reset timeout.\n"); in ae_reset()
511 device_printf(sc->dev, "could not enter idle state.\n"); in ae_reset()
552 ifp = sc->ifp; in ae_init_locked()
555 mii = device_get_softc(sc->miibus); in ae_init_locked()
569 * Set the MAC address. in ae_init_locked()
577 bzero(sc->rxd_base_dma, AE_RXD_COUNT_DEFAULT * 1536 + AE_RXD_PADDING); in ae_init_locked()
578 bzero(sc->txd_base, AE_TXD_BUFSIZE_DEFAULT); in ae_init_locked()
579 bzero(sc->txs_base, AE_TXS_COUNT_DEFAULT * 4); in ae_init_locked()
583 addr = sc->dma_rxd_busaddr; in ae_init_locked()
586 addr = sc->dma_txd_busaddr; in ae_init_locked()
588 addr = sc->dma_txs_busaddr; in ae_init_locked()
612 * Configure half-duplex operation. in ae_init_locked()
645 * Configure cut-through threshold. in ae_init_locked()
660 sc->txd_cur = sc->rxd_cur = 0; in ae_init_locked()
661 sc->txs_ack = sc->txd_ack = 0; in ae_init_locked()
662 sc->rxd_cur = 0; in ae_init_locked()
663 AE_WRITE_2(sc, AE_MB_TXD_IDX_REG, sc->txd_cur); in ae_init_locked()
664 AE_WRITE_2(sc, AE_MB_RXD_IDX_REG, sc->rxd_cur); in ae_init_locked()
666 sc->tx_inproc = 0; /* Number of packets the chip processes now. */ in ae_init_locked()
667 sc->flags |= AE_FLAG_TXAVAIL; /* Free Tx's available. */ in ae_init_locked()
680 device_printf(sc->dev, "Initialization failed.\n"); in ae_init_locked()
698 * Disable WOL. in ae_init_locked()
703 * Configure MAC. in ae_init_locked()
714 * Configure Rx MAC. in ae_init_locked()
725 sc->flags &= ~AE_FLAG_LINK; in ae_init_locked()
728 callout_reset(&sc->tick_ch, hz, ae_tick, sc); in ae_init_locked()
734 device_printf(sc->dev, "Initialization complete.\n"); in ae_init_locked()
748 ifp = sc->ifp; in ae_detach()
751 sc->flags |= AE_FLAG_DETACH; in ae_detach()
754 callout_drain(&sc->tick_ch); in ae_detach()
755 taskqueue_drain(sc->tq, &sc->int_task); in ae_detach()
756 taskqueue_drain(taskqueue_swi, &sc->link_task); in ae_detach()
759 if (sc->tq != NULL) { in ae_detach()
760 taskqueue_drain(sc->tq, &sc->int_task); in ae_detach()
761 taskqueue_free(sc->tq); in ae_detach()
762 sc->tq = NULL; in ae_detach()
764 bus_generic_detach(sc->dev); in ae_detach()
766 if (sc->intrhand != NULL) { in ae_detach()
767 bus_teardown_intr(dev, sc->irq[0], sc->intrhand); in ae_detach()
768 sc->intrhand = NULL; in ae_detach()
772 sc->ifp = NULL; in ae_detach()
774 if (sc->spec_irq != NULL) in ae_detach()
775 bus_release_resources(dev, sc->spec_irq, sc->irq); in ae_detach()
776 if (sc->spec_mem != NULL) in ae_detach()
777 bus_release_resources(dev, sc->spec_mem, sc->mem); in ae_detach()
778 if ((sc->flags & AE_FLAG_MSI) != 0) in ae_detach()
780 mtx_destroy(&sc->mtx); in ae_detach()
814 device_printf(sc->dev, "phy read timeout: %d.\n", reg); in ae_miibus_readreg()
850 device_printf(sc->dev, "phy write timeout: %d.\n", reg); in ae_miibus_writereg()
861 taskqueue_enqueue(taskqueue_swi, &sc->link_task); in ae_miibus_statchg()
874 mii = device_get_softc(sc->miibus); in ae_mediastatus()
876 ifmr->ifm_status = mii->mii_media_status; in ae_mediastatus()
877 ifmr->ifm_active = mii->mii_media_active; in ae_mediastatus()
893 mii = device_get_softc(sc->miibus); in ae_mediachange()
894 LIST_FOREACH(mii_sc, &mii->mii_phys, mii_list) in ae_mediachange()
918 error = pci_find_cap(sc->dev, PCIY_VPD, vpdc); in ae_check_eeprom_present()
943 device_printf(sc->dev, "timeout reading VPD register %d.\n", in ae_vpd_read_word()
1007 device_printf(sc->dev, in ae_get_vpd_eaddr()
1027 device_printf(sc->dev, in ae_get_reg_eaddr()
1048 device_printf(sc->dev, in ae_retrieve_address()
1055 sc->eaddr[0] = 0x02; /* U/L bit set. */ in ae_retrieve_address()
1056 sc->eaddr[1] = 0x1f; in ae_retrieve_address()
1057 sc->eaddr[2] = 0xc6; in ae_retrieve_address()
1058 sc->eaddr[3] = (eaddr[0] >> 16) & 0xff; in ae_retrieve_address()
1059 sc->eaddr[4] = (eaddr[0] >> 8) & 0xff; in ae_retrieve_address()
1060 sc->eaddr[5] = (eaddr[0] >> 0) & 0xff; in ae_retrieve_address()
1062 sc->eaddr[0] = (eaddr[1] >> 8) & 0xff; in ae_retrieve_address()
1063 sc->eaddr[1] = (eaddr[1] >> 0) & 0xff; in ae_retrieve_address()
1064 sc->eaddr[2] = (eaddr[0] >> 24) & 0xff; in ae_retrieve_address()
1065 sc->eaddr[3] = (eaddr[0] >> 16) & 0xff; in ae_retrieve_address()
1066 sc->eaddr[4] = (eaddr[0] >> 8) & 0xff; in ae_retrieve_address()
1067 sc->eaddr[5] = (eaddr[0] >> 0) & 0xff; in ae_retrieve_address()
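When a valid station address is found, the assignments above unpack it from two 32-bit words: the low 16 bits of eaddr[1] hold the two most significant octets and eaddr[0] holds the remaining four (the earlier branch at original lines 1055-1060 instead synthesizes a locally administered 02:1f:c6:xx:xx:xx fallback). A small stand-alone sketch of the same shifts, with made-up register contents:

#include <stdio.h>
#include <stdint.h>

int
main(void)
{
	/* Hypothetical register contents; eaddr[1] carries the top 16 bits. */
	uint32_t eaddr[2] = { 0x6c7d8e9f, 0x00004a5b };
	uint8_t mac[6];

	mac[0] = (eaddr[1] >> 8) & 0xff;
	mac[1] = (eaddr[1] >> 0) & 0xff;
	mac[2] = (eaddr[0] >> 24) & 0xff;
	mac[3] = (eaddr[0] >> 16) & 0xff;
	mac[4] = (eaddr[0] >> 8) & 0xff;
	mac[5] = (eaddr[0] >> 0) & 0xff;

	/* Prints 4a:5b:6c:7d:8e:9f for the sample values above. */
	printf("%02x:%02x:%02x:%02x:%02x:%02x\n",
	    mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
	return (0);
}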
1092 error = bus_dma_tag_create(bus_get_dma_tag(sc->dev), in ae_alloc_rings()
1096 &sc->dma_parent_tag); in ae_alloc_rings()
1098 device_printf(sc->dev, "could not create parent DMA tag.\n"); in ae_alloc_rings()
1105 error = bus_dma_tag_create(sc->dma_parent_tag, in ae_alloc_rings()
1109 &sc->dma_txd_tag); in ae_alloc_rings()
1111 device_printf(sc->dev, "could not create TxD DMA tag.\n"); in ae_alloc_rings()
1118 error = bus_dma_tag_create(sc->dma_parent_tag, in ae_alloc_rings()
1122 &sc->dma_txs_tag); in ae_alloc_rings()
1124 device_printf(sc->dev, "could not create TxS DMA tag.\n"); in ae_alloc_rings()
1131 error = bus_dma_tag_create(sc->dma_parent_tag, in ae_alloc_rings()
1135 &sc->dma_rxd_tag); in ae_alloc_rings()
1137 device_printf(sc->dev, "could not create RxD DMA tag.\n"); in ae_alloc_rings()
1144 error = bus_dmamem_alloc(sc->dma_txd_tag, (void **)&sc->txd_base, in ae_alloc_rings()
1146 &sc->dma_txd_map); in ae_alloc_rings()
1148 device_printf(sc->dev, in ae_alloc_rings()
1152 error = bus_dmamap_load(sc->dma_txd_tag, sc->dma_txd_map, sc->txd_base, in ae_alloc_rings()
1155 device_printf(sc->dev, in ae_alloc_rings()
1159 sc->dma_txd_busaddr = busaddr; in ae_alloc_rings()
1164 error = bus_dmamem_alloc(sc->dma_txs_tag, (void **)&sc->txs_base, in ae_alloc_rings()
1166 &sc->dma_txs_map); in ae_alloc_rings()
1168 device_printf(sc->dev, in ae_alloc_rings()
1172 error = bus_dmamap_load(sc->dma_txs_tag, sc->dma_txs_map, sc->txs_base, in ae_alloc_rings()
1175 device_printf(sc->dev, in ae_alloc_rings()
1179 sc->dma_txs_busaddr = busaddr; in ae_alloc_rings()
1184 error = bus_dmamem_alloc(sc->dma_rxd_tag, (void **)&sc->rxd_base_dma, in ae_alloc_rings()
1186 &sc->dma_rxd_map); in ae_alloc_rings()
1188 device_printf(sc->dev, in ae_alloc_rings()
1192 error = bus_dmamap_load(sc->dma_rxd_tag, sc->dma_rxd_map, in ae_alloc_rings()
1193 sc->rxd_base_dma, AE_RXD_COUNT_DEFAULT * 1536 + AE_RXD_PADDING, in ae_alloc_rings()
1196 device_printf(sc->dev, in ae_alloc_rings()
1200 sc->dma_rxd_busaddr = busaddr + AE_RXD_PADDING; in ae_alloc_rings()
1201 sc->rxd_base = (ae_rxd_t *)(sc->rxd_base_dma + AE_RXD_PADDING); in ae_alloc_rings()
1210 if (sc->dma_txd_tag != NULL) { in ae_dma_free()
1211 if (sc->dma_txd_busaddr != 0) in ae_dma_free()
1212 bus_dmamap_unload(sc->dma_txd_tag, sc->dma_txd_map); in ae_dma_free()
1213 if (sc->txd_base != NULL) in ae_dma_free()
1214 bus_dmamem_free(sc->dma_txd_tag, sc->txd_base, in ae_dma_free()
1215 sc->dma_txd_map); in ae_dma_free()
1216 bus_dma_tag_destroy(sc->dma_txd_tag); in ae_dma_free()
1217 sc->dma_txd_tag = NULL; in ae_dma_free()
1218 sc->txd_base = NULL; in ae_dma_free()
1219 sc->dma_txd_busaddr = 0; in ae_dma_free()
1221 if (sc->dma_txs_tag != NULL) { in ae_dma_free()
1222 if (sc->dma_txs_busaddr != 0) in ae_dma_free()
1223 bus_dmamap_unload(sc->dma_txs_tag, sc->dma_txs_map); in ae_dma_free()
1224 if (sc->txs_base != NULL) in ae_dma_free()
1225 bus_dmamem_free(sc->dma_txs_tag, sc->txs_base, in ae_dma_free()
1226 sc->dma_txs_map); in ae_dma_free()
1227 bus_dma_tag_destroy(sc->dma_txs_tag); in ae_dma_free()
1228 sc->dma_txs_tag = NULL; in ae_dma_free()
1229 sc->txs_base = NULL; in ae_dma_free()
1230 sc->dma_txs_busaddr = 0; in ae_dma_free()
1232 if (sc->dma_rxd_tag != NULL) { in ae_dma_free()
1233 if (sc->dma_rxd_busaddr != 0) in ae_dma_free()
1234 bus_dmamap_unload(sc->dma_rxd_tag, sc->dma_rxd_map); in ae_dma_free()
1235 if (sc->rxd_base_dma != NULL) in ae_dma_free()
1236 bus_dmamem_free(sc->dma_rxd_tag, sc->rxd_base_dma, in ae_dma_free()
1237 sc->dma_rxd_map); in ae_dma_free()
1238 bus_dma_tag_destroy(sc->dma_rxd_tag); in ae_dma_free()
1239 sc->dma_rxd_tag = NULL; in ae_dma_free()
1240 sc->rxd_base_dma = NULL; in ae_dma_free()
1241 sc->dma_rxd_busaddr = 0; in ae_dma_free()
1243 if (sc->dma_parent_tag != NULL) { in ae_dma_free()
1244 bus_dma_tag_destroy(sc->dma_parent_tag); in ae_dma_free()
1245 sc->dma_parent_tag = NULL; in ae_dma_free()
1309 ifp = sc->ifp; in ae_pm_init()
1310 if ((sc->flags & AE_FLAG_PMG) == 0) { in ae_pm_init()
1311 /* Disable WOL entirely. */ in ae_pm_init()
1317 * Configure WOL if enabled. in ae_pm_init()
1320 mii = device_get_softc(sc->miibus); in ae_pm_init()
1322 if ((mii->mii_media_status & IFM_AVALID) != 0 && in ae_pm_init()
1323 (mii->mii_media_status & IFM_ACTIVE) != 0) { in ae_pm_init()
1328 * Configure MAC. in ae_pm_init()
1337 if ((IFM_OPTIONS(mii->mii_media_active) & \ in ae_pm_init()
1365 pci_enable_pme(sc->dev); in ae_pm_init()
1392 AE_READ_4(sc, AE_WOL_REG); /* Clear WOL status. */ in ae_resume()
1393 if ((if_getflags(sc->ifp) & IFF_UP) != 0) in ae_resume()
1405 if (sc->txd_cur >= sc->txd_ack) in ae_tx_avail_size()
1406 avail = AE_TXD_BUFSIZE_DEFAULT - (sc->txd_cur - sc->txd_ack); in ae_tx_avail_size()
1408 avail = sc->txd_ack - sc->txd_cur; in ae_tx_avail_size()
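ae_tx_avail_size() above treats the Tx descriptor area as a byte-granular ring: when the producer index (txd_cur) is at or ahead of the consumer index (txd_ack), the free space is the buffer size minus the outstanding span; otherwise the producer has wrapped and only the gap up to the consumer remains. A stand-alone sketch of that arithmetic, with an assumed ring size:

#include <stdio.h>

/* Assumed ring size for illustration; the driver uses AE_TXD_BUFSIZE_DEFAULT. */
#define	TXD_BUFSIZE	8192

static int
tx_avail_size(int txd_cur, int txd_ack)
{
	int avail;

	if (txd_cur >= txd_ack)
		avail = TXD_BUFSIZE - (txd_cur - txd_ack);
	else
		avail = txd_ack - txd_cur;
	return (avail);
}

int
main(void)
{
	/* Producer ahead of consumer: 8192 - (5000 - 1000) = 4192 bytes free. */
	printf("%d\n", tx_avail_size(5000, 1000));
	/* Producer wrapped past the end: only 1000 - 200 = 800 bytes free. */
	printf("%d\n", tx_avail_size(200, 1000));
	return (0);
}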
1424 len = m0->m_pkthdr.len; in ae_encap()
1426 if ((sc->flags & AE_FLAG_TXAVAIL) == 0 || in ae_encap()
1429 if_printf(sc->ifp, "No free Tx available.\n"); in ae_encap()
1434 hdr = (ae_txd_t *)(sc->txd_base + sc->txd_cur); in ae_encap()
1437 sc->txd_cur = (sc->txd_cur + sizeof(ae_txd_t)) % AE_TXD_BUFSIZE_DEFAULT; in ae_encap()
1439 to_end = AE_TXD_BUFSIZE_DEFAULT - sc->txd_cur; in ae_encap()
1441 m_copydata(m0, 0, len, (caddr_t)(sc->txd_base + sc->txd_cur)); in ae_encap()
1443 m_copydata(m0, 0, to_end, (caddr_t)(sc->txd_base + in ae_encap()
1444 sc->txd_cur)); in ae_encap()
1445 m_copydata(m0, to_end, len - to_end, (caddr_t)sc->txd_base); in ae_encap()
1451 if ((m0->m_flags & M_VLANTAG) != 0) { in ae_encap()
1452 hdr->vlan = htole16(AE_TXD_VLAN(m0->m_pkthdr.ether_vtag)); in ae_encap()
1453 hdr->len = htole16(len | AE_TXD_INSERT_VTAG); in ae_encap()
1455 hdr->len = htole16(len); in ae_encap()
1459 * Set current TxD position and round up to a 4-byte boundary. in ae_encap()
1461 sc->txd_cur = ((sc->txd_cur + len + 3) & ~3) % AE_TXD_BUFSIZE_DEFAULT; in ae_encap()
1462 if (sc->txd_cur == sc->txd_ack) in ae_encap()
1463 sc->flags &= ~AE_FLAG_TXAVAIL; in ae_encap()
1465 if_printf(sc->ifp, "New txd_cur = %d.\n", sc->txd_cur); in ae_encap()
1471 sc->txs_base[sc->txs_cur].flags &= ~htole16(AE_TXS_UPDATE); in ae_encap()
1472 sc->txs_cur = (sc->txs_cur + 1) % AE_TXS_COUNT_DEFAULT; in ae_encap()
1473 if (sc->txs_cur == sc->txs_ack) in ae_encap()
1474 sc->flags &= ~AE_FLAG_TXAVAIL; in ae_encap()
1479 bus_dmamap_sync(sc->dma_txd_tag, sc->dma_txd_map, BUS_DMASYNC_PREREAD | in ae_encap()
1481 bus_dmamap_sync(sc->dma_txs_tag, sc->dma_txs_map, in ae_encap()
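The pair of m_copydata() calls above (original lines 1443-1445) covers frames that straddle the end of the same byte ring: the first copy fills the tail of the buffer and the second restarts at the base. A stand-alone memcpy() sketch of that split, with a toy ring size and offsets chosen purely for illustration:

#include <stdio.h>
#include <string.h>

#define	RING_SIZE	16	/* Toy ring for illustration. */

int
main(void)
{
	char ring[RING_SIZE];
	const char payload[] = "ABCDEFGH";	/* 8-byte frame. */
	size_t len = sizeof(payload) - 1;
	size_t cur = 12;			/* Producer offset near the end. */
	size_t to_end = RING_SIZE - cur;

	if (len <= to_end) {
		memcpy(ring + cur, payload, len);
	} else {
		/* Frame wraps: tail of the buffer first, then restart at 0. */
		memcpy(ring + cur, payload, to_end);
		memcpy(ring, payload + to_end, len - to_end);
	}
	/* ring[12..15] = "ABCD", ring[0..3] = "EFGH". */
	printf("%.4s %.4s\n", ring + cur, ring);
	return (0);
}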
1515 IFF_DRV_RUNNING || (sc->flags & AE_FLAG_LINK) == 0) in ae_start_locked()
1536 sc->tx_inproc++; in ae_start_locked()
1545 AE_WRITE_2(sc, AE_MB_TXD_IDX_REG, sc->txd_cur / 4); in ae_start_locked()
1546 sc->wd_timer = AE_TX_TIMEOUT; /* Load watchdog. */ in ae_start_locked()
1549 if_printf(ifp, "Tx pos now is %d.\n", sc->txd_cur); in ae_start_locked()
1566 ifp = sc->ifp; in ae_link_task()
1567 mii = device_get_softc(sc->miibus); in ae_link_task()
1574 sc->flags &= ~AE_FLAG_LINK; in ae_link_task()
1575 if ((mii->mii_media_status & (IFM_AVALID | IFM_ACTIVE)) == in ae_link_task()
1577 switch(IFM_SUBTYPE(mii->mii_media_active)) { in ae_link_task()
1580 sc->flags |= AE_FLAG_LINK; in ae_link_task()
1593 if ((sc->flags & AE_FLAG_LINK) != 0) { in ae_link_task()
1621 * Stop Rx MAC engine. in ae_stop_rxmac()
1645 device_printf(sc->dev, "timed out while stopping Rx MAC.\n"); in ae_stop_rxmac()
1657 * Stop Tx MAC engine. in ae_stop_txmac()
1681 device_printf(sc->dev, "timed out while stopping Tx MAC.\n"); in ae_stop_txmac()
1692 mii = device_get_softc(sc->miibus); in ae_mac_config()
1697 if ((IFM_OPTIONS(mii->mii_media_active) & IFM_FDX) != 0) in ae_mac_config()
1720 taskqueue_enqueue(sc->tq, &sc->int_task); in ae_intr()
1736 ifp = sc->ifp; in ae_int_task()
1766 * Re-enable interrupts. in ae_int_task()
1770 if ((sc->flags & AE_FLAG_TXAVAIL) != 0) { in ae_int_task()
1789 ifp = sc->ifp; in ae_tx_intr()
1798 bus_dmamap_sync(sc->dma_txd_tag, sc->dma_txd_map, in ae_tx_intr()
1800 bus_dmamap_sync(sc->dma_txs_tag, sc->dma_txs_map, in ae_tx_intr()
1804 txs = sc->txs_base + sc->txs_ack; in ae_tx_intr()
1805 flags = le16toh(txs->flags); in ae_tx_intr()
1808 txs->flags = htole16(flags & ~AE_TXS_UPDATE); in ae_tx_intr()
1810 ae_update_stats_tx(flags, &sc->stats); in ae_tx_intr()
1815 sc->txs_ack = (sc->txs_ack + 1) % AE_TXS_COUNT_DEFAULT; in ae_tx_intr()
1816 sc->flags |= AE_FLAG_TXAVAIL; in ae_tx_intr()
1818 txd = (ae_txd_t *)(sc->txd_base + sc->txd_ack); in ae_tx_intr()
1819 if (txs->len != txd->len) in ae_tx_intr()
1820 device_printf(sc->dev, "Size mismatch: TxS:%d TxD:%d\n", in ae_tx_intr()
1821 le16toh(txs->len), le16toh(txd->len)); in ae_tx_intr()
1824 * Move txd ack and align on 4-byte boundary. in ae_tx_intr()
1826 sc->txd_ack = ((sc->txd_ack + le16toh(txd->len) + in ae_tx_intr()
1834 sc->tx_inproc--; in ae_tx_intr()
1837 if ((sc->flags & AE_FLAG_TXAVAIL) != 0) in ae_tx_intr()
1839 if (sc->tx_inproc < 0) { in ae_tx_intr()
1841 sc->tx_inproc = 0; in ae_tx_intr()
1844 if (sc->tx_inproc == 0) in ae_tx_intr()
1845 sc->wd_timer = 0; /* Unarm watchdog. */ in ae_tx_intr()
1850 bus_dmamap_sync(sc->dma_txd_tag, sc->dma_txd_map, in ae_tx_intr()
1852 bus_dmamap_sync(sc->dma_txs_tag, sc->dma_txs_map, in ae_tx_intr()
1866 ifp = sc->ifp; in ae_rxeof()
1867 flags = le16toh(rxd->flags); in ae_rxeof()
1872 size = le16toh(rxd->len) - ETHER_CRC_LEN; in ae_rxeof()
1873 if (size < (ETHER_MIN_LEN - ETHER_CRC_LEN - ETHER_VLAN_ENCAP_LEN)) { in ae_rxeof()
1879 m = m_devget(&rxd->data[0], size, ETHER_ALIGN, ifp, NULL); in ae_rxeof()
1887 m->m_pkthdr.ether_vtag = AE_RXD_VLAN(le16toh(rxd->vlan)); in ae_rxeof()
1888 m->m_flags |= M_VLANTAG; in ae_rxeof()
1912 ifp = sc->ifp; in ae_rx_intr()
1917 bus_dmamap_sync(sc->dma_rxd_tag, sc->dma_rxd_map, in ae_rx_intr()
1921 rxd = (ae_rxd_t *)(sc->rxd_base + sc->rxd_cur); in ae_rx_intr()
1922 flags = le16toh(rxd->flags); in ae_rx_intr()
1925 rxd->flags = htole16(flags & ~AE_RXD_UPDATE); in ae_rx_intr()
1927 ae_update_stats_rx(flags, &sc->stats); in ae_rx_intr()
1932 sc->rxd_cur = (sc->rxd_cur + 1) % AE_RXD_COUNT_DEFAULT; in ae_rx_intr()
1941 bus_dmamap_sync(sc->dma_rxd_tag, sc->dma_rxd_map, in ae_rx_intr()
1946 AE_WRITE_2(sc, AE_MB_RXD_IDX_REG, sc->rxd_cur); in ae_rx_intr()
1957 ifp = sc->ifp; in ae_watchdog()
1959 if (sc->wd_timer == 0 || --sc->wd_timer != 0) in ae_watchdog()
1962 if ((sc->flags & AE_FLAG_LINK) == 0) in ae_watchdog()
1965 if_printf(ifp, "watchdog timeout - resetting.\n"); in ae_watchdog()
1984 mii = device_get_softc(sc->miibus); in ae_tick()
1987 callout_reset(&sc->tick_ch, hz, ae_tick, sc); in ae_tick()
1997 ifp = sc->ifp; in ae_rxvlan()
2027 ifp = sc->ifp; in ae_rxfilter()
2075 if (ifr->ifr_mtu < ETHERMIN || ifr->ifr_mtu > ETHERMTU) in ae_ioctl()
2077 else if (if_getmtu(ifp) != ifr->ifr_mtu) { in ae_ioctl()
2079 if_setmtu(ifp, ifr->ifr_mtu); in ae_ioctl()
2091 if (((if_getflags(ifp) ^ sc->if_flags) in ae_ioctl()
2095 if ((sc->flags & AE_FLAG_DETACH) == 0) in ae_ioctl()
2102 sc->if_flags = if_getflags(ifp); in ae_ioctl()
2114 mii = device_get_softc(sc->miibus); in ae_ioctl()
2115 error = ifmedia_ioctl(ifp, ifr, &mii->mii_media, cmd); in ae_ioctl()
2119 mask = ifr->ifr_reqcap ^ if_getcapenable(ifp); in ae_ioctl()
2143 ifp = sc->ifp; in ae_stop()
2145 sc->flags &= ~AE_FLAG_LINK; in ae_stop()
2146 sc->wd_timer = 0; /* Cancel watchdog. */ in ae_stop()
2147 callout_stop(&sc->tick_ch); in ae_stop()
2176 device_printf(sc->dev, "could not enter idle state in stop.\n"); in ae_stop()
2184 stats->tx_bcast++; in ae_update_stats_tx()
2186 stats->tx_mcast++; in ae_update_stats_tx()
2188 stats->tx_pause++; in ae_update_stats_tx()
2190 stats->tx_ctrl++; in ae_update_stats_tx()
2192 stats->tx_defer++; in ae_update_stats_tx()
2194 stats->tx_excdefer++; in ae_update_stats_tx()
2196 stats->tx_singlecol++; in ae_update_stats_tx()
2198 stats->tx_multicol++; in ae_update_stats_tx()
2200 stats->tx_latecol++; in ae_update_stats_tx()
2202 stats->tx_abortcol++; in ae_update_stats_tx()
2204 stats->tx_underrun++; in ae_update_stats_tx()
2212 stats->rx_bcast++; in ae_update_stats_rx()
2214 stats->rx_mcast++; in ae_update_stats_rx()
2216 stats->rx_pause++; in ae_update_stats_rx()
2218 stats->rx_ctrl++; in ae_update_stats_rx()
2220 stats->rx_crcerr++; in ae_update_stats_rx()
2222 stats->rx_codeerr++; in ae_update_stats_rx()
2224 stats->rx_runt++; in ae_update_stats_rx()
2226 stats->rx_frag++; in ae_update_stats_rx()
2228 stats->rx_trunc++; in ae_update_stats_rx()
2230 stats->rx_align++; in ae_update_stats_rx()