Lines Matching +full:rx +full:- +full:m
1 /*-
2 * SPDX-License-Identifier: BSD-3-Clause
187 nitems(et_devices) - 1);
209 for (d = et_devices; d->desc != NULL; ++d) { in et_probe()
210 if (vid == d->vid && did == d->did) { in et_probe()
211 device_set_desc(dev, d->desc); in et_probe()
228 sc->dev = dev; in et_attach()
229 mtx_init(&sc->sc_mtx, device_get_nameunit(dev), MTX_NETWORK_LOCK, in et_attach()
231 callout_init_mtx(&sc->sc_tick, &sc->sc_mtx, 0); in et_attach()
233 ifp = sc->ifp = if_alloc(IFT_ETHER); in et_attach()
238 sc->sc_rx_intr_npkts = et_rx_intr_npkts; in et_attach()
239 sc->sc_rx_intr_delay = et_rx_intr_delay; in et_attach()
240 sc->sc_tx_intr_nsegs = et_tx_intr_nsegs; in et_attach()
241 sc->sc_timer = et_timer; in et_attach()
249 sc->sc_mem_rid = PCIR_BAR(0); in et_attach()
250 sc->sc_mem_res = bus_alloc_resource_any(dev, SYS_RES_MEMORY, in et_attach()
251 &sc->sc_mem_rid, RF_ACTIVE); in et_attach()
252 if (sc->sc_mem_res == NULL) { in et_attach()
259 sc->sc_expcap = cap; in et_attach()
260 sc->sc_flags |= ET_FLAG_PCIE; in et_attach()
271 sc->sc_flags |= ET_FLAG_MSI; in et_attach()
280 if ((sc->sc_flags & ET_FLAG_MSI) == 0) { in et_attach()
281 sc->sc_irq_rid = 0; in et_attach()
282 sc->sc_irq_res = bus_alloc_resource_any(dev, SYS_RES_IRQ, in et_attach()
283 &sc->sc_irq_rid, RF_SHAREABLE | RF_ACTIVE); in et_attach()
285 sc->sc_irq_rid = 1; in et_attach()
286 sc->sc_irq_res = bus_alloc_resource_any(dev, SYS_RES_IRQ, in et_attach()
287 &sc->sc_irq_rid, RF_ACTIVE); in et_attach()
289 if (sc->sc_irq_res == NULL) { in et_attach()
296 sc->sc_flags |= ET_FLAG_FASTETHER; in et_attach()
306 if ((sc->sc_flags & ET_FLAG_FASTETHER) == 0) in et_attach()
325 if_setsendqlen(ifp, ET_TX_NDESC - 1); in et_attach()
330 error = mii_attach(dev, &sc->sc_miibus, ifp, et_ifmedia_upd, in et_attach()
343 error = bus_setup_intr(dev, sc->sc_irq_res, INTR_TYPE_NET | INTR_MPSAFE, in et_attach()
344 NULL, et_intr, sc, &sc->sc_irq_handle); in et_attach()
366 ether_ifdetach(sc->ifp); in et_detach()
370 callout_drain(&sc->sc_tick); in et_detach()
375 if (sc->sc_irq_handle != NULL) in et_detach()
376 bus_teardown_intr(dev, sc->sc_irq_res, sc->sc_irq_handle); in et_detach()
377 if (sc->sc_irq_res != NULL) in et_detach()
379 rman_get_rid(sc->sc_irq_res), sc->sc_irq_res); in et_detach()
380 if ((sc->sc_flags & ET_FLAG_MSI) != 0) in et_detach()
382 if (sc->sc_mem_res != NULL) in et_detach()
384 rman_get_rid(sc->sc_mem_res), sc->sc_mem_res); in et_detach()
386 if (sc->ifp != NULL) in et_detach()
387 if_free(sc->ifp); in et_detach()
391 mtx_destroy(&sc->sc_mtx); in et_detach()
435 if_printf(sc->ifp, in et_miibus_readreg()
480 if_printf(sc->ifp, in et_miibus_writereg()
503 mii = device_get_softc(sc->sc_miibus); in et_miibus_statchg()
504 ifp = sc->ifp; in et_miibus_statchg()
509 sc->sc_flags &= ~ET_FLAG_LINK; in et_miibus_statchg()
510 if ((mii->mii_media_status & (IFM_ACTIVE | IFM_AVALID)) == in et_miibus_statchg()
512 switch (IFM_SUBTYPE(mii->mii_media_active)) { in et_miibus_statchg()
515 sc->sc_flags |= ET_FLAG_LINK; in et_miibus_statchg()
518 if ((sc->sc_flags & ET_FLAG_FASTETHER) == 0) in et_miibus_statchg()
519 sc->sc_flags |= ET_FLAG_LINK; in et_miibus_statchg()
524 /* XXX Stop TX/RX MAC? */ in et_miibus_statchg()
525 if ((sc->sc_flags & ET_FLAG_LINK) == 0) in et_miibus_statchg()
528 /* Program MACs with resolved speed/duplex/flow-control. */ in et_miibus_statchg()
541 if (IFM_SUBTYPE(mii->mii_media_active) == IFM_1000_T) in et_miibus_statchg()
548 if (IFM_OPTIONS(mii->mii_media_active) & IFM_FDX) { in et_miibus_statchg()
555 * provide a way that tells how many free RX in et_miibus_statchg()
562 if (IFM_OPTIONS(mii->mii_media_active) & IFM_ETH_TXPAUSE) in et_miibus_statchg()
565 if (IFM_OPTIONS(mii->mii_media_active) & IFM_ETH_RXPAUSE) in et_miibus_statchg()
585 if_printf(ifp, "can't enable RX/TX\n"); in et_miibus_statchg()
586 sc->sc_flags |= ET_FLAG_TXRX_ENABLED; in et_miibus_statchg()
599 mii = device_get_softc(sc->sc_miibus); in et_ifmedia_upd_locked()
600 LIST_FOREACH(miisc, &mii->mii_phys, mii_list) in et_ifmedia_upd_locked()
632 mii = device_get_softc(sc->sc_miibus); in et_ifmedia_sts()
634 ifmr->ifm_active = mii->mii_media_active; in et_ifmedia_sts()
635 ifmr->ifm_status = mii->mii_media_status; in et_ifmedia_sts()
646 ifp = sc->ifp; in et_stop()
647 callout_stop(&sc->sc_tick); in et_stop()
662 sc->sc_tx = 0; in et_stop()
663 sc->sc_tx_intr = 0; in et_stop()
664 sc->sc_flags &= ~ET_FLAG_TXRX_ENABLED; in et_stop()
666 sc->watchdog_timer = 0; in et_stop()
680 pci_read_config(sc->dev, ET_PCIR_EEPROM_STATUS, 1); in et_bus_config()
681 val = pci_read_config(sc->dev, ET_PCIR_EEPROM_STATUS, 1); in et_bus_config()
683 device_printf(sc->dev, "EEPROM status error 0x%02x\n", val); in et_bus_config()
689 if ((sc->sc_flags & ET_FLAG_PCIE) == 0) in et_bus_config()
696 val = pci_read_config(sc->dev, in et_bus_config()
697 sc->sc_expcap + PCIER_DEVICE_CAP, 4); in et_bus_config()
712 ack_latency = pci_read_config(sc->dev, ET_PCIR_ACK_LATENCY, 2); in et_bus_config()
713 replay_timer = pci_read_config(sc->dev, in et_bus_config()
715 device_printf(sc->dev, "ack latency %u, replay timer %u\n", in et_bus_config()
720 pci_write_config(sc->dev, ET_PCIR_ACK_LATENCY, ack_latency, 2); in et_bus_config()
721 pci_write_config(sc->dev, ET_PCIR_REPLAY_TIMER, replay_timer, in et_bus_config()
728 val = pci_read_config(sc->dev, ET_PCIR_L0S_L1_LATENCY, 4); in et_bus_config()
734 pci_write_config(sc->dev, ET_PCIR_L0S_L1_LATENCY, val, 4); in et_bus_config()
739 pci_set_max_read_req(sc->dev, 2048); in et_bus_config()
756 eaddr[i] = (val >> (8 * (i - 4))) & 0xff; in et_get_eaddr()
796 ctx->et_busaddr = segs->ds_addr; in et_dma_map_addr()
807 error = bus_dma_tag_create(sc->sc_dtag, alignment, 0, BUS_SPACE_MAXADDR, in et_dma_ring_alloc()
811 device_printf(sc->dev, "could not create %s dma tag\n", msg); in et_dma_ring_alloc()
818 device_printf(sc->dev, in et_dma_ring_alloc()
827 device_printf(sc->dev, in et_dma_ring_alloc()
866 error = bus_dma_tag_create(bus_get_dma_tag(sc->dev), 1, 0, in et_dma_alloc()
869 &sc->sc_dtag); in et_dma_alloc()
871 device_printf(sc->dev, "could not allocate parent dma tag\n"); in et_dma_alloc()
876 tx_ring = &sc->sc_tx_ring; in et_dma_alloc()
878 &tx_ring->tr_dtag, (uint8_t **)&tx_ring->tr_desc, &tx_ring->tr_dmap, in et_dma_alloc()
879 &tx_ring->tr_paddr, "TX ring"); in et_dma_alloc()
884 txsd = &sc->sc_tx_status; in et_dma_alloc()
886 &txsd->txsd_dtag, (uint8_t **)&txsd->txsd_status, &txsd->txsd_dmap, in et_dma_alloc()
887 &txsd->txsd_paddr, "TX status block"); in et_dma_alloc()
891 /* RX ring 0, used to receive small sized frames. */ in et_dma_alloc()
892 rx_ring = &sc->sc_rx_ring[0]; in et_dma_alloc()
894 &rx_ring->rr_dtag, (uint8_t **)&rx_ring->rr_desc, &rx_ring->rr_dmap, in et_dma_alloc()
895 &rx_ring->rr_paddr, "RX ring 0"); in et_dma_alloc()
896 rx_ring->rr_posreg = ET_RX_RING0_POS; in et_dma_alloc()
900 /* RX ring 1, used to store normal sized frames. */ in et_dma_alloc()
901 rx_ring = &sc->sc_rx_ring[1]; in et_dma_alloc()
903 &rx_ring->rr_dtag, (uint8_t **)&rx_ring->rr_desc, &rx_ring->rr_dmap, in et_dma_alloc()
904 &rx_ring->rr_paddr, "RX ring 1"); in et_dma_alloc()
905 rx_ring->rr_posreg = ET_RX_RING1_POS; in et_dma_alloc()
909 /* RX stat ring. */ in et_dma_alloc()
910 rxst_ring = &sc->sc_rxstat_ring; in et_dma_alloc()
912 &rxst_ring->rsr_dtag, (uint8_t **)&rxst_ring->rsr_stat, in et_dma_alloc()
913 &rxst_ring->rsr_dmap, &rxst_ring->rsr_paddr, "RX stat ring"); in et_dma_alloc()
917 /* RX status block. */ in et_dma_alloc()
918 rxsd = &sc->sc_rx_status; in et_dma_alloc()
920 sizeof(struct et_rxstatus), &rxsd->rxsd_dtag, in et_dma_alloc()
921 (uint8_t **)&rxsd->rxsd_status, &rxsd->rxsd_dmap, in et_dma_alloc()
922 &rxsd->rxsd_paddr, "RX status block"); in et_dma_alloc()
927 error = bus_dma_tag_create(bus_get_dma_tag(sc->dev), 1, 0, in et_dma_alloc()
930 &sc->sc_mbuf_dtag); in et_dma_alloc()
932 device_printf(sc->dev, in et_dma_alloc()
937 /* Create DMA tag for mini RX mbufs to use RX ring 0. */ in et_dma_alloc()
938 error = bus_dma_tag_create(sc->sc_mbuf_dtag, 1, 0, in et_dma_alloc()
940 MHLEN, 0, NULL, NULL, &sc->sc_rx_mini_tag); in et_dma_alloc()
942 device_printf(sc->dev, "could not create mini RX dma tag\n"); in et_dma_alloc()
946 /* Create DMA tag for standard RX mbufs to use RX ring 1. */ in et_dma_alloc()
947 error = bus_dma_tag_create(sc->sc_mbuf_dtag, 1, 0, in et_dma_alloc()
949 MCLBYTES, 0, NULL, NULL, &sc->sc_rx_tag); in et_dma_alloc()
951 device_printf(sc->dev, "could not create RX dma tag\n"); in et_dma_alloc()
956 error = bus_dma_tag_create(sc->sc_mbuf_dtag, 1, 0, in et_dma_alloc()
959 &sc->sc_tx_tag); in et_dma_alloc()
961 device_printf(sc->dev, "could not create TX dma tag\n"); in et_dma_alloc()
965 /* Initialize RX ring 0. */ in et_dma_alloc()
966 rbd = &sc->sc_rx_data[0]; in et_dma_alloc()
967 rbd->rbd_bufsize = ET_RXDMA_CTRL_RING0_128; in et_dma_alloc()
968 rbd->rbd_newbuf = et_newbuf_hdr; in et_dma_alloc()
969 rbd->rbd_discard = et_rxbuf_discard; in et_dma_alloc()
970 rbd->rbd_softc = sc; in et_dma_alloc()
971 rbd->rbd_ring = &sc->sc_rx_ring[0]; in et_dma_alloc()
972 /* Create DMA maps for mini RX buffers, ring 0. */ in et_dma_alloc()
974 error = bus_dmamap_create(sc->sc_rx_mini_tag, 0, in et_dma_alloc()
975 &rbd->rbd_buf[i].rb_dmap); in et_dma_alloc()
977 device_printf(sc->dev, in et_dma_alloc()
978 "could not create DMA map for mini RX mbufs\n"); in et_dma_alloc()
983 /* Create a spare DMA map for mini RX buffers, ring 0. */ in et_dma_alloc()
984 error = bus_dmamap_create(sc->sc_rx_mini_tag, 0, in et_dma_alloc()
985 &sc->sc_rx_mini_sparemap); in et_dma_alloc()
987 device_printf(sc->dev, in et_dma_alloc()
988 "could not create spare DMA map for mini RX mbuf\n"); in et_dma_alloc()
992 /* Initialize RX ring 1. */ in et_dma_alloc()
993 rbd = &sc->sc_rx_data[1]; in et_dma_alloc()
994 rbd->rbd_bufsize = ET_RXDMA_CTRL_RING1_2048; in et_dma_alloc()
995 rbd->rbd_newbuf = et_newbuf_cluster; in et_dma_alloc()
996 rbd->rbd_discard = et_rxbuf_discard; in et_dma_alloc()
997 rbd->rbd_softc = sc; in et_dma_alloc()
998 rbd->rbd_ring = &sc->sc_rx_ring[1]; in et_dma_alloc()
999 /* Create DMA maps for standard RX buffers, ring 1. */ in et_dma_alloc()
1001 error = bus_dmamap_create(sc->sc_rx_tag, 0, in et_dma_alloc()
1002 &rbd->rbd_buf[i].rb_dmap); in et_dma_alloc()
1004 device_printf(sc->dev, in et_dma_alloc()
1005 "could not create DMA map for mini RX mbufs\n"); in et_dma_alloc()
1010 /* Create a spare DMA map for standard RX buffers, ring 1. */ in et_dma_alloc()
1011 error = bus_dmamap_create(sc->sc_rx_tag, 0, &sc->sc_rx_sparemap); in et_dma_alloc()
1013 device_printf(sc->dev, in et_dma_alloc()
1014 "could not create spare DMA map for RX mbuf\n"); in et_dma_alloc()
1019 tbd = &sc->sc_tx_data; in et_dma_alloc()
1021 error = bus_dmamap_create(sc->sc_tx_tag, 0, in et_dma_alloc()
1022 &tbd->tbd_buf[i].tb_dmap); in et_dma_alloc()
1024 device_printf(sc->dev, in et_dma_alloc()
1044 /* Destroy DMA maps for mini RX buffers, ring 0. */ in et_dma_free()
1045 rbd = &sc->sc_rx_data[0]; in et_dma_free()
1047 if (rbd->rbd_buf[i].rb_dmap) { in et_dma_free()
1048 bus_dmamap_destroy(sc->sc_rx_mini_tag, in et_dma_free()
1049 rbd->rbd_buf[i].rb_dmap); in et_dma_free()
1050 rbd->rbd_buf[i].rb_dmap = NULL; in et_dma_free()
1053 if (sc->sc_rx_mini_sparemap) { in et_dma_free()
1054 bus_dmamap_destroy(sc->sc_rx_mini_tag, sc->sc_rx_mini_sparemap); in et_dma_free()
1055 sc->sc_rx_mini_sparemap = NULL; in et_dma_free()
1057 if (sc->sc_rx_mini_tag) { in et_dma_free()
1058 bus_dma_tag_destroy(sc->sc_rx_mini_tag); in et_dma_free()
1059 sc->sc_rx_mini_tag = NULL; in et_dma_free()
1062 /* Destroy DMA maps for standard RX buffers, ring 1. */ in et_dma_free()
1063 rbd = &sc->sc_rx_data[1]; in et_dma_free()
1065 if (rbd->rbd_buf[i].rb_dmap) { in et_dma_free()
1066 bus_dmamap_destroy(sc->sc_rx_tag, in et_dma_free()
1067 rbd->rbd_buf[i].rb_dmap); in et_dma_free()
1068 rbd->rbd_buf[i].rb_dmap = NULL; in et_dma_free()
1071 if (sc->sc_rx_sparemap) { in et_dma_free()
1072 bus_dmamap_destroy(sc->sc_rx_tag, sc->sc_rx_sparemap); in et_dma_free()
1073 sc->sc_rx_sparemap = NULL; in et_dma_free()
1075 if (sc->sc_rx_tag) { in et_dma_free()
1076 bus_dma_tag_destroy(sc->sc_rx_tag); in et_dma_free()
1077 sc->sc_rx_tag = NULL; in et_dma_free()
1081 tbd = &sc->sc_tx_data; in et_dma_free()
1083 if (tbd->tbd_buf[i].tb_dmap) { in et_dma_free()
1084 bus_dmamap_destroy(sc->sc_tx_tag, in et_dma_free()
1085 tbd->tbd_buf[i].tb_dmap); in et_dma_free()
1086 tbd->tbd_buf[i].tb_dmap = NULL; in et_dma_free()
1089 if (sc->sc_tx_tag) { in et_dma_free()
1090 bus_dma_tag_destroy(sc->sc_tx_tag); in et_dma_free()
1091 sc->sc_tx_tag = NULL; in et_dma_free()
1094 /* Destroy mini RX ring, ring 0. */ in et_dma_free()
1095 rx_ring = &sc->sc_rx_ring[0]; in et_dma_free()
1096 et_dma_ring_free(sc, &rx_ring->rr_dtag, (void *)&rx_ring->rr_desc, in et_dma_free()
1097 rx_ring->rr_dmap, &rx_ring->rr_paddr); in et_dma_free()
1098 /* Destroy standard RX ring, ring 1. */ in et_dma_free()
1099 rx_ring = &sc->sc_rx_ring[1]; in et_dma_free()
1100 et_dma_ring_free(sc, &rx_ring->rr_dtag, (void *)&rx_ring->rr_desc, in et_dma_free()
1101 rx_ring->rr_dmap, &rx_ring->rr_paddr); in et_dma_free()
1102 /* Destroy RX stat ring. */ in et_dma_free()
1103 rxst_ring = &sc->sc_rxstat_ring; in et_dma_free()
1104 et_dma_ring_free(sc, &rxst_ring->rsr_dtag, (void *)&rxst_ring->rsr_stat, in et_dma_free()
1105 rxst_ring->rsr_dmap, &rxst_ring->rsr_paddr); in et_dma_free()
1106 /* Destroy RX status block. */ in et_dma_free()
1107 et_dma_ring_free(sc, &rxst_ring->rsr_dtag, (void *)&rxst_ring->rsr_stat, in et_dma_free()
1108 rxst_ring->rsr_dmap, &rxst_ring->rsr_paddr); in et_dma_free()
1110 tx_ring = &sc->sc_tx_ring; in et_dma_free()
1111 et_dma_ring_free(sc, &tx_ring->tr_dtag, (void *)&tx_ring->tr_desc, in et_dma_free()
1112 tx_ring->tr_dmap, &tx_ring->tr_paddr); in et_dma_free()
1114 txsd = &sc->sc_tx_status; in et_dma_free()
1115 et_dma_ring_free(sc, &txsd->txsd_dtag, (void *)&txsd->txsd_status, in et_dma_free()
1116 txsd->txsd_dmap, &txsd->txsd_paddr); in et_dma_free()
1119 if (sc->sc_dtag) { in et_dma_free()
1120 bus_dma_tag_destroy(sc->sc_dtag); in et_dma_free()
1121 sc->sc_dtag = NULL; in et_dma_free()
1174 ifp = sc->ifp; in et_intr()
1186 device_printf(sc->dev, "DMA error(0x%08x) -- resetting\n", in et_intr()
1198 CSR_WRITE_4(sc, ET_TIMER, sc->sc_timer); in et_intr()
1216 ifp = sc->ifp; in et_init_locked()
1233 * Start TX/RX DMA engine in et_init_locked()
1246 CSR_WRITE_4(sc, ET_TIMER, sc->sc_timer); in et_init_locked()
1251 sc->sc_flags &= ~ET_FLAG_LINK; in et_init_locked()
1254 callout_reset(&sc->sc_tick, hz, et_tick, sc); in et_init_locked()
1289 if ((if_getflags(ifp) ^ sc->sc_if_flags) & in et_ioctl()
1299 sc->sc_if_flags = if_getflags(ifp); in et_ioctl()
1305 mii = device_get_softc(sc->sc_miibus); in et_ioctl()
1306 error = ifmedia_ioctl(ifp, ifr, &mii->mii_media, cmd); in et_ioctl()
1321 if (sc->sc_flags & ET_FLAG_JUMBO) in et_ioctl()
1325 max_framelen = MCLBYTES - 1; in et_ioctl()
1327 if (ET_FRAMELEN(ifr->ifr_mtu) > max_framelen) { in et_ioctl()
1333 if (if_getmtu(ifp) != ifr->ifr_mtu) { in et_ioctl()
1334 if_setmtu(ifp, ifr->ifr_mtu); in et_ioctl()
1345 mask = ifr->ifr_reqcap ^ if_getcapenable(ifp); in et_ioctl()
1379 (sc->sc_flags & (ET_FLAG_LINK | ET_FLAG_TXRX_ENABLED)) != in et_start_locked()
1391 tbd = &sc->sc_tx_data; in et_start_locked()
1392 if (tbd->tbd_used > (ET_TX_NDESC * 2) / 3) in et_start_locked()
1396 if (tbd->tbd_used + ET_NSEG_SPARE >= ET_TX_NDESC) { in et_start_locked()
1411 if (tbd->tbd_used > 0) in et_start_locked()
1420 tx_ring = &sc->sc_tx_ring; in et_start_locked()
1421 bus_dmamap_sync(tx_ring->tr_dtag, tx_ring->tr_dmap, in et_start_locked()
1423 tx_ready_pos = tx_ring->tr_ready_index & in et_start_locked()
1425 if (tx_ring->tr_ready_wrap) in et_start_locked()
1428 sc->watchdog_timer = 5; in et_start_locked()
1450 if (sc->watchdog_timer == 0 || --sc->watchdog_timer) in et_watchdog()
1453 bus_dmamap_sync(sc->sc_tx_status.txsd_dtag, sc->sc_tx_status.txsd_dmap, in et_watchdog()
1455 status = le32toh(*(sc->sc_tx_status.txsd_status)); in et_watchdog()
1456 if_printf(sc->ifp, "watchdog timed out (0x%08x) -- resetting\n", in et_watchdog()
1459 if_inc_counter(sc->ifp, IFCOUNTER_OERRORS, 1); in et_watchdog()
1460 if_setdrvflagbits(sc->ifp, 0, IFF_DRV_RUNNING); in et_watchdog()
1474 if_printf(sc->ifp, "can't stop RX DMA engine\n"); in et_stop_rxdma()
1496 tbd = &sc->sc_tx_data; in et_free_tx_ring()
1498 tb = &tbd->tbd_buf[i]; in et_free_tx_ring()
1499 if (tb->tb_mbuf != NULL) { in et_free_tx_ring()
1500 bus_dmamap_sync(sc->sc_tx_tag, tb->tb_dmap, in et_free_tx_ring()
1502 bus_dmamap_unload(sc->sc_mbuf_dtag, tb->tb_dmap); in et_free_tx_ring()
1503 m_freem(tb->tb_mbuf); in et_free_tx_ring()
1504 tb->tb_mbuf = NULL; in et_free_tx_ring()
1518 rx_ring = &sc->sc_rx_ring[0]; in et_free_rx_ring()
1519 rbd = &sc->sc_rx_data[0]; in et_free_rx_ring()
1521 rb = &rbd->rbd_buf[i]; in et_free_rx_ring()
1522 if (rb->rb_mbuf != NULL) { in et_free_rx_ring()
1523 bus_dmamap_sync(sc->sc_rx_mini_tag, rx_ring->rr_dmap, in et_free_rx_ring()
1525 bus_dmamap_unload(sc->sc_rx_mini_tag, rb->rb_dmap); in et_free_rx_ring()
1526 m_freem(rb->rb_mbuf); in et_free_rx_ring()
1527 rb->rb_mbuf = NULL; in et_free_rx_ring()
1532 rx_ring = &sc->sc_rx_ring[1]; in et_free_rx_ring()
1533 rbd = &sc->sc_rx_data[1]; in et_free_rx_ring()
1535 rb = &rbd->rbd_buf[i]; in et_free_rx_ring()
1536 if (rb->rb_mbuf != NULL) { in et_free_rx_ring()
1537 bus_dmamap_sync(sc->sc_rx_tag, rx_ring->rr_dmap, in et_free_rx_ring()
1539 bus_dmamap_unload(sc->sc_rx_tag, rb->rb_dmap); in et_free_rx_ring()
1540 m_freem(rb->rb_mbuf); in et_free_rx_ring()
1541 rb->rb_mbuf = NULL; in et_free_rx_ring()
1556 h -= 32; in et_hash_maddr()
1559 h -= 64; in et_hash_maddr()
1562 h -= 96; in et_hash_maddr()
1579 ifp = sc->ifp; in et_setmulti()
1610 ifp = sc->ifp; in et_chip_init()
1612 * Split 16Kbytes internal memory between TX and RX in et_chip_init()
1621 rxmem_size = ET_MEM_SIZE - in et_chip_init()
1635 if ((sc->sc_flags & ET_FLAG_MSI) == 0) in et_chip_init()
1647 /* Initialize RX MAC */ in et_chip_init()
1653 /* Initialize RX DMA engine */ in et_chip_init()
1673 tx_ring = &sc->sc_tx_ring; in et_init_tx_ring()
1674 bzero(tx_ring->tr_desc, ET_TX_RING_SIZE); in et_init_tx_ring()
1675 bus_dmamap_sync(tx_ring->tr_dtag, tx_ring->tr_dmap, in et_init_tx_ring()
1678 tbd = &sc->sc_tx_data; in et_init_tx_ring()
1679 tbd->tbd_start_index = 0; in et_init_tx_ring()
1680 tbd->tbd_start_wrap = 0; in et_init_tx_ring()
1681 tbd->tbd_used = 0; in et_init_tx_ring()
1683 txsd = &sc->sc_tx_status; in et_init_tx_ring()
1684 bzero(txsd->txsd_status, sizeof(uint32_t)); in et_init_tx_ring()
1685 bus_dmamap_sync(txsd->txsd_dtag, txsd->txsd_dmap, in et_init_tx_ring()
1698 rbd = &sc->sc_rx_data[n]; in et_init_rx_ring()
1700 error = rbd->rbd_newbuf(rbd, i); in et_init_rx_ring()
1702 if_printf(sc->ifp, "%d ring %d buf, " in et_init_rx_ring()
1709 rxsd = &sc->sc_rx_status; in et_init_rx_ring()
1710 bzero(rxsd->rxsd_status, sizeof(struct et_rxstatus)); in et_init_rx_ring()
1711 bus_dmamap_sync(rxsd->rxsd_dtag, rxsd->rxsd_dmap, in et_init_rx_ring()
1714 rxst_ring = &sc->sc_rxstat_ring; in et_init_rx_ring()
1715 bzero(rxst_ring->rsr_stat, ET_RXSTAT_RING_SIZE); in et_init_rx_ring()
1716 bus_dmamap_sync(rxst_ring->rsr_dtag, rxst_ring->rsr_dmap, in et_init_rx_ring()
1732 if_printf(sc->ifp, "can't init RX DMA engine\n"); in et_init_rxdma()
1737 * Install RX status in et_init_rxdma()
1739 rxsd = &sc->sc_rx_status; in et_init_rxdma()
1740 CSR_WRITE_4(sc, ET_RX_STATUS_HI, ET_ADDR_HI(rxsd->rxsd_paddr)); in et_init_rxdma()
1741 CSR_WRITE_4(sc, ET_RX_STATUS_LO, ET_ADDR_LO(rxsd->rxsd_paddr)); in et_init_rxdma()
1744 * Install RX stat ring in et_init_rxdma()
1746 rxst_ring = &sc->sc_rxstat_ring; in et_init_rxdma()
1747 CSR_WRITE_4(sc, ET_RXSTAT_HI, ET_ADDR_HI(rxst_ring->rsr_paddr)); in et_init_rxdma()
1748 CSR_WRITE_4(sc, ET_RXSTAT_LO, ET_ADDR_LO(rxst_ring->rsr_paddr)); in et_init_rxdma()
1749 CSR_WRITE_4(sc, ET_RXSTAT_CNT, ET_RX_NSTAT - 1); in et_init_rxdma()
1751 CSR_WRITE_4(sc, ET_RXSTAT_MINCNT, ((ET_RX_NSTAT * 15) / 100) - 1); in et_init_rxdma()
1754 rxst_ring->rsr_index = 0; in et_init_rxdma()
1755 rxst_ring->rsr_wrap = 0; in et_init_rxdma()
1758 * Install the 2nd RX descriptor ring in et_init_rxdma()
1760 rx_ring = &sc->sc_rx_ring[1]; in et_init_rxdma()
1761 CSR_WRITE_4(sc, ET_RX_RING1_HI, ET_ADDR_HI(rx_ring->rr_paddr)); in et_init_rxdma()
1762 CSR_WRITE_4(sc, ET_RX_RING1_LO, ET_ADDR_LO(rx_ring->rr_paddr)); in et_init_rxdma()
1763 CSR_WRITE_4(sc, ET_RX_RING1_CNT, ET_RX_NDESC - 1); in et_init_rxdma()
1765 CSR_WRITE_4(sc, ET_RX_RING1_MINCNT, ((ET_RX_NDESC * 15) / 100) - 1); in et_init_rxdma()
1768 rx_ring->rr_index = 0; in et_init_rxdma()
1769 rx_ring->rr_wrap = 1; in et_init_rxdma()
1772 * Install the 1st RX descriptor ring in et_init_rxdma()
1774 rx_ring = &sc->sc_rx_ring[0]; in et_init_rxdma()
1775 CSR_WRITE_4(sc, ET_RX_RING0_HI, ET_ADDR_HI(rx_ring->rr_paddr)); in et_init_rxdma()
1776 CSR_WRITE_4(sc, ET_RX_RING0_LO, ET_ADDR_LO(rx_ring->rr_paddr)); in et_init_rxdma()
1777 CSR_WRITE_4(sc, ET_RX_RING0_CNT, ET_RX_NDESC - 1); in et_init_rxdma()
1779 CSR_WRITE_4(sc, ET_RX_RING0_MINCNT, ((ET_RX_NDESC * 15) / 100) - 1); in et_init_rxdma()
1782 rx_ring->rr_index = 0; in et_init_rxdma()
1783 rx_ring->rr_wrap = 1; in et_init_rxdma()
1786 * RX intr moderation in et_init_rxdma()
1788 CSR_WRITE_4(sc, ET_RX_INTR_NPKTS, sc->sc_rx_intr_npkts); in et_init_rxdma()
1789 CSR_WRITE_4(sc, ET_RX_INTR_DELAY, sc->sc_rx_intr_delay); in et_init_rxdma()
1803 if_printf(sc->ifp, "can't init TX DMA engine\n"); in et_init_txdma()
1810 tx_ring = &sc->sc_tx_ring; in et_init_txdma()
1811 CSR_WRITE_4(sc, ET_TX_RING_HI, ET_ADDR_HI(tx_ring->tr_paddr)); in et_init_txdma()
1812 CSR_WRITE_4(sc, ET_TX_RING_LO, ET_ADDR_LO(tx_ring->tr_paddr)); in et_init_txdma()
1813 CSR_WRITE_4(sc, ET_TX_RING_CNT, ET_TX_NDESC - 1); in et_init_txdma()
1818 txsd = &sc->sc_tx_status; in et_init_txdma()
1819 CSR_WRITE_4(sc, ET_TX_STATUS_HI, ET_ADDR_HI(txsd->txsd_paddr)); in et_init_txdma()
1820 CSR_WRITE_4(sc, ET_TX_STATUS_LO, ET_ADDR_LO(txsd->txsd_paddr)); in et_init_txdma()
1825 tx_ring->tr_ready_index = 0; in et_init_txdma()
1826 tx_ring->tr_ready_wrap = 0; in et_init_txdma()
1871 ifp = sc->ifp; in et_init_mac()
1893 /* Disable RX MAC and WOL */ in et_init_rxmac()
1907 ifp = sc->ifp; in et_init_rxmac()
1926 * RX MAC and RX DMA needs to be reduced in size to in et_init_rxmac()
1927 * (ET_MEM_SIZE - ET_MEM_TXSIZE_EX - framelen). In in et_init_rxmac()
1929 * mode in the RX MAC, which chops packets down into in et_init_rxmac()
1931 * since this is the size of the PCI-Express TLP's in et_init_rxmac()
1943 /* Initialize RX MAC management register */ in et_init_rxmac()
1962 /* Enable RX MAC but leave WOL disabled */ in et_init_rxmac()
1996 val = (sc->sc_rx_data[0].rbd_bufsize & ET_RXDMA_CTRL_RING0_SIZE_MASK) | in et_start_rxdma()
1998 val |= (sc->sc_rx_data[1].rbd_bufsize & ET_RXDMA_CTRL_RING1_SIZE_MASK) | in et_start_rxdma()
2006 if_printf(sc->ifp, "can't start RX DMA engine\n"); in et_start_rxdma()
2029 struct mbuf *m; in et_rxeof() local
2037 ifp = sc->ifp; in et_rxeof()
2038 rxsd = &sc->sc_rx_status; in et_rxeof()
2039 rxst_ring = &sc->sc_rxstat_ring; in et_rxeof()
2041 if ((sc->sc_flags & ET_FLAG_TXRX_ENABLED) == 0) in et_rxeof()
2044 bus_dmamap_sync(rxsd->rxsd_dtag, rxsd->rxsd_dmap, in et_rxeof()
2046 bus_dmamap_sync(rxst_ring->rsr_dtag, rxst_ring->rsr_dmap, in et_rxeof()
2050 rxs_stat_ring = le32toh(rxsd->rxsd_status->rxs_stat_ring); in et_rxeof()
2055 while (rxst_index != rxst_ring->rsr_index || in et_rxeof()
2056 rxst_wrap != rxst_ring->rsr_wrap) { in et_rxeof()
2060 MPASS(rxst_ring->rsr_index < ET_RX_NSTAT); in et_rxeof()
2061 st = &rxst_ring->rsr_stat[rxst_ring->rsr_index]; in et_rxeof()
2062 rxst_info1 = le32toh(st->rxst_info1); in et_rxeof()
2063 rxst_info2 = le32toh(st->rxst_info2); in et_rxeof()
2071 if (++rxst_ring->rsr_index == ET_RX_NSTAT) { in et_rxeof()
2072 rxst_ring->rsr_index = 0; in et_rxeof()
2073 rxst_ring->rsr_wrap ^= 1; in et_rxeof()
2075 rxstat_pos = rxst_ring->rsr_index & ET_RXSTAT_POS_INDEX_MASK; in et_rxeof()
2076 if (rxst_ring->rsr_wrap) in et_rxeof()
2091 rbd = &sc->sc_rx_data[ring_idx]; in et_rxeof()
2092 m = rbd->rbd_buf[buf_idx].rb_mbuf; in et_rxeof()
2095 rbd->rbd_discard(rbd, buf_idx); in et_rxeof()
2096 } else if (rbd->rbd_newbuf(rbd, buf_idx) != 0) { in et_rxeof()
2099 rbd->rbd_discard(rbd, buf_idx); in et_rxeof()
2101 buflen -= ETHER_CRC_LEN; in et_rxeof()
2103 m_freem(m); in et_rxeof()
2106 m->m_pkthdr.len = m->m_len = buflen; in et_rxeof()
2107 m->m_pkthdr.rcvif = ifp; in et_rxeof()
2109 if_input(ifp, m); in et_rxeof()
2114 rx_ring = &sc->sc_rx_ring[ring_idx]; in et_rxeof()
2115 if (buf_idx != rx_ring->rr_index) { in et_rxeof()
2118 ring_idx, buf_idx, rx_ring->rr_index); in et_rxeof()
2121 MPASS(rx_ring->rr_index < ET_RX_NDESC); in et_rxeof()
2122 if (++rx_ring->rr_index == ET_RX_NDESC) { in et_rxeof()
2123 rx_ring->rr_index = 0; in et_rxeof()
2124 rx_ring->rr_wrap ^= 1; in et_rxeof()
2126 rxring_pos = rx_ring->rr_index & ET_RX_RING_POS_INDEX_MASK; in et_rxeof()
2127 if (rx_ring->rr_wrap) in et_rxeof()
2129 CSR_WRITE_4(sc, rx_ring->rr_posreg, rxring_pos); in et_rxeof()
2132 bus_dmamap_sync(rxsd->rxsd_dtag, rxsd->rxsd_dmap, in et_rxeof()
2134 bus_dmamap_sync(rxst_ring->rsr_dtag, rxst_ring->rsr_dmap, in et_rxeof()
2144 struct mbuf *m; in et_encap() local
2150 tx_ring = &sc->sc_tx_ring; in et_encap()
2151 MPASS(tx_ring->tr_ready_index < ET_TX_NDESC); in et_encap()
2152 tbd = &sc->sc_tx_data; in et_encap()
2153 first_idx = tx_ring->tr_ready_index; in et_encap()
2154 map = tbd->tbd_buf[first_idx].tb_dmap; in et_encap()
2156 error = bus_dmamap_load_mbuf_sg(sc->sc_tx_tag, map, *m0, segs, &nsegs, in et_encap()
2159 m = m_collapse(*m0, M_NOWAIT, ET_NSEG_MAX); in et_encap()
2160 if (m == NULL) { in et_encap()
2165 *m0 = m; in et_encap()
2166 error = bus_dmamap_load_mbuf_sg(sc->sc_tx_tag, map, *m0, segs, in et_encap()
2177 if (tbd->tbd_used + nsegs > ET_TX_NDESC - 1) { in et_encap()
2178 bus_dmamap_unload(sc->sc_tx_tag, map); in et_encap()
2181 bus_dmamap_sync(sc->sc_tx_tag, map, BUS_DMASYNC_PREWRITE); in et_encap()
2184 sc->sc_tx += nsegs; in et_encap()
2185 if (sc->sc_tx / sc->sc_tx_intr_nsegs != sc->sc_tx_intr) { in et_encap()
2186 sc->sc_tx_intr = sc->sc_tx / sc->sc_tx_intr_nsegs; in et_encap()
2190 m = *m0; in et_encap()
2192 if ((m->m_pkthdr.csum_flags & ET_CSUM_FEATURES) != 0) { in et_encap()
2193 if ((m->m_pkthdr.csum_flags & CSUM_IP) != 0) in et_encap()
2195 if ((m->m_pkthdr.csum_flags & CSUM_UDP) != 0) in et_encap()
2197 else if ((m->m_pkthdr.csum_flags & CSUM_TCP) != 0) in et_encap()
2200 last_idx = -1; in et_encap()
2203 td = &tx_ring->tr_desc[idx]; in et_encap()
2204 td->td_addr_hi = htole32(ET_ADDR_HI(segs[i].ds_addr)); in et_encap()
2205 td->td_addr_lo = htole32(ET_ADDR_LO(segs[i].ds_addr)); in et_encap()
2206 td->td_ctrl1 = htole32(segs[i].ds_len & ET_TDCTRL1_LEN_MASK); in et_encap()
2207 if (i == nsegs - 1) { in et_encap()
2209 td->td_ctrl2 = htole32(last_td_ctrl2 | csum_flags); in et_encap()
2212 td->td_ctrl2 = htole32(csum_flags); in et_encap()
2214 MPASS(tx_ring->tr_ready_index < ET_TX_NDESC); in et_encap()
2215 if (++tx_ring->tr_ready_index == ET_TX_NDESC) { in et_encap()
2216 tx_ring->tr_ready_index = 0; in et_encap()
2217 tx_ring->tr_ready_wrap ^= 1; in et_encap()
2220 td = &tx_ring->tr_desc[first_idx]; in et_encap()
2222 td->td_ctrl2 |= htole32(ET_TDCTRL2_FIRST_FRAG); in et_encap()
2225 tbd->tbd_buf[first_idx].tb_dmap = tbd->tbd_buf[last_idx].tb_dmap; in et_encap()
2226 tbd->tbd_buf[last_idx].tb_dmap = map; in et_encap()
2227 tbd->tbd_buf[last_idx].tb_mbuf = m; in et_encap()
2229 tbd->tbd_used += nsegs; in et_encap()
2230 MPASS(tbd->tbd_used <= ET_TX_NDESC); in et_encap()
2247 ifp = sc->ifp; in et_txeof()
2248 tx_ring = &sc->sc_tx_ring; in et_txeof()
2249 tbd = &sc->sc_tx_data; in et_txeof()
2251 if ((sc->sc_flags & ET_FLAG_TXRX_ENABLED) == 0) in et_txeof()
2254 if (tbd->tbd_used == 0) in et_txeof()
2257 bus_dmamap_sync(tx_ring->tr_dtag, tx_ring->tr_dmap, in et_txeof()
2264 while (tbd->tbd_start_index != end || tbd->tbd_start_wrap != wrap) { in et_txeof()
2265 MPASS(tbd->tbd_start_index < ET_TX_NDESC); in et_txeof()
2266 tb = &tbd->tbd_buf[tbd->tbd_start_index]; in et_txeof()
2267 if (tb->tb_mbuf != NULL) { in et_txeof()
2268 bus_dmamap_sync(sc->sc_tx_tag, tb->tb_dmap, in et_txeof()
2270 bus_dmamap_unload(sc->sc_tx_tag, tb->tb_dmap); in et_txeof()
2271 m_freem(tb->tb_mbuf); in et_txeof()
2272 tb->tb_mbuf = NULL; in et_txeof()
2275 if (++tbd->tbd_start_index == ET_TX_NDESC) { in et_txeof()
2276 tbd->tbd_start_index = 0; in et_txeof()
2277 tbd->tbd_start_wrap ^= 1; in et_txeof()
2280 MPASS(tbd->tbd_used > 0); in et_txeof()
2281 tbd->tbd_used--; in et_txeof()
2284 if (tbd->tbd_used == 0) in et_txeof()
2285 sc->watchdog_timer = 0; in et_txeof()
2286 if (tbd->tbd_used + ET_NSEG_SPARE < ET_TX_NDESC) in et_txeof()
2298 mii = device_get_softc(sc->sc_miibus); in et_tick()
2304 callout_reset(&sc->sc_tick, hz, et_tick, sc); in et_tick()
2313 struct mbuf *m; in et_newbuf_cluster() local
2319 m = m_getcl(M_NOWAIT, MT_DATA, M_PKTHDR); in et_newbuf_cluster()
2320 if (m == NULL) in et_newbuf_cluster()
2322 m->m_len = m->m_pkthdr.len = MCLBYTES; in et_newbuf_cluster()
2323 m_adj(m, ETHER_ALIGN); in et_newbuf_cluster()
2325 sc = rbd->rbd_softc; in et_newbuf_cluster()
2326 rb = &rbd->rbd_buf[buf_idx]; in et_newbuf_cluster()
2328 if (bus_dmamap_load_mbuf_sg(sc->sc_rx_tag, sc->sc_rx_sparemap, m, in et_newbuf_cluster()
2330 m_freem(m); in et_newbuf_cluster()
2335 if (rb->rb_mbuf != NULL) { in et_newbuf_cluster()
2336 bus_dmamap_sync(sc->sc_rx_tag, rb->rb_dmap, in et_newbuf_cluster()
2338 bus_dmamap_unload(sc->sc_rx_tag, rb->rb_dmap); in et_newbuf_cluster()
2340 dmap = rb->rb_dmap; in et_newbuf_cluster()
2341 rb->rb_dmap = sc->sc_rx_sparemap; in et_newbuf_cluster()
2342 sc->sc_rx_sparemap = dmap; in et_newbuf_cluster()
2343 bus_dmamap_sync(sc->sc_rx_tag, rb->rb_dmap, BUS_DMASYNC_PREREAD); in et_newbuf_cluster()
2345 rb->rb_mbuf = m; in et_newbuf_cluster()
2346 desc = &rbd->rbd_ring->rr_desc[buf_idx]; in et_newbuf_cluster()
2347 desc->rd_addr_hi = htole32(ET_ADDR_HI(segs[0].ds_addr)); in et_newbuf_cluster()
2348 desc->rd_addr_lo = htole32(ET_ADDR_LO(segs[0].ds_addr)); in et_newbuf_cluster()
2349 desc->rd_ctrl = htole32(buf_idx & ET_RDCTRL_BUFIDX_MASK); in et_newbuf_cluster()
2350 bus_dmamap_sync(rbd->rbd_ring->rr_dtag, rbd->rbd_ring->rr_dmap, in et_newbuf_cluster()
2360 desc = &rbd->rbd_ring->rr_desc[buf_idx]; in et_rxbuf_discard()
2361 desc->rd_ctrl = htole32(buf_idx & ET_RDCTRL_BUFIDX_MASK); in et_rxbuf_discard()
2362 bus_dmamap_sync(rbd->rbd_ring->rr_dtag, rbd->rbd_ring->rr_dmap, in et_rxbuf_discard()
2372 struct mbuf *m; in et_newbuf_hdr() local
2378 MGETHDR(m, M_NOWAIT, MT_DATA); in et_newbuf_hdr()
2379 if (m == NULL) in et_newbuf_hdr()
2381 m->m_len = m->m_pkthdr.len = MHLEN; in et_newbuf_hdr()
2382 m_adj(m, ETHER_ALIGN); in et_newbuf_hdr()
2384 sc = rbd->rbd_softc; in et_newbuf_hdr()
2385 rb = &rbd->rbd_buf[buf_idx]; in et_newbuf_hdr()
2387 if (bus_dmamap_load_mbuf_sg(sc->sc_rx_mini_tag, sc->sc_rx_mini_sparemap, in et_newbuf_hdr()
2388 m, segs, &nsegs, 0) != 0) { in et_newbuf_hdr()
2389 m_freem(m); in et_newbuf_hdr()
2394 if (rb->rb_mbuf != NULL) { in et_newbuf_hdr()
2395 bus_dmamap_sync(sc->sc_rx_mini_tag, rb->rb_dmap, in et_newbuf_hdr()
2397 bus_dmamap_unload(sc->sc_rx_mini_tag, rb->rb_dmap); in et_newbuf_hdr()
2399 dmap = rb->rb_dmap; in et_newbuf_hdr()
2400 rb->rb_dmap = sc->sc_rx_mini_sparemap; in et_newbuf_hdr()
2401 sc->sc_rx_mini_sparemap = dmap; in et_newbuf_hdr()
2402 bus_dmamap_sync(sc->sc_rx_mini_tag, rb->rb_dmap, BUS_DMASYNC_PREREAD); in et_newbuf_hdr()
2404 rb->rb_mbuf = m; in et_newbuf_hdr()
2405 desc = &rbd->rbd_ring->rr_desc[buf_idx]; in et_newbuf_hdr()
2406 desc->rd_addr_hi = htole32(ET_ADDR_HI(segs[0].ds_addr)); in et_newbuf_hdr()
2407 desc->rd_addr_lo = htole32(ET_ADDR_LO(segs[0].ds_addr)); in et_newbuf_hdr()
2408 desc->rd_ctrl = htole32(buf_idx & ET_RDCTRL_BUFIDX_MASK); in et_newbuf_hdr()
2409 bus_dmamap_sync(rbd->rbd_ring->rr_dtag, rbd->rbd_ring->rr_dmap, in et_newbuf_hdr()
2430 ctx = device_get_sysctl_ctx(sc->dev); in et_add_sysctls()
2431 children = SYSCTL_CHILDREN(device_get_sysctl_tree(sc->dev)); in et_add_sysctls()
2435 et_sysctl_rx_intr_npkts, "I", "RX IM, # packets per RX interrupt"); in et_add_sysctls()
2439 "RX IM, RX interrupt delay (x10 usec)"); in et_add_sysctls()
2441 CTLFLAG_RW, &sc->sc_tx_intr_nsegs, 0, in et_add_sysctls()
2444 CTLFLAG_RW, &sc->sc_timer, 0, "TX timer"); in et_add_sysctls()
2450 /* TX/RX statistics. */ in et_add_sysctls()
2451 stats = &sc->sc_stats; in et_add_sysctls()
2452 ET_SYSCTL_STAT_ADD64(ctx, parent, "frames_64", &stats->pkts_64, in et_add_sysctls()
2454 ET_SYSCTL_STAT_ADD64(ctx, parent, "frames_65_127", &stats->pkts_65, in et_add_sysctls()
2456 ET_SYSCTL_STAT_ADD64(ctx, parent, "frames_128_255", &stats->pkts_128, in et_add_sysctls()
2458 ET_SYSCTL_STAT_ADD64(ctx, parent, "frames_256_511", &stats->pkts_256, in et_add_sysctls()
2460 ET_SYSCTL_STAT_ADD64(ctx, parent, "frames_512_1023", &stats->pkts_512, in et_add_sysctls()
2462 ET_SYSCTL_STAT_ADD64(ctx, parent, "frames_1024_1518", &stats->pkts_1024, in et_add_sysctls()
2464 ET_SYSCTL_STAT_ADD64(ctx, parent, "frames_1519_1522", &stats->pkts_1519, in et_add_sysctls()
2467 /* RX statistics. */ in et_add_sysctls()
2468 tree = SYSCTL_ADD_NODE(ctx, parent, OID_AUTO, "rx", in et_add_sysctls()
2469 CTLFLAG_RD | CTLFLAG_MPSAFE, NULL, "RX MAC statistics"); in et_add_sysctls()
2472 &stats->rx_bytes, "Good bytes"); in et_add_sysctls()
2474 &stats->rx_frames, "Good frames"); in et_add_sysctls()
2476 &stats->rx_crcerrs, "CRC errors"); in et_add_sysctls()
2478 &stats->rx_mcast, "Multicast frames"); in et_add_sysctls()
2480 &stats->rx_bcast, "Broadcast frames"); in et_add_sysctls()
2482 &stats->rx_control, "Control frames"); in et_add_sysctls()
2484 &stats->rx_pause, "Pause frames"); in et_add_sysctls()
2486 &stats->rx_unknown_control, "Unknown control frames"); in et_add_sysctls()
2488 &stats->rx_alignerrs, "Alignment errors"); in et_add_sysctls()
2490 &stats->rx_lenerrs, "Frames with length mismatched"); in et_add_sysctls()
2492 &stats->rx_codeerrs, "Frames with code error"); in et_add_sysctls()
2494 &stats->rx_cserrs, "Frames with carrier sense error"); in et_add_sysctls()
2496 &stats->rx_runts, "Too short frames"); in et_add_sysctls()
2498 &stats->rx_oversize, "Oversized frames"); in et_add_sysctls()
2500 &stats->rx_fragments, "Fragmented frames"); in et_add_sysctls()
2502 &stats->rx_jabbers, "Frames with jabber error"); in et_add_sysctls()
2504 &stats->rx_drop, "Dropped frames"); in et_add_sysctls()
2511 &stats->tx_bytes, "Good bytes"); in et_add_sysctls()
2513 &stats->tx_frames, "Good frames"); in et_add_sysctls()
2515 &stats->tx_mcast, "Multicast frames"); in et_add_sysctls()
2517 &stats->tx_bcast, "Broadcast frames"); in et_add_sysctls()
2519 &stats->tx_pause, "Pause frames"); in et_add_sysctls()
2521 &stats->tx_deferred, "Deferred frames"); in et_add_sysctls()
2523 &stats->tx_excess_deferred, "Excessively deferred frames"); in et_add_sysctls()
2525 &stats->tx_single_colls, "Single collisions"); in et_add_sysctls()
2527 &stats->tx_multi_colls, "Multiple collisions"); in et_add_sysctls()
2529 &stats->tx_late_colls, "Late collisions"); in et_add_sysctls()
2531 &stats->tx_excess_colls, "Excess collisions"); in et_add_sysctls()
2533 &stats->tx_total_colls, "Total collisions"); in et_add_sysctls()
2535 &stats->tx_pause_honored, "Honored pause frames"); in et_add_sysctls()
2537 &stats->tx_drop, "Dropped frames"); in et_add_sysctls()
2539 &stats->tx_jabbers, "Frames with jabber errors"); in et_add_sysctls()
2541 &stats->tx_crcerrs, "Frames with CRC errors"); in et_add_sysctls()
2543 &stats->tx_control, "Control frames"); in et_add_sysctls()
2545 &stats->tx_oversize, "Oversized frames"); in et_add_sysctls()
2547 &stats->tx_undersize, "Undersized frames"); in et_add_sysctls()
2549 &stats->tx_fragments, "Fragmented frames"); in et_add_sysctls()
2563 ifp = sc->ifp; in et_sysctl_rx_intr_npkts()
2564 v = sc->sc_rx_intr_npkts; in et_sysctl_rx_intr_npkts()
2566 if (error || req->newptr == NULL) in et_sysctl_rx_intr_npkts()
2573 if (sc->sc_rx_intr_npkts != v) { in et_sysctl_rx_intr_npkts()
2576 sc->sc_rx_intr_npkts = v; in et_sysctl_rx_intr_npkts()
2590 ifp = sc->ifp; in et_sysctl_rx_intr_delay()
2591 v = sc->sc_rx_intr_delay; in et_sysctl_rx_intr_delay()
2593 if (error || req->newptr == NULL) in et_sysctl_rx_intr_delay()
2600 if (sc->sc_rx_intr_delay != v) { in et_sysctl_rx_intr_delay()
2603 sc->sc_rx_intr_delay = v; in et_sysctl_rx_intr_delay()
2614 stats = &sc->sc_stats; in et_stats_update()
2615 stats->pkts_64 += CSR_READ_4(sc, ET_STAT_PKTS_64); in et_stats_update()
2616 stats->pkts_65 += CSR_READ_4(sc, ET_STAT_PKTS_65_127); in et_stats_update()
2617 stats->pkts_128 += CSR_READ_4(sc, ET_STAT_PKTS_128_255); in et_stats_update()
2618 stats->pkts_256 += CSR_READ_4(sc, ET_STAT_PKTS_256_511); in et_stats_update()
2619 stats->pkts_512 += CSR_READ_4(sc, ET_STAT_PKTS_512_1023); in et_stats_update()
2620 stats->pkts_1024 += CSR_READ_4(sc, ET_STAT_PKTS_1024_1518); in et_stats_update()
2621 stats->pkts_1519 += CSR_READ_4(sc, ET_STAT_PKTS_1519_1522); in et_stats_update()
2623 stats->rx_bytes += CSR_READ_4(sc, ET_STAT_RX_BYTES); in et_stats_update()
2624 stats->rx_frames += CSR_READ_4(sc, ET_STAT_RX_FRAMES); in et_stats_update()
2625 stats->rx_crcerrs += CSR_READ_4(sc, ET_STAT_RX_CRC_ERR); in et_stats_update()
2626 stats->rx_mcast += CSR_READ_4(sc, ET_STAT_RX_MCAST); in et_stats_update()
2627 stats->rx_bcast += CSR_READ_4(sc, ET_STAT_RX_BCAST); in et_stats_update()
2628 stats->rx_control += CSR_READ_4(sc, ET_STAT_RX_CTL); in et_stats_update()
2629 stats->rx_pause += CSR_READ_4(sc, ET_STAT_RX_PAUSE); in et_stats_update()
2630 stats->rx_unknown_control += CSR_READ_4(sc, ET_STAT_RX_UNKNOWN_CTL); in et_stats_update()
2631 stats->rx_alignerrs += CSR_READ_4(sc, ET_STAT_RX_ALIGN_ERR); in et_stats_update()
2632 stats->rx_lenerrs += CSR_READ_4(sc, ET_STAT_RX_LEN_ERR); in et_stats_update()
2633 stats->rx_codeerrs += CSR_READ_4(sc, ET_STAT_RX_CODE_ERR); in et_stats_update()
2634 stats->rx_cserrs += CSR_READ_4(sc, ET_STAT_RX_CS_ERR); in et_stats_update()
2635 stats->rx_runts += CSR_READ_4(sc, ET_STAT_RX_RUNT); in et_stats_update()
2636 stats->rx_oversize += CSR_READ_4(sc, ET_STAT_RX_OVERSIZE); in et_stats_update()
2637 stats->rx_fragments += CSR_READ_4(sc, ET_STAT_RX_FRAG); in et_stats_update()
2638 stats->rx_jabbers += CSR_READ_4(sc, ET_STAT_RX_JABBER); in et_stats_update()
2639 stats->rx_drop += CSR_READ_4(sc, ET_STAT_RX_DROP); in et_stats_update()
2641 stats->tx_bytes += CSR_READ_4(sc, ET_STAT_TX_BYTES); in et_stats_update()
2642 stats->tx_frames += CSR_READ_4(sc, ET_STAT_TX_FRAMES); in et_stats_update()
2643 stats->tx_mcast += CSR_READ_4(sc, ET_STAT_TX_MCAST); in et_stats_update()
2644 stats->tx_bcast += CSR_READ_4(sc, ET_STAT_TX_BCAST); in et_stats_update()
2645 stats->tx_pause += CSR_READ_4(sc, ET_STAT_TX_PAUSE); in et_stats_update()
2646 stats->tx_deferred += CSR_READ_4(sc, ET_STAT_TX_DEFER); in et_stats_update()
2647 stats->tx_excess_deferred += CSR_READ_4(sc, ET_STAT_TX_EXCESS_DEFER); in et_stats_update()
2648 stats->tx_single_colls += CSR_READ_4(sc, ET_STAT_TX_SINGLE_COL); in et_stats_update()
2649 stats->tx_multi_colls += CSR_READ_4(sc, ET_STAT_TX_MULTI_COL); in et_stats_update()
2650 stats->tx_late_colls += CSR_READ_4(sc, ET_STAT_TX_LATE_COL); in et_stats_update()
2651 stats->tx_excess_colls += CSR_READ_4(sc, ET_STAT_TX_EXCESS_COL); in et_stats_update()
2652 stats->tx_total_colls += CSR_READ_4(sc, ET_STAT_TX_TOTAL_COL); in et_stats_update()
2653 stats->tx_pause_honored += CSR_READ_4(sc, ET_STAT_TX_PAUSE_HONOR); in et_stats_update()
2654 stats->tx_drop += CSR_READ_4(sc, ET_STAT_TX_DROP); in et_stats_update()
2655 stats->tx_jabbers += CSR_READ_4(sc, ET_STAT_TX_JABBER); in et_stats_update()
2656 stats->tx_crcerrs += CSR_READ_4(sc, ET_STAT_TX_CRC_ERR); in et_stats_update()
2657 stats->tx_control += CSR_READ_4(sc, ET_STAT_TX_CTL); in et_stats_update()
2658 stats->tx_oversize += CSR_READ_4(sc, ET_STAT_TX_OVERSIZE); in et_stats_update()
2659 stats->tx_undersize += CSR_READ_4(sc, ET_STAT_TX_UNDERSIZE); in et_stats_update()
2660 stats->tx_fragments += CSR_READ_4(sc, ET_STAT_TX_FRAG); in et_stats_update()
2670 stats = &sc->sc_stats; in et_get_counter()
2674 return (stats->tx_frames); in et_get_counter()
2676 return (stats->tx_total_colls); in et_get_counter()
2678 return (stats->tx_drop + stats->tx_jabbers + in et_get_counter()
2679 stats->tx_crcerrs + stats->tx_excess_deferred + in et_get_counter()
2680 stats->tx_late_colls); in et_get_counter()
2682 return (stats->rx_frames); in et_get_counter()
2684 return (stats->rx_crcerrs + stats->rx_alignerrs + in et_get_counter()
2685 stats->rx_lenerrs + stats->rx_codeerrs + stats->rx_cserrs + in et_get_counter()
2686 stats->rx_runts + stats->rx_jabbers + stats->rx_drop); in et_get_counter()
2700 if ((if_getdrvflags(sc->ifp) & IFF_DRV_RUNNING) != 0) in et_suspend()
2722 if ((sc->sc_flags & ET_FLAG_FASTETHER) == 0) in et_resume()
2725 if ((if_getflags(sc->ifp) & IFF_UP) != 0) in et_resume()