Lines Matching +full:dma +full:- +full:poll +full:- +full:cnt

3 /*-
4 * SPDX-License-Identifier: BSD-2-Clause
97 "Sundance ST-1023 Gigabit Ethernet" },
100 "Sundance ST-2021 Gigabit Ethernet" },
119 "D-Link DL-4000 Gigabit Ethernet" },
187 * MII bit-bang glue
233 { -1, 0, 0 }
239 { -1, 0, 0 }
243 * stge_mii_bitbang_read: [mii bit-bang interface function]
245 * Read the MII serial port for the MII bit-bang module.
262 * stge_mii_bitbang_write: [mii bit-bang interface function]
264 * Write the MII serial port for the MII bit-bang module.
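The two comment blocks above (source lines 243-264) describe the pair of accessors that plug the chip into the generic MII bit-bang framework: one samples the MII serial lines, the other drives them. A minimal sketch of what such accessors look like for this driver follows; CSR_READ_1/CSR_WRITE_1 and STGE_PhyCtrl are taken from other matched lines, but the function bodies are an assumption, not the verbatim driver code.

/* Sketch: sample the MII serial lines from the PHY control register. */
static uint32_t
stge_mii_bitbang_read(device_t dev)
{
	struct stge_softc *sc = device_get_softc(dev);

	return (CSR_READ_1(sc, STGE_PhyCtrl));
}

/* Sketch: drive the MII serial lines through the PHY control register. */
static void
stge_mii_bitbang_write(device_t dev, uint32_t val)
{
	struct stge_softc *sc = device_get_softc(dev);

	CSR_WRITE_1(sc, STGE_PhyCtrl, val);
}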
334 taskqueue_enqueue(taskqueue_swi, &sc->sc_link_task); in stge_miibus_statchg()
349 mii = device_get_softc(sc->sc_miibus); in stge_mediastatus()
352 ifmr->ifm_status = mii->mii_media_status; in stge_mediastatus()
353 ifmr->ifm_active = mii->mii_media_active; in stge_mediastatus()
359 * Set hardware to newly-selected media.
368 mii = device_get_softc(sc->sc_miibus); in stge_mediachange()
397 device_printf(sc->sc_dev, "EEPROM failed to come ready\n"); in stge_read_eeprom()
402 device_printf(sc->sc_dev, "EEPROM read timed out\n"); in stge_read_eeprom()
417 if (vendor == sp->stge_vendorid && in stge_probe()
418 devid == sp->stge_deviceid) { in stge_probe()
419 device_set_desc(dev, sp->stge_name); in stge_probe()
439 sc->sc_dev = dev; in stge_attach()
441 mtx_init(&sc->sc_mtx, device_get_nameunit(dev), MTX_NETWORK_LOCK, in stge_attach()
443 mtx_init(&sc->sc_mii_mtx, "stge_mii_mutex", NULL, MTX_DEF); in stge_attach()
444 callout_init_mtx(&sc->sc_tick_ch, &sc->sc_mtx, 0); in stge_attach()
445 TASK_INIT(&sc->sc_link_task, 0, stge_link_task, sc); in stge_attach()
454 sc->sc_spec = stge_res_spec_mem; in stge_attach()
458 device_printf(sc->sc_dev, "couldn't locate IO BAR\n"); in stge_attach()
462 sc->sc_spec = stge_res_spec_io; in stge_attach()
464 error = bus_alloc_resources(dev, sc->sc_spec, sc->sc_res); in stge_attach()
467 sc->sc_spec == stge_res_spec_mem ? "memory" : "I/O"); in stge_attach()
470 sc->sc_rev = pci_get_revid(dev); in stge_attach()
475 &sc->sc_rxint_nframe, 0, sysctl_hw_stge_rxint_nframe, "I", in stge_attach()
481 &sc->sc_rxint_dmawait, 0, sysctl_hw_stge_rxint_dmawait, "I", in stge_attach()
485 sc->sc_rxint_nframe = STGE_RXINT_NFRAME_DEFAULT; in stge_attach()
487 "rxint_nframe", &sc->sc_rxint_nframe); in stge_attach()
489 if (sc->sc_rxint_nframe < STGE_RXINT_NFRAME_MIN || in stge_attach()
490 sc->sc_rxint_nframe > STGE_RXINT_NFRAME_MAX) { in stge_attach()
493 sc->sc_rxint_nframe = STGE_RXINT_NFRAME_DEFAULT; in stge_attach()
497 sc->sc_rxint_dmawait = STGE_RXINT_DMAWAIT_DEFAULT; in stge_attach()
499 "rxint_dmawait", &sc->sc_rxint_dmawait); in stge_attach()
501 if (sc->sc_rxint_dmawait < STGE_RXINT_DMAWAIT_MIN || in stge_attach()
502 sc->sc_rxint_dmawait > STGE_RXINT_DMAWAIT_MAX) { in stge_attach()
505 sc->sc_rxint_dmawait = STGE_RXINT_DMAWAIT_DEFAULT; in stge_attach()
517 sc->sc_usefiber = 1; in stge_attach()
519 sc->sc_usefiber = 0; in stge_attach()
522 stge_read_eeprom(sc, STGE_EEPROM_LEDMode, &sc->sc_led); in stge_attach()
550 sc->sc_stge1023 = 0; in stge_attach()
559 sc->sc_stge1023 = 1; in stge_attach()
562 ifp = sc->sc_ifp = if_alloc(IFT_ETHER); in stge_attach()
569 if_setsendqlen(ifp, STGE_TX_RING_CNT - 1); in stge_attach()
572 if (sc->sc_rev >= 0x0c) { in stge_attach()
585 sc->sc_PhyCtrl = CSR_READ_1(sc, STGE_PhyCtrl) & in stge_attach()
590 if (sc->sc_rev >= 0x40 && sc->sc_rev <= 0x4e) in stge_attach()
592 error = mii_attach(sc->sc_dev, &sc->sc_miibus, ifp, stge_mediachange, in stge_attach()
596 device_printf(sc->sc_dev, "attaching PHYs failed\n"); in stge_attach()
604 if (sc->sc_rev >= 0x0c) in stge_attach()
623 sc->sc_txthresh = 0x0fff; in stge_attach()
628 sc->sc_DMACtrl = 0; in stge_attach()
630 sc->sc_DMACtrl |= DMAC_MWIDisable; in stge_attach()
635 error = bus_setup_intr(dev, sc->sc_res[1], INTR_TYPE_NET | INTR_MPSAFE, in stge_attach()
636 NULL, stge_intr, sc, &sc->sc_ih); in stge_attach()
639 device_printf(sc->sc_dev, "couldn't set up IRQ\n"); in stge_attach()
640 sc->sc_ifp = NULL; in stge_attach()
659 ifp = sc->sc_ifp; in stge_detach()
667 sc->sc_detach = 1; in stge_detach()
670 callout_drain(&sc->sc_tick_ch); in stge_detach()
671 taskqueue_drain(taskqueue_swi, &sc->sc_link_task); in stge_detach()
680 sc->sc_ifp = NULL; in stge_detach()
683 if (sc->sc_ih) { in stge_detach()
684 bus_teardown_intr(dev, sc->sc_res[1], sc->sc_ih); in stge_detach()
685 sc->sc_ih = NULL; in stge_detach()
688 if (sc->sc_spec) in stge_detach()
689 bus_release_resources(dev, sc->sc_spec, sc->sc_res); in stge_detach()
691 mtx_destroy(&sc->sc_mii_mtx); in stge_detach()
692 mtx_destroy(&sc->sc_mtx); in stge_detach()
710 ctx->stge_busaddr = segs[0].ds_addr; in stge_dmamap_cb()
722 error = bus_dma_tag_create(bus_get_dma_tag(sc->sc_dev),/* parent */ in stge_dma_alloc()
732 &sc->sc_cdata.stge_parent_tag); in stge_dma_alloc()
734 device_printf(sc->sc_dev, "failed to create parent DMA tag\n"); in stge_dma_alloc()
738 error = bus_dma_tag_create(sc->sc_cdata.stge_parent_tag,/* parent */ in stge_dma_alloc()
748 &sc->sc_cdata.stge_tx_ring_tag); in stge_dma_alloc()
750 device_printf(sc->sc_dev, in stge_dma_alloc()
751 "failed to allocate Tx ring DMA tag\n"); in stge_dma_alloc()
756 error = bus_dma_tag_create(sc->sc_cdata.stge_parent_tag,/* parent */ in stge_dma_alloc()
766 &sc->sc_cdata.stge_rx_ring_tag); in stge_dma_alloc()
768 device_printf(sc->sc_dev, in stge_dma_alloc()
769 "failed to allocate Rx ring DMA tag\n"); in stge_dma_alloc()
774 error = bus_dma_tag_create(sc->sc_cdata.stge_parent_tag,/* parent */ in stge_dma_alloc()
784 &sc->sc_cdata.stge_tx_tag); in stge_dma_alloc()
786 device_printf(sc->sc_dev, "failed to allocate Tx DMA tag\n"); in stge_dma_alloc()
791 error = bus_dma_tag_create(sc->sc_cdata.stge_parent_tag,/* parent */ in stge_dma_alloc()
801 &sc->sc_cdata.stge_rx_tag); in stge_dma_alloc()
803 device_printf(sc->sc_dev, "failed to allocate Rx DMA tag\n"); in stge_dma_alloc()
807 /* allocate DMA'able memory and load the DMA map for Tx ring. */ in stge_dma_alloc()
808 error = bus_dmamem_alloc(sc->sc_cdata.stge_tx_ring_tag, in stge_dma_alloc()
809 (void **)&sc->sc_rdata.stge_tx_ring, BUS_DMA_NOWAIT | in stge_dma_alloc()
810 BUS_DMA_COHERENT | BUS_DMA_ZERO, &sc->sc_cdata.stge_tx_ring_map); in stge_dma_alloc()
812 device_printf(sc->sc_dev, in stge_dma_alloc()
813 "failed to allocate DMA'able memory for Tx ring\n"); in stge_dma_alloc()
818 error = bus_dmamap_load(sc->sc_cdata.stge_tx_ring_tag, in stge_dma_alloc()
819 sc->sc_cdata.stge_tx_ring_map, sc->sc_rdata.stge_tx_ring, in stge_dma_alloc()
822 device_printf(sc->sc_dev, in stge_dma_alloc()
823 "failed to load DMA'able memory for Tx ring\n"); in stge_dma_alloc()
826 sc->sc_rdata.stge_tx_ring_paddr = ctx.stge_busaddr; in stge_dma_alloc()
828 /* allocate DMA'able memory and load the DMA map for Rx ring. */ in stge_dma_alloc()
829 error = bus_dmamem_alloc(sc->sc_cdata.stge_rx_ring_tag, in stge_dma_alloc()
830 (void **)&sc->sc_rdata.stge_rx_ring, BUS_DMA_NOWAIT | in stge_dma_alloc()
831 BUS_DMA_COHERENT | BUS_DMA_ZERO, &sc->sc_cdata.stge_rx_ring_map); in stge_dma_alloc()
833 device_printf(sc->sc_dev, in stge_dma_alloc()
834 "failed to allocate DMA'able memory for Rx ring\n"); in stge_dma_alloc()
839 error = bus_dmamap_load(sc->sc_cdata.stge_rx_ring_tag, in stge_dma_alloc()
840 sc->sc_cdata.stge_rx_ring_map, sc->sc_rdata.stge_rx_ring, in stge_dma_alloc()
843 device_printf(sc->sc_dev, in stge_dma_alloc()
844 "failed to load DMA'able memory for Rx ring\n"); in stge_dma_alloc()
847 sc->sc_rdata.stge_rx_ring_paddr = ctx.stge_busaddr; in stge_dma_alloc()
849 /* create DMA maps for Tx buffers. */ in stge_dma_alloc()
851 txd = &sc->sc_cdata.stge_txdesc[i]; in stge_dma_alloc()
852 txd->tx_m = NULL; in stge_dma_alloc()
853 txd->tx_dmamap = 0; in stge_dma_alloc()
854 error = bus_dmamap_create(sc->sc_cdata.stge_tx_tag, 0, in stge_dma_alloc()
855 &txd->tx_dmamap); in stge_dma_alloc()
857 device_printf(sc->sc_dev, in stge_dma_alloc()
862 /* create DMA maps for Rx buffers. */ in stge_dma_alloc()
863 if ((error = bus_dmamap_create(sc->sc_cdata.stge_rx_tag, 0, in stge_dma_alloc()
864 &sc->sc_cdata.stge_rx_sparemap)) != 0) { in stge_dma_alloc()
865 device_printf(sc->sc_dev, "failed to create spare Rx dmamap\n"); in stge_dma_alloc()
869 rxd = &sc->sc_cdata.stge_rxdesc[i]; in stge_dma_alloc()
870 rxd->rx_m = NULL; in stge_dma_alloc()
871 rxd->rx_dmamap = 0; in stge_dma_alloc()
872 error = bus_dmamap_create(sc->sc_cdata.stge_rx_tag, 0, in stge_dma_alloc()
873 &rxd->rx_dmamap); in stge_dma_alloc()
875 device_printf(sc->sc_dev, in stge_dma_alloc()
893 if (sc->sc_cdata.stge_tx_ring_tag) { in stge_dma_free()
894 if (sc->sc_rdata.stge_tx_ring_paddr) in stge_dma_free()
895 bus_dmamap_unload(sc->sc_cdata.stge_tx_ring_tag, in stge_dma_free()
896 sc->sc_cdata.stge_tx_ring_map); in stge_dma_free()
897 if (sc->sc_rdata.stge_tx_ring) in stge_dma_free()
898 bus_dmamem_free(sc->sc_cdata.stge_tx_ring_tag, in stge_dma_free()
899 sc->sc_rdata.stge_tx_ring, in stge_dma_free()
900 sc->sc_cdata.stge_tx_ring_map); in stge_dma_free()
901 sc->sc_rdata.stge_tx_ring = NULL; in stge_dma_free()
902 sc->sc_rdata.stge_tx_ring_paddr = 0; in stge_dma_free()
903 bus_dma_tag_destroy(sc->sc_cdata.stge_tx_ring_tag); in stge_dma_free()
904 sc->sc_cdata.stge_tx_ring_tag = NULL; in stge_dma_free()
907 if (sc->sc_cdata.stge_rx_ring_tag) { in stge_dma_free()
908 if (sc->sc_rdata.stge_rx_ring_paddr) in stge_dma_free()
909 bus_dmamap_unload(sc->sc_cdata.stge_rx_ring_tag, in stge_dma_free()
910 sc->sc_cdata.stge_rx_ring_map); in stge_dma_free()
911 if (sc->sc_rdata.stge_rx_ring) in stge_dma_free()
912 bus_dmamem_free(sc->sc_cdata.stge_rx_ring_tag, in stge_dma_free()
913 sc->sc_rdata.stge_rx_ring, in stge_dma_free()
914 sc->sc_cdata.stge_rx_ring_map); in stge_dma_free()
915 sc->sc_rdata.stge_rx_ring = NULL; in stge_dma_free()
916 sc->sc_rdata.stge_rx_ring_paddr = 0; in stge_dma_free()
917 bus_dma_tag_destroy(sc->sc_cdata.stge_rx_ring_tag); in stge_dma_free()
918 sc->sc_cdata.stge_rx_ring_tag = NULL; in stge_dma_free()
921 if (sc->sc_cdata.stge_tx_tag) { in stge_dma_free()
923 txd = &sc->sc_cdata.stge_txdesc[i]; in stge_dma_free()
924 if (txd->tx_dmamap) { in stge_dma_free()
925 bus_dmamap_destroy(sc->sc_cdata.stge_tx_tag, in stge_dma_free()
926 txd->tx_dmamap); in stge_dma_free()
927 txd->tx_dmamap = 0; in stge_dma_free()
930 bus_dma_tag_destroy(sc->sc_cdata.stge_tx_tag); in stge_dma_free()
931 sc->sc_cdata.stge_tx_tag = NULL; in stge_dma_free()
934 if (sc->sc_cdata.stge_rx_tag) { in stge_dma_free()
936 rxd = &sc->sc_cdata.stge_rxdesc[i]; in stge_dma_free()
937 if (rxd->rx_dmamap) { in stge_dma_free()
938 bus_dmamap_destroy(sc->sc_cdata.stge_rx_tag, in stge_dma_free()
939 rxd->rx_dmamap); in stge_dma_free()
940 rxd->rx_dmamap = 0; in stge_dma_free()
943 if (sc->sc_cdata.stge_rx_sparemap) { in stge_dma_free()
944 bus_dmamap_destroy(sc->sc_cdata.stge_rx_tag, in stge_dma_free()
945 sc->sc_cdata.stge_rx_sparemap); in stge_dma_free()
946 sc->sc_cdata.stge_rx_sparemap = 0; in stge_dma_free()
948 bus_dma_tag_destroy(sc->sc_cdata.stge_rx_tag); in stge_dma_free()
949 sc->sc_cdata.stge_rx_tag = NULL; in stge_dma_free()
952 if (sc->sc_cdata.stge_parent_tag) { in stge_dma_free()
953 bus_dma_tag_destroy(sc->sc_cdata.stge_parent_tag); in stge_dma_free()
954 sc->sc_cdata.stge_parent_tag = NULL; in stge_dma_free()
978 ifp = sc->sc_ifp; in stge_setwol()
1004 sc->sc_suspended = 1; in stge_suspend()
1029 ifp = sc->sc_ifp; in stge_resume()
1033 sc->sc_suspended = 0; in stge_resume()
1051 device_printf(sc->sc_dev, "DMA wait timed out\n"); in stge_dma_wait()
1066 if ((txd = STAILQ_FIRST(&sc->sc_cdata.stge_txfreeq)) == NULL) in stge_encap()
1069 error = bus_dmamap_load_mbuf_sg(sc->sc_cdata.stge_tx_tag, in stge_encap()
1070 txd->tx_dmamap, *m_head, txsegs, &nsegs, 0); in stge_encap()
1079 error = bus_dmamap_load_mbuf_sg(sc->sc_cdata.stge_tx_tag, in stge_encap()
1080 txd->tx_dmamap, *m_head, txsegs, &nsegs, 0); in stge_encap()
1096 if ((m->m_pkthdr.csum_flags & STGE_CSUM_FEATURES) != 0) { in stge_encap()
1097 if (m->m_pkthdr.csum_flags & CSUM_IP) in stge_encap()
1099 if (m->m_pkthdr.csum_flags & CSUM_TCP) in stge_encap()
1101 else if (m->m_pkthdr.csum_flags & CSUM_UDP) in stge_encap()
1105 si = sc->sc_cdata.stge_tx_prod; in stge_encap()
1106 tfd = &sc->sc_rdata.stge_tx_ring[si]; in stge_encap()
1108 tfd->tfd_frags[i].frag_word0 = in stge_encap()
1111 sc->sc_cdata.stge_tx_cnt++; in stge_encap()
1115 if (sc->sc_cdata.stge_tx_cnt >= STGE_TX_HIWAT) in stge_encap()
1119 sc->sc_cdata.stge_tx_prod = (si + 1) % STGE_TX_RING_CNT; in stge_encap()
1122 if (m->m_flags & M_VLANTAG) in stge_encap()
1123 tfc |= (TFD_VLANTagInsert | TFD_VID(m->m_pkthdr.ether_vtag)); in stge_encap()
1124 tfd->tfd_control = htole64(tfc); in stge_encap()
1127 STAILQ_REMOVE_HEAD(&sc->sc_cdata.stge_txfreeq, tx_q); in stge_encap()
1128 STAILQ_INSERT_TAIL(&sc->sc_cdata.stge_txbusyq, txd, tx_q); in stge_encap()
1129 txd->tx_m = m; in stge_encap()
1132 bus_dmamap_sync(sc->sc_cdata.stge_tx_tag, txd->tx_dmamap, in stge_encap()
1134 bus_dmamap_sync(sc->sc_cdata.stge_tx_ring_tag, in stge_encap()
1135 sc->sc_cdata.stge_tx_ring_map, in stge_encap()
1169 IFF_DRV_RUNNING || sc->sc_link == 0) in stge_start_locked()
1173 if (sc->sc_cdata.stge_tx_cnt >= STGE_TX_HIWAT) { in stge_start_locked()
1207 sc->sc_watchdog_timer = 5; in stge_start_locked()
1223 if (sc->sc_watchdog_timer == 0 || --sc->sc_watchdog_timer) in stge_watchdog()
1226 ifp = sc->sc_ifp; in stge_watchdog()
1227 if_printf(sc->sc_ifp, "device timeout\n"); in stge_watchdog()
1253 if (ifr->ifr_mtu < ETHERMIN || ifr->ifr_mtu > STGE_JUMBO_MTU) in stge_ioctl()
1255 else if (if_getmtu(ifp) != ifr->ifr_mtu) { in stge_ioctl()
1256 if_setmtu(ifp, ifr->ifr_mtu); in stge_ioctl()
1269 if (((if_getflags(ifp) ^ sc->sc_if_flags) in stge_ioctl()
1273 if (sc->sc_detach == 0) in stge_ioctl()
1280 sc->sc_if_flags = if_getflags(ifp); in stge_ioctl()
1292 mii = device_get_softc(sc->sc_miibus); in stge_ioctl()
1293 error = ifmedia_ioctl(ifp, ifr, &mii->mii_media, cmd); in stge_ioctl()
1296 mask = ifr->ifr_reqcap ^ if_getcapenable(ifp); in stge_ioctl()
1299 if ((ifr->ifr_reqcap & IFCAP_POLLING) != 0) { in stge_ioctl()
1313 sc->sc_IntEnable); in stge_ioctl()
1361 mii = device_get_softc(sc->sc_miibus); in stge_link_task()
1362 if (mii->mii_media_status & IFM_ACTIVE) { in stge_link_task()
1363 if (IFM_SUBTYPE(mii->mii_media_active) != IFM_NONE) in stge_link_task()
1364 sc->sc_link = 1; in stge_link_task()
1366 sc->sc_link = 0; in stge_link_task()
1368 sc->sc_MACCtrl = 0; in stge_link_task()
1369 if (((mii->mii_media_active & IFM_GMASK) & IFM_FDX) != 0) in stge_link_task()
1370 sc->sc_MACCtrl |= MC_DuplexSelect; in stge_link_task()
1371 if (((mii->mii_media_active & IFM_GMASK) & IFM_ETH_RXPAUSE) != 0) in stge_link_task()
1372 sc->sc_MACCtrl |= MC_RxFlowControlEnable; in stge_link_task()
1373 if (((mii->mii_media_active & IFM_GMASK) & IFM_ETH_TXPAUSE) != 0) in stge_link_task()
1374 sc->sc_MACCtrl |= MC_TxFlowControlEnable; in stge_link_task()
1381 v |= sc->sc_MACCtrl; in stge_link_task()
1383 if (((ac ^ sc->sc_MACCtrl) & MC_DuplexSelect) != 0) { in stge_link_task()
1394 device_printf(sc->sc_dev, "reset failed to complete\n"); in stge_link_task()
1416 if (sc->sc_nerr++ < STGE_MAXERR) in stge_tx_error()
1417 device_printf(sc->sc_dev, "Tx underrun, " in stge_tx_error()
1419 if (sc->sc_nerr == STGE_MAXERR) in stge_tx_error()
1420 device_printf(sc->sc_dev, "too many errors; " in stge_tx_error()
1422 error = -1; in stge_tx_error()
1425 /* Maximum/Late collisions, Re-enable Tx MAC. */ in stge_tx_error()
1449 ifp = sc->sc_ifp; in stge_intr()
1458 if (sc->sc_suspended || (status & IS_InterruptStatus) == 0) in stge_intr()
1464 status &= sc->sc_IntEnable; in stge_intr()
1469 device_printf(sc->sc_dev, in stge_intr()
1500 /* Re-enable interrupts. */ in stge_intr()
1501 CSR_WRITE_2(sc, STGE_IntEnable, sc->sc_IntEnable); in stge_intr()
1526 ifp = sc->sc_ifp; in stge_txeof()
1528 txd = STAILQ_FIRST(&sc->sc_cdata.stge_txbusyq); in stge_txeof()
1531 bus_dmamap_sync(sc->sc_cdata.stge_tx_ring_tag, in stge_txeof()
1532 sc->sc_cdata.stge_tx_ring_map, BUS_DMASYNC_POSTREAD); in stge_txeof()
1538 for (cons = sc->sc_cdata.stge_tx_cons;; in stge_txeof()
1540 if (sc->sc_cdata.stge_tx_cnt <= 0) in stge_txeof()
1542 control = le64toh(sc->sc_rdata.stge_tx_ring[cons].tfd_control); in stge_txeof()
1545 sc->sc_cdata.stge_tx_cnt--; in stge_txeof()
1548 bus_dmamap_sync(sc->sc_cdata.stge_tx_tag, txd->tx_dmamap, in stge_txeof()
1550 bus_dmamap_unload(sc->sc_cdata.stge_tx_tag, txd->tx_dmamap); in stge_txeof()
1553 m_freem(txd->tx_m); in stge_txeof()
1554 txd->tx_m = NULL; in stge_txeof()
1555 STAILQ_REMOVE_HEAD(&sc->sc_cdata.stge_txbusyq, tx_q); in stge_txeof()
1556 STAILQ_INSERT_TAIL(&sc->sc_cdata.stge_txfreeq, txd, tx_q); in stge_txeof()
1557 txd = STAILQ_FIRST(&sc->sc_cdata.stge_txbusyq); in stge_txeof()
1559 sc->sc_cdata.stge_tx_cons = cons; in stge_txeof()
1560 if (sc->sc_cdata.stge_tx_cnt == 0) in stge_txeof()
1561 sc->sc_watchdog_timer = 0; in stge_txeof()
1563 bus_dmamap_sync(sc->sc_cdata.stge_tx_ring_tag, in stge_txeof()
1564 sc->sc_cdata.stge_tx_ring_map, in stge_txeof()
1573 rfd = &sc->sc_rdata.stge_rx_ring[idx]; in stge_discard_rxbuf()
1574 rfd->rfd_status = 0; in stge_discard_rxbuf()
1579 * It seems that TC9021's DMA engine has alignment restrictions in
1580 * DMA scatter operations. The first DMA segment has no address
1595 if (m->m_len <= (MCLBYTES - ETHER_HDR_LEN)) { in stge_fixup_rx()
1596 bcopy(m->m_data, m->m_data + ETHER_HDR_LEN, m->m_len); in stge_fixup_rx()
1597 m->m_data += ETHER_HDR_LEN; in stge_fixup_rx()
1602 bcopy(m->m_data, n->m_data, ETHER_HDR_LEN); in stge_fixup_rx()
1603 m->m_data += ETHER_HDR_LEN; in stge_fixup_rx()
1604 m->m_len -= ETHER_HDR_LEN; in stge_fixup_rx()
1605 n->m_len = ETHER_HDR_LEN; in stge_fixup_rx()
1607 n->m_next = m; in stge_fixup_rx()
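The fragments above (source lines 1595-1607) are pieces of the receive fix-up that the alignment comment at lines 1579-1580 describes: because the first Rx DMA segment cannot be offset, the frame arrives with its IP header misaligned, and software restores alignment after the fact. A consolidated sketch of the idea, with the parts the search did not capture (mbuf allocation, failure handling) filled in as assumptions:

/*
 * Sketch only: shift a received frame so the IP header becomes 4-byte
 * aligned.  Built from the matched fragments; not the verbatim driver code.
 */
static struct mbuf *
stge_fixup_rx_sketch(struct mbuf *m)
{
	struct mbuf *n;

	if (m->m_len <= (MCLBYTES - ETHER_HDR_LEN)) {
		/* Enough slack in the cluster: slide the whole frame up by
		 * one Ethernet header so the payload lands 4-byte aligned. */
		bcopy(m->m_data, m->m_data + ETHER_HDR_LEN, m->m_len);
		m->m_data += ETHER_HDR_LEN;
		return (m);
	}
	/* Cluster is full: peel the Ethernet header into its own small mbuf
	 * and chain the (now header-less) cluster behind it. */
	MGETHDR(n, M_NOWAIT, MT_DATA);
	if (n == NULL) {
		m_freem(m);		/* assumption: drop on allocation failure */
		return (NULL);
	}
	bcopy(m->m_data, n->m_data, ETHER_HDR_LEN);
	m->m_data += ETHER_HDR_LEN;
	m->m_len -= ETHER_HDR_LEN;
	n->m_len = ETHER_HDR_LEN;
	n->m_next = m;
	return (n);
}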
1634 ifp = sc->sc_ifp; in stge_rxeof()
1636 bus_dmamap_sync(sc->sc_cdata.stge_rx_ring_tag, in stge_rxeof()
1637 sc->sc_cdata.stge_rx_ring_map, BUS_DMASYNC_POSTREAD); in stge_rxeof()
1640 for (cons = sc->sc_cdata.stge_rx_cons; prog < STGE_RX_RING_CNT; in stge_rxeof()
1642 status64 = le64toh(sc->sc_rdata.stge_rx_ring[cons].rfd_status); in stge_rxeof()
1648 if (sc->sc_cdata.stge_rxcycles <= 0) in stge_rxeof()
1650 sc->sc_cdata.stge_rxcycles--; in stge_rxeof()
1654 rxd = &sc->sc_cdata.stge_rxdesc[cons]; in stge_rxeof()
1655 mp = rxd->rx_m; in stge_rxeof()
1666 if (sc->sc_cdata.stge_rxhead != NULL) { in stge_rxeof()
1667 m_freem(sc->sc_cdata.stge_rxhead); in stge_rxeof()
1678 if (sc->sc_cdata.stge_rxhead != NULL) { in stge_rxeof()
1679 m_freem(sc->sc_cdata.stge_rxhead); in stge_rxeof()
1686 mp->m_len = RFD_RxDMAFrameLen(status) - in stge_rxeof()
1687 sc->sc_cdata.stge_rxlen; in stge_rxeof()
1688 sc->sc_cdata.stge_rxlen += mp->m_len; in stge_rxeof()
1691 if (sc->sc_cdata.stge_rxhead == NULL) { in stge_rxeof()
1692 sc->sc_cdata.stge_rxhead = mp; in stge_rxeof()
1693 sc->sc_cdata.stge_rxtail = mp; in stge_rxeof()
1695 mp->m_flags &= ~M_PKTHDR; in stge_rxeof()
1696 sc->sc_cdata.stge_rxtail->m_next = mp; in stge_rxeof()
1697 sc->sc_cdata.stge_rxtail = mp; in stge_rxeof()
1701 m = sc->sc_cdata.stge_rxhead; in stge_rxeof()
1702 m->m_pkthdr.rcvif = ifp; in stge_rxeof()
1703 m->m_pkthdr.len = sc->sc_cdata.stge_rxlen; in stge_rxeof()
1705 if (m->m_pkthdr.len > sc->sc_if_framesize) { in stge_rxeof()
1716 m->m_pkthdr.csum_flags |= in stge_rxeof()
1719 m->m_pkthdr.csum_flags |= in stge_rxeof()
1726 m->m_pkthdr.csum_flags |= in stge_rxeof()
1728 m->m_pkthdr.csum_data = 0xffff; in stge_rxeof()
1733 if (sc->sc_if_framesize > (MCLBYTES - ETHER_ALIGN)) { in stge_rxeof()
1743 m->m_pkthdr.ether_vtag = RFD_TCI(status64); in stge_rxeof()
1744 m->m_flags |= M_VLANTAG; in stge_rxeof()
1759 sc->sc_cdata.stge_rx_cons = cons; in stge_rxeof()
1760 bus_dmamap_sync(sc->sc_cdata.stge_rx_ring_tag, in stge_rxeof()
1761 sc->sc_cdata.stge_rx_ring_map, in stge_rxeof()
1783 sc->sc_cdata.stge_rxcycles = count; in stge_poll()
1789 status &= sc->sc_IntEnable; in stge_poll()
1792 device_printf(sc->sc_dev, in stge_poll()
1829 mii = device_get_softc(sc->sc_miibus); in stge_tick()
1841 if (sc->sc_cdata.stge_tx_cnt != 0) in stge_tick()
1846 callout_reset(&sc->sc_tick_ch, hz, stge_tick, sc); in stge_tick()
1861 ifp = sc->sc_ifp; in stge_stats_update()
1917 (sc->sc_usefiber ? AC_RstOut : 0); in stge_reset()
1933 device_printf(sc->sc_dev, "reset failed to complete\n"); in stge_reset()
1938 if ((sc->sc_led & 0x01) != 0) in stge_reset()
1940 if ((sc->sc_led & 0x03) != 0) in stge_reset()
1942 if ((sc->sc_led & 0x08) != 0) in stge_reset()
1949 v |= ((sc->sc_led & 0x70) >> 4); in stge_reset()
1980 ifp = sc->sc_ifp; in stge_init_locked()
1983 mii = device_get_softc(sc->sc_miibus); in stge_init_locked()
1998 device_printf(sc->sc_dev, in stge_init_locked()
2013 * and disable selected stats in the non-RMON stats registers. in stge_init_locked()
2041 * Initialize the Tx auto-poll period. It's OK to make this number in stge_init_locked()
2042 * large (255 is the max, but we use 127) -- we explicitly kick the in stge_init_locked()
2047 /* ..and the Rx auto-poll period. */ in stge_init_locked()
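The register writes these two comments refer to are not among the matched lines. They presumably look roughly like the sketch below; the register names and the Rx value are assumptions inferred from the comments, not verbatim driver code.

/* Sketch (assumed register names/values): program the DMA auto-poll periods. */
CSR_WRITE_1(sc, STGE_TxDMAPollPeriod, 127);	/* large; Tx is kicked explicitly */
CSR_WRITE_1(sc, STGE_RxDMAPollPeriod, 1);	/* poll the Rx ring frequently */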
2051 CSR_WRITE_2(sc, STGE_TxStartThresh, sc->sc_txthresh); in stge_init_locked()
2053 /* Rx DMA thresholds, from Linux */ in stge_init_locked()
2060 /* Tx DMA thresholds, from Linux */ in stge_init_locked()
2065 * Initialize the Rx DMA interrupt control register. We in stge_init_locked()
2072 RDIC_RxFrameCount(sc->sc_rxint_nframe) | in stge_init_locked()
2073 RDIC_RxDMAWaitTime(STGE_RXINT_USECS2TICK(sc->sc_rxint_dmawait))); in stge_init_locked()
2078 sc->sc_IntEnable = IS_HostError | IS_TxComplete | in stge_init_locked()
2086 CSR_WRITE_2(sc, STGE_IntEnable, sc->sc_IntEnable); in stge_init_locked()
2089 * Configure the DMA engine. in stge_init_locked()
2090 * XXX Should auto-tune TxBurstLimit. in stge_init_locked()
2092 CSR_WRITE_4(sc, STGE_DMACtrl, sc->sc_DMACtrl | DMAC_TxBurstLimit(3)); in stge_init_locked()
2096 * FIFO, and send an un-PAUSE frame when we reach 3056 bytes in stge_init_locked()
2105 sc->sc_if_framesize = if_getmtu(ifp) + ETHER_HDR_LEN + ETHER_CRC_LEN; in stge_init_locked()
2106 CSR_WRITE_2(sc, STGE_MaxFrameSize, sc->sc_if_framesize); in stge_init_locked()
2109 * Initialize MacCtrl -- do it before setting the media, in stge_init_locked()
2120 if (sc->sc_rev >= 6) { /* >= B.2 */ in stge_init_locked()
2121 /* Multi-frag frame bug work-around. */ in stge_init_locked()
2125 /* Tx Poll Now bug work-around. */ in stge_init_locked()
2128 /* Tx Poll Now bug work-around. */ in stge_init_locked()
2143 sc->sc_link = 0; in stge_init_locked()
2152 callout_reset(&sc->sc_tick_ch, hz, stge_tick, sc); in stge_init_locked()
2162 device_printf(sc->sc_dev, "interface not running\n"); in stge_init_locked()
2171 ifp = sc->sc_ifp; in stge_vlan_setup()
2204 callout_stop(&sc->sc_tick_ch); in stge_stop()
2205 sc->sc_watchdog_timer = 0; in stge_stop()
2222 * Stop the transmit and receive DMA. in stge_stop()
2234 rxd = &sc->sc_cdata.stge_rxdesc[i]; in stge_stop()
2235 if (rxd->rx_m != NULL) { in stge_stop()
2236 bus_dmamap_sync(sc->sc_cdata.stge_rx_tag, in stge_stop()
2237 rxd->rx_dmamap, BUS_DMASYNC_POSTREAD); in stge_stop()
2238 bus_dmamap_unload(sc->sc_cdata.stge_rx_tag, in stge_stop()
2239 rxd->rx_dmamap); in stge_stop()
2240 m_freem(rxd->rx_m); in stge_stop()
2241 rxd->rx_m = NULL; in stge_stop()
2245 txd = &sc->sc_cdata.stge_txdesc[i]; in stge_stop()
2246 if (txd->tx_m != NULL) { in stge_stop()
2247 bus_dmamap_sync(sc->sc_cdata.stge_tx_tag, in stge_stop()
2248 txd->tx_dmamap, BUS_DMASYNC_POSTWRITE); in stge_stop()
2249 bus_dmamap_unload(sc->sc_cdata.stge_tx_tag, in stge_stop()
2250 txd->tx_dmamap); in stge_stop()
2251 m_freem(txd->tx_m); in stge_stop()
2252 txd->tx_m = NULL; in stge_stop()
2259 ifp = sc->sc_ifp; in stge_stop()
2261 sc->sc_link = 0; in stge_stop()
2276 for (i = STGE_TIMEOUT; i > 0; i--) { in stge_start_tx()
2283 device_printf(sc->sc_dev, "Starting Tx MAC timed out\n"); in stge_start_tx()
2298 for (i = STGE_TIMEOUT; i > 0; i--) { in stge_start_rx()
2305 device_printf(sc->sc_dev, "Starting Rx MAC timed out\n"); in stge_start_rx()
2319 for (i = STGE_TIMEOUT; i > 0; i--) { in stge_stop_tx()
2326 device_printf(sc->sc_dev, "Stopping Tx MAC timed out\n"); in stge_stop_tx()
2340 for (i = STGE_TIMEOUT; i > 0; i--) { in stge_stop_rx()
2347 device_printf(sc->sc_dev, "Stopping Rx MAC timed out\n"); in stge_stop_rx()
2358 STAILQ_INIT(&sc->sc_cdata.stge_txfreeq); in stge_init_tx_ring()
2359 STAILQ_INIT(&sc->sc_cdata.stge_txbusyq); in stge_init_tx_ring()
2361 sc->sc_cdata.stge_tx_prod = 0; in stge_init_tx_ring()
2362 sc->sc_cdata.stge_tx_cons = 0; in stge_init_tx_ring()
2363 sc->sc_cdata.stge_tx_cnt = 0; in stge_init_tx_ring()
2365 rd = &sc->sc_rdata; in stge_init_tx_ring()
2366 bzero(rd->stge_tx_ring, STGE_TX_RING_SZ); in stge_init_tx_ring()
2368 if (i == (STGE_TX_RING_CNT - 1)) in stge_init_tx_ring()
2372 rd->stge_tx_ring[i].tfd_next = htole64(addr); in stge_init_tx_ring()
2373 rd->stge_tx_ring[i].tfd_control = htole64(TFD_TFDDone); in stge_init_tx_ring()
2374 txd = &sc->sc_cdata.stge_txdesc[i]; in stge_init_tx_ring()
2375 STAILQ_INSERT_TAIL(&sc->sc_cdata.stge_txfreeq, txd, tx_q); in stge_init_tx_ring()
2378 bus_dmamap_sync(sc->sc_cdata.stge_tx_ring_tag, in stge_init_tx_ring()
2379 sc->sc_cdata.stge_tx_ring_map, in stge_init_tx_ring()
2391 sc->sc_cdata.stge_rx_cons = 0; in stge_init_rx_ring()
2394 rd = &sc->sc_rdata; in stge_init_rx_ring()
2395 bzero(rd->stge_rx_ring, STGE_RX_RING_SZ); in stge_init_rx_ring()
2399 if (i == (STGE_RX_RING_CNT - 1)) in stge_init_rx_ring()
2403 rd->stge_rx_ring[i].rfd_next = htole64(addr); in stge_init_rx_ring()
2404 rd->stge_rx_ring[i].rfd_status = 0; in stge_init_rx_ring()
2407 bus_dmamap_sync(sc->sc_cdata.stge_rx_ring_tag, in stge_init_rx_ring()
2408 sc->sc_cdata.stge_rx_ring_map, in stge_init_rx_ring()
2432 m->m_len = m->m_pkthdr.len = MCLBYTES; in stge_newbuf()
2434 * The hardware requires a 4-byte aligned DMA address when JUMBO in stge_newbuf()
2437 if (sc->sc_if_framesize <= (MCLBYTES - ETHER_ALIGN)) in stge_newbuf()
2440 if (bus_dmamap_load_mbuf_sg(sc->sc_cdata.stge_rx_tag, in stge_newbuf()
2441 sc->sc_cdata.stge_rx_sparemap, m, segs, &nsegs, 0) != 0) { in stge_newbuf()
2447 rxd = &sc->sc_cdata.stge_rxdesc[idx]; in stge_newbuf()
2448 if (rxd->rx_m != NULL) { in stge_newbuf()
2449 bus_dmamap_sync(sc->sc_cdata.stge_rx_tag, rxd->rx_dmamap, in stge_newbuf()
2451 bus_dmamap_unload(sc->sc_cdata.stge_rx_tag, rxd->rx_dmamap); in stge_newbuf()
2453 map = rxd->rx_dmamap; in stge_newbuf()
2454 rxd->rx_dmamap = sc->sc_cdata.stge_rx_sparemap; in stge_newbuf()
2455 sc->sc_cdata.stge_rx_sparemap = map; in stge_newbuf()
2456 bus_dmamap_sync(sc->sc_cdata.stge_rx_tag, rxd->rx_dmamap, in stge_newbuf()
2458 rxd->rx_m = m; in stge_newbuf()
2460 rfd = &sc->sc_rdata.stge_rx_ring[idx]; in stge_newbuf()
2461 rfd->rfd_frag.frag_word0 = in stge_newbuf()
2463 rfd->rfd_status = 0; in stge_newbuf()
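The comment at source line 2434 above explains why stge_newbuf() cannot always apply the usual two-byte ETHER_ALIGN offset: with jumbo frames a single frame spans several buffers, and each buffer's DMA address must stay 4-byte aligned. A short sketch of the branch guarded by the sc_if_framesize test on line 2437, with the m_adj() call filled in as an assumption:

/*
 * Sketch: only offset the fresh cluster by ETHER_ALIGN when the whole frame
 * fits in one cluster.  For jumbo frames the buffer is left 4-byte aligned
 * for multi-fragment DMA, and alignment is repaired in software afterwards
 * (see the stge_fixup_rx() fragments above).
 */
if (sc->sc_if_framesize <= (MCLBYTES - ETHER_ALIGN))
	m_adj(m, ETHER_ALIGN);	/* assumed body of the branch */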
2481 ifp = sc->sc_ifp; in stge_set_filter()
2498 stge_hash_maddr(void *arg, struct sockaddr_dl *sdl, u_int cnt) in stge_hash_maddr() argument
2521 ifp = sc->sc_ifp; in stge_set_multi()
2539 * addresses through a CRC generator, and then using the low-order in stge_set_multi()
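The truncated comment above (source line 2539) describes the usual FreeBSD multicast filter scheme: run each link-level address through a CRC generator and use the low-order bits to pick a bit in the hardware hash table. A generic sketch of such an if_foreach_llmaddr() callback, matching the stge_hash_maddr() signature seen at line 2498; the 6-bit (64-entry) table width is an assumption for this chip.

/* Sketch: hash one multicast address into a two-word filter table. */
static u_int
stge_hash_maddr_sketch(void *arg, struct sockaddr_dl *sdl, u_int cnt)
{
	uint32_t *mchash = arg;
	uint32_t crc;

	crc = ether_crc32_be(LLADDR(sdl), ETHER_ADDR_LEN);
	crc &= 0x3f;				/* keep the low-order 6 bits */
	mchash[crc >> 5] |= 1 << (crc & 0x1f);	/* set the matching filter bit */
	return (1);
}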
2567 if (error || !req->newptr) in sysctl_int_range()