/illumos-gate/usr/src/uts/common/io/ena/

ena_rx.c
  in ena_refill_rx():
      19  ena_refill_rx(ena_rxq_t *rxq, uint16_t num)    (argument)
      21  VERIFY3P(rxq, !=, NULL);
      22  ASSERT(MUTEX_HELD(&rxq->er_lock));
      23  ASSERT3U(num, <=, rxq->er_sq_num_descs);
      25  const uint16_t modulo_mask = rxq->er_sq_num_descs - 1;
      26  uint16_t tail_mod = rxq->er_sq_tail_idx & modulo_mask;
      29  enahw_rx_desc_t *desc = &rxq->er_sq_descs[tail_mod];
      30  ena_rx_ctrl_block_t *rcb = &rxq->er_rcbs[tail_mod];
      31  uint16_t phase = rxq->er_sq_phase;
      33  VERIFY3U(tail_mod, <, rxq->er_sq_num_descs);
  [all …]

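The ena_refill_rx() hits above rely on a power-of-two descriptor count: er_sq_num_descs - 1 acts as a mask, so the free-running tail index can be wrapped with a cheap AND instead of a modulo. Below is a minimal user-space sketch of that indexing scheme; the rx_ring_t type and ring_refill() name are invented for illustration and are not the driver's.

#include <assert.h>
#include <stdint.h>

typedef struct rx_ring {
    uint16_t num_descs;    /* ring size; must be a power of two */
    uint16_t tail_idx;     /* free-running producer index */
} rx_ring_t;

/* Advance the producer by `num` slots, wrapping with a mask rather than %. */
static void
ring_refill(rx_ring_t *ring, uint16_t num)
{
    const uint16_t mask = ring->num_descs - 1;

    assert((ring->num_descs & mask) == 0);    /* power-of-two ring size */
    assert(num <= ring->num_descs);

    for (uint16_t i = 0; i < num; i++) {
        uint16_t slot = ring->tail_idx & mask;

        /* A real refill would attach a fresh buffer to descriptor `slot`. */
        (void) slot;
        ring->tail_idx++;    /* wraps naturally at UINT16_MAX + 1 */
    }
}

With a power-of-two size the masked value can never reach er_sq_num_descs, which is exactly what the VERIFY3U() in the snippet double-checks.
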
ena_gld.c
  in ena_ring_rx_intr_disable():
      63  ena_rxq_t *rxq = (ena_rxq_t *)mih;    (local)
      66  mutex_enter(&rxq->er_lock);
      67  intr_ctrl = ena_hw_abs_read32(rxq->er_ena, rxq->er_cq_unmask_addr);
      69  ena_hw_abs_write32(rxq->er_ena, rxq->er_cq_unmask_addr, intr_ctrl);
      70  rxq->er_mode = ENA_RXQ_MODE_POLLING;
      71  mutex_exit(&rxq->er_lock);
  in ena_ring_rx_intr_enable():
      78  ena_rxq_t *rxq = (ena_rxq_t *)mih;    (local)
      81  mutex_enter(&rxq->er_lock);
      82  intr_ctrl = ena_hw_abs_read32(rxq->er_ena, rxq->er_cq_unmask_addr);
      84  ena_hw_abs_write32(rxq->er_ena, rxq->er_cq_unmask_addr, intr_ctrl);
  [all …]

ena_stats.c
  in ena_stat_rxq_cleanup():
     403  ena_stat_rxq_cleanup(ena_rxq_t *rxq)    (argument)
     405  if (rxq->er_kstat != NULL) {
     406  kstat_delete(rxq->er_kstat);
     407  rxq->er_kstat = NULL;
  in ena_stat_rxq_init():
     412  ena_stat_rxq_init(ena_rxq_t *rxq)    (argument)
     414  ena_t *ena = rxq->er_ena;
     417  ena_rxq_stat_t *ers = &rxq->er_stat;
     419  (void) snprintf(buf, sizeof (buf), "rxq_%d", rxq->er_rxqs_idx);
     431  rxq->er_kstat = ksp;
     457  kstat_install(rxq->er_kstat);
  [all …]

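ena_stat_rxq_init() and ena_stat_rxq_cleanup() above follow the usual illumos pattern of one named kstat per rx queue, created under a per-queue name and deleted (with the pointer cleared) on teardown. A hedged sketch of that lifecycle is below; the my_rxq_stats_t type, the "mydrv" module name, and the stat fields are placeholders, not the ENA driver's.

#include <sys/types.h>
#include <sys/systm.h>
#include <sys/kstat.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

typedef struct my_rxq_stats {
    kstat_named_t mrs_packets;
    kstat_named_t mrs_bytes;
} my_rxq_stats_t;

/* Create and install a named kstat for rx queue `qidx`. */
static kstat_t *
my_rxq_kstat_init(dev_info_t *dip, uint_t qidx, my_rxq_stats_t *stats)
{
    char name[32];
    kstat_t *ksp;

    (void) snprintf(name, sizeof (name), "rxq_%u", qidx);
    ksp = kstat_create("mydrv", ddi_get_instance(dip), name, "net",
        KSTAT_TYPE_NAMED, sizeof (*stats) / sizeof (kstat_named_t),
        KSTAT_FLAG_VIRTUAL);
    if (ksp == NULL)
        return (NULL);

    kstat_named_init(&stats->mrs_packets, "packets", KSTAT_DATA_UINT64);
    kstat_named_init(&stats->mrs_bytes, "bytes", KSTAT_DATA_UINT64);
    ksp->ks_data = stats;    /* KSTAT_FLAG_VIRTUAL: driver supplies storage */
    kstat_install(ksp);
    return (ksp);
}

/* Teardown mirrors the snippet: delete the kstat and clear the pointer. */
static void
my_rxq_kstat_fini(kstat_t **kspp)
{
    if (*kspp != NULL) {
        kstat_delete(*kspp);
        *kspp = NULL;
    }
}
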
ena.c
  in ena_attach_alloc_rxqs():
    1455  ena_rxq_t *rxq = &ena->ena_rxqs[i];    (local)
    1457  rxq->er_rxqs_idx = i;
    1459  rxq->er_intr_vector = i + 1;
    1460  rxq->er_mrh = NULL;
    1463  mutex_init(&rxq->er_lock, NULL, MUTEX_DRIVER,
    1465  mutex_init(&rxq->er_stat_lock, NULL, MUTEX_DRIVER,
    1469  rxq->er_ena = ena;
    1470  rxq->er_sq_num_descs = ena->ena_rxq_num_descs;
    1471  rxq->er_cq_num_descs = ena->ena_rxq_num_descs;
    1473  if (!ena_stat_rxq_init(rxq)) {
  [all …]

ena_intr.c
  in ena_io_intr():
      30  ena_rxq_t *rxq = &ena->ena_rxqs[vector - 1];    (local)
      37  ASSERT3P(rxq, !=, NULL);
      39  ena_rx_intr_work(rxq);

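Read together with the ena.c hit that sets er_intr_vector = i + 1, the ena_io_intr() hit shows the two halves of a simple vector-to-queue mapping: queue i gets I/O vector i + 1 at attach, and the handler inverts it with vector - 1 (leaving vector 0 for non-I/O work, which is an assumption here, not something the hits themselves state). A trivial sketch of that round trip, with made-up names:

#include <assert.h>

#define NQUEUES 8

/* Queue index -> interrupt vector: vector 0 is reserved. */
static inline unsigned
queue_to_vector(unsigned qidx)
{
    assert(qidx < NQUEUES);
    return (qidx + 1);
}

/* Interrupt vector -> queue index, the inverse used in the I/O handler. */
static inline unsigned
vector_to_queue(unsigned vector)
{
    assert(vector >= 1 && vector <= NQUEUES);
    return (vector - 1);
}
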
/illumos-gate/usr/src/uts/common/io/bnx/570x/driver/common/lmdev/

bnx_lm_recv.c
  in post_bd_buffer():
      46  lm_rx_chain_t *rxq,    (argument)
      55  prod_bd = rxq->prod_bd;
      56  prod_idx = rxq->prod_idx;
      76  rxq->bd_left--;
      77  rxq->prod_idx = prod_idx;
      78  rxq->prod_bd = prod_bd;
  in lm_post_buffers():
      89  lm_rx_chain_t *rxq;    (local)
      94  rxq = &pdev->rx_info.chain[chain_idx];
     103  if(rxq->bd_left == 0)
     105  s_list_push_tail(&rxq->free_descq, &packet->link);
  [all …]

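The post_bd_buffer()/lm_post_buffers() hits show the producer-side bookkeeping for a buffer-descriptor chain: consume one free BD per posted buffer, park the packet on a free list when bd_left reaches zero, and only then write the advanced producer cursor back into the chain. A simplified sketch of that accounting with invented types (not the bnx structures):

#include <stdbool.h>
#include <stdint.h>

typedef struct rx_chain {
    uint16_t prod_idx;    /* next descriptor slot to fill */
    uint32_t bd_left;     /* free buffer descriptors remaining */
    uint32_t size;        /* total descriptors in the chain */
} rx_chain_t;

/* Post one receive buffer; returns false when the chain is exhausted. */
static bool
chain_post_buffer(rx_chain_t *chain)
{
    if (chain->bd_left == 0)
        return (false);    /* caller re-queues the packet for later */

    /* A real driver points descriptor prod_idx at the buffer's DMA address. */

    chain->bd_left--;
    chain->prod_idx = (chain->prod_idx + 1) % chain->size;
    return (true);
}
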
bnx_hw_reset.c
  in init_l2rxq():
    1106  lm_rx_chain_t *rxq;    (local)
    1118  rxq = &pdev->rx_info.chain[idx];
    1120  bd_page_cnt = pdev->params.l2_rx_bd_page_cnt[rxq->idx];
    1122  rxq->prod_idx = 0;
    1123  rxq->con_idx = 0;
    1124  rxq->prod_bseq = 0;
    1125  rxq->prod_bd = rxq->bd_chain_virt;
    1126  rxq->bd_left = bd_page_cnt * MAX_BD_PER_PAGE - 1;
    1134  (u8_t *) rxq->bd_chain_virt,
    1135  rxq->bd_chain_phy,
  [all …]

bnx_lm_main.c
  in sblk_rx_con_idx_ptr():
    1799  lm_rx_chain_t *rxq)    (argument)
    1808  switch(rxq->idx)
    1882  switch(rxq->idx)
  in alloc_l2rx_desc():
    1952  lm_rx_chain_t *rxq)    (argument)
    1962  bd_page_cnt = pdev->params.l2_rx_bd_page_cnt[rxq->idx];
    1963  desc_cnt = pdev->params.l2_rx_desc_cnt[rxq->idx];
    1967  pdev->params.l2_rx_bd_page_cnt[rxq->idx] = 0;
    1968  pdev->params.l2_rx_desc_cnt[rxq->idx] = 0;
    1975  rxq->bd_chain_virt = (rx_bd_t *) mm_alloc_phys_mem(
    1978  &rxq->bd_chain_phy,
  [all …]

/illumos-gate/usr/src/uts/intel/io/vmxnet3s/

vmxnet3_rx.c
  in vmxnet3_rx_populate():
     186  vmxnet3_rx_populate(vmxnet3_softc_t *dp, vmxnet3_rxqueue_t *rxq, uint16_t idx,    (argument)
     217  vmxnet3_cmdring_t *cmdRing = &rxq->cmdRing;
     220  rxq->bufRing[idx].rxBuf = rxBuf;
  in vmxnet3_rxqueue_init():
     240  vmxnet3_rxqueue_init(vmxnet3_softc_t *dp, vmxnet3_rxqueue_t *rxq)    (argument)
     242  vmxnet3_cmdring_t *cmdRing = &rxq->cmdRing;
     249  if ((err = vmxnet3_rx_populate(dp, rxq, cmdRing->next2fill,
     270  vmxnet3_free_rxbuf(dp, rxq->bufRing[cmdRing->next2fill].rxBuf);
  in vmxnet3_rxqueue_fini():
     280  vmxnet3_rxqueue_fini(vmxnet3_softc_t *dp, vmxnet3_rxqueue_t *rxq)    (argument)
     292  for (i = 0; i < rxq->cmdRing.size; i++) {
     293  rxBuf = rxq->bufRing[i].rxBuf;
  [all …]

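vmxnet3_rxqueue_init() above populates every command-ring slot up front and, when an allocation fails midway, frees whatever was already populated before returning the error. Here is a generic sketch of that all-or-nothing initialization; the slot_populate()/slot_free() helpers are stand-ins for the driver's buffer allocation, not its actual functions.

#include <stdlib.h>

/* Placeholder per-slot setup and teardown. */
static int
slot_populate(void **slot)
{
    *slot = malloc(64);
    return (*slot == NULL ? -1 : 0);
}

static void
slot_free(void *slot)
{
    free(slot);
}

/* Populate all n slots, or none: unwind on the first failure. */
static int
ring_init(void **slots, unsigned n)
{
    unsigned i;

    for (i = 0; i < n; i++) {
        if (slot_populate(&slots[i]) != 0)
            goto unwind;
    }
    return (0);

unwind:
    while (i-- > 0)
        slot_free(slots[i]);
    return (-1);
}
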
vmxnet3_main.c
  in vmxnet3_prepare_rxqueue():
     387  vmxnet3_rxqueue_t *rxq = &dp->rxQueue;    (local)
     390  ASSERT(!(rxq->cmdRing.size & VMXNET3_RING_SIZE_MASK));
     391  ASSERT(!(rxq->compRing.size & VMXNET3_RING_SIZE_MASK));
     392  ASSERT(!rxq->cmdRing.dma.buf && !rxq->compRing.dma.buf);
     394  if ((err = vmxnet3_alloc_cmdring(dp, &rxq->cmdRing)) != 0) {
     397  rqdesc->conf.rxRingBasePA[0] = rxq->cmdRing.dma.bufPA;
     398  rqdesc->conf.rxRingSize[0] = rxq->cmdRing.size;
     402  if ((err = vmxnet3_alloc_compring(dp, &rxq->compRing)) != 0) {
     405  rqdesc->conf.compRingBasePA = rxq->compRing.dma.bufPA;
     406  rqdesc->conf.compRingSize = rxq->compRing.size;
  [all …]

vmxnet3.h
     192  int vmxnet3_rxqueue_init(vmxnet3_softc_t *dp, vmxnet3_rxqueue_t *rxq);
     193  mblk_t *vmxnet3_rx_intr(vmxnet3_softc_t *dp, vmxnet3_rxqueue_t *rxq);
     194  void vmxnet3_rxqueue_fini(vmxnet3_softc_t *dp, vmxnet3_rxqueue_t *rxq);

/illumos-gate/usr/src/uts/common/io/bnxe/

bnxe_rx.c
  in BnxeRxPostBuffers():
      77  if (pUM->rxq[idx].rxLowWater > s_list_entry_cnt(&pLmRxChain->active_descq))
      79  pUM->rxq[idx].rxLowWater = s_list_entry_cnt(&pLmRxChain->active_descq);
  in BnxeRxPktFree():
     182  s_list_push_tail(&pUM->rxq[idx].doneRxQ,
     186  if (s_list_entry_cnt(&pUM->rxq[idx].doneRxQ) >= pUM->devParams.maxRxFree)
     188  doneRxQ = pUM->rxq[idx].doneRxQ;
     189  s_list_clear(&pUM->rxq[idx].doneRxQ);
     200  atomic_dec_32(&pUM->rxq[idx].rxBufUpInStack);
  in BnxeWaitForPacketsFromClient():
     215  if ((cnt = pUM->rxq[FCOE_CID(&pUM->lm_dev)].rxBufUpInStack) == 0)
     241  if ((cnt = pUM->rxq[idx].rxBufUpInStack) == 0)
  in BnxeRxRingProcess():
     298  pRxQ = &pUM->rxq[idx];
  [all …]

bnxe_lock.c
      47  void BNXE_LOCK_ENTER_RX (um_device_t * pUM, int idx) { mutex_enter(&pUM->rxq[idx].rxMut…
      48  void BNXE_LOCK_EXIT_RX (um_device_t * pUM, int idx) { mutex_exit(&pUM->rxq[idx].rxMute…
      49  void BNXE_LOCK_ENTER_DONERX (um_device_t * pUM, int idx) { mutex_enter(&pUM->rxq[idx].doneR…
      50  void BNXE_LOCK_EXIT_DONERX (um_device_t * pUM, int idx) { mutex_exit(&pUM->rxq[idx].doneRx…

bnxe_main.c
  in BnxeMutexInit():
     240  mutex_init(&pUM->rxq[idx].rxMutex, NULL,
     242  mutex_init(&pUM->rxq[idx].doneRxMutex, NULL,
     244  pUM->rxq[idx].pUM = pUM;
     245  pUM->rxq[idx].idx = idx;
  in BnxeMutexDestroy():
     310  mutex_destroy(&pUM->rxq[idx].rxMutex);
     311  mutex_destroy(&pUM->rxq[idx].doneRxMutex);

bnxe.h
     661  RxQueue rxq[MAX_ETH_CONS];    (member)
     942  #define BNXE_LOCK_ENTER_RX(pUM, idx)      mutex_enter(&(pUM)->rxq[(idx)].rxMutex)
     943  #define BNXE_LOCK_EXIT_RX(pUM, idx)       mutex_exit(&(pUM)->rxq[(idx)].rxMutex)
     944  #define BNXE_LOCK_ENTER_DONERX(pUM, idx)  mutex_enter(&(pUM)->rxq[(idx)].doneRxMutex)
     945  #define BNXE_LOCK_EXIT_DONERX(pUM, idx)   mutex_exit(&(pUM)->rxq[(idx)].doneRxMutex)

/illumos-gate/usr/src/uts/common/io/cxgbe/t4nex/

adapter.h
     398  struct sge_rxq *rxq;    /* NIC rx queues */    (member)
     568  #define RXQ_LOCK(rxq)                      IQ_LOCK(&(rxq)->iq)
     569  #define RXQ_UNLOCK(rxq)                    IQ_UNLOCK(&(rxq)->iq)
     570  #define RXQ_LOCK_ASSERT_OWNED(rxq)         IQ_LOCK_ASSERT_OWNED(&(rxq)->iq)
     571  #define RXQ_LOCK_ASSERT_NOTOWNED(rxq)      IQ_LOCK_ASSERT_NOTOWNED(&(rxq)->iq)
     573  #define RXQ_FL_LOCK(rxq)                   FL_LOCK(&(rxq)->fl)
     574  #define RXQ_FL_UNLOCK(rxq)                 FL_UNLOCK(&(rxq)->fl)
     575  #define RXQ_FL_LOCK_ASSERT_OWNED(rxq)      FL_LOCK_ASSERT_OWNED(&(rxq)->fl)
     576  #define RXQ_FL_LOCK_ASSERT_NOTOWNED(rxq)   FL_LOCK_ASSERT_NOTOWNED(&(rxq)->fl)
     591  #define for_each_rxq(pi, iter, rxq) \
  [all …]

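The adapter.h hits show two macro idioms: RXQ_LOCK()-style wrappers that forward to the lock of the embedded ingress queue, and a for_each_rxq() iterator that hides the port's queue-range arithmetic. The sketch below shows one plausible way such an iterator is built; the names and struct layout are invented, not cxgbe's actual definitions.

#include <stdio.h>

struct rxq { int id; };

struct port {
    struct rxq *rxqs;    /* adapter-wide queue array */
    int first_rxq;       /* this port's first queue */
    int nrxq;            /* number of queues it owns */
};

/*
 * Iterate over a port's rx queues: `it` is the loop counter, `q` is set
 * to each queue in turn.  Works because the port's queues are contiguous.
 */
#define for_each_port_rxq(p, it, q)                                 \
    for ((it) = 0, (q) = &(p)->rxqs[(p)->first_rxq];                \
        (it) < (p)->nrxq;                                           \
        (it)++, (q)++)

int
main(void)
{
    struct rxq all[8] = { {0}, {1}, {2}, {3}, {4}, {5}, {6}, {7} };
    struct port p = { .rxqs = all, .first_rxq = 2, .nrxq = 3 };
    struct rxq *q;
    int i;

    for_each_port_rxq(&p, i, q)
        printf("queue %d\n", q->id);    /* prints 2, 3, 4 */
    return (0);
}
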
t4_mac.c
  in t4_ring_start():
     839  struct sge_rxq *rxq = (struct sge_rxq *)rh;    (local)
     841  RXQ_LOCK(rxq);
     842  rxq->ring_gen_num = mr_gen_num;
     843  RXQ_UNLOCK(rxq);
  in t4_ring_intr_enable():
     853  struct sge_rxq *rxq = (struct sge_rxq *)intrh;    (local)
     854  struct adapter *sc = rxq->port->adapter;
     857  iq = &rxq->iq;
     858  RXQ_LOCK(rxq);
     863  RXQ_UNLOCK(rxq);
  in t4_ring_intr_disable():
     873  struct sge_rxq *rxq = (struct sge_rxq *)intrh;    (local)
  [all …]

t4_sge.c
     100  static int alloc_rxq(struct port_info *pi, struct sge_rxq *rxq, int intr_idx,
     102  static int free_rxq(struct port_info *pi, struct sge_rxq *rxq);
     158  static kstat_t *setup_rxq_kstats(struct port_info *pi, struct sge_rxq *rxq,
  in port_intr_iq():
     425  iq = &s->rxq[pi->first_rxq + idx].iq;
  in t4_setup_port_queues():
     434  struct sge_rxq *rxq;    (local)
     451  for_each_rxq(pi, i, rxq) {
     453  init_iq(&rxq->iq, sc, pi->tmr_idx, pi->pktc_idx, p->qsize_rxq,
     456  init_fl(&rxq->fl, p->qsize_rxq / 8);    /* 8 bufs in each entry */
     460  rxq->iq.flags |= IQ_INTR;
     461  rc = alloc_rxq(pi, rxq, intr_idx, i);
  [all …]

t4_nexus.c
  in t4_devo_attach():
     587  s->rxq = kmem_zalloc(s->nrxq * sizeof (struct sge_rxq), KM_SLEEP);
     670  struct sge_rxq *rxq;    (local)
     671  rxq = &s->rxq[pi->first_rxq];
     672  for (q = 0; q < pi->nrxq; q++, rxq++) {
     675  &rxq->iq);
  in t4_devo_detach():
     783  if (s->rxq != NULL)
     784  kmem_free(s->rxq, s->nrxq * sizeof (struct sge_rxq));
  in port_full_init():
    2603  struct sge_rxq *rxq;    (local)
    2620  for_each_rxq(pi, i, rxq) {
    2621  rss[i] = rxq->iq.abs_id;
  [all …]

/illumos-gate/usr/src/uts/common/io/bnxe/577xx/drivers/common/lm/device/

lm_recv.c
  in lm_return_packet_bytes():
    1253  lm_rx_chain_t *rxq = &LM_RXQ(pdev, qidx);    (local)
    1255  rxq->ret_bytes += returned_bytes;
    1265  if(S32_SUB(rxq->ret_bytes, rxq->ret_bytes_last_fw_update + HC_RET_BYTES_TH(pdev)) >= 0)
    1275  …LM_INTMEM_WRITE32(PFDEV(pdev), rxq->hc_sb_info.iro_dhc_offset, rxq->ret_bytes, BAR_CSTRORM_INTMEM);
    1276  rxq->ret_bytes_last_fw_update = rxq->ret_bytes;
    1278  … VF_REG_WR(pdev, VF_BAR0_CSDM_QUEUES_OFFSET + rxq->hc_sb_info.iro_dhc_offset, rxq->ret_bytes);
    1279  rxq->ret_bytes_last_fw_update = rxq->ret_bytes;

/illumos-gate/usr/src/uts/common/io/ral/

rt2560.c
  in rt2560_ring_hwsetup():
     691  RAL_WRITE(sc, RT2560_RXCSR2, sc->rxq.physaddr);
  in rt2560_rx_intr():
    1189  dr = &sc->rxq.dr_desc;
    1190  count = sc->rxq.count;
    1192  mutex_enter(&sc->rxq.rx_lock);
    1198  desc = &sc->rxq.desc[sc->rxq.cur];
    1199  data = &sc->rxq.data[sc->rxq.cur];
    1245  dr_bf = &sc->rxq.dr_rxbuf[sc->rxq.cur];
    1266  ral_debug(RAL_DBG_RX, "rx done idx=%u\n", sc->rxq.cur);
    1268  sc->rxq.cur = (sc->rxq.cur + 1) % RT2560_RX_RING_COUNT;
    1270  mutex_exit(&sc->rxq.rx_lock);
  [all …]

/illumos-gate/usr/src/uts/common/io/yge/

yge.c
  in yge_start_port():
    2417  int32_t rxq;    (local)
    2423  rxq = port->p_rxq;
    2584  CSR_WRITE_4(dev, Q_ADDR(rxq, Q_CSR), BMU_CLR_RESET);
    2585  CSR_WRITE_4(dev, Q_ADDR(rxq, Q_CSR), BMU_OPER_INIT);
    2586  CSR_WRITE_4(dev, Q_ADDR(rxq, Q_CSR), BMU_FIFO_OP_ON);
    2588  CSR_WRITE_2(dev, Q_ADDR(rxq, Q_WM), 0x80);
    2590  CSR_WRITE_2(dev, Q_ADDR(rxq, Q_WM), MSK_BMU_RX_WM);
    2595  CSR_WRITE_4(dev, Q_ADDR(rxq, Q_F), F_M_RX_RAM_DIS);
    2601  CSR_WRITE_4(dev, Q_ADDR(rxq, Q_CSR),
  in yge_set_rambuffer():
    2636  uint32_t rxq;    (local)
  [all …]

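The yge_start_port() hits program one queue's buffer-management unit through Q_ADDR(rxq, Q_CSR)-style accessors, that is, registers addressed relative to a per-queue window. One plausible shape for such an accessor is sketched below; the offsets and window size are made up and are not the Yukon register map.

#include <stdint.h>

#define Q_BASE      0x0400u    /* start of the queue register windows */
#define Q_WINDOW    0x0080u    /* bytes per queue window */
#define Q_REG_CSR   0x0014u    /* control/status register within a window */
#define Q_REG_WM    0x0040u    /* watermark register within a window */

/* Compute the absolute offset of register `reg` for queue window `q`. */
static inline uint32_t
q_addr(uint32_t q, uint32_t reg)
{
    return (Q_BASE + (q * Q_WINDOW) + reg);
}
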
/illumos-gate/usr/src/uts/common/io/bnxe/577xx/drivers/common/lm/vf/channel_vf/

lm_vf.c
  in lm_pf_vf_fill_setup_q_response():
     280  rxq_params = &request->rxq;
  in lm_vf_pf_setup_q():
    1895  mess->rxq.rcq_addr = lm_bd_chain_phys_addr(&(LM_RCQ(pdev,vf_qid).bd_chain), 0).as_u64;
    1896  mess->rxq.rcq_np_addr = lm_bd_chain_phys_addr(&(LM_RCQ(pdev,vf_qid).bd_chain), 1).as_u64;
    1897  mess->rxq.rxq_addr = lm_bd_chain_phys_addr(&(LM_RXQ_CHAIN(pdev,vf_qid,0)), 0).as_u64;
    1899  mess->rxq.sge_addr = LM_TPA_CHAIN_BD(pdev, vf_qid).bd_chain_phy.as_u64;
    1900  if (mess->rxq.sge_addr) {
    1901  mess->rxq.flags |= SW_VFPF_QUEUE_FLG_TPA;
    1904  mess->rxq.sge_addr = 0;
    1908  mess->rxq.vf_sb = vf_qid;    /* relative to vf */
    1909  mess->rxq.flags |= SW_VFPF_QUEUE_FLG_CACHE_ALIGN;
  [all …]

/illumos-gate/usr/src/uts/common/io/bnx/

bnx.h
     309  um_recv_qinfo rxq[NUM_RX_CHAIN];    (member)
     310  #define _RX_QINFO(pdev, chain) (pdev->rxq[chain])

/illumos-gate/usr/src/uts/common/io/rwn/

rt2860.c
  in rt2860_rx_intr():
    1718  struct rt2860_rx_data *data = &sc->rxq.data[sc->rxq.cur];
    1719  struct rt2860_rxd *rxd = &sc->rxq.rxd[sc->rxq.cur];
    1722  (void) ddi_dma_sync(sc->rxq.rxdesc_dma.dma_hdl,
    1723  sc->rxq.cur * sizeof (struct rt2860_rxd),
    1789  (void) ddi_dma_sync(sc->rxq.rxdesc_dma.dma_hdl,
    1790  sc->rxq.cur * sizeof (struct rt2860_rxd),
    1794  sc->rxq.cur = (sc->rxq.cur + 1) % RT2860_RX_RING_COUNT;
    1800  (sc->rxq.cur - 1) % RT2860_RX_RING_COUNT);
  in rt2860_init():
    2376  RT2860_WRITE(sc, RT2860_RX_BASE_PTR, sc->rxq.paddr);
  in rt2860_stop():
    2564  rt2860_reset_rx_ring(sc, &sc->rxq);
  [all …]

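The rt2860_rx_intr() hits bracket each descriptor access with ddi_dma_sync(): sync the one descriptor for the kernel before reading what the hardware wrote, then sync it back for the device after returning it. A hedged sketch of that per-descriptor pattern follows; the my_rxd_t layout and helper name are placeholders, not the rwn driver's.

#include <sys/types.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

typedef struct my_rxd {
    uint32_t flags;
    uint32_t physaddr;
} my_rxd_t;

/* Process one rx descriptor, syncing only that descriptor's bytes. */
static void
process_rx_slot(ddi_dma_handle_t ring_hdl, my_rxd_t *ring, uint_t cur)
{
    my_rxd_t *rxd = &ring[cur];

    /* Pull the device's writes into the CPU's view before reading. */
    (void) ddi_dma_sync(ring_hdl, cur * sizeof (my_rxd_t),
        sizeof (my_rxd_t), DDI_DMA_SYNC_FORKERNEL);

    /* ... examine rxd->flags and pass the frame up here ... */

    /* Hand the descriptor back to the device and push our writes out. */
    rxd->flags = 0;
    (void) ddi_dma_sync(ring_hdl, cur * sizeof (my_rxd_t),
        sizeof (my_rxd_t), DDI_DMA_SYNC_FORDEV);
}

Syncing just cur * sizeof (descriptor) for sizeof (descriptor) bytes, as the snippet does, keeps the sync cost proportional to one slot rather than the whole ring.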