
Searched refs:txq (Results 1 – 25 of 48) sorted by relevance


/illumos-gate/usr/src/uts/common/io/ena/
ena_tx.c
19 ena_free_tx_dma(ena_txq_t *txq) in ena_free_tx_dma() argument
21 if (txq->et_tcbs != NULL) { in ena_free_tx_dma()
22 for (uint_t i = 0; i < txq->et_sq_num_descs; i++) { in ena_free_tx_dma()
23 ena_tx_control_block_t *tcb = &txq->et_tcbs[i]; in ena_free_tx_dma()
29 kmem_free(txq->et_tcbs, in ena_free_tx_dma()
30 sizeof (*txq->et_tcbs) * txq->et_sq_num_descs); in ena_free_tx_dma()
31 kmem_free(txq->et_tcbs_freelist, in ena_free_tx_dma()
32 sizeof (ena_tx_control_block_t *) * txq->et_sq_num_descs); in ena_free_tx_dma()
34 txq->et_tcbs = NULL; in ena_free_tx_dma()
35 txq->et_tcbs_freelist = NULL; in ena_free_tx_dma()
[all …]
ena_watchdog.c
64 ena_txq_t *txq = &ena->ena_txqs[i]; in ena_watchdog() local
67 mutex_enter(&txq->et_lock); in ena_watchdog()
68 if (txq->et_blocked) in ena_watchdog()
69 s = ++txq->et_stall_watchdog; in ena_watchdog()
71 s = txq->et_stall_watchdog = 0; in ena_watchdog()
72 mutex_exit(&txq->et_lock); in ena_watchdog()
ena_stats.c
348 ena_stat_txq_cleanup(ena_txq_t *txq) in ena_stat_txq_cleanup() argument
350 if (txq->et_kstat != NULL) { in ena_stat_txq_cleanup()
351 kstat_delete(txq->et_kstat); in ena_stat_txq_cleanup()
352 txq->et_kstat = NULL; in ena_stat_txq_cleanup()
357 ena_stat_txq_init(ena_txq_t *txq) in ena_stat_txq_init() argument
359 ena_t *ena = txq->et_ena; in ena_stat_txq_init()
362 ena_txq_stat_t *ets = &txq->et_stat; in ena_stat_txq_init()
364 (void) snprintf(buf, sizeof (buf), "txq_%d", txq->et_txqs_idx); in ena_stat_txq_init()
376 txq->et_kstat = ksp; in ena_stat_txq_init()
398 kstat_install(txq->et_kstat); in ena_stat_txq_init()
[all …]
ena_intr.c
29 ena_txq_t *txq = &ena->ena_txqs[vector - 1]; in ena_io_intr() local
36 ASSERT3P(txq, !=, NULL); in ena_io_intr()
38 ena_tx_intr_work(txq); in ena_io_intr()
45 intr_ctrl = ena_hw_abs_read32(ena, txq->et_cq_unmask_addr); in ena_io_intr()
47 ena_hw_abs_write32(ena, txq->et_cq_unmask_addr, intr_ctrl); in ena_io_intr()
ena.c
1532 ena_txq_t *txq = &ena->ena_txqs[i]; in ena_attach_alloc_txqs() local
1534 txq->et_txqs_idx = i; in ena_attach_alloc_txqs()
1536 txq->et_intr_vector = i + 1; in ena_attach_alloc_txqs()
1537 txq->et_mrh = NULL; in ena_attach_alloc_txqs()
1540 mutex_init(&txq->et_lock, NULL, MUTEX_DRIVER, in ena_attach_alloc_txqs()
1542 mutex_init(&txq->et_stat_lock, NULL, MUTEX_DRIVER, in ena_attach_alloc_txqs()
1546 txq->et_ena = ena; in ena_attach_alloc_txqs()
1547 txq->et_sq_num_descs = ena->ena_txq_num_descs; in ena_attach_alloc_txqs()
1548 txq->et_cq_num_descs = ena->ena_txq_num_descs; in ena_attach_alloc_txqs()
1550 if (!ena_stat_txq_init(txq)) { in ena_attach_alloc_txqs()
[all …]
/illumos-gate/usr/src/uts/common/io/bnx/570x/driver/common/lmdev/
bnx_lm_send.c
33 lm_tx_chain_t *txq; in lm_send_packet() local
42 txq = &pdev->tx_info.chain[chain_idx]; in lm_send_packet()
50 GET_CID(txq->cid_addr), in lm_send_packet()
53 txq->prod_idx); in lm_send_packet()
56 GET_CID(txq->cid_addr), in lm_send_packet()
59 txq->prod_bseq); in lm_send_packet()
72 packet->u1.tx.dbg_start_bd = txq->prod_bd; in lm_send_packet()
73 packet->u1.tx.dbg_start_bd_idx = txq->prod_idx; in lm_send_packet()
79 if(frags->cnt > txq->bd_left) in lm_send_packet()
88 txq->bd_left -= (u16_t) frags->cnt; in lm_send_packet()
[all …]
bnx_hw_reset.c
1007 lm_tx_chain_t *txq; in init_l2txq() local
1020 txq = &pdev->tx_info.chain[idx]; in init_l2txq()
1022 bd_page_cnt = pdev->params.l2_tx_bd_page_cnt[txq->idx]; in init_l2txq()
1024 txq->prod_idx = 0; in init_l2txq()
1025 txq->con_idx = 0; in init_l2txq()
1026 txq->prod_bseq = 0; in init_l2txq()
1027 txq->prod_bd = txq->bd_chain_virt; in init_l2txq()
1028 txq->bd_left = bd_page_cnt * MAX_BD_PER_PAGE - 1; in init_l2txq()
1036 (u8_t *) txq->bd_chain_virt, in init_l2txq()
1037 txq->bd_chain_phy, in init_l2txq()
[all …]
bnx_lm_main.c
1599 lm_tx_chain_t *txq) in sblk_tx_con_idx_ptr() argument
1608 switch(txq->idx) in sblk_tx_con_idx_ptr()
1629 DbgBreakIf(txq->idx != pdev->tx_info.cu_idx); in sblk_tx_con_idx_ptr()
1631 if(txq->idx == pdev->tx_info.cu_idx) in sblk_tx_con_idx_ptr()
1640 switch(txq->idx) in sblk_tx_con_idx_ptr()
1711 lm_tx_chain_t *txq; in init_l2tx_resc() local
1733 txq = &pdev->tx_info.chain[idx]; in init_l2tx_resc()
1734 txq->idx = idx; in init_l2tx_resc()
1735 txq->cid_addr = GET_CID_ADDR(L2TX_CID_BASE + 2 * txq->idx); in init_l2tx_resc()
1737 s_list_init(&txq->active_descq, NULL, NULL, 0); in init_l2tx_resc()
[all …]
/illumos-gate/usr/src/uts/common/io/cxgbe/t4nex/
t4_sge.c
111 static int alloc_txq(struct port_info *pi, struct sge_txq *txq, int idx);
112 static int free_txq(struct port_info *pi, struct sge_txq *txq);
134 static int get_frame_txinfo(struct sge_txq *txq, mblk_t **fp,
136 static inline int fits_in_txb(struct sge_txq *txq, int len, int *waste);
137 static inline int copy_into_txb(struct sge_txq *txq, mblk_t *m, int len,
140 static inline int add_mblk(struct sge_txq *txq, struct txinfo *txinfo,
142 static void free_txinfo_resources(struct sge_txq *txq, struct txinfo *txinfo);
143 static int add_to_txpkts(struct sge_txq *txq, struct txpkts *txpkts, mblk_t *m,
145 static void write_txpkts_wr(struct sge_txq *txq, struct txpkts *txpkts);
146 static int write_txpkt_wr(struct port_info *pi, struct sge_txq *txq, mblk_t *m,
[all …]
adapter.h
425 struct sge_txq *txq; /* NIC tx queues */ member
630 #define TXQ_LOCK(txq) EQ_LOCK(&(txq)->eq) argument
631 #define TXQ_UNLOCK(txq) EQ_UNLOCK(&(txq)->eq) argument
632 #define TXQ_LOCK_ASSERT_OWNED(txq) EQ_LOCK_ASSERT_OWNED(&(txq)->eq) argument
633 #define TXQ_LOCK_ASSERT_NOTOWNED(txq) EQ_LOCK_ASSERT_NOTOWNED(&(txq)->eq) argument
635 #define for_each_txq(pi, iter, txq) \ argument
636 txq = &pi->adapter->sge.txq[pi->first_txq]; \
637 for (iter = 0; iter < pi->ntxq; ++iter, ++txq)
919 void t4_mac_tx_update(struct port_info *pi, struct sge_txq *txq);
t4_mac.c
941 struct sge_txq *txq = (struct sge_txq *)rh; in t4_tx_stat() local
945 *val = txq->txbytes; in t4_tx_stat()
949 *val = txq->txpkts; in t4_tx_stat()
994 struct sge_txq *txq = &pi->adapter->sge.txq[pi->first_txq + ring_index]; in t4_fill_ring() local
995 txq->ring_handle = rh; in t4_fill_ring()
996 infop->mri_driver = (mac_ring_driver_t)txq; in t4_fill_ring()
1014 struct sge_txq *txq = &sc->sge.txq[pi->first_txq]; in t4_mc_tx() local
1016 return (t4_eth_tx(txq, m)); in t4_mc_tx()
2049 t4_mac_tx_update(struct port_info *pi, struct sge_txq *txq) in t4_mac_tx_update() argument
2052 mac_tx_ring_update(pi->mh, txq->ring_handle); in t4_mac_tx_update()
/illumos-gate/usr/src/uts/common/io/arn/
arn_xmit.c
97 static void arn_tx_send_ht_normal(struct arn_softc *sc, struct ath_txq *txq,
101 static void arn_tx_txqaddbuf(struct arn_softc *sc, struct ath_txq *txq,
148 arn_tx_queue_tid(struct ath_txq *txq, struct ath_atx_tid *tid) in arn_tx_queue_tid() argument
168 list_insert_tail(&txq->axq_acq, &ac->list); in arn_tx_queue_tid()
175 struct ath_txq *txq = &sc->sc_txq[tid->ac->qnum]; in arn_tx_pause_tid() local
177 mutex_enter(&txq->axq_lock); in arn_tx_pause_tid()
181 mutex_exit(&txq->axq_lock); in arn_tx_pause_tid()
188 struct ath_txq *txq = &sc->sc_txq[tid->ac->qnum]; in arn_tx_resume_tid() local
191 mutex_enter(&txq->axq_lock); in arn_tx_resume_tid()
204 arn_tx_queue_tid(txq, tid); in arn_tx_resume_tid()
[all …]
/illumos-gate/usr/src/uts/intel/io/vmxnet3s/
vmxnet3_tx.c
41 vmxnet3_txqueue_init(vmxnet3_softc_t *dp, vmxnet3_txqueue_t *txq) in vmxnet3_txqueue_init() argument
50 vmxnet3_txqueue_fini(vmxnet3_softc_t *dp, vmxnet3_txqueue_t *txq) in vmxnet3_txqueue_fini() argument
56 for (i = 0; i < txq->cmdRing.size; i++) { in vmxnet3_txqueue_fini()
57 mblk_t *mp = txq->metaRing[i].mp; in vmxnet3_txqueue_fini()
158 vmxnet3_tx_one(vmxnet3_softc_t *dp, vmxnet3_txqueue_t *txq, in vmxnet3_tx_one() argument
163 vmxnet3_cmdring_t *cmdRing = &txq->cmdRing; in vmxnet3_tx_one()
164 Vmxnet3_TxQueueCtrl *txqCtrl = txq->sharedCtrl; in vmxnet3_tx_one()
206 ASSERT(!txq->metaRing[eopIdx].mp); in vmxnet3_tx_one()
286 txq->metaRing[sopIdx].mp = mp; in vmxnet3_tx_one()
287 txq->metaRing[eopIdx].sopIdx = sopIdx; in vmxnet3_tx_one()
[all …]
vmxnet3_main.c
337 vmxnet3_txqueue_t *txq = &dp->txQueue; in vmxnet3_prepare_txqueue() local
340 ASSERT(!(txq->cmdRing.size & VMXNET3_RING_SIZE_MASK)); in vmxnet3_prepare_txqueue()
341 ASSERT(!(txq->compRing.size & VMXNET3_RING_SIZE_MASK)); in vmxnet3_prepare_txqueue()
342 ASSERT(!txq->cmdRing.dma.buf && !txq->compRing.dma.buf); in vmxnet3_prepare_txqueue()
344 if ((err = vmxnet3_alloc_cmdring(dp, &txq->cmdRing)) != 0) { in vmxnet3_prepare_txqueue()
347 tqdesc->conf.txRingBasePA = txq->cmdRing.dma.bufPA; in vmxnet3_prepare_txqueue()
348 tqdesc->conf.txRingSize = txq->cmdRing.size; in vmxnet3_prepare_txqueue()
352 if ((err = vmxnet3_alloc_compring(dp, &txq->compRing)) != 0) { in vmxnet3_prepare_txqueue()
355 tqdesc->conf.compRingBasePA = txq->compRing.dma.bufPA; in vmxnet3_prepare_txqueue()
356 tqdesc->conf.compRingSize = txq->compRing.size; in vmxnet3_prepare_txqueue()
[all …]
vmxnet3.h
187 int vmxnet3_txqueue_init(vmxnet3_softc_t *dp, vmxnet3_txqueue_t *txq);
189 boolean_t vmxnet3_tx_complete(vmxnet3_softc_t *dp, vmxnet3_txqueue_t *txq);
190 void vmxnet3_txqueue_fini(vmxnet3_softc_t *dp, vmxnet3_txqueue_t *txq);
/illumos-gate/usr/src/uts/common/io/ath/
ath_aux.c
137 struct ath_txq *txq; in ath_set_data_queue() local
174 txq = &asc->asc_txq[qnum]; in ath_set_data_queue()
175 txq->axq_qnum = qnum; in ath_set_data_queue()
176 txq->axq_depth = 0; in ath_set_data_queue()
177 txq->axq_intrcnt = 0; in ath_set_data_queue()
178 txq->axq_link = NULL; in ath_set_data_queue()
179 list_create(&txq->axq_list, sizeof (struct ath_buf), in ath_set_data_queue()
181 mutex_init(&txq->axq_lock, NULL, MUTEX_DRIVER, NULL); in ath_set_data_queue()
209 struct ath_txq *txq = &asc->asc_txq[i]; in ath_txq_cleanup() local
211 ATH_HAL_RELEASETXQUEUE(asc->asc_ah, txq->axq_qnum); in ath_txq_cleanup()
[all …]
ath_main.c
679 struct ath_txq *txq; in ath_tx_start() local
788 txq = asc->asc_ac2q[WME_AC_VO]; in ath_tx_start()
800 txq = asc->asc_ac2q[WME_AC_VO]; in ath_tx_start()
811 txq = asc->asc_ac2q[WME_AC_BK]; in ath_tx_start()
878 if (++txq->axq_intrcnt >= ATH_TXINTR_PERIOD) { in ath_tx_start()
880 txq->axq_intrcnt = 0; in ath_tx_start()
905 txq->axq_qnum, rix, shortPreamble, *(uint16_t *)wh->i_dur)); in ath_tx_start()
930 mutex_enter(&txq->axq_lock); in ath_tx_start()
931 list_insert_tail(&txq->axq_list, bf); in ath_tx_start()
932 if (txq->axq_link == NULL) { in ath_tx_start()
[all …]
/illumos-gate/usr/src/uts/common/io/ral/
rt2560.c
684 RAL_WRITE(sc, RT2560_TXCSR3, sc->txq.physaddr); in rt2560_ring_hwsetup()
1024 dr = &sc->txq.dr_desc; in rt2560_tx_intr()
1025 count = sc->txq.count; in rt2560_tx_intr()
1030 mutex_enter(&sc->txq.tx_lock); in rt2560_tx_intr()
1033 desc = &sc->txq.desc[sc->txq.next]; in rt2560_tx_intr()
1034 data = &sc->txq.data[sc->txq.next]; in rt2560_tx_intr()
1082 ral_debug(RAL_DBG_INTR, "tx done idx=%u\n", sc->txq.next); in rt2560_tx_intr()
1084 sc->txq.queued--; in rt2560_tx_intr()
1085 sc->txq.next = (sc->txq.next + 1) % RT2560_TX_RING_COUNT; in rt2560_tx_intr()
1088 sc->txq.queued < (RT2560_TX_RING_COUNT - 32)) { in rt2560_tx_intr()
[all …]
/illumos-gate/usr/src/uts/common/io/bnxe/
bnxe_lock.c
43 void BNXE_LOCK_ENTER_TX (um_device_t * pUM, int idx) { mutex_enter(&pUM->txq[idx].txMut… in BNXE_LOCK_ENTER_TX()
44 void BNXE_LOCK_EXIT_TX (um_device_t * pUM, int idx) { mutex_exit(&pUM->txq[idx].txMute… in BNXE_LOCK_EXIT_TX()
45 void BNXE_LOCK_ENTER_FREETX (um_device_t * pUM, int idx) { mutex_enter(&pUM->txq[idx].freeT… in BNXE_LOCK_ENTER_FREETX()
46 void BNXE_LOCK_EXIT_FREETX (um_device_t * pUM, int idx) { mutex_exit(&pUM->txq[idx].freeTx… in BNXE_LOCK_EXIT_FREETX()
bnxe_tx.c
164 s_list_add_tail(&pUM->txq[idx].freeTxDescQ, pPktList); in BnxeTxPktsReclaim()
173 TxQueue * pTxQ = &pUM->txq[idx]; in BnxeTxSendWaitingPkt()
219 TxQueue * pTxQ = &pUM->txq[idx]; in BnxeTxRingProcess()
836 TxQueue * pTxQ = &pUM->txq[idx]; in BnxeTxSendMblk()
1055 tmpList = pUM->txq[idx].waitTxDescQ; in BnxeTxPktsAbortIdx()
1056 s_list_clear(&pUM->txq[idx].waitTxDescQ); in BnxeTxPktsAbortIdx()
1197 pTxQ = &pUM->txq[idx]; in BnxeTxPktsInitIdx()
1321 pTxQ = &pUM->txq[idx]; in BnxeTxPktsFiniIdx()
/illumos-gate/usr/src/uts/common/io/bnxe/577xx/drivers/common/lm/device/
lm_send.c
268 IN lm_tx_chain_t *txq, in lm_get_coalesce_buffer() argument
274 if (ERR_IF(CHK_NULL(pdev) || CHK_NULL(txq) || !buf_size)) { in lm_get_coalesce_buffer()
279 coalesce_buf_cnt = s_list_entry_cnt(&txq->coalesce_buf_list); in lm_get_coalesce_buffer()
283 &txq->coalesce_buf_list); in lm_get_coalesce_buffer()
294 txq->coalesce_buf_used++; in lm_get_coalesce_buffer()
298 s_list_push_tail(&txq->coalesce_buf_list, &coalesce_buf->link); in lm_get_coalesce_buffer()
315 IN lm_tx_chain_t *txq, in lm_put_coalesce_buffer() argument
318 if (ERR_IF(CHK_NULL(pdev) || CHK_NULL(txq) || CHK_NULL(coalesce_buf))) { in lm_put_coalesce_buffer()
323 s_list_push_tail(&txq->coalesce_buf_list, &coalesce_buf->link); in lm_put_coalesce_buffer()
344 IN lm_tx_chain_t *txq, in lm_copy_packet_to_coalesce_buffer() argument
[all …]
/illumos-gate/usr/src/uts/common/io/bnx/
bnx.h
301 um_xmit_qinfo txq[NUM_TX_CHAIN]; member
302 #define _TX_QINFO(pdev, chain) (pdev->txq[chain])
303 #define _TXQ_FREE_DESC(pdev, chain) (pdev->txq[chain].free_tx_desc)
304 #define _TXQ_RESC_DESC(pdev, chain) (pdev->txq[chain].tx_resc_que)
bnxsnd.c
290 lm_tx_chain_t *txq; in bnx_xmit_packet() local
299 txq = &lmdevice->tx_info.chain[ringidx]; in bnx_xmit_packet()
302 if (txq->bd_left < BNX_MAX_SGL_ENTRIES) { in bnx_xmit_packet()
315 if (s_list_is_empty(waitq) && txq->bd_left >= lmfraglist->cnt) { in bnx_xmit_packet()
323 if (txq->bd_left >= BNX_MAX_SGL_ENTRIES) { in bnx_xmit_packet()
523 lm_tx_chain_t *txq; in bnx_xmit_ring_xmit_qpkt() local
532 txq = &lmdevice->tx_info.chain[ringidx]; in bnx_xmit_ring_xmit_qpkt()
538 if (lmfraglist->cnt > txq->bd_left) { in bnx_xmit_ring_xmit_qpkt()
/illumos-gate/usr/src/uts/common/io/yge/
yge.c
2418 int32_t txq; in yge_start_port() local
2422 txq = port->p_txq; in yge_start_port()
2561 CSR_WRITE_4(dev, Q_ADDR(txq, Q_CSR), BMU_CLR_RESET); in yge_start_port()
2562 CSR_WRITE_4(dev, Q_ADDR(txq, Q_CSR), BMU_OPER_INIT); in yge_start_port()
2563 CSR_WRITE_4(dev, Q_ADDR(txq, Q_CSR), BMU_FIFO_OP_ON); in yge_start_port()
2564 CSR_WRITE_2(dev, Q_ADDR(txq, Q_WM), MSK_BMU_TX_WM); in yge_start_port()
2570 CSR_WRITE_2(dev, Q_ADDR(txq, Q_AL), MSK_ECU_TXFF_LEV); in yge_start_port()
2579 CSR_WRITE_4(dev, Q_ADDR(txq, Q_F), F_TX_CHK_AUTO_OFF); in yge_start_port()
2637 uint32_t txq; in yge_set_rambuffer() local
2642 txq = port->p_txq; in yge_set_rambuffer()
[all …]
/illumos-gate/usr/src/uts/common/io/rwd/
rt2661.c
907 ring = (qid <= 3) ? &sc->txq[qid] : &sc->mgtq; in rt2661_tx_intr()
1159 rt2661_tx_dma_intr(sc, &sc->txq[0]); in rt2661_intr()
1165 rt2661_tx_dma_intr(sc, &sc->txq[1]); in rt2661_intr()
1171 rt2661_tx_dma_intr(sc, &sc->txq[2]); in rt2661_intr()
1177 rt2661_tx_dma_intr(sc, &sc->txq[3]); in rt2661_intr()
1424 ring = &sc->txq[0]; in rt2661_send()
2121 rt2661_reset_tx_ring(sc, &sc->txq[0]); in rt2661_stop_locked()
2122 rt2661_reset_tx_ring(sc, &sc->txq[1]); in rt2661_stop_locked()
2123 rt2661_reset_tx_ring(sc, &sc->txq[2]); in rt2661_stop_locked()
2124 rt2661_reset_tx_ring(sc, &sc->txq[3]); in rt2661_stop_locked()
[all …]
