Lines matching refs: txq
176 return container_of(q, struct sge_qset, txq[qidx]); in txq_to_qset()
654 memset(q->txq, 0, sizeof(struct sge_txq) * SGE_TXQ_PER_SET); in t3_reset_qset()
691 if (q->txq[i].desc) { in t3_free_qset()
693 t3_sge_enable_ecntxt(adapter, q->txq[i].cntxt_id, 0); in t3_free_qset()
695 if (q->txq[i].sdesc) { in t3_free_qset()
696 free_tx_desc(adapter, &q->txq[i], in t3_free_qset()
697 q->txq[i].in_use); in t3_free_qset()
698 kfree(q->txq[i].sdesc); in t3_free_qset()
701 q->txq[i].size * in t3_free_qset()
703 q->txq[i].desc, q->txq[i].phys_addr); in t3_free_qset()
704 __skb_queue_purge(&q->txq[i].sendq); in t3_free_qset()
731 qs->txq[TXQ_ETH].cntxt_id = FW_TUNNEL_SGEEC_START + id; in init_qset_cntxt()
732 qs->txq[TXQ_ETH].token = FW_TUNNEL_TID_START + id; in init_qset_cntxt()
733 qs->txq[TXQ_OFLD].cntxt_id = FW_OFLD_SGEEC_START + id; in init_qset_cntxt()
734 qs->txq[TXQ_CTRL].cntxt_id = FW_CTRL_SGEEC_START + id; in init_qset_cntxt()
735 qs->txq[TXQ_CTRL].token = FW_CTRL_TID_START + id; in init_qset_cntxt()
1245 static inline void t3_stop_tx_queue(struct netdev_queue *txq, in t3_stop_tx_queue() argument
1248 netif_tx_stop_queue(txq); in t3_stop_tx_queue()
1266 struct netdev_queue *txq; in t3_eth_xmit() local
1282 q = &qs->txq[TXQ_ETH]; in t3_eth_xmit()
1283 txq = netdev_get_tx_queue(dev, qidx); in t3_eth_xmit()
1291 t3_stop_tx_queue(txq, qs, q); in t3_eth_xmit()
1308 t3_stop_tx_queue(txq, qs, q); in t3_eth_xmit()
1313 netif_tx_start_queue(txq); in t3_eth_xmit()
1524 txq[TXQ_CTRL].qresume_task); in restart_ctrlq()
1525 struct sge_txq *q = &qs->txq[TXQ_CTRL]; in restart_ctrlq()
1565 ret = ctrl_xmit(adap, &adap->sge.qs[0].txq[TXQ_CTRL], skb); in t3_mgmt_tx()
1744 txq[TXQ_OFLD].qresume_task); in restart_offloadq()
1745 struct sge_txq *q = &qs->txq[TXQ_OFLD]; in restart_offloadq()
1839 return ctrl_xmit(adap, &qs->txq[TXQ_CTRL], skb); in t3_offload_tx()
1841 return ofld_xmit(adap, &qs->txq[TXQ_OFLD], skb); in t3_offload_tx()
1988 should_restart_tx(&qs->txq[TXQ_ETH]) && in restart_tx()
1990 qs->txq[TXQ_ETH].restarts++; in restart_tx()
1996 should_restart_tx(&qs->txq[TXQ_OFLD]) && in restart_tx()
1998 qs->txq[TXQ_OFLD].restarts++; in restart_tx()
2001 queue_work(cxgb3_wq, &qs->txq[TXQ_OFLD].qresume_task); in restart_tx()
2004 should_restart_tx(&qs->txq[TXQ_CTRL]) && in restart_tx()
2006 qs->txq[TXQ_CTRL].restarts++; in restart_tx()
2009 queue_work(cxgb3_wq, &qs->txq[TXQ_CTRL].qresume_task); in restart_tx()
2222 clear_bit(TXQ_RUNNING, &qs->txq[TXQ_ETH].flags); in handle_rsp_cntrl_info()
2227 qs->txq[TXQ_ETH].processed += credits; in handle_rsp_cntrl_info()
2231 qs->txq[TXQ_CTRL].processed += credits; in handle_rsp_cntrl_info()
2235 clear_bit(TXQ_RUNNING, &qs->txq[TXQ_OFLD].flags); in handle_rsp_cntrl_info()
2239 qs->txq[TXQ_OFLD].processed += credits; in handle_rsp_cntrl_info()
2256 struct sge_txq *txq = &qs->txq[TXQ_ETH]; in check_ring_db() local
2258 if (txq->cleaned + txq->in_use != txq->processed && in check_ring_db()
2259 !test_and_set_bit(TXQ_LAST_PKT_DB, &txq->flags)) { in check_ring_db()
2260 set_bit(TXQ_RUNNING, &txq->flags); in check_ring_db()
2262 V_EGRCNTX(txq->cntxt_id)); in check_ring_db()
2267 struct sge_txq *txq = &qs->txq[TXQ_OFLD]; in check_ring_db() local
2269 if (txq->cleaned + txq->in_use != txq->processed && in check_ring_db()
2270 !test_and_set_bit(TXQ_LAST_PKT_DB, &txq->flags)) { in check_ring_db()
2271 set_bit(TXQ_RUNNING, &txq->flags); in check_ring_db()
2273 V_EGRCNTX(txq->cntxt_id)); in check_ring_db()
2916 tbd[TXQ_ETH] = reclaim_completed_tx(adap, &qs->txq[TXQ_ETH], in sge_timer_tx()
2921 if (spin_trylock(&qs->txq[TXQ_OFLD].lock)) { in sge_timer_tx()
2922 tbd[TXQ_OFLD] = reclaim_completed_tx(adap, &qs->txq[TXQ_OFLD], in sge_timer_tx()
2924 spin_unlock(&qs->txq[TXQ_OFLD].lock); in sge_timer_tx()
3060 q->txq[i].desc = alloc_ring(adapter->pdev, p->txq_size[i], in t3_sge_alloc_qset()
3062 &q->txq[i].phys_addr, in t3_sge_alloc_qset()
3063 &q->txq[i].sdesc); in t3_sge_alloc_qset()
3064 if (!q->txq[i].desc) in t3_sge_alloc_qset()
3067 q->txq[i].gen = 1; in t3_sge_alloc_qset()
3068 q->txq[i].size = p->txq_size[i]; in t3_sge_alloc_qset()
3069 spin_lock_init(&q->txq[i].lock); in t3_sge_alloc_qset()
3070 skb_queue_head_init(&q->txq[i].sendq); in t3_sge_alloc_qset()
3073 INIT_WORK(&q->txq[TXQ_OFLD].qresume_task, restart_offloadq); in t3_sge_alloc_qset()
3074 INIT_WORK(&q->txq[TXQ_CTRL].qresume_task, restart_ctrlq); in t3_sge_alloc_qset()
3085 q->txq[TXQ_ETH].stop_thres = nports * in t3_sge_alloc_qset()
3126 ret = t3_sge_init_ecntxt(adapter, q->txq[TXQ_ETH].cntxt_id, USE_GTS, in t3_sge_alloc_qset()
3127 SGE_CNTXT_ETH, id, q->txq[TXQ_ETH].phys_addr, in t3_sge_alloc_qset()
3128 q->txq[TXQ_ETH].size, q->txq[TXQ_ETH].token, in t3_sge_alloc_qset()
3134 ret = t3_sge_init_ecntxt(adapter, q->txq[TXQ_OFLD].cntxt_id, in t3_sge_alloc_qset()
3136 q->txq[TXQ_OFLD].phys_addr, in t3_sge_alloc_qset()
3137 q->txq[TXQ_OFLD].size, 0, 1, 0); in t3_sge_alloc_qset()
3143 ret = t3_sge_init_ecntxt(adapter, q->txq[TXQ_CTRL].cntxt_id, 0, in t3_sge_alloc_qset()
3145 q->txq[TXQ_CTRL].phys_addr, in t3_sge_alloc_qset()
3146 q->txq[TXQ_CTRL].size, in t3_sge_alloc_qset()
3147 q->txq[TXQ_CTRL].token, 1, 0); in t3_sge_alloc_qset()
3294 cancel_work_sync(&qs->txq[TXQ_OFLD].qresume_task); in t3_sge_stop()
3295 cancel_work_sync(&qs->txq[TXQ_CTRL].qresume_task); in t3_sge_stop()
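
All of the references above go through the per-port queue set: struct sge_qset embeds an array txq[SGE_TXQ_PER_SET] indexed by TXQ_ETH, TXQ_OFLD and TXQ_CTRL, and txq_to_qset() (line 176) recovers the owning qset from a pointer to one of those entries. Below is a minimal userspace sketch of that idiom; the struct layout is deliberately simplified and the field names are assumed from the references above rather than taken from the driver's real definition.

#include <stddef.h>
#include <stdio.h>

/* Simplified stand-ins; only the pieces needed to show the idiom. */
#define SGE_TXQ_PER_SET 3
enum { TXQ_ETH = 0, TXQ_OFLD = 1, TXQ_CTRL = 2 };

struct sge_txq {
	unsigned int cntxt_id;	/* SGE egress context id */
	unsigned int size;	/* number of descriptors */
	unsigned int in_use;	/* descriptors currently outstanding */
};

struct sge_qset {
	struct sge_txq txq[SGE_TXQ_PER_SET];
};

/*
 * Userspace equivalent of the kernel's container_of(): subtract the member's
 * offset from the member's address to get back to the enclosing structure.
 * Note that a variable array index inside offsetof() is a GCC/Clang
 * extension, which the driver's txq_to_qset() relies on as well.
 */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static struct sge_qset *txq_to_qset(struct sge_txq *q, int qidx)
{
	return container_of(q, struct sge_qset, txq[qidx]);
}

int main(void)
{
	struct sge_qset qs = { { { 0 } } };
	struct sge_txq *q = &qs.txq[TXQ_CTRL];

	/* The recovered pointer should equal &qs. */
	printf("qset %p, recovered %p\n",
	       (void *)&qs, (void *)txq_to_qset(q, TXQ_CTRL));
	return 0;
}

Carrying the index alongside the pointer is what lets one helper serve all three queue types; the restart and reclaim paths listed above (restart_tx(), sge_timer_tx(), handle_rsp_cntrl_info()) select the entry with the same TXQ_* indices.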