Lines matching full:tcb
138 bnad_txq_cleanup(struct bnad *bnad, struct bna_tcb *tcb) in bnad_txq_cleanup() argument
140 struct bnad_tx_unmap *unmap_q = tcb->unmap_q; in bnad_txq_cleanup()
144 for (i = 0; i < tcb->q_depth; i++) { in bnad_txq_cleanup()
148 bnad_tx_buff_unmap(bnad, unmap_q, tcb->q_depth, i); in bnad_txq_cleanup()
160 bnad_txcmpl_process(struct bnad *bnad, struct bna_tcb *tcb) in bnad_txcmpl_process() argument
164 struct bnad_tx_unmap *unmap_q = tcb->unmap_q; in bnad_txcmpl_process()
169 if (!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) in bnad_txcmpl_process()
172 hw_cons = *(tcb->hw_consumer_index); in bnad_txcmpl_process()
174 cons = tcb->consumer_index; in bnad_txcmpl_process()
175 q_depth = tcb->q_depth; in bnad_txcmpl_process()
178 BUG_ON(!(wis <= BNA_QE_IN_USE_CNT(tcb, tcb->q_depth))); in bnad_txcmpl_process()
196 tcb->consumer_index = hw_cons; in bnad_txcmpl_process()
198 tcb->txq->tx_packets += sent_packets; in bnad_txcmpl_process()
199 tcb->txq->tx_bytes += sent_bytes; in bnad_txcmpl_process()
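
The bnad_txcmpl_process() hits above show the completion path: read the hardware consumer index, work out how many entries have been sent since tcb->consumer_index, sanity-check that against the in-use count, then advance the consumer index and accumulate tx_packets/tx_bytes. Below is a minimal standalone sketch of the circular-index arithmetic this relies on; the macro bodies are assumptions modelled on the BNA_QE_IN_USE_CNT/BNA_QE_FREE_CNT usage visible here, not the driver's actual definitions.

/*
 * Illustrative sketch only: producer/consumer accounting for a circular
 * queue whose depth is a power of two. The macros mimic how
 * BNA_QE_IN_USE_CNT/BNA_QE_FREE_CNT appear to be used above.
 */
#include <stdio.h>

struct q_idx {
	unsigned int producer_index;
	unsigned int consumer_index;
};

#define QE_IN_USE_CNT(q, depth) \
	(((q)->producer_index - (q)->consumer_index) & ((depth) - 1))
#define QE_FREE_CNT(q, depth) \
	(((q)->consumer_index - (q)->producer_index - 1) & ((depth) - 1))

int main(void)
{
	struct q_idx q = { .producer_index = 10, .consumer_index = 4 };
	unsigned int depth = 64;	/* must be a power of two */
	unsigned int hw_cons = 9;	/* what *tcb->hw_consumer_index would report */
	unsigned int wis = (hw_cons - q.consumer_index) & (depth - 1);

	/* 6 entries posted but not yet completed, 57 still free */
	printf("in use: %u, free: %u\n",
	       QE_IN_USE_CNT(&q, depth), QE_FREE_CNT(&q, depth));

	printf("work items to reclaim: %u\n", wis);	/* 5 */
	q.consumer_index = hw_cons;	/* mirrors tcb->consumer_index = hw_cons */
	return 0;
}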
205 bnad_tx_complete(struct bnad *bnad, struct bna_tcb *tcb) in bnad_tx_complete() argument
210 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) in bnad_tx_complete()
213 sent = bnad_txcmpl_process(bnad, tcb); in bnad_tx_complete()
217 BNA_QE_FREE_CNT(tcb, tcb->q_depth) >= in bnad_tx_complete()
219 if (test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) { in bnad_tx_complete()
226 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_tx_complete()
227 bna_ib_ack(tcb->i_dbell, sent); in bnad_tx_complete()
230 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_tx_complete()
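
bnad_tx_complete() brackets its work with test_and_set_bit()/clear_bit() on BNAD_TXQ_FREE_SENT, so only one context at a time (MSI-X handler, legacy ISR, cleanup work, or the xmit path, per the other hits) reclaims sent buffers. A user-space analogue of that try-lock pattern, using C11 atomics in place of the kernel bit operations, might look like the sketch below; the function name is made up for illustration.

/*
 * Illustrative sketch only: a "free-sent" try-lock so that only one caller
 * at a time reclaims completed TX buffers, analogous to the
 * BNAD_TXQ_FREE_SENT handling above.
 */
#include <stdatomic.h>
#include <stdbool.h>

static atomic_flag free_sent = ATOMIC_FLAG_INIT;

static bool try_reclaim(void)
{
	if (atomic_flag_test_and_set(&free_sent))
		return false;		/* another context is already reclaiming */

	/* ... process completions, free skbs, wake the queue ... */

	atomic_flag_clear(&free_sent);	/* mirrors clear_bit(BNAD_TXQ_FREE_SENT, ...) */
	return true;
}

int main(void)
{
	return try_reclaim() ? 0 : 1;
}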
239 struct bna_tcb *tcb = (struct bna_tcb *)data; in bnad_msix_tx() local
240 struct bnad *bnad = tcb->bnad; in bnad_msix_tx()
242 bnad_tx_complete(bnad, tcb); in bnad_msix_tx()
801 struct bna_tcb *tcb = NULL; in bnad_isr() local
828 tcb = bnad->tx_info[i].tcb[j]; in bnad_isr()
829 if (tcb && test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) in bnad_isr()
830 bnad_tx_complete(bnad, bnad->tx_info[i].tcb[j]); in bnad_isr()
954 struct bna_tcb *tcb = in bnad_cb_ethport_link_status() local
955 bnad->tx_info[tx_id].tcb[tcb_id]; in bnad_cb_ethport_link_status()
957 if (!tcb) in bnad_cb_ethport_link_status()
960 txq_id = tcb->id; in bnad_cb_ethport_link_status()
963 &tcb->flags)) { in bnad_cb_ethport_link_status()
1000 bnad_cb_tcb_setup(struct bnad *bnad, struct bna_tcb *tcb) in bnad_cb_tcb_setup() argument
1003 (struct bnad_tx_info *)tcb->txq->tx->priv; in bnad_cb_tcb_setup()
1005 tcb->priv = tcb; in bnad_cb_tcb_setup()
1006 tx_info->tcb[tcb->id] = tcb; in bnad_cb_tcb_setup()
1010 bnad_cb_tcb_destroy(struct bnad *bnad, struct bna_tcb *tcb) in bnad_cb_tcb_destroy() argument
1013 (struct bnad_tx_info *)tcb->txq->tx->priv; in bnad_cb_tcb_destroy()
1015 tx_info->tcb[tcb->id] = NULL; in bnad_cb_tcb_destroy()
1016 tcb->priv = NULL; in bnad_cb_tcb_destroy()
1042 struct bna_tcb *tcb; in bnad_cb_tx_stall() local
1047 tcb = tx_info->tcb[i]; in bnad_cb_tx_stall()
1048 if (!tcb) in bnad_cb_tx_stall()
1050 txq_id = tcb->id; in bnad_cb_tx_stall()
1051 clear_bit(BNAD_TXQ_TX_STARTED, &tcb->flags); in bnad_cb_tx_stall()
1060 struct bna_tcb *tcb; in bnad_cb_tx_resume() local
1065 tcb = tx_info->tcb[i]; in bnad_cb_tx_resume()
1066 if (!tcb) in bnad_cb_tx_resume()
1068 txq_id = tcb->id; in bnad_cb_tx_resume()
1070 BUG_ON(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)); in bnad_cb_tx_resume()
1071 set_bit(BNAD_TXQ_TX_STARTED, &tcb->flags); in bnad_cb_tx_resume()
1072 BUG_ON(*(tcb->hw_consumer_index) != 0); in bnad_cb_tx_resume()
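
bnad_cb_tx_stall() and bnad_cb_tx_resume() gate transmission by clearing and setting BNAD_TXQ_TX_STARTED on every tcb of the Tx object; resume additionally asserts that the flag was clear and that the hardware consumer index has been reset to zero. A compact standalone sketch of that per-queue gating follows; the structure, constant, and function names here are invented for the example.

/* Illustrative sketch only: start/stop gating of per-TXQ state, analogous
 * to the BNAD_TXQ_TX_STARTED handling above. */
#include <assert.h>
#include <stddef.h>

#define TXQ_STARTED	(1UL << 0)
#define NUM_TXQ		4

struct txq_ctrl {
	unsigned long flags;
	unsigned int hw_consumer_index;
};

static void tx_stall(struct txq_ctrl *q, size_t n)
{
	for (size_t i = 0; i < n; i++)
		q[i].flags &= ~TXQ_STARTED;	/* mirrors clear_bit(BNAD_TXQ_TX_STARTED, ...) */
}

static void tx_resume(struct txq_ctrl *q, size_t n)
{
	for (size_t i = 0; i < n; i++) {
		assert(!(q[i].flags & TXQ_STARTED));	/* BUG_ON analogue */
		q[i].flags |= TXQ_STARTED;
		assert(q[i].hw_consumer_index == 0);	/* hardware index must be reset */
	}
}

int main(void)
{
	struct txq_ctrl q[NUM_TXQ] = { 0 };

	tx_resume(q, NUM_TXQ);
	tx_stall(q, NUM_TXQ);
	return 0;
}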
1100 struct bna_tcb *tcb; in bnad_tx_cleanup() local
1105 tcb = tx_info->tcb[i]; in bnad_tx_cleanup()
1106 if (!tcb) in bnad_tx_cleanup()
1109 bnad = tcb->bnad; in bnad_tx_cleanup()
1111 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) { in bnad_tx_cleanup()
1116 bnad_txq_cleanup(bnad, tcb); in bnad_tx_cleanup()
1119 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_tx_cleanup()
1137 struct bna_tcb *tcb; in bnad_cb_tx_cleanup() local
1141 tcb = tx_info->tcb[i]; in bnad_cb_tx_cleanup()
1142 if (!tcb) in bnad_cb_tx_cleanup()
1517 if (tx_info->tcb[i] == NULL) in bnad_tx_msix_unregister()
1520 vector_num = tx_info->tcb[i]->intr_vector; in bnad_tx_msix_unregister()
1521 free_irq(bnad->msix_table[vector_num].vector, tx_info->tcb[i]); in bnad_tx_msix_unregister()
1537 vector_num = tx_info->tcb[i]->intr_vector; in bnad_tx_msix_register()
1538 snprintf(tx_info->tcb[i]->name, BNA_Q_NAME_SIZE, "%s TXQ %d", in bnad_tx_msix_register()
1540 tx_id + tx_info->tcb[i]->id); in bnad_tx_msix_register()
1543 tx_info->tcb[i]->name, in bnad_tx_msix_register()
1544 tx_info->tcb[i]); in bnad_tx_msix_register()
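
The MSI-X register/unregister hits show one vector per TxQ: each tcb carries an intr_vector, a per-queue name built as "%s TXQ %d", and the tcb pointer itself is the dev_id handed to request_irq()/free_irq(), with bnad_msix_tx (see the hits near the top) as the handler. The kernel-style sketch below reconstructs that registration loop; it is not the driver's actual code, error unwinding is omitted, and the use of bnad->netdev->name as the prefix is an assumption.

/*
 * Illustrative, kernel-style sketch (not the driver's actual function):
 * register one MSI-X vector per TxQ, passing the tcb as dev_id so that
 * bnad_msix_tx() can recover it from its data argument.
 */
#include <linux/interrupt.h>
#include "bnad.h"		/* driver types: struct bnad, struct bna_tcb, ... */

static int register_tx_vectors(struct bnad *bnad, struct bnad_tx_info *tx_info,
			       u32 tx_id, int num_txqs)
{
	int i, err;

	for (i = 0; i < num_txqs; i++) {
		struct bna_tcb *tcb = tx_info->tcb[i];

		snprintf(tcb->name, BNA_Q_NAME_SIZE, "%s TXQ %d",
			 bnad->netdev->name, tx_id + tcb->id);
		err = request_irq(bnad->msix_table[tcb->intr_vector].vector,
				  bnad_msix_tx, 0, tcb->name, tcb);
		if (err)
			return err;	/* the driver unwinds already-registered vectors here */
	}
	return 0;
}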
1930 if (tx_info->tcb[0]->intr_type == BNA_INTR_T_MSIX) in bnad_destroy_tx()
2414 if (bnad->tx_info[i].tcb[j]) { in bnad_netdev_qstats_fill()
2416 bnad->tx_info[i].tcb[j]->txq->tx_packets; in bnad_netdev_qstats_fill()
2418 bnad->tx_info[i].tcb[j]->txq->tx_bytes; in bnad_netdev_qstats_fill()
2805 bnad_txq_wi_prepare(struct bnad *bnad, struct bna_tcb *tcb, in bnad_txq_wi_prepare() argument
2817 vlan_tag = ((tcb->priority & 0x7) << VLAN_PRIO_SHIFT) in bnad_txq_wi_prepare()
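
The bnad_txq_wi_prepare() hit places the queue's 3-bit priority into the PCP field of the VLAN tag via VLAN_PRIO_SHIFT. A standalone sketch of that bit layout is below; the constants match <linux/if_vlan.h>, while the VID value and its combination with the priority are placeholders for illustration, not quoted from the driver.

/* Illustrative sketch: composing a VLAN TCI with a 3-bit priority in the
 * PCP bits, as in the hit above. */
#include <stdio.h>

#define VLAN_PRIO_SHIFT	13	/* as in <linux/if_vlan.h> */
#define VLAN_VID_MASK	0x0fff

int main(void)
{
	unsigned int priority = 5;	/* plays the role of tcb->priority */
	unsigned int vid = 100;		/* placeholder VLAN ID */
	unsigned short tag = ((priority & 0x7) << VLAN_PRIO_SHIFT) |
			     (vid & VLAN_VID_MASK);

	printf("vlan tag: 0x%04x\n", tag);	/* prints 0xa064 */
	return 0;
}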
2918 struct bna_tcb *tcb = NULL; in bnad_start_xmit() local
2946 tcb = bnad->tx_info[0].tcb[txq_id]; in bnad_start_xmit()
2952 if (unlikely(!tcb || !test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) { in bnad_start_xmit()
2958 q_depth = tcb->q_depth; in bnad_start_xmit()
2959 prod = tcb->producer_index; in bnad_start_xmit()
2960 unmap_q = tcb->unmap_q; in bnad_start_xmit()
2972 if (unlikely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) { in bnad_start_xmit()
2973 if ((*tcb->hw_consumer_index != tcb->consumer_index) && in bnad_start_xmit()
2974 !test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) { in bnad_start_xmit()
2976 sent = bnad_txcmpl_process(bnad, tcb); in bnad_start_xmit()
2977 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_start_xmit()
2978 bna_ib_ack(tcb->i_dbell, sent); in bnad_start_xmit()
2980 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_start_xmit()
2992 if (likely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) { in bnad_start_xmit()
3001 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod]; in bnad_start_xmit()
3005 if (bnad_txq_wi_prepare(bnad, tcb, skb, txqent)) { in bnad_start_xmit()
3034 /* Undo the changes starting at tcb->producer_index */ in bnad_start_xmit()
3036 tcb->producer_index); in bnad_start_xmit()
3048 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod]; in bnad_start_xmit()
3056 /* Undo the changes starting at tcb->producer_index */ in bnad_start_xmit()
3058 tcb->producer_index); in bnad_start_xmit()
3073 /* Undo the changes starting at tcb->producer_index */ in bnad_start_xmit()
3074 bnad_tx_buff_unmap(bnad, unmap_q, q_depth, tcb->producer_index); in bnad_start_xmit()
3081 tcb->producer_index = prod; in bnad_start_xmit()
3085 if (unlikely(!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_start_xmit()
3090 bna_txq_prod_indx_doorbell(tcb); in bnad_start_xmit()
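
Taken together, the bnad_start_xmit() hits trace the transmit hot path: check that the queue is started, compare the work items needed against BNA_QE_FREE_CNT, opportunistically reclaim completions under BNAD_TXQ_FREE_SENT when the ring looks full, prepare the work item at tcb->producer_index, unwind with bnad_tx_buff_unmap() on failure, then advance the producer index and ring bna_txq_prod_indx_doorbell(). The outline below is a reconstruction of that flow around the helper names visible in the hits, not the function itself; TSO/checksum handling, per-fragment DMA mapping, and the queue-stop/wakeup bookkeeping are elided, and "wis" is taken as a parameter rather than computed.

/*
 * Condensed, illustrative outline of the transmit hot path suggested by the
 * bnad_start_xmit() hits above (a sketch, not the driver's code).
 */
#include <linux/netdevice.h>
#include "bnad.h"		/* driver types: struct bnad, struct bna_tcb, ... */

static netdev_tx_t xmit_outline(struct bnad *bnad, struct bna_tcb *tcb,
				struct sk_buff *skb, u32 wis)
{
	struct bna_txq_entry *txqent;
	u32 prod, q_depth, sent;

	if (unlikely(!tcb || !test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) {
		dev_kfree_skb_any(skb);			/* queue not running: drop */
		return NETDEV_TX_OK;
	}

	q_depth = tcb->q_depth;
	prod = tcb->producer_index;

	if (unlikely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) {
		/* Ring looks full: reclaim completions here if nobody else is. */
		if (*tcb->hw_consumer_index != tcb->consumer_index &&
		    !test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) {
			sent = bnad_txcmpl_process(bnad, tcb);
			if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)))
				bna_ib_ack(tcb->i_dbell, sent);
			clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags);
		}
		if (likely(wis > BNA_QE_FREE_CNT(tcb, q_depth)))
			return NETDEV_TX_BUSY;		/* still full: let the stack retry */
	}

	txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod];
	if (bnad_txq_wi_prepare(bnad, tcb, skb, txqent)) {
		/* Header prep failed: undo from tcb->producer_index and drop. */
		bnad_tx_buff_unmap(bnad, tcb->unmap_q, q_depth, tcb->producer_index);
		dev_kfree_skb_any(skb);
		return NETDEV_TX_OK;
	}

	/* ... DMA-map skb data and fragments, filling entries and advancing prod ... */

	tcb->producer_index = prod;
	if (unlikely(!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)))
		return NETDEV_TX_OK;
	bna_txq_prod_indx_doorbell(tcb);		/* tell hardware new entries exist */
	return NETDEV_TX_OK;
}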