Lines Matching full:txp

In hfi1_ipoib_build_ulp_payload() (txp is an argument):
  200  struct ipoib_txparms *txp)
  202  struct hfi1_devdata *dd = txp->dd;

In hfi1_ipoib_build_tx_desc() (txp is an argument):
  231  struct ipoib_txparms *txp)
  233  struct hfi1_devdata *dd = txp->dd;
  237  sizeof(sdma_hdr->pbc) + (txp->hdr_dwords << 2) + tx->skb->len;
  248  sizeof(sdma_hdr->pbc) + (txp->hdr_dwords << 2));
  253  return hfi1_ipoib_build_ulp_payload(tx, txp);

In hfi1_ipoib_build_ib_tx_headers() (txp is an argument):
  257  struct ipoib_txparms *txp)
  262  struct hfi1_pportdata *ppd = ppd_from_ibp(txp->ibp);
  263  struct rdma_ah_attr *ah_attr = txp->ah_attr;
  283  txp->hdr_dwords = 7;
  287  txp->hdr_dwords +=
  288  hfi1_make_grh(txp->ibp,
  291  txp->hdr_dwords - LRH_9B_DWORDS,
  301  lrh0 |= (txp->flow.sc5 & 0xf) << 12;
  319  dwords = txp->hdr_dwords + payload_dwords;
  329  ohdr->bth[1] = cpu_to_be32(txp->dqpn);
  330  ohdr->bth[2] = cpu_to_be32(mask_psn((u32)txp->txq->tx_ring.sent_txreqs));
  334  ohdr->u.ud.deth[1] = cpu_to_be32((txp->entropy <<
  340  ib_is_sc5(txp->flow.sc5) <<
  343  sc_to_vlt(priv->dd, txp->flow.sc5),
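
One detail worth spelling out for the matches above: the assignment at line 283 seeds txp->hdr_dwords with 7 because a 9B UD packet without a GRH carries LRH + BTH + DETH, and the GRH length returned by hfi1_make_grh() is added on top at lines 287-291. A minimal standalone sketch of that arithmetic follows; the per-header dword counts are standard InfiniBand header sizes assumed here, not copied from the driver.

    #include <stdio.h>

    /* Hypothetical illustration of the hdr_dwords bookkeeping at lines
     * 283-291: the base 9B UD header is 7 dwords, and any GRH dwords
     * from hfi1_make_grh() are added on top.  Sizes below are standard
     * IB header sizes, assumed rather than read from the file.
     */
    enum {
            LRH_9B_DWORDS = 2,      /* 8-byte Local Route Header              */
            BTH_DWORDS    = 3,      /* 12-byte Base Transport Header          */
            DETH_DWORDS   = 2       /* 8-byte Datagram Ext. Transport Header  */
    };

    int main(void)
    {
            int base = LRH_9B_DWORDS + BTH_DWORDS + DETH_DWORDS;

            printf("base 9B UD header: %d dwords (%d bytes)\n", base, base * 4);
            return 0;
    }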

In hfi1_ipoib_send_dma_common() (txp is an argument):
  350  struct ipoib_txparms *txp)
  353  struct hfi1_ipoib_txq *txq = txp->txq;
  382  hfi1_ipoib_build_ib_tx_headers(tx, txp);
  384  ret = hfi1_ipoib_build_tx_desc(tx, txp);
  386  if (txq->flow.as_int != txp->flow.as_int) {
  387  txq->flow.tx_queue = txp->flow.tx_queue;
  388  txq->flow.sc5 = txp->flow.sc5;
  391  txp->flow.tx_queue,
  392  txp->flow.sc5);
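
The comparison at line 386 (txq->flow.as_int != txp->flow.as_int) next to the per-field copies at lines 387-388 only makes sense if tx_queue and sc5 are overlaid by a single small scalar. A sketch of such a union, reconstructed purely from the flow.* accesses in this listing (field widths, ordering and packing are assumptions), could look like:

    #include <linux/types.h>

    /* Sketch only: reconstructed from the flow.as_int / flow.tx_queue /
     * flow.sc5 accesses shown above.  Field widths, ordering and packing
     * are assumptions, not taken from the driver headers.
     */
    union hfi1_ipoib_flow {
            u16 as_int;             /* compare or copy both fields at once */
            struct {
                    u8 tx_queue;    /* netdev TX queue chosen for the skb  */
                    u8 sc5;         /* service class derived from the SL   */
            } __packed;
    };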

In hfi1_ipoib_send_dma_single() (txp is an argument):
  458  struct ipoib_txparms *txp)
  460  struct hfi1_ipoib_txq *txq = txp->txq;
  465  tx = hfi1_ipoib_send_dma_common(dev, skb, txp);
  488  ib_is_sc5(txp->flow.sc5));

In hfi1_ipoib_send_dma_list() (txp is an argument):
  509  struct ipoib_txparms *txp)
  511  struct hfi1_ipoib_txq *txq = txp->txq;
  516  if (txq->flow.as_int != txp->flow.as_int) {
  528  tx = hfi1_ipoib_send_dma_common(dev, skb, txp);
  552  ib_is_sc5(txp->flow.sc5));

In hfi1_ipoib_send() (txp is a local):
  577  struct ipoib_txparms txp;
  590  txp.dd = priv->dd;
  591  txp.ah_attr = &ibah_to_rvtah(address)->attr;
  592  txp.ibp = to_iport(priv->device, priv->port_num);
  593  txp.txq = &priv->txqs[skb_get_queue_mapping(skb)];
  594  txp.dqpn = dqpn;
  595  txp.flow.sc5 = txp.ibp->sl_to_sc[rdma_ah_get_sl(txp.ah_attr)];
  596  txp.flow.tx_queue = (u8)skb_get_queue_mapping(skb);
  597  txp.entropy = hfi1_ipoib_calc_entropy(skb);
  599  if (netdev_xmit_more() || !list_empty(&txp.txq->tx_list))
  600  return hfi1_ipoib_send_dma_list(dev, skb, &txp);
  602  return hfi1_ipoib_send_dma_single(dev, skb, &txp);
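
Taken together, the matches touch every member of the parameter block that hfi1_ipoib_send() fills in at lines 590-597 and that the build/send helpers consume. A reconstruction of what struct ipoib_txparms plausibly looks like, inferred only from those accesses (member order and the exact integer types are assumptions), follows:

    /* Sketch only: members inferred from the txp-> and txp. accesses in
     * this listing; ordering and exact integer types are assumptions.
     * The flow union is the one sketched earlier in this listing.
     */
    struct ipoib_txparms {
            struct hfi1_devdata     *dd;         /* device data (line 590)             */
            struct rdma_ah_attr     *ah_attr;    /* address handle attributes (591)    */
            struct hfi1_ibport      *ibp;        /* IB port, SL->SC and GRH (592, 595) */
            struct hfi1_ipoib_txq   *txq;        /* per-queue TX state (593)           */
            union hfi1_ipoib_flow    flow;       /* tx_queue + sc5 pair (595-596)      */
            u32                      dqpn;       /* destination QPN for the BTH (594)  */
            u8                       hdr_dwords; /* 9B header length in dwords (283)   */
            u8                       entropy;    /* entropy placed in DETH[1] (597)    */
    };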