Home
last modified time | relevance | path

Searched refs: tx_req (Results 1–15 of 15) sorted by relevance

/linux/drivers/infiniband/ulp/ipoib/
H A Dipoib_ib.c275 int ipoib_dma_map_tx(struct ib_device *ca, struct ipoib_tx_buf *tx_req) in ipoib_dma_map_tx() argument
277 struct sk_buff *skb = tx_req->skb; in ipoib_dma_map_tx()
278 u64 *mapping = tx_req->mapping; in ipoib_dma_map_tx()
318 struct ipoib_tx_buf *tx_req) in ipoib_dma_unmap_tx() argument
320 struct sk_buff *skb = tx_req->skb; in ipoib_dma_unmap_tx()
321 u64 *mapping = tx_req->mapping; in ipoib_dma_unmap_tx()
389 struct ipoib_tx_buf *tx_req; in ipoib_ib_handle_tx_wc() local
400 tx_req = &priv->tx_ring[wr_id]; in ipoib_ib_handle_tx_wc()
402 ipoib_dma_unmap_tx(priv, tx_req); in ipoib_ib_handle_tx_wc()
405 dev->stats.tx_bytes += tx_req->skb->len; in ipoib_ib_handle_tx_wc()
[all …]
H A Dipoib_cm.c699 struct ipoib_tx_buf *tx_req) in post_send() argument
701 ipoib_build_sge(priv, tx_req); in post_send()
711 struct ipoib_tx_buf *tx_req; in ipoib_cm_send() local
750 tx_req = &tx->tx_ring[tx->tx_head & (ipoib_sendq_size - 1)]; in ipoib_cm_send()
751 tx_req->skb = skb; in ipoib_cm_send()
753 if (unlikely(ipoib_dma_map_tx(priv->ca, tx_req))) { in ipoib_cm_send()
778 rc = post_send(priv, tx, tx->tx_head & (ipoib_sendq_size - 1), tx_req); in ipoib_cm_send()
782 ipoib_dma_unmap_tx(priv, tx_req); in ipoib_cm_send()
799 struct ipoib_tx_buf *tx_req; in ipoib_cm_handle_tx_wc() local
811 tx_req = &tx->tx_ring[wr_id]; in ipoib_cm_handle_tx_wc()
[all …]
H A Dipoib.h539 int ipoib_dma_map_tx(struct ib_device *ca, struct ipoib_tx_buf *tx_req);
541 struct ipoib_tx_buf *tx_req);
546 struct ipoib_tx_buf *tx_req) in ipoib_build_sge() argument
549 struct sk_buff *skb = tx_req->skb; in ipoib_build_sge()
552 u64 *mapping = tx_req->mapping; in ipoib_build_sge()
/linux/arch/arm/mach-omap1/
H A Ddevices.c123 unsigned rx_req, unsigned tx_req, in omap_mmc_add() argument
143 res[3].start = tx_req; in omap_mmc_add()
178 unsigned rx_req, tx_req; in omap1_init_mmc() local
191 tx_req = 21; in omap1_init_mmc()
199 tx_req = 54; in omap1_init_mmc()
207 rx_req, tx_req, mmc_data[i]); in omap1_init_mmc()
/linux/drivers/net/wireless/realtek/rtw89/
H A Dcore.c463 struct rtw89_core_tx_request *tx_req, in rtw89_core_tx_update_ampdu_info() argument
466 struct ieee80211_sta *sta = tx_req->sta; in rtw89_core_tx_update_ampdu_info()
467 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_core_tx_update_ampdu_info()
468 struct sk_buff *skb = tx_req->skb; in rtw89_core_tx_update_ampdu_info()
500 struct rtw89_core_tx_request *tx_req) in rtw89_core_tx_update_sec_key() argument
507 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_core_tx_update_sec_key()
508 struct sk_buff *skb = tx_req->skb; in rtw89_core_tx_update_sec_key()
568 struct rtw89_core_tx_request *tx_req, in rtw89_core_get_mgmt_rate() argument
571 struct sk_buff *skb = tx_req->skb; in rtw89_core_get_mgmt_rate()
584 if (!vif || !vif->bss_conf.basic_rates || !tx_req->sta) in rtw89_core_get_mgmt_rate()
[all …]
H A Dpci.c1358 struct rtw89_core_tx_request *tx_req) in rtw89_pci_txwd_submit() argument
1362 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_pci_txwd_submit()
1366 struct sk_buff *skb = tx_req->skb; in rtw89_pci_txwd_submit()
1417 struct rtw89_core_tx_request *tx_req) in rtw89_pci_fwcmd_submit() argument
1421 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_pci_fwcmd_submit()
1425 struct sk_buff *skb = tx_req->skb; in rtw89_pci_fwcmd_submit()
1456 struct rtw89_core_tx_request *tx_req) in rtw89_pci_txbd_submit() argument
1467 return rtw89_pci_fwcmd_submit(rtwdev, tx_ring, txbd, tx_req); in rtw89_pci_txbd_submit()
1476 ret = rtw89_pci_txwd_submit(rtwdev, tx_ring, txwd, tx_req); in rtw89_pci_txbd_submit()
1500 static int rtw89_pci_tx_write(struct rtw89_dev *rtwdev, struct rtw89_core_tx_request *tx_req, in rtw89_pci_tx_write() argument
[all …]
H A Dcore.h3520 int (*tx_write)(struct rtw89_dev *rtwdev, struct rtw89_core_tx_request *tx_req);
5612 struct rtw89_core_tx_request *tx_req) in rtw89_hci_tx_write() argument
5614 return rtwdev->hci.ops->tx_write(rtwdev, tx_req); in rtw89_hci_tx_write()
/linux/drivers/usb/gadget/udc/
H A Dfsl_qe_udc.c1141 if (ep->tx_req != NULL) { in txcomplete()
1142 struct qe_req *req = ep->tx_req; in txcomplete()
1167 if (((ep->tx_req->req.length - ep->sent) <= 0) && !zlp) { in txcomplete()
1168 done(ep, ep->tx_req, 0); in txcomplete()
1169 ep->tx_req = NULL; in txcomplete()
1176 if (ep->tx_req == NULL) { in txcomplete()
1178 ep->tx_req = list_entry(ep->queue.next, struct qe_req, in txcomplete()
1195 size = min_t(u32, (ep->tx_req->req.length - ep->sent), in qe_usb_senddata()
1197 buf = (u8 *)ep->tx_req->req.buf + ep->sent; in qe_usb_senddata()
1200 ep->tx_req->req.actual += size; in qe_usb_senddata()
[all …]
H A Dfsl_qe_udc.h285 struct qe_req *tx_req; member
/linux/drivers/media/platform/qcom/venus/
H A Dhfi_venus.c68 u32 tx_req; member
196 qhdr->tx_req = 1; in venus_write_queue()
202 qhdr->tx_req = 0; in venus_write_queue()
237 struct iface_queue *queue, void *pkt, u32 *tx_req) in venus_read_queue() argument
273 *tx_req = 0; in venus_read_queue()
324 *tx_req = qhdr->tx_req ? 1 : 0; in venus_read_queue()
681 u32 tx_req; in venus_iface_msgq_read_nolock() local
689 ret = venus_read_queue(hdev, queue, pkt, &tx_req); in venus_iface_msgq_read_nolock()
693 if (tx_req) in venus_iface_msgq_read_nolock()
714 u32 tx_req; in venus_iface_dbgq_read_nolock() local
[all …]
/linux/drivers/net/wwan/t7xx/
H A Dt7xx_hif_cldma.c843 static int t7xx_cldma_gpd_handle_tx_request(struct cldma_queue *queue, struct cldma_request *tx_req, in t7xx_cldma_gpd_handle_tx_request() argument
847 struct cldma_gpd *gpd = tx_req->gpd; in t7xx_cldma_gpd_handle_tx_request()
851 tx_req->mapped_buff = dma_map_single(md_ctrl->dev, skb->data, skb->len, DMA_TO_DEVICE); in t7xx_cldma_gpd_handle_tx_request()
853 if (dma_mapping_error(md_ctrl->dev, tx_req->mapped_buff)) { in t7xx_cldma_gpd_handle_tx_request()
858 t7xx_cldma_gpd_set_data_ptr(gpd, tx_req->mapped_buff); in t7xx_cldma_gpd_handle_tx_request()
870 tx_req->skb = skb; in t7xx_cldma_gpd_handle_tx_request()
923 struct cldma_request *tx_req; in t7xx_cldma_send_skb() local
948 tx_req = queue->tx_next; in t7xx_cldma_send_skb()
949 if (queue->budget > 0 && !tx_req->skb) { in t7xx_cldma_send_skb()
953 t7xx_cldma_gpd_handle_tx_request(queue, tx_req, skb); in t7xx_cldma_send_skb()
[all …]
/linux/drivers/perf/hisilicon/
H A Dhisi_uncore_pa_pmu.c323 HISI_PMU_EVENT_ATTR(tx_req, 0x5c),
334 HISI_PMU_EVENT_ATTR(tx_req, 0x0),
H A Dhisi_uncore_sllc_pmu.c339 HISI_PMU_EVENT_ATTR(tx_req, 0x34),
/linux/drivers/rapidio/
H A Drio_cm.c138 struct tx_req { struct
674 struct tx_req *req, *_req; in rio_txcq_handler()
709 struct tx_req *treq; in riocm_queue_req()
/linux/drivers/net/ethernet/hisilicon/hns3/hns3pf/
H A Dhclge_main.c9944 struct hclge_tx_vlan_type_cfg_cmd *tx_req; in hclge_set_vlan_protocol_type() local
9969 tx_req = (struct hclge_tx_vlan_type_cfg_cmd *)desc.data; in hclge_set_vlan_protocol_type()
9970 tx_req->ot_vlan_type = cpu_to_le16(hdev->vlan_type_cfg.tx_ot_vlan_type); in hclge_set_vlan_protocol_type()
9971 tx_req->in_vlan_type = cpu_to_le16(hdev->vlan_type_cfg.tx_in_vlan_type); in hclge_set_vlan_protocol_type()