/linux/drivers/net/ethernet/fungible/funeth/
  funeth_tx.c
    349  unsigned int ndesc;   in fun_start_xmit() local
    357  ndesc = write_pkt_desc(skb, q, tls_len);   in fun_start_xmit()
    358  if (unlikely(!ndesc)) {   in fun_start_xmit()
    363  q->prod_cnt += ndesc;   in fun_start_xmit()
    425  unsigned int npkts = 0, nbytes = 0, ndesc = 0;   in fun_txq_reclaim() local
    448  ndesc += pkt_desc;   in fun_txq_reclaim()
    454  q->cons_cnt += ndesc;   in fun_txq_reclaim()
    486  unsigned int npkts = 0, ndesc = 0, head, reclaim_idx;   in fun_xdpq_clean() local
    505  ndesc += pkt_desc;   in fun_xdpq_clean()
    510  q->cons_cnt += ndesc;   in fun_xdpq_clean()
    [all …]

  funeth_txrx.h
    255  unsigned int ndesc, struct fun_irq *irq, int state,
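The funeth_tx.c hits above follow a plain producer/consumer accounting scheme: the xmit path records how many descriptors a packet consumed and advances q->prod_cnt, while the reclaim/clean paths sum the per-packet descriptor counts of completed work and advance q->cons_cnt. A minimal sketch of that scheme, using made-up toy_* names rather than funeth's real API:

/* Sketch only: "ndesc" accounting with separate producer/consumer
 * counters.  All names here are illustrative, not funeth's. */
struct toy_txq {
	unsigned int ndesc;     /* ring capacity, in descriptors */
	unsigned int prod_cnt;  /* descriptors handed to the NIC */
	unsigned int cons_cnt;  /* descriptors reclaimed after completion */
};

/* Transmit side: advance the producer by however many descriptors
 * the packet just written actually used. */
static void toy_tx_produce(struct toy_txq *q, unsigned int ndesc)
{
	q->prod_cnt += ndesc;
}

/* Completion side: sum descriptors over a batch of finished packets
 * and advance the consumer once for the whole batch. */
static void toy_tx_reclaim(struct toy_txq *q, const unsigned int *pkt_desc,
			   unsigned int npkts)
{
	unsigned int i, ndesc = 0;

	for (i = 0; i < npkts; i++)
		ndesc += pkt_desc[i];
	q->cons_cnt += ndesc;
}

/* Free slots are the ring size minus what is still in flight. */
static unsigned int toy_tx_avail(const struct toy_txq *q)
{
	return q->ndesc - (q->prod_cnt - q->cons_cnt);
}

Keeping two monotonically increasing counters, rather than one shared "free" count, lets the producer and consumer sides update their own counter independently.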
/linux/drivers/net/ethernet/altera/
  altera_sgdma.c
     13  struct sgdma_descrip __iomem *ndesc,
    173  struct sgdma_descrip __iomem *ndesc = &descbase[1];   in sgdma_tx_buffer() local
    180  ndesc, /* next descriptor */   in sgdma_tx_buffer()
    181  sgdma_txphysaddr(priv, ndesc),   in sgdma_tx_buffer()
    294  struct sgdma_descrip __iomem *ndesc,   in sgdma_setup_descrip() argument
    305  u32 ctrl = csrrd8(ndesc, sgdma_descroffs(control));   in sgdma_setup_descrip()
    307  csrwr8(ctrl, ndesc, sgdma_descroffs(control));   in sgdma_setup_descrip()
    342  struct sgdma_descrip __iomem *ndesc = &descbase[1];   in sgdma_async_read() local
    353  ndesc, /* next descriptor */   in sgdma_async_read()
    354  sgdma_rxphysaddr(priv, ndesc),   in sgdma_async_read()
/linux/drivers/net/wireless/mediatek/mt76/
  dma.c
    193  Q_WRITE(q, ring_size, MT_DMA_RRO_EN | q->ndesc);   in mt76_dma_sync_idx()
    195  Q_WRITE(q, ring_size, q->ndesc);   in mt76_dma_sync_idx()
    203  if (!q || !q->ndesc)   in __mt76_dma_queue_reset()
    210  for (i = 0; i < q->ndesc; i++)   in __mt76_dma_queue_reset()
    278  q->head = (q->head + 1) % q->ndesc;   in mt76_dma_add_rx_buf()
    303  next = (q->head + 1) % q->ndesc;   in mt76_dma_add_buf()
    387  if (!q || !q->ndesc)   in mt76_dma_tx_cleanup()
    495  q->tail = (q->tail + 1) % q->ndesc;   in mt76_dma_dequeue()
    511  if (q->queued + 1 >= q->ndesc - 1)   in mt76_dma_tx_queue_skb_raw()
    590  if (q->queued + (tx_info.nbuf + 1) / 2 >= q->ndesc - 1) {   in mt76_dma_tx_queue_skb()
    [all …]

  usb.c
    446  q->tail = (q->tail + 1) % q->ndesc;   in mt76u_get_next_rx_entry()
    581  q->head = (q->head + 1) % q->ndesc;   in mt76u_complete_rx()
    648  for (i = 0; i < q->ndesc; i++) {   in mt76u_submit_rx_buffers()
    677  q->ndesc = MT_NUM_RX_ENTRIES;   in mt76u_alloc_rx_queue()
    680  for (i = 0; i < q->ndesc; i++) {   in mt76u_alloc_rx_queue()
    700  for (i = 0; i < q->ndesc; i++) {   in mt76u_free_rx_queue()
    731  for (j = 0; j < q->ndesc; j++)   in mt76u_stop_rx()
    745  for (j = 0; j < q->ndesc; j++)   in mt76u_resume_rx()
    864  if (q->queued == q->ndesc)   in mt76u_tx_queue_skb()
    879  q->head = (q->head + 1) % q->ndesc;   in mt76u_tx_queue_skb()
    [all …]

  sdio.c
    315  q->ndesc = MT76S_NUM_RX_ENTRIES;   in mt76s_alloc_rx_queue()
    338  q->ndesc = MT76S_NUM_TX_ENTRIES;   in mt76s_alloc_tx_queue()
    374  q->tail = (q->tail + 1) % q->ndesc;   in mt76s_get_next_rx_entry()
    529  if (q->queued == q->ndesc)   in mt76s_tx_queue_skb()
    543  q->head = (q->head + 1) % q->ndesc;   in mt76s_tx_queue_skb()
    562  if (q->queued == q->ndesc) {   in mt76s_tx_queue_skb_raw()
    574  q->head = (q->head + 1) % q->ndesc;   in mt76s_tx_queue_skb_raw()
    622  for (j = 0; j < q->ndesc; j++) {   in mt76s_deinit()

  wed.c
    102  if (!q || !q->ndesc)   in mt76_wed_dma_setup()
    142  q->head = q->ndesc - 1;   in mt76_wed_dma_setup()
    149  q->head = q->ndesc - 1;   in mt76_wed_dma_setup()

  sdio_txrx.c
    119  int index = (q->head + i) % q->ndesc;   in mt76s_rx_run_queue()
    133  if (q->queued + i + 1 == q->ndesc)   in mt76s_rx_run_queue()
    142  q->head = (q->head + i) % q->ndesc;   in mt76s_rx_run_queue()
    292  q->first = (q->first + 1) % q->ndesc;   in mt76s_tx_run_queue()

  debugfs.c
     83  queued = mt76_is_usb(dev) ? q->ndesc - q->queued : q->queued;   in mt76_rx_queues_read()

  tx.c
    453  q->queued + MT_TXQ_FREE_THR >= q->ndesc;   in mt76_txq_stopped()
    539  q->queued + 2 * MT_TXQ_FREE_THR >= q->ndesc) {   in mt76_txq_schedule_list()
    806  q->tail = (q->tail + 1) % q->ndesc;   in mt76_queue_tx_complete()
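Across dma.c, usb.c, sdio.c, sdio_txrx.c and tx.c above, mt76 treats q->ndesc as the ring size and leans on two idioms: advance head/tail modulo q->ndesc, and stop queueing when q->queued gets close to q->ndesc. A small sketch of both idioms, with illustrative toy_* names rather than mt76's structures:

#include <stdbool.h>

/* Sketch only: ring indexing with ndesc as the ring size. */
struct toy_ring {
	unsigned int ndesc;   /* number of descriptor slots */
	unsigned int head;    /* next slot the driver fills */
	unsigned int tail;    /* next slot hardware completes */
	unsigned int queued;  /* slots currently in flight */
};

static bool toy_ring_add(struct toy_ring *q)
{
	/* leave headroom so the ring never completely fills */
	if (q->queued + 1 >= q->ndesc - 1)
		return false;
	q->head = (q->head + 1) % q->ndesc;   /* wrap around the ring */
	q->queued++;
	return true;
}

static void toy_ring_complete(struct toy_ring *q)
{
	if (!q->queued)
		return;
	q->tail = (q->tail + 1) % q->ndesc;
	q->queued--;
}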
/linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
  chcr_ktls.c
     844  u32 len, cpl = 0, ndesc, wr_len, wr_mid = 0;   in chcr_ktls_xmit_tcb_cpls() local
     852  ndesc = DIV_ROUND_UP(len, 64);   in chcr_ktls_xmit_tcb_cpls()
     854  credits = chcr_txq_avail(&q->q) - ndesc;   in chcr_ktls_xmit_tcb_cpls()
     922  ndesc = DIV_ROUND_UP(len, 64);   in chcr_ktls_xmit_tcb_cpls()
     923  chcr_txq_advance(&q->q, ndesc);   in chcr_ktls_xmit_tcb_cpls()
     924  cxgb4_ring_tx_db(tx_info->adap, &q->q, ndesc);   in chcr_ktls_xmit_tcb_cpls()
     995  unsigned int ndesc;   in chcr_ktls_write_tcp_options() local
    1016  ndesc = DIV_ROUND_UP(len16, 4);   in chcr_ktls_write_tcp_options()
    1018  credits = chcr_txq_avail(&q->q) - ndesc;   in chcr_ktls_write_tcp_options()
    1079  chcr_txq_advance(&q->q, ndesc);   in chcr_ktls_write_tcp_options()
    [all …]

/linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ipsec/
  chcr_ipsec.c
    576  u16 ndesc;   in ch_ipsec_crypto_wreq() local
    585  ndesc = DIV_ROUND_UP(flits, 2);   in ch_ipsec_crypto_wreq()
    601  wr_mid = FW_CRYPTO_LOOKASIDE_WR_LEN16_V(ndesc);   in ch_ipsec_crypto_wreq()
    614  wr->req.ulptx.len = htonl(ndesc - 1);   in ch_ipsec_crypto_wreq()
    711  unsigned int last_desc, ndesc, flits = 0;   in ch_ipsec_xmit() local
    741  ndesc = flits_to_desc(flits);   in ch_ipsec_xmit()
    742  credits = txq_avail(&q->q) - ndesc;   in ch_ipsec_xmit()
    748  dev->name, qidx, credits, ndesc, txq_avail(&q->q),   in ch_ipsec_xmit()
    753  last_desc = q->q.pidx + ndesc - 1;   in ch_ipsec_xmit()
    791  txq_advance(&q->q, ndesc);   in ch_ipsec_xmit()
    [all …]
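Both Chelsio inline-crypto paths above size each work request in descriptors before touching the ring: compute ndesc from the request length, compare it against the free descriptors ("credits"), and only then advance the producer index and ring the doorbell. A rough sketch of that sizing-and-check step, assuming 64-byte descriptors as DIV_ROUND_UP(len, 64) in the ktls hits suggests; the toy_* names are made up:

#define TOY_DESC_SIZE	64u   /* bytes per hardware TX descriptor (assumed) */

/* Returns the descriptor count for a work request of wr_len bytes, or a
 * negative value if the queue lacks that many free descriptors. */
static int toy_wr_ndesc(unsigned int wr_len, unsigned int txq_avail)
{
	unsigned int ndesc = (wr_len + TOY_DESC_SIZE - 1) / TOY_DESC_SIZE;

	if (txq_avail < ndesc)
		return -1;          /* stop the queue, retry later */
	return ndesc;               /* advance pidx by ndesc, ring doorbell */
}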
/linux/drivers/usb/mon/
  mon_text.c
    200  int i, ndesc;   in mon_text_event() local
    233  if ((ndesc = urb->number_of_packets) > ISODESC_MAX)   in mon_text_event()
    234  ndesc = ISODESC_MAX;   in mon_text_event()
    237  for (i = 0; i < ndesc; i++) {   in mon_text_event()
    591  int ndesc; /* Display this many */   in mon_text_read_isodesc() local
    597  ndesc = ep->numdesc;   in mon_text_read_isodesc()
    598  if (ndesc > ISODESC_MAX)   in mon_text_read_isodesc()
    599  ndesc = ISODESC_MAX;   in mon_text_read_isodesc()
    600  if (ndesc < 0)   in mon_text_read_isodesc()
    601  ndesc = 0;   in mon_text_read_isodesc()
    [all …]
/linux/drivers/net/ethernet/mediatek/
  airoha_eth.c
     743  int ndesc;   member
    1418  while (q->queued < q->ndesc - 1) {   in airoha_qdma_fill_rx_queue()
    1430  q->head = (q->head + 1) % q->ndesc;   in airoha_qdma_fill_rx_queue()
    1505  q->tail = (q->tail + 1) % q->ndesc;   in airoha_qdma_rx_process()
    1561  struct airoha_qdma *qdma, int ndesc)   in airoha_qdma_init_rx_queue() argument
    1578  q->ndesc = ndesc;   in airoha_qdma_init_rx_queue()
    1581  q->entry = devm_kzalloc(eth->dev, q->ndesc * sizeof(*q->entry),   in airoha_qdma_init_rx_queue()
    1594  q->desc = dmam_alloc_coherent(eth->dev, q->ndesc * sizeof(*q->desc),   in airoha_qdma_init_rx_queue()
    1604  FIELD_PREP(RX_RING_SIZE_MASK, ndesc));   in airoha_qdma_init_rx_queue()
    1606  thr = clamp(ndesc >> 3, 1, 32);   in airoha_qdma_init_rx_queue()
    [all …]
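airoha_qdma_init_rx_queue() above sizes both the per-slot software state and the DMA-coherent descriptor array from ndesc. A rough sketch of that allocation step; the real driver uses the managed devm_kzalloc()/dmam_alloc_coherent() variants, while this sketch uses plain kcalloc()/dma_alloc_coherent() and made-up toy_* types:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/types.h>

struct toy_desc { __le32 ctrl, addr, data, msg; };   /* layout is illustrative */

struct toy_rx_queue {
	int ndesc;               /* ring size in descriptors */
	void **entry;            /* one software slot per descriptor */
	struct toy_desc *desc;   /* hardware-visible descriptor array */
	dma_addr_t dma_addr;
};

static int toy_init_rx_queue(struct device *dev, struct toy_rx_queue *q, int ndesc)
{
	q->ndesc = ndesc;

	q->entry = kcalloc(ndesc, sizeof(*q->entry), GFP_KERNEL);
	if (!q->entry)
		return -ENOMEM;

	q->desc = dma_alloc_coherent(dev, ndesc * sizeof(*q->desc),
				     &q->dma_addr, GFP_KERNEL);
	if (!q->desc) {
		kfree(q->entry);
		return -ENOMEM;
	}
	return 0;
}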
/linux/drivers/net/ethernet/chelsio/cxgb3/
  sge.c
    1087  static void write_wr_hdr_sgl(unsigned int ndesc, struct sk_buff *skb,   in write_wr_hdr_sgl() argument
    1105  if (likely(ndesc == 1)) {   in write_wr_hdr_sgl()
    1128  ndesc--;   in write_wr_hdr_sgl()
    1158  WARN_ON(ndesc != 0);   in write_wr_hdr_sgl()
    1179  struct sge_txq *q, unsigned int ndesc,   in write_tx_pkt_wr() argument
    1237  sgp = ndesc == 1 ? (struct sg_ent *)&d->flit[flits] : sgl;   in write_tx_pkt_wr()
    1240  write_wr_hdr_sgl(ndesc, skb, d, pidx, q, sgl, flits, sgl_flits, gen,   in write_tx_pkt_wr()
    1263  unsigned int ndesc, pidx, credits, gen, compl;   in t3_eth_xmit() local
    1288  ndesc = calc_tx_descs(skb);   in t3_eth_xmit()
    1290  if (unlikely(credits < ndesc)) {   in t3_eth_xmit()
    [all …]
/linux/drivers/infiniband/hw/qib/
  qib_verbs.c
    143  u32 ndesc = 1; /* count the header */   in qib_count_sge() local
    150  ndesc = 0;   in qib_count_sge()
    153  ndesc++;   in qib_count_sge()
    173  return ndesc;   in qib_count_sge()
    782  u32 ndesc;   in qib_verbs_send_dma() local
    817  ndesc = qib_count_sge(ss, len);   in qib_verbs_send_dma()
    818  if (ndesc >= ppd->sdma_descq_cnt)   in qib_verbs_send_dma()
    819  ndesc = 0;   in qib_verbs_send_dma()
    821  ndesc = 1;   in qib_verbs_send_dma()
    822  if (ndesc) {   in qib_verbs_send_dma()
    [all …]

  qib_user_sdma.c
     803  int *maxpkts, int *ndesc)   in qib_user_sdma_queue_pkts() argument
    1023  *ndesc += pkt->naddr;   in qib_user_sdma_queue_pkts()
    1409  int ndesc = 0;   in qib_user_sdma_writev() local
    1412  iov, dim, &list, &mxp, &ndesc);   in qib_user_sdma_writev()
    1425  if (qib_sdma_descq_freecnt(ppd) < ndesc) {   in qib_user_sdma_writev()
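The qib hits above count descriptors before committing anything: qib_count_sge() starts ndesc at one for the header, and the send paths back off when the count does not fit the SDMA queue. A generic sketch of that count-then-check shape; the per-chunk split rule and all toy_* names are illustrative assumptions, not qib's actual logic:

#define TOY_MAX_DESC_LEN 4096u   /* assumed per-descriptor payload limit */

static unsigned int toy_count_desc(const unsigned int *chunk_len, int nchunks)
{
	unsigned int ndesc = 1;   /* count the header descriptor */
	int i;

	for (i = 0; i < nchunks; i++)
		ndesc += (chunk_len[i] + TOY_MAX_DESC_LEN - 1) / TOY_MAX_DESC_LEN;
	return ndesc;
}

static int toy_send(const unsigned int *chunk_len, int nchunks,
		    unsigned int descq_free)
{
	unsigned int ndesc = toy_count_desc(chunk_len, nchunks);

	if (ndesc > descq_free)
		return -1;   /* caller queues the packet and waits for space */
	return 0;
}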
/linux/drivers/net/ethernet/chelsio/cxgb4vf/
  sge.c
    1162  unsigned int flits, ndesc;   in t4vf_eth_xmit() local
    1213  ndesc = flits_to_desc(flits);   in t4vf_eth_xmit()
    1214  credits = txq_avail(&txq->q) - ndesc;   in t4vf_eth_xmit()
    1364  ndesc, credits, txq->q.pidx, skb->len, ssi->nr_frags);   in t4vf_eth_xmit()
    1435  last_desc = tq->pidx + ndesc - 1;   in t4vf_eth_xmit()
    1446  txq_advance(&txq->q, ndesc);   in t4vf_eth_xmit()
    1448  ring_tx_db(adapter, &txq->q, ndesc);   in t4vf_eth_xmit()
/linux/drivers/infiniband/ulp/srp/
  ib_srp.h
    347  unsigned int ndesc;   member

/linux/drivers/dma/
  idma64.h
    117  unsigned int ndesc;   member
/linux/drivers/net/ethernet/atheros/
  ag71xx.c
    1447  int i, ring_mask, ndesc, split;   in ag71xx_fill_dma_desc() local
    1451  ndesc = 0;   in ag71xx_fill_dma_desc()
    1460  i = (ring->curr + ndesc) & ring_mask;   in ag71xx_fill_dma_desc()
    1484  if (!ndesc)   in ag71xx_fill_dma_desc()
    1488  ndesc++;   in ag71xx_fill_dma_desc()
    1491  return ndesc;   in ag71xx_fill_dma_desc()
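ag71xx_fill_dma_desc() above walks a power-of-two ring, so instead of a modulo it masks the index with ring_mask (presumably the ring size minus one) and returns how many descriptors the packet consumed. A tiny sketch of that indexing with illustrative toy_* names:

/* Sketch only: fill nslots descriptors in a power-of-two ring. */
static void toy_fill_slot(unsigned int idx) { (void)idx; /* stub */ }

static unsigned int toy_fill_ring(unsigned int curr, unsigned int ring_size,
				  unsigned int nslots)
{
	unsigned int ring_mask = ring_size - 1;   /* ring_size must be a power of two */
	unsigned int ndesc;

	for (ndesc = 0; ndesc < nslots; ndesc++) {
		unsigned int i = (curr + ndesc) & ring_mask;

		toy_fill_slot(i);          /* a real driver writes descriptor i here */
	}
	return ndesc;                          /* descriptors consumed */
}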
/linux/drivers/crypto/inside-secure/
  safexcel.c
    1016  int ret, i, nreq, ndesc, tot_descs, handled = 0;   in safexcel_handle_result_descriptor() local
    1032  ndesc = ctx->handle_result(priv, ring, req,   in safexcel_handle_result_descriptor()
    1034  if (ndesc < 0) {   in safexcel_handle_result_descriptor()
    1036  ndesc);   in safexcel_handle_result_descriptor()
    1046  tot_descs += ndesc;   in safexcel_handle_result_descriptor()

  safexcel_cipher.c
    619  int ndesc = 0;   in safexcel_handle_req_result() local
    638  ndesc++;   in safexcel_handle_req_result()
    670  return ndesc;   in safexcel_handle_req_result()
    925  int ndesc = 0, enq_ret;   in safexcel_handle_inv_result() local
    944  ndesc++;   in safexcel_handle_inv_result()
    955  return ndesc;   in safexcel_handle_inv_result()
    973  return ndesc;   in safexcel_handle_inv_result()
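The safexcel hits above share a completion-loop shape: a per-request handler returns how many result descriptors it consumed, or a negative value on error, and the caller accumulates the total before acknowledging the ring. A short sketch of that shape with made-up toy_* names:

/* Sketch only: accumulate per-request result-descriptor counts. */
static int toy_handle_results(int (*handle_result)(void *req), void **reqs,
			      int nreq)
{
	int i, ndesc, tot_descs = 0;

	for (i = 0; i < nreq; i++) {
		ndesc = handle_result(reqs[i]);
		if (ndesc < 0)
			return ndesc;      /* propagate the handler's error */
		tot_descs += ndesc;        /* descriptors to acknowledge to HW */
	}
	return tot_descs;
}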
/linux/drivers/net/ethernet/rdc/
  r6040.c
    166  __le32 ndesc; /* 8-B */   member
    302  desc->ndesc = cpu_to_le32(mapping);   in r6040_init_ring_desc()
    307  desc->ndesc = cpu_to_le32(desc_dma);   in r6040_init_ring_desc()
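In r6040 the descriptor field named ndesc is not a count: it holds the DMA address of the next descriptor, and ring setup chains each descriptor to its successor, with the last one pointing back at the first. A small sketch of that chaining with illustrative toy_* names:

#include <asm/byteorder.h>
#include <linux/types.h>

struct toy_desc {
	__le32 status, len, buf;
	__le32 ndesc;            /* DMA address of the next descriptor */
};

/* ring: CPU view of the descriptors; ring_dma: their DMA base address. */
static void toy_init_ring_desc(struct toy_desc *ring, dma_addr_t ring_dma, int n)
{
	dma_addr_t mapping = ring_dma;
	int i;

	for (i = 0; i < n - 1; i++) {
		mapping += sizeof(*ring);               /* address of descriptor i + 1 */
		ring[i].ndesc = cpu_to_le32(mapping);
	}
	ring[n - 1].ndesc = cpu_to_le32(ring_dma);      /* wrap back to the first */
}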
/linux/drivers/net/ethernet/chelsio/cxgb4/
  cxgb4_tc_mqprio.c
    109  eosw_txq->ndesc = CXGB4_EOSW_TXQ_DEFAULT_DESC_NUM;   in cxgb4_init_eosw_txq()
    126  cxgb4_eosw_txq_free_desc(adap, eosw_txq, eosw_txq->ndesc);   in cxgb4_clean_eosw_txq()