
Searched refs:qidx (Results 1 – 25 of 41) sorted by relevance


/linux/drivers/net/ethernet/marvell/octeontx2/nic/
otx2_xsk.c
36 static int otx2_xsk_ctx_disable(struct otx2_nic *pfvf, u16 qidx, int aura_id) in otx2_xsk_ctx_disable() argument
47 cn10k_rq_aq->qidx = qidx; in otx2_xsk_ctx_disable()
56 rq_aq->qidx = qidx; in otx2_xsk_ctx_disable()
91 static void otx2_clean_up_rq(struct otx2_nic *pfvf, int qidx) in otx2_clean_up_rq() argument
102 cq = &qset->cq[qidx]; in otx2_clean_up_rq()
104 otx2_cleanup_rx_cqes(pfvf, cq, qidx); in otx2_clean_up_rq()
106 pool = &pfvf->qset.pool[qidx]; in otx2_clean_up_rq()
107 iova = otx2_aura_allocptr(pfvf, qidx); in otx2_clean_up_rq()
111 iova = otx2_aura_allocptr(pfvf, qidx); in otx2_clean_up_rq()
115 otx2_xsk_ctx_disable(pfvf, qidx, qidx); in otx2_clean_up_rq()
[all …]
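
The otx2_clean_up_rq() matches above suggest a drain step: buffer IOVAs are pulled from the queue's aura with otx2_aura_allocptr() until it runs dry, before the RQ/aura context is disabled via otx2_xsk_ctx_disable(). A minimal user-space sketch of that pattern follows; the "pool" is a plain array standing in for the hardware aura, not the real driver code.

/*
 * Illustrative only: models the drain loop hinted at in otx2_clean_up_rq().
 * The pool below is a trivial stand-in for the hardware aura.
 */
#include <stdint.h>
#include <stdio.h>

#define POOL_DEPTH 4

static uint64_t fake_pool[POOL_DEPTH] = { 0x1000, 0x2000, 0x3000, 0x4000 };
static int fake_pool_top = POOL_DEPTH;

/* stand-in for otx2_aura_allocptr(): returns 0 once the aura is empty */
static uint64_t pool_allocptr(void)
{
	return fake_pool_top ? fake_pool[--fake_pool_top] : 0;
}

int main(void)
{
	uint64_t iova;

	while ((iova = pool_allocptr()) != 0)
		printf("freeing buffer at iova 0x%llx\n", (unsigned long long)iova);

	printf("aura drained, RQ/aura context can be disabled\n");
	return 0;
}
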
qos_sq.c
33 static int otx2_qos_sq_aura_pool_init(struct otx2_nic *pfvf, int qidx) in otx2_qos_sq_aura_pool_init() argument
56 pool_id = otx2_get_pool_idx(pfvf, AURA_NIX_SQ, qidx); in otx2_qos_sq_aura_pool_init()
76 sq = &qset->sq[qidx]; in otx2_qos_sq_aura_pool_init()
116 static void otx2_qos_sq_free_sqbs(struct otx2_nic *pfvf, int qidx) in otx2_qos_sq_free_sqbs() argument
124 sq = &qset->sq[qidx]; in otx2_qos_sq_free_sqbs()
140 sq = &qset->sq[qidx]; in otx2_qos_sq_free_sqbs()
151 static void otx2_qos_sqb_flush(struct otx2_nic *pfvf, int qidx) in otx2_qos_sqb_flush() argument
158 incr = (u64)qidx << 32; in otx2_qos_sqb_flush()
166 static int otx2_qos_ctx_disable(struct otx2_nic *pfvf, u16 qidx, int aura_id) in otx2_qos_ctx_disable() argument
177 cn10k_sq_aq->qidx = qidx; in otx2_qos_ctx_disable()
[all …]
otx2_common.c
28 struct otx2_nic *pfvf, int qidx) in otx2_nix_rq_op_stats() argument
30 u64 incr = (u64)qidx << 32; in otx2_nix_rq_op_stats()
41 struct otx2_nic *pfvf, int qidx) in otx2_nix_sq_op_stats() argument
43 u64 incr = (u64)qidx << 32; in otx2_nix_sq_op_stats()
84 int otx2_update_rq_stats(struct otx2_nic *pfvf, int qidx) in otx2_update_rq_stats() argument
86 struct otx2_rcv_queue *rq = &pfvf->qset.rq[qidx]; in otx2_update_rq_stats()
91 otx2_nix_rq_op_stats(&rq->stats, pfvf, qidx); in otx2_update_rq_stats()
96 int otx2_update_sq_stats(struct otx2_nic *pfvf, int qidx) in otx2_update_sq_stats() argument
98 struct otx2_snd_queue *sq = &pfvf->qset.sq[qidx]; in otx2_update_sq_stats()
103 if (qidx >= pfvf->hw.non_qos_queues) { in otx2_update_sq_stats()
[all …]
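
The otx2_nix_rq_op_stats()/otx2_nix_sq_op_stats() hits show the recurring idiom incr = (u64)qidx << 32: the queue index is packed into bits 63:32 of the operand so a single per-LF "op" stats register can address any queue. A minimal sketch under that assumption; the register access is faked with an ordinary function, not the real atomic op read.

/*
 * Illustrative only: the "(u64)qidx << 32" queue-select idiom.
 */
#include <stdint.h>
#include <stdio.h>

#define QIDX_SHIFT 32

/* hypothetical stand-in for the atomic op-register read */
static uint64_t fake_op_reg_read(uint64_t incr)
{
	uint32_t qidx = (uint32_t)(incr >> QIDX_SHIFT);

	return (uint64_t)qidx * 100;	/* pretend per-queue octet count */
}

static uint64_t read_queue_stat(uint16_t qidx)
{
	uint64_t incr = (uint64_t)qidx << QIDX_SHIFT;	/* queue select in bits 63:32 */

	return fake_op_reg_read(incr);
}

int main(void)
{
	for (uint16_t q = 0; q < 4; q++)
		printf("rq %u octets: %llu\n", q, (unsigned long long)read_queue_stat(q));
	return 0;
}
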
otx2_txrx.c
188 int qidx) in otx2_skb_add_frag() argument
219 pfvf->hw_ops->aura_freeptr(pfvf, qidx, iova & ~0x07ULL); in otx2_skb_add_frag()
247 int qidx) in otx2_free_rcv_seg() argument
260 pfvf->hw_ops->aura_freeptr(pfvf, qidx, in otx2_free_rcv_seg()
267 struct nix_cqe_rx_s *cqe, int qidx) in otx2_check_rcv_errors() argument
275 qidx, parse->errlev, parse->errcode); in otx2_check_rcv_errors()
326 otx2_free_rcv_seg(pfvf, cqe, qidx); in otx2_check_rcv_errors()
453 int *xsk_frames, int qidx, int budget) in otx2_zc_submit_pkts() argument
461 otx2_zc_napi_handler(pfvf, xsk_pool, qidx, budget); in otx2_zc_submit_pkts()
467 int tx_pkts = 0, tx_bytes = 0, qidx; in otx2_tx_napi_handler() local
[all …]
rep.c
156 u16 qidx = rep->rep_id; in rvu_rep_sp_stats64() local
158 otx2_update_rq_stats(priv, qidx); in rvu_rep_sp_stats64()
159 rq = &priv->qset.rq[qidx]; in rvu_rep_sp_stats64()
161 otx2_update_sq_stats(priv, qidx); in rvu_rep_sp_stats64()
162 sq = &priv->qset.sq[qidx]; in rvu_rep_sp_stats64()
504 int err = 0, qidx, vec; in rvu_rep_napi_init() local
512 for (qidx = 0; qidx < hw->cint_cnt; qidx++) { in rvu_rep_napi_init()
513 cq_poll = &qset->napi[qidx]; in rvu_rep_napi_init()
514 cq_poll->cint_idx = qidx; in rvu_rep_napi_init()
516 (qidx < hw->rx_queues) ? qidx : CINT_INVALID_CQ; in rvu_rep_napi_init()
[all …]
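
rvu_rep_napi_init() above walks qidx over the completion-interrupt count, gives each NAPI context its cint index, and backs only the first rx_queues of them with an RX CQ (the rest get CINT_INVALID_CQ). A compile-and-run sketch of that wiring with simplified stand-in types follows.

/*
 * Illustrative only: completion-interrupt to RX CQ mapping.
 */
#include <stdio.h>

#define CINT_INVALID_CQ (-1)

struct fake_cq_poll {
	int cint_idx;
	int rx_cq_idx;
};

int main(void)
{
	struct fake_cq_poll napi[8];
	int cint_cnt = 8, rx_queues = 4, qidx;

	for (qidx = 0; qidx < cint_cnt; qidx++) {
		napi[qidx].cint_idx = qidx;
		napi[qidx].rx_cq_idx = (qidx < rx_queues) ? qidx : CINT_INVALID_CQ;
		printf("cint %d -> rx cq %d\n", qidx, napi[qidx].rx_cq_idx);
	}
	return 0;
}
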
qos.h
27 void otx2_qos_free_qid(struct otx2_nic *pfvf, int qidx);
28 int otx2_qos_enable_sq(struct otx2_nic *pfvf, int qidx);
29 void otx2_qos_disable_sq(struct otx2_nic *pfvf, int qidx);
otx2_txrx.h
184 struct sk_buff *skb, u16 qidx);
186 int size, int qidx);
188 int size, int qidx);
cn10k.h
28 void cn10k_sqe_flush(void *dev, struct otx2_snd_queue *sq, int size, int qidx);
29 int cn10k_sq_aq_init(void *dev, u16 qidx, u8 chan_offset, u16 sqb_aura);
otx2_xsk.h
22 void otx2_attach_xsk_buff(struct otx2_nic *pfvf, struct otx2_snd_queue *sq, int qidx);
/linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ipsec/
chcr_ipsec.c
417 u32 qidx; in copy_esn_pktxt() local
425 qidx = skb->queue_mapping; in copy_esn_pktxt()
426 q = &adap->sge.ethtxq[qidx + pi->first_qset]; in copy_esn_pktxt()
467 u32 ctrl0, qidx; in copy_cpltx_pktxt() local
473 qidx = skb->queue_mapping; in copy_cpltx_pktxt()
474 q = &adap->sge.ethtxq[qidx + pi->first_qset]; in copy_cpltx_pktxt()
512 unsigned int qidx; in copy_key_cpltx_pktxt() local
516 qidx = skb->queue_mapping; in copy_key_cpltx_pktxt()
517 q = &adap->sge.ethtxq[qidx + pi->first_qset]; in copy_key_cpltx_pktxt()
572 int qidx = skb_get_queue_mapping(skb); in ch_ipsec_crypto_wreq() local
[all …]
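
Each of the copy_*_pktxt() matches repeats the same TX-queue selection: qidx comes from skb->queue_mapping (a per-port index) and pi->first_qset offsets it into the adapter-wide ethtxq[] array. A small sketch of that index arithmetic; the structures are invented stand-ins, not the Chelsio ones.

/*
 * Illustrative only: per-port queue index plus first_qset offset.
 */
#include <stdio.h>

struct fake_txq {
	unsigned int hw_id;
};

struct fake_port {
	unsigned int first_qset;	/* this port's first slot in ethtxq[] */
};

int main(void)
{
	struct fake_txq ethtxq[16];
	struct fake_port pi = { .first_qset = 8 };
	unsigned int queue_mapping = 3;		/* would come from the skb */
	unsigned int qidx = queue_mapping;
	struct fake_txq *q = &ethtxq[qidx + pi.first_qset];

	q->hw_id = qidx + pi.first_qset;
	printf("skb queue %u -> adapter txq %u\n", qidx, q->hw_id);
	return 0;
}
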
/linux/drivers/net/ethernet/cavium/thunder/
nicvf_queues.h
336 int qidx, bool enable);
338 void nicvf_sq_enable(struct nicvf *nic, struct snd_queue *sq, int qidx);
339 void nicvf_sq_disable(struct nicvf *nic, int qidx);
342 struct snd_queue *sq, int qidx);
363 u64 qidx, u64 val);
365 u64 offset, u64 qidx);
/linux/drivers/net/ethernet/fungible/funeth/
funeth_rx.c
432 skb_record_rx_queue(skb, q->qidx); in fun_handle_cqe_pkt()
614 unsigned int qidx, in fun_rxq_create_sw() argument
629 q->qidx = qidx; in fun_rxq_create_sw()
673 netdev_err(dev, "Unable to allocate memory for Rx queue %u\n", qidx); in fun_rxq_create_sw()
704 err = xdp_rxq_info_reg(&q->xdp_rxq, q->netdev, q->qidx, in fun_rxq_create_dev()
750 q->qidx, ncqe, nrqe, q->hw_cqid, q->hw_sqid, irq->irq_idx, in fun_rxq_create_dev()
761 q->qidx, err); in fun_rxq_create_dev()
776 q->qidx, q->hw_cqid, q->hw_sqid, irq->irq_idx); in fun_rxq_free_dev()
788 int funeth_rxq_create(struct net_device *dev, unsigned int qidx, in funeth_rxq_create() argument
796 q = fun_rxq_create_sw(dev, qidx, ncqe, nrqe, irq); in funeth_rxq_create()
funeth_tx.c
624 unsigned int qidx, in fun_txq_create_sw() argument
635 numa_node = cpu_to_node(qidx); /* XDP Tx queue */ in fun_txq_create_sw()
651 q->qidx = qidx; in fun_txq_create_sw()
661 irq ? "Tx" : "XDP", qidx); in fun_txq_create_sw()
709 q->ndq = netdev_get_tx_queue(q->netdev, q->qidx); in fun_txq_create_dev()
718 irq ? "Tx" : "XDP", q->qidx, ndesc, q->hw_qid, irq_idx, in fun_txq_create_dev()
727 irq ? "Tx" : "XDP", q->qidx, err); in fun_txq_create_dev()
740 q->irq ? "Tx" : "XDP", q->qidx, q->hw_qid, in fun_txq_free_dev()
759 int funeth_txq_create(struct net_device *dev, unsigned int qidx, in funeth_txq_create() argument
767 q = fun_txq_create_sw(dev, qidx, ndesc, irq); in funeth_txq_create()
funeth_txrx.h
117 u16 qidx; /* queue index within net_device */ member
173 u16 qidx; /* queue index within net_device */ member
254 int funeth_txq_create(struct net_device *dev, unsigned int qidx,
259 int funeth_rxq_create(struct net_device *dev, unsigned int qidx,
/linux/drivers/net/ethernet/broadcom/bnxt/
bnxt_dcb.c
55 u8 qidx; in bnxt_hwrm_queue_pri2cos_cfg() local
60 qidx = bp->tc_to_qidx[ets->prio_tc[i]]; in bnxt_hwrm_queue_pri2cos_cfg()
61 pri2cos[i] = bp->q_info[qidx].queue_id; in bnxt_hwrm_queue_pri2cos_cfg()
108 u8 qidx = bp->tc_to_qidx[i]; in bnxt_hwrm_queue_cos2bw_cfg() local
112 qidx); in bnxt_hwrm_queue_cos2bw_cfg()
115 cos2bw.queue_id = bp->q_info[qidx].queue_id; in bnxt_hwrm_queue_cos2bw_cfg()
131 if (qidx == 0) { in bnxt_hwrm_queue_cos2bw_cfg()
277 u8 qidx = bp->tc_to_qidx[i]; in bnxt_hwrm_queue_pfc_cfg() local
279 if (!BNXT_LLQ(bp->q_info[qidx].queue_profile)) { in bnxt_hwrm_queue_pfc_cfg()
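bnxt_hwrm_queue_pri2cos_cfg() above resolves each priority through two tables: prio_tc[] maps priority to traffic class, tc_to_qidx[] maps traffic class to a queue index, and q_info[qidx].queue_id gives the hardware CoS queue. A sketch of that lookup chain; all table values below are invented examples, not driver data.

/*
 * Illustrative only: priority -> tc -> qidx -> hardware queue id.
 */
#include <stdio.h>

#define MAX_PRIO 8

int main(void)
{
	unsigned char prio_tc[MAX_PRIO] = { 0, 0, 1, 1, 2, 2, 3, 3 };
	unsigned char tc_to_qidx[4]     = { 0, 1, 2, 3 };
	unsigned char queue_id[4]       = { 0, 2, 4, 6 };
	unsigned char pri2cos[MAX_PRIO];
	int i;

	for (i = 0; i < MAX_PRIO; i++) {
		unsigned char qidx = tc_to_qidx[prio_tc[i]];

		pri2cos[i] = queue_id[qidx];
		printf("prio %d -> tc %u -> qidx %u -> hw queue %u\n",
		       i, prio_tc[i], qidx, pri2cos[i]);
	}
	return 0;
}
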
/linux/drivers/scsi/qla2xxx/
qla_nvme.c
115 unsigned int qidx, u16 qsize, void **handle) in qla_nvme_alloc_queue() argument
122 if (qidx) in qla_nvme_alloc_queue()
123 qidx--; in qla_nvme_alloc_queue()
130 __func__, handle, qidx, qsize); in qla_nvme_alloc_queue()
132 if (qidx > qla_nvme_fc_transport.max_hw_queues) { in qla_nvme_alloc_queue()
135 __func__, qidx, qla_nvme_fc_transport.max_hw_queues); in qla_nvme_alloc_queue()
143 if (ha->queue_pair_map[qidx]) { in qla_nvme_alloc_queue()
144 *handle = ha->queue_pair_map[qidx]; in qla_nvme_alloc_queue()
147 *handle, qidx); in qla_nvme_alloc_queue()
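qla_nvme_alloc_queue() above decrements a non-zero qidx (slot 0 appears to be reserved, presumably for the admin queue), range-checks it against max_hw_queues, and returns an already-created queue pair through *handle when one exists. A simplified sketch of that flow; the types, the exact bounds check, and the "create" step are stand-ins, not the qla2xxx implementation.

/*
 * Illustrative only: index shift, range check, and queue-pair reuse.
 */
#include <stdio.h>

#define MAX_HW_QUEUES 4

static int dummy_qpairs[MAX_HW_QUEUES];
static void *queue_pair_map[MAX_HW_QUEUES];

static int alloc_queue(unsigned int qidx, void **handle)
{
	if (qidx)
		qidx--;				/* skip the reserved slot 0 */

	if (qidx >= MAX_HW_QUEUES)		/* simplified range check */
		return -1;

	if (!queue_pair_map[qidx])		/* fake queue-pair creation */
		queue_pair_map[qidx] = &dummy_qpairs[qidx];

	*handle = queue_pair_map[qidx];		/* reuse if already present */
	return 0;
}

int main(void)
{
	void *h = NULL;
	int rc = alloc_queue(2, &h);

	printf("alloc_queue(2): rc=%d handle=%p\n", rc, h);
	return 0;
}
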
/linux/include/linux/
nvme-fc-driver.h
480 unsigned int qidx, u16 qsize,
483 unsigned int qidx, void *handle);
/linux/drivers/crypto/ccp/
ccp-dev-v5.c
216 n = head_idx + COMMANDS_PER_QUEUE - cmd_q->qidx - 1; in ccp5_get_free_slots()
238 mP = (__le32 *)&cmd_q->qbase[cmd_q->qidx]; in ccp5_do_cmd()
243 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE; in ccp5_do_cmd()
249 tail = low_address(cmd_q->qdma_tail + cmd_q->qidx * Q_DESC_SIZE); in ccp5_do_cmd()
840 cmd_q->qidx = 0; in ccp5_init()
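In ccp5_get_free_slots()/ccp5_do_cmd() above, qidx is the software write index into a fixed-size descriptor ring: free space is computed against the hardware head index, the index wraps modulo COMMANDS_PER_QUEUE, and the tail doorbell value is the ring's DMA base plus qidx * Q_DESC_SIZE. A sketch of that ring arithmetic; the constants and types are illustrative, not the CCP definitions.

/*
 * Illustrative only: ring write index, wrap, and tail computation.
 */
#include <stdint.h>
#include <stdio.h>

#define COMMANDS_PER_QUEUE 32
#define Q_DESC_SIZE        32

struct fake_cmd_q {
	unsigned int qidx;	/* next descriptor slot to fill */
	uint64_t qdma_tail;	/* DMA address of the ring base */
};

static unsigned int free_slots(const struct fake_cmd_q *q, unsigned int head_idx)
{
	/* classic ring formula: one slot stays empty to tell full from empty */
	return (head_idx + COMMANDS_PER_QUEUE - q->qidx - 1) % COMMANDS_PER_QUEUE;
}

int main(void)
{
	struct fake_cmd_q q = { .qidx = 30, .qdma_tail = 0x1000 };

	printf("free slots: %u\n", free_slots(&q, 4));

	q.qidx = (q.qidx + 1) % COMMANDS_PER_QUEUE;	/* advance and wrap */
	printf("new qidx: %u, tail: 0x%llx\n", q.qidx,
	       (unsigned long long)(q.qdma_tail + q.qidx * Q_DESC_SIZE));
	return 0;
}
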
/linux/drivers/scsi/lpfc/
lpfc_init.c
10941 int qidx, uint32_t qtype) in lpfc_create_wq_cq() argument
10949 ((eq) ? ((cq) ? "WQ" : "CQ") : "EQ"), qidx); in lpfc_create_wq_cq()
10959 qidx, (uint32_t)rc); in lpfc_create_wq_cq()
10970 qidx, cq->queue_id, qidx, eq->queue_id); in lpfc_create_wq_cq()
10977 qidx, (uint32_t)rc); in lpfc_create_wq_cq()
10989 qidx, wq->queue_id, wq->assoc_qid, qidx, cq->queue_id); in lpfc_create_wq_cq()
11020 int qidx; in lpfc_setup_cq_lookup() local
11025 for (qidx = 0; qidx < phba->cfg_irq_chann; qidx++) { in lpfc_setup_cq_lookup()
11027 eq = phba->sli4_hba.hba_eq_hdl[qidx].eq; in lpfc_setup_cq_lookup()
11061 int qidx, cpu; in lpfc_sli4_queue_setup() local
[all …]
lpfc_nvme.h
43 uint32_t qidx; /* queue index passed to create */ member
/linux/drivers/net/ethernet/broadcom/bnx2x/
bnx2x_sriov.c
1738 int qidx = 0, abs_vfid; in bnx2x_iov_eq_sp_event() local
1787 qidx = cid & ((1 << BNX2X_VF_CID_WND)-1); in bnx2x_iov_eq_sp_event()
1801 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
1802 vfq_get(vf, qidx)->sp_obj.complete_cmd(bp, in bnx2x_iov_eq_sp_event()
1804 qidx)->sp_obj, in bnx2x_iov_eq_sp_event()
1809 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
1810 bnx2x_vf_handle_classification_eqe(bp, vfq_get(vf, qidx), elem); in bnx2x_iov_eq_sp_event()
1814 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
1819 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
1824 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
/linux/drivers/dma/amd/ptdma/
ptdma.h
201 unsigned int qidx; member
/linux/drivers/scsi/mpi3mr/
mpi3mr.h
222 #define REPLY_QUEUE_IDX_TO_MSIX_IDX(qidx, offset) (qidx + offset) argument
1475 u64 *reply_dma, u16 qidx);
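The mpi3mr macro above is a plain offset: a 0-based reply-queue index plus an MSI-X offset gives the vector index, presumably accounting for vectors reserved ahead of the operational reply queues. A tiny usage sketch; the offset value is made up.

/*
 * Illustrative only: reply queue index to MSI-X vector mapping.
 */
#include <stdio.h>

#define REPLY_QUEUE_IDX_TO_MSIX_IDX(qidx, offset) ((qidx) + (offset))

int main(void)
{
	int msix_offset = 1;	/* hypothetical reserved vector count */
	int qidx;

	for (qidx = 0; qidx < 4; qidx++)
		printf("reply queue %d -> msix %d\n",
		       qidx, REPLY_QUEUE_IDX_TO_MSIX_IDX(qidx, msix_offset));
	return 0;
}
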
/linux/drivers/net/ethernet/chelsio/cxgb4vf/
sge.c
1161 int qidx, credits, max_pkt_len; in t4vf_eth_xmit() local
1193 qidx = skb_get_queue_mapping(skb); in t4vf_eth_xmit()
1194 BUG_ON(qidx >= pi->nqsets); in t4vf_eth_xmit()
1195 txq = &adapter->sge.ethtxq[pi->first_qset + qidx]; in t4vf_eth_xmit()
1226 dev->name, qidx); in t4vf_eth_xmit()
/linux/drivers/iommu/arm/arm-smmu-v3/
tegra241-cmdqv.c
589 u16 qidx, lidx, idx; in tegra241_cmdqv_hw_reset() local
603 for (idx = 0, qidx = 0; idx < cmdqv->num_vintfs; idx++) { in tegra241_cmdqv_hw_reset()
608 REG_CMDQV(cmdqv, CMDQ_ALLOC(qidx++))); in tegra241_cmdqv_hw_reset()
