
Searched refs: q_type (Results 1 – 25 of 27), sorted by relevance


/linux/net/core/
netdev-genl.c
369 u32 q_idx, u32 q_type, const struct genl_info *info) in netdev_nl_queue_fill_one() argument
381 nla_put_u32(rsp, NETDEV_A_QUEUE_TYPE, q_type) || in netdev_nl_queue_fill_one()
385 switch (q_type) { in netdev_nl_queue_fill_one()
415 u32 q_type) in netdev_nl_queue_validate() argument
417 switch (q_type) { in netdev_nl_queue_validate()
431 u32 q_type, const struct genl_info *info) in netdev_nl_queue_fill() argument
438 err = netdev_nl_queue_validate(netdev, q_idx, q_type); in netdev_nl_queue_fill()
442 return netdev_nl_queue_fill_one(rsp, netdev, q_idx, q_type, info); in netdev_nl_queue_fill()
447 u32 q_id, q_type, ifindex; in netdev_nl_queue_get_doit() local
458 q_type = nla_get_u32(info->attrs[NETDEV_A_QUEUE_TYPE]); in netdev_nl_queue_get_doit()
[all …]
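The hits above share one shape: q_type is reported back as the NETDEV_A_QUEUE_TYPE attribute, validated, and then switched on to fill RX- or TX-specific queue details. A minimal, self-contained sketch of that validate-then-dispatch pattern; the enum and helper names below are invented for illustration and are not the kernel's API:

#include <errno.h>
#include <stdio.h>

enum q_kind { Q_RX, Q_TX };

/* Reject unknown types up front, mirroring the validate step in the hits. */
static int q_validate(enum q_kind q_type)
{
	return (q_type == Q_RX || q_type == Q_TX) ? 0 : -EINVAL;
}

static int q_fill_one(unsigned int q_idx, enum q_kind q_type)
{
	switch (q_type) {
	case Q_RX:
		printf("rx queue %u\n", q_idx);	/* per-type detail goes here */
		return 0;
	case Q_TX:
		printf("tx queue %u\n", q_idx);
		return 0;
	}
	return -EINVAL;
}

static int q_fill(unsigned int q_idx, enum q_kind q_type)
{
	int err = q_validate(q_type);

	return err ? err : q_fill_one(q_idx, q_type);
}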
/linux/drivers/crypto/
n2_core.c
58 u8 q_type; member
82 if (q->q_type == HV_NCS_QTYPE_MAU) { in spu_next_offset()
1536 static void *new_queue(unsigned long q_type) in new_queue() argument
1538 return kmem_cache_zalloc(queue_cache[q_type - 1], GFP_KERNEL); in new_queue()
1541 static void free_queue(void *p, unsigned long q_type) in free_queue() argument
1543 kmem_cache_free(queue_cache[q_type - 1], p); in free_queue()
1583 unsigned long q_type = qr->type; in spu_queue_register_workfn() local
1586 hv_ret = sun4v_ncs_qconf(q_type, __pa(p->q), in spu_queue_register_workfn()
1594 static int spu_queue_register(struct spu_queue *p, unsigned long q_type) in spu_queue_register() argument
1597 struct spu_qreg qr = { .queue = p, .type = q_type }; in spu_queue_register()
[all …]
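In this driver q_type is a 1-based queue-type identifier used to index a per-type allocation cache (queue_cache[q_type - 1]). A rough userspace sketch of the same idea, with calloc() and a made-up size table standing in for the kmem caches; the names are illustrative only:

#include <stdlib.h>

enum { QTYPE_A = 1, QTYPE_B = 2 };	/* 1-based type ids, as in the hits */

static const size_t queue_size[] = { 4096, 8192 };	/* per-type sizes, made up */

static void *new_queue(unsigned long q_type)
{
	/* q_type - 1 converts the 1-based type id into an array index */
	return calloc(1, queue_size[q_type - 1]);
}

static void free_queue(void *p, unsigned long q_type)
{
	(void)q_type;	/* a real cache would return p to the matching pool */
	free(p);
}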
/linux/drivers/net/ethernet/huawei/hinic/
hinic_hw_cmdq.c
508 enum hinic_set_arm_qtype q_type, u32 q_id) in hinic_set_arm_bit() argument
516 arm_bit.q_type = q_type; in hinic_set_arm_bit()
739 enum hinic_cmdq_type q_type, void __iomem *db_area) in init_cmdq() argument
744 cmdq->cmdq_type = q_type; in init_cmdq()
hinic_hw_cmdq.h
117 u32 q_type; member
/linux/drivers/net/ethernet/microsoft/mana/
hw_channel.c
490 enum gdma_queue_type q_type, u16 q_depth, in mana_hwc_create_wq() argument
499 WARN_ON(q_type != GDMA_SQ && q_type != GDMA_RQ); in mana_hwc_create_wq()
501 if (q_type == GDMA_RQ) in mana_hwc_create_wq()
513 err = mana_hwc_create_gdma_wq(hwc, q_type, queue_size, &queue); in mana_hwc_create_wq()
gdma_main.c
277 enum gdma_queue_type q_type, u32 qid, in mana_gd_ring_doorbell() argument
283 switch (q_type) { in mana_gd_ring_doorbell()
1030 enum gdma_queue_type q_type, in mana_gd_write_client_oob() argument
1052 if (q_type == GDMA_SQ) in mana_gd_write_client_oob()
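In the mana hits, q_type is constrained to the send/receive pair (GDMA_SQ/GDMA_RQ) and decides which queue size and doorbell path is used. A tiny sketch of that select-by-direction idea; the names here are stand-ins, not the driver's:

#include <assert.h>
#include <stddef.h>

enum gq_type { GQ_SQ, GQ_RQ };	/* illustrative stand-ins for the SQ/RQ pair */

static size_t wq_size(enum gq_type q_type, size_t sq_size, size_t rq_size)
{
	assert(q_type == GQ_SQ || q_type == GQ_RQ);	/* mirrors the WARN_ON */
	return q_type == GQ_RQ ? rq_size : sq_size;
}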
/linux/fs/xfs/scrub/
quota_repair.c
82 trace_xrep_dquot_item_fill_bmap_hole(sc->mp, dq->q_type, dq->q_id); in xrep_quota_item_fill_bmap_hole()
91 xfs_qm_init_dquot_blk(sc->tp, dq->q_id, dq->q_type, bp); in xrep_quota_item_fill_bmap_hole()
251 trace_xrep_dquot_item(sc->mp, dq->q_type, dq->q_id); in xrep_quota_item()
quotacheck_repair.c
111 trace_xrep_quotacheck_dquot(xqc->sc->mp, dq->q_type, dq->q_id); in xqcheck_commit_dquot()
/linux/drivers/net/ethernet/intel/idpf/
idpf_virtchnl.c
1058 static int idpf_vport_get_q_reg(u32 *reg_vals, int num_regs, u32 q_type, in idpf_vport_get_q_reg() argument
1070 if (le32_to_cpu(chunk->type) != q_type) in idpf_vport_get_q_reg()
1094 int num_regs, u32 q_type) in __idpf_queue_reg_init() argument
1099 switch (q_type) { in __idpf_queue_reg_init()
3239 static int idpf_vport_get_queue_ids(u32 *qids, int num_qids, u16 q_type, in idpf_vport_get_queue_ids() argument
3250 if (le32_to_cpu(chunk->type) != q_type) in idpf_vport_get_queue_ids()
3283 u32 q_type) in __idpf_vport_queue_ids_init() argument
3287 switch (q_type) { in __idpf_vport_queue_ids_init()
3358 u16 q_type; in idpf_vport_queue_ids_init() local
3406 q_type = VIRTCHNL2_QUEUE_TYPE_TX_COMPLETION; in idpf_vport_queue_ids_init()
[all …]
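These idpf hits all walk a list of typed chunks returned by the control plane and keep only entries whose type matches q_type. A self-contained sketch of that filter loop; the struct and field names are illustrative, not the driver's:

#include <stdint.h>

struct q_chunk {
	uint32_t type;		/* queue type of this chunk */
	uint32_t start_id;	/* first queue id in the chunk */
};

/* Collect queue ids from chunks whose type matches q_type. */
static int get_queue_ids(uint32_t *qids, int max_ids,
			 const struct q_chunk *chunks, int num_chunks,
			 uint32_t q_type)
{
	int n = 0;

	for (int i = 0; i < num_chunks && n < max_ids; i++) {
		if (chunks[i].type != q_type)
			continue;	/* skip chunks for other queue types */
		qids[n++] = chunks[i].start_id;
	}
	return n;
}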
/linux/drivers/net/ethernet/intel/ice/
ice_controlq.c
606 static int ice_init_ctrlq(struct ice_hw *hw, enum ice_ctl_q q_type) in ice_init_ctrlq() argument
611 switch (q_type) { in ice_init_ctrlq()
627 cq->qtype = q_type; in ice_init_ctrlq()
687 static void ice_shutdown_ctrlq(struct ice_hw *hw, enum ice_ctl_q q_type, in ice_shutdown_ctrlq() argument
692 switch (q_type) { in ice_shutdown_ctrlq()
ice_main.c
1437 static int __ice_clean_ctrlq(struct ice_pf *pf, enum ice_ctl_q q_type) in __ice_clean_ctrlq() argument
1451 switch (q_type) { in __ice_clean_ctrlq()
1469 dev_warn(dev, "Unknown control queue type 0x%x\n", q_type); in __ice_clean_ctrlq()
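Both ice hits use q_type to select which control queue instance (admin, mailbox, ...) to operate on, warning on anything unknown. A compact sketch of that selection; the types and fields below are invented for illustration:

#include <stdio.h>

enum ctl_q { CTL_Q_ADMIN, CTL_Q_MAILBOX };

struct ctl_q_info { const char *name; };

struct hw {
	struct ctl_q_info adminq;
	struct ctl_q_info mailboxq;
};

static struct ctl_q_info *ctlq_select(struct hw *hw, enum ctl_q q_type)
{
	switch (q_type) {
	case CTL_Q_ADMIN:
		return &hw->adminq;
	case CTL_Q_MAILBOX:
		return &hw->mailboxq;
	default:
		fprintf(stderr, "Unknown control queue type 0x%x\n",
			(unsigned int)q_type);
		return NULL;	/* caller bails out on unknown types */
	}
}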
/linux/drivers/net/ethernet/freescale/dpaa2/
dpni.h
952 enum dpni_queue_type q_type,
961 enum dpni_queue_type q_type,
/linux/drivers/infiniband/hw/irdma/
puda.h
31 u8 q_type; member
user.h
248 u8 q_type; member
puda.c
277 info->q_type = (u8)FIELD_GET(IRDMA_CQ_SQ, qword3); in irdma_puda_poll_info()
286 if (info->q_type == IRDMA_CQE_QTYPE_RQ) { in irdma_puda_poll_info()
361 if (info.q_type == IRDMA_CQE_QTYPE_RQ) { in irdma_puda_poll_cmpl()
uk.c
1048 info->q_type = (u8)FIELD_GET(IRDMA_CQ_SQ, qword3); in irdma_uk_cq_poll_cmpl()
1088 if (info->q_type == IRDMA_CQE_QTYPE_RQ) { in irdma_uk_cq_poll_cmpl()
utils.c
2478 cmpl->cpi.q_type = IRDMA_CQE_QTYPE_SQ; in irdma_generate_flush_completions()
2520 cmpl->cpi.q_type = IRDMA_CQE_QTYPE_RQ; in irdma_generate_flush_completions()
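In the irdma hits, q_type is a flag pulled out of a completion-queue-entry word (FIELD_GET(IRDMA_CQ_SQ, qword3)) that then drives the RQ-vs-SQ completion path. A standalone sketch of that extract-and-branch step, with a hand-rolled bit mask in place of the kernel's FIELD_GET and an illustrative bit position:

#include <stdint.h>
#include <stdbool.h>

#define CQE_SQ_BIT	(1ULL << 62)	/* bit position is illustrative only */

enum cqe_qtype { CQE_QTYPE_RQ = 0, CQE_QTYPE_SQ = 1 };

static enum cqe_qtype cqe_queue_type(uint64_t qword3)
{
	/* pull the SQ/RQ flag out of the completion descriptor word */
	return (qword3 & CQE_SQ_BIT) ? CQE_QTYPE_SQ : CQE_QTYPE_RQ;
}

static bool cqe_is_receive(uint64_t qword3)
{
	return cqe_queue_type(qword3) == CQE_QTYPE_RQ;
}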
/linux/drivers/net/ethernet/broadcom/bnx2x/
bnx2x_sriov.c
170 unsigned long q_type) in bnx2x_vfop_qctor_prep() argument
216 if (test_bit(BNX2X_Q_TYPE_HAS_RX, &q_type)) { in bnx2x_vfop_qctor_prep()
228 if (test_bit(BNX2X_Q_TYPE_HAS_TX, &q_type)) { in bnx2x_vfop_qctor_prep()
1419 unsigned long q_type = 0; in bnx2x_vfq_init() local
1421 set_bit(BNX2X_Q_TYPE_HAS_TX, &q_type); in bnx2x_vfq_init()
1422 set_bit(BNX2X_Q_TYPE_HAS_RX, &q_type); in bnx2x_vfq_init()
1429 q_type); in bnx2x_vfq_init()
bnx2x_vfpf.c
1540 unsigned long q_type = 0; in bnx2x_vf_mbx_setup_q() local
1561 __set_bit(BNX2X_Q_TYPE_HAS_TX, &q_type); in bnx2x_vf_mbx_setup_q()
1592 __set_bit(BNX2X_Q_TYPE_HAS_RX, &q_type); in bnx2x_vf_mbx_setup_q()
1638 bnx2x_vfop_qctor_prep(bp, vf, q, &qctor, q_type); in bnx2x_vf_mbx_setup_q()
bnx2x_sriov.h
449 unsigned long q_type);
bnx2x_main.c
6364 unsigned long q_type = 0; in bnx2x_init_eth_fp() local
6381 __set_bit(BNX2X_Q_TYPE_HAS_RX, &q_type); in bnx2x_init_eth_fp()
6382 __set_bit(BNX2X_Q_TYPE_HAS_TX, &q_type); in bnx2x_init_eth_fp()
6404 bnx2x_sp_mapping(bp, q_rdata), q_type); in bnx2x_init_eth_fp()
6467 unsigned long q_type = 0; in bnx2x_init_fcoe_fp() local
6489 __set_bit(BNX2X_Q_TYPE_HAS_RX, &q_type); in bnx2x_init_fcoe_fp()
6490 __set_bit(BNX2X_Q_TYPE_HAS_TX, &q_type); in bnx2x_init_fcoe_fp()
6497 bnx2x_sp_mapping(bp, q_rdata), q_type); in bnx2x_init_fcoe_fp()
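Unlike the other drivers, bnx2x treats q_type as a capability bitmap rather than a single enum value: the HAS_RX and HAS_TX bits are set independently and tested when the queue is constructed. A userspace sketch of the same flag handling, using plain bit masks in place of set_bit()/test_bit(); the names are invented here:

enum { Q_TYPE_HAS_RX = 1u << 0, Q_TYPE_HAS_TX = 1u << 1 };

static void qctor_prep(unsigned long q_type)
{
	if (q_type & Q_TYPE_HAS_RX) {
		/* fill RX-side queue parameters here */
	}
	if (q_type & Q_TYPE_HAS_TX) {
		/* fill TX-side queue parameters here */
	}
}

static void queue_init(void)
{
	unsigned long q_type = 0;

	q_type |= Q_TYPE_HAS_RX;	/* an L2 queue carries both directions */
	q_type |= Q_TYPE_HAS_TX;
	qctor_prep(q_type);
}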
/linux/drivers/accel/habanalabs/common/
command_submission.c
1940 enum hl_queue_type q_type, u32 q_idx, u32 encaps_signal_offset) in cs_ioctl_signal_wait_create_jobs() argument
1949 job = hl_cs_allocate_job(hdev, q_type, true); in cs_ioctl_signal_wait_create_jobs()
1962 cb = hl_cb_kernel_create(hdev, cb_size, q_type == QUEUE_TYPE_HW); in cs_ioctl_signal_wait_create_jobs()
2216 enum hl_queue_type q_type; in cs_ioctl_signal_wait() local
2243 q_type = hw_queue_prop->type; in cs_ioctl_signal_wait()
2406 rc = cs_ioctl_signal_wait_create_jobs(hdev, ctx, cs, q_type, in cs_ioctl_signal_wait()
2421 if (q_type == QUEUE_TYPE_HW) in cs_ioctl_signal_wait()
/linux/drivers/net/ethernet/mellanox/mlxsw/
pci.c
215 enum mlxsw_pci_queue_type q_type) in mlxsw_pci_queue_type_group_get() argument
217 return &mlxsw_pci->queues[q_type]; in mlxsw_pci_queue_type_group_get()
222 enum mlxsw_pci_queue_type q_type, u8 q_num) in __mlxsw_pci_queue_get() argument
224 return &mlxsw_pci->queues[q_type].q[q_num]; in __mlxsw_pci_queue_get()
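Here q_type simply indexes a per-type group inside the PCI device state, and an individual queue is addressed by (type, number). A minimal sketch of that two-level lookup; the structures and sizes are invented for illustration:

#define QUEUE_TYPE_COUNT	4
#define QUEUES_PER_TYPE		8

struct queue { int num; };

struct queue_type_group {
	struct queue q[QUEUES_PER_TYPE];
};

struct pci_dev_state {
	struct queue_type_group queues[QUEUE_TYPE_COUNT];
};

static struct queue *queue_get(struct pci_dev_state *s,
			       unsigned int q_type, unsigned int q_num)
{
	/* first index picks the type group, second picks the queue inside it */
	return &s->queues[q_type].q[q_num];
}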
/linux/drivers/net/ethernet/chelsio/cxgb4/
cxgb4.h
845 unsigned int q_type; /* Q type Eth/Ctrl/Ofld */ member
/linux/fs/xfs/
xfs_trace.h
1098 __entry->type = dqp->q_type;
1183 __entry->type = dqp->q_type;
1222 __entry->type = qtrx->qt_dquot->q_type;
