/linux/tools/testing/selftests/ublk/

  kublk.c
      65  ublk_set_sqe_cmd_op(sqe, data->cmd_op);    in ublk_ctrl_init_cmd()
     104  .cmd_op = UBLK_U_CMD_STOP_DEV,    in ublk_ctrl_stop_dev()
     114  .cmd_op = UBLK_U_CMD_START_DEV,    in ublk_ctrl_start_dev()
     126  .cmd_op = UBLK_U_CMD_START_USER_RECOVERY,    in ublk_ctrl_start_user_recovery()
     135  .cmd_op = UBLK_U_CMD_END_USER_RECOVERY,    in ublk_ctrl_end_user_recovery()
     147  .cmd_op = UBLK_U_CMD_ADD_DEV,    in ublk_ctrl_add_dev()
     159  .cmd_op = UBLK_U_CMD_DEL_DEV,    in ublk_ctrl_del_dev()
     169  .cmd_op = UBLK_U_CMD_GET_DEV_INFO,    in ublk_ctrl_get_info()
     182  .cmd_op = UBLK_U_CMD_SET_PARAMS,    in ublk_ctrl_set_params()
     195  .cmd_op    in ublk_ctrl_get_params()
     604  unsigned int cmd_op = 0;    in ublk_queue_io_cmd()  (local)
     778  unsigned cmd_op = user_data_to_op(cqe->user_data);    in ublk_handle_cqe()  (local)

  kublk.h
      98  __u32 cmd_op;    (member)
     316  sqe->cmd_op = UBLK_U_IO_REGISTER_IO_BUF;    in io_uring_prep_buf_register()
     323  sqe->cmd_op = UBLK_U_IO_UNREGISTER_IO_BUF;    in io_uring_prep_buf_unregister()
     349  ublk_set_sqe_cmd_op(struct io_uring_sqe *sqe, __u32 cmd_op)    in ublk_set_sqe_cmd_op()  (argument)
     352  static inline void ublk_set_sqe_cmd_op(struct io_uring_sqe *sqe, __u32 cmd_op)    in ublk_set_sqe_cmd_op()
     356  addr[0] = cmd_op;
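
The kublk.h hits show the userspace side of a ublk I/O command: the server stores one of the UBLK_U_IO_* opcodes in sqe->cmd_op and puts the per-tag payload in the SQE's cmd area before submitting an IORING_OP_URING_CMD. Below is a minimal sketch of that pattern, assuming recent uapi headers (<linux/io_uring.h>, <linux/ublk_cmd.h>) that define sqe->cmd_op, sqe->cmd and struct ublksrv_io_cmd; the helper name is illustrative, not the selftests' own.

    #include <linux/io_uring.h>
    #include <linux/ublk_cmd.h>
    #include <linux/types.h>
    #include <string.h>

    /* Fill a 64-byte SQE as a ublk uring_cmd: cmd_op selects the command,
     * and the ublksrv_io_cmd payload rides in the SQE's 16-byte cmd area. */
    static void prep_ublk_io_sqe(struct io_uring_sqe *sqe, int char_dev_fd,
                                 __u16 q_id, __u16 tag, __u32 cmd_op)
    {
            struct ublksrv_io_cmd *io_cmd = (struct ublksrv_io_cmd *)sqe->cmd;

            memset(sqe, 0, sizeof(*sqe));
            sqe->opcode = IORING_OP_URING_CMD;
            sqe->fd = char_dev_fd;          /* /dev/ublkcN character device */
            sqe->cmd_op = cmd_op;           /* e.g. UBLK_U_IO_FETCH_REQ */

            io_cmd->q_id = q_id;
            io_cmd->tag = tag;
    }

The listing's ublk_set_sqe_cmd_op() performs an equivalent store through a raw __u32 pointer (addr[0] = cmd_op) rather than the named cmd_op member.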
|
  null.c
      68  ublk_cmd_op_nr(sqe[0]->cmd_op), 0, q->q_id, 1);    in null_queue_zc_io()
      75  sqe[2]->user_data = build_user_data(tag, ublk_cmd_op_nr(sqe[2]->cmd_op), 0, q->q_id, 1);    in null_queue_zc_io()
|
  file_backed.c
      63  ublk_cmd_op_nr(sqe[0]->cmd_op), 0, q->q_id, 1);    in loop_queue_tgt_rw_io()
      73  sqe[2]->user_data = build_user_data(tag, ublk_cmd_op_nr(sqe[2]->cmd_op), 0, q->q_id, 1);    in loop_queue_tgt_rw_io()
|
  stripe.c
     148  ublk_cmd_op_nr(sqe[0]->cmd_op), 0, q->q_id, 1);    in stripe_queue_tgt_rw_io()
     173  tag, ublk_cmd_op_nr(unreg->cmd_op), 0, q->q_id, 1);    in stripe_queue_tgt_rw_io()
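
Across null.c, file_backed.c and stripe.c the op number taken from cmd_op is folded into cqe->user_data via build_user_data(), and kublk.c recovers it with user_data_to_op() when handling completions. The sketch below shows the packing idea only; the field widths are illustrative and do not claim to match the selftests' actual build_user_data() layout.

    #include <linux/types.h>

    /* Illustrative only: stash tag, op number and queue id in the 64-bit
     * user_data so the completion handler can tell CQEs apart. */
    static inline __u64 pack_user_data(__u16 tag, __u8 op, __u16 q_id,
                                       unsigned int is_target_io)
    {
            return (__u64)tag | ((__u64)op << 16) | ((__u64)q_id << 32) |
                   ((__u64)!!is_target_io << 63);
    }

    static inline unsigned int unpack_op(__u64 user_data)
    {
            return (user_data >> 16) & 0xff;
    }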
|
/linux/drivers/net/ethernet/qlogic/qlcnic/

  qlcnic_sriov_common.c
     326  cmd.cmd_op = ((struct qlcnic_bc_hdr *)hdr)->cmd_op;    in qlcnic_sriov_post_bc_msg()
     332  __func__, cmd.cmd_op, cmd.type, ahw->pci_func,    in qlcnic_sriov_post_bc_msg()
     340  __func__, cmd.cmd_op, cmd.type, ahw->pci_func,    in qlcnic_sriov_post_bc_msg()
     761  u8 cmd_op, num_frags, t_num_frags;    in qlcnic_sriov_prepare_bc_hdr()  (local)
     771  cmd_op = cmd->req.arg[0] & 0xff;    in qlcnic_sriov_prepare_bc_hdr()
     790  cmd_op = cmd->req.arg[0] & 0xff;    in qlcnic_sriov_prepare_bc_hdr()
     791  cmd->cmd_op = cmd_op;    in qlcnic_sriov_prepare_bc_hdr()
     803  trans->cmd_id = cmd_op;    in qlcnic_sriov_prepare_bc_hdr()
     811  hdr[i].cmd_op = cmd_op;    in qlcnic_sriov_prepare_bc_hdr()
    1201  u8 cmd_op;    in qlcnic_sriov_handle_bc_cmd()  (local)
|
  qlcnic_sriov.h
      31  u8 cmd_op;    (member)
      42  u8 cmd_op;
|
  qlcnic_sriov_pf.c
     754  if (trans->req_hdr->cmd_op == QLCNIC_BC_CMD_CHANNEL_INIT) {    in qlcnic_sriov_pf_channel_cfg_cmd()
     776  if (trans->req_hdr->cmd_op == QLCNIC_BC_CMD_CHANNEL_INIT)    in qlcnic_sriov_pf_channel_cfg_cmd()
    1493  u8 size, cmd_op;    in qlcnic_sriov_pf_process_bc_cmd()  (local)
    1495  cmd_op = trans->req_hdr->cmd_op;    in qlcnic_sriov_pf_process_bc_cmd()
    1499  if (cmd_op < size) {    in qlcnic_sriov_pf_process_bc_cmd()
    1500  qlcnic_pf_bc_cmd_hdlr[cmd_op].fn(trans, cmd);    in qlcnic_sriov_pf_process_bc_cmd()
    1507  if (cmd_op == qlcnic_pf_fw_cmd_hdlr[i].cmd) {    in qlcnic_sriov_pf_process_bc_cmd()
    1515  if (cmd_op == qlcnic_pf_passthru_supp_cmds[i]) {    in qlcnic_sriov_pf_process_bc_cmd()
    1727  if ((hdr->cmd_op == QLCNIC_BC_CMD_CHANNEL_INIT) &&    in qlcnic_sriov_soft_flr_check()
|
  qlcnic_83xx_hw.c
     923  mbx->cmd_op = type;    in qlcnic_83xx_alloc_mbx_args()
    3905  __func__, cmd->cmd_op);    in qlcnic_83xx_flush_mbx_queue()
    4039  if (cmd->cmd_op == QLCNIC_CMD_CONFIG_MAC_VLAN) {    in qlcnic_83xx_check_mac_rcode()
    4075  __func__, cmd->cmd_op, cmd->type, ahw->pci_func,    in qlcnic_83xx_decode_mbx_rsp()
    4140  __func__, cmd->cmd_op, cmd->type, ahw->pci_func,    in qlcnic_83xx_mailbox_worker()
|
  qlcnic.h
    1530  u32 cmd_op;    (member)
|
/linux/drivers/block/

  ublk_drv.c
    2083  static inline int ublk_check_cmd_op(u32 cmd_op)    in ublk_check_cmd_op()  (argument)
    2085  u32 ioc_type = _IOC_TYPE(cmd_op);    in ublk_check_cmd_op()
    2373  u32 cmd_op = cmd->cmd_op;    in ublk_ch_uring_cmd_local()  (local)
    2385  __func__, cmd->cmd_op, q_id, tag, result);    in ublk_ch_uring_cmd_local()
    2387  ret = ublk_check_cmd_op(cmd_op);    in ublk_ch_uring_cmd_local()
    2395  if (_IOC_NR(cmd_op) == UBLK_IO_UNREGISTER_IO_BUF)    in ublk_ch_uring_cmd_local()
    2409  if (unlikely(_IOC_NR(cmd_op) == UBLK_IO_FETCH_REQ)) {    in ublk_ch_uring_cmd_local()
    2426  if (_IOC_NR(cmd_op) == UBLK_IO_REGISTER_IO_BUF)    in ublk_ch_uring_cmd_local()
    2444  ^ (_IOC_NR(cmd_op) == UBLK_IO_NEED_GET_DATA))    in ublk_ch_uring_cmd_local()
    2447  switch (_IOC_NR(cmd_op)) {    in ublk_ch_uring_cmd_local()
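
On the driver side, these hits show cmd_op being treated as an ioctl-style opcode: ublk_check_cmd_op() inspects _IOC_TYPE() and the dispatch switches on _IOC_NR(), since the UBLK_U_* opcodes are built with _IOWR('u', ...). A simplified sketch of that check-and-dispatch pattern, written against the uapi headers and not claiming to be the driver's actual code:

    #include <errno.h>
    #include <linux/ioctl.h>
    #include <linux/types.h>
    #include <linux/ublk_cmd.h>

    /* Validate the ioctl "type" byte, then dispatch on the command number. */
    static int check_and_dispatch(__u32 cmd_op)
    {
            if (_IOC_TYPE(cmd_op) != 'u')   /* UBLK_U_* opcodes use type 'u' */
                    return -EOPNOTSUPP;

            switch (_IOC_NR(cmd_op)) {
            case _IOC_NR(UBLK_U_IO_FETCH_REQ):
            case _IOC_NR(UBLK_U_IO_COMMIT_AND_FETCH_REQ):
                    return 0;               /* supported I/O command */
            default:
                    return -EOPNOTSUPP;
            }
    }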
|
/linux/io_uring/

  cmd_net.c
     165  switch (cmd->cmd_op) {    in io_uring_cmd_sock()
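
io_uring_cmd_sock() is the receiving end for socket-level uring commands, switching on cmd->cmd_op (the SOCKET_URING_OP_* values from the uapi header). From userspace this is reachable with liburing; a small sketch, assuming a liburing version recent enough to provide io_uring_prep_cmd_sock() and a connected socket fd:

    #include <liburing.h>

    /* Ask how many bytes are queued for reading on a socket; the answer
     * comes back in cqe->res of the matching completion. */
    static void queue_siocinq(struct io_uring *ring, int sock_fd)
    {
            struct io_uring_sqe *sqe = io_uring_get_sqe(ring);

            io_uring_prep_cmd_sock(sqe, SOCKET_URING_OP_SIOCINQ, sock_fd,
                                   0, 0, NULL, 0);
            io_uring_submit(ring);
    }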
|
  io_uring.c
    3785  BUILD_BUG_SQE_ELEM(8, __u32, cmd_op);    in io_uring_init()
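
The BUILD_BUG_SQE_ELEM(8, __u32, cmd_op) hit pins cmd_op to byte offset 8 of the SQE, i.e. it overlays the 64-bit off/addr2 union used by ordinary read/write submissions. A userspace equivalent of that compile-time check, assuming recent uapi headers:

    #include <stddef.h>
    #include <linux/io_uring.h>

    /* cmd_op shares storage with sqe->off / sqe->addr2 at byte 8. */
    _Static_assert(offsetof(struct io_uring_sqe, cmd_op) == 8,
                   "cmd_op is expected at byte offset 8 of the SQE");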
|
/linux/tools/include/uapi/linux/

  io_uring.h
      39  __u32 cmd_op;    (member)
|
/linux/fs/fuse/

  dev_uring.c
    1134  u32 cmd_op = cmd->cmd_op;    in fuse_uring_cmd()  (local)
    1171  switch (cmd_op) {    in fuse_uring_cmd()
|
/linux/tools/include/io_uring/

  mini_liburing.h
     239  sqe->cmd_op = op;    in io_uring_prep_cmd()
|
/linux/drivers/nvme/host/

  ioctl.c
     651  switch (ioucmd->cmd_op) {    in nvme_ns_uring_cmd()
     789  switch (ioucmd->cmd_op) {    in nvme_dev_uring_cmd()
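
nvme_ns_uring_cmd() and nvme_dev_uring_cmd() switch on ioucmd->cmd_op to pick among the NVME_URING_CMD_* passthrough opcodes. A hedged userspace sketch of issuing one: it assumes liburing, a ring created with IORING_SETUP_SQE128 (struct nvme_uring_cmd does not fit in a 64-byte SQE), a char-device fd such as /dev/ng0n1, and a 512-byte LBA format; error handling is omitted.

    #include <liburing.h>
    #include <linux/nvme_ioctl.h>
    #include <stdint.h>
    #include <string.h>

    /* Queue an NVMe Read (opcode 0x02) through the uring_cmd passthrough path. */
    static void queue_nvme_read(struct io_uring *ring, int ng_fd, void *buf,
                                __u32 nsid, __u64 slba, __u32 nlb0based)
    {
            struct io_uring_sqe *sqe = io_uring_get_sqe(ring);
            struct nvme_uring_cmd *c;

            memset(sqe, 0, sizeof(*sqe));           /* first 64 bytes of the big SQE */
            sqe->opcode = IORING_OP_URING_CMD;
            sqe->fd = ng_fd;                        /* e.g. /dev/ng0n1 */
            sqe->cmd_op = NVME_URING_CMD_IO;

            c = (struct nvme_uring_cmd *)sqe->cmd;  /* payload in the SQE128 cmd area */
            memset(c, 0, sizeof(*c));
            c->opcode = 0x02;                       /* NVMe Read */
            c->nsid = nsid;
            c->addr = (__u64)(uintptr_t)buf;
            c->data_len = (nlb0based + 1) * 512;    /* assumes 512B LBAs */
            c->cdw10 = (__u32)slba;
            c->cdw11 = (__u32)(slba >> 32);
            c->cdw12 = nlb0based;                   /* number of LBAs, zero-based */

            io_uring_submit(ring);
    }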
|
/linux/include/uapi/linux/

  io_uring.h
      39  __u32 cmd_op;    (member)
|
/linux/drivers/net/ethernet/chelsio/cxgb4vf/

  t4vf_hw.c
     141  u32 cmd_op = FW_CMD_OP_G(be32_to_cpu(((struct fw_cmd_hdr *)cmd)->hi));    in t4vf_wr_mbox_core()  (local)
     235  if (cmd_op != FW_VI_STATS_CMD)    in t4vf_wr_mbox_core()
     298  if (cmd_op != FW_VI_STATS_CMD)    in t4vf_wr_mbox_core()
|
/linux/fs/btrfs/

  ioctl.c
    5006  switch (cmd->cmd_op) {    in btrfs_uring_cmd()
|