
Searched refs: se_cmd (Results 1 – 25 of 51) sorted by relevance


/linux/include/target/
target_core_fabric.h
66 int (*check_stop_free)(struct se_cmd *);
67 void (*release_cmd)(struct se_cmd *);
76 int (*write_pending)(struct se_cmd *);
78 int (*get_cmd_state)(struct se_cmd *);
79 int (*queue_data_in)(struct se_cmd *);
80 int (*queue_status)(struct se_cmd *);
81 void (*queue_tm_rsp)(struct se_cmd *);
82 void (*aborted_task)(struct se_cmd *);
166 void __target_init_cmd(struct se_cmd *cmd,
171 int target_init_cmd(struct se_cmd *se_cmd, struct se_session *se_sess,
[all …]
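
The check_stop_free/release_cmd callbacks above take a bare struct se_cmd pointer; fabric drivers typically embed the se_cmd in their own per-command structure and recover that wrapper with container_of(), as the tfc_cmd.c and tcm_qla2xxx.c hits further down show. A minimal sketch of the pattern, assuming made-up names (struct my_fabric_cmd and my_release_cmd are not in-tree code):

/*
 * Sketch only: a fabric module wraps struct se_cmd in its own
 * per-command structure and recovers the wrapper in the
 * target_core_fabric_ops callbacks via container_of().
 */
#include <linux/slab.h>
#include <target/target_core_base.h>
#include <target/target_core_fabric.h>

struct my_fabric_cmd {
	u32 hw_tag;		/* driver-private state */
	struct se_cmd se_cmd;	/* embedded TCM I/O descriptor */
};

static void my_release_cmd(struct se_cmd *se_cmd)
{
	struct my_fabric_cmd *cmd =
		container_of(se_cmd, struct my_fabric_cmd, se_cmd);

	/* last reference dropped: free driver-private resources */
	kfree(cmd);
}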
target_core_backend.h
45 sense_reason_t (*parse_cdb)(struct se_cmd *cmd);
55 unsigned char *(*get_sense_buffer)(struct se_cmd *);
66 sense_reason_t (*execute_rw)(struct se_cmd *cmd, struct scatterlist *,
68 sense_reason_t (*execute_sync_cache)(struct se_cmd *cmd);
69 sense_reason_t (*execute_write_same)(struct se_cmd *cmd);
70 sense_reason_t (*execute_unmap)(struct se_cmd *cmd,
72 sense_reason_t (*execute_pr_out)(struct se_cmd *cmd, u8 sa, u64 key,
74 sense_reason_t (*execute_pr_in)(struct se_cmd *cmd, u8 sa,
81 void target_complete_cmd(struct se_cmd *, u8);
82 void target_set_cmd_data_length(struct se_cmd *, int);
[all …]
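
Backend ops such as execute_sync_cache above return a sense_reason_t and signal completion through target_complete_cmd(), declared at line 81 of this header. A hedged sketch built only from the declarations shown here (my_execute_sync_cache is a hypothetical name, not an in-tree function):

/*
 * Sketch only: a no-op sync-cache handler that completes the command
 * with GOOD status via target_complete_cmd() and reports no sense data.
 */
#include <scsi/scsi_proto.h>
#include <target/target_core_backend.h>

static sense_reason_t my_execute_sync_cache(struct se_cmd *cmd)
{
	/* nothing to flush in this sketch; report success to the core */
	target_complete_cmd(cmd, SAM_STAT_GOOD);
	return TCM_NO_SENSE;
}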
/linux/drivers/target/
target_core_xcopy.c
116 static int target_xcopy_parse_tiddesc_e4(struct se_cmd *se_cmd, struct xcopy_op *xop, in target_xcopy_parse_tiddesc_e4() argument
176 xop->src_dev = se_cmd->se_dev; in target_xcopy_parse_tiddesc_e4()
193 xop->dst_dev = se_cmd->se_dev; in target_xcopy_parse_tiddesc_e4()
202 static int target_xcopy_parse_target_descriptors(struct se_cmd *se_cmd, in target_xcopy_parse_target_descriptors() argument
206 struct se_device *local_dev = se_cmd->se_dev; in target_xcopy_parse_target_descriptors()
242 rc = target_xcopy_parse_tiddesc_e4(se_cmd, xop, in target_xcopy_parse_target_descriptors()
260 rc = target_xcopy_locate_se_dev_e4(se_cmd->se_sess, in target_xcopy_parse_target_descriptors()
266 rc = target_xcopy_locate_se_dev_e4(se_cmd->se_sess, in target_xcopy_parse_target_descriptors()
392 struct se_cmd se_cmd; member
401 static int xcopy_pt_get_cmd_state(struct se_cmd *se_cmd) in xcopy_pt_get_cmd_state() argument
[all …]
target_core_tmr.c
27 struct se_cmd *se_cmd, in core_tmr_alloc_req() argument
40 se_cmd->se_cmd_flags |= SCF_SCSI_TMR_CDB; in core_tmr_alloc_req()
41 se_cmd->se_tmr_req = tmr; in core_tmr_alloc_req()
42 tmr->task_cmd = se_cmd; in core_tmr_alloc_req()
57 struct se_cmd *cmd) in target_check_cdb_and_preempt()
71 static bool __target_check_io_state(struct se_cmd *se_cmd, in __target_check_io_state() argument
74 struct se_session *sess = se_cmd->se_sess; in __target_check_io_state()
88 spin_lock(&se_cmd->t_state_lock); in __target_check_io_state()
89 if (se_cmd->transport_state & (CMD_T_COMPLETE | CMD_T_FABRIC_STOP)) { in __target_check_io_state()
91 " fabric stop, skipping\n", se_cmd->tag); in __target_check_io_state()
[all …]
target_core_transport.c
54 static void transport_complete_task_attr(struct se_cmd *cmd);
55 static void translate_sense_reason(struct se_cmd *cmd, sense_reason_t reason);
56 static void transport_handle_queue_full(struct se_cmd *cmd,
696 static void target_remove_from_state_list(struct se_cmd *cmd) in target_remove_from_state_list()
712 static void target_remove_from_tmr_list(struct se_cmd *cmd) in target_remove_from_tmr_list()
734 static int transport_cmd_check_stop_to_fabric(struct se_cmd *cmd) in transport_cmd_check_stop_to_fabric()
765 static void transport_lun_remove_cmd(struct se_cmd *cmd) in transport_lun_remove_cmd()
786 struct se_cmd *cmd = container_of(work, struct se_cmd, work); in target_complete_failure_work()
795 static unsigned char *transport_get_sense_buffer(struct se_cmd *cmd) in transport_get_sense_buffer()
814 void transport_copy_sense_to_cmd(struct se_cmd *cmd, unsigned char *sense) in transport_copy_sense_to_cmd()
[all …]
target_core_device.c
47 transport_lookup_cmd_lun(struct se_cmd *se_cmd) in transport_lookup_cmd_lun() argument
50 struct se_session *se_sess = se_cmd->se_sess; in transport_lookup_cmd_lun()
56 deve = target_nacl_find_deve(nacl, se_cmd->orig_fe_lun); in transport_lookup_cmd_lun()
60 if (se_cmd->data_direction == DMA_TO_DEVICE) in transport_lookup_cmd_lun()
62 se_cmd->data_length); in transport_lookup_cmd_lun()
63 else if (se_cmd->data_direction == DMA_FROM_DEVICE) in transport_lookup_cmd_lun()
65 se_cmd->data_length); in transport_lookup_cmd_lun()
67 if ((se_cmd->data_direction == DMA_TO_DEVICE) && in transport_lookup_cmd_lun()
71 se_cmd->se_tfo->fabric_name, in transport_lookup_cmd_lun()
72 se_cmd->orig_fe_lun); in transport_lookup_cmd_lun()
[all …]
target_core_user.c
177 struct se_cmd *se_cmd; member
581 struct se_cmd *se_cmd = cmd->se_cmd; in tcmu_cmd_set_block_cnts() local
584 cmd->dbi_cnt = DIV_ROUND_UP(se_cmd->data_length, blk_size); in tcmu_cmd_set_block_cnts()
586 if (se_cmd->se_cmd_flags & SCF_BIDI) { in tcmu_cmd_set_block_cnts()
587 BUG_ON(!(se_cmd->t_bidi_data_sg && se_cmd->t_bidi_data_nents)); in tcmu_cmd_set_block_cnts()
588 for (i = 0, len = 0; i < se_cmd->t_bidi_data_nents; i++) in tcmu_cmd_set_block_cnts()
589 len += se_cmd->t_bidi_data_sg[i].length; in tcmu_cmd_set_block_cnts()
633 static struct tcmu_cmd *tcmu_alloc_cmd(struct se_cmd *se_cmd) in tcmu_alloc_cmd() argument
635 struct se_device *se_dev = se_cmd->se_dev; in tcmu_alloc_cmd()
644 tcmu_cmd->se_cmd = se_cmd; in tcmu_alloc_cmd()
[all …]
target_core_pr.h
62 extern sense_reason_t target_scsi2_reservation_release(struct se_cmd *);
63 extern sense_reason_t target_scsi2_reservation_reserve(struct se_cmd *);
76 extern sense_reason_t target_scsi3_emulate_pr_in(struct se_cmd *);
77 extern sense_reason_t target_scsi3_emulate_pr_out(struct se_cmd *);
78 extern sense_reason_t target_check_reservation(struct se_cmd *);
target_core_alua.h
85 extern sense_reason_t target_emulate_report_target_port_groups(struct se_cmd *);
86 extern sense_reason_t target_emulate_set_target_port_groups(struct se_cmd *);
87 extern sense_reason_t target_emulate_report_referrals(struct se_cmd *);
88 extern int core_alua_check_nonop_delay(struct se_cmd *);
149 extern sense_reason_t target_alua_state_check(struct se_cmd *cmd);
target_core_spc.c
72 spc_emulate_inquiry_std(struct se_cmd *cmd, unsigned char *buf) in spc_emulate_inquiry_std()
157 spc_emulate_evpd_80(struct se_cmd *cmd, unsigned char *buf) in spc_emulate_evpd_80()
221 spc_emulate_evpd_83(struct se_cmd *cmd, unsigned char *buf) in spc_emulate_evpd_83()
466 spc_emulate_evpd_86(struct se_cmd *cmd, unsigned char *buf) in spc_emulate_evpd_86()
508 spc_emulate_evpd_b0(struct se_cmd *cmd, unsigned char *buf) in spc_emulate_evpd_b0()
624 spc_emulate_evpd_b1(struct se_cmd *cmd, unsigned char *buf) in spc_emulate_evpd_b1()
637 spc_emulate_evpd_b2(struct se_cmd *cmd, unsigned char *buf) in spc_emulate_evpd_b2()
701 spc_emulate_evpd_b3(struct se_cmd *cmd, unsigned char *buf) in spc_emulate_evpd_b3()
714 spc_emulate_evpd_00(struct se_cmd *cmd, unsigned char *buf);
718 sense_reason_t (*emulate)(struct se_cmd *, unsigned char *);
[all …]
target_core_sbc.c
28 sbc_check_prot(struct se_device *, struct se_cmd *, unsigned char, u32, bool);
29 static sense_reason_t sbc_execute_unmap(struct se_cmd *cmd);
32 sbc_emulate_readcapacity(struct se_cmd *cmd) in sbc_emulate_readcapacity()
75 sbc_emulate_readcapacity_16(struct se_cmd *cmd) in sbc_emulate_readcapacity_16()
138 sbc_emulate_startstop(struct se_cmd *cmd) in sbc_emulate_startstop()
169 sector_t sbc_get_write_same_sectors(struct se_cmd *cmd) in sbc_get_write_same_sectors()
193 sbc_execute_write_same_unmap(struct se_cmd *cmd) in sbc_execute_write_same_unmap()
210 sbc_emulate_noop(struct se_cmd *cmd) in sbc_emulate_noop()
216 static inline u32 sbc_get_size(struct se_cmd *cmd, u32 sectors) in sbc_get_size()
274 sbc_setup_write_same(struct se_cmd *cmd, unsigned char flags, in sbc_setup_write_same()
[all …]
target_core_ua.h
36 extern sense_reason_t target_scsi3_ua_check(struct se_cmd *);
40 extern bool core_scsi3_ua_for_check_condition(struct se_cmd *, u8 *, u8 *,
42 extern int core_scsi3_ua_clear_for_request_sense(struct se_cmd *,
/linux/drivers/target/tcm_fc/
tfc_cmd.c
35 struct se_cmd *se_cmd; in _ft_dump_cmd() local
39 se_cmd = &cmd->se_cmd; in _ft_dump_cmd()
41 caller, cmd, cmd->sess, cmd->seq, se_cmd); in _ft_dump_cmd()
44 caller, cmd, se_cmd->t_data_nents, in _ft_dump_cmd()
45 se_cmd->data_length, se_cmd->se_cmd_flags); in _ft_dump_cmd()
47 for_each_sg(se_cmd->t_data_sg, sg, se_cmd->t_data_nents, count) in _ft_dump_cmd()
81 target_free_tag(sess->se_sess, &cmd->se_cmd); in ft_free_cmd()
85 void ft_release_cmd(struct se_cmd *se_cmd) in ft_release_cmd() argument
87 struct ft_cmd *cmd = container_of(se_cmd, struct ft_cmd, se_cmd); in ft_release_cmd()
92 int ft_check_stop_free(struct se_cmd *se_cmd) in ft_check_stop_free() argument
[all …]
tfc_io.c
41 int ft_queue_data_in(struct se_cmd *se_cmd) in ft_queue_data_in() argument
43 struct ft_cmd *cmd = container_of(se_cmd, struct ft_cmd, se_cmd); in ft_queue_data_in()
67 if (se_cmd->scsi_status == SAM_STAT_TASK_SET_FULL) in ft_queue_data_in()
74 remaining = se_cmd->data_length; in ft_queue_data_in()
79 BUG_ON(remaining && !se_cmd->t_data_sg); in ft_queue_data_in()
81 sg = se_cmd->t_data_sg; in ft_queue_data_in()
177 se_cmd->scsi_status = SAM_STAT_TASK_SET_FULL; in ft_queue_data_in()
182 return ft_queue_status(se_cmd); in ft_queue_data_in()
189 target_execute_cmd(&cmd->se_cmd); in ft_execute_work()
197 struct se_cmd *se_cmd = &cmd->se_cmd; in ft_recv_write_data() local
[all …]
tcm_fc.h
109 struct se_cmd se_cmd; /* Local TCM I/O descriptor */ member
144 int ft_check_stop_free(struct se_cmd *);
145 void ft_release_cmd(struct se_cmd *);
146 int ft_queue_status(struct se_cmd *);
147 int ft_queue_data_in(struct se_cmd *);
148 int ft_write_pending(struct se_cmd *);
149 void ft_queue_tm_resp(struct se_cmd *);
150 void ft_aborted_task(struct se_cmd *);
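
The ft_* prototypes above line up one-to-one with the target_core_fabric_ops callbacks listed in target_core_fabric.h at the top of these results. A sketch of how such a table might be wired up; the field names are the pointer members shown in these hits, while the driver's real ops table (not among the hits) initialises many more fields:

/*
 * Sketch only: mapping the tcm_fc callbacks onto the
 * target_core_fabric_ops members shown in these search hits.
 */
#include <target/target_core_fabric.h>
#include "tcm_fc.h"

static const struct target_core_fabric_ops ft_fabric_ops_sketch = {
	.check_stop_free	= ft_check_stop_free,
	.release_cmd		= ft_release_cmd,
	.write_pending		= ft_write_pending,
	.queue_data_in		= ft_queue_data_in,
	.queue_status		= ft_queue_status,
	.queue_tm_rsp		= ft_queue_tm_resp,
	.aborted_task		= ft_aborted_task,
};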
/linux/drivers/scsi/qla2xxx/
tcm_qla2xxx.c
239 transport_generic_free_cmd(&mcmd->se_cmd, 0); in tcm_qla2xxx_complete_mcmd()
273 transport_generic_free_cmd(&cmd->se_cmd, 0); in tcm_qla2xxx_complete_free()
288 cmd->se_cmd.map_tag = tag; in tcm_qla2xxx_get_cmd()
289 cmd->se_cmd.map_cpu = cpu; in tcm_qla2xxx_get_cmd()
296 return target_get_sess_cmd(&cmd->se_cmd, true); in tcm_qla2xxx_get_cmd_ref()
301 target_put_sess_cmd(&cmd->se_cmd); in tcm_qla2xxx_put_cmd_ref()
306 target_free_tag(cmd->sess->se_sess, &cmd->se_cmd); in tcm_qla2xxx_rel_cmd()
331 static int tcm_qla2xxx_check_stop_free(struct se_cmd *se_cmd) in tcm_qla2xxx_check_stop_free() argument
335 if ((se_cmd->se_cmd_flags & SCF_SCSI_TMR_CDB) == 0) { in tcm_qla2xxx_check_stop_free()
336 cmd = container_of(se_cmd, struct qla_tgt_cmd, se_cmd); in tcm_qla2xxx_check_stop_free()
[all …]
qla_target.c
1990 vha->vp_idx, cmd->se_cmd.tag); in abort_cmds_for_lun()
2077 mcmd->se_cmd.cpuid = h->cpuid; in __qlt_24xx_handle_abts()
2085 mcmd->unpacked_lun = abort_cmd->se_cmd.orig_fe_lun; in __qlt_24xx_handle_abts()
2089 mcmd->se_cmd.cpuid = abort_cmd->se_cmd.cpuid; in __qlt_24xx_handle_abts()
2095 queue_work_on(mcmd->se_cmd.cpuid, qla_tgt_wq, &mcmd->work); in __qlt_24xx_handle_abts()
2393 if (cmd->se_cmd.prot_op == TARGET_PROT_NORMAL) { in qlt_pci_map_calc_cnt()
2404 if ((cmd->se_cmd.prot_op == TARGET_PROT_DIN_INSERT) || in qlt_pci_map_calc_cnt()
2405 (cmd->se_cmd.prot_op == TARGET_PROT_DOUT_STRIP)) { in qlt_pci_map_calc_cnt()
2419 if ((cmd->se_cmd.prot_op == TARGET_PROT_DIN_INSERT) || in qlt_pci_map_calc_cnt()
2420 (cmd->se_cmd.prot_op == TARGET_PROT_DOUT_STRIP)) { in qlt_pci_map_calc_cnt()
[all …]
/linux/drivers/usb/gadget/function/
f_tcm.c
65 transport_generic_free_cmd(&cmd->se_cmd, 0); in bot_status_complete()
154 if (cmd->se_cmd.scsi_status == SAM_STAT_GOOD) { in bot_send_status()
202 transport_generic_free_cmd(&cmd->se_cmd, 0); in bot_read_compl()
212 struct se_cmd *se_cmd = &cmd->se_cmd; in bot_send_read_response() local
223 cmd->data_buf = kmalloc(se_cmd->data_length, GFP_ATOMIC); in bot_send_read_response()
227 sg_copy_to_buffer(se_cmd->t_data_sg, in bot_send_read_response()
228 se_cmd->t_data_nents, in bot_send_read_response()
230 se_cmd->data_length); in bot_send_read_response()
235 fu->bot_req_in->num_sgs = se_cmd->t_data_nents; in bot_send_read_response()
236 fu->bot_req_in->sg = se_cmd->t_data_sg; in bot_send_read_response()
[all …]
/linux/drivers/infiniband/ulp/isert/
ib_isert.c
76 isert_prot_cmd(struct isert_conn *conn, struct se_cmd *cmd) in isert_prot_cmd()
1062 data_len = cmd->se_cmd.data_length; in isert_handle_scsi_cmd()
1065 cmd->se_cmd.se_cmd_flags |= SCF_PASSTHROUGH_SG_TO_MEM_NOALLOC; in isert_handle_scsi_cmd()
1079 sg_copy_from_buffer(cmd->se_cmd.t_data_sg, sg_nents, in isert_handle_scsi_cmd()
1085 cmd->se_cmd.t_data_sg = &isert_cmd->sg; in isert_handle_scsi_cmd()
1086 cmd->se_cmd.t_data_nents = 1; in isert_handle_scsi_cmd()
1095 if (cmd->write_data_done == cmd->se_cmd.data_length) { in isert_handle_scsi_cmd()
1108 target_put_sess_cmd(&cmd->se_cmd); in isert_handle_scsi_cmd()
1141 cmd->se_cmd.data_length); in isert_handle_iscsi_dataout()
1144 sg_start = &cmd->se_cmd.t_data_sg[sg_off]; in isert_handle_iscsi_dataout()
[all …]
/linux/drivers/xen/
xen-scsiback.c
135 struct se_cmd se_cmd; member
404 resid = pending_req->se_cmd.residual_count; in scsiback_cmd_done()
418 target_put_sess_cmd(&pending_req->se_cmd); in scsiback_cmd_done()
423 struct se_cmd *se_cmd = &pending_req->se_cmd; in scsiback_cmd_exec() local
427 se_cmd->tag = pending_req->rqid; in scsiback_cmd_exec()
428 target_init_cmd(se_cmd, sess, pending_req->sense_buffer, in scsiback_cmd_exec()
432 if (target_submit_prep(se_cmd, pending_req->cmnd, pending_req->sgl, in scsiback_cmd_exec()
436 target_submit(se_cmd); in scsiback_cmd_exec()
620 struct se_cmd *se_cmd = &pending_req->se_cmd; in scsiback_device_action() local
626 rc = target_submit_tmr(&pending_req->se_cmd, nexus->tvn_se_sess, in scsiback_device_action()
[all …]
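
The scsiback_cmd_exec() hits above use the split submission interface declared in target_core_fabric.h: target_init_cmd(), target_submit_prep(), then target_submit(). The sketch below reconstructs that sequence; the parameter lists and flag values are from memory rather than from these hits, so treat them as assumptions to verify against the header (my_queue_cmd is a hypothetical helper with no scatterlists attached):

/*
 * Sketch only: three-step command submission as used by xen-scsiback.
 * Argument order and flags are assumed; check target_core_fabric.h.
 */
#include <target/target_core_base.h>
#include <target/target_core_fabric.h>

static void my_queue_cmd(struct se_cmd *se_cmd, struct se_session *sess,
			 unsigned char *cdb, unsigned char *sense,
			 u64 unpacked_lun, u32 data_length, int dir)
{
	/* 1. initialise the descriptor and take a session command reference */
	if (target_init_cmd(se_cmd, sess, sense, unpacked_lun, data_length,
			    TCM_SIMPLE_TAG, dir, TARGET_SCF_ACK_KREF))
		return;		/* session is being torn down */

	/* 2. parse the CDB; no caller-provided scatterlists in this sketch */
	if (target_submit_prep(se_cmd, cdb, NULL, 0, NULL, 0, NULL, 0,
			       GFP_KERNEL))
		return;		/* prep failed; the core handles the response */

	/* 3. hand the command to the target core for execution */
	target_submit(se_cmd);
}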
/linux/drivers/target/iscsi/
iscsi_target_seq_pdu_list.c
213 if (cmd->se_cmd.data_direction == DMA_TO_DEVICE) in iscsit_determine_counts_for_list()
224 unsolicited_data_length = min(cmd->se_cmd.data_length, in iscsit_determine_counts_for_list()
227 while (offset < cmd->se_cmd.data_length) { in iscsit_determine_counts_for_list()
240 if ((offset + mdsl) >= cmd->se_cmd.data_length) { in iscsit_determine_counts_for_list()
242 (cmd->se_cmd.data_length - offset); in iscsit_determine_counts_for_list()
243 offset += (cmd->se_cmd.data_length - offset); in iscsit_determine_counts_for_list()
262 if ((offset + mdsl) >= cmd->se_cmd.data_length) { in iscsit_determine_counts_for_list()
263 offset += (cmd->se_cmd.data_length - offset); in iscsit_determine_counts_for_list()
296 if (cmd->se_cmd.data_direction == DMA_TO_DEVICE) in iscsit_do_build_pdu_and_seq_lists()
310 unsolicited_data_length = min(cmd->se_cmd.data_length, in iscsit_do_build_pdu_and_seq_lists()
[all …]
iscsi_target.c
826 const bool do_put = cmd->se_cmd.se_tfo != NULL; in iscsit_add_reject_from_cmd()
859 target_put_sess_cmd(&cmd->se_cmd); in iscsit_add_reject_from_cmd()
895 if (ent >= cmd->se_cmd.t_data_nents) { in iscsit_map_iovec()
900 sg = &cmd->se_cmd.t_data_sg[ent]; in iscsit_map_iovec()
930 for_each_sg(cmd->se_cmd.t_data_sg, sg, in iscsit_map_iovec()
931 cmd->se_cmd.t_data_nents, i) { in iscsit_map_iovec()
981 u32 iov_count = max(1UL, DIV_ROUND_UP(cmd->se_cmd.data_length, PAGE_SIZE)); in iscsit_allocate_iovecs()
1181 __target_init_cmd(&cmd->se_cmd, &iscsi_ops, in iscsit_setup_scsi_cmd()
1192 target_get_sess_cmd(&cmd->se_cmd, true); in iscsit_setup_scsi_cmd()
1194 cmd->se_cmd.tag = (__force u32)cmd->init_task_tag; in iscsit_setup_scsi_cmd()
[all …]
/linux/drivers/scsi/ibmvscsi_tgt/
ibmvscsi_tgt.c
58 static void ibmvscsis_determine_resid(struct se_cmd *se_cmd, in ibmvscsis_determine_resid() argument
61 u32 residual_count = se_cmd->residual_count; in ibmvscsis_determine_resid()
66 if (se_cmd->se_cmd_flags & SCF_UNDERFLOW_BIT) { in ibmvscsis_determine_resid()
67 if (se_cmd->data_direction == DMA_TO_DEVICE) { in ibmvscsis_determine_resid()
71 } else if (se_cmd->data_direction == DMA_FROM_DEVICE) { in ibmvscsis_determine_resid()
76 } else if (se_cmd->se_cmd_flags & SCF_OVERFLOW_BIT) { in ibmvscsis_determine_resid()
77 if (se_cmd->data_direction == DMA_TO_DEVICE) { in ibmvscsis_determine_resid()
81 } else if (se_cmd->data_direction == DMA_FROM_DEVICE) { in ibmvscsis_determine_resid()
1290 memset(&cmd->se_cmd, 0, sizeof(cmd->se_cmd)); in ibmvscsis_get_free_cmd()
1906 if (cmd->se_cmd.transport_state & CMD_T_ABORTED && in ibmvscsis_send_messages()
[all …]
/linux/drivers/target/sbp/
sbp_target.c
922 req->se_cmd.map_tag = tag; in sbp_mgt_get_req()
923 req->se_cmd.map_cpu = cpu; in sbp_mgt_get_req()
924 req->se_cmd.tag = next_orb; in sbp_mgt_get_req()
1219 req->se_cmd.tag = req->orb_pointer; in sbp_handle_command()
1220 target_submit_cmd(&req->se_cmd, sess->se_sess, req->cmd_buf, in sbp_handle_command()
1249 if (req->se_cmd.data_direction == DMA_FROM_DEVICE) { in sbp_rw_data()
1282 length = req->se_cmd.data_length; in sbp_rw_data()
1285 sg_miter_start(&iter, req->se_cmd.t_data_sg, req->se_cmd.t_data_nents, in sbp_rw_data()
1350 target_put_sess_cmd(&req->se_cmd); in sbp_send_status()
1356 struct se_cmd *se_cmd = &req->se_cmd; in sbp_sense_mangle() local
[all …]
/linux/drivers/vhost/
scsi.c
133 struct se_cmd tvc_se_cmd;
263 struct se_cmd se_cmd; member
418 static void vhost_scsi_release_cmd_res(struct se_cmd *se_cmd) in vhost_scsi_release_cmd_res() argument
420 struct vhost_scsi_cmd *tv_cmd = container_of(se_cmd, in vhost_scsi_release_cmd_res()
456 sbitmap_clear_bit(&svq->scsi_tags, se_cmd->map_tag); in vhost_scsi_release_cmd_res()
482 static void vhost_scsi_release_cmd(struct se_cmd *se_cmd) in vhost_scsi_release_cmd() argument
484 if (se_cmd->se_cmd_flags & SCF_SCSI_TMR_CDB) { in vhost_scsi_release_cmd()
485 struct vhost_scsi_tmf *tmf = container_of(se_cmd, in vhost_scsi_release_cmd()
486 struct vhost_scsi_tmf, se_cmd); in vhost_scsi_release_cmd()
490 struct vhost_scsi_cmd *cmd = container_of(se_cmd, in vhost_scsi_release_cmd()
[all …]
