
Searched refs:async_data (Results 1 – 14 of 14) sorted by relevance

/linux/drivers/vfio/pci/mlx5/
cmd.c
552 struct mlx5vf_async_data *async_data) in mlx5vf_save_callback_complete() argument
554 kvfree(async_data->out); in mlx5vf_save_callback_complete()
561 struct mlx5vf_async_data *async_data = container_of(_work, in mlx5vf_mig_file_cleanup_cb() local
563 struct mlx5_vf_migration_file *migf = container_of(async_data, in mlx5vf_mig_file_cleanup_cb()
564 struct mlx5_vf_migration_file, async_data); in mlx5vf_mig_file_cleanup_cb()
567 if (async_data->status) { in mlx5vf_mig_file_cleanup_cb()
568 mlx5vf_put_data_buffer(async_data->buf); in mlx5vf_mig_file_cleanup_cb()
569 if (async_data->header_buf) in mlx5vf_mig_file_cleanup_cb()
570 mlx5vf_put_data_buffer(async_data->header_buf); in mlx5vf_mig_file_cleanup_cb()
571 if (!async_data->stop_copy_chunk && in mlx5vf_mig_file_cleanup_cb()
[all …]
cmd.h
116 struct mlx5vf_async_data async_data; member
main.c
659 INIT_WORK(&migf->async_data.work, mlx5vf_mig_file_cleanup_cb); in mlx5vf_pci_save_device_data()
1071 cancel_work_sync(&mvdev->saving_migf->async_data.work); in mlx5vf_disable_fds()
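
Note on the mlx5 hits above: async_data embeds a work_struct that INIT_WORK() (main.c line 659) ties to mlx5vf_mig_file_cleanup_cb(), and the callback recovers both containing structures with container_of(). A minimal, self-contained sketch of that back-pointer arithmetic follows; the structs are simplified stand-ins, not the real kernel definitions.

/* Userspace sketch of the container_of pattern used by the cleanup callback:
 * the work item sits inside mlx5vf_async_data, which sits inside the
 * migration file, so the callback can walk back to both containers from
 * the work pointer alone. Simplified stand-in types, not the kernel's. */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct work_struct { int pending; };	/* placeholder for the kernel type */

struct mlx5vf_async_data {
	struct work_struct work;
	int status;
};

struct mlx5_vf_migration_file {
	int id;
	struct mlx5vf_async_data async_data;
};

static void cleanup_cb(struct work_struct *_work)
{
	struct mlx5vf_async_data *async_data =
		container_of(_work, struct mlx5vf_async_data, work);
	struct mlx5_vf_migration_file *migf =
		container_of(async_data, struct mlx5_vf_migration_file, async_data);

	printf("cleanup for migf %d, status %d\n", migf->id, async_data->status);
}

int main(void)
{
	struct mlx5_vf_migration_file migf = {
		.id = 1,
		.async_data = { .status = -5 },
	};

	cleanup_cb(&migf.async_data.work);
	return 0;
}
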
/linux/io_uring/
uring_cmd.c
28 req->async_data = cache; in io_uring_async_get()
32 cache = req->async_data; in io_uring_async_get()
42 struct io_uring_cmd_data *cache = req->async_data; in io_req_uring_cleanup()
53 req->async_data = NULL; in io_req_uring_cleanup()
207 memcpy(req->async_data, sqe, uring_sqe_size(req->ctx)); in io_uring_cmd_prep_setup()
208 ioucmd->sqe = req->async_data; in io_uring_cmd_prep_setup()
272 struct io_uring_cmd_data *cache = req->async_data; in io_uring_cmd()
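
Note on uring_cmd.c: lines 207-208 copy the SQE into req->async_data and repoint the command at that copy, so its payload stays valid after the shared submission ring slot is recycled. A rough sketch of the same idea; the types and sizes are illustrative stand-ins, not the kernel's.

#include <stdlib.h>
#include <string.h>

struct io_uring_sqe { unsigned char bytes[64]; };	/* stand-in 64-byte SQE */

struct io_kiocb {
	void *async_data;			/* request-private scratch area */
	const struct io_uring_sqe *sqe;		/* points at the private copy after prep */
};

static int cmd_prep_setup(struct io_kiocb *req, const struct io_uring_sqe *sqe)
{
	req->async_data = malloc(sizeof(*sqe));
	if (!req->async_data)
		return -1;
	/* After this copy, the original ring slot may be reused safely. */
	memcpy(req->async_data, sqe, sizeof(*sqe));
	req->sqe = req->async_data;
	return 0;
}

int main(void)
{
	struct io_uring_sqe sqe = { .bytes = { 42 } };
	struct io_kiocb req = { 0 };

	if (cmd_prep_setup(&req, &sqe) == 0)
		free(req.async_data);
	return 0;
}
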
timeout.c
41 struct io_timeout_data *data = req->async_data; in io_is_timeout_noseq()
71 struct io_timeout_data *data = req->async_data; in io_timeout_complete()
111 struct io_timeout_data *io = req->async_data; in io_kill_timeout()
236 struct io_timeout_data *io = link->async_data; in __io_disarm_linked_timeout()
292 io = req->async_data; in io_timeout_extract()
407 io = req->async_data; in io_linked_timeout_update()
429 data = req->async_data; in io_timeout_update()
550 data = req->async_data; in __io_timeout_prep()
590 struct io_timeout_data *data = req->async_data; in io_timeout()
648 struct io_timeout_data *data = req->async_data; in io_queue_linked_timeout()
rw.c
160 struct io_async_rw *rw = req->async_data; in io_rw_recycle()
171 req->async_data = NULL; in io_rw_recycle()
224 req->async_data = rw; in io_rw_alloc_async()
229 rw = req->async_data; in io_rw_alloc_async()
251 rw = req->async_data; in io_prep_rw_setup()
346 io = req->async_data; in io_prep_rw_fixed()
388 io_rw_iovec_free(req->async_data); in io_readv_writev_cleanup()
411 struct io_async_rw *io = req->async_data; in io_resubmit_prep()
496 struct io_async_rw *io = req->async_data; in io_fixup_rw_res()
734 struct io_async_rw *io = req->async_data; in io_rw_should_retry()
[all …]
net.c
139 struct io_async_msghdr *hdr = req->async_data; in io_netmsg_recycle()
153 req->async_data = NULL; in io_netmsg_recycle()
171 req->async_data = hdr; in io_msg_alloc_async()
176 hdr = req->async_data; in io_msg_alloc_async()
354 struct io_async_msghdr *io = req->async_data; in io_sendmsg_recvmsg_cleanup()
362 struct io_async_msghdr *kmsg = req->async_data; in io_send_setup()
399 struct io_async_msghdr *kmsg = req->async_data; in io_sendmsg_setup()
534 struct io_async_msghdr *kmsg = req->async_data; in io_sendmsg()
584 struct io_async_msghdr *kmsg = req->async_data; in io_send()
984 struct io_async_msghdr *kmsg = req->async_data; in io_recvmsg()
[all …]
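
Several io_uring hits (io_rw_recycle(), io_netmsg_recycle(), io_uring_async_get()) share a recycling idiom: on completion the async_data buffer goes back into a small per-context cache rather than being freed, so the next request of the same kind can reuse it. A simplified userspace sketch of such a cache; the names and slot count are illustrative, not the kernel's actual cache helpers.

#include <stdlib.h>

#define CACHE_SLOTS 4				/* illustrative cache depth */

struct io_kiocb { void *async_data; };

struct async_cache {
	void *slot[CACHE_SLOTS];
	int nr;					/* buffers currently parked */
};

/* Take a buffer from the cache, or allocate a fresh one. */
static void *async_get(struct async_cache *c, size_t size)
{
	if (c->nr > 0)
		return c->slot[--c->nr];
	return malloc(size);
}

/* On completion: park the buffer for reuse if there is room, else free it. */
static void async_recycle(struct async_cache *c, struct io_kiocb *req)
{
	if (!req->async_data)
		return;
	if (c->nr < CACHE_SLOTS)
		c->slot[c->nr++] = req->async_data;
	else
		free(req->async_data);
	req->async_data = NULL;
}

int main(void)
{
	struct async_cache cache = { .nr = 0 };
	struct io_kiocb req = { .async_data = async_get(&cache, 128) };

	async_recycle(&cache, &req);	/* parked for the next request */
	free(async_get(&cache, 128));	/* the same buffer comes back */
	return 0;
}
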
poll.c
110 return req->async_data; in io_poll_get_double()
516 (struct io_poll **) &pt->req->async_data); in io_poll_queue_proc()
io_uring.c
415 kfree(req->async_data); in io_clean_op()
416 req->async_data = NULL; in io_clean_op()
944 req->async_data = NULL; in io_preinit_req()
1648 req->async_data = kmalloc(def->async_size, GFP_KERNEL); in io_alloc_async_data()
1649 if (req->async_data) { in io_alloc_async_data()
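
io_uring.c shows the generic lifecycle: io_preinit_req() starts req->async_data out as NULL, io_alloc_async_data() allocates def->async_size bytes on demand, and io_clean_op() kfree()s the blob and clears the pointer. A stripped-down sketch of that lifecycle, with plain malloc/free standing in for the kernel allocators and an illustrative opcode table:

#include <stdlib.h>

struct io_issue_def {
	size_t async_size;		/* per-opcode size of the async_data payload */
};

struct io_kiocb {
	int opcode;
	void *async_data;		/* NULL until the opcode needs async state */
};

static int io_alloc_async_data(struct io_kiocb *req, const struct io_issue_def *defs)
{
	const struct io_issue_def *def = &defs[req->opcode];

	req->async_data = malloc(def->async_size);
	if (!req->async_data)
		return -1;
	return 0;
}

static void io_clean_op(struct io_kiocb *req)
{
	free(req->async_data);		/* mirrors the kfree() + NULL reset above */
	req->async_data = NULL;
}

int main(void)
{
	static const struct io_issue_def defs[] = { { .async_size = 64 } };
	struct io_kiocb req = { .opcode = 0 };

	if (io_alloc_async_data(&req, defs) == 0)
		io_clean_op(&req);
	return 0;
}
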
/linux/include/linux/io_uring/
cmd.h
123 return cmd_to_io_kiocb(cmd)->async_data; in io_uring_cmd_get_async_data()
/linux/drivers/net/ethernet/qlogic/qed/
qed_iwarp.c
739 union async_output *async_data; in qed_iwarp_mpa_received() local
745 async_data = &ep->ep_buffer_virt->async_output; in qed_iwarp_mpa_received()
747 mpa_rev = async_data->mpa_request.mpa_handshake_mode; in qed_iwarp_mpa_received()
750 async_data->mpa_request.ulp_data_len, in qed_iwarp_mpa_received()
801 async_data->mpa_request.ulp_data_len, mpa_hdr_size); in qed_iwarp_mpa_received()
806 ulp_data_len = le16_to_cpu(async_data->mpa_request.ulp_data_len); in qed_iwarp_mpa_received()
944 union async_output *async_data; in qed_iwarp_parse_private_data() local
960 async_data = &ep->ep_buffer_virt->async_output; in qed_iwarp_parse_private_data()
963 ulp_data_len = le16_to_cpu(async_data->mpa_response.ulp_data_len); in qed_iwarp_parse_private_data()
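
In qed_iwarp.c, async_data is a local pointer into the device-written async_output buffer, and multi-byte fields such as ulp_data_len are little-endian on the wire, so they are converted with le16_to_cpu() before use. A small host-side sketch of that conversion, assembling the value by hand instead of using the kernel helper:

#include <stdint.h>
#include <stdio.h>

/* Portable stand-in for le16_to_cpu(): build a 16-bit value from raw
 * little-endian bytes regardless of host endianness. */
static uint16_t le16_to_host(const uint8_t *p)
{
	return (uint16_t)(p[0] | (p[1] << 8));
}

int main(void)
{
	/* Pretend these are the ulp_data_len bytes as they sit in the buffer. */
	uint8_t wire[2] = { 0x34, 0x12 };

	printf("ulp_data_len = %u\n", (unsigned int)le16_to_host(wire));	/* 4660 */
	return 0;
}
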
/linux/drivers/scsi/be2iscsi/
be_main.c
1632 pasync_ctx->async_data.buffer_size) in beiscsi_hdl_fwd_pdu()
1744 pasync_sge = pasync_ctx->async_data.ring_base; in beiscsi_hdq_post_handles()
1745 pi = pasync_ctx->async_data.pi; in beiscsi_hdq_post_handles()
1771 pasync_ctx->async_data.pi = pi; in beiscsi_hdq_post_handles()
2832 pasync_ctx->async_data.ring_base = in hwi_init_async_pdu_ctx()
2845 pasync_ctx->async_data.handle_base = in hwi_init_async_pdu_ctx()
2853 pasync_ctx->async_data.handle_base; in hwi_init_async_pdu_ctx()
2873 pasync_ctx->async_data.pi = 0; in hwi_init_async_pdu_ctx()
2874 pasync_ctx->async_data.buffer_size = p->defpdu_data_sz; in hwi_init_async_pdu_ctx()
2875 pasync_ctx->async_data.va_base = in hwi_init_async_pdu_ctx()
[all …]
be_main.h
591 struct hd_async_buf_context async_data; member
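
The be2iscsi hits use async_data as bookkeeping for a ring of default-PDU data buffers: ring_base is the SGE ring the hardware consumes, pi is a producer index advanced as handles are posted, and buffer_size/va_base describe the backing memory. A very rough sketch of posting into such a ring; the struct layout, names, and fixed entry count are illustrative only, and the real driver's accounting is more involved:

#include <stdint.h>

#define RING_ENTRIES 128		/* illustrative; the driver sizes this at init */

struct sge { uint64_t addr; uint32_t len; };

struct async_buf_ctx {
	struct sge *ring_base;		/* SGE ring the hardware consumes */
	uint32_t pi;			/* producer index, wraps at RING_ENTRIES */
	uint32_t buffer_size;		/* size of each default-PDU data buffer */
	void *va_base;			/* start of the backing buffer area */
};

/* Post one data-buffer handle at the current producer index. */
static void post_data_buffer(struct async_buf_ctx *ctx, uint64_t dma_addr)
{
	struct sge *sge = &ctx->ring_base[ctx->pi];

	sge->addr = dma_addr;
	sge->len = ctx->buffer_size;
	ctx->pi = (ctx->pi + 1) % RING_ENTRIES;		/* advance and wrap */
}

int main(void)
{
	static struct sge ring[RING_ENTRIES];
	struct async_buf_ctx ctx = { .ring_base = ring, .buffer_size = 4096 };

	post_data_buffer(&ctx, 0x1000);			/* pi goes from 0 to 1 */
	return 0;
}
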
/linux/include/linux/
io_uring_types.h
672 void *async_data; member