Searched refs:async_data (Results 1 – 16 of 16) sorted by relevance
552  struct mlx5vf_async_data *async_data)  in mlx5vf_save_callback_complete() argument
554  kvfree(async_data->out);  in mlx5vf_save_callback_complete()
561  struct mlx5vf_async_data *async_data = container_of(_work,  in mlx5vf_mig_file_cleanup_cb() local
563  struct mlx5_vf_migration_file *migf = container_of(async_data,  in mlx5vf_mig_file_cleanup_cb()
564  struct mlx5_vf_migration_file, async_data);  in mlx5vf_mig_file_cleanup_cb()
567  if (async_data->status) {  in mlx5vf_mig_file_cleanup_cb()
568  mlx5vf_put_data_buffer(async_data->buf);  in mlx5vf_mig_file_cleanup_cb()
569  if (async_data->header_buf)  in mlx5vf_mig_file_cleanup_cb()
570  mlx5vf_put_data_buffer(async_data->header_buf);  in mlx5vf_mig_file_cleanup_cb()
571  if (!async_data->stop_copy_chunk &&  in mlx5vf_mig_file_cleanup_cb()
[all …]
116 struct mlx5vf_async_data async_data; member
659  INIT_WORK(&migf->async_data.work, mlx5vf_mig_file_cleanup_cb);  in mlx5vf_pci_save_device_data()
1071  cancel_work_sync(&mvdev->saving_migf->async_data.work);  in mlx5vf_disable_fds()
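
The cleanup hits above (lines 561–564) show the usual embedded-work pattern: the work_struct handed to mlx5vf_mig_file_cleanup_cb() lives inside mlx5vf_async_data, which in turn is the async_data member of mlx5_vf_migration_file, so the callback walks back out with two container_of() steps. A minimal user-space sketch of that double embedding, using simplified stand-in structs (work_struct, async_data and migration_file here are placeholders, not the kernel types):

    #include <stddef.h>
    #include <stdio.h>

    /* user-space stand-in for the kernel's container_of() */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct work_struct { int pending; };              /* placeholder */
    struct async_data  { struct work_struct work; };  /* embedded work item */
    struct migration_file { int id; struct async_data async_data; };

    /* the callback only receives the embedded work pointer, as in
     * mlx5vf_mig_file_cleanup_cb(), and recovers both containers */
    static void cleanup_cb(struct work_struct *_work)
    {
            struct async_data *ad =
                    container_of(_work, struct async_data, work);
            struct migration_file *migf =
                    container_of(ad, struct migration_file, async_data);

            printf("cleaning up migration file %d\n", migf->id);
    }

    int main(void)
    {
            struct migration_file migf = { .id = 42 };

            cleanup_cb(&migf.async_data.work);
            return 0;
    }
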
49  req->async_data = NULL;  in __io_futex_complete()
56  struct io_futex_data *ifd = req->async_data;  in io_futex_complete()
68  struct futex_vector *futexv = req->async_data;  in io_futexv_complete()
80  kfree(req->async_data);  in io_futexv_complete()
97  struct io_futex_data *ifd = req->async_data;  in __io_futex_cancel()
237  req->async_data = futexv;  in io_futexv_prep()
257  struct futex_vector *futexv = req->async_data;  in io_futexv_wait()
273  req->async_data = NULL;  in io_futexv_wait()
329  req->async_data = ifd;  in io_futex_wait()
37  struct io_waitid_async *iwa = req->async_data;  in io_waitid_free()
40  kfree(req->async_data);  in io_waitid_free()
41  req->async_data = NULL;  in io_waitid_free()
138  struct io_waitid_async *iwa = req->async_data;  in __io_waitid_cancel()
209  struct io_waitid_async *iwa = req->async_data;  in io_waitid_drop_issue_ref()
226  struct io_waitid_async *iwa = req->async_data;  in io_waitid_cb()
308  struct io_waitid_async *iwa = req->async_data;  in io_waitid()
41  struct io_timeout_data *data = req->async_data;  in io_is_timeout_noseq()
71  struct io_timeout_data *data = req->async_data;  in io_timeout_complete()
111  struct io_timeout_data *io = req->async_data;  in io_kill_timeout()
236  struct io_timeout_data *io = link->async_data;  in __io_disarm_linked_timeout()
292  io = req->async_data;  in io_timeout_extract()
407  io = req->async_data;  in io_linked_timeout_update()
429  data = req->async_data;  in io_timeout_update()
589  struct io_timeout_data *data = req->async_data;  in io_timeout()
647  struct io_timeout_data *data = req->async_data;  in io_queue_linked_timeout()
154  struct io_async_rw *rw = req->async_data;  in io_rw_recycle()
161  req->async_data = NULL;  in io_rw_recycle()
225  rw = req->async_data;  in io_prep_rw_setup()
257  io = req->async_data;  in io_prep_rw_pi()
379  io = req->async_data;  in io_prep_rw_fixed()
447  struct io_async_rw *io = req->async_data;  in io_rw_should_reissue()
510  struct io_async_rw *io = req->async_data;  in io_fixup_rw_res()
743  struct io_async_rw *io = req->async_data;  in io_rw_should_retry()
841  struct io_async_rw *io = req->async_data;  in io_rw_init_file()
860  struct io_async_rw *io = req->async_data;  in __io_read()
[all …]
139  struct io_async_msghdr *hdr = req->async_data;  in io_netmsg_recycle()
150  req->async_data = NULL;  in io_netmsg_recycle()
344  struct io_async_msghdr *io = req->async_data;  in io_sendmsg_recvmsg_cleanup()
352  struct io_async_msghdr *kmsg = req->async_data;  in io_send_setup()
389  struct io_async_msghdr *kmsg = req->async_data;  in io_sendmsg_setup()
524  struct io_async_msghdr *kmsg = req->async_data;  in io_sendmsg()
622  struct io_async_msghdr *kmsg = req->async_data;  in io_send()
987  struct io_async_msghdr *kmsg = req->async_data;  in io_recvmsg()
1140  struct io_async_msghdr *kmsg = req->async_data;  in io_recv()
1218  struct io_async_msghdr *io = req->async_data;  in io_send_zc_cleanup()
[all …]
232  req->async_data = io_cache_alloc(cache, GFP_KERNEL);  in io_uring_alloc_async_data()
237  req->async_data = kmalloc(def->async_size, GFP_KERNEL);  in io_uring_alloc_async_data()
239  if (req->async_data)  in io_uring_alloc_async_data()
241  return req->async_data;  in io_uring_alloc_async_data()
22  struct io_uring_cmd_data *cache = req->async_data;  in io_req_uring_cleanup()
33  req->async_data = NULL;  in io_req_uring_cleanup()
418  kfree(req->async_data);  in io_clean_op()
419  req->async_data = NULL;  in io_clean_op()
946  req->async_data = NULL;  in io_preinit_req()
123 return cmd_to_io_kiocb(cmd)->async_data; in io_uring_cmd_get_async_data()
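
Taken together, the io_uring hits above trace one lifecycle: io_uring_alloc_async_data() (lines 232–241) hands out per-request state from a cache or kmalloc() and stores it in req->async_data; the recycle helpers (io_rw_recycle(), io_netmsg_recycle(), io_req_uring_cleanup()) park it again and clear the pointer; io_clean_op() kfree()s it as a last resort. A minimal user-space sketch of that allocate/recycle shape, assuming a hypothetical struct request and a trivial single-slot cache in place of the kernel's io_alloc_cache (all callers are assumed to request the same size):

    #include <stdlib.h>
    #include <stdio.h>

    /* simplified stand-in; the kernel request type is struct io_kiocb */
    struct request { void *async_data; };

    static void *one_slot_cache;   /* trivial single-entry cache */

    /* allocate (or reuse) per-request async state: try the cache first,
     * then fall back to malloc(), mirroring io_uring_alloc_async_data() */
    static void *alloc_async_data(struct request *req, size_t size)
    {
            if (one_slot_cache) {
                    req->async_data = one_slot_cache;
                    one_slot_cache = NULL;
            } else {
                    req->async_data = malloc(size);
            }
            return req->async_data;
    }

    /* on completion either park the buffer back in the cache (the recycle
     * path) or free it outright (the io_clean_op() path); the slot is
     * always cleared so the request never dangles */
    static void recycle_async_data(struct request *req)
    {
            if (!one_slot_cache)
                    one_slot_cache = req->async_data;
            else
                    free(req->async_data);
            req->async_data = NULL;
    }

    int main(void)
    {
            struct request req = { 0 };
            char *msg = alloc_async_data(&req, 64);

            snprintf(msg, 64, "per-request async state");
            puts(msg);
            recycle_async_data(&req);
            return 0;
    }
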
739  union async_output *async_data;  in qed_iwarp_mpa_received() local
745  async_data = &ep->ep_buffer_virt->async_output;  in qed_iwarp_mpa_received()
747  mpa_rev = async_data->mpa_request.mpa_handshake_mode;  in qed_iwarp_mpa_received()
750  async_data->mpa_request.ulp_data_len,  in qed_iwarp_mpa_received()
801  async_data->mpa_request.ulp_data_len, mpa_hdr_size);  in qed_iwarp_mpa_received()
806  ulp_data_len = le16_to_cpu(async_data->mpa_request.ulp_data_len);  in qed_iwarp_mpa_received()
944  union async_output *async_data;  in qed_iwarp_parse_private_data() local
960  async_data = &ep->ep_buffer_virt->async_output;  in qed_iwarp_parse_private_data()
963  ulp_data_len = le16_to_cpu(async_data->mpa_response.ulp_data_len);  in qed_iwarp_parse_private_data()
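
The qed_iwarp hits read fields out of the firmware-shared async_output buffer and convert them with le16_to_cpu() before use. A small sketch of that endianness-safe read, modeled loosely on the mpa_request block above and using glibc's <endian.h> helpers in place of the kernel macros (struct mpa_request_out is a made-up stand-in):

    #include <endian.h>
    #include <stdint.h>
    #include <stdio.h>

    /* device/firmware-shared layout: multi-byte fields are little-endian
     * on the wire (the kernel annotates them as __le16) */
    struct mpa_request_out {
            uint16_t ulp_data_len;          /* little-endian */
            uint8_t  mpa_handshake_mode;
    };

    int main(void)
    {
            struct mpa_request_out out = {
                    .ulp_data_len = htole16(128),
                    .mpa_handshake_mode = 1,
            };

            /* convert before using, as the driver does with le16_to_cpu() */
            uint16_t ulp_data_len = le16toh(out.ulp_data_len);

            printf("mode=%u ulp_data_len=%u\n",
                   out.mpa_handshake_mode, ulp_data_len);
            return 0;
    }
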
1632  pasync_ctx->async_data.buffer_size)  in beiscsi_hdl_fwd_pdu()
1744  pasync_sge = pasync_ctx->async_data.ring_base;  in beiscsi_hdq_post_handles()
1745  pi = pasync_ctx->async_data.pi;  in beiscsi_hdq_post_handles()
1771  pasync_ctx->async_data.pi = pi;  in beiscsi_hdq_post_handles()
2832  pasync_ctx->async_data.ring_base =  in hwi_init_async_pdu_ctx()
2845  pasync_ctx->async_data.handle_base =  in hwi_init_async_pdu_ctx()
2853  pasync_ctx->async_data.handle_base;  in hwi_init_async_pdu_ctx()
2873  pasync_ctx->async_data.pi = 0;  in hwi_init_async_pdu_ctx()
2874  pasync_ctx->async_data.buffer_size = p->defpdu_data_sz;  in hwi_init_async_pdu_ctx()
2875  pasync_ctx->async_data.va_base =  in hwi_init_async_pdu_ctx()
[all …]
591 struct hd_async_buf_context async_data; member
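
The be_main.c hits outline a producer-index ring for the default PDU data buffers: hwi_init_async_pdu_ctx() records ring_base, handle_base and buffer_size and resets pi to 0, and beiscsi_hdq_post_handles() later loads pi, fills ring entries, and stores the advanced index back. A compact sketch of that post loop, with a hypothetical fixed-size ring and simplified entry type (async_ctx, sge and post_handles() are stand-ins, not the driver's types):

    #include <stdio.h>

    #define RING_ENTRIES 8

    struct sge { unsigned long addr; unsigned int len; };

    struct async_ctx {
            struct sge ring_base[RING_ENTRIES];   /* ring of posted buffers */
            unsigned int pi;                      /* producer index */
            unsigned int buffer_size;
    };

    /* post 'count' buffers, advancing the producer index with wrap-around,
     * loosely following the beiscsi_hdq_post_handles() flow */
    static void post_handles(struct async_ctx *ctx, unsigned long base,
                             unsigned int count)
    {
            unsigned int pi = ctx->pi;

            for (unsigned int i = 0; i < count; i++) {
                    ctx->ring_base[pi].addr = base + (unsigned long)i * ctx->buffer_size;
                    ctx->ring_base[pi].len  = ctx->buffer_size;
                    pi = (pi + 1) % RING_ENTRIES;
            }
            ctx->pi = pi;   /* publish the new producer index */
    }

    int main(void)
    {
            struct async_ctx ctx = { .pi = 0, .buffer_size = 4096 };

            post_handles(&ctx, 0x100000UL, 3);
            printf("pi is now %u\n", ctx.pi);
            return 0;
    }
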
675 void *async_data; member