/linux/drivers/scsi/qedf/

qedf_main.c
    2239  struct qedf_io_work *io_work;  in qedf_process_completions() [local]
    2299  io_work = mempool_alloc(qedf->io_mempool, GFP_ATOMIC);  in qedf_process_completions()
    2300  if (!io_work) {  in qedf_process_completions()
    2305  memset(io_work, 0, sizeof(struct qedf_io_work));  in qedf_process_completions()
    2307  INIT_WORK(&io_work->work, qedf_fp_io_handler);  in qedf_process_completions()
    2310  memcpy(&io_work->cqe, cqe, sizeof(struct fcoe_cqe));  in qedf_process_completions()
    2312  io_work->qedf = fp->qedf;  in qedf_process_completions()
    2313  io_work->fp = NULL; /* Only used for unsolicited frames */  in qedf_process_completions()
    2315  queue_work_on(cpu, qedf_io_wq, &io_work->work);  in qedf_process_completions()
    2701  struct qedf_io_work *io_work =  in qedf_fp_io_handler() [local]
    [all …]

qedf_io.c
    2493  struct qedf_io_work *io_work;  in qedf_process_unsol_compl() [local]
    2557  io_work = mempool_alloc(qedf->io_mempool, GFP_ATOMIC);  in qedf_process_unsol_compl()
    2558  if (!io_work) {  in qedf_process_unsol_compl()
    2564  memset(io_work, 0, sizeof(struct qedf_io_work));  in qedf_process_unsol_compl()
    2566  INIT_WORK(&io_work->work, qedf_fp_io_handler);  in qedf_process_unsol_compl()
    2569  memcpy(&io_work->cqe, cqe, sizeof(struct fcoe_cqe));  in qedf_process_unsol_compl()
    2571  io_work->qedf = qedf;  in qedf_process_unsol_compl()
    2572  io_work->fp = fp;  in qedf_process_unsol_compl()
    2574  queue_work_on(smp_processor_id(), qedf_io_wq, &io_work->work);  in qedf_process_unsol_compl()
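Both qedf call sites above follow the same deferred-completion shape: a qedf_io_work is drawn from a mempool in atomic context, the hardware CQE is copied into it before the ring slot can be reused, and the work is queued on a specific CPU. A minimal sketch of that shape, assuming hypothetical demo_* names in place of the driver's own types:

/*
 * Sketch only: demo_* names are illustrative stand-ins, not the
 * driver's API. One work item per completion entry, allocated from
 * a mempool in atomic context.
 */
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/string.h>
#include <linux/mempool.h>
#include <linux/workqueue.h>

struct demo_cqe {                       /* stand-in for struct fcoe_cqe */
	u32 data;
};

struct demo_io_work {
	struct work_struct work;        /* embedded, so container_of() works */
	struct demo_cqe cqe;            /* private copy of the completion */
};

static struct workqueue_struct *demo_io_wq;
static mempool_t *demo_io_mempool;

static void demo_fp_io_handler(struct work_struct *work)
{
	struct demo_io_work *io_work =
		container_of(work, struct demo_io_work, work);

	/* ... process io_work->cqe in process context ... */

	mempool_free(io_work, demo_io_mempool);
}

/* Completion path; may run in atomic context, hence GFP_ATOMIC. */
static int demo_process_completion(struct demo_cqe *cqe, int cpu)
{
	struct demo_io_work *io_work;

	io_work = mempool_alloc(demo_io_mempool, GFP_ATOMIC);
	if (!io_work)
		return -ENOMEM;

	memset(io_work, 0, sizeof(*io_work));
	INIT_WORK(&io_work->work, demo_fp_io_handler);
	memcpy(&io_work->cqe, cqe, sizeof(*cqe));

	/* Stay on the CPU that received the completion for cache locality. */
	queue_work_on(cpu, demo_io_wq, &io_work->work);
	return 0;
}

The mempool gives the completion path a reserve to draw on under memory pressure, and queueing on the receiving CPU keeps the deferred handler cache-warm with the interrupt that produced the CQE.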
/linux/drivers/nvme/target/

tcp.c
    151   struct work_struct io_work;  [member]
    617   queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &cmd->queue->io_work);  in nvmet_tcp_queue_response()
    1425  container_of(w, struct nvmet_tcp_queue, io_work);  in nvmet_tcp_io_work()
    1451  queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &queue->io_work);  in nvmet_tcp_io_work()
    1600  cancel_work_sync(&queue->io_work);  in nvmet_tcp_release_queue_work()
    1608  cancel_work_sync(&queue->io_work);  in nvmet_tcp_release_queue_work()
    1631  &queue->io_work);  in nvmet_tcp_data_ready()
    1652  queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &queue->io_work);  in nvmet_tcp_write_space()
    1733  queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &queue->io_work);  in nvmet_tcp_set_queue_sock()
    1910  INIT_WORK(&queue->io_work, nvmet_tcp_io_work);  in nvmet_tcp_alloc_queue()
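The target-side hits show the opposite ownership model from qedf: one long-lived io_work embedded in the queue rather than one work item per completion, rearmed from socket callbacks, response submission, and the worker itself. A sketch under that reading, with illustrative demo_* names:

/*
 * Sketch only: demo_* names are stand-ins. A single embedded work
 * item per connection, requeued while I/O is still outstanding.
 */
#include <linux/types.h>
#include <linux/workqueue.h>

struct demo_queue {
	struct work_struct io_work;
	int cpu;                        /* CPU this queue is pinned to */
};

static struct workqueue_struct *demo_wq;

/* Returns true if send/recv work is still outstanding. */
static bool demo_try_io(struct demo_queue *queue)
{
	/* ... nonblocking send/recv against the socket ... */
	return false;
}

static void demo_io_work_fn(struct work_struct *w)
{
	struct demo_queue *queue =
		container_of(w, struct demo_queue, io_work);

	/*
	 * Requeue rather than loop forever so one busy connection
	 * cannot monopolize a kworker; queueing an already-pending
	 * work_struct coalesces, so redundant kicks are harmless.
	 */
	if (demo_try_io(queue))
		queue_work_on(queue->cpu, demo_wq, &queue->io_work);
}

static void demo_queue_init(struct demo_queue *queue, int cpu)
{
	queue->cpu = cpu;
	INIT_WORK(&queue->io_work, demo_io_work_fn);
}

/* data_ready/write_space style kick; safe from softirq context. */
static void demo_kick(struct demo_queue *queue)
{
	queue_work_on(queue->cpu, demo_wq, &queue->io_work);
}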
/linux/drivers/memstick/core/

ms_block.h
    160   struct work_struct io_work;  [member]

ms_block.c
    1503  queue_work(msb->io_queue, &msb->io_work);  in msb_cache_flush_timer()
    1877  struct msb_data *msb = container_of(work, struct msb_data, io_work);  in msb_io_work()
    2004  queue_work(msb->io_queue, &msb->io_work);  in msb_queue_rq()
    2062  queue_work(msb->io_queue, &msb->io_work);  in msb_start()
    2122  INIT_WORK(&msb->io_work, msb_io_work);  in msb_init_disk()
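ms_block applies the same embedded-work idea, but on a driver-private workqueue (msb->io_queue) kicked from several producers: the block request path, a cache-flush timer, and device start. A sketch with hypothetical demo_* names; the ordered-queue choice below is an assumption made for illustration, not something the hits themselves establish:

/*
 * Sketch only: demo_* names are stand-ins. One work item serviced
 * on a driver-private workqueue, kicked from multiple producers.
 */
#include <linux/errno.h>
#include <linux/workqueue.h>

struct demo_data {
	struct workqueue_struct *io_queue;  /* per-device private queue */
	struct work_struct io_work;
};

static void demo_io_work_fn(struct work_struct *work)
{
	struct demo_data *d = container_of(work, struct demo_data, io_work);

	/* ... drain the pending requests for this device ... */
}

static int demo_init(struct demo_data *d)
{
	/* Assumed ordered queue: at most one io_work instance runs at a time. */
	d->io_queue = alloc_ordered_workqueue("demo_io", 0);
	if (!d->io_queue)
		return -ENOMEM;
	INIT_WORK(&d->io_work, demo_io_work_fn);
	return 0;
}

/* Any producer (request handler, timer callback, resume) just kicks it. */
static void demo_kick(struct demo_data *d)
{
	queue_work(d->io_queue, &d->io_work);
}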
/linux/drivers/nvme/host/

tcp.c
    144   struct work_struct io_work;  [member]
    427   queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_queue_request()
    792   queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_handle_r2t()
    1073  queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_data_ready()
    1088  queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_write_space()
    1379  container_of(w, struct nvme_tcp_queue, io_work);  in nvme_tcp_io_work()
    1411  queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_io_work()
    1778  INIT_WORK(&queue->io_work, nvme_tcp_io_work);  in nvme_tcp_alloc_queue()
    1929  cancel_work_sync(&queue->io_work);  in __nvme_tcp_stop_queue()
    2758  queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_commit_rqs()
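The host-side tcp.c hits add the teardown half of the pattern: every producer queues onto the queue's fixed io_cpu, and __nvme_tcp_stop_queue() relies on cancel_work_sync() before the transport goes away. Sketched with illustrative demo_* names:

/*
 * Sketch only: demo_* names are stand-ins. Shows the queue/cancel
 * lifecycle around a single embedded io_work.
 */
#include <linux/workqueue.h>

struct demo_queue {
	struct work_struct io_work;
	int io_cpu;                     /* chosen once at queue setup */
};

static struct workqueue_struct *demo_wq;

static void demo_io_work_fn(struct work_struct *w)
{
	struct demo_queue *queue =
		container_of(w, struct demo_queue, io_work);

	/* ... send queued requests / reap completions ... */
}

/* Request submission, R2T handling, and socket callbacks all do this. */
static void demo_kick(struct demo_queue *queue)
{
	queue_work_on(queue->io_cpu, demo_wq, &queue->io_work);
}

static void demo_stop_queue(struct demo_queue *queue)
{
	/*
	 * cancel_work_sync() removes a pending instance and waits for
	 * a running one, so no handler can still touch the socket when
	 * this returns, provided nothing requeues io_work afterwards.
	 */
	cancel_work_sync(&queue->io_work);
}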
/linux/drivers/s390/cio/

vfio_ccw_fsm.c
    367  queue_work(vfio_ccw_work_q, &private->io_work);  in fsm_irq()

vfio_ccw_drv.c
    89   private = container_of(work, struct vfio_ccw_private, io_work);  in vfio_ccw_sch_io_todo()

vfio_ccw_ops.c
    55   INIT_WORK(&private->io_work, vfio_ccw_sch_io_todo);  in vfio_ccw_mdev_init_dev()
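vfio_ccw splits the same pattern across three files: INIT_WORK at device init, queue_work from the interrupt FSM, and container_of() recovery in the handler. A compact sketch with hypothetical demo_* names mirroring that three-way split:

/*
 * Sketch only: demo_* names are stand-ins for the vfio_ccw types.
 */
#include <linux/workqueue.h>

struct demo_private {
	struct work_struct io_work;
	/* ... device state inspected by the bottom half ... */
};

static struct workqueue_struct *demo_work_q;

/* Bottom half: runs in process context, may sleep. */
static void demo_io_todo(struct work_struct *work)
{
	struct demo_private *private =
		container_of(work, struct demo_private, io_work);

	/* ... forward interrupt status out of IRQ context ... */
}

/* Device init: wire the handler up exactly once. */
static void demo_init_dev(struct demo_private *private)
{
	INIT_WORK(&private->io_work, demo_io_todo);
}

/* Interrupt FSM: defer the heavy lifting out of IRQ context. */
static void demo_fsm_irq(struct demo_private *private)
{
	queue_work(demo_work_q, &private->io_work);
}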