Searched refs:nr_io_queues (Results 1 – 8 of 8) sorted by relevance
/linux/drivers/nvme/host/
fabrics.c
    727  opts->nr_io_queues = num_online_cpus();   in nvmf_parse_options()
    828  opts->nr_io_queues = min_t(unsigned int,   in nvmf_parse_options()
   1065  opts->nr_io_queues = 0;   in nvmf_parse_options()
   1095  void nvmf_set_io_queues(struct nvmf_ctrl_options *opts, u32 nr_io_queues,   in nvmf_set_io_queues() argument
   1098  if (opts->nr_write_queues && opts->nr_io_queues < nr_io_queues) {   in nvmf_set_io_queues()
   1104  io_queues[HCTX_TYPE_READ] = opts->nr_io_queues;   in nvmf_set_io_queues()
   1105  nr_io_queues -= io_queues[HCTX_TYPE_READ];   in nvmf_set_io_queues()
   1107  min(opts->nr_write_queues, nr_io_queues);   in nvmf_set_io_queues()
   1108  nr_io_queues -= io_queues[HCTX_TYPE_DEFAULT];   in nvmf_set_io_queues()
   1116  min(opts->nr_io_queues, nr_io_queues);   in nvmf_set_io_queues()
   [all …]
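The nvmf_set_io_queues() hits at 1095–1116 are the helper that splits the queue count actually granted by the controller across the blk-mq hctx types (default, read, poll). Below is a self-contained userspace model of that split, reconstructed from the fragments above; the opts struct and min_u32() helper are simplified stand-ins, not the kernel definitions.

#include <stdio.h>

/* Simplified model of the split done by nvmf_set_io_queues(); only the
 * fields visible in the hits above are modelled. */
enum { HCTX_TYPE_DEFAULT, HCTX_TYPE_READ, HCTX_TYPE_POLL, HCTX_MAX_TYPES };

struct opts {
        unsigned int nr_io_queues;      /* read/default queues requested */
        unsigned int nr_write_queues;   /* dedicated write queues requested */
        unsigned int nr_poll_queues;    /* dedicated poll queues requested */
};

static unsigned int min_u32(unsigned int a, unsigned int b)
{
        return a < b ? a : b;
}

static void set_io_queues(const struct opts *opts, unsigned int nr_io_queues,
                          unsigned int io_queues[HCTX_MAX_TYPES])
{
        if (opts->nr_write_queues && opts->nr_io_queues < nr_io_queues) {
                /* Enough granted queues to separate reads from writes:
                 * reads get a dedicated set, writes take the default set. */
                io_queues[HCTX_TYPE_READ] = opts->nr_io_queues;
                nr_io_queues -= io_queues[HCTX_TYPE_READ];
                io_queues[HCTX_TYPE_DEFAULT] =
                        min_u32(opts->nr_write_queues, nr_io_queues);
                nr_io_queues -= io_queues[HCTX_TYPE_DEFAULT];
        } else {
                /* Reads and writes share the default set. */
                io_queues[HCTX_TYPE_DEFAULT] =
                        min_u32(opts->nr_io_queues, nr_io_queues);
                nr_io_queues -= io_queues[HCTX_TYPE_DEFAULT];
        }

        /* Whatever is left over becomes poll queues, if any were requested. */
        if (opts->nr_poll_queues && nr_io_queues)
                io_queues[HCTX_TYPE_POLL] =
                        min_u32(opts->nr_poll_queues, nr_io_queues);
}

int main(void)
{
        struct opts opts = { .nr_io_queues = 4, .nr_write_queues = 4,
                             .nr_poll_queues = 2 };
        unsigned int io_queues[HCTX_MAX_TYPES] = { 0 };

        /* Pretend the controller granted 10 I/O queues in total. */
        set_io_queues(&opts, 10, io_queues);
        printf("default=%u read=%u poll=%u\n",
               io_queues[HCTX_TYPE_DEFAULT], io_queues[HCTX_TYPE_READ],
               io_queues[HCTX_TYPE_POLL]);
        return 0;
}

With nr_write_queues set and enough granted queues, reads get their own set and writes take the default set; otherwise both share the default set, and any leftover queues become poll queues.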
fabrics.h
    122  unsigned int nr_io_queues;   member
    212  return min(opts->nr_io_queues, num_online_cpus()) +   in nvmf_nr_io_queues()
    230  void nvmf_set_io_queues(struct nvmf_ctrl_options *opts, u32 nr_io_queues,
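The hit at 212 is the opening line of nvmf_nr_io_queues(), which sums how many I/O queues a fabrics host should request. The continuation is cut off in the result; a plausible completion, assuming the write and poll terms mirror the read term (each class capped at the number of online CPUs), would be:

/* Assumed completion of the truncated hit at fabrics.h:212. */
static inline unsigned int nvmf_nr_io_queues(struct nvmf_ctrl_options *opts)
{
        return min(opts->nr_io_queues, num_online_cpus()) +
               min(opts->nr_write_queues, num_online_cpus()) +
               min(opts->nr_poll_queues, num_online_cpus());
}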
fc.c
   2858  unsigned int nr_io_queues;   in nvme_fc_create_io_queues() local
   2861  nr_io_queues = min(min(opts->nr_io_queues, num_online_cpus()),   in nvme_fc_create_io_queues()
   2863  ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues);   in nvme_fc_create_io_queues()
   2870  ctrl->ctrl.queue_count = nr_io_queues + 1;   in nvme_fc_create_io_queues()
   2871  if (!nr_io_queues)   in nvme_fc_create_io_queues()
   2912  unsigned int nr_io_queues;   in nvme_fc_recreate_io_queues() local
   2915  nr_io_queues = min(min(opts->nr_io_queues, num_online_cpus()),   in nvme_fc_recreate_io_queues()
   2917  ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues);   in nvme_fc_recreate_io_queues()
   2924  if (!nr_io_queues && prior_ioq_cnt) {   in nvme_fc_recreate_io_queues()
   2931  ctrl->ctrl.queue_count = nr_io_queues + 1;   in nvme_fc_recreate_io_queues()
   [all …]
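nvme_fc_create_io_queues() and nvme_fc_recreate_io_queues() share one pattern: clamp the requested count, negotiate the final number with nvme_set_queue_count(), then size queue_count with one extra slot for the admin queue. Here is a condensed sketch of that flow; the FC-specific limit (the second operand of the outer min(), truncated at the 2861/2915 hits) is written as a hypothetical max_hw_queues placeholder.

/* Condensed sketch, not the full function; "max_hw_queues" stands in for the
 * FC transport limit that is cut off in the search hit. */
unsigned int nr_io_queues;
int ret;

nr_io_queues = min(min(opts->nr_io_queues, num_online_cpus()),
                   max_hw_queues);
ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues);
if (ret)
        return ret;

ctrl->ctrl.queue_count = nr_io_queues + 1;      /* slot 0 is the admin queue */
if (!nr_io_queues)
        return 0;                               /* controller granted no I/O queues */

The recreate path additionally checks, at the 2924 hit, whether a controller that previously had I/O queues (prior_ioq_cnt) now reports none after a reconnect.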
pci.c
   1616  static int nvme_cmb_qdepth(struct nvme_dev *dev, int nr_io_queues,   in nvme_cmb_qdepth() argument
   1623  if (q_size_aligned * nr_io_queues > dev->cmb_size) {   in nvme_cmb_qdepth()
   1624  u64 mem_per_q = div_u64(dev->cmb_size, nr_io_queues);   in nvme_cmb_qdepth()
   1838  static unsigned long db_bar_size(struct nvme_dev *dev, unsigned nr_io_queues)   in db_bar_size() argument
   1840  return NVME_REG_DBS + ((nr_io_queues + 1) * 8 * dev->db_stride);   in db_bar_size()
   2404  static int nvme_setup_irqs(struct nvme_dev *dev, unsigned int nr_io_queues)   in nvme_setup_irqs() argument
   2419  poll_queues = min(dev->nr_poll_queues, nr_io_queues - 1);   in nvme_setup_irqs()
   2436  irq_queues += (nr_io_queues - poll_queues);   in nvme_setup_irqs()
   2458  unsigned int nr_io_queues;   in nvme_setup_io_queues() local
   2469  nr_io_queues = dev->nr_allocated_queues - 1;   in nvme_setup_io_queues()
   [all …]
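The pci.c hits show nr_io_queues feeding three sizing decisions: how deep CMB-resident queues can be (nvme_cmb_qdepth), how large the doorbell BAR mapping must be (db_bar_size), and how interrupt vectors are split between IRQ and poll queues (nvme_setup_irqs). The doorbell arithmetic at 1840 is simple enough to check standalone; a small userspace example follows, with NVME_REG_DBS as 0x1000, the spec-defined start of the doorbell registers.

#include <stdio.h>

#define NVME_REG_DBS    0x1000  /* doorbell registers start at this BAR offset */

/* Space needed for doorbells: the admin queue plus nr_io_queues queue pairs,
 * each pair using 8 bytes (4-byte SQ tail + 4-byte CQ head) scaled by the
 * controller's doorbell stride (1 << CAP.DSTRD, usually 1). */
static unsigned long db_bar_size(unsigned nr_io_queues, unsigned db_stride)
{
        return NVME_REG_DBS + ((nr_io_queues + 1) * 8 * db_stride);
}

int main(void)
{
        /* 32 I/O queues at stride 1: 0x1000 + 33 * 8 = 4360 bytes */
        printf("%lu\n", db_bar_size(32, 1));
        return 0;
}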
rdma.c
    719  unsigned int nr_io_queues;   in nvme_rdma_alloc_io_queues() local
    722  nr_io_queues = nvmf_nr_io_queues(opts);   in nvme_rdma_alloc_io_queues()
    723  ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues);   in nvme_rdma_alloc_io_queues()
    727  if (nr_io_queues == 0) {   in nvme_rdma_alloc_io_queues()
    733  ctrl->ctrl.queue_count = nr_io_queues + 1;   in nvme_rdma_alloc_io_queues()
    735  "creating %d I/O queues.\n", nr_io_queues);   in nvme_rdma_alloc_io_queues()
    737  nvmf_set_io_queues(opts, nr_io_queues, ctrl->io_queues);   in nvme_rdma_alloc_io_queues()
   2287  ctrl->ctrl.queue_count = opts->nr_io_queues + opts->nr_write_queues +   in nvme_rdma_alloc_ctrl()
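nvme_rdma_alloc_io_queues() ties the two fabrics helpers together: request nvmf_nr_io_queues(opts) queues, let the controller trim that number via nvme_set_queue_count(), then distribute the grant with nvmf_set_io_queues(). A condensed sketch of the 719–737 sequence follows; error handling is abbreviated and the exact error code behind the 727 hit is not visible, so -ENOMEM is an assumption.

/* Condensed sketch of the alloc-I/O-queues flow, not the full function body. */
unsigned int nr_io_queues = nvmf_nr_io_queues(opts);
int ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues);
if (ret)
        return ret;
if (nr_io_queues == 0)
        return -ENOMEM;         /* assumed: getting no usable I/O queues is fatal here */

ctrl->ctrl.queue_count = nr_io_queues + 1;      /* slot 0 is the admin queue */
dev_info(ctrl->ctrl.device, "creating %d I/O queues.\n", nr_io_queues);
nvmf_set_io_queues(opts, nr_io_queues, ctrl->io_queues);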
tcp.c
   2116  unsigned int nr_io_queues;   in nvme_tcp_alloc_io_queues() local
   2119  nr_io_queues = nvmf_nr_io_queues(ctrl->opts);   in nvme_tcp_alloc_io_queues()
   2120  ret = nvme_set_queue_count(ctrl, &nr_io_queues);   in nvme_tcp_alloc_io_queues()
   2124  if (nr_io_queues == 0) {   in nvme_tcp_alloc_io_queues()
   2130  ctrl->queue_count = nr_io_queues + 1;   in nvme_tcp_alloc_io_queues()
   2132  "creating %d I/O queues.\n", nr_io_queues);   in nvme_tcp_alloc_io_queues()
   2134  nvmf_set_io_queues(ctrl->opts, nr_io_queues,   in nvme_tcp_alloc_io_queues()
   2832  ctrl->ctrl.queue_count = opts->nr_io_queues + opts->nr_write_queues +   in nvme_tcp_alloc_ctrl()
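nvme_tcp_alloc_io_queues() (hits 2116–2134) mirrors the RDMA flow above. The 2832 hit is the other place queue_count is set: at controller allocation time it is sized for the worst case, before the target has trimmed anything. The third term is truncated in the hit; nr_poll_queues is an assumption.

/* Worst-case sizing at controller allocation; the actual grant is negotiated
 * later in nvme_tcp_alloc_io_queues(). The nr_poll_queues term is assumed. */
ctrl->ctrl.queue_count = opts->nr_io_queues + opts->nr_write_queues +
                         opts->nr_poll_queues + 1;      /* +1 admin queue */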
auth.c
     53  return ctrl->opts->nr_io_queues + ctrl->opts->nr_write_queues +   in ctrl_max_dhchaps()
/linux/drivers/nvme/target/
loop.c
    318  unsigned int nr_io_queues;   in nvme_loop_init_io_queues() local
    321  nr_io_queues = min(opts->nr_io_queues, num_online_cpus());   in nvme_loop_init_io_queues()
    322  ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues);   in nvme_loop_init_io_queues()
    323  if (ret || !nr_io_queues)   in nvme_loop_init_io_queues()
    326  dev_info(ctrl->ctrl.device, "creating %d I/O queues.\n", nr_io_queues);   in nvme_loop_init_io_queues()
    328  for (i = 1; i <= nr_io_queues; i++) {   in nvme_loop_init_io_queues()
    583  ctrl->queues = kcalloc(opts->nr_io_queues + 1, sizeof(*ctrl->queues),   in nvme_loop_create_ctrl()
    601  if (opts->nr_io_queues) {   in nvme_loop_create_ctrl()
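The loopback target clamps only against the number of online CPUs (there is no transport limit), negotiates the count, and then sets up queues 1 through nr_io_queues, index 0 being the admin queue; the kcalloc at 583 sizes the queue array with that extra admin slot. A condensed sketch of the 318–328 flow, with the per-queue loop body (not shown in the hit) left as a comment:

/* Condensed sketch of nvme_loop_init_io_queues(); the loop body is elided
 * because the search result does not show it. */
unsigned int nr_io_queues, i;
int ret;

nr_io_queues = min(opts->nr_io_queues, num_online_cpus());
ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues);
if (ret || !nr_io_queues)
        return ret;

dev_info(ctrl->ctrl.device, "creating %d I/O queues.\n", nr_io_queues);

for (i = 1; i <= nr_io_queues; i++) {
        /* per-queue initialisation goes here (queue 0 is the admin queue) */
}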