
Searched refs:nr_io_queues (Results 1 – 8 of 8) sorted by relevance

/drivers/nvme/target/
loop.c
297 unsigned int nr_io_queues; in nvme_loop_init_io_queues() local
300 nr_io_queues = min(opts->nr_io_queues, num_online_cpus()); in nvme_loop_init_io_queues()
301 ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues); in nvme_loop_init_io_queues()
302 if (ret || !nr_io_queues) in nvme_loop_init_io_queues()
305 dev_info(ctrl->ctrl.device, "creating %d I/O queues.\n", nr_io_queues); in nvme_loop_init_io_queues()
307 for (i = 1; i <= nr_io_queues; i++) { in nvme_loop_init_io_queues()
600 ctrl->queues = kcalloc(opts->nr_io_queues + 1, sizeof(*ctrl->queues), in nvme_loop_create_ctrl()
617 if (opts->nr_io_queues) { in nvme_loop_create_ctrl()
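
The loop target shows the pattern every transport repeats: clamp the user-requested count to the number of online CPUs, let the controller negotiate it down via nvme_set_queue_count(), and give up if nothing is granted. Below is a minimal user-space model of that flow; negotiate() is a made-up stand-in for the real Set Features exchange, and all the numbers are arbitrary.

#include <stdio.h>

/* Stand-in for nvme_set_queue_count(): the controller may grant fewer
 * queues than requested; a hypothetical grant of 4 is hard-coded here. */
static int negotiate(unsigned int *count)
{
    unsigned int granted = 4;

    if (*count > granted)
        *count = granted;
    return 0;                       /* 0 on success, as in the driver */
}

int main(void)
{
    unsigned int requested = 8;     /* models opts->nr_io_queues */
    unsigned int online_cpus = 6;   /* models num_online_cpus() */
    unsigned int nr_io_queues;

    nr_io_queues = requested < online_cpus ? requested : online_cpus;
    if (negotiate(&nr_io_queues) || !nr_io_queues)
        return 1;                   /* mirrors "if (ret || !nr_io_queues)" */

    printf("creating %u I/O queues\n", nr_io_queues);
    return 0;
}

The "+ 1" in the kcalloc() hit at 600 is the same convention seen in the other transports: index 0 of the queues array is reserved for the admin queue, so the allocation is one slot larger than the I/O queue count.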
/drivers/nvme/host/
tcp.c
1756 unsigned int nr_io_queues; in nvme_tcp_nr_io_queues() local
1758 nr_io_queues = min(ctrl->opts->nr_io_queues, num_online_cpus()); in nvme_tcp_nr_io_queues()
1759 nr_io_queues += min(ctrl->opts->nr_write_queues, num_online_cpus()); in nvme_tcp_nr_io_queues()
1760 nr_io_queues += min(ctrl->opts->nr_poll_queues, num_online_cpus()); in nvme_tcp_nr_io_queues()
1762 return nr_io_queues; in nvme_tcp_nr_io_queues()
1766 unsigned int nr_io_queues) in nvme_tcp_set_io_queues() argument
1771 if (opts->nr_write_queues && opts->nr_io_queues < nr_io_queues) { in nvme_tcp_set_io_queues()
1777 ctrl->io_queues[HCTX_TYPE_READ] = opts->nr_io_queues; in nvme_tcp_set_io_queues()
1778 nr_io_queues -= ctrl->io_queues[HCTX_TYPE_READ]; in nvme_tcp_set_io_queues()
1780 min(opts->nr_write_queues, nr_io_queues); in nvme_tcp_set_io_queues()
[all …]
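
For TCP the requested total is not a single number: nvme_tcp_nr_io_queues() adds up the default, dedicated-write, and polled queue requests, clamping each to the number of online CPUs, and nvme_tcp_set_io_queues() then spreads whatever the controller grants across the blk-mq queue maps. A small model of the visible summing step only; the opts values are invented.

#include <stdio.h>

static unsigned int clamp_to_cpus(unsigned int want, unsigned int cpus)
{
    return want < cpus ? want : cpus;
}

int main(void)
{
    unsigned int cpus = 8;                    /* models num_online_cpus() */
    unsigned int nr_io_queues = 0;

    nr_io_queues += clamp_to_cpus(16, cpus);  /* opts->nr_io_queues */
    nr_io_queues += clamp_to_cpus(4, cpus);   /* opts->nr_write_queues */
    nr_io_queues += clamp_to_cpus(2, cpus);   /* opts->nr_poll_queues */

    printf("requesting %u I/O queues in total\n", nr_io_queues);
    return 0;
}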
pci.c
1450 static int nvme_cmb_qdepth(struct nvme_dev *dev, int nr_io_queues, in nvme_cmb_qdepth() argument
1457 if (q_size_aligned * nr_io_queues > dev->cmb_size) { in nvme_cmb_qdepth()
1458 u64 mem_per_q = div_u64(dev->cmb_size, nr_io_queues); in nvme_cmb_qdepth()
1679 static unsigned long db_bar_size(struct nvme_dev *dev, unsigned nr_io_queues) in db_bar_size() argument
1681 return NVME_REG_DBS + ((nr_io_queues + 1) * 8 * dev->db_stride); in db_bar_size()
2104 static int nvme_setup_irqs(struct nvme_dev *dev, unsigned int nr_io_queues) in nvme_setup_irqs() argument
2118 poll_queues = min(dev->nr_poll_queues, nr_io_queues - 1); in nvme_setup_irqs()
2135 irq_queues += (nr_io_queues - poll_queues); in nvme_setup_irqs()
2155 unsigned int nr_io_queues; in nvme_setup_io_queues() local
2171 nr_io_queues = 1; in nvme_setup_io_queues()
[all …]
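
The PCIe driver has a constraint the fabrics transports do not: every queue needs a doorbell pair in BAR 0, so db_bar_size() computes how much register space a given queue count requires. The formula in the hit at 1681 is one submission-tail plus one completion-head doorbell (8 bytes at the minimum stride) per queue, including the admin queue, offset by the start of the doorbell region. A sketch of that arithmetic follows, assuming db_stride is the already-decoded multiplier derived from CAP.DSTRD.

#include <stdio.h>

#define NVME_REG_DBS 0x1000UL   /* doorbell registers start at offset 0x1000 */

static unsigned long db_bar_size(unsigned int nr_io_queues,
                                 unsigned int db_stride)
{
    /* +1 for the admin queue; 8 bytes covers the SQ tail and CQ head
     * doorbells of one queue at stride 1. */
    return NVME_REG_DBS + ((nr_io_queues + 1) * 8UL * db_stride);
}

int main(void)
{
    printf("BAR space for 64 I/O queues: %lu bytes\n", db_bar_size(64, 1));
    return 0;
}

nvme_cmb_qdepth() at 1450 applies a similar space check in the other direction: if the aligned queue memory for nr_io_queues queues does not fit in the controller memory buffer, it divides the CMB size by the queue count and shrinks the queue depth to match.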
rdma.c
726 unsigned int nr_io_queues, nr_default_queues; in nvme_rdma_alloc_io_queues() local
731 min(opts->nr_io_queues, num_online_cpus())); in nvme_rdma_alloc_io_queues()
735 nr_io_queues = nr_read_queues + nr_default_queues + nr_poll_queues; in nvme_rdma_alloc_io_queues()
737 ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues); in nvme_rdma_alloc_io_queues()
741 if (nr_io_queues == 0) { in nvme_rdma_alloc_io_queues()
747 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_rdma_alloc_io_queues()
749 "creating %d I/O queues.\n", nr_io_queues); in nvme_rdma_alloc_io_queues()
751 if (opts->nr_write_queues && nr_read_queues < nr_io_queues) { in nvme_rdma_alloc_io_queues()
758 nr_io_queues -= ctrl->io_queues[HCTX_TYPE_READ]; in nvme_rdma_alloc_io_queues()
760 min(nr_default_queues, nr_io_queues); in nvme_rdma_alloc_io_queues()
[all …]
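
rdma.c splits the granted total across the blk-mq queue maps: when dedicated write queues were requested and there is room, the read set is carved off first and the remainder feeds the default set (the poll-set handling falls in the truncated part of the hit list). A rough model of that carve-out, with every count invented and the poll queues left out:

#include <stdio.h>

enum { HCTX_TYPE_DEFAULT, HCTX_TYPE_READ, HCTX_TYPE_POLL, HCTX_MAX_TYPES };

int main(void)
{
    unsigned int io_queues[HCTX_MAX_TYPES] = { 0 };
    unsigned int nr_read_queues = 4, nr_default_queues = 4;
    unsigned int nr_write_queues = 4;   /* models opts->nr_write_queues */
    unsigned int nr_io_queues = 6;      /* total the controller granted */

    if (nr_write_queues && nr_read_queues < nr_io_queues) {
        /* Dedicated read queues come off the top, the rest is shared. */
        io_queues[HCTX_TYPE_READ] = nr_read_queues;
        nr_io_queues -= io_queues[HCTX_TYPE_READ];
        io_queues[HCTX_TYPE_DEFAULT] =
            nr_default_queues < nr_io_queues ? nr_default_queues : nr_io_queues;
    }

    printf("read=%u default=%u\n",
           io_queues[HCTX_TYPE_READ], io_queues[HCTX_TYPE_DEFAULT]);
    return 0;
}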
fc.c
2852 unsigned int nr_io_queues; in nvme_fc_create_io_queues() local
2855 nr_io_queues = min(min(opts->nr_io_queues, num_online_cpus()), in nvme_fc_create_io_queues()
2857 ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues); in nvme_fc_create_io_queues()
2864 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_fc_create_io_queues()
2865 if (!nr_io_queues) in nvme_fc_create_io_queues()
2926 unsigned int nr_io_queues; in nvme_fc_recreate_io_queues() local
2929 nr_io_queues = min(min(opts->nr_io_queues, num_online_cpus()), in nvme_fc_recreate_io_queues()
2931 ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues); in nvme_fc_recreate_io_queues()
2938 if (!nr_io_queues && prior_ioq_cnt) { in nvme_fc_recreate_io_queues()
2945 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_fc_recreate_io_queues()
[all …]
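
fc.c repeats the clamp-and-negotiate pattern at both initial connect and reconnect, and the hit at 2938 suggests the reconnect path also compares the fresh grant against the queue count it had before the reset (prior_ioq_cnt). A hedged sketch of that kind of check; what the real driver does when the grant shrinks to zero is not shown in these hits, so the error handling below is only a placeholder.

#include <stdio.h>

/* Hypothetical reconnect-time check modelled on the fc.c hit at 2938:
 * previously we ran with prior_ioq_cnt I/O queues, and the controller
 * has now granted nr_io_queues. */
static int recheck_io_queues(unsigned int prior_ioq_cnt,
                             unsigned int nr_io_queues)
{
    if (!nr_io_queues && prior_ioq_cnt) {
        /* Had I/O queues before the reset but got none back;
         * treat it as a failed reconnect (placeholder policy). */
        return -1;
    }
    if (prior_ioq_cnt != nr_io_queues)
        printf("queue count changed: %u -> %u\n",
               prior_ioq_cnt, nr_io_queues);
    return 0;
}

int main(void)
{
    printf("recheck: %d\n", recheck_io_queues(4, 0));
    printf("recheck: %d\n", recheck_io_queues(4, 2));
    return 0;
}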
fabrics.h
101 unsigned int nr_io_queues; member
fabrics.c
638 opts->nr_io_queues = num_online_cpus(); in nvmf_parse_options()
733 opts->nr_io_queues = min_t(unsigned int, in nvmf_parse_options()
887 opts->nr_io_queues = 0; in nvmf_parse_options()
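
fabrics.c sets the policy the transports all start from: nr_io_queues defaults to the number of online CPUs, an explicit user-supplied value is clamped rather than taken verbatim, and one path forces the count to zero (in the kernel this is the discovery-controller case, though that context is not visible in these hits). A compact model of those defaults; the clamp limit and the discovery flag are assumptions.

#include <stdio.h>

/* Hypothetical helper modelled on the fabrics.c hits at 638, 733 and 887. */
static unsigned int pick_nr_io_queues(int user_value, int discovery,
                                      unsigned int online_cpus)
{
    unsigned int n = online_cpus;           /* default: one per CPU */

    if (user_value > 0 && (unsigned int)user_value < n)
        n = (unsigned int)user_value;       /* clamped explicit request */
    if (discovery)
        n = 0;                              /* assumed: discovery does no I/O */
    return n;
}

int main(void)
{
    printf("%u %u %u\n",
           pick_nr_io_queues(-1, 0, 8),     /* default -> 8 */
           pick_nr_io_queues(4, 0, 8),      /* explicit -> 4 */
           pick_nr_io_queues(4, 1, 8));     /* discovery -> 0 */
    return 0;
}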
core.c
1485 int status, nr_io_queues; in nvme_set_queue_count() local
1501 nr_io_queues = min(result & 0xffff, result >> 16) + 1; in nvme_set_queue_count()
1502 *count = min(*count, nr_io_queues); in nvme_set_queue_count()
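
core.c is where the controller's answer actually gets decoded: nvme_set_queue_count() issues Set Features (Number of Queues) and reads back, in completion dword 0, the 0-based number of submission and completion queues the controller allocated. A small stand-alone model of the decode shown in the hits at 1501 and 1502; the result value is made up.

#include <stdio.h>
#include <stdint.h>

/* Decode the Set Features (Number of Queues) completion dword: bits 15:0
 * are the number of SQs allocated and bits 31:16 the number of CQs
 * allocated, both 0-based, so +1 converts to a queue count. */
static unsigned int granted_io_queues(uint32_t result)
{
    unsigned int sqs = result & 0xffff;
    unsigned int cqs = result >> 16;

    return (sqs < cqs ? sqs : cqs) + 1;
}

int main(void)
{
    uint32_t result = 0x003f003f;   /* hypothetical: 64 SQs / 64 CQs granted */
    unsigned int count = 128;       /* what the transport asked for */
    unsigned int granted = granted_io_queues(result);

    /* Never report more queues than were requested. */
    if (granted < count)
        count = granted;

    printf("using %u I/O queues\n", count);
    return 0;
}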