
Searched refs:nr_queues (Results 1 – 25 of 57) sorted by relevance


/kernel/linux/linux-5.10/block/
blk-mq-cpumap.c
19 unsigned int nr_queues, const int q) in queue_index() argument
21 return qmap->queue_offset + (q % nr_queues); in queue_index()
38 unsigned int nr_queues = qmap->nr_queues; in blk_mq_map_queues() local
49 if (q >= nr_queues) in blk_mq_map_queues()
51 map[cpu] = queue_index(qmap, nr_queues, q++); in blk_mq_map_queues()
63 if (q < nr_queues) { in blk_mq_map_queues()
64 map[cpu] = queue_index(qmap, nr_queues, q++); in blk_mq_map_queues()
68 map[cpu] = queue_index(qmap, nr_queues, q++); in blk_mq_map_queues()
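The blk-mq-cpumap.c hits above show the block layer's fallback CPU-to-queue mapping: a running counter taken modulo nr_queues, offset by the map's queue_offset. Below is a minimal userspace sketch of that round-robin idea; the queue_map struct and the CPU count are illustrative stand-ins, not the kernel's struct blk_mq_queue_map or its real topology handling.

#include <stdio.h>

/* Illustrative stand-in for the kernel's queue map: an offset into the
 * device's hardware queues plus the number of queues in this map. */
struct queue_map {
    unsigned int queue_offset;
    unsigned int nr_queues;
};

/* Same modulo mapping as queue_index(): wrap the running counter q into
 * the range [queue_offset, queue_offset + nr_queues). */
static unsigned int queue_index(const struct queue_map *qmap,
                                unsigned int nr_queues, unsigned int q)
{
    return qmap->queue_offset + (q % nr_queues);
}

int main(void)
{
    struct queue_map qmap = { .queue_offset = 0, .nr_queues = 4 };
    unsigned int nr_cpus = 10;  /* pretend topology: 10 possible CPUs */
    unsigned int q = 0;

    /* Assign CPUs to hardware queues round-robin, roughly what
     * blk_mq_map_queues() does when no IRQ affinity is available. */
    for (unsigned int cpu = 0; cpu < nr_cpus; cpu++)
        printf("cpu %u -> hw queue %u\n", cpu,
               queue_index(&qmap, qmap.nr_queues, q++));
    return 0;
}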
blk-mq-pci.c
32 for (queue = 0; queue < qmap->nr_queues; queue++) { in blk_mq_pci_map_queues()
44 WARN_ON_ONCE(qmap->nr_queues > 1); in blk_mq_pci_map_queues()
blk-mq-rdma.c
30 for (queue = 0; queue < map->nr_queues; queue++) { in blk_mq_rdma_map_queues()
blk-mq-virtio.c
33 for (queue = 0; queue < qmap->nr_queues; queue++) { in blk_mq_virtio_map_queues()
/kernel/linux/linux-4.19/block/
blk-mq-cpumap.c
17 static int cpu_to_queue_index(unsigned int nr_queues, const int cpu) in cpu_to_queue_index() argument
19 return cpu % nr_queues; in cpu_to_queue_index()
36 unsigned int nr_queues = set->nr_hw_queues; in blk_mq_map_queues() local
46 if (cpu < nr_queues) { in blk_mq_map_queues()
47 map[cpu] = cpu_to_queue_index(nr_queues, cpu); in blk_mq_map_queues()
51 map[cpu] = cpu_to_queue_index(nr_queues, cpu); in blk_mq_map_queues()
/kernel/linux/linux-5.10/drivers/crypto/cavium/cpt/
cptvf_main.c
42 if (cptvf->nr_queues) { in init_worker_threads()
44 cptvf->nr_queues); in init_worker_threads()
47 for (i = 0; i < cptvf->nr_queues; i++) { in init_worker_threads()
69 if (cptvf->nr_queues) { in cleanup_worker_threads()
71 cptvf->nr_queues); in cleanup_worker_threads()
74 for (i = 0; i < cptvf->nr_queues; i++) in cleanup_worker_threads()
100 pqinfo->nr_queues = 0; in free_pending_queues()
104 u32 nr_queues) in alloc_pending_queues() argument
111 pqinfo->nr_queues = nr_queues; in alloc_pending_queues()
139 static int init_pending_queues(struct cpt_vf *cptvf, u32 qlen, u32 nr_queues) in init_pending_queues() argument
[all …]
cptvf.h
85 u32 nr_queues; /* Number of queues supported */ member
91 for (i = 0, q = &qinfo->queue[i]; i < qinfo->nr_queues; i++, \
110 u32 nr_queues; member
cptvf_reqmanager.c
233 if (unlikely(qno >= cptvf->nr_queues)) { in send_cpt_command()
235 qno, cptvf->nr_queues); in send_cpt_command()
549 if (unlikely(qno > cptvf->nr_queues)) { in vq_post_process()
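Taken together, the cptvf matches trace a common driver pattern: size the pending-queue array from nr_queues at init time, then bounds-check the requested queue number before using it (the qno >= nr_queues test in send_cpt_command). Here is a rough, self-contained sketch of that allocate-then-validate pattern; the struct names and helpers are invented for illustration and are not the driver's real types.

#include <stdio.h>
#include <stdlib.h>

/* Illustrative pending-queue bookkeeping, loosely modelled on the
 * driver's pending queue info: one descriptor per hardware queue. */
struct pending_queue {
    unsigned int qlen;          /* depth configured for this queue */
};

struct pending_qinfo {
    unsigned int nr_queues;     /* number of queues supported */
    struct pending_queue *queues;
};

/* Size the queue array from nr_queues up front, as alloc_pending_queues()
 * does before the device starts accepting work. */
static int alloc_pending_queues(struct pending_qinfo *pqinfo,
                                unsigned int qlen, unsigned int nr_queues)
{
    pqinfo->queues = calloc(nr_queues, sizeof(*pqinfo->queues));
    if (!pqinfo->queues)
        return -1;
    pqinfo->nr_queues = nr_queues;
    for (unsigned int i = 0; i < nr_queues; i++)
        pqinfo->queues[i].qlen = qlen;
    return 0;
}

/* Reject out-of-range queue numbers before touching the array, mirroring
 * the qno >= nr_queues check in send_cpt_command(). */
static int send_command(struct pending_qinfo *pqinfo, unsigned int qno)
{
    if (qno >= pqinfo->nr_queues) {
        fprintf(stderr, "invalid queue %u (only %u queues)\n",
                qno, pqinfo->nr_queues);
        return -1;
    }
    /* ... enqueue the command on pqinfo->queues[qno] ... */
    return 0;
}

int main(void)
{
    struct pending_qinfo pqinfo;

    if (alloc_pending_queues(&pqinfo, 128, 8))
        return 1;
    send_command(&pqinfo, 3);   /* accepted */
    send_command(&pqinfo, 8);   /* rejected: out of range */
    free(pqinfo.queues);
    return 0;
}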
/kernel/linux/linux-4.19/drivers/crypto/cavium/cpt/
cptvf_main.c
45 if (cptvf->nr_queues) { in init_worker_threads()
47 cptvf->nr_queues); in init_worker_threads()
50 for (i = 0; i < cptvf->nr_queues; i++) { in init_worker_threads()
72 if (cptvf->nr_queues) { in cleanup_worker_threads()
74 cptvf->nr_queues); in cleanup_worker_threads()
77 for (i = 0; i < cptvf->nr_queues; i++) in cleanup_worker_threads()
103 pqinfo->nr_queues = 0; in free_pending_queues()
107 u32 nr_queues) in alloc_pending_queues() argument
114 pqinfo->nr_queues = nr_queues; in alloc_pending_queues()
142 static int init_pending_queues(struct cpt_vf *cptvf, u32 qlen, u32 nr_queues) in init_pending_queues() argument
[all …]
cptvf.h
88 u32 nr_queues; /* Number of queues supported */ member
94 for (i = 0, q = &qinfo->queue[i]; i < qinfo->nr_queues; i++, \
113 u32 nr_queues; member
cptvf_reqmanager.c
236 if (unlikely(qno >= cptvf->nr_queues)) { in send_cpt_command()
238 qno, cptvf->nr_queues); in send_cpt_command()
564 if (unlikely(qno > cptvf->nr_queues)) { in vq_post_process()
/kernel/linux/linux-5.10/drivers/crypto/cavium/nitrox/
nitrox_sriov.c
58 int nr_queues = 0; in vf_mode_to_nr_queues() local
62 nr_queues = MAX_PF_QUEUES; in vf_mode_to_nr_queues()
65 nr_queues = 8; in vf_mode_to_nr_queues()
68 nr_queues = 4; in vf_mode_to_nr_queues()
71 nr_queues = 2; in vf_mode_to_nr_queues()
74 nr_queues = 1; in vf_mode_to_nr_queues()
78 return nr_queues; in vf_mode_to_nr_queues()
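The nitrox_sriov.c snippet shows the queue budget shrinking as more VFs are enabled: the PF keeps MAX_PF_QUEUES for itself, while the 16/32/64/128-VF modes get 8/4/2/1 rings per function. A hedged sketch of that switch follows; the enum names and the MAX_PF_QUEUES value are assumptions standing in for the driver's own definitions.

#include <stdio.h>

/* Illustrative SR-IOV modes and PF queue budget; the real driver has its
 * own mode enum and MAX_PF_QUEUES definition. */
enum vf_mode { MODE_PF, MODE_VF16, MODE_VF32, MODE_VF64, MODE_VF128 };

#define MAX_PF_QUEUES 64    /* assumed PF ring budget, for illustration */

/* More enabled VFs means fewer packet rings per function, mirroring the
 * switch seen in vf_mode_to_nr_queues() above. */
static int vf_mode_to_nr_queues(enum vf_mode mode)
{
    switch (mode) {
    case MODE_PF:
        return MAX_PF_QUEUES;
    case MODE_VF16:
        return 8;
    case MODE_VF32:
        return 4;
    case MODE_VF64:
        return 2;
    case MODE_VF128:
        return 1;
    default:
        return 0;
    }
}

int main(void)
{
    printf("16-VF mode: %d rings per function\n",
           vf_mode_to_nr_queues(MODE_VF16));
    return 0;
}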
nitrox_lib.c
91 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_free_aqm_queues()
102 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_alloc_aqm_queues()
142 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_free_pktin_queues()
155 ndev->pkt_inq = kcalloc_node(ndev->nr_queues, in nitrox_alloc_pktin_queues()
161 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_alloc_pktin_queues()
nitrox_dev.h
160 int nr_queues; member
250 u16 nr_queues; member
nitrox_mbx.c
66 vfdev->nr_queues = vfdev->msg.data; in pf2vf_send_response()
74 vfdev->nr_queues = 0; in pf2vf_send_response()
nitrox_hal.c
123 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_config_pkt_input_rings()
239 for (i = 0; i < ndev->nr_queues; i++) in nitrox_config_pkt_solicit_ports()
355 for (ring = 0; ring < ndev->nr_queues; ring++) { in nitrox_config_aqm_rings()
/kernel/linux/linux-4.19/tools/perf/util/
auxtrace.c
153 static struct auxtrace_queue *auxtrace_alloc_queue_array(unsigned int nr_queues) in auxtrace_alloc_queue_array() argument
159 if (nr_queues > max_nr_queues) in auxtrace_alloc_queue_array()
162 queue_array = calloc(nr_queues, sizeof(struct auxtrace_queue)); in auxtrace_alloc_queue_array()
166 for (i = 0; i < nr_queues; i++) { in auxtrace_alloc_queue_array()
176 queues->nr_queues = AUXTRACE_INIT_NR_QUEUES; in auxtrace_queues__init()
177 queues->queue_array = auxtrace_alloc_queue_array(queues->nr_queues); in auxtrace_queues__init()
186 unsigned int nr_queues = queues->nr_queues; in auxtrace_queues__grow() local
190 if (!nr_queues) in auxtrace_queues__grow()
191 nr_queues = AUXTRACE_INIT_NR_QUEUES; in auxtrace_queues__grow()
193 while (nr_queues && nr_queues < new_nr_queues) in auxtrace_queues__grow()
[all …]
/kernel/linux/linux-4.19/drivers/crypto/cavium/nitrox/
nitrox_lib.c
77 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_cleanup_pkt_cmdqs()
90 size = ndev->nr_queues * sizeof(struct nitrox_cmdq); in nitrox_init_pkt_cmdqs()
95 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_init_pkt_cmdqs()
nitrox_isr.c
256 nr_entries = (ndev->nr_queues * NR_RING_VECTORS) + 1; in nitrox_enable_msix()
310 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_cleanup_pkt_slc_bh()
325 size = ndev->nr_queues * sizeof(struct bh_data); in nitrox_setup_pkt_slc_bh()
330 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_setup_pkt_slc_bh()
363 nr_ring_vectors = ndev->nr_queues * NR_RING_VECTORS; in nitrox_request_irqs()
nitrox_dev.h
138 u16 nr_queues; member
nitrox_hal.c
119 for (i = 0; i < ndev->nr_queues; i++) { in nitrox_config_pkt_input_rings()
216 for (i = 0; i < ndev->nr_queues; i++) in nitrox_config_pkt_solicit_ports()
/kernel/linux/linux-5.10/tools/perf/util/
auxtrace.c
197 static struct auxtrace_queue *auxtrace_alloc_queue_array(unsigned int nr_queues) in auxtrace_alloc_queue_array() argument
203 if (nr_queues > max_nr_queues) in auxtrace_alloc_queue_array()
206 queue_array = calloc(nr_queues, sizeof(struct auxtrace_queue)); in auxtrace_alloc_queue_array()
210 for (i = 0; i < nr_queues; i++) { in auxtrace_alloc_queue_array()
220 queues->nr_queues = AUXTRACE_INIT_NR_QUEUES; in auxtrace_queues__init()
221 queues->queue_array = auxtrace_alloc_queue_array(queues->nr_queues); in auxtrace_queues__init()
230 unsigned int nr_queues = queues->nr_queues; in auxtrace_queues__grow() local
234 if (!nr_queues) in auxtrace_queues__grow()
235 nr_queues = AUXTRACE_INIT_NR_QUEUES; in auxtrace_queues__grow()
237 while (nr_queues && nr_queues < new_nr_queues) in auxtrace_queues__grow()
[all …]
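In both kernel trees, perf grows the auxtrace queue array the same way: start from AUXTRACE_INIT_NR_QUEUES and keep doubling nr_queues until the requested count fits, then switch to a freshly allocated array. The sketch below mirrors that doubling policy in plain userspace C; the constant value and the copy/allocation details are simplified assumptions rather than perf's exact code.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define INIT_NR_QUEUES 8U   /* stand-in for AUXTRACE_INIT_NR_QUEUES */

struct queue_entry { void *data; };

struct queues {
    unsigned int nr_queues;
    struct queue_entry *queue_array;
};

/* Double the capacity until new_nr_queues fits, then move the existing
 * entries into a freshly allocated array: the shape of
 * auxtrace_queues__grow() in the listings above. */
static int queues_grow(struct queues *queues, unsigned int new_nr_queues)
{
    unsigned int nr_queues = queues->nr_queues;
    struct queue_entry *new_array;

    if (!nr_queues)
        nr_queues = INIT_NR_QUEUES;
    while (nr_queues && nr_queues < new_nr_queues)
        nr_queues <<= 1;        /* wraps to 0 on overflow, ending the loop */
    if (nr_queues < new_nr_queues)
        return -1;              /* request exceeds what we can represent */

    new_array = calloc(nr_queues, sizeof(*new_array));
    if (!new_array)
        return -1;
    if (queues->queue_array)
        memcpy(new_array, queues->queue_array,
               queues->nr_queues * sizeof(*new_array));
    free(queues->queue_array);
    queues->queue_array = new_array;
    queues->nr_queues = nr_queues;
    return 0;
}

int main(void)
{
    struct queues q = { 0, NULL };

    queues_grow(&q, 100);
    printf("capacity grown to %u queues\n", q.nr_queues);  /* prints 128 */
    free(q.queue_array);
    return 0;
}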
/kernel/linux/linux-4.19/drivers/block/
null_blk.h
83 unsigned int nr_queues; member
null_blk_main.c
1335 if (nullb->nr_queues != 1) in nullb_to_queue()
1336 index = raw_smp_processor_id() / ((nr_cpu_ids + nullb->nr_queues - 1) / nullb->nr_queues); in nullb_to_queue()
1477 for (i = 0; i < nullb->nr_queues; i++) in cleanup_queues()
1564 nullb->nr_queues++; in null_init_queues()
1602 nullb->nr_queues = 0; in setup_queues()
1621 nullb->nr_queues++; in init_driver_queues()
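null_blk's nullb_to_queue(), shown above, picks a queue by dividing the current CPU id by ceil(nr_cpu_ids / nr_queues), so CPUs map onto queues in contiguous, near-equal blocks. A quick sketch of just that arithmetic; nr_cpu_ids and the CPU loop are illustrative placeholders for the kernel's per-CPU machinery.

#include <stdio.h>

/* Pick a queue by ceiling division so the CPU space splits into
 * nr_queues contiguous, near-equal blocks: the arithmetic used by
 * nullb_to_queue() above. */
static unsigned int cpu_to_queue(unsigned int cpu, unsigned int nr_cpu_ids,
                                 unsigned int nr_queues)
{
    if (nr_queues == 1)
        return 0;
    return cpu / ((nr_cpu_ids + nr_queues - 1) / nr_queues);
}

int main(void)
{
    const unsigned int nr_cpu_ids = 10, nr_queues = 4;

    /* ceil(10 / 4) = 3, so CPUs 0-2 -> queue 0, 3-5 -> queue 1,
     * 6-8 -> queue 2 and CPU 9 -> queue 3. */
    for (unsigned int cpu = 0; cpu < nr_cpu_ids; cpu++)
        printf("cpu %u -> queue %u\n", cpu,
               cpu_to_queue(cpu, nr_cpu_ids, nr_queues));
    return 0;
}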
/kernel/linux/linux-5.10/drivers/block/
null_blk.h
93 unsigned int nr_queues; member
