
Searched refs: num_queues (Results 1 – 25 of 59) sorted by relevance


/drivers/net/xen-netback/
interface.c 156 unsigned int num_queues; in xenvif_select_queue() local
160 num_queues = READ_ONCE(vif->num_queues); in xenvif_select_queue()
161 if (num_queues < 1) in xenvif_select_queue()
180 unsigned int num_queues; in xenvif_start_xmit() local
190 num_queues = READ_ONCE(vif->num_queues); in xenvif_start_xmit()
191 if (num_queues < 1) in xenvif_start_xmit()
196 if (index >= num_queues) { in xenvif_start_xmit()
199 index %= num_queues; in xenvif_start_xmit()
241 unsigned int num_queues; in xenvif_get_stats() local
249 num_queues = READ_ONCE(vif->num_queues); in xenvif_get_stats()
[all …]
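Read together, the interface.c hits show the transmit paths taking a single READ_ONCE() snapshot of vif->num_queues and validating it before use, so a concurrent backend disconnect (which resets the count) cannot change the value between check and use. A minimal sketch of that pattern follows; the drop fallback and surrounding declarations are assumptions, not the driver source:

    unsigned int num_queues = READ_ONCE(vif->num_queues);  /* one snapshot, reused below */
    u16 index;

    if (num_queues < 1)
            goto drop;                      /* backend not connected yet: assumed fallback */

    index = skb_get_queue_mapping(skb);
    if (index >= num_queues)
            index %= num_queues;            /* stale mapping after a queue-count change */
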
xenbus.c 222 for (i = 0; i < vif->num_queues; ++i) { in xenvif_debugfs_addif()
498 unsigned int num_queues = vif->num_queues; in backend_disconnect() local
510 vif->num_queues = 0; in backend_disconnect()
513 for (queue_index = 0; queue_index < num_queues; ++queue_index) in backend_disconnect()
753 for (queue_index = 0; queue_index < vif->num_queues; queue_index++) { in xen_net_rate_changed()
988 be->vif->num_queues = requested_num_queues; in connect()
1006 be->vif->num_queues = queue_index; in connect()
1022 be->vif->num_queues = queue_index; in connect()
1053 if (be->vif->num_queues > 0) in connect()
1055 for (queue_index = 0; queue_index < be->vif->num_queues; ++queue_index) in connect()
[all …]
/drivers/net/
xen-netfront.c 350 unsigned int num_queues = dev->real_num_tx_queues; in xennet_open() local
357 for (i = 0; i < num_queues; ++i) { in xennet_open()
550 unsigned int num_queues = dev->real_num_tx_queues; in xennet_select_queue() local
555 if (num_queues == 1) { in xennet_select_queue()
559 queue_idx = hash % num_queues; in xennet_select_queue()
580 unsigned int num_queues = dev->real_num_tx_queues; in xennet_start_xmit() local
585 if (num_queues < 1) in xennet_start_xmit()
715 unsigned int num_queues = dev->real_num_tx_queues; in xennet_close() local
719 for (i = 0; i < num_queues; ++i) { in xennet_close()
1259 unsigned int num_queues = dev->real_num_tx_queues; in xennet_poll_controller() local
[all …]
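The xennet_select_queue() hits at 550–559 show the usual flow-hash fan-out: with a single TX queue there is nothing to choose, otherwise a hash of the packet is folded into the queue range. A rough sketch, where skb_get_hash() as the hash source is an assumption about the surrounding code:

    if (num_queues == 1) {
            queue_idx = 0;                          /* single queue: trivial choice */
    } else {
            hash = skb_get_hash(skb);               /* per-flow hash (helper assumed) */
            queue_idx = hash % num_queues;          /* fold into [0, num_queues) */
    }
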
/drivers/net/ethernet/amazon/ena/
ena_netdev.c 102 for (i = 0; i < adapter->num_queues; i++) in update_rx_ring_mtu()
130 adapter->netdev->rx_cpu_rmap = alloc_irq_cpu_rmap(adapter->num_queues); in ena_init_rx_cpu_rmap()
133 for (i = 0; i < adapter->num_queues; i++) { in ena_init_rx_cpu_rmap()
172 for (i = 0; i < adapter->num_queues; i++) { in ena_init_io_rings()
275 for (i = 0; i < adapter->num_queues; i++) { in ena_setup_all_tx_resources()
303 for (i = 0; i < adapter->num_queues; i++) in ena_free_all_io_tx_resources()
408 for (i = 0; i < adapter->num_queues; i++) { in ena_setup_all_rx_resources()
436 for (i = 0; i < adapter->num_queues; i++) in ena_free_all_io_rx_resources()
585 for (i = 0; i < adapter->num_queues; i++) { in ena_refill_all_rx_bufs()
601 for (i = 0; i < adapter->num_queues; i++) in ena_free_all_rx_bufs()
[all …]
ena_ethtool.c 132 for (i = 0; i < adapter->num_queues; i++) { in ena_queue_stats()
204 return adapter->num_queues * (ENA_STATS_ARRAY_TX + ENA_STATS_ARRAY_RX) in ena_get_sset_count()
213 for (i = 0; i < adapter->num_queues; i++) { in ena_queue_strings()
343 for (i = 0; i < adapter->num_queues; i++) in ena_update_tx_rings_intr_moderation()
623 info->data = adapter->num_queues; in ena_get_rxnfc()
784 channels->max_rx = adapter->num_queues; in ena_get_channels()
785 channels->max_tx = adapter->num_queues; in ena_get_channels()
788 channels->rx_count = adapter->num_queues; in ena_get_channels()
789 channels->tx_count = adapter->num_queues; in ena_get_channels()
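The ena_ethtool.c hits at 784–789 follow the standard ethtool channels-reporting shape: the driver advertises num_queues as both the maximum and the current RX/TX channel count. A hedged sketch of such a callback; struct example_adapter is illustrative, not the ENA source:

    static void example_get_channels(struct net_device *netdev,
                                     struct ethtool_channels *channels)
    {
            struct example_adapter *adapter = netdev_priv(netdev);

            /* expose exactly num_queues channels, with no headroom to grow */
            channels->max_rx   = adapter->num_queues;
            channels->max_tx   = adapter->num_queues;
            channels->rx_count = adapter->num_queues;
            channels->tx_count = adapter->num_queues;
    }
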
/drivers/net/ethernet/qlogic/qede/
qede.h 185 #define QEDE_MAX_RSS_CNT(edev) ((edev)->dev_info.num_queues)
186 #define QEDE_MAX_TSS_CNT(edev) ((edev)->dev_info.num_queues)
198 u16 num_queues; member
199 #define QEDE_QUEUE_CNT(edev) ((edev)->num_queues)
200 #define QEDE_RSS_COUNT(edev) ((edev)->num_queues - (edev)->fp_num_tx)
202 #define QEDE_TSS_COUNT(edev) ((edev)->num_queues - (edev)->fp_num_rx)
542 #define for_each_queue(i) for (i = 0; i < edev->num_queues; i++)
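The for_each_queue() helper at 542 hides the upper bound so per-queue loops elsewhere in qede stay terse. A usage sketch, with example_init_fastpath() as a hypothetical callee:

    int i;

    for_each_queue(i)                       /* for (i = 0; i < edev->num_queues; i++) */
            example_init_fastpath(edev, i); /* hypothetical per-queue setup */
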
qede_main.c 614 info->num_queues, info->num_queues); in qede_alloc_etherdev()
631 info->num_queues, info->num_queues); in qede_alloc_etherdev()
772 edev->num_queues = 0; in qede_free_fp_array()
1132 edev->num_queues = rc; in qede_set_num_queues()
1842 if (!edev->num_queues) { in qede_start_queues()
2092 edev->num_queues = 0; in qede_load()
/drivers/soc/ti/
knav_qmss_acc.c 41 for (queue = 0; queue < range->num_queues; queue++) { in __knav_acc_notify()
158 queue >= range_base + range->num_queues) { in knav_acc_int_handler()
162 range_base + range->num_queues); in knav_acc_int_handler()
321 queue_mask = BIT(range->num_queues) - 1; in knav_acc_setup_cmd()
387 for (queue = 0; queue < range->num_queues; queue++) { in knav_acc_init_range()
447 channels = range->num_queues; in knav_acc_free_range()
531 channels = range->num_queues; in knav_init_acc_range()
541 if (range->num_queues > 32) { in knav_init_acc_range()
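The knav_qmss_acc.c hits at 321 and 541 belong together: the accumulator range is described by a contiguous bit mask, which is presumably why ranges larger than 32 queues are rejected, since a 32-bit mask cannot cover them. A sketch of that relationship; the exact error code is an assumption:

    if (range->num_queues > 32)                 /* mask below cannot describe more */
            return -EINVAL;

    queue_mask = BIT(range->num_queues) - 1;    /* e.g. 8 queues -> 0x000000ff */
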
knav_qmss_queue.c 197 kdev->base_id + kdev->num_queues > id) { in knav_queue_find_by_id()
443 kdev->base_id + kdev->num_queues - 1); in knav_queue_debug_show()
1204 range->num_queues = temp[1]; in knav_setup_queue_range()
1238 range->num_irqs = min(range->num_irqs, range->num_queues); in knav_setup_queue_range()
1258 end = min(qmgr->start_queue + qmgr->num_queues, in knav_setup_queue_range()
1259 range->queue_base + range->num_queues); in knav_setup_queue_range()
1272 range->queue_base + range->num_queues - 1, in knav_setup_queue_range()
1277 kdev->num_queues_in_use += range->num_queues; in knav_setup_queue_range()
1385 qmgr->num_queues = temp[1]; in knav_queue_init_qmgrs()
1393 qmgr->start_queue, qmgr->num_queues); in knav_queue_init_qmgrs()
[all …]
knav_qmss.h 146 unsigned num_queues; member
298 unsigned num_queues; member
332 unsigned num_queues; member
/drivers/gpu/drm/amd/amdkfd/
kfd_packet_manager.c 144 uint32_t num_queues; in pm_create_map_process() local
159 num_queues = 0; in pm_create_map_process()
161 num_queues++; in pm_create_map_process()
162 packet->bitfields10.num_queues = (qpd->is_debug) ? 0 : num_queues; in pm_create_map_process()
190 packet->bitfields2.num_queues = 1; in pm_create_map_queue()
527 packet->bitfields2.num_queues = 1; in pm_send_unmap_queue()
kfd_pm4_headers.h 88 uint32_t num_queues:10; member
141 uint32_t num_queues:10; member
kfd_pm4_headers_vi.h 191 uint32_t num_queues:10; member
245 uint32_t num_queues:3; member
366 uint32_t num_queues:3; member
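The num_queues:10 and num_queues:3 members above are bit-fields packed into 32-bit PM4 packet dwords, so their ranges are bounded (0–1023 and 0–7 respectively). A self-contained illustration of that packing; the surrounding field layout here is invented, only the bit-width idea carries over:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical layout: 14 + 3 + 15 = 32 bits in one dword. */
    struct example_pm4_bits {
            uint32_t reserved_lo:14;
            uint32_t num_queues:3;          /* 3 bits -> at most 7 queues per packet */
            uint32_t reserved_hi:15;
    };

    int main(void)
    {
            struct example_pm4_bits b = { .num_queues = 1 };

            printf("num_queues = %u (field max %u)\n", b.num_queues, (1u << 3) - 1);
            return 0;
    }
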
kfd_process_queue_manager.c 143 int num_queues = 0; in pqm_create_queue() local
165 num_queues++; in pqm_create_queue()
166 if (num_queues >= dev->device_info->max_no_of_hqd/2) in pqm_create_queue()
/drivers/scsi/
virtio_scsi.c 105 u32 num_queues; member
245 num_vqs = vscsi->num_queues; in virtscsi_poll_requests()
832 num_vqs = vscsi->num_queues + VIRTIO_SCSI_VQ_BASE; in virtscsi_init()
883 u32 num_queues; in virtscsi_probe() local
893 num_queues = virtscsi_config_get(vdev, num_queues) ? : 1; in virtscsi_probe()
897 if (num_queues == 1) in virtscsi_probe()
903 sizeof(*vscsi) + sizeof(vscsi->req_vqs[0]) * num_queues); in virtscsi_probe()
911 vscsi->num_queues = num_queues; in virtscsi_probe()
931 shost->nr_hw_queues = num_queues; in virtscsi_probe()
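Taking the virtio_scsi.c hits at 893, 911 and 931 together, the probe path reads the optional num_queues field from virtio config space, falls back to a single queue when the device does not report one (the GNU "?:" extension keeps the fallback on one line), and mirrors the value into the SCSI host so blk-mq creates one hardware queue per request virtqueue. Roughly, with allocation and error handling omitted:

    num_queues = virtscsi_config_get(vdev, num_queues) ?: 1;  /* device may omit the field */
    ...
    vscsi->num_queues   = num_queues;
    shost->nr_hw_queues = num_queues;   /* one blk-mq hardware queue per request vq */
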
/drivers/net/ethernet/cavium/liquidio/
cn23xx_vf_device.c 50 static int cn23xx_vf_reset_io_queues(struct octeon_device *oct, u32 num_queues) in cn23xx_vf_reset_io_queues() argument
57 for (q_no = 0; q_no < num_queues; q_no++) { in cn23xx_vf_reset_io_queues()
67 for (q_no = 0; q_no < num_queues; q_no++) { in cn23xx_vf_reset_io_queues()
361 u32 num_queues = oct->num_iqs; in cn23xx_disable_vf_io_queues() local
366 if (num_queues < oct->num_oqs) in cn23xx_disable_vf_io_queues()
367 num_queues = oct->num_oqs; in cn23xx_disable_vf_io_queues()
369 cn23xx_vf_reset_io_queues(oct, num_queues); in cn23xx_disable_vf_io_queues()
/drivers/scsi/qla2xxx/
qla_tmpl.h 132 uint32_t num_queues; member
193 uint32_t num_queues; member
/drivers/staging/fsl-dpaa2/ethernet/
dpaa2-eth.h 348 ((priv)->dpni_attrs.num_queues > 1)
358 return priv->dpni_attrs.num_queues; in dpaa2_eth_queue_count()
/drivers/scsi/qedf/
qedf_main.c 2074 for (i = 0; i < qedf->num_queues; i++) { in qedf_request_msix_irq()
2402 for (id = 0; id < qedf->num_queues; id++) { in qedf_destroy_sb()
2419 kcalloc(qedf->num_queues, sizeof(struct qedf_fastpath), in qedf_prepare_sb()
2428 for (id = 0; id < qedf->num_queues; id++) { in qedf_prepare_sb()
2576 for (i = 0; i < qedf->num_queues; i++) { in qedf_free_global_queues()
2681 if (!qedf->num_queues) { in qedf_alloc_global_queues()
2696 * qedf->num_queues), GFP_KERNEL); in qedf_alloc_global_queues()
2711 for (i = 0; i < qedf->num_queues; i++) { in qedf_alloc_global_queues()
2779 for (i = 0; i < qedf->num_queues; i++) { in qedf_alloc_global_queues()
2812 qedf->num_queues = MIN_NUM_CPUS_MSIX(qedf); in qedf_set_fcoe_pf_param()
[all …]
/drivers/scsi/qedi/
qedi_main.c 800 qedi->num_queues = MIN_NUM_CPUS_MSIX(qedi); in qedi_set_iscsi_pf_param()
803 "Number of CQ count is %d\n", qedi->num_queues); in qedi_set_iscsi_pf_param()
809 qedi->num_queues * sizeof(struct qedi_glbl_q_params), in qedi_set_iscsi_pf_param()
830 qedi->pf_params.iscsi_pf_params.num_queues = qedi->num_queues; in qedi_set_iscsi_pf_param()
873 size = qedi->num_queues * sizeof(struct qedi_glbl_q_params); in qedi_free_iscsi_pf_param()
1201 for (i = 0; i < qedi->num_queues; i++) { in qedi_free_global_queues()
1314 if (!qedi->num_queues) { in qedi_alloc_global_queues()
1328 qedi->num_queues), GFP_KERNEL); in qedi_alloc_global_queues()
1350 for (i = 0; i < qedi->num_queues; i++) { in qedi_alloc_global_queues()
1426 for (i = 0; i < qedi->num_queues; i++) { in qedi_alloc_global_queues()
/drivers/net/ethernet/cadence/
macb_main.c 1399 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) in macb_poll_controller()
1790 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) { in macb_free_consistent()
1838 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) { in macb_alloc_consistent()
1882 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) { in gem_init_rings()
1937 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) { in macb_reset_hw()
2095 for (q = 0, queue = bp->queues; q < bp->num_queues; ++q, ++queue) { in macb_init_hw()
2782 unsigned int *num_queues) in macb_probe_queues() argument
2787 *num_queues = 1; in macb_probe_queues()
2805 (*num_queues)++; in macb_probe_queues()
3449 unsigned int queue_mask, num_queues; in macb_probe() local
[all …]
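The macb_probe_queues() hits at 2782–2805 count queues by walking a hardware queue mask, with queue 0 always present and each additional set bit adding one TX queue. A functionally similar, more compact sketch using the kernel's population count; where the mask comes from (a capability register) is assumed:

    static void example_probe_queues(u32 queue_mask, unsigned int *num_queues)
    {
            /* queue 0 always exists; every other set bit adds one TX queue */
            *num_queues = 1 + hweight32(queue_mask & ~BIT(0));
    }
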
/drivers/net/ethernet/huawei/hinic/
hinic_hw_qp.c 85 u16 num_queues, u16 max_queues) in hinic_qp_prepare_header() argument
90 qp_ctxt_hdr->num_queues = num_queues; in hinic_qp_prepare_header()
hinic_hw_qp_ctxt.h 158 u16 num_queues; member
hinic_hw_qp.h 129 u16 num_queues, u16 max_queues);
/drivers/gpu/drm/nouveau/nvkm/falcon/
msgqueue_0148cdec.c 95 u8 num_queues; in init_callback() member
