/drivers/net/ethernet/freescale/dpaa/
D | dpaa_ethtool.c
    136  static void copy_stats(struct dpaa_percpu_priv *percpu_priv, int num_cpus,   in copy_stats() argument
    139  int num_values = num_cpus + 1;   in copy_stats()
    144  data[crr++ * num_values + num_cpus] += percpu_priv->in_interrupt;   in copy_stats()
    147  data[crr++ * num_values + num_cpus] += percpu_priv->stats.rx_packets;   in copy_stats()
    150  data[crr++ * num_values + num_cpus] += percpu_priv->stats.tx_packets;   in copy_stats()
    153  data[crr++ * num_values + num_cpus] += percpu_priv->tx_confirm;   in copy_stats()
    156  data[crr++ * num_values + num_cpus] += percpu_priv->tx_frag_skbuffs;   in copy_stats()
    159  data[crr++ * num_values + num_cpus] += percpu_priv->stats.tx_errors;   in copy_stats()
    162  data[crr++ * num_values + num_cpus] += percpu_priv->stats.rx_errors;   in copy_stats()
    165  data[crr++ * num_values + num_cpus] += percpu_priv->stats.rx_dropped;   in copy_stats()
    [all …]

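The copy_stats() hits above lay each statistic out as num_cpus per-CPU slots plus one trailing total slot (num_values = num_cpus + 1). A minimal userspace C sketch of that layout follows; the counter values, array names, and the per-CPU slot write are illustrative, not the dpaa driver's code.

/* Hedged sketch: aggregate per-CPU counters into a flat array where each
 * statistic row has NUM_CPUS per-CPU slots plus one trailing total slot. */
#include <stdio.h>

#define NUM_CPUS  4
#define NUM_STATS 3

int main(void)
{
	/* fake per-CPU counters: [cpu][stat] */
	unsigned long percpu[NUM_CPUS][NUM_STATS] = {
		{ 1, 10, 100 }, { 2, 20, 200 }, { 3, 30, 300 }, { 4, 40, 400 },
	};
	int num_values = NUM_CPUS + 1;          /* per-CPU columns + total column */
	unsigned long data[NUM_STATS * (NUM_CPUS + 1)] = { 0 };

	for (int cpu = 0; cpu < NUM_CPUS; cpu++) {
		int crr = 0;                    /* current statistic row */

		for (int stat = 0; stat < NUM_STATS; stat++) {
			data[crr * num_values + cpu] = percpu[cpu][stat];          /* per-CPU slot */
			data[crr++ * num_values + NUM_CPUS] += percpu[cpu][stat];  /* running total */
		}
	}

	for (int stat = 0; stat < NUM_STATS; stat++)
		printf("stat %d total: %lu\n", stat, data[stat * num_values + NUM_CPUS]);
	return 0;
}
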
/drivers/acpi/
D | acpi_pad.c
    258  static void acpi_pad_idle_cpus(unsigned int num_cpus)   in acpi_pad_idle_cpus() argument
    262  num_cpus = min_t(unsigned int, num_cpus, num_online_cpus());   in acpi_pad_idle_cpus()
    263  set_power_saving_task_num(num_cpus);   in acpi_pad_idle_cpus()
    397  int num_cpus;   in acpi_pad_handle_notify() local
    405  num_cpus = acpi_pad_pur(handle);   in acpi_pad_handle_notify()
    406  if (num_cpus < 0) {   in acpi_pad_handle_notify()
    410  acpi_pad_idle_cpus(num_cpus);   in acpi_pad_handle_notify()

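The acpi_pad hits show a requested idle-CPU count being rejected if negative and capped at num_online_cpus(). A hedged userspace sketch of the same clamping, with sysconf() standing in for num_online_cpus() and invented helper names:

/* Hedged sketch of the validate-then-clamp pattern; not the acpi_pad API. */
#include <stdio.h>
#include <unistd.h>

static void idle_cpus(unsigned int num_cpus)
{
	unsigned int online = (unsigned int)sysconf(_SC_NPROCESSORS_ONLN);

	if (num_cpus > online)      /* min_t(unsigned int, num_cpus, online) */
		num_cpus = online;
	printf("idling %u CPUs\n", num_cpus);
}

int main(void)
{
	int num_cpus = 2;           /* pretend this came from the platform request */

	if (num_cpus < 0)
		return 1;           /* reject an invalid request */
	idle_cpus((unsigned int)num_cpus);
	return 0;
}
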
/drivers/pci/controller/
D | pci-xgene-msi.c
    41   int num_cpus;   member
    164  return (hwirq % xgene_msi_ctrl.num_cpus);   in hwirq_to_cpu()
    203  msi->num_cpus, 0);   in xgene_irq_domain_alloc()
    205  bitmap_set(msi->bitmap, msi_irq, msi->num_cpus);   in xgene_irq_domain_alloc()
    231  bitmap_clear(msi->bitmap, hwirq, msi->num_cpus);   in xgene_irq_domain_free()
    375  for (i = cpu; i < NR_HW_IRQS; i += msi->num_cpus) {   in xgene_msi_hwirq_alloc()
    416  for (i = cpu; i < NR_HW_IRQS; i += msi->num_cpus) {   in xgene_msi_hwirq_free()
    451  xgene_msi->num_cpus = num_possible_cpus();   in xgene_msi_probe()

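The X-Gene MSI hits pair hwirq % num_cpus with a stride-num_cpus loop, so each CPU owns every num_cpus-th hardware IRQ. A small standalone C sketch of that partitioning; NR_HW_IRQS and the counts here are illustrative only:

/* Hedged sketch: hwirq N belongs to CPU (N % num_cpus); each CPU walks its
 * own interrupts with a stride of num_cpus. */
#include <stdio.h>

#define NR_HW_IRQS 16   /* made-up total for illustration */

static int hwirq_to_cpu(int hwirq, int num_cpus)
{
	return hwirq % num_cpus;
}

int main(void)
{
	int num_cpus = 4;

	for (int cpu = 0; cpu < num_cpus; cpu++) {
		printf("cpu%d:", cpu);
		for (int i = cpu; i < NR_HW_IRQS; i += num_cpus)
			printf(" hwirq%d", i);   /* the same set hwirq_to_cpu() maps here */
		printf("\n");
	}
	return 0;
}
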
/drivers/infiniband/sw/siw/
D | siw_main.c
    166  int i, num_cpus, cpu, min_use, node = sdev->numa_node, tx_cpu = -1;   in siw_get_tx_cpu() local
    173  num_cpus = cpumask_weight(tx_cpumask);   in siw_get_tx_cpu()
    174  if (!num_cpus) {   in siw_get_tx_cpu()
    177  num_cpus = cpumask_weight(tx_cpumask);   in siw_get_tx_cpu()
    179  if (!num_cpus)   in siw_get_tx_cpu()
    184  for (i = 0, min_use = SIW_MAX_QP; i < num_cpus;   in siw_get_tx_cpu()

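The siw_get_tx_cpu() hits count candidate CPUs with cpumask_weight(), retry with a wider mask if the count is zero, and then scan for the least-used candidate. A hedged sketch of that control flow, with plain arrays standing in for cpumasks and per-CPU QP counters:

/* Hedged sketch of "fall back if empty, then pick the minimum-use entry". */
#include <stdio.h>

#define MAX_USE 1024   /* upper bound, standing in for SIW_MAX_QP */

int main(void)
{
	int all_use[] = { 3, 1, 7, 2 };   /* current use count per candidate CPU */
	int num_local = 0;                /* pretend the NUMA-local candidate set is empty */
	int num_cpus = num_local;

	if (!num_cpus)                    /* fall back to the full online set */
		num_cpus = (int)(sizeof(all_use) / sizeof(all_use[0]));
	if (!num_cpus)
		return 1;                 /* nothing usable at all */

	int tx_cpu = -1, min_use = MAX_USE;
	for (int i = 0; i < num_cpus; i++) {
		if (all_use[i] < min_use) {
			min_use = all_use[i];
			tx_cpu = i;
		}
	}
	printf("picked cpu %d (use count %d)\n", tx_cpu, min_use);
	return 0;
}
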
/drivers/net/ethernet/intel/ice/
D | ice_irq.c
    139  int num_cpus, hw_num_msix, v_other, v_wanted, v_actual;   in ice_ena_msix_range() local
    144  num_cpus = num_online_cpus();   in ice_ena_msix_range()
    159  pf->num_lan_msix = num_cpus;   in ice_ena_msix_range()
    164  pf->num_rdma_msix = num_cpus + ICE_RDMA_NUM_AEQ_MSIX;   in ice_ena_msix_range()

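The ice_ena_msix_range() hits budget MSI-X vectors from the online CPU count: one LAN vector per CPU and num_cpus + ICE_RDMA_NUM_AEQ_MSIX RDMA vectors. A sketch of that arithmetic; the AEQ constant, the v_other value, and the final sum are assumptions here, only the shape follows the hits:

/* Hedged sketch of vector budgeting driven by the online CPU count. */
#include <stdio.h>
#include <unistd.h>

#define RDMA_NUM_AEQ_MSIX 4   /* illustrative stand-in for ICE_RDMA_NUM_AEQ_MSIX */

int main(void)
{
	int num_cpus = (int)sysconf(_SC_NPROCESSORS_ONLN);
	int v_other = 3;                              /* misc vectors (assumed count) */
	int num_lan_msix = num_cpus;                  /* one LAN vector per CPU */
	int num_rdma_msix = num_cpus + RDMA_NUM_AEQ_MSIX;
	int v_wanted = num_lan_msix + num_rdma_msix + v_other;

	printf("cpus=%d wanted=%d (lan=%d rdma=%d other=%d)\n",
	       num_cpus, v_wanted, num_lan_msix, num_rdma_msix, v_other);
	return 0;
}
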
/drivers/thermal/
D | cpufreq_cooling.c
    280  unsigned int freq, num_cpus, idx;   in cpufreq_state2power() local
    287  num_cpus = cpumask_weight(cpufreq_cdev->policy->cpus);   in cpufreq_state2power()
    296  *power = cpu_freq_to_power(cpufreq_cdev, freq) * num_cpus;   in cpufreq_state2power()
    374  unsigned int num_cpus = cpumask_weight(cpufreq_cdev->policy->related_cpus);   in allocate_idle_time() local
    376  cpufreq_cdev->idle_time = kcalloc(num_cpus,   in allocate_idle_time()

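The cpufreq_cooling hits use the policy's CPU count twice: to scale a per-CPU power figure into a total and to size a per-CPU idle-time array. A userspace sketch with a fake power model and calloc() standing in for kcalloc(); names and numbers are illustrative:

/* Hedged sketch: total power = per-CPU power * CPUs sharing the policy,
 * plus a per-CPU bookkeeping array sized by the same count. */
#include <stdio.h>
#include <stdlib.h>

static unsigned int cpu_freq_to_power(unsigned int khz)
{
	return khz / 1000;             /* fake power model: 1 mW per MHz */
}

int main(void)
{
	unsigned int num_cpus = 4;     /* cpumask_weight(policy->cpus) stand-in */
	unsigned int freq = 1800000;   /* kHz */
	unsigned int power = cpu_freq_to_power(freq) * num_cpus;

	/* kcalloc(num_cpus, sizeof(*idle_time), GFP_KERNEL) equivalent */
	unsigned long long *idle_time = calloc(num_cpus, sizeof(*idle_time));
	if (!idle_time)
		return 1;

	printf("%u CPUs at %u kHz -> %u mW\n", num_cpus, freq, power);
	free(idle_time);
	return 0;
}
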
/drivers/char/
D | random.c
    1294  unsigned int num_cpus;   in try_to_generate_entropy() local
    1304  num_cpus = cpumask_weight(&timer_cpus);   in try_to_generate_entropy()
    1306  if (unlikely(num_cpus == 0)) {   in try_to_generate_entropy()
    1308  num_cpus = cpumask_weight(&timer_cpus);   in try_to_generate_entropy()
    1316  } while (cpu == smp_processor_id() && num_cpus > 1);   in try_to_generate_entropy()

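The try_to_generate_entropy() hits pick a CPU and retry while it matches the current CPU, but only when more than one candidate exists. A sketch of just that loop, with rand() and fixed values standing in for the kernel helpers; this is not the random.c implementation, only its control flow:

/* Hedged sketch: choose a candidate that differs from the current CPU when
 * more than one candidate is available; otherwise accept the only one. */
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
	unsigned int num_cpus = 4;     /* cpumask_weight() stand-in */
	unsigned int current_cpu = 2;  /* smp_processor_id() stand-in */
	unsigned int cpu;

	if (num_cpus == 0)
		return 1;              /* the driver would rebuild its candidate mask here */

	do {
		cpu = (unsigned int)rand() % num_cpus;
	} while (cpu == current_cpu && num_cpus > 1);

	printf("timer targets cpu %u\n", cpu);
	return 0;
}
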
/drivers/net/ethernet/huawei/hinic/
D | hinic_main.c
    368  u16 num_cpus = 0;   in hinic_enable_rss() local
    397  num_cpus++;   in hinic_enable_rss()
    400  if (!num_cpus)   in hinic_enable_rss()
    401  num_cpus = num_online_cpus();   in hinic_enable_rss()
    404  nic_dev->num_qps = min_t(u16, nic_dev->num_qps, num_cpus);   in hinic_enable_rss()

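The hinic_enable_rss() hits count CPUs that pass some filter (the filter itself is not visible in these hits), fall back to num_online_cpus() when none match, and clamp num_qps to the result. A hedged sketch with a placeholder filter; none of the values come from the driver:

/* Hedged sketch: count filtered CPUs, fall back to all online CPUs, then
 * clamp the queue-pair count. The "even-numbered CPU" filter is a placeholder. */
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	unsigned short num_qps = 16;   /* queue pairs the device could support (example) */
	unsigned short num_cpus = 0;
	long online = sysconf(_SC_NPROCESSORS_ONLN);

	for (long cpu = 0; cpu < online; cpu++)
		if (cpu % 2 == 0)      /* placeholder filter, not hinic's condition */
			num_cpus++;

	if (!num_cpus)
		num_cpus = (unsigned short)online;

	if (num_qps > num_cpus)        /* min_t(u16, num_qps, num_cpus) */
		num_qps = num_cpus;

	printf("using %u queue pairs for %u CPUs\n", (unsigned)num_qps, (unsigned)num_cpus);
	return 0;
}
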
/drivers/scsi/be2iscsi/
D | be_main.c
    814   for (i = 0; i <= phba->num_cpus; i++) {   in beiscsi_free_irqs()
    832   for (i = 0; i < phba->num_cpus; i++) {   in beiscsi_init_irqs()
    3028  for (i = 0; i < (phba->num_cpus + eq_for_mcc); i++) {   in beiscsi_create_eqs()
    3065  for (i = 0; i < (phba->num_cpus + eq_for_mcc); i++) {   in beiscsi_create_eqs()
    3090  for (i = 0; i < phba->num_cpus; i++) {   in beiscsi_create_cqs()
    3128  for (i = 0; i < phba->num_cpus; i++) {   in beiscsi_create_cqs()
    3534  &phwi_context->be_eq[phba->num_cpus].q,   in be_mcc_queues_create()
    3587  phba->num_cpus = nvec - 1;   in be2iscsi_enable_msix()
    3592  phba->num_cpus = 1;   in be2iscsi_enable_msix()
    3614  for (i = 0; i < (phba->num_cpus + eq_msix); i++) {   in hwi_purge_eq()
    [all …]
D | be_main.h
    278   unsigned int num_cpus;   member
D | be_iscsi.c
    1235  for (i = 0; i < phba->num_cpus; i++) {   in beiscsi_flush_cq()
D | be_mgmt.c
    187   if (phba->nxt_cqid == phba->num_cpus)   in mgmt_open_connection()

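Across the be2iscsi hits, num_cpus is derived from the granted MSI-X vectors with one vector held back for the MCC (management) queue, event queues are created for num_cpus + eq_for_mcc, and a next-CQ cursor wraps at num_cpus. A compressed sketch of those three ideas together; the values and the fallback to 1 are assumptions, not the driver's exact logic:

/* Hedged sketch: reserve one vector for MCC, create num_cpus + 1 EQs, and
 * round-robin connections over the num_cpus I/O completion queues. */
#include <stdio.h>

int main(void)
{
	int nvec = 5;                              /* MSI-X vectors actually granted (example) */
	int num_cpus = (nvec > 1) ? nvec - 1 : 1;  /* one vector kept for MCC (assumed fallback) */
	int eq_for_mcc = 1;
	int nxt_cqid = 0;

	for (int i = 0; i < num_cpus + eq_for_mcc; i++)
		printf("create EQ %d (%s)\n", i, i < num_cpus ? "I/O" : "MCC");

	for (int conn = 0; conn < 6; conn++) {     /* spread connections over the CQs */
		printf("conn %d -> CQ %d\n", conn, nxt_cqid);
		nxt_cqid++;
		if (nxt_cqid == num_cpus)          /* wrap, as in mgmt_open_connection() */
			nxt_cqid = 0;
	}
	return 0;
}
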
/drivers/net/ethernet/amazon/ena/
D | ena_admin_defs.h
    863   u16 num_cpus;   member
D | ena_netdev.c
    2687  host_info->num_cpus = num_online_cpus();   in ena_config_host_info()

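The ENA hits simply report the host's online CPU count to the device through a u16 admin field. A trivial sketch with a stand-in struct; this is not the real ena_admin_host_info layout:

/* Hedged sketch: publish the online CPU count in a fixed-width field. */
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

struct host_info {                 /* illustrative subset only */
	uint16_t num_cpus;
};

int main(void)
{
	struct host_info hi = { 0 };

	hi.num_cpus = (uint16_t)sysconf(_SC_NPROCESSORS_ONLN);
	printf("reporting %u CPUs to the device\n", (unsigned)hi.num_cpus);
	return 0;
}
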
/drivers/scsi/
D | storvsc_drv.c
    1925  int num_cpus = num_online_cpus();   in storvsc_probe() local
    1943  (num_cpus - 1) / storvsc_vcpus_per_sub_channel;   in storvsc_probe()

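The storvsc_probe() hits derive a sub-channel count as (num_cpus - 1) / storvsc_vcpus_per_sub_channel. A sketch of that division; the divisor value 4 is an assumption for illustration, not taken from the hits:

/* Hedged sketch: one sub-channel per group of vcpus_per_sub_channel CPUs
 * beyond the primary channel. */
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	int num_cpus = (int)sysconf(_SC_NPROCESSORS_ONLN);
	int vcpus_per_sub_channel = 4;   /* assumed tunable, like storvsc_vcpus_per_sub_channel */
	int num_sc = (num_cpus - 1) / vcpus_per_sub_channel;

	printf("%d CPUs -> %d sub-channels (plus the primary channel)\n",
	       num_cpus, num_sc);
	return 0;
}
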
/drivers/net/ethernet/chelsio/cxgb3/
D | cxgb3_main.c
    3087  int num_cpus = netif_get_num_default_rss_queues();   in set_nqsets() local
    3094  num_cpus >= nqsets / hwports))   in set_nqsets()
    3096  if (nqsets > num_cpus)   in set_nqsets()
    3097  nqsets = num_cpus;   in set_nqsets()

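The set_nqsets() hits cap the per-port queue sets at netif_get_num_default_rss_queues(). A sketch of that cap; the "half the online CPUs" default used here is only an approximation of that helper, not its exact definition:

/* Hedged sketch: clamp the queue-set count to a default RSS queue count. */
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	long online = sysconf(_SC_NPROCESSORS_ONLN);
	int num_cpus = (int)(online > 1 ? online / 2 : 1);  /* assumed default */
	int nqsets = 8;                                     /* hardware-supported qsets (example) */

	if (nqsets > num_cpus)
		nqsets = num_cpus;
	printf("using %d queue sets (cap %d)\n", nqsets, num_cpus);
	return 0;
}
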
/drivers/gpu/drm/vc4/
D | vc4_hdmi.c
    2794  dai_link->num_cpus = 1;   in vc4_hdmi_audio_init()

/drivers/scsi/smartpqi/
D | smartpqi_init.c
    5268  int num_cpus;   in pqi_calculate_queue_resources() local
    5275  num_cpus = num_online_cpus();   in pqi_calculate_queue_resources()
    5276  num_queue_groups = min(num_cpus, ctrl_info->max_msix_vectors);   in pqi_calculate_queue_resources()

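The pqi_calculate_queue_resources() hits size queue groups as min(num_online_cpus(), max_msix_vectors). A one-screen sketch of that min(); the vector limit is an example value, not the controller's actual limit:

/* Hedged sketch: one queue group per online CPU, capped at the MSI-X limit. */
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	int num_cpus = (int)sysconf(_SC_NPROCESSORS_ONLN);
	int max_msix_vectors = 64;     /* controller limit (example value) */
	int num_queue_groups = num_cpus < max_msix_vectors ? num_cpus : max_msix_vectors;

	printf("%d CPUs, %d vectors -> %d queue groups\n",
	       num_cpus, max_msix_vectors, num_queue_groups);
	return 0;
}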