
Searched refs:ncpus (Results 1 – 25 of 51) sorted by relevance

/kernel/linux/linux-5.10/kernel/irq/
affinity.c 103 unsigned ncpus; member
112 return ln->ncpus - rn->ncpus; in ncpus_cmp_func()
139 node_vectors[n].ncpus = UINT_MAX; in alloc_nodes_vectors()
143 unsigned ncpus; in alloc_nodes_vectors() local
146 ncpus = cpumask_weight(nmsk); in alloc_nodes_vectors()
148 if (!ncpus) in alloc_nodes_vectors()
150 remaining_ncpus += ncpus; in alloc_nodes_vectors()
151 node_vectors[n].ncpus = ncpus; in alloc_nodes_vectors()
228 unsigned nvectors, ncpus; in alloc_nodes_vectors() local
230 if (node_vectors[n].ncpus == UINT_MAX) in alloc_nodes_vectors()
[all …]
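The affinity.c hits above come from spreading interrupt vectors across NUMA nodes in proportion to how many CPUs each node contributes (cpumask_weight() of the per-node mask). Below is a rough, userspace-only sketch of that proportional split; the per-node CPU counts and vector total are made-up inputs, not taken from the kernel code, and the real alloc_nodes_vectors() also handles remainders and zero-CPU nodes.

#include <stdio.h>

/* Toy version of "give each node vectors proportional to its ncpus". */
int main(void)
{
    unsigned ncpus[4] = { 8, 8, 4, 2 };   /* assumed per-node CPU counts */
    unsigned nvectors = 16;               /* assumed total vectors to spread */
    unsigned total = 0, n;

    for (n = 0; n < 4; n++)
        total += ncpus[n];

    for (n = 0; n < 4; n++) {
        /* round up so every node with CPUs gets at least one vector */
        unsigned v = ncpus[n] ? (nvectors * ncpus[n] + total - 1) / total : 0;
        printf("node %u: ncpus=%u -> %u vectors\n", n, ncpus[n], v);
    }
    return 0;
}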
/kernel/linux/linux-5.10/tools/testing/selftests/rcutorture/bin/
cpus2use.sh 17 ncpus=`grep '^processor' /proc/cpuinfo | wc -l`
21 awk -v ncpus=$ncpus '{ print ncpus * ($7 + $NF) / 100 }'`
25 idlecpus=$ncpus
27 awk -v ncpus=$ncpus -v idlecpus=$idlecpus < /dev/null '
kvm-build.sh 43 ncpus=`cpus2use.sh`
44 make -j$ncpus $TORTURE_KMAKE_ARG > $resdir/Make.out 2>&1
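cpus2use.sh counts processors by grepping /proc/cpuinfo and scales its idle estimate by that count; kvm-build.sh then feeds the result to make -j. For the CPU count itself, a minimal C equivalent looks like the sketch below (it uses sysconf(), which is not what the script does, purely as an illustration).

#include <stdio.h>
#include <unistd.h>

int main(void)
{
    /* Online CPUs, roughly what `grep '^processor' /proc/cpuinfo | wc -l` reports. */
    long ncpus = sysconf(_SC_NPROCESSORS_ONLN);
    printf("ncpus=%ld\n", ncpus);
    return 0;
}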
/kernel/linux/linux-5.10/tools/perf/util/
counts.c 9 struct perf_counts *perf_counts__new(int ncpus, int nthreads) in perf_counts__new() argument
16 values = xyarray__new(ncpus, nthreads, sizeof(struct perf_counts_values)); in perf_counts__new()
24 values = xyarray__new(ncpus, nthreads, sizeof(bool)); in perf_counts__new()
58 int evsel__alloc_counts(struct evsel *evsel, int ncpus, int nthreads) in evsel__alloc_counts() argument
60 evsel->counts = perf_counts__new(ncpus, nthreads); in evsel__alloc_counts()
stat.c 151 static int evsel__alloc_prev_raw_counts(struct evsel *evsel, int ncpus, int nthreads) in evsel__alloc_prev_raw_counts() argument
155 counts = perf_counts__new(ncpus, nthreads); in evsel__alloc_prev_raw_counts()
176 int ncpus = evsel__nr_cpus(evsel); in evsel__alloc_stats() local
180 evsel__alloc_counts(evsel, ncpus, nthreads) < 0 || in evsel__alloc_stats()
181 (alloc_raw && evsel__alloc_prev_raw_counts(evsel, ncpus, nthreads) < 0)) in evsel__alloc_stats()
234 int ncpus = evsel__nr_cpus(evsel); in perf_evsel__copy_prev_raw_counts() local
238 for (int cpu = 0; cpu < ncpus; cpu++) { in perf_evsel__copy_prev_raw_counts()
381 int ncpus = evsel__nr_cpus(counter); in process_counter_maps() local
388 for (cpu = 0; cpu < ncpus; cpu++) { in process_counter_maps()
counts.h 38 struct perf_counts *perf_counts__new(int ncpus, int nthreads);
43 int evsel__alloc_counts(struct evsel *evsel, int ncpus, int nthreads);
cputopo.c 180 long ncpus; in cpu_topology__new() local
185 ncpus = cpu__max_present_cpu(); in cpu_topology__new()
194 nr = (u32)(ncpus & UINT_MAX); in cpu_topology__new()
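In the perf hits above, perf_counts__new() and evsel__alloc_counts() size their storage as an ncpus x nthreads table (xyarray__new()). A self-contained sketch of that shape follows, with a plain calloc() standing in for xyarray__new(); the struct fields and sizes here are illustrative, not perf's actual layout.

#include <stdio.h>
#include <stdlib.h>

struct counts_value { unsigned long long val, ena, run; };

int main(void)
{
    int ncpus = 4, nthreads = 2;   /* assumed sizes */
    struct counts_value *tab = calloc((size_t)ncpus * nthreads, sizeof(*tab));
    if (!tab)
        return 1;

    /* one slot per (cpu, thread) pair, indexed as tab[cpu * nthreads + thread] */
    tab[3 * nthreads + 1].val = 42;
    printf("cpu 3, thread 1: %llu\n", tab[3 * nthreads + 1].val);
    free(tab);
    return 0;
}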
/kernel/linux/linux-5.10/arch/x86/include/asm/trace/
hyperv.h 16 __field(unsigned int, ncpus)
21 TP_fast_assign(__entry->ncpus = cpumask_weight(cpus);
27 __entry->ncpus, __entry->mm,
64 __field(unsigned int, ncpus)
67 TP_fast_assign(__entry->ncpus = cpumask_weight(cpus);
71 __entry->ncpus, __entry->vector)
/kernel/linux/linux-5.10/arch/powerpc/platforms/powermac/
smp.c 270 int i, ncpus; in smp_psurge_probe() local
296 ncpus = 4; in smp_psurge_probe()
308 ncpus = 2; in smp_psurge_probe()
322 if (ncpus > NR_CPUS) in smp_psurge_probe()
323 ncpus = NR_CPUS; in smp_psurge_probe()
324 for (i = 1; i < ncpus ; ++i) in smp_psurge_probe()
561 static void __init smp_core99_setup_i2c_hwsync(int ncpus) in smp_core99_setup_i2c_hwsync() argument
693 static void __init smp_core99_setup(int ncpus) in smp_core99_setup() argument
701 smp_core99_setup_i2c_hwsync(ncpus); in smp_core99_setup()
749 for (i = 1; i < ncpus; ++i) in smp_core99_setup()
[all …]
/kernel/linux/linux-5.10/arch/powerpc/kexec/
crash.c 107 unsigned int ncpus = num_online_cpus() - 1;/* Excluding the panic cpu */ in crash_kexec_prepare_cpus() local
114 ncpus = num_present_cpus() - 1; in crash_kexec_prepare_cpus()
126 while ((atomic_read(&cpus_in_crash) < ncpus) && (--msecs > 0)) in crash_kexec_prepare_cpus()
131 if (atomic_read(&cpus_in_crash) >= ncpus) { in crash_kexec_prepare_cpus()
137 ncpus - atomic_read(&cpus_in_crash)); in crash_kexec_prepare_cpus()
168 while (atomic_read(&cpus_in_crash) < ncpus) in crash_kexec_prepare_cpus()
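crash_kexec_prepare_cpus() above spins until the cpus_in_crash counter reaches ncpus (every CPU except the panicking one) or a millisecond budget runs out. The userspace sketch below mimics that wait pattern with C11 atomics and pthreads; the thread body, the 3-CPU count, and the 10-second budget are invented for the example. Build with -pthread.

#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>
#include <unistd.h>

static atomic_int cpus_in_crash;

static void *enter_crash(void *arg)
{
    (void)arg;
    atomic_fetch_add(&cpus_in_crash, 1);   /* "this CPU has checked in" */
    return NULL;
}

int main(void)
{
    int ncpus = 3, msecs = 10000, i;       /* assumed: 3 other CPUs, 10 s budget */
    pthread_t t[3];

    for (i = 0; i < ncpus; i++)
        pthread_create(&t[i], NULL, enter_crash, NULL);

    while (atomic_load(&cpus_in_crash) < ncpus && --msecs > 0)
        usleep(1000);                      /* stand-in for the kernel's per-iteration delay */

    printf("%d of %d checked in\n", atomic_load(&cpus_in_crash), ncpus);
    for (i = 0; i < ncpus; i++)
        pthread_join(t[i], NULL);
    return 0;
}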
/kernel/linux/linux-5.10/tools/testing/selftests/powerpc/
utils.c 91 int ncpus, cpu = -1; in pick_online_cpu() local
95 ncpus = get_nprocs_conf(); in pick_online_cpu()
96 size = CPU_ALLOC_SIZE(ncpus); in pick_online_cpu()
97 mask = CPU_ALLOC(ncpus); in pick_online_cpu()
111 for (cpu = 8; cpu < ncpus; cpu += 8) in pick_online_cpu()
116 for (cpu = ncpus - 1; cpu >= 0; cpu--) in pick_online_cpu()
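pick_online_cpu() sizes a dynamic cpu_set_t with CPU_ALLOC()/CPU_ALLOC_SIZE() and scans the affinity mask for a usable CPU. A condensed userspace sketch of the same glibc calls follows; the scan backwards from the last CPU mirrors the listing, while the error handling is simplified.

#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>
#include <sys/sysinfo.h>

int main(void)
{
    int ncpus = get_nprocs_conf();
    size_t size = CPU_ALLOC_SIZE(ncpus);
    cpu_set_t *mask = CPU_ALLOC(ncpus);
    int cpu, found = -1;

    if (!mask)
        return 1;
    CPU_ZERO_S(size, mask);
    if (sched_getaffinity(0, size, mask) == 0) {
        /* scan from the top, as utils.c does, for a CPU we may run on */
        for (cpu = ncpus - 1; cpu >= 0; cpu--) {
            if (CPU_ISSET_S(cpu, size, mask)) {
                found = cpu;
                break;
            }
        }
    }
    printf("picked cpu %d of %d\n", found, ncpus);
    CPU_FREE(mask);
    return 0;
}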
/kernel/linux/linux-5.10/tools/lib/perf/
evsel.c 42 int perf_evsel__alloc_fd(struct perf_evsel *evsel, int ncpus, int nthreads) in perf_evsel__alloc_fd() argument
44 evsel->fd = xyarray__new(ncpus, nthreads, sizeof(int)); in perf_evsel__alloc_fd()
48 for (cpu = 0; cpu < ncpus; cpu++) { in perf_evsel__alloc_fd()
273 int perf_evsel__alloc_id(struct perf_evsel *evsel, int ncpus, int nthreads) in perf_evsel__alloc_id() argument
275 if (ncpus == 0 || nthreads == 0) in perf_evsel__alloc_id()
281 evsel->sample_id = xyarray__new(ncpus, nthreads, sizeof(struct perf_sample_id)); in perf_evsel__alloc_id()
285 evsel->id = zalloc(ncpus * nthreads * sizeof(u64)); in perf_evsel__alloc_id()
/kernel/linux/linux-5.10/arch/mips/kernel/
crash.c 59 unsigned int ncpus; in crash_kexec_prepare_cpus() local
64 ncpus = num_online_cpus() - 1;/* Excluding the panic cpu */ in crash_kexec_prepare_cpus()
75 while ((cpumask_weight(&cpus_in_crash) < ncpus) && (--msecs > 0)) { in crash_kexec_prepare_cpus()
/kernel/linux/linux-5.10/drivers/clk/mvebu/
clk-cpu.c 171 int ncpus = 0; in of_cpu_clk_setup() local
185 ncpus++; in of_cpu_clk_setup()
187 cpuclk = kcalloc(ncpus, sizeof(*cpuclk), GFP_KERNEL); in of_cpu_clk_setup()
191 clks = kcalloc(ncpus, sizeof(*clks), GFP_KERNEL); in of_cpu_clk_setup()
236 while(ncpus--) in of_cpu_clk_setup()
237 kfree(cpuclk[ncpus].clk_name); in of_cpu_clk_setup()
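of_cpu_clk_setup() allocates one entry per CPU with kcalloc() and, on its error path, unwinds with while(ncpus--) kfree(...). The userspace sketch below shows that reverse-order unwind pattern; strdup() stands in for the per-CPU name allocations and is not what the driver uses.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void)
{
    int ncpus = 4, i;                      /* assumed CPU count */
    char **names = calloc(ncpus, sizeof(*names));
    if (!names)
        return 1;

    for (i = 0; i < ncpus; i++) {
        char buf[16];
        snprintf(buf, sizeof(buf), "cpu%d", i);
        names[i] = strdup(buf);
        if (!names[i])
            goto bail;                     /* pretend an allocation failed here */
    }
    printf("allocated %d names\n", ncpus);
bail:
    while (ncpus--)                        /* free in reverse; freeing NULL is harmless */
        free(names[ncpus]);
    free(names);
    return 0;
}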
/kernel/linux/linux-5.10/arch/sparc/kernel/
setup_32.c 394 int i, ncpus, err; in topology_init() local
400 ncpus = 0; in topology_init()
401 while (!cpu_find_by_instance(ncpus, NULL, NULL)) in topology_init()
402 ncpus++; in topology_init()
403 ncpus_probed = ncpus; in topology_init()
ds.c 479 static int dr_cpu_size_response(int ncpus) in dr_cpu_size_response() argument
483 (sizeof(struct dr_cpu_resp_entry) * ncpus)); in dr_cpu_size_response()
487 u64 handle, int resp_len, int ncpus, in dr_cpu_init_response() argument
502 tag->num_records = ncpus; in dr_cpu_init_response()
511 BUG_ON(i != ncpus); in dr_cpu_init_response()
514 static void dr_cpu_mark(struct ds_data *resp, int cpu, int ncpus, in dr_cpu_mark() argument
524 for (i = 0; i < ncpus; i++) { in dr_cpu_mark()
537 int resp_len, ncpus, cpu; in dr_cpu_configure() local
540 ncpus = cpumask_weight(mask); in dr_cpu_configure()
541 resp_len = dr_cpu_size_response(ncpus); in dr_cpu_configure()
[all …]
sun4m_smp.c 177 register int ncpus = SUN4M_NCPUS; in sun4m_cross_call() local
196 for (i = 0; i < ncpus; i++) { in sun4m_cross_call()
217 } while (++i < ncpus); in sun4m_cross_call()
225 } while (++i < ncpus); in sun4m_cross_call()
/kernel/linux/linux-5.10/drivers/xen/
mcelog.c 58 static uint32_t ncpus; variable
240 for (i = 0; i < ncpus; i++) in convert_log()
243 if (unlikely(i == ncpus)) { in convert_log()
378 ncpus = mc_op.u.mc_physcpuinfo.ncpus; in bind_virq_for_mce()
379 g_physinfo = kcalloc(ncpus, sizeof(struct mcinfo_logical_cpu), in bind_virq_for_mce()
/kernel/linux/linux-5.10/tools/lib/perf/include/internal/
evsel.h 54 int perf_evsel__alloc_fd(struct perf_evsel *evsel, int ncpus, int nthreads);
60 int perf_evsel__alloc_id(struct perf_evsel *evsel, int ncpus, int nthreads);
/kernel/linux/linux-5.10/arch/xtensa/kernel/
smp.c 89 unsigned int ncpus = get_core_count(); in smp_init_cpus() local
92 pr_info("%s: Core Count = %d\n", __func__, ncpus); in smp_init_cpus()
95 if (ncpus > NR_CPUS) { in smp_init_cpus()
96 ncpus = NR_CPUS; in smp_init_cpus()
97 pr_info("%s: limiting core count by %d\n", __func__, ncpus); in smp_init_cpus()
100 for (i = 0; i < ncpus; ++i) in smp_init_cpus()
/kernel/linux/linux-5.10/kernel/locking/
test-ww_mutex.c 328 static int test_cycle(unsigned int ncpus) in test_cycle() argument
333 for (n = 2; n <= ncpus + 1; n++) { in test_cycle()
583 int ncpus = num_online_cpus(); in test_ww_mutex_init() local
606 ret = test_cycle(ncpus); in test_ww_mutex_init()
610 ret = stress(16, 2*ncpus, STRESS_INORDER); in test_ww_mutex_init()
614 ret = stress(16, 2*ncpus, STRESS_REORDER); in test_ww_mutex_init()
618 ret = stress(4095, hweight32(STRESS_ALL)*ncpus, STRESS_ALL); in test_ww_mutex_init()
/kernel/linux/linux-5.10/tools/testing/selftests/powerpc/benchmarks/
context_switch.c 108 int pid, ncpus; in start_process_on() local
121 ncpus = get_nprocs(); in start_process_on()
122 size = CPU_ALLOC_SIZE(ncpus); in start_process_on()
123 cpuset = CPU_ALLOC(ncpus); in start_process_on()
/kernel/linux/linux-5.10/arch/x86/kernel/
kvmclock.c 204 unsigned long ncpus; in kvmclock_init_mem() local
212 ncpus = num_possible_cpus() - HVC_BOOT_ARRAY_SIZE; in kvmclock_init_mem()
213 order = get_order(ncpus * sizeof(*hvclock_mem)); in kvmclock_init_mem()
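kvmclock_init_mem() sizes one hvclock slot per possible CPU (minus the boot-array entries) and converts the byte count into a page-allocation order with get_order(). A hedged sketch of that order calculation follows; PAGE_SIZE, the per-record size, and the CPU counts are assumed values, not the kernel's.

#include <stdio.h>

#define PAGE_SIZE 4096UL

/* Smallest order such that (PAGE_SIZE << order) >= bytes, mimicking get_order(). */
static unsigned int order_for(unsigned long bytes)
{
    unsigned long pages = (bytes + PAGE_SIZE - 1) / PAGE_SIZE;
    unsigned int order = 0;

    while ((1UL << order) < pages)
        order++;
    return order;
}

int main(void)
{
    unsigned long ncpus = 64 - 4;     /* assumed: possible CPUs minus boot-array slots */
    unsigned long bytes = ncpus * 32; /* assumed 32-byte per-CPU record */

    printf("bytes=%lu -> order %u\n", bytes, order_for(bytes));
    return 0;
}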
/kernel/linux/linux-5.10/drivers/misc/sgi-gru/
grukservices.c 145 int ctxnum, ncpus; in gru_load_kernel_context() local
163 ncpus = uv_blade_nr_possible_cpus(blade_id); in gru_load_kernel_context()
165 GRU_NUM_KERNEL_CBR * ncpus + bs->bs_async_cbrs); in gru_load_kernel_context()
167 GRU_NUM_KERNEL_DSR_BYTES * ncpus + in gru_load_kernel_context()
363 int ncpus; in gru_lock_async_resource() local
366 ncpus = uv_blade_nr_possible_cpus(blade_id); in gru_lock_async_resource()
368 *cb = bs->kernel_cb + ncpus * GRU_HANDLE_STRIDE; in gru_lock_async_resource()
370 *dsr = bs->kernel_dsr + ncpus * GRU_NUM_KERNEL_DSR_BYTES; in gru_lock_async_resource()
/kernel/linux/linux-5.10/arch/x86/platform/uv/
uv_time.c 52 int ncpus; member
158 head->ncpus = uv_blade_nr_possible_cpus(bid); in uv_rtc_allocate_timers()
177 for (c = 0; c < head->ncpus; c++) { in uv_rtc_find_next_timer()
