
Searched refs: num_events (Results 1 – 17 of 17), sorted by relevance

/arch/sh/kernel/cpu/sh4a/
ubc.c 51 for (i = 0; i < sh4a_ubc.num_events; i++) in sh4a_ubc_enable_all()
61 for (i = 0; i < sh4a_ubc.num_events; i++) in sh4a_ubc_disable_all()
71 for (i = 0; i < sh4a_ubc.num_events; i++) in sh4a_ubc_active_mask()
90 .num_events = 2,
117 for (i = 0; i < sh4a_ubc.num_events; i++) { in sh4a_ubc_init()
perf_event.c 264 for (i = 0; i < sh4a_pmu.num_events; i++) in sh4a_pmu_disable_all()
272 for (i = 0; i < sh4a_pmu.num_events; i++) in sh4a_pmu_enable_all()
278 .num_events = 2,
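
Both sh4a files use num_events the same way: it is set to 2 in the static descriptor and simply bounds a loop over the hardware channels when enabling, disabling, or masking them. A minimal sketch of that shape, reconstructed from the hits above (sh4a_ubc_clear_channel() is a hypothetical stand-in for the real register write):

    static void sh4a_ubc_disable_all(void)
    {
            int i;

            /* num_events = 2 on sh4a, per the .num_events initializer above */
            for (i = 0; i < sh4a_ubc.num_events; i++)
                    sh4a_ubc_clear_channel(i);      /* hypothetical helper */
    }
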
/arch/sh/kernel/
perf_event.c 42 static atomic_t num_events; variable
77 return sh_pmu->num_events; in perf_num_counters()
86 if (!atomic_add_unless(&num_events, -1, 1)) { in hw_perf_event_destroy()
88 if (atomic_dec_return(&num_events) == 0) in hw_perf_event_destroy()
139 if (!atomic_inc_not_zero(&num_events)) { in __hw_perf_event_init()
141 if (atomic_read(&num_events) == 0 && in __hw_perf_event_init()
145 atomic_inc(&num_events); in __hw_perf_event_init()
275 idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events); in sh_pmu_add()
276 if (idx == sh_pmu->num_events) in sh_pmu_add()
378 WARN_ON(_pmu->num_events > MAX_HWEVENTS); in register_sh_pmu()
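
Two distinct uses of num_events appear in this file. Lines 86–145 are the reserve/release reference count (sketched after the powerpc group below, which repeats it almost verbatim); lines 275–276 use num_events as the width of the counter-allocation bitmap when an event is scheduled in. A sketch of the latter, assuming the usual per-CPU used_mask bookkeeping these drivers share:

    static int sh_pmu_add(struct perf_event *event, int flags)
    {
            struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);
            struct hw_perf_event *hwc = &event->hw;
            int idx;

            /* Find a free counter; num_events bounds the search. */
            idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);
            if (idx == sh_pmu->num_events)
                    return -EAGAIN;         /* all counters are busy */

            __set_bit(idx, cpuc->used_mask);
            hwc->idx = idx;

            return 0;       /* programming of the counter itself omitted */
    }
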
hw_breakpoint.c 38 static struct sh_ubc ubc_dummy = { .num_events = 0 };
55 for (i = 0; i < sh_ubc->num_events; i++) { in arch_install_hw_breakpoint()
64 if (WARN_ONCE(i == sh_ubc->num_events, "Can't find any breakpoint slot")) in arch_install_hw_breakpoint()
87 for (i = 0; i < sh_ubc->num_events; i++) { in arch_uninstall_hw_breakpoint()
96 if (WARN_ONCE(i == sh_ubc->num_events, "Can't find any breakpoint slot")) in arch_uninstall_hw_breakpoint()
277 for (i = 0; i < sh_ubc->num_events; i++) { in flush_ptrace_hw_breakpoint()
307 for (i = 0; i < sh_ubc->num_events; i++) { in hw_breakpoint_handler()
420 WARN_ON(ubc->num_events > HBP_NUM); in register_sh_ubc()
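
The breakpoint side walks the same num_events range to find a slot, and WARNs if the loop falls off the end. The install path, reconstructed from hits 55–64 (bp_per_reg is assumed to be the per-CPU slot array, as in other arch hw_breakpoint code):

    static int arch_install_hw_breakpoint(struct perf_event *bp)
    {
            int i;

            for (i = 0; i < sh_ubc->num_events; i++) {
                    struct perf_event **slot = this_cpu_ptr(&bp_per_reg[i]);

                    if (!*slot) {
                            *slot = bp;     /* claim the first free slot */
                            break;
                    }
            }

            if (WARN_ONCE(i == sh_ubc->num_events,
                          "Can't find any breakpoint slot"))
                    return -EBUSY;

            /* ... program UBC channel i for this breakpoint ... */
            return 0;
    }

register_sh_ubc() (hit 420) sanity-checks the same field at registration time: WARN_ON(ubc->num_events > HBP_NUM).
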
/arch/powerpc/perf/
e500-pmu.c 87 static int num_events = 128; variable
95 if (event_low >= num_events) in e500_xlate_event()
129 num_events = 256; in init_e500_pmu()
e6500-pmu.c 89 static int num_events = 512; variable
95 if (event_low >= num_events || in e6500_xlate_event()
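
On the Freescale embedded PMUs, num_events is not a counter count but the size of the event-code space: 128 on e500, bumped to 256 at runtime in init_e500_pmu() (hit 129), and 512 on e6500. The translate step bounds-checks the event code against it; a sketch (the extraction of event_low and the return convention are simplifying assumptions, not the exact driver logic):

    static int e500_xlate_event(u64 event_id)
    {
            u32 event_low = (u32)event_id;

            /* Reject codes beyond what this core implements. */
            if (event_low >= num_events)
                    return 0;               /* unknown event */

            return 1;                       /* plausible on this core */
    }
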
core-fsl-emb.c 34 static atomic_t num_events; variable
226 if (atomic_read(&num_events)) { in fsl_emb_pmu_disable()
448 if (!atomic_add_unless(&num_events, -1, 1)) { in hw_perf_event_destroy()
450 if (atomic_dec_return(&num_events) == 0) in hw_perf_event_destroy()
573 if (!atomic_inc_not_zero(&num_events)) { in fsl_emb_pmu_event_init()
575 if (atomic_read(&num_events) == 0 && in fsl_emb_pmu_event_init()
579 atomic_inc(&num_events); in fsl_emb_pmu_event_init()
core-book3s.c 1791 static atomic_t num_events; variable
1800 if (!atomic_add_unless(&num_events, -1, 1)) { in hw_perf_event_destroy()
1802 if (atomic_dec_return(&num_events) == 0) in hw_perf_event_destroy()
1978 if (!atomic_inc_not_zero(&num_events)) { in power_pmu_event_init()
1980 if (atomic_read(&num_events) == 0 && in power_pmu_event_init()
1984 atomic_inc(&num_events); in power_pmu_event_init()
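
The core-fsl-emb.c and core-book3s.c hits repeat the reference-counting idiom already seen in arch/sh/kernel/perf_event.c and repeated again by s390 below: the first event created reserves the PMC hardware, the last one destroyed releases it, and num_events is the atomic counter that makes those first/last transitions race-free. The common shape, reconstructed from these call sites (pmc_reserve_mutex and the reserve/release helpers are the names several of these drivers actually use; exact signatures vary by architecture):

    static atomic_t num_events;
    static DEFINE_MUTEX(pmc_reserve_mutex);

    static void hw_perf_event_destroy(struct perf_event *event)
    {
            /* Fast path: just drop a reference, unless we are the last user. */
            if (!atomic_add_unless(&num_events, -1, 1)) {
                    mutex_lock(&pmc_reserve_mutex);
                    if (atomic_dec_return(&num_events) == 0)
                            release_pmc_hardware();
                    mutex_unlock(&pmc_reserve_mutex);
            }
    }

    static int hw_perf_event_init(struct perf_event *event)
    {
            int err = 0;

            /* Fast path: take a reference if someone already holds one. */
            if (!atomic_inc_not_zero(&num_events)) {
                    mutex_lock(&pmc_reserve_mutex);
                    if (atomic_read(&num_events) == 0 &&
                        reserve_pmc_hardware())
                            err = -EBUSY;
                    else
                            atomic_inc(&num_events);
                    mutex_unlock(&pmc_reserve_mutex);
            }
            if (err)
                    return err;

            event->destroy = hw_perf_event_destroy;
            return 0;
    }

The atomic_add_unless(&num_events, -1, 1) call is the subtle part: it refuses to take the count from 1 to 0 outside the mutex, so the hardware release always happens under the lock and cannot race with a concurrent init. fsl_emb_pmu_disable() (hit 226) also reads the counter as a cheap "are there any events at all?" test.
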
/arch/sh/kernel/cpu/sh4/
perf_event.c 230 for (i = 0; i < sh7750_pmu.num_events; i++) in sh7750_pmu_disable_all()
238 for (i = 0; i < sh7750_pmu.num_events; i++) in sh7750_pmu_enable_all()
244 .num_events = 2,
/arch/sh/include/asm/
perf_event.h 11 unsigned int num_events; member
hw_breakpoint.h 32 unsigned int num_events; member
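
These two headers are where num_events is declared; every machine-specific sh driver above fills it in with its own counter or channel count. Abridged to the member the search matched (the surrounding fields are paraphrased, not exact):

    /* arch/sh/include/asm/perf_event.h, abridged */
    struct sh_pmu {
            const char      *name;
            unsigned int    num_events;     /* counters this PMU exposes */
            /* ... event encoding tables and enable/disable/read hooks ... */
    };

    /* arch/sh/include/asm/hw_breakpoint.h, abridged */
    struct sh_ubc {
            const char      *name;
            unsigned int    num_events;     /* UBC channels available */
            /* ... enable/disable and trigger-mask hooks ... */
    };
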
/arch/s390/kernel/
perf_cpum_cf.c 224 static atomic_t num_events = ATOMIC_INIT(0); variable
304 if (!atomic_add_unless(&num_events, -1, 1)) { in hw_perf_event_destroy()
306 if (atomic_dec_return(&num_events) == 0) in hw_perf_event_destroy()
406 if (!atomic_inc_not_zero(&num_events)) { in __hw_perf_event_init()
408 if (atomic_read(&num_events) == 0 && reserve_pmc_hardware()) in __hw_perf_event_init()
411 atomic_inc(&num_events); in __hw_perf_event_init()
perf_cpum_sf.c 568 static atomic_t num_events; variable
640 if (!atomic_add_unless(&num_events, -1, 1)) { in hw_perf_event_destroy()
642 if (atomic_dec_return(&num_events) == 0) in hw_perf_event_destroy()
687 if (!atomic_inc_not_zero(&num_events)) { in __hw_perf_event_init()
689 if (atomic_read(&num_events) == 0 && reserve_pmc_hardware()) in __hw_perf_event_init()
692 atomic_inc(&num_events); in __hw_perf_event_init()
1538 if (!atomic_read(&num_events)) in cpusf_pmu_setup()
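
s390 repeats the reserve/release refcount (see the sketch after the powerpc group), and perf_cpum_sf.c additionally uses the counter as a cheap early-out in its CPU setup path (hit 1538): if no sampling events exist anywhere, there is no per-CPU state to prepare. Roughly (the real function's signature and the work it skips are simplified here):

    static int cpusf_pmu_setup(unsigned int cpu)
    {
            /* No sampling events in the system: nothing to set up. */
            if (!atomic_read(&num_events))
                    return 0;

            /* ... allocate/initialize this CPU's sampling buffer ... */
            return 0;
    }
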
/arch/arm/kernel/
perf_event_xscale.c 174 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { in xscale1pmu_handle_irq()
377 cpu_pmu->num_events = 3; in xscale1pmu_init()
515 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { in xscale2pmu_handle_irq()
747 cpu_pmu->num_events = 5; in xscale2pmu_init()
perf_event_v6.c 328 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { in armv6pmu_handle_irq()
499 cpu_pmu->num_events = 3; in armv6pmu_init()
550 cpu_pmu->num_events = 3; in armv6mpcore_pmu_init()
perf_event_v7.c 657 (ARMV7_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)
974 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { in armv7pmu_handle_irq()
1053 for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) { in armv7pmu_get_event_idx()
1091 u32 idx, nb_cnt = cpu_pmu->num_events, val; in armv7pmu_reset()
1192 &arm_pmu->num_events, 1); in armv7_probe_num_events()
1554 u32 idx, nb_cnt = cpu_pmu->num_events; in krait_pmu_reset()
1886 u32 idx, nb_cnt = cpu_pmu->num_events; in scorpion_pmu_reset()
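
Across the 32-bit ARM PMU drivers, num_events bounds both the interrupt handler's overflow scan and the reset loop (nb_cnt in the v7/krait/scorpion hits). The handler shape shared by the xscale, v6, and v7 hits, treated as a sketch (cpuc->events follows the usual arm_pmu layout; counter_has_overflowed() stands in for each variant's status-register test):

    for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
            struct perf_event *event = cpuc->events[idx];

            if (!event)
                    continue;               /* counter not in use */

            if (!counter_has_overflowed(pmnc, idx))
                    continue;               /* hypothetical per-variant test */

            armpmu_event_update(event);     /* fold the hw count into the event */
            /* ... re-arm the sampling period, emit the overflow record ... */
    }
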
/arch/arm64/kernel/
perf_event.c 472 (ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)
704 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { in armv8pmu_handle_irq()
780 for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) { in armv8pmu_get_event_idx()
836 u32 idx, nb_cnt = cpu_pmu->num_events; in armv8pmu_reset()
931 cpu_pmu->num_events = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT) in __armv8pmu_probe_pmu()
935 cpu_pmu->num_events += 1; in __armv8pmu_probe_pmu()
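
arm64 is the one entry here where num_events is probed from hardware rather than hard-coded: PMCR_EL0.N reports the number of programmable counters, and the driver adds one more for the dedicated cycle counter (hit 472 computes the last valid counter index from the result). The probe, per hits 931–935 (ARMV8_PMU_PMCR_N_MASK is the standard 0x1f field mask):

    /* Read the number of programmable counters from PMCR_EL0.N ... */
    cpu_pmu->num_events = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT)
                           & ARMV8_PMU_PMCR_N_MASK;

    /* ... and add one for the fixed-function cycle counter. */
    cpu_pmu->num_events += 1;
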