
Searched refs:prev_raw_count (Results 1 – 15 of 15) sorted by relevance

/arch/sh/kernel/
perf_event.c
179 u64 prev_raw_count, new_raw_count; in sh_perf_event_update() local
196 prev_raw_count = local64_read(&hwc->prev_count); in sh_perf_event_update()
199 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in sh_perf_event_update()
200 new_raw_count) != prev_raw_count) in sh_perf_event_update()
211 delta = (new_raw_count << shift) - (prev_raw_count << shift); in sh_perf_event_update()
/arch/riscv/kernel/
perf_event.c
217 u64 prev_raw_count, new_raw_count; in riscv_pmu_read() local
223 prev_raw_count = local64_read(&hwc->prev_count); in riscv_pmu_read()
226 oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count, in riscv_pmu_read()
228 } while (oldval != prev_raw_count); in riscv_pmu_read()
233 delta = (new_raw_count - prev_raw_count) & in riscv_pmu_read()
/arch/xtensa/kernel/
perf_event.c
148 uint64_t prev_raw_count, new_raw_count; in xtensa_perf_event_update() local
152 prev_raw_count = local64_read(&hwc->prev_count); in xtensa_perf_event_update()
154 } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in xtensa_perf_event_update()
155 new_raw_count) != prev_raw_count); in xtensa_perf_event_update()
157 delta = (new_raw_count - prev_raw_count) & XTENSA_PMU_COUNTER_MASK; in xtensa_perf_event_update()
/arch/arm/mach-imx/
mmdc.c
310 u64 delta, prev_raw_count, new_raw_count; in mmdc_pmu_event_update() local
313 prev_raw_count = local64_read(&hwc->prev_count); in mmdc_pmu_event_update()
316 } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in mmdc_pmu_event_update()
317 new_raw_count) != prev_raw_count); in mmdc_pmu_event_update()
319 delta = (new_raw_count - prev_raw_count) & 0xFFFFFFFF; in mmdc_pmu_event_update()
/arch/x86/events/intel/
cstate.c
361 u64 prev_raw_count, new_raw_count; in cstate_pmu_event_update() local
364 prev_raw_count = local64_read(&hwc->prev_count); in cstate_pmu_event_update()
367 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in cstate_pmu_event_update()
368 new_raw_count) != prev_raw_count) in cstate_pmu_event_update()
371 local64_add(new_raw_count - prev_raw_count, &event->count); in cstate_pmu_event_update()
ds.c
1677 u64 prev_raw_count, new_raw_count; in intel_pmu_save_and_restart_reload() local
1687 prev_raw_count = local64_read(&hwc->prev_count); in intel_pmu_save_and_restart_reload()
1719 old = ((s64)(prev_raw_count << shift) >> shift); in intel_pmu_save_and_restart_reload()
/arch/alpha/kernel/
perf_event.c
310 long prev_raw_count, new_raw_count; in alpha_perf_event_update() local
314 prev_raw_count = local64_read(&hwc->prev_count); in alpha_perf_event_update()
317 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in alpha_perf_event_update()
318 new_raw_count) != prev_raw_count) in alpha_perf_event_update()
321 delta = (new_raw_count - (prev_raw_count & alpha_pmu->pmc_count_mask[idx])) + ovf; in alpha_perf_event_update()
/arch/x86/events/
rapl.c
178 u64 prev_raw_count, new_raw_count; in rapl_event_update() local
183 prev_raw_count = local64_read(&hwc->prev_count); in rapl_event_update()
186 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in rapl_event_update()
187 new_raw_count) != prev_raw_count) { in rapl_event_update()
200 delta = (new_raw_count << shift) - (prev_raw_count << shift); in rapl_event_update()
core.c
104 u64 prev_raw_count, new_raw_count; in x86_perf_event_update() local
121 prev_raw_count = local64_read(&hwc->prev_count); in x86_perf_event_update()
124 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in x86_perf_event_update()
125 new_raw_count) != prev_raw_count) in x86_perf_event_update()
136 delta = (new_raw_count << shift) - (prev_raw_count << shift); in x86_perf_event_update()
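Several of the matches (the sh, rapl, and core.c entries above, and the ibs and sparc entries below) compute the delta by shifting both raw values up by 64 minus the counter width before subtracting, while others (xtensa, mmdc, nds32) mask the plain difference. The standalone sketch below is an illustration only: it assumes a hypothetical 48-bit counter width (CNT_BITS, CNT_MASK, and the helper names are invented for the example, not taken from these files) and shows why the two forms give the same wrapped delta.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define CNT_BITS 48				/* assumed counter width; not from the kernel */
#define CNT_MASK ((1ULL << CNT_BITS) - 1)

/* Delta as the shift-based variants compute it. */
static uint64_t delta_shift(uint64_t prev_raw, uint64_t new_raw)
{
	int shift = 64 - CNT_BITS;
	uint64_t delta = (new_raw << shift) - (prev_raw << shift);

	return delta >> shift;		/* bits above the counter width fall away */
}

/* Delta as the mask-based variants compute it. */
static uint64_t delta_mask(uint64_t prev_raw, uint64_t new_raw)
{
	return (new_raw - prev_raw) & CNT_MASK;
}

int main(void)
{
	/* A counter that wrapped past 2^48 between the two reads. */
	uint64_t prev_raw = CNT_MASK - 5, new_raw = 10;

	assert(delta_shift(prev_raw, new_raw) == delta_mask(prev_raw, new_raw));
	printf("delta = %llu\n", (unsigned long long)delta_mask(prev_raw, new_raw));
	return 0;
}

Shifting up and back down discards everything above the counter width, which is exactly what ANDing with the width mask does, so both variants tolerate a counter that rolled over between reads.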
/arch/x86/events/amd/
ibs.c
160 u64 prev_raw_count; in perf_event_try_update() local
170 prev_raw_count = local64_read(&hwc->prev_count); in perf_event_try_update()
171 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in perf_event_try_update()
172 new_raw_count) != prev_raw_count) in perf_event_try_update()
183 delta = (new_raw_count << shift) - (prev_raw_count << shift); in perf_event_try_update()
/arch/nds32/kernel/
perf_event_cpu.c
947 u64 delta, prev_raw_count, new_raw_count; in nds32_pmu_event_update() local
950 prev_raw_count = local64_read(&hwc->prev_count); in nds32_pmu_event_update()
953 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in nds32_pmu_event_update()
954 new_raw_count) != prev_raw_count) { in nds32_pmu_event_update()
961 delta = (new_raw_count - prev_raw_count) & nds32_pmu->max_period; in nds32_pmu_event_update()
/arch/sparc/kernel/
perf_event.c
867 u64 prev_raw_count, new_raw_count; in sparc_perf_event_update() local
871 prev_raw_count = local64_read(&hwc->prev_count); in sparc_perf_event_update()
874 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in sparc_perf_event_update()
875 new_raw_count) != prev_raw_count) in sparc_perf_event_update()
878 delta = (new_raw_count << shift) - (prev_raw_count << shift); in sparc_perf_event_update()
/arch/arc/kernel/
perf_event.c
121 u64 prev_raw_count = local64_read(&hwc->prev_count); in arc_perf_event_update() local
123 s64 delta = new_raw_count - prev_raw_count; in arc_perf_event_update()
/arch/mips/kernel/
perf_event_mipsxx.c
453 u64 prev_raw_count, new_raw_count; in mipspmu_event_update() local
457 prev_raw_count = local64_read(&hwc->prev_count); in mipspmu_event_update()
460 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in mipspmu_event_update()
461 new_raw_count) != prev_raw_count) in mipspmu_event_update()
464 delta = new_raw_count - prev_raw_count; in mipspmu_event_update()
/arch/csky/kernel/
perf_event.c
922 uint64_t prev_raw_count = local64_read(&hwc->prev_count); in csky_perf_event_update() local
929 int64_t delta = new_raw_count - prev_raw_count; in csky_perf_event_update()
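Taken together, the matches above show the common perf counter bookkeeping pattern: read the previously accounted raw value from hwc->prev_count, read the hardware counter, publish the new raw value with local64_cmpxchg() (retrying if another updater got there first), and only then fold the masked or shifted difference into event->count. The user-space sketch below mimics that pattern with C11 atomics; it is an illustration only, and read_hw_counter, struct counter, and COUNTER_MASK are invented for the example rather than taken from the kernel sources listed here.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define COUNTER_MASK 0xffffffffULL	/* pretend the hardware counter is 32 bits wide */

struct counter {
	_Atomic uint64_t prev_count;	/* last raw value already accounted for */
	_Atomic uint64_t count;		/* accumulated event count */
};

/* Stand-in for reading the hardware PMU register. */
static uint64_t read_hw_counter(void)
{
	static uint64_t fake;

	fake += 37;			/* the "hardware" just keeps counting */
	return fake & COUNTER_MASK;
}

static void event_update(struct counter *c)
{
	uint64_t prev_raw_count, new_raw_count, delta;

	do {
		prev_raw_count = atomic_load(&c->prev_count);
		new_raw_count = read_hw_counter();
		/* Publish new_raw_count as prev_count; retry if we raced. */
	} while (!atomic_compare_exchange_strong(&c->prev_count,
						 &prev_raw_count,
						 new_raw_count));

	/* Masked subtraction copes with the counter wrapping. */
	delta = (new_raw_count - prev_raw_count) & COUNTER_MASK;
	atomic_fetch_add(&c->count, delta);
}

int main(void)
{
	struct counter c = { 0 };

	for (int i = 0; i < 3; i++)
		event_update(&c);

	printf("count = %llu\n", (unsigned long long)atomic_load(&c.count));
	return 0;
}

The arc and csky matches are the exceptions: they set prev_count with a plain store and skip the retry loop, apparently relying on the update path not racing with itself.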