Home
last modified time | relevance | path

Searched refs:prev_raw_count (Results 1 – 16 of 16) sorted by relevance

/arch/sh/kernel/
D perf_event.c 179 u64 prev_raw_count, new_raw_count; in sh_perf_event_update() local
196 prev_raw_count = local64_read(&hwc->prev_count); in sh_perf_event_update()
199 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in sh_perf_event_update()
200 new_raw_count) != prev_raw_count) in sh_perf_event_update()
211 delta = (new_raw_count << shift) - (prev_raw_count << shift); in sh_perf_event_update()
/arch/riscv/kernel/
D perf_event.c 217 u64 prev_raw_count, new_raw_count; in riscv_pmu_read() local
223 prev_raw_count = local64_read(&hwc->prev_count); in riscv_pmu_read()
226 oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count, in riscv_pmu_read()
228 } while (oldval != prev_raw_count); in riscv_pmu_read()
233 delta = (new_raw_count - prev_raw_count) & in riscv_pmu_read()
/arch/xtensa/kernel/
D perf_event.c 139 uint64_t prev_raw_count, new_raw_count; in xtensa_perf_event_update() local
143 prev_raw_count = local64_read(&hwc->prev_count); in xtensa_perf_event_update()
145 } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in xtensa_perf_event_update()
146 new_raw_count) != prev_raw_count); in xtensa_perf_event_update()
148 delta = (new_raw_count - prev_raw_count) & XTENSA_PMU_COUNTER_MASK; in xtensa_perf_event_update()
/arch/arm/mach-imx/
D mmdc.c 308 u64 delta, prev_raw_count, new_raw_count; in mmdc_pmu_event_update() local
311 prev_raw_count = local64_read(&hwc->prev_count); in mmdc_pmu_event_update()
314 } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in mmdc_pmu_event_update()
315 new_raw_count) != prev_raw_count); in mmdc_pmu_event_update()
317 delta = (new_raw_count - prev_raw_count) & 0xFFFFFFFF; in mmdc_pmu_event_update()
/arch/x86/events/intel/
D cstate.c 358 u64 prev_raw_count, new_raw_count; in cstate_pmu_event_update() local
361 prev_raw_count = local64_read(&hwc->prev_count); in cstate_pmu_event_update()
364 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in cstate_pmu_event_update()
365 new_raw_count) != prev_raw_count) in cstate_pmu_event_update()
368 local64_add(new_raw_count - prev_raw_count, &event->count); in cstate_pmu_event_update()
D rapl.c 179 u64 prev_raw_count, new_raw_count; in rapl_event_update() local
184 prev_raw_count = local64_read(&hwc->prev_count); in rapl_event_update()
187 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in rapl_event_update()
188 new_raw_count) != prev_raw_count) { in rapl_event_update()
201 delta = (new_raw_count << shift) - (prev_raw_count << shift); in rapl_event_update()
D ds.c 1671 u64 prev_raw_count, new_raw_count; in intel_pmu_save_and_restart_reload() local
1681 prev_raw_count = local64_read(&hwc->prev_count); in intel_pmu_save_and_restart_reload()
1713 old = ((s64)(prev_raw_count << shift) >> shift); in intel_pmu_save_and_restart_reload()
/arch/alpha/kernel/
D perf_event.c 310 long prev_raw_count, new_raw_count; in alpha_perf_event_update() local
314 prev_raw_count = local64_read(&hwc->prev_count); in alpha_perf_event_update()
317 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in alpha_perf_event_update()
318 new_raw_count) != prev_raw_count) in alpha_perf_event_update()
321 delta = (new_raw_count - (prev_raw_count & alpha_pmu->pmc_count_mask[idx])) + ovf; in alpha_perf_event_update()
/arch/x86/events/amd/
D ibs.c 158 u64 prev_raw_count; in perf_event_try_update() local
168 prev_raw_count = local64_read(&hwc->prev_count); in perf_event_try_update()
169 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in perf_event_try_update()
170 new_raw_count) != prev_raw_count) in perf_event_try_update()
181 delta = (new_raw_count << shift) - (prev_raw_count << shift); in perf_event_try_update()
D iommu.c 289 u64 prev_raw_count = local64_read(&hwc->prev_count); in perf_iommu_start() local
293 IOMMU_PC_COUNTER_REG, &prev_raw_count); in perf_iommu_start()
/arch/nds32/kernel/
D perf_event_cpu.c 947 u64 delta, prev_raw_count, new_raw_count; in nds32_pmu_event_update() local
950 prev_raw_count = local64_read(&hwc->prev_count); in nds32_pmu_event_update()
953 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in nds32_pmu_event_update()
954 new_raw_count) != prev_raw_count) { in nds32_pmu_event_update()
961 delta = (new_raw_count - prev_raw_count) & nds32_pmu->max_period; in nds32_pmu_event_update()
/arch/mips/kernel/
D perf_event_mipsxx.c 410 u64 prev_raw_count, new_raw_count; in mipspmu_event_update() local
414 prev_raw_count = local64_read(&hwc->prev_count); in mipspmu_event_update()
417 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in mipspmu_event_update()
418 new_raw_count) != prev_raw_count) in mipspmu_event_update()
421 delta = new_raw_count - prev_raw_count; in mipspmu_event_update()
/arch/sparc/kernel/
D perf_event.c 867 u64 prev_raw_count, new_raw_count; in sparc_perf_event_update() local
871 prev_raw_count = local64_read(&hwc->prev_count); in sparc_perf_event_update()
874 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in sparc_perf_event_update()
875 new_raw_count) != prev_raw_count) in sparc_perf_event_update()
878 delta = (new_raw_count << shift) - (prev_raw_count << shift); in sparc_perf_event_update()
/arch/arc/kernel/
D perf_event.c 121 u64 prev_raw_count = local64_read(&hwc->prev_count); in arc_perf_event_update() local
123 s64 delta = new_raw_count - prev_raw_count; in arc_perf_event_update()
/arch/x86/events/
D core.c 72 u64 prev_raw_count, new_raw_count; in x86_perf_event_update() local
87 prev_raw_count = local64_read(&hwc->prev_count); in x86_perf_event_update()
90 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in x86_perf_event_update()
91 new_raw_count) != prev_raw_count) in x86_perf_event_update()
102 delta = (new_raw_count << shift) - (prev_raw_count << shift); in x86_perf_event_update()
/arch/csky/kernel/
D perf_event.c 922 uint64_t prev_raw_count = local64_read(&hwc->prev_count); in csky_perf_event_update() local
929 int64_t delta = new_raw_count - prev_raw_count; in csky_perf_event_update()