/arch/sh/kernel/perf_event.c
    179  u64 prev_raw_count, new_raw_count;  in sh_perf_event_update() local
    196  prev_raw_count = local64_read(&hwc->prev_count);  in sh_perf_event_update()
    199  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in sh_perf_event_update()
    200  new_raw_count) != prev_raw_count)  in sh_perf_event_update()
    211  delta = (new_raw_count << shift) - (prev_raw_count << shift);  in sh_perf_event_update()

/arch/riscv/kernel/perf_event.c
    217  u64 prev_raw_count, new_raw_count;  in riscv_pmu_read() local
    223  prev_raw_count = local64_read(&hwc->prev_count);  in riscv_pmu_read()
    226  oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in riscv_pmu_read()
    228  } while (oldval != prev_raw_count);  in riscv_pmu_read()
    233  delta = (new_raw_count - prev_raw_count) &  in riscv_pmu_read()

/arch/xtensa/kernel/perf_event.c
    148  uint64_t prev_raw_count, new_raw_count;  in xtensa_perf_event_update() local
    152  prev_raw_count = local64_read(&hwc->prev_count);  in xtensa_perf_event_update()
    154  } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in xtensa_perf_event_update()
    155  new_raw_count) != prev_raw_count);  in xtensa_perf_event_update()
    157  delta = (new_raw_count - prev_raw_count) & XTENSA_PMU_COUNTER_MASK;  in xtensa_perf_event_update()

/arch/arm/mach-imx/mmdc.c
    310  u64 delta, prev_raw_count, new_raw_count;  in mmdc_pmu_event_update() local
    313  prev_raw_count = local64_read(&hwc->prev_count);  in mmdc_pmu_event_update()
    316  } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in mmdc_pmu_event_update()
    317  new_raw_count) != prev_raw_count);  in mmdc_pmu_event_update()
    319  delta = (new_raw_count - prev_raw_count) & 0xFFFFFFFF;  in mmdc_pmu_event_update()

/arch/x86/events/intel/cstate.c
    361  u64 prev_raw_count, new_raw_count;  in cstate_pmu_event_update() local
    364  prev_raw_count = local64_read(&hwc->prev_count);  in cstate_pmu_event_update()
    367  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in cstate_pmu_event_update()
    368  new_raw_count) != prev_raw_count)  in cstate_pmu_event_update()
    371  local64_add(new_raw_count - prev_raw_count, &event->count);  in cstate_pmu_event_update()

/arch/x86/events/intel/ds.c
    1677  u64 prev_raw_count, new_raw_count;  in intel_pmu_save_and_restart_reload() local
    1687  prev_raw_count = local64_read(&hwc->prev_count);  in intel_pmu_save_and_restart_reload()
    1719  old = ((s64)(prev_raw_count << shift) >> shift);  in intel_pmu_save_and_restart_reload()

/arch/alpha/kernel/perf_event.c
    310  long prev_raw_count, new_raw_count;  in alpha_perf_event_update() local
    314  prev_raw_count = local64_read(&hwc->prev_count);  in alpha_perf_event_update()
    317  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in alpha_perf_event_update()
    318  new_raw_count) != prev_raw_count)  in alpha_perf_event_update()
    321  delta = (new_raw_count - (prev_raw_count & alpha_pmu->pmc_count_mask[idx])) + ovf;  in alpha_perf_event_update()

/arch/x86/events/rapl.c
    178  u64 prev_raw_count, new_raw_count;  in rapl_event_update() local
    183  prev_raw_count = local64_read(&hwc->prev_count);  in rapl_event_update()
    186  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in rapl_event_update()
    187  new_raw_count) != prev_raw_count) {  in rapl_event_update()
    200  delta = (new_raw_count << shift) - (prev_raw_count << shift);  in rapl_event_update()

/arch/x86/events/core.c
    104  u64 prev_raw_count, new_raw_count;  in x86_perf_event_update() local
    121  prev_raw_count = local64_read(&hwc->prev_count);  in x86_perf_event_update()
    124  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in x86_perf_event_update()
    125  new_raw_count) != prev_raw_count)  in x86_perf_event_update()
    136  delta = (new_raw_count << shift) - (prev_raw_count << shift);  in x86_perf_event_update()

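The sh, sparc, rapl.c, core.c and ibs.c hits compute the delta as (new_raw_count << shift) - (prev_raw_count << shift) rather than masking: shifting both samples left by (64 - counter_width) puts the hardware counter's top bit at bit 63, so the 64-bit subtraction wraps exactly when the narrower counter wraps, and an arithmetic right shift restores the magnitude (the same trick ds.c uses at line 1719 to sign-extend a single sample). A minimal stand-alone sketch, assuming a hypothetical 48-bit counter; in the kernel the shift is derived from the PMU's counter width (e.g. x86_pmu.cntval_bits), and the sample values here are invented:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	const int width = 48;                   /* assumed counter width */
	const int shift = 64 - width;
	uint64_t prev_raw = 0xFFFFFFFFFFFFULL;  /* last sample: counter about to wrap */
	uint64_t new_raw  = 0x5ULL;             /* next sample: counter wrapped past 0 */

	/* Same expression as in the listed update functions: both samples
	 * are shifted up so the subtraction wraps at the counter width. */
	int64_t delta = (new_raw << shift) - (prev_raw << shift);
	delta >>= shift;                        /* sign-extending shift back down */

	printf("delta = %lld\n", (long long)delta);  /* prints 6, not a huge bogus value */
	return 0;
}
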
/arch/x86/events/amd/ibs.c
    160  u64 prev_raw_count;  in perf_event_try_update() local
    170  prev_raw_count = local64_read(&hwc->prev_count);  in perf_event_try_update()
    171  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in perf_event_try_update()
    172  new_raw_count) != prev_raw_count)  in perf_event_try_update()
    183  delta = (new_raw_count << shift) - (prev_raw_count << shift);  in perf_event_try_update()

/arch/nds32/kernel/perf_event_cpu.c
    947  u64 delta, prev_raw_count, new_raw_count;  in nds32_pmu_event_update() local
    950  prev_raw_count = local64_read(&hwc->prev_count);  in nds32_pmu_event_update()
    953  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in nds32_pmu_event_update()
    954  new_raw_count) != prev_raw_count) {  in nds32_pmu_event_update()
    961  delta = (new_raw_count - prev_raw_count) & nds32_pmu->max_period;  in nds32_pmu_event_update()

/arch/sparc/kernel/perf_event.c
    867  u64 prev_raw_count, new_raw_count;  in sparc_perf_event_update() local
    871  prev_raw_count = local64_read(&hwc->prev_count);  in sparc_perf_event_update()
    874  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in sparc_perf_event_update()
    875  new_raw_count) != prev_raw_count)  in sparc_perf_event_update()
    878  delta = (new_raw_count << shift) - (prev_raw_count << shift);  in sparc_perf_event_update()

/arch/arc/kernel/perf_event.c
    121  u64 prev_raw_count = local64_read(&hwc->prev_count);  in arc_perf_event_update() local
    123  s64 delta = new_raw_count - prev_raw_count;  in arc_perf_event_update()

/arch/mips/kernel/perf_event_mipsxx.c
    453  u64 prev_raw_count, new_raw_count;  in mipspmu_event_update() local
    457  prev_raw_count = local64_read(&hwc->prev_count);  in mipspmu_event_update()
    460  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in mipspmu_event_update()
    461  new_raw_count) != prev_raw_count)  in mipspmu_event_update()
    464  delta = new_raw_count - prev_raw_count;  in mipspmu_event_update()

/arch/csky/kernel/perf_event.c
    922  uint64_t prev_raw_count = local64_read(&hwc->prev_count);  in csky_perf_event_update() local
    929  int64_t delta = new_raw_count - prev_raw_count;  in csky_perf_event_update()

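Taken together, the hits show one shared idiom: each update function samples the free-running hardware counter, publishes the new sample into hwc->prev_count with local64_cmpxchg() so a racing interrupt or NMI cannot account the same interval twice, and then folds the wrapped difference into event->count. The riscv, xtensa, mmdc.c, nds32 and alpha variants mask the delta to the counter width instead of using the shift trick. A minimal user-space sketch of the pattern, using C11 atomics in place of the kernel's local64_t; read_hw_counter(), COUNTER_MASK, prev_count and event_count are invented names for illustration, not taken from the files above:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define COUNTER_MASK 0xFFFFFFFFULL       /* pretend 32-bit counter, as in mmdc.c */

static _Atomic uint64_t prev_count;      /* stands in for hwc->prev_count */
static _Atomic uint64_t event_count;     /* stands in for event->count */

/* Fake hardware counter that advances (and eventually wraps) on every read. */
static uint64_t read_hw_counter(void)
{
	static uint64_t ticks;
	ticks += 0x60000000ULL;              /* large step to force wraparound */
	return ticks & COUNTER_MASK;
}

static void event_update(void)
{
	uint64_t prev_raw, new_raw;

	do {
		prev_raw = atomic_load(&prev_count);
		new_raw = read_hw_counter();
		/* Retry if another updater changed prev_count in between,
		 * mirroring the local64_cmpxchg() loops listed above. */
	} while (!atomic_compare_exchange_strong(&prev_count, &prev_raw, new_raw));

	/* Masking makes the subtraction wrap correctly when the narrow
	 * counter overflows between the two samples. */
	atomic_fetch_add(&event_count, (new_raw - prev_raw) & COUNTER_MASK);
}

int main(void)
{
	for (int i = 0; i < 8; i++)          /* enough reads to wrap the counter */
		event_update();
	printf("event count: %llu\n",
	       (unsigned long long)atomic_load(&event_count));
	return 0;
}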