/arch/xtensa/kernel/perf_event.c
    178: hwc->last_period = period;                          in xtensa_perf_event_set_period()
    183: hwc->last_period = period;                          in xtensa_perf_event_set_period()
    378: u64 last_period;                                    in xtensa_pmu_irq_handler() (local)
    385: last_period = hwc->last_period;                     in xtensa_pmu_irq_handler()
    390: perf_sample_data_init(&data, 0, last_period);       in xtensa_pmu_irq_handler()
/arch/powerpc/perf/core-fsl-emb.c
    546: event->hw.last_period = event->hw.sample_period;             in fsl_emb_pmu_event_init()
    547: local64_set(&event->hw.period_left, event->hw.last_period);  in fsl_emb_pmu_event_init()
    618: event->hw.last_period = event->hw.sample_period;             in record_and_restart()
    635: perf_sample_data_init(&data, 0, event->hw.last_period);      in record_and_restart()

/arch/powerpc/perf/core-book3s.c
    2057: event->hw.last_period = event->hw.sample_period;            in power_pmu_event_init()
    2058: local64_set(&event->hw.period_left, event->hw.last_period); in power_pmu_event_init()
    2165: event->hw.last_period = event->hw.sample_period;            in record_and_restart()
    2193: perf_sample_data_init(&data, ~0ULL, event->hw.last_period); in record_and_restart()

/arch/powerpc/perf/imc-pmu.c
    1287: data->period = event->hw.last_period;                       in trace_imc_prepare_sample()
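The *_pmu_event_init() hits above (core-fsl-emb.c 546-547, core-book3s.c 2057-2058) all follow the same initialisation shape: a sampling event seeds hw.last_period from hw.sample_period and primes hw.period_left with the same value. A minimal sketch of that shape, using only the generic struct hw_perf_event fields; the function name is illustrative and not taken from any arch:

    #include <linux/perf_event.h>

    /* Illustrative sketch, not any particular arch's event_init(). */
    static int example_pmu_event_init(struct perf_event *event)
    {
            struct hw_perf_event *hwc = &event->hw;

            /* The first sample will be attributed to a full sample_period... */
            hwc->last_period = hwc->sample_period;
            /* ...and the software image of the counter is armed with it. */
            local64_set(&hwc->period_left, hwc->last_period);

            return 0;
    }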
/arch/arc/kernel/perf_event.c
    174: hwc->last_period = hwc->sample_period;              in arc_pmu_event_init()
    256: hwc->last_period = period;                          in arc_pmu_event_set_period()
    262: hwc->last_period = period;                          in arc_pmu_event_set_period()
    439: perf_sample_data_init(&data, 0, hwc->last_period);  in arc_pmu_intr()
/arch/alpha/kernel/perf_event.c
    262: hwc->last_period = period;                          in alpha_perf_event_set_period()
    269: hwc->last_period = period;                          in alpha_perf_event_set_period()
    680: hwc->last_period = hwc->sample_period;              in __hw_perf_event_init()
    857: perf_sample_data_init(&data, 0, hwc->last_period);  in alpha_perf_event_irq_handler()
/arch/nds32/kernel/perf_event_cpu.c
    192: if (unlikely(period != hwc->last_period))           in nds32_pmu_event_set_period()
    193: left = period - (hwc->last_period - left);          in nds32_pmu_event_set_period()
    198: hwc->last_period = period;                          in nds32_pmu_event_set_period()
    205: hwc->last_period = period;                          in nds32_pmu_event_set_period()
    267: perf_sample_data_init(&data, 0, hwc->last_period);  in nds32_pmu_handle_irq()
    845: hwc->last_period = hwc->sample_period;              in __hw_perf_event_init()
/arch/x86/events/amd/ibs.c
    125: hwc->last_period = period;                          in perf_event_set_period()
    132: hwc->last_period = period;                          in perf_event_set_period()
    308: hwc->last_period = hwc->sample_period;              in perf_ibs_init()
    638: perf_sample_data_init(&data, 0, hwc->last_period);  in perf_ibs_handle_irq()
/arch/sparc/kernel/perf_event.c
    895: if (unlikely(period != hwc->last_period))           in sparc_perf_event_set_period()
    896: left = period - (hwc->last_period - left);          in sparc_perf_event_set_period()
    901: hwc->last_period = period;                          in sparc_perf_event_set_period()
    908: hwc->last_period = period;                          in sparc_perf_event_set_period()
    1500: hwc->last_period = hwc->sample_period;             in sparc_pmu_event_init()
    1667: perf_sample_data_init(&data, 0, hwc->last_period); in perf_event_nmi_handler()
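The sparc and nds32 hits show the fullest form of the re-arm pattern that the other *_event_set_period() entries share: fold in any period change made via PERF_EVENT_IOC_PERIOD, then record in hw.last_period which period the next overflow will be charged to. A condensed sketch under those assumptions; MAX_PERIOD and write_counter() are placeholders for the arch-specific counter limit and register write, not real kernel symbols:

    /* Condensed sketch of the *_event_set_period() pattern; MAX_PERIOD and
     * write_counter() stand in for arch-specific details. */
    static int example_event_set_period(struct perf_event *event, int idx)
    {
            struct hw_perf_event *hwc = &event->hw;
            s64 left = local64_read(&hwc->period_left);
            s64 period = hwc->sample_period;
            int overflowed = 0;

            /* The period may have been changed by PERF_EVENT_IOC_PERIOD. */
            if (unlikely(period != hwc->last_period))
                    left = period - (hwc->last_period - left);

            if (unlikely(left <= -period)) {
                    left = period;
                    local64_set(&hwc->period_left, left);
                    hwc->last_period = period;  /* next sample covers 'period' events */
                    overflowed = 1;
            }

            if (unlikely(left <= 0)) {
                    left += period;
                    local64_set(&hwc->period_left, left);
                    hwc->last_period = period;
                    overflowed = 1;
            }

            if (left > MAX_PERIOD)
                    left = MAX_PERIOD;

            /* Program the counter so it overflows after 'left' more events. */
            local64_set(&hwc->prev_count, (u64)-left);
            write_counter(idx, (u64)-left);

            perf_event_update_userpage(event);
            return overflowed;
    }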
/arch/x86/events/intel/knc.c
    253: perf_sample_data_init(&data, 0, event->hw.last_period);  in knc_pmu_handle_irq()

/arch/x86/events/intel/ds.c
    614: perf_sample_data_init(&data, 0, event->hw.last_period);  in intel_pmu_drain_bts_buffer()
    1365: perf_sample_data_init(data, 0, event->hw.last_period);  in setup_pebs_fixed_sample_data()
    1367: data->period = event->hw.last_period;                   in setup_pebs_fixed_sample_data()
    1536: perf_sample_data_init(data, 0, event->hw.last_period);  in setup_pebs_adaptive_sample_data()
    1537: data->period = event->hw.last_period;                   in setup_pebs_adaptive_sample_data()

/arch/x86/events/intel/p4.c
    1035: perf_sample_data_init(&data, 0, hwc->last_period);      in p4_pmu_handle_irq()

/arch/x86/events/intel/core.c
    2691: perf_sample_data_init(&data, 0, event->hw.last_period); in handle_pmi_common()
/arch/arm/kernel/perf_event_xscale.c
    185: perf_sample_data_init(&data, 0, hwc->last_period);  in xscale1pmu_handle_irq()
    531: perf_sample_data_init(&data, 0, hwc->last_period);  in xscale2pmu_handle_irq()

/arch/arm/kernel/perf_event_v6.c
    343: perf_sample_data_init(&data, 0, hwc->last_period);  in armv6pmu_handle_irq()

/arch/arm/kernel/perf_event_v7.c
    990: perf_sample_data_init(&data, 0, hwc->last_period);  in armv7pmu_handle_irq()
/arch/s390/kernel/perf_cpum_cf_diag.c
    233: event->hw.last_period = event->hw.sample_period;         in __hw_perf_event_init()
    496: perf_sample_data_init(&data, 0, event->hw.last_period);  in cf_diag_push_sample()

/arch/s390/kernel/perf_cpum_sf.c
    617: hwc->last_period = hwc->sample_period;                   in hw_init_period()
    1088: perf_sample_data_init(&data, 0, event->hw.last_period); in perf_push_sample()
/arch/csky/kernel/perf_event.c
    888: hwc->last_period = period;                          in csky_pmu_event_set_period()
    895: hwc->last_period = period;                          in csky_pmu_event_set_period()
    1139: perf_sample_data_init(&data, 0, hwc->last_period); in csky_pmu_handle_irq()
/arch/mips/kernel/perf_event_mipsxx.c
    421: hwc->last_period = period;                          in mipspmu_event_set_period()
    427: hwc->last_period = period;                          in mipspmu_event_set_period()
    790: data->period = event->hw.last_period;               in handle_associated_event()
    1596: hwc->last_period = hwc->sample_period;             in __hw_perf_event_init()
/arch/x86/events/zhaoxin/core.c
    395: perf_sample_data_init(&data, 0, event->hw.last_period);  in zhaoxin_pmu_handle_irq()
/arch/x86/events/core.c
    457: hwc->last_period = hwc->sample_period;                   in x86_setup_perfctr()
    1335: hwc->last_period = period;                              in x86_perf_event_set_period()
    1342: hwc->last_period = period;                              in x86_perf_event_set_period()
    1658: perf_sample_data_init(&data, 0, event->hw.last_period); in x86_pmu_handle_irq()
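Most of the remaining hits are the consumer side of last_period, in the overflow/IRQ handlers: perf_sample_data_init() is seeded with hw.last_period so the sample is weighted by the period that was actually programmed, and the counter is then re-armed (which refreshes last_period) before perf_event_overflow() emits the record. A minimal sketch of that flow; event_overflowed(), rearm_counter() and stop_counter() are placeholders for the arch-specific pieces, not real kernel functions:

    /* Sketch of the overflow-handler pattern; event_overflowed(),
     * rearm_counter() and stop_counter() are hypothetical helpers. */
    static void example_pmu_handle_irq(struct perf_event *event, struct pt_regs *regs)
    {
            struct perf_sample_data data;

            if (!event_overflowed(event))
                    return;

            /* Weight the sample by the period programmed at the last re-arm. */
            perf_sample_data_init(&data, 0, event->hw.last_period);

            /* Re-arm; this also updates hw.last_period for the next sample. */
            if (!rearm_counter(event))
                    return;

            /* perf_event_overflow() returns non-zero when the event must be
             * stopped (e.g. throttling); the arch code honours that here. */
            if (perf_event_overflow(event, &data, regs))
                    stop_counter(event);
    }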
/arch/arm64/kernel/perf_event.c
    795: perf_sample_data_init(&data, 0, hwc->last_period);  in armv8pmu_handle_irq()