/arch/s390/kernel/
  perf_cpum_cf.c
      70  struct cpu_hw_events {    (struct definition)
      76  static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {    (macro argument)
     125  struct cpu_hw_events *cpuhw;    in validate_ctr_version()
     128  cpuhw = &get_cpu_var(cpu_hw_events);    in validate_ctr_version()
     144  put_cpu_var(cpu_hw_events);    in validate_ctr_version()
     150  struct cpu_hw_events *cpuhw;    in validate_ctr_auth()
     154  cpuhw = &get_cpu_var(cpu_hw_events);    in validate_ctr_auth()
     161  put_cpu_var(cpu_hw_events);    in validate_ctr_auth()
     172  struct cpu_hw_events *cpuhw = &__get_cpu_var(cpu_hw_events);    in cpumf_pmu_enable()
     195  struct cpu_hw_events *cpuhw = &__get_cpu_var(cpu_hw_events);    in cpumf_pmu_disable()
    [all …]
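The validate_ctr_version()/validate_ctr_auth() hits above show the preemption-safe access pattern that recurs throughout this listing: get_cpu_var() disables preemption and returns this CPU's copy, and put_cpu_var() re-enables it. The sketch below only illustrates that pairing under invented names (demo_hw_events, demo_validate, the csvn field); it is not the s390 code.

    #include <linux/percpu.h>
    #include <linux/errno.h>

    /* Illustrative stand-in for the per-CPU counter state; not the s390 layout. */
    struct demo_hw_events {
            unsigned int csvn;      /* hypothetical "counter-set version" field */
    };

    static DEFINE_PER_CPU(struct demo_hw_events, demo_hw_events);

    static int demo_validate(unsigned int wanted)
    {
            struct demo_hw_events *cpuhw;
            int err = 0;

            /* get_cpu_var() disables preemption and yields this CPU's instance */
            cpuhw = &get_cpu_var(demo_hw_events);
            if (cpuhw->csvn < wanted)
                    err = -EOPNOTSUPP;
            /* every get_cpu_var() must be paired with a put_cpu_var() */
            put_cpu_var(demo_hw_events);

            return err;
    }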
/arch/x86/kernel/cpu/
  perf_event_intel_ds.c
      46  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;    in init_debug_store_on_cpu()
      58  if (!per_cpu(cpu_hw_events, cpu).ds)    in fini_debug_store_on_cpu()
      66  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;    in alloc_pebs_buffer()
      93  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;    in release_pebs_buffer()
     104  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;    in alloc_bts_buffer()
     131  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;    in release_bts_buffer()
     149  per_cpu(cpu_hw_events, cpu).ds = ds;    in alloc_ds_buffer()
     156  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;    in release_ds_buffer()
     161  per_cpu(cpu_hw_events, cpu).ds = NULL;    in release_ds_buffer()
     274  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_disable_bts()
    [all …]
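The perf_event_intel_ds.c references reach a specific CPU's copy with per_cpu(cpu_hw_events, cpu) rather than the local one, because the debug-store buffers are attached and detached from paths that may run on a different CPU. A rough sketch of that explicit-CPU idiom, with invented names (demo_events, demo_state, demo_alloc_state), not the Intel DS code:

    #include <linux/percpu.h>
    #include <linux/slab.h>
    #include <linux/errno.h>

    /* Invented stand-ins for cpu_hw_events and its ds pointer. */
    struct demo_state { void *buffer; };
    struct demo_events { struct demo_state *ds; };

    static DEFINE_PER_CPU(struct demo_events, demo_events);

    /* May run on any CPU, but installs state for @cpu (hotplug-style setup). */
    static int demo_alloc_state(int cpu)
    {
            struct demo_state *ds = kzalloc(sizeof(*ds), GFP_KERNEL);

            if (!ds)
                    return -ENOMEM;
            per_cpu(demo_events, cpu).ds = ds;      /* explicit-CPU access */
            return 0;
    }

    static void demo_free_state(int cpu)
    {
            struct demo_state *ds = per_cpu(demo_events, cpu).ds;

            per_cpu(demo_events, cpu).ds = NULL;
            kfree(ds);
    }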
  perf_event_amd.c
     179  static inline int amd_has_nb(struct cpu_hw_events *cpuc)    in amd_has_nb()
     186  static void amd_put_event_constraints(struct cpu_hw_events *cpuc,    in amd_put_event_constraints()
     252  amd_get_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event)    in amd_get_event_constraints()
     344  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);    in amd_pmu_cpu_prepare()
     360  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);    in amd_pmu_cpu_starting()
     373  nb = per_cpu(cpu_hw_events, i).amd_nb;    in amd_pmu_cpu_starting()
     390  struct cpu_hw_events *cpuhw;    in amd_pmu_cpu_dead()
     395  cpuhw = &per_cpu(cpu_hw_events, cpu);    in amd_pmu_cpu_dead()
     526  amd_get_event_constraints_f15h(struct cpu_hw_events *cpuc, struct perf_event *event)    in amd_get_event_constraints_f15h()
     660  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in amd_pmu_enable_virt()
    [all …]
  perf_event_intel_lbr.c
     109  static void intel_pmu_lbr_filter(struct cpu_hw_events *cpuc);
     119  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in __intel_pmu_lbr_enable()
     169  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_lbr_enable()
     189  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_lbr_disable()
     206  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_lbr_enable_all()
     214  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_lbr_disable_all()
     232  static void intel_pmu_lbr_read_32(struct cpu_hw_events *cpuc)    in intel_pmu_lbr_read_32()
     264  static void intel_pmu_lbr_read_64(struct cpu_hw_events *cpuc)    in intel_pmu_lbr_read_64()
     295  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_lbr_read()
     568  intel_pmu_lbr_filter(struct cpu_hw_events *cpuc)    in intel_pmu_lbr_filter()
  perf_event.c
      51  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
     497  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in x86_pmu_disable_all()
     515  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in x86_pmu_disable()
     532  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in x86_pmu_enable_all()
     727  int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign)    in x86_schedule_events()
     788  static int collect_events(struct cpu_hw_events *cpuc, struct perf_event *leader, bool dogrp)    in collect_events()
     822  struct cpu_hw_events *cpuc, int i)    in x86_assign_hw_event()
     843  struct cpu_hw_events *cpuc,    in match_prev_assignment()
     855  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in x86_pmu_enable()
     991  if (__this_cpu_read(cpu_hw_events.enabled))    in x86_pmu_enable_event()
    [all …]
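perf_event.c defines the x86 instance at line 51 with a static initialiser, and the enable/disable paths then fetch the current CPU's copy through __get_cpu_var(), the accessor used by kernels of this vintage (later trees replaced it with this_cpu_ptr()). A minimal sketch of that define-plus-local-access shape, again with made-up names rather than the real x86 structure:

    #include <linux/percpu.h>

    struct demo_hw {
            int enabled;            /* the kind of flag initialised below */
            int n_events;
    };

    /* Definition with an initialiser, in exactly one .c file. */
    static DEFINE_PER_CPU(struct demo_hw, demo_hw) = {
            .enabled = 1,
    };

    static void demo_pmu_disable(void)
    {
            /* __get_cpu_var(): this CPU's copy; callers run with preemption off */
            struct demo_hw *cpuc = &__get_cpu_var(demo_hw);

            cpuc->enabled = 0;
    }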
  perf_event_p6.c
      67  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in p6_pmu_disable_event()
      79  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in p6_pmu_enable_event()
  perf_event.h
     103  struct cpu_hw_events {    (struct definition)
     312  int (*schedule_events)(struct cpu_hw_events *cpuc, int n, int *assign);
     329  (*get_event_constraints)(struct cpu_hw_events *cpuc,
     332  void (*put_event_constraints)(struct cpu_hw_events *cpuc,
     402  DECLARE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
     460  u64 disable_mask = __this_cpu_read(cpu_hw_events.perf_ctr_virt_mask);    in __x86_pmu_enable_event()
     469  int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign);
     515  x86_get_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event);
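perf_event.h pairs that definition with a DECLARE_PER_CPU at line 402 so the other x86 PMU files can name the same variable, and single fields are read with __this_cpu_read(cpu_hw_events.member), as at line 460. The split looks roughly like the hypothetical header/user pair below (demo_hw and demo_pmu_is_enabled are invented names):

    /* demo_hw.h -- hypothetical header shared by several PMU files */
    #include <linux/percpu.h>
    #include <linux/types.h>

    struct demo_hw {
            int enabled;
            u64 ctr_mask;
    };

    DECLARE_PER_CPU(struct demo_hw, demo_hw);       /* defined in exactly one .c file */

    /* demo_hw_user.c -- hypothetical user of the header */
    static inline bool demo_pmu_is_enabled(void)
    {
            /* __this_cpu_read() fetches one member without taking its address */
            return __this_cpu_read(demo_hw.enabled) != 0;
    }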
  perf_event_intel.c
     752  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_disable_all()
     765  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_enable_all()
     799  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_nhm_workaround()
     893  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_disable_event()
     956  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_enable_event()
     959  if (!__this_cpu_read(cpu_hw_events.enabled))    in intel_pmu_enable_event()
    1000  struct debug_store *ds = __this_cpu_read(cpu_hw_events.ds);    in intel_pmu_reset()
    1031  struct cpu_hw_events *cpuc;    in intel_pmu_handle_irq()
    1038  cpuc = &__get_cpu_var(cpu_hw_events);    in intel_pmu_handle_irq()
    1161  __intel_shared_reg_get_constraints(struct cpu_hw_events *cpuc,    in __intel_shared_reg_get_constraints()
    [all …]
  perf_event_p4.c
     919  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in p4_pmu_disable_all()
     988  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in p4_pmu_enable_all()
    1002  struct cpu_hw_events *cpuc;    in p4_pmu_handle_irq()
    1010  cpuc = &__get_cpu_var(cpu_hw_events);    in p4_pmu_handle_irq()
    1207  static int p4_pmu_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign)    in p4_pmu_schedule_events()
/arch/sh/kernel/
  perf_event.c
      31  struct cpu_hw_events {    (struct definition)
      37  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);    (macro argument)
     230  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in sh_pmu_stop()
     248  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in sh_pmu_start()
     265  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in sh_pmu_del()
     275  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in sh_pmu_add()
     365  struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);    in sh_pmu_setup()
     367  memset(cpuhw, 0, sizeof(struct cpu_hw_events));    in sh_pmu_setup()
/arch/blackfin/kernel/
  perf_event.c
     230  struct cpu_hw_events {    (struct definition)
     234  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);    (macro argument)
     303  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in bfin_pmu_stop()
     321  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in bfin_pmu_start()
     338  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in bfin_pmu_del()
     348  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in bfin_pmu_add()
     432  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in bfin_pmu_enable()
     466  struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);    in bfin_pmu_setup()
     468  memset(cpuhw, 0, sizeof(struct cpu_hw_events));    in bfin_pmu_setup()
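The sh_pmu_setup() and bfin_pmu_setup() hits above show the hotplug variant of the same idea: while a CPU is being brought up, its copy is reached with per_cpu(cpu_hw_events, cpu) and simply zeroed. A sketch with an invented callback name and struct layout:

    #include <linux/percpu.h>
    #include <linux/string.h>

    struct demo_hw {
            void *events[8];
            unsigned long used_mask;
    };

    static DEFINE_PER_CPU(struct demo_hw, demo_hw);

    /* Called while @cpu is being brought up, possibly from another CPU. */
    static void demo_pmu_setup(int cpu)
    {
            struct demo_hw *cpuhw = &per_cpu(demo_hw, cpu);

            /* start the incoming CPU with a clean slate */
            memset(cpuhw, 0, sizeof(*cpuhw));
    }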
/arch/powerpc/perf/
  core-fsl-emb.c
      23  struct cpu_hw_events {    (struct definition)
      29  static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);    (macro argument)
     185  struct cpu_hw_events *cpuhw;    in fsl_emb_pmu_disable()
     189  cpuhw = &__get_cpu_var(cpu_hw_events);    in fsl_emb_pmu_disable()
     224  struct cpu_hw_events *cpuhw;    in fsl_emb_pmu_enable()
     228  cpuhw = &__get_cpu_var(cpu_hw_events);    in fsl_emb_pmu_enable()
     271  struct cpu_hw_events *cpuhw;    in fsl_emb_pmu_add()
     278  cpuhw = &get_cpu_var(cpu_hw_events);    in fsl_emb_pmu_add()
     322  put_cpu_var(cpu_hw_events);    in fsl_emb_pmu_add()
     330  struct cpu_hw_events *cpuhw;    in fsl_emb_pmu_del()
    [all …]
  core-book3s.c
      22  struct cpu_hw_events {    (struct definition)
      42  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);    (macro argument)
     282  static int power_check_constraints(struct cpu_hw_events *cpuhw,    in power_check_constraints()
     489  static void freeze_limited_counters(struct cpu_hw_events *cpuhw,    in freeze_limited_counters()
     509  static void thaw_limited_counters(struct cpu_hw_events *cpuhw,    in thaw_limited_counters()
     538  static void write_mmcr0(struct cpu_hw_events *cpuhw, unsigned long mmcr0)    in write_mmcr0()
     579  struct cpu_hw_events *cpuhw;    in power_pmu_disable()
     585  cpuhw = &__get_cpu_var(cpu_hw_events);    in power_pmu_disable()
     628  struct cpu_hw_events *cpuhw;    in power_pmu_enable()
     640  cpuhw = &__get_cpu_var(cpu_hw_events);    in power_pmu_enable()
    [all …]
/arch/alpha/kernel/
  perf_event.c
      33  struct cpu_hw_events {    (struct definition)
      52  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);    (macro argument)
     381  static void maybe_change_configuration(struct cpu_hw_events *cpuc)    in maybe_change_configuration()
     425  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in alpha_pmu_add()
     477  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in alpha_pmu_del()
     525  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in alpha_pmu_stop()
     545  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in alpha_pmu_start()
     716  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in alpha_pmu_enable()
     742  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in alpha_pmu_disable()
     800  struct cpu_hw_events *cpuc;    in alpha_perf_event_irq_handler()
    [all …]
/arch/sparc/kernel/
  perf_event.c
      64  struct cpu_hw_events {    (struct definition)
     101  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = { .enabled = 1, };    (macro argument)
     558  static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in…    in sparc_pmu_enable_event()
     572  static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i…    in sparc_pmu_disable_event()
     674  static u64 maybe_change_configuration(struct cpu_hw_events *cpuc, u64 pcr)    in maybe_change_configuration()
     719  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in sparc_pmu_enable()
     746  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in sparc_pmu_disable()
     763  static int active_event_index(struct cpu_hw_events *cpuc,    in active_event_index()
     778  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in sparc_pmu_start()
     793  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in sparc_pmu_stop()
    [all …]
/arch/mips/kernel/
  perf_event_mipsxx.c
      32  struct cpu_hw_events {    (struct definition)
      49  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {    (macro argument)
     308  static int mipsxx_pmu_alloc_counter(struct cpu_hw_events *cpuc,    in mipsxx_pmu_alloc_counter()
     340  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in mipsxx_pmu_enable_event()
     355  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in mipsxx_pmu_disable_event()
     455  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in mipspmu_add()
     491  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in mipspmu_del()
     713  struct cpu_hw_events fake_cpuc;    in validate_group()
     732  static void handle_associated_event(struct cpu_hw_events *cpuc,    in handle_associated_event()
    1278  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);    in pause_local_counters()
    [all …]
/arch/arm/kernel/
  perf_event.c
      43  static DEFINE_PER_CPU(struct pmu_hw_events, cpu_hw_events);
     681  return &__get_cpu_var(cpu_hw_events);    in armpmu_get_cpu_events()
     688  struct pmu_hw_events *events = &per_cpu(cpu_hw_events, cpu);    in cpu_pmu_init()
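In arch/arm the per-CPU variable keeps the cpu_hw_events name but its type is struct pmu_hw_events, and a small helper (armpmu_get_cpu_events()) hands out the current CPU's copy to the interrupt handlers listed below. A sketch of that accessor pattern with invented type and variable names:

    #include <linux/percpu.h>

    /* Invented stand-in: on ARM the slot holds a pmu_hw_events-style struct. */
    struct demo_pmu_hw_events {
            void *events[8];
            unsigned long used_mask;
    };

    static DEFINE_PER_CPU(struct demo_pmu_hw_events, demo_cpu_hw_events);

    /* Accessor in the spirit of armpmu_get_cpu_events(): local copy only. */
    static struct demo_pmu_hw_events *demo_get_cpu_events(void)
    {
            return &__get_cpu_var(demo_cpu_hw_events);
    }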
  perf_event_xscale.c
     253  cpuc = &__get_cpu_var(cpu_hw_events);    in xscale1pmu_handle_irq()
     593  cpuc = &__get_cpu_var(cpu_hw_events);    in xscale2pmu_handle_irq()
  perf_event_v6.c
     494  cpuc = &__get_cpu_var(cpu_hw_events);    in armv6pmu_handle_irq()
  perf_event_v7.c
    1082  cpuc = &__get_cpu_var(cpu_hw_events);    in armv7pmu_handle_irq()