/arch/alpha/kernel/
D  perf_event.c
  in maybe_change_configuration():
     381  static void maybe_change_configuration(struct cpu_hw_events *cpuc)   (argument)
     385  if (cpuc->n_added == 0)
     389  for (j = 0; j < cpuc->n_events; j++) {
     390  struct perf_event *pe = cpuc->event[j];
     392  if (cpuc->current_idx[j] != PMC_NO_INDEX &&
     393  cpuc->current_idx[j] != pe->hw.idx) {
     394  alpha_perf_event_update(pe, &pe->hw, cpuc->current_idx[j], 0);
     395  cpuc->current_idx[j] = PMC_NO_INDEX;
     400  cpuc->idx_mask = 0;
     401  for (j = 0; j < cpuc->n_events; j++) {
  [all …]
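The alpha hunk above shows the counter-reshuffle step: after new events have been scheduled, any event whose old hardware slot no longer matches its new assignment is drained and released before the busy-counter mask is rebuilt. Below is a minimal user-space sketch of that pattern, not the kernel's code: the struct layout is simplified and flush_count() is an invented stub standing in for alpha_perf_event_update().

#include <stdio.h>

#define PMC_NO_INDEX  (-1)
#define MAX_EVENTS    4

struct event { int idx; };               /* idx: newly assigned slot */

struct cpu_hw_events {
	int n_events;                    /* scheduled events */
	int n_added;                     /* events added since last enable */
	int current_idx[MAX_EVENTS];     /* slot each event last ran on */
	struct event *event[MAX_EVENTS];
	unsigned long idx_mask;          /* bitmask of busy counters */
};

/* Stand-in for alpha_perf_event_update(): drain the old counter. */
static void flush_count(struct event *pe, int old_idx)
{
	printf("draining event %p from counter %d\n", (void *)pe, old_idx);
}

static void maybe_change_configuration(struct cpu_hw_events *cpuc)
{
	int j;

	if (cpuc->n_added == 0)
		return;

	/* Pass 1: flush events that moved to a different counter. */
	for (j = 0; j < cpuc->n_events; j++) {
		struct event *pe = cpuc->event[j];

		if (cpuc->current_idx[j] != PMC_NO_INDEX &&
		    cpuc->current_idx[j] != pe->idx) {
			flush_count(pe, cpuc->current_idx[j]);
			cpuc->current_idx[j] = PMC_NO_INDEX;
		}
	}

	/* Pass 2: adopt the new assignment and rebuild the busy mask. */
	cpuc->idx_mask = 0;
	for (j = 0; j < cpuc->n_events; j++) {
		cpuc->current_idx[j] = cpuc->event[j]->idx;
		cpuc->idx_mask |= 1UL << cpuc->current_idx[j];
	}
}

int main(void)
{
	struct event e0 = { .idx = 1 }, e1 = { .idx = 2 };
	struct cpu_hw_events cpuc = {
		.n_events = 2, .n_added = 1,
		.current_idx = { 0, 2 },     /* e0 moved: 0 -> 1 */
		.event = { &e0, &e1 },
	};

	maybe_change_configuration(&cpuc);
	printf("idx_mask = 0x%lx\n", cpuc.idx_mask);   /* 0x6 */
	return 0;
}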
/arch/x86/kernel/cpu/
D  perf_event_intel_lbr.c
     109  static void intel_pmu_lbr_filter(struct cpu_hw_events *cpuc);
  in __intel_pmu_lbr_enable():
     119  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     121  if (cpuc->lbr_sel)
     122  wrmsrl(MSR_LBR_SELECT, cpuc->lbr_sel->config);
  in intel_pmu_lbr_enable():
     169  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     178  if (event->ctx->task && cpuc->lbr_context != event->ctx) {
     180  cpuc->lbr_context = event->ctx;
     182  cpuc->br_sel = event->hw.branch_reg.reg;
     184  cpuc->lbr_users++;
  in intel_pmu_lbr_disable():
     189  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
  [all …]
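The LBR lines show two per-CPU bookkeeping ideas: an lbr_users reference count so several events can share the branch-record facility, and an lbr_context pointer so a task switch invalidates stale records. A compilable sketch of that refcount discipline follows; the MSR writes are stubs, the register number is made up, and (unlike the real driver, which defers the writes to its enable-all path) the 0<->1 transitions here drive the hardware directly to keep the sketch short.

#include <stdio.h>
#include <stdint.h>

#define MSR_LBR_ENABLE 0x100   /* made-up register number for the stub */

struct cpu_hw_events {
	int   lbr_users;     /* events using LBR on this CPU */
	void *lbr_context;   /* task context that owns the current records */
};

static void wrmsrl(uint32_t msr, uint64_t val)   /* stub MSR write */
{
	printf("wrmsr 0x%x <- %llu\n", msr, (unsigned long long)val);
}

static void lbr_reset(void)                      /* stub */
{
	printf("LBR stack reset\n");
}

static void lbr_enable(struct cpu_hw_events *cpuc, void *task_ctx)
{
	/* Records captured for another task are useless: drop them. */
	if (task_ctx && cpuc->lbr_context != task_ctx) {
		lbr_reset();
		cpuc->lbr_context = task_ctx;
	}

	if (cpuc->lbr_users++ == 0)      /* first user switches it on */
		wrmsrl(MSR_LBR_ENABLE, 1);
}

static void lbr_disable(struct cpu_hw_events *cpuc)
{
	if (--cpuc->lbr_users == 0)      /* last user switches it off */
		wrmsrl(MSR_LBR_ENABLE, 0);
}

int main(void)
{
	struct cpu_hw_events cpuc = { 0 };
	int ctx;

	lbr_enable(&cpuc, &ctx);   /* 0 -> 1: hardware touched */
	lbr_enable(&cpuc, &ctx);   /* refcount only */
	lbr_disable(&cpuc);
	lbr_disable(&cpuc);        /* 1 -> 0: hardware touched */
	return 0;
}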
D  perf_event.c
  in x86_pmu_disable_all():
     497  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     503  if (!test_bit(idx, cpuc->active_mask))
  in x86_pmu_disable():
     515  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     520  if (!cpuc->enabled)
     523  cpuc->n_added = 0;
     524  cpuc->enabled = 0;
  in x86_pmu_enable_all():
     532  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     536  struct hw_perf_event *hwc = &cpuc->events[idx]->hw;
     538  if (!test_bit(idx, cpuc->active_mask))
  in x86_schedule_events():
     727  int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign)   (argument)
  [all …]
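Two conventions recur across these x86 hunks: every hot path starts by taking a pointer to this CPU's cpu_hw_events, loops over counters skip slots whose bit is clear in active_mask, and the software enabled flag makes disable idempotent. A user-space sketch of those guards, with stop_counter() as an invented stub for the per-counter MSR write:

#include <stdio.h>

#define NUM_COUNTERS 4

struct cpu_hw_events {
	int enabled;                /* PMU currently enabled on this CPU */
	int n_added;                /* events added since the last enable */
	unsigned long active_mask;  /* which counter slots are live */
};

static int test_bit(int nr, const unsigned long *mask)
{
	return (*mask >> nr) & 1;
}

static void stop_counter(int idx)   /* stub for the per-counter write */
{
	printf("counter %d stopped\n", idx);
}

static void pmu_disable_all(struct cpu_hw_events *cpuc)
{
	int idx;

	for (idx = 0; idx < NUM_COUNTERS; idx++) {
		if (!test_bit(idx, &cpuc->active_mask))
			continue;   /* slot not in use: nothing to stop */
		stop_counter(idx);
	}
}

static void pmu_disable(struct cpu_hw_events *cpuc)
{
	if (!cpuc->enabled)   /* already off: disable is idempotent */
		return;

	cpuc->n_added = 0;
	cpuc->enabled = 0;
	pmu_disable_all(cpuc);
}

int main(void)
{
	struct cpu_hw_events cpuc = { .enabled = 1, .active_mask = 0x5 };

	pmu_disable(&cpuc);   /* stops counters 0 and 2 */
	pmu_disable(&cpuc);   /* no-op on the second call */
	return 0;
}

Tracking the enabled state in software is what lets nested or repeated disable calls skip redundant hardware writes.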
D  perf_event_amd.c
  in amd_has_nb():
     179  static inline int amd_has_nb(struct cpu_hw_events *cpuc)   (argument)
     181  struct amd_nb *nb = cpuc->amd_nb;
  in amd_put_event_constraints():
     186  static void amd_put_event_constraints(struct cpu_hw_events *cpuc,   (argument)
     190  struct amd_nb *nb = cpuc->amd_nb;
     196  if (!(amd_has_nb(cpuc) && amd_is_nb_event(hwc)))
  in amd_get_event_constraints():
     252  amd_get_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event)   (argument)
     255  struct amd_nb *nb = cpuc->amd_nb;
     263  if (!(amd_has_nb(cpuc) && amd_is_nb_event(hwc)))
  in amd_pmu_cpu_prepare():
     344  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);   (local)
     346  WARN_ON_ONCE(cpuc->amd_nb);
  [all …]
D  perf_event_intel_ds.c
  in intel_pmu_disable_bts():
     274  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     277  if (!cpuc->ds)
  in intel_pmu_drain_bts_buffer():
     291  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     292  struct debug_store *ds = cpuc->ds;
     298  struct perf_event *event = cpuc->events[X86_PMC_IDX_FIXED_BTS];
  in intel_pmu_pebs_enable():
     437  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     442  cpuc->pebs_enabled |= 1ULL << hwc->idx;
  in intel_pmu_pebs_disable():
     447  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     450  cpuc->pebs_enabled &= ~(1ULL << hwc->idx);
     451  if (cpuc->enabled)
  [all …]
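For PEBS, each hardware counter owns one bit in a per-CPU 64-bit mask, so arming and disarming is pure bit arithmetic, and the mask only reaches the control register while the PMU is live. A small compilable sketch, with write_pebs_msr() as an invented stub for the real wrmsrl() call:

#include <stdio.h>
#include <stdint.h>

struct cpu_hw_events {
	int      enabled;        /* PMU live on this CPU */
	uint64_t pebs_enabled;   /* bit n set => counter n has PEBS armed */
};

static void write_pebs_msr(uint64_t mask)   /* stub for wrmsrl() */
{
	printf("PEBS_ENABLE <- 0x%llx\n", (unsigned long long)mask);
}

static void pebs_enable(struct cpu_hw_events *cpuc, int idx)
{
	cpuc->pebs_enabled |= 1ULL << idx;
}

static void pebs_disable(struct cpu_hw_events *cpuc, int idx)
{
	cpuc->pebs_enabled &= ~(1ULL << idx);

	/* Push the change to hardware only while the PMU is enabled;
	 * otherwise the next enable-all installs the mask anyway. */
	if (cpuc->enabled)
		write_pebs_msr(cpuc->pebs_enabled);
}

int main(void)
{
	struct cpu_hw_events cpuc = { .enabled = 1 };

	pebs_enable(&cpuc, 1);
	pebs_enable(&cpuc, 3);
	pebs_disable(&cpuc, 1);   /* prints 0x8: only counter 3 remains */
	return 0;
}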
D  perf_event_intel.c
  in intel_pmu_disable_all():
     752  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     756  if (test_bit(X86_PMC_IDX_FIXED_BTS, cpuc->active_mask))
  in intel_pmu_enable_all():
     765  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     770  x86_pmu.intel_ctrl & ~cpuc->intel_ctrl_guest_mask);
     772  if (test_bit(X86_PMC_IDX_FIXED_BTS, cpuc->active_mask)) {
     774  cpuc->events[X86_PMC_IDX_FIXED_BTS];
  in intel_pmu_nhm_workaround():
     799  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     832  event = cpuc->events[i];
     846  event = cpuc->events[i];
  in intel_pmu_disable_event():
     893  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
  [all …]
D  perf_event_p6.c
  in p6_pmu_disable_event():
      67  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
      71  if (cpuc->enabled)
  in p6_pmu_enable_event():
      79  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
      84  if (cpuc->enabled)
D  perf_event.h
     312  int (*schedule_events)(struct cpu_hw_events *cpuc, int n, int *assign);
     329  (*get_event_constraints)(struct cpu_hw_events *cpuc,
     332  void (*put_event_constraints)(struct cpu_hw_events *cpuc,
     469  int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign);
     515  x86_get_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event);
D  perf_event_p4.c
  in p4_pmu_disable_all():
     919  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     923  struct perf_event *event = cpuc->events[idx];
     924  if (!test_bit(idx, cpuc->active_mask))
  in p4_pmu_enable_all():
     988  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     992  struct perf_event *event = cpuc->events[idx];
     993  if (!test_bit(idx, cpuc->active_mask))
  in p4_pmu_handle_irq():
    1002  struct cpu_hw_events *cpuc;   (local)
    1010  cpuc = &__get_cpu_var(cpu_hw_events);
    1015  if (!test_bit(idx, cpuc->active_mask)) {
    1017  if (__test_and_clear_bit(idx, cpuc->running))
  [all …]
/arch/sparc/kernel/
D  perf_event.c
  in sparc_pmu_enable_event():
     558  static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in…   (argument)
     562  enc = perf_event_get_enc(cpuc->events[idx]);
     564  val = cpuc->pcr;
     567  cpuc->pcr = val;
     569  pcr_ops->write(cpuc->pcr);
  in sparc_pmu_disable_event():
     572  static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i…   (argument)
     578  val = cpuc->pcr;
     581  cpuc->pcr = val;
     583  pcr_ops->write(cpuc->pcr);
  in maybe_change_configuration():
     674  static u64 maybe_change_configuration(struct cpu_hw_events *cpuc, u64 pcr)   (argument)
  [all …]
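The sparc hunks show a classic read-modify-write on a cached control register: the driver edits its in-memory copy of the PCR, then writes the whole register out, so cpuc->pcr always mirrors what the hardware holds. A sketch under invented field widths (one 8-bit event field per counter; the real layout is chip-specific), with write_pcr() as a stub for pcr_ops->write():

#include <stdio.h>
#include <stdint.h>

struct cpu_hw_events {
	uint64_t pcr;   /* cached copy of the performance control reg */
};

static void write_pcr(uint64_t val)   /* stub for pcr_ops->write() */
{
	printf("PCR <- 0x%llx\n", (unsigned long long)val);
}

/* Invented layout: one 8-bit event field per counter. */
#define EVENT_SHIFT(idx)  ((idx) * 8)
#define EVENT_MASK(idx)   (0xffULL << EVENT_SHIFT(idx))

static void pmu_enable_event(struct cpu_hw_events *cpuc,
			     uint64_t enc, int idx)
{
	uint64_t val = cpuc->pcr;

	val &= ~EVENT_MASK(idx);             /* drop the old encoding */
	val |= enc << EVENT_SHIFT(idx);      /* install the new one */

	cpuc->pcr = val;                     /* cache stays authoritative */
	write_pcr(cpuc->pcr);
}

int main(void)
{
	struct cpu_hw_events cpuc = { .pcr = 0 };

	pmu_enable_event(&cpuc, 0x2b, 0);   /* event 0x2b on counter 0 */
	pmu_enable_event(&cpuc, 0x11, 1);   /* event 0x11 on counter 1 */
	return 0;
}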
/arch/sh/kernel/
D  perf_event.c
  in sh_pmu_stop():
     230  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     236  cpuc->events[idx] = NULL;
  in sh_pmu_start():
     248  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     258  cpuc->events[idx] = event;
  in sh_pmu_del():
     265  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     268  __clear_bit(event->hw.idx, cpuc->used_mask);
  in sh_pmu_add():
     275  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     282  if (__test_and_set_bit(idx, cpuc->used_mask)) {
     283  idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);
     287  __set_bit(idx, cpuc->used_mask);
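sh_pmu_add() (and the near-identical blackfin code just below) is the simplest counter allocator: try the slot the event used last time, and if __test_and_set_bit() reports it busy, fall back to find_first_zero_bit() over used_mask. A compilable sketch with simplified single-word bit helpers in place of the kernel's:

#include <errno.h>
#include <stdio.h>

#define NUM_EVENTS 8   /* stands in for sh_pmu->num_events */

struct cpu_hw_events {
	unsigned long used_mask;   /* one bit per hardware counter */
};

/* Simplified single-word versions of the kernel bit helpers. */
static int test_and_set_bit(int nr, unsigned long *mask)
{
	int old = (*mask >> nr) & 1;

	*mask |= 1UL << nr;
	return old;
}

static int find_first_zero_bit(unsigned long mask, int size)
{
	int i;

	for (i = 0; i < size; i++)
		if (!((mask >> i) & 1))
			return i;
	return size;
}

static int pmu_add(struct cpu_hw_events *cpuc, int hint_idx)
{
	int idx = hint_idx;

	if (test_and_set_bit(idx, &cpuc->used_mask)) {
		/* Preferred slot is busy: take any free counter. */
		idx = find_first_zero_bit(cpuc->used_mask, NUM_EVENTS);
		if (idx == NUM_EVENTS)
			return -EAGAIN;    /* every counter is taken */
		test_and_set_bit(idx, &cpuc->used_mask);
	}
	return idx;   /* slot the event will run on */
}

int main(void)
{
	struct cpu_hw_events cpuc = { .used_mask = 0 };

	printf("first event  -> counter %d\n", pmu_add(&cpuc, 0));
	printf("second event -> counter %d\n", pmu_add(&cpuc, 0));
	return 0;
}

Trying the previous slot first tends to keep an event on the same counter across stop/start cycles, so its programmed state can be reused.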
/arch/blackfin/kernel/
D  perf_event.c
  in bfin_pmu_stop():
     303  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     309  cpuc->events[idx] = NULL;
  in bfin_pmu_start():
     321  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     331  cpuc->events[idx] = event;
  in bfin_pmu_del():
     338  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     341  __clear_bit(event->hw.idx, cpuc->used_mask);
  in bfin_pmu_add():
     348  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     355  if (__test_and_set_bit(idx, cpuc->used_mask)) {
     356  idx = find_first_zero_bit(cpuc->used_mask, MAX_HWEVENTS);
     360  __set_bit(idx, cpuc->used_mask);
  [all …]
/arch/arm/kernel/
D  perf_event_xscale.c
  in xscale1pmu_handle_irq():
     227  struct pmu_hw_events *cpuc;   (local)
     253  cpuc = &__get_cpu_var(cpu_hw_events);
     255  struct perf_event *event = cpuc->events[idx];
  in xscale1pmu_get_event_idx():
     352  xscale1pmu_get_event_idx(struct pmu_hw_events *cpuc,   (argument)
     356  if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask))
     361  if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask))
     364  if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask))
  in xscale2pmu_handle_irq():
     573  struct pmu_hw_events *cpuc;   (local)
     593  cpuc = &__get_cpu_var(cpu_hw_events);
     595  struct perf_event *event = cpuc->events[idx];
  [all …]
D  perf_event_v6.c
  in armv6pmu_handle_irq():
     476  struct pmu_hw_events *cpuc;   (local)
     494  cpuc = &__get_cpu_var(cpu_hw_events);
     496  struct perf_event *event = cpuc->events[idx];
  in armv6pmu_get_event_idx():
     559  armv6pmu_get_event_idx(struct pmu_hw_events *cpuc,   (argument)
     564  if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask))
     573  if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask))
     576  if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask))
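armv6pmu_get_event_idx() (like the XScale variant above it) encodes a fixed placement policy: a cycles event may only land on the dedicated cycle counter, while everything else walks a preference order across the general-purpose counters. A sketch with invented counter indices and event code:

#include <errno.h>
#include <stdio.h>

/* Invented indices: a dedicated cycle counter plus two GP counters. */
enum { CYCLE_COUNTER, COUNTER0, COUNTER1 };

#define EVT_CYCLES 0xff   /* illustrative "CPU cycles" event code */

struct pmu_hw_events {
	unsigned long used_mask;
};

static int test_and_set_bit(int nr, unsigned long *mask)
{
	int old = (*mask >> nr) & 1;

	*mask |= 1UL << nr;
	return old;
}

static int get_event_idx(struct pmu_hw_events *cpuc, int evt)
{
	if (evt == EVT_CYCLES) {
		/* Cycles can only live on the dedicated counter. */
		if (test_and_set_bit(CYCLE_COUNTER, &cpuc->used_mask))
			return -EAGAIN;
		return CYCLE_COUNTER;
	}

	/* Anything else: fixed preference order over the GP counters. */
	if (!test_and_set_bit(COUNTER1, &cpuc->used_mask))
		return COUNTER1;
	if (!test_and_set_bit(COUNTER0, &cpuc->used_mask))
		return COUNTER0;

	return -EAGAIN;   /* all counters taken */
}

int main(void)
{
	struct pmu_hw_events cpuc = { .used_mask = 0 };

	printf("cycles  -> %d\n", get_event_idx(&cpuc, EVT_CYCLES));
	printf("event A -> %d\n", get_event_idx(&cpuc, 0x3));
	printf("event B -> %d\n", get_event_idx(&cpuc, 0x4));
	printf("event C -> %d\n", get_event_idx(&cpuc, 0x5));  /* -EAGAIN */
	return 0;
}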
D  perf_event_v7.c
  in armv7pmu_handle_irq():
    1060  struct pmu_hw_events *cpuc;   (local)
    1082  cpuc = &__get_cpu_var(cpu_hw_events);
    1084  struct perf_event *event = cpuc->events[idx];
  in armv7pmu_get_event_idx():
    1142  static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,   (argument)
    1150  if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
    1161  if (!test_and_set_bit(idx, cpuc->used_mask))
/arch/mips/kernel/
D  perf_event_mipsxx.c
  in mipsxx_pmu_alloc_counter():
     308  static int mipsxx_pmu_alloc_counter(struct cpu_hw_events *cpuc,   (argument)
     331  !test_and_set_bit(i, cpuc->used_mask))
  in mipsxx_pmu_enable_event():
     340  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     344  cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base & 0xff) |
  in mipsxx_pmu_disable_event():
     355  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     361  cpuc->saved_ctrl[idx] = mipsxx_pmu_read_control(idx) &
     363  mipsxx_pmu_write_control(idx, cpuc->saved_ctrl[idx]);
  in mipspmu_add():
     455  struct cpu_hw_events *cpuc = &__get_cpu_var(cpu_hw_events);   (local)
     463  idx = mipsxx_pmu_alloc_counter(cpuc, hwc);
     475  cpuc->events[idx] = event;
  [all …]
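mipsxx_pmu_alloc_counter() generalizes the same allocation idea: scan the counters and claim the first one that is both free in used_mask and able to count the event at hand. In the sketch below, can_count() is an invented stand-in for the real hardware constraint test:

#include <errno.h>
#include <stdio.h>

#define NUM_COUNTERS 4

struct cpu_hw_events {
	unsigned long used_mask;
};

static int test_and_set_bit(int nr, unsigned long *mask)
{
	int old = (*mask >> nr) & 1;

	*mask |= 1UL << nr;
	return old;
}

/* Invented constraint: even events only count on even counters. */
static int can_count(int counter, int event)
{
	return (counter % 2) == (event % 2);
}

static int alloc_counter(struct cpu_hw_events *cpuc, int event)
{
	int i;

	for (i = 0; i < NUM_COUNTERS; i++) {
		if (can_count(i, event) &&
		    !test_and_set_bit(i, &cpuc->used_mask))
			return i;   /* free and able to count this event */
	}
	return -EAGAIN;
}

int main(void)
{
	struct cpu_hw_events cpuc = { .used_mask = 0 };

	printf("event 2 -> counter %d\n", alloc_counter(&cpuc, 2));
	printf("event 4 -> counter %d\n", alloc_counter(&cpuc, 4));
	printf("event 1 -> counter %d\n", alloc_counter(&cpuc, 1));
	return 0;
}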