Searched refs:cpuhw (Results 1 – 9 of 9) sorted by relevance
/arch/powerpc/perf/
core-book3s.c
  120  static unsigned long ebb_switch_in(bool ebb, struct cpu_hw_events *cpuhw)  [in ebb_switch_in(), argument]
  122  return cpuhw->mmcr[0];  [in ebb_switch_in()]
  128  static inline void power_pmu_bhrb_read(struct cpu_hw_events *cpuhw) {}  [in power_pmu_bhrb_read(), argument]
  351  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  [in power_pmu_bhrb_enable(), local]
  357  if (event->ctx->task && cpuhw->bhrb_context != event->ctx) {  [in power_pmu_bhrb_enable()]
  359  cpuhw->bhrb_context = event->ctx;  [in power_pmu_bhrb_enable()]
  361  cpuhw->bhrb_users++;  [in power_pmu_bhrb_enable()]
  367  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  [in power_pmu_bhrb_disable(), local]
  372  WARN_ON_ONCE(!cpuhw->bhrb_users);  [in power_pmu_bhrb_disable()]
  373  cpuhw->bhrb_users--;  [in power_pmu_bhrb_disable()]
  [all …]
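The power_pmu_bhrb_enable()/power_pmu_bhrb_disable() hits above keep a per-CPU user count and a cached context pointer for the branch history buffer. A minimal sketch of that reference-counting idiom, with hypothetical names (my_bhrb_state and friends) rather than the real core-book3s.c code:

#include <linux/percpu.h>
#include <linux/perf_event.h>
#include <linux/bug.h>

/* Hypothetical per-CPU branch-history bookkeeping, not the real cpu_hw_events. */
struct my_bhrb_state {
        int                             users;  /* events currently wanting BHRB data */
        struct perf_event_context       *ctx;   /* context the buffer was filled for */
};

static DEFINE_PER_CPU(struct my_bhrb_state, my_bhrb_state);

static void my_bhrb_enable(struct perf_event *event)
{
        struct my_bhrb_state *s = this_cpu_ptr(&my_bhrb_state);

        /* A different task context invalidates previously collected branches. */
        if (event->ctx->task && s->ctx != event->ctx)
                s->ctx = event->ctx;
        s->users++;
}

static void my_bhrb_disable(struct perf_event *event)
{
        struct my_bhrb_state *s = this_cpu_ptr(&my_bhrb_state);

        WARN_ON_ONCE(!s->users);        /* disable without a matching enable */
        s->users--;
}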
core-fsl-emb.c
  209  struct cpu_hw_events *cpuhw;  [in fsl_emb_pmu_disable(), local]
  213  cpuhw = this_cpu_ptr(&cpu_hw_events);  [in fsl_emb_pmu_disable()]
  215  if (!cpuhw->disabled) {  [in fsl_emb_pmu_disable()]
  216  cpuhw->disabled = 1;  [in fsl_emb_pmu_disable()]
  221  if (!cpuhw->pmcs_enabled) {  [in fsl_emb_pmu_disable()]
  223  cpuhw->pmcs_enabled = 1;  [in fsl_emb_pmu_disable()]
  248  struct cpu_hw_events *cpuhw;  [in fsl_emb_pmu_enable(), local]
  252  cpuhw = this_cpu_ptr(&cpu_hw_events);  [in fsl_emb_pmu_enable()]
  253  if (!cpuhw->disabled)  [in fsl_emb_pmu_enable()]
  256  cpuhw->disabled = 0;  [in fsl_emb_pmu_enable()]
  [all …]
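fsl_emb_pmu_disable()/fsl_emb_pmu_enable() above toggle a per-CPU disabled flag with interrupts off so the counters are frozen and thawed exactly once. A rough sketch of the same pattern, again with a hypothetical my_pmu_state and the hardware accesses left as comments:

#include <linux/percpu.h>
#include <linux/irqflags.h>

/* Hypothetical per-CPU PMU bookkeeping. */
struct my_pmu_state {
        int disabled;           /* counters currently frozen */
        int pmcs_enabled;       /* hardware access set up once per CPU */
};

static DEFINE_PER_CPU(struct my_pmu_state, my_pmu_state);

static void my_pmu_disable(void)
{
        struct my_pmu_state *s;
        unsigned long flags;

        local_irq_save(flags);
        s = this_cpu_ptr(&my_pmu_state);
        if (!s->disabled) {
                s->disabled = 1;
                if (!s->pmcs_enabled)   /* first use on this CPU */
                        s->pmcs_enabled = 1;
                /* ...freeze the hardware counters here... */
        }
        local_irq_restore(flags);
}

static void my_pmu_enable(void)
{
        struct my_pmu_state *s;
        unsigned long flags;

        local_irq_save(flags);
        s = this_cpu_ptr(&my_pmu_state);
        if (s->disabled) {
                s->disabled = 0;
                /* ...reprogram and unfreeze the counters here... */
        }
        local_irq_restore(flags);
}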
/arch/s390/kernel/
perf_cpum_cf.c
  127  struct cpu_hw_events *cpuhw;  [in validate_ctr_version(), local]
  130  cpuhw = &get_cpu_var(cpu_hw_events);  [in validate_ctr_version()]
  136  if (cpuhw->info.cfvn < 1)  [in validate_ctr_version()]
  141  if (cpuhw->info.csvn < 1)  [in validate_ctr_version()]
  143  if ((cpuhw->info.csvn == 1 && hwc->config > 159) ||  [in validate_ctr_version()]
  144  (cpuhw->info.csvn == 2 && hwc->config > 175) ||  [in validate_ctr_version()]
  145  (cpuhw->info.csvn > 2 && hwc->config > 255))  [in validate_ctr_version()]
  156  struct cpu_hw_events *cpuhw;  [in validate_ctr_auth(), local]
  160  cpuhw = &get_cpu_var(cpu_hw_events);  [in validate_ctr_auth()]
  168  if (!(ctrs_state & cpuhw->info.auth_ctl))  [in validate_ctr_auth()]
  [all …]
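validate_ctr_version()/validate_ctr_auth() above peek at per-CPU facility information under get_cpu_var(), which also disables preemption for the duration of the check. A hedged sketch folding both checks into one hypothetical helper; the struct layout and error codes are invented, while the config limits are the ones visible in the excerpt:

#include <linux/percpu.h>
#include <linux/perf_event.h>
#include <linux/errno.h>

/* Hypothetical per-CPU copy of the counter facility information. */
struct my_cf_info {
        unsigned int    csvn;           /* counter-set version number */
        unsigned long   auth_ctl;       /* authorized counter sets */
};

static DEFINE_PER_CPU(struct my_cf_info, my_cf_info);

static int my_validate_config(struct hw_perf_event *hwc, unsigned long ctrs_state)
{
        struct my_cf_info *info;
        int err = 0;

        /* get_cpu_var() pins us to this CPU until put_cpu_var(). */
        info = &get_cpu_var(my_cf_info);

        /* Newer counter-set versions expose more counters. */
        if ((info->csvn == 1 && hwc->config > 159) ||
            (info->csvn == 2 && hwc->config > 175) ||
            (info->csvn > 2 && hwc->config > 255))
                err = -EOPNOTSUPP;

        /* The requested counter set must also be authorized. */
        if (!err && !(ctrs_state & info->auth_ctl))
                err = -EPERM;

        put_cpu_var(my_cf_info);
        return err;
}

Unlike this_cpu_ptr(), which assumes the caller is already safe against migration, the get_cpu_var()/put_cpu_var() pair handles preemption itself.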
perf_cpum_sf.c
  108  static int sf_buffer_available(struct cpu_hw_sf *cpuhw)  [in sf_buffer_available(), argument]
  110  return !!cpuhw->sfb.sdbt;  [in sf_buffer_available()]
  360  static void deallocate_buffers(struct cpu_hw_sf *cpuhw)  [in deallocate_buffers(), argument]
  362  if (cpuhw->sfb.sdbt)  [in deallocate_buffers()]
  363  free_sampling_buffer(&cpuhw->sfb);  [in deallocate_buffers()]
  366  static int allocate_buffers(struct cpu_hw_sf *cpuhw, struct hw_perf_event *hwc)  [in allocate_buffers(), argument]
  388  sfr_size = ALIGN((sizeof(*sfr) - sizeof(sfr->diag) + cpuhw->qsi.dsdes) +  [in allocate_buffers()]
  395  sfr->bsdes = cpuhw->qsi.bsdes;  [in allocate_buffers()]
  396  sfr->dsdes = cpuhw->qsi.dsdes;  [in allocate_buffers()]
  424  freq = sample_rate_to_freq(&cpuhw->qsi, SAMPL_RATE(hwc));  [in allocate_buffers()]
  [all …]
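allocate_buffers()/deallocate_buffers() above treat cpuhw->sfb.sdbt as the "buffer already exists" marker, making the allocation idempotent per CPU. A small sketch of that guard with a hypothetical my_sf_buffer, where a plain kzalloc() stands in for the real sampling-buffer construction:

#include <linux/slab.h>
#include <linux/errno.h>
#include <linux/types.h>

/* Hypothetical per-CPU sampling buffer descriptor. */
struct my_sf_buffer {
        void    *sdbt;          /* NULL means no buffer allocated yet */
        size_t  size;
};

static void my_free_buffer(struct my_sf_buffer *sfb)
{
        kfree(sfb->sdbt);
        sfb->sdbt = NULL;
}

static int my_alloc_buffer(struct my_sf_buffer *sfb, size_t size)
{
        if (sfb->sdbt)          /* already set up for this CPU */
                return 0;

        sfb->sdbt = kzalloc(size, GFP_KERNEL);
        if (!sfb->sdbt)
                return -ENOMEM;
        sfb->size = size;
        return 0;
}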
/arch/x86/events/amd/
core.c
  415  struct cpu_hw_events *cpuhw;  [in amd_pmu_cpu_dead(), local]
  420  cpuhw = &per_cpu(cpu_hw_events, cpu);  [in amd_pmu_cpu_dead()]
  422  if (cpuhw->amd_nb) {  [in amd_pmu_cpu_dead()]
  423  struct amd_nb *nb = cpuhw->amd_nb;  [in amd_pmu_cpu_dead()]
  428  cpuhw->amd_nb = NULL;  [in amd_pmu_cpu_dead()]
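amd_pmu_cpu_dead() above clears the dying CPU's pointer to a structure shared by the CPUs of a node and frees it once the last user is gone. A simplified sketch of that hotplug-teardown idiom; my_nb and its plain reference count are stand-ins, not the exact amd_nb bookkeeping:

#include <linux/percpu.h>
#include <linux/slab.h>

/* Hypothetical structure shared by the CPUs of one node. */
struct my_nb {
        int refcnt;
};

struct my_cpu_state {
        struct my_nb *nb;
};

static DEFINE_PER_CPU(struct my_cpu_state, my_cpu_state);

/* CPU-hotplug "dead" callback: @cpu is already offline here. */
static int my_pmu_cpu_dead(unsigned int cpu)
{
        struct my_cpu_state *s = &per_cpu(my_cpu_state, cpu);

        if (s->nb) {
                if (--s->nb->refcnt == 0)       /* last CPU of the node */
                        kfree(s->nb);
                s->nb = NULL;
        }
        return 0;
}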
/arch/sh/kernel/
perf_event.c
  357  struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);  [in sh_pmu_prepare_cpu(), local]
  359  memset(cpuhw, 0, sizeof(struct cpu_hw_events));  [in sh_pmu_prepare_cpu()]
/arch/blackfin/kernel/
perf_event.c
  458  struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);  [in bfin_pmu_prepare_cpu(), local]
  461  memset(cpuhw, 0, sizeof(struct cpu_hw_events));  [in bfin_pmu_prepare_cpu()]
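The sh and blackfin hits are both CPU-hotplug "prepare" callbacks that simply zero the incoming CPU's per-CPU state. A sketch of how such a callback is commonly registered; the state name and the my_pmu_* identifiers are made up:

#include <linux/cpuhotplug.h>
#include <linux/percpu.h>
#include <linux/string.h>
#include <linux/init.h>

struct my_cpu_state {
        int     n_events;
        void    *events[8];
};

static DEFINE_PER_CPU(struct my_cpu_state, my_cpu_state);

/* Runs on a control CPU before @cpu is brought online. */
static int my_pmu_prepare_cpu(unsigned int cpu)
{
        struct my_cpu_state *s = &per_cpu(my_cpu_state, cpu);

        memset(s, 0, sizeof(*s));       /* start the new CPU with clean state */
        return 0;
}

static int __init my_pmu_init(void)
{
        int ret;

        /* Dynamic prepare state; the callback also runs for CPUs already online. */
        ret = cpuhp_setup_state(CPUHP_BP_PREPARE_DYN, "perf/my_pmu:prepare",
                                my_pmu_prepare_cpu, NULL);
        return ret < 0 ? ret : 0;
}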
/arch/sparc/kernel/
perf_event.c
  1499  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  [in sparc_pmu_start_txn(), local]
  1501  WARN_ON_ONCE(cpuhw->txn_flags); /* txn already in flight */  [in sparc_pmu_start_txn()]
  1503  cpuhw->txn_flags = txn_flags;  [in sparc_pmu_start_txn()]
  1517  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  [in sparc_pmu_cancel_txn(), local]
  1520  WARN_ON_ONCE(!cpuhw->txn_flags); /* no txn in flight */  [in sparc_pmu_cancel_txn()]
  1522  txn_flags = cpuhw->txn_flags;  [in sparc_pmu_cancel_txn()]
  1523  cpuhw->txn_flags = 0;  [in sparc_pmu_cancel_txn()]
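sparc_pmu_start_txn()/sparc_pmu_cancel_txn() above implement the perf transaction hooks by parking the flags in per-CPU state and warning on unbalanced calls. A condensed sketch of that pairing with a hypothetical my_hw_state; as in the excerpt, only PERF_PMU_TXN_ADD transactions would do real scheduling work:

#include <linux/percpu.h>
#include <linux/perf_event.h>
#include <linux/bug.h>

/* Hypothetical per-CPU scheduling state for a PMU driver. */
struct my_hw_state {
        unsigned int txn_flags;         /* flags of the transaction in flight */
};

static DEFINE_PER_CPU(struct my_hw_state, my_hw_state);

static void my_pmu_start_txn(struct pmu *pmu, unsigned int txn_flags)
{
        struct my_hw_state *s = this_cpu_ptr(&my_hw_state);

        WARN_ON_ONCE(s->txn_flags);     /* txn already in flight */
        s->txn_flags = txn_flags;

        if (!(txn_flags & PERF_PMU_TXN_ADD))
                return;                 /* only ADD transactions batch events */
        /* ...stop the PMU so the whole group can be scheduled atomically... */
}

static void my_pmu_cancel_txn(struct pmu *pmu)
{
        struct my_hw_state *s = this_cpu_ptr(&my_hw_state);
        unsigned int txn_flags;

        WARN_ON_ONCE(!s->txn_flags);    /* no txn in flight */

        txn_flags = s->txn_flags;
        s->txn_flags = 0;
        if (!(txn_flags & PERF_PMU_TXN_ADD))
                return;
        /* ...discard the partially scheduled group and restart the PMU... */
}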
/arch/metag/kernel/perf/
perf_event.c
  757  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  [in metag_pmu_counter_overflow(), local]
  758  struct perf_event *event = cpuhw->events[idx];  [in metag_pmu_counter_overflow()]
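The metag hit, like several of the results above, maps a hardware counter index back to its owning perf_event through a per-CPU array. A last sketch of that lookup as it might appear in an overflow handler; my_cpu_events and MY_MAX_COUNTERS are hypothetical:

#include <linux/percpu.h>
#include <linux/perf_event.h>

#define MY_MAX_COUNTERS 4

/* Hypothetical per-CPU table: which event owns which hardware counter. */
struct my_cpu_events {
        struct perf_event *events[MY_MAX_COUNTERS];
};

static DEFINE_PER_CPU(struct my_cpu_events, my_cpu_events);

/* Called from the counter-overflow interrupt on the CPU that overflowed. */
static void my_counter_overflow(int idx)
{
        struct my_cpu_events *c = this_cpu_ptr(&my_cpu_events);
        struct perf_event *event = c->events[idx];

        if (!event)
                return;         /* counter was released under us */
        /* ...update event->count and rearm the hardware counter... */
}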