
Lines Matching refs:pmc

45 struct kvm_pmc *pmc; in reprogram_fixed_counters() local
47 pmc = get_fixed_pmc(pmu, MSR_CORE_PERF_FIXED_CTR0 + i); in reprogram_fixed_counters()
53 reprogram_fixed_counter(pmc, new_ctrl, i); in reprogram_fixed_counters()
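The three matches above are from reprogram_fixed_counters(), which walks the fixed counters, looks each one up via get_fixed_pmc(), and reprograms it from its control field. As a hedged illustration in plain C (names and the example value are mine, not the kernel's): IA32_FIXED_CTR_CTRL packs four control bits per fixed counter, so the per-counter field can be pulled out like this:

    #include <stdint.h>
    #include <stdio.h>

    /* 4 control bits per fixed counter in IA32_FIXED_CTR_CTRL */
    static uint8_t fixed_ctrl_field(uint64_t fixed_ctr_ctrl, int idx)
    {
        return (fixed_ctr_ctrl >> (idx * 4)) & 0xf;
    }

    int main(void)
    {
        uint64_t fixed_ctr_ctrl = 0x0b0;   /* example: counter 1 counts ring 0 and ring 3, PMI on overflow */

        for (int i = 0; i < 3; i++)
            printf("fixed counter %d ctrl = %#x\n", i, fixed_ctrl_field(fixed_ctr_ctrl, i));
        return 0;
    }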
71 static unsigned int intel_pmc_perf_hw_id(struct kvm_pmc *pmc) in intel_pmc_perf_hw_id() argument
73 struct kvm_pmu *pmu = pmc_to_pmu(pmc); in intel_pmc_perf_hw_id()
74 u8 event_select = pmc->eventsel & ARCH_PERFMON_EVENTSEL_EVENT; in intel_pmc_perf_hw_id()
75 u8 unit_mask = (pmc->eventsel & ARCH_PERFMON_EVENTSEL_UMASK) >> 8; in intel_pmc_perf_hw_id()
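Lines 74-75 split the event-select MSR image into its architectural fields: event select in bits 7:0, unit mask in bits 15:8. A standalone sketch of the same extraction, with the mask values written out (the example eventsel is illustrative):

    #include <stdint.h>
    #include <stdio.h>

    #define EVENTSEL_EVENT 0x00000000000000ffULL   /* ARCH_PERFMON_EVENTSEL_EVENT */
    #define EVENTSEL_UMASK 0x000000000000ff00ULL   /* ARCH_PERFMON_EVENTSEL_UMASK */

    int main(void)
    {
        uint64_t eventsel = 0x013c;   /* example: event 0x3c, umask 0x01 (unhalted reference cycles) */

        uint8_t event_select = eventsel & EVENTSEL_EVENT;
        uint8_t unit_mask = (eventsel & EVENTSEL_UMASK) >> 8;

        printf("event=%#x umask=%#x\n", event_select, unit_mask);
        return 0;
    }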
103 static bool intel_pmc_is_enabled(struct kvm_pmc *pmc) in intel_pmc_is_enabled() argument
105 struct kvm_pmu *pmu = pmc_to_pmu(pmc); in intel_pmc_is_enabled()
110 return test_bit(pmc->idx, (unsigned long *)&pmu->global_ctrl); in intel_pmc_is_enabled()
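intel_pmc_is_enabled() reduces to the single bit test on line 110 because IA32_PERF_GLOBAL_CTRL carries one enable bit per counter, general-purpose counters from bit 0 and fixed counters from bit 32, and KVM's pmc->idx follows that numbering. A minimal model of the check (values illustrative):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static bool counter_enabled(uint64_t global_ctrl, unsigned int idx)
    {
        return (global_ctrl >> idx) & 1;
    }

    int main(void)
    {
        uint64_t global_ctrl = (1ULL << 0) | (1ULL << 32);   /* GP0 and fixed0 enabled */

        printf("GP0: %d, GP1: %d, fixed0: %d\n",
               counter_enabled(global_ctrl, 0),
               counter_enabled(global_ctrl, 1),
               counter_enabled(global_ctrl, 32));
        return 0;
    }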
241 struct kvm_pmc *pmc; in intel_msr_idx_to_pmc() local
243 pmc = get_fixed_pmc(pmu, msr); in intel_msr_idx_to_pmc()
244 pmc = pmc ? pmc : get_gp_pmc(pmu, msr, MSR_P6_EVNTSEL0); in intel_msr_idx_to_pmc()
245 pmc = pmc ? pmc : get_gp_pmc(pmu, msr, MSR_IA32_PERFCTR0); in intel_msr_idx_to_pmc()
247 return pmc; in intel_msr_idx_to_pmc()
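intel_msr_idx_to_pmc() (lines 243-245) tries the fixed-counter MSR range first, then falls back to the two general-purpose ranges. A userspace model of that fallback order, using stand-in range helpers rather than the kernel's get_fixed_pmc()/get_gp_pmc(), and assumed counter counts:

    #include <stdint.h>
    #include <stdio.h>

    #define MSR_CORE_PERF_FIXED_CTR0 0x309
    #define MSR_P6_EVNTSEL0          0x186
    #define MSR_IA32_PERFCTR0        0x0c1
    #define NR_FIXED 3   /* assumed */
    #define NR_GP    4   /* assumed */

    static int idx_in_range(uint32_t msr, uint32_t base, int nr)
    {
        return (msr >= base && msr < base + nr) ? (int)(msr - base) : -1;
    }

    int main(void)
    {
        uint32_t msrs[] = { 0x30a, 0x187, 0x0c3, 0x345 };

        for (unsigned int i = 0; i < sizeof(msrs) / sizeof(msrs[0]); i++) {
            uint32_t msr = msrs[i];
            int idx;

            /* Same order as lines 243-245: fixed, then the two GP bases. */
            if ((idx = idx_in_range(msr, MSR_CORE_PERF_FIXED_CTR0, NR_FIXED)) >= 0)
                printf("%#x -> fixed counter %d\n", msr, idx);
            else if ((idx = idx_in_range(msr, MSR_P6_EVNTSEL0, NR_GP)) >= 0)
                printf("%#x -> GP counter %d (event select)\n", msr, idx);
            else if ((idx = idx_in_range(msr, MSR_IA32_PERFCTR0, NR_GP)) >= 0)
                printf("%#x -> GP counter %d (counter)\n", msr, idx);
            else
                printf("%#x -> no PMC\n", msr);
        }
        return 0;
    }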
358 struct kvm_pmc *pmc; in intel_pmu_get_msr() local
375 if ((pmc = get_gp_pmc(pmu, msr, MSR_IA32_PERFCTR0)) || in intel_pmu_get_msr()
376 (pmc = get_gp_pmc(pmu, msr, MSR_IA32_PMC0))) { in intel_pmu_get_msr()
377 u64 val = pmc_read_counter(pmc); in intel_pmu_get_msr()
381 } else if ((pmc = get_fixed_pmc(pmu, msr))) { in intel_pmu_get_msr()
382 u64 val = pmc_read_counter(pmc); in intel_pmu_get_msr()
386 } else if ((pmc = get_gp_pmc(pmu, msr, MSR_P6_EVNTSEL0))) { in intel_pmu_get_msr()
387 msr_info->data = pmc->eventsel; in intel_pmu_get_msr()
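In intel_pmu_get_msr(), lines 375-376 accept either the legacy MSR_IA32_PERFCTR0 range or the full-width MSR_IA32_PMC0 aliases before reading the counter. The value handed back to the guest is, in general, truncated to the architectural counter width; a hedged sketch of that masking (the 48-bit width and the sample value are assumptions, not taken from the listing):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int width = 48;                             /* assumed GP counter width */
        uint64_t bitmask = (1ULL << width) - 1;
        uint64_t counter = 0xffff8123456789abULL;   /* internal 64-bit value */

        printf("guest reads %#llx\n", (unsigned long long)(counter & bitmask));
        return 0;
    }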
399 struct kvm_pmc *pmc; in intel_pmu_set_msr() local
436 if ((pmc = get_gp_pmc(pmu, msr, MSR_IA32_PERFCTR0)) || in intel_pmu_set_msr()
437 (pmc = get_gp_pmc(pmu, msr, MSR_IA32_PMC0))) { in intel_pmu_set_msr()
444 pmc->counter += data - pmc_read_counter(pmc); in intel_pmu_set_msr()
445 pmc_update_sample_period(pmc); in intel_pmu_set_msr()
447 } else if ((pmc = get_fixed_pmc(pmu, msr))) { in intel_pmu_set_msr()
448 pmc->counter += data - pmc_read_counter(pmc); in intel_pmu_set_msr()
449 pmc_update_sample_period(pmc); in intel_pmu_set_msr()
451 } else if ((pmc = get_gp_pmc(pmu, msr, MSR_P6_EVNTSEL0))) { in intel_pmu_set_msr()
452 if (data == pmc->eventsel) in intel_pmu_set_msr()
455 if ((pmc->idx == 2) && in intel_pmu_set_msr()
459 reprogram_gp_counter(pmc, data); in intel_pmu_set_msr()
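The write path (lines 444 and 448) does not overwrite pmc->counter directly; it adds the delta between the requested value and the current readout, so that the saved value plus the still-running perf event count yields exactly what the guest wrote, without stopping the event. A small model of that trick (all names here are stand-ins, not kernel API):

    #include <stdint.h>
    #include <stdio.h>

    struct model_pmc {
        uint64_t counter;   /* saved/offset part */
        uint64_t running;   /* stands in for the live perf event count */
    };

    static uint64_t model_read(struct model_pmc *pmc)
    {
        return pmc->counter + pmc->running;
    }

    static void model_write(struct model_pmc *pmc, uint64_t data)
    {
        /* Same shape as "pmc->counter += data - pmc_read_counter(pmc)". */
        pmc->counter += data - model_read(pmc);
    }

    int main(void)
    {
        struct model_pmc pmc = { .counter = 100, .running = 25 };

        model_write(&pmc, 0);   /* guest zeroes the counter */
        pmc.running += 7;       /* event keeps counting */
        printf("guest now reads %llu\n", (unsigned long long)model_read(&pmc));
        return 0;
    }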
588 struct kvm_pmc *pmc = NULL; in intel_pmu_reset() local
592 pmc = &pmu->gp_counters[i]; in intel_pmu_reset()
594 pmc_stop_counter(pmc); in intel_pmu_reset()
595 pmc->counter = pmc->eventsel = 0; in intel_pmu_reset()
599 pmc = &pmu->fixed_counters[i]; in intel_pmu_reset()
601 pmc_stop_counter(pmc); in intel_pmu_reset()
602 pmc->counter = 0; in intel_pmu_reset()
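Finally, intel_pmu_reset() stops every counter and clears it: GP counters lose both their value and event select (line 595), while fixed counters only have their value zeroed (line 602), since their enable bits live in the shared IA32_FIXED_CTR_CTRL rather than a per-counter event-select MSR. A minimal model of that pattern (types and counts are stand-ins):

    #include <stdint.h>
    #include <stdio.h>

    struct model_counter { uint64_t counter, eventsel; };

    int main(void)
    {
        struct model_counter gp[4] = { { 123, 0xc4 }, { 7, 0x3c } };
        struct model_counter fixed[3] = { { 55, 0 } };

        for (int i = 0; i < 4; i++)
            gp[i].counter = gp[i].eventsel = 0;   /* value and event select */
        for (int i = 0; i < 3; i++)
            fixed[i].counter = 0;                 /* value only */

        printf("gp0=%llu fixed0=%llu\n",
               (unsigned long long)gp[0].counter,
               (unsigned long long)fixed[0].counter);
        return 0;
    }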