Lines Matching refs:pmu
191 struct arm_pmu *pmu; in arm_pmu_acpi_find_alloc_pmu() local
195 pmu = per_cpu(probed_pmus, cpu); in arm_pmu_acpi_find_alloc_pmu()
196 if (!pmu || pmu->acpi_cpuid != cpuid) in arm_pmu_acpi_find_alloc_pmu()
199 return pmu; in arm_pmu_acpi_find_alloc_pmu()
202 pmu = armpmu_alloc_atomic(); in arm_pmu_acpi_find_alloc_pmu()
203 if (!pmu) { in arm_pmu_acpi_find_alloc_pmu()
209 pmu->acpi_cpuid = cpuid; in arm_pmu_acpi_find_alloc_pmu()
211 return pmu; in arm_pmu_acpi_find_alloc_pmu()
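
The hits above all fall in arm_pmu_acpi_find_alloc_pmu(), which implements a find-or-allocate pattern: the per-CPU probed_pmus entries are scanned for an arm_pmu whose acpi_cpuid matches the current CPU, and a fresh instance is allocated only when no match exists. A minimal sketch of how the matched fragments fit together; the for_each_possible_cpu() loop, the read_cpuid_id() call and the warning on allocation failure are assumptions filled in around the listed lines, not quoted from them.

    static struct arm_pmu *arm_pmu_acpi_find_alloc_pmu(void)
    {
            unsigned long cpuid = read_cpuid_id();  /* assumed: ID of the current CPU */
            struct arm_pmu *pmu;
            int cpu;

            /* Reuse a PMU already probed for a CPU with the same ID. */
            for_each_possible_cpu(cpu) {
                    pmu = per_cpu(probed_pmus, cpu);
                    if (!pmu || pmu->acpi_cpuid != cpuid)
                            continue;

                    return pmu;
            }

            /* No match: allocate a new arm_pmu for this CPU's ID. */
            pmu = armpmu_alloc_atomic();
            if (!pmu) {
                    pr_warn("Unable to allocate PMU for CPU%d\n", smp_processor_id());
                    return NULL;
            }

            pmu->acpi_cpuid = cpuid;

            return pmu;
    }

The atomic allocation variant is presumably used because this path is reached from a CPU-starting hotplug callback, where sleeping allocations are not allowed.
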
218 static bool pmu_irq_matches(struct arm_pmu *pmu, int irq) in pmu_irq_matches() argument
220 struct pmu_hw_events __percpu *hw_events = pmu->hw_events; in pmu_irq_matches()
226 for_each_cpu(cpu, &pmu->supported_cpus) { in pmu_irq_matches()
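
pmu_irq_matches() decides whether a candidate IRQ is compatible with the per-CPU IRQs already recorded for the CPUs in pmu->supported_cpus. A sketch under the assumption that those IRQs live in hw_events->irq and that only per-CPU (PPI) interrupts are treated as a clash; everything beyond the two matched lines is reconstructed, not quoted.

    static bool pmu_irq_matches(struct arm_pmu *pmu, int irq)
    {
            struct pmu_hw_events __percpu *hw_events = pmu->hw_events;
            int cpu;

            if (!irq)
                    return true;

            for_each_cpu(cpu, &pmu->supported_cpus) {
                    int other_irq = per_cpu(hw_events->irq, cpu);  /* assumed field */

                    if (!other_irq || other_irq == irq)
                            continue;

                    /* Assumed rule: two different SPIs can coexist, mismatched PPIs cannot. */
                    if (!irq_is_percpu_devid(irq) && !irq_is_percpu_devid(other_irq))
                            continue;

                    pr_warn("mismatched PPIs detected\n");
                    return false;
            }

            return true;
    }
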
254 struct arm_pmu *pmu; in arm_pmu_acpi_cpu_starting() local
264 pmu = arm_pmu_acpi_find_alloc_pmu(); in arm_pmu_acpi_cpu_starting()
265 if (!pmu) in arm_pmu_acpi_cpu_starting()
268 per_cpu(probed_pmus, cpu) = pmu; in arm_pmu_acpi_cpu_starting()
270 if (pmu_irq_matches(pmu, irq)) { in arm_pmu_acpi_cpu_starting()
271 hw_events = pmu->hw_events; in arm_pmu_acpi_cpu_starting()
275 cpumask_set_cpu(cpu, &pmu->supported_cpus); in arm_pmu_acpi_cpu_starting()
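
The arm_pmu_acpi_cpu_starting() hits trace the hotplug path: the incoming CPU finds or allocates its PMU, records it in probed_pmus, wires up its IRQ only when pmu_irq_matches() says it is compatible, and finally marks itself in supported_cpus. A sketch of the surrounding flow; the early return for already-probed CPUs and the per-CPU pmu_irqs lookup are assumptions around the matched lines.

    static int arm_pmu_acpi_cpu_starting(unsigned int cpu)
    {
            struct pmu_hw_events __percpu *hw_events;
            struct arm_pmu *pmu;
            int irq;

            /* Assumed: nothing to do if this CPU was already probed. */
            if (per_cpu(probed_pmus, cpu))
                    return 0;

            irq = per_cpu(pmu_irqs, cpu);       /* assumed per-CPU IRQ table */

            pmu = arm_pmu_acpi_find_alloc_pmu();
            if (!pmu)
                    return -ENOMEM;

            per_cpu(probed_pmus, cpu) = pmu;

            /* Only record the IRQ if it doesn't conflict with the PMU's existing ones. */
            if (pmu_irq_matches(pmu, irq)) {
                    hw_events = pmu->hw_events;
                    per_cpu(hw_events->irq, cpu) = irq;
            }

            cpumask_set_cpu(cpu, &pmu->supported_cpus);

            return 0;
    }
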
306 struct arm_pmu *pmu = per_cpu(probed_pmus, cpu); in arm_pmu_acpi_probe() local
309 if (!pmu || pmu->name) in arm_pmu_acpi_probe()
312 ret = init_fn(pmu); in arm_pmu_acpi_probe()
321 base_name = pmu->name; in arm_pmu_acpi_probe()
322 pmu->name = kasprintf(GFP_KERNEL, "%s_%d", base_name, pmu_idx++); in arm_pmu_acpi_probe()
323 if (!pmu->name) { in arm_pmu_acpi_probe()
328 ret = armpmu_register(pmu); in arm_pmu_acpi_probe()
331 kfree(pmu->name); in arm_pmu_acpi_probe()
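
The arm_pmu_acpi_probe() matches cover registration: each probed-but-unnamed PMU is initialised through the caller-supplied init_fn, given a unique name by appending an index to the base name that init_fn set (kasprintf "%s_%d"), and registered, with the name freed again if registration fails. A sketch of that per-CPU loop; the -ENODEV handling around init_fn is an assumption, and the driver's warning messages are omitted for brevity.

    int arm_pmu_acpi_probe(armpmu_init_fn init_fn)
    {
            int pmu_idx = 0;
            int cpu, ret;

            for_each_possible_cpu(cpu) {
                    struct arm_pmu *pmu = per_cpu(probed_pmus, cpu);
                    char *base_name;

                    /* Skip CPUs with no PMU, or whose PMU already has a name. */
                    if (!pmu || pmu->name)
                            continue;

                    ret = init_fn(pmu);
                    if (ret == -ENODEV)     /* assumed: PMU not handled by this driver */
                            continue;
                    else if (ret)
                            return ret;

                    /* Make the registered name unique: "<base>_<index>". */
                    base_name = pmu->name;
                    pmu->name = kasprintf(GFP_KERNEL, "%s_%d", base_name, pmu_idx++);
                    if (!pmu->name)
                            return -ENOMEM;

                    ret = armpmu_register(pmu);
                    if (ret) {
                            kfree(pmu->name);
                            return ret;
                    }
            }

            return 0;
    }

The index suffix keeps names unique when several heterogeneous PMUs (for example on big.LITTLE systems) are registered from the same base init function.
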