Searched refs: config_base (Results 1 – 25 of 27) sorted by relevance

/arch/arm/mach-bcm/
bcm63xx_smp.c
38 unsigned long config_base; in scu_a9_enable() local
48 config_base = scu_a9_get_base(); in scu_a9_enable()
49 if (!config_base) { in scu_a9_enable()
54 scu_base = ioremap((phys_addr_t)config_base, CORTEX_A9_SCU_SIZE); in scu_a9_enable()
57 config_base, CORTEX_A9_SCU_SIZE); in scu_a9_enable()
platsmp.c
54 unsigned long config_base; in scu_a9_enable() local
63 config_base = scu_a9_get_base(); in scu_a9_enable()
64 if (!config_base) { in scu_a9_enable()
69 scu_base = ioremap((phys_addr_t)config_base, CORTEX_A9_SCU_SIZE); in scu_a9_enable()
72 config_base, CORTEX_A9_SCU_SIZE); in scu_a9_enable()
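
Both hits above are the same Cortex-A9 bring-up path: here config_base is not PMU state at all, just the physical base address of the Snoop Control Unit read back via scu_a9_get_base(), which is then ioremapped so the SCU can be enabled for SMP. A condensed sketch of that pattern follows; scu_a9_enable_sketch() is a hypothetical name, error handling is abbreviated, and CORTEX_A9_SCU_SIZE is the driver-local constant visible in the hits.

#include <linux/errno.h>
#include <linux/io.h>
#include <asm/smp_scu.h>	/* scu_a9_get_base(), scu_enable() */

static int scu_a9_enable_sketch(void)
{
	unsigned long config_base;
	void __iomem *scu_base;

	config_base = scu_a9_get_base();	/* physical address from the CP15 CBAR */
	if (!config_base)
		return -ENOENT;

	scu_base = ioremap((phys_addr_t)config_base, CORTEX_A9_SCU_SIZE);
	if (!scu_base)
		return -ENOMEM;

	scu_enable(scu_base);			/* enable coherency before bringing up CPUs */
	iounmap(scu_base);
	return 0;
}
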
/arch/arm/kernel/
perf_event_v7.c
902 armv7_pmnc_write_evtsel(idx, hwc->config_base); in armv7pmu_enable_event()
1039 unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT; in armv7pmu_get_event_idx()
1068 unsigned long config_base = 0; in armv7pmu_set_event_filter() local
1073 config_base |= ARMV7_EXCLUDE_USER; in armv7pmu_set_event_filter()
1075 config_base |= ARMV7_EXCLUDE_PL1; in armv7pmu_set_event_filter()
1077 config_base |= ARMV7_INCLUDE_HYP; in armv7pmu_set_event_filter()
1083 event->config_base = config_base; in armv7pmu_set_event_filter()
1410 static void krait_evt_setup(int idx, u32 config_base) in krait_evt_setup() argument
1415 unsigned int region = EVENT_REGION(config_base); in krait_evt_setup()
1416 unsigned int group = EVENT_GROUP(config_base); in krait_evt_setup()
[all …]
perf_event_xscale.c
220 evt = (hwc->config_base << XSCALE1_COUNT0_EVT_SHFT) | in xscale1pmu_enable_event()
225 evt = (hwc->config_base << XSCALE1_COUNT1_EVT_SHFT) | in xscale1pmu_enable_event()
280 if (XSCALE_PERFCTR_CCNT == hwc->config_base) { in xscale1pmu_get_event_idx()
564 evtsel |= hwc->config_base << XSCALE2_COUNT0_EVT_SHFT; in xscale2pmu_enable_event()
569 evtsel |= hwc->config_base << XSCALE2_COUNT1_EVT_SHFT; in xscale2pmu_enable_event()
574 evtsel |= hwc->config_base << XSCALE2_COUNT2_EVT_SHFT; in xscale2pmu_enable_event()
579 evtsel |= hwc->config_base << XSCALE2_COUNT3_EVT_SHFT; in xscale2pmu_enable_event()
perf_event_v6.c
282 evt = (hwc->config_base << ARMV6_PMCR_EVT_COUNT0_SHIFT) | in armv6pmu_enable_event()
286 evt = (hwc->config_base << ARMV6_PMCR_EVT_COUNT1_SHIFT) | in armv6pmu_enable_event()
395 if (ARMV6_PERFCTR_CPU_CYCLES == hwc->config_base) { in armv6pmu_get_event_idx()
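
The perf_event_v7.c hits (lines 1068-1083) show the more common perf use of config_base on ARM: armv7pmu_set_event_filter() accumulates privilege-level filter bits in it, and armv7pmu_enable_event() later writes hwc->config_base into the event-type register (line 902). A hedged reconstruction of that filter logic, with the attr checks filled in from the usual perf exclude_* semantics and possibly differing in detail from the file:

static int armv7pmu_set_event_filter(struct hw_perf_event *event,
				     struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	if (attr->exclude_idle)
		return -EPERM;			/* idle filtering is not supported */
	if (attr->exclude_user)
		config_base |= ARMV7_EXCLUDE_USER;
	if (attr->exclude_kernel)
		config_base |= ARMV7_EXCLUDE_PL1;
	if (!attr->exclude_hv)
		config_base |= ARMV7_INCLUDE_HYP;

	/* The filter bits become part of the event type written at enable time. */
	event->config_base = config_base;
	return 0;
}
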
/arch/s390/kernel/
perf_cpum_cf.c
111 switch (hwc->config_base) { in validate_ctr_version()
166 ctrs_state = cpumf_state_ctl[hwc->config_base]; in validate_ctr_auth()
398 hwc->config_base = set; in __hw_perf_event_init()
509 ctr_set_enable(&cpuhw->state, hwc->config_base); in cpumf_pmu_start()
510 ctr_set_start(&cpuhw->state, hwc->config_base); in cpumf_pmu_start()
520 atomic_inc(&cpuhw->ctr_set[hwc->config_base]); in cpumf_pmu_start()
533 if (!atomic_dec_return(&cpuhw->ctr_set[hwc->config_base])) in cpumf_pmu_stop()
534 ctr_set_stop(&cpuhw->state, hwc->config_base); in cpumf_pmu_stop()
557 ctr_set_enable(&cpuhw->state, event->hw.config_base); in cpumf_pmu_add()
582 if (!atomic_read(&cpuhw->ctr_set[event->hw.config_base])) in cpumf_pmu_del()
[all …]
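
perf_cpum_cf.c uses config_base differently again: on s390 it holds the number of the counter set an event belongs to, so starting or stopping an event enables, starts, or stops the whole set and refcounts its users (lines 509-534 above). A condensed sketch under those assumptions; cf_event_start()/cf_event_stop() are hypothetical names for the fragments shown, and the struct cpu_hw_events type is assumed from the cpuhw references in the hits.

static void cf_event_start(struct cpu_hw_events *cpuhw, struct hw_perf_event *hwc)
{
	ctr_set_enable(&cpuhw->state, hwc->config_base);	/* authorize the counter set */
	ctr_set_start(&cpuhw->state, hwc->config_base);		/* start it counting */
	atomic_inc(&cpuhw->ctr_set[hwc->config_base]);		/* track users of the set */
}

static void cf_event_stop(struct cpu_hw_events *cpuhw, struct hw_perf_event *hwc)
{
	/* Stop the set only when its last user goes away. */
	if (!atomic_dec_return(&cpuhw->ctr_set[hwc->config_base]))
		ctr_set_stop(&cpuhw->state, hwc->config_base);
}
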
/arch/arm64/kernel/
perf_event.c
638 armv8pmu_write_evtype(idx, hwc->config_base); in armv8pmu_enable_event()
769 unsigned long evtype = hwc->config_base & ARMV8_PMU_EVTYPE_EVENT; in armv8pmu_get_event_idx()
795 unsigned long config_base = 0; in armv8pmu_set_event_filter() local
808 config_base |= ARMV8_PMU_INCLUDE_EL2; in armv8pmu_set_event_filter()
811 config_base |= ARMV8_PMU_EXCLUDE_EL1; in armv8pmu_set_event_filter()
813 config_base |= ARMV8_PMU_INCLUDE_EL2; in armv8pmu_set_event_filter()
816 config_base |= ARMV8_PMU_EXCLUDE_EL0; in armv8pmu_set_event_filter()
822 event->config_base = config_base; in armv8pmu_set_event_filter()
829 unsigned long evtype = event->hw.config_base & ARMV8_PMU_EVTYPE_EVENT; in armv8pmu_filter_match()
/arch/x86/events/amd/
ibs.c
322 hwc->config_base = perf_ibs->msr; in perf_ibs_init()
370 rdmsrl(event->hw.config_base, *config); in perf_ibs_event_update()
378 wrmsrl(hwc->config_base, hwc->config | config | perf_ibs->enable_mask); in perf_ibs_enable_event()
393 wrmsrl(hwc->config_base, config); in perf_ibs_disable_event()
395 wrmsrl(hwc->config_base, config); in perf_ibs_disable_event()
445 rdmsrl(hwc->config_base, config); in perf_ibs_stop()
611 msr = hwc->config_base; in perf_ibs_handle_irq()
uncore.c
112 wrmsrl(hwc->config_base, (hwc->config | ARCH_PERFMON_EVENTSEL_ENABLE)); in amd_uncore_start()
120 wrmsrl(hwc->config_base, hwc->config); in amd_uncore_stop()
159 hwc->config_base = uncore->msr_base + (2 * hwc->idx); in amd_uncore_add()
/arch/x86/events/intel/
p6.c
164 (void)wrmsrl_safe(hwc->config_base, val); in p6_pmu_disable_event()
181 (void)wrmsrl_safe(hwc->config_base, val); in p6_pmu_enable_event()
knc.c
185 (void)wrmsrl_safe(hwc->config_base + hwc->idx, val); in knc_pmu_disable_event()
196 (void)wrmsrl_safe(hwc->config_base + hwc->idx, val); in knc_pmu_enable_event()
uncore_nhmex.c
242 wrmsrl(event->hw.config_base, 0); in nhmex_uncore_msr_disable_event()
250 wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0); in nhmex_uncore_msr_enable_event()
252 wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22); in nhmex_uncore_msr_enable_event()
254 wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0); in nhmex_uncore_msr_enable_event()
387 wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0 | in nhmex_bbox_msr_enable_event()
474 wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22); in nhmex_sbox_msr_enable_event()
862 wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0); in nhmex_mbox_msr_enable_event()
1147 wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0 | in nhmex_rbox_msr_enable_event()
uncore_snb.c
109 wrmsrl(hwc->config_base, hwc->config | SNB_UNC_CTL_EN); in snb_uncore_msr_enable_event()
111 wrmsrl(hwc->config_base, SNB_UNC_CTL_EN); in snb_uncore_msr_enable_event()
116 wrmsrl(event->hw.config_base, 0); in snb_uncore_msr_disable_event()
872 wrmsrl(hwc->config_base, hwc->config | SNB_UNC_CTL_EN); in nhm_uncore_msr_enable_event()
874 wrmsrl(hwc->config_base, NHM_UNC_FIXED_CTR_CTL_EN); in nhm_uncore_msr_enable_event()
p4.c
860 rdmsrl(hwc->config_base, v); in p4_pmu_clear_cccr_ovf()
862 wrmsrl(hwc->config_base, v & ~P4_CCCR_OVF); in p4_pmu_clear_cccr_ovf()
912 (void)wrmsrl_safe(hwc->config_base, in p4_pmu_disable_event()
981 (void)wrmsrl_safe(hwc->config_base, in p4_pmu_enable_event()
uncore_snbep.c
429 pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); in snbep_uncore_pci_enable_event()
437 pci_write_config_dword(pdev, hwc->config_base, hwc->config); in snbep_uncore_pci_disable_event()
494 wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); in snbep_uncore_msr_enable_event()
502 wrmsrl(hwc->config_base, hwc->config); in snbep_uncore_msr_disable_event()
1075 pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); in snbep_qpi_enable_event()
1578 wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); in ivbep_cbox_enable_event()
2089 pci_write_config_dword(pdev, hwc->config_base, in knl_uncore_imc_enable_event()
2092 pci_write_config_dword(pdev, hwc->config_base, in knl_uncore_imc_enable_event()
2562 wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); in hswep_cbox_enable_event()
3461 wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); in skx_iio_enable_event()
core.c
2052 rdmsrl(hwc->config_base, ctrl_val); in intel_pmu_disable_fixed()
2054 wrmsrl(hwc->config_base, ctrl_val); in intel_pmu_disable_fixed()
2077 if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) { in intel_pmu_disable_event()
2121 rdmsrl(hwc->config_base, ctrl_val); in intel_pmu_enable_fixed()
2124 wrmsrl(hwc->config_base, ctrl_val); in intel_pmu_enable_fixed()
2148 if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) { in intel_pmu_enable_event()
/arch/mips/kernel/
perf_event_mipsxx.c
337 (evt->config_base & M_PERFCTL_CONFIG_MASK) | in mipsxx_pmu_enable_event()
1280 hwc->config_base |= M_TC_EN_ALL; in check_and_calc_range()
1286 hwc->config_base |= M_PERFCTL_VPEID(event->cpu); in check_and_calc_range()
1287 hwc->config_base |= M_TC_EN_VPE; in check_and_calc_range()
1290 hwc->config_base |= M_TC_EN_ALL; in check_and_calc_range()
1332 hwc->config_base = MIPS_PERFCTRL_IE; in __hw_perf_event_init()
1343 hwc->config_base |= MIPS_PERFCTRL_U; in __hw_perf_event_init()
1345 hwc->config_base |= MIPS_PERFCTRL_K; in __hw_perf_event_init()
1347 hwc->config_base |= MIPS_PERFCTRL_EXL; in __hw_perf_event_init()
1350 hwc->config_base |= MIPS_PERFCTRL_S; in __hw_perf_event_init()
[all …]
/arch/powerpc/perf/
core-fsl-emb.c
344 write_pmlca(i, event->hw.config_base); in fsl_emb_pmu_add()
553 event->hw.config_base = PMLCA_CE | PMLCA_FCM1 | in fsl_emb_pmu_event_init()
557 event->hw.config_base |= PMLCA_FCU; in fsl_emb_pmu_event_init()
559 event->hw.config_base |= PMLCA_FCS; in fsl_emb_pmu_event_init()
/arch/s390/include/asm/
perf_event.h
70 #define SAMPL_FLAGS(hwc) ((hwc)->config_base)
/arch/alpha/kernel/
perf_event.c
200 event[0]->hw.config_base = config; in ev67_check_constraints()
203 event[1]->hw.config_base = config; in ev67_check_constraints()
424 cpuc->config = cpuc->event[0]->hw.config_base; in maybe_change_configuration()
669 hwc->config_base = 0; in __hw_perf_event_init()
/arch/arm/mm/
cache-l2x0-pmu.c
212 __l2x0_pmu_event_enable(hw->idx, hw->config_base); in l2x0_pmu_event_start()
331 hw->config_base = event->attr.config; in l2x0_pmu_event_init()
/arch/sparc/kernel/
perf_event.c
977 cpuc->pcr[0] |= cpuc->event[0]->hw.config_base; in calculate_single_pcr()
1010 cpuc->pcr[idx] |= cp->hw.config_base; in calculate_multiple_pcrs()
1465 hwc->config_base = sparc_pmu->irq_bit; in sparc_pmu_event_init()
1467 hwc->config_base |= sparc_pmu->user_bit; in sparc_pmu_event_init()
1469 hwc->config_base |= sparc_pmu->priv_bit; in sparc_pmu_event_init()
1471 hwc->config_base |= sparc_pmu->hv_bit; in sparc_pmu_event_init()
/arch/x86/events/
perf_event.h
782 wrmsrl(hwc->config_base, (hwc->config | enable_mask) & ~disable_mask); in __x86_pmu_enable_event()
797 wrmsrl(hwc->config_base, hwc->config); in x86_pmu_disable_event()
core.c
1008 hwc->config_base = 0; in x86_assign_hw_event()
1011 hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL; in x86_assign_hw_event()
1015 hwc->config_base = x86_pmu_config_addr(hwc->idx); in x86_assign_hw_event()
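
The generic x86 code above shows yet another meaning: config_base is not a bitmask but the address of the counter's control register. x86_assign_hw_event() (core.c 1008-1015) points it at the shared fixed-counter control MSR or at the per-counter event-select MSR returned by x86_pmu_config_addr(), after which enabling or disabling an event is just a write to that address (perf_event.h 782/797). A simplified sketch of that write path; the hypothetical pmu_*_sketch() helpers drop the extra-register and host/guest-mask handling of the real inlines.

static inline void pmu_enable_sketch(struct hw_perf_event *hwc, u64 enable_mask)
{
	/* The event-select (or fixed-ctr control) MSR address lives in config_base. */
	wrmsrl(hwc->config_base, hwc->config | enable_mask);
}

static inline void pmu_disable_sketch(struct hw_perf_event *hwc)
{
	/* Writing the config back without the enable bit stops the counter. */
	wrmsrl(hwc->config_base, hwc->config);
}
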
/arch/mips/include/asm/sn/
klconfig.h
128 unsigned long config_base; member
