/arch/powerpc/perf/ |
D | core-fsl-emb.c |
     23  struct perf_event *event[MAX_HWEVENTS];  member
    162  static void fsl_emb_pmu_read(struct perf_event *event)  in fsl_emb_pmu_read() argument
    166  if (event->hw.state & PERF_HES_STOPPED)  in fsl_emb_pmu_read()
    175  prev = local64_read(&event->hw.prev_count);  in fsl_emb_pmu_read()
    177  val = read_pmc(event->hw.idx);  in fsl_emb_pmu_read()
    178  } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev);  in fsl_emb_pmu_read()
    182  local64_add(delta, &event->count);  in fsl_emb_pmu_read()
    183  local64_sub(delta, &event->hw.period_left);  in fsl_emb_pmu_read()
    255  struct perf_event *event;  in collect_events() local
    263  for_each_sibling_event(event, group) {  in collect_events()
    [all …]
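The fsl_emb_pmu_read() hits above show the usual perf counter-update loop: snapshot the saved previous value, re-read the hardware counter, retry with a compare-and-swap until no concurrent update slipped in, then fold the delta into event->count and period_left. A minimal userspace sketch of that pattern, assuming C11 atomics in place of the kernel's local64_t helpers and a hypothetical read_hw_counter() standing in for read_pmc():

    #include <stdatomic.h>
    #include <stdint.h>

    extern uint64_t read_hw_counter(int idx);   /* hypothetical stand-in for read_pmc() */

    struct counter_state {
        _Atomic uint64_t prev_count;    /* last raw value already folded in */
        _Atomic int64_t  count;         /* accumulated event count */
        _Atomic int64_t  period_left;   /* remaining sampling period */
        int              idx;           /* hardware counter index */
    };

    static void counter_read(struct counter_state *c)
    {
        uint64_t prev, val;
        int64_t delta;

        /* Retry until prev_count is still the value the delta was based on. */
        do {
            prev = atomic_load(&c->prev_count);
            val  = read_hw_counter(c->idx);
        } while (!atomic_compare_exchange_strong(&c->prev_count, &prev, val));

        /* Assumes a 32-bit counter that may wrap at most once between reads. */
        delta = (val - prev) & 0xffffffffull;
        atomic_fetch_add(&c->count, delta);
        atomic_fetch_sub(&c->period_left, delta);
    }

The CAS, rather than a lock, is what lets concurrent readers race on the same counter without losing a delta: whichever reader wins installs the new raw value, and the loser simply retries against it.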
|
D | isa207-common.c |
     11  PMU_FORMAT_ATTR(event, "config:0-49");
     45  static inline bool event_is_fab_match(u64 event)  in event_is_fab_match() argument
     48  event &= 0xff0fe;  in event_is_fab_match()
     51  return (event == 0x30056 || event == 0x4f052);  in event_is_fab_match()
     54  static bool is_event_valid(u64 event)  in is_event_valid() argument
     63  return !(event & ~valid_mask);  in is_event_valid()
     66  static inline bool is_event_marked(u64 event)  in is_event_marked() argument
     68  if (event & EVENT_IS_MARKED)  in is_event_marked()
     74  static unsigned long sdar_mod_val(u64 event)  in sdar_mod_val() argument
     77  return p10_SDAR_MODE(event);  in sdar_mod_val()
    [all …]
|
D | core-book3s.c |
     37  struct perf_event *event[MAX_HWEVENTS];  member
    104  static inline void perf_get_data_addr(struct perf_event *event, struct pt_regs *regs, u64 *addrp) {…  in perf_get_data_addr() argument
    119  static bool is_ebb_event(struct perf_event *event) { return false; }  in is_ebb_event() argument
    120  static int ebb_event_check(struct perf_event *event) { return 0; }  in ebb_event_check() argument
    121  static void ebb_event_add(struct perf_event *event) { }  in ebb_event_add() argument
    128  static inline void power_pmu_bhrb_enable(struct perf_event *event) {}  in power_pmu_bhrb_enable() argument
    129  static inline void power_pmu_bhrb_disable(struct perf_event *event) {}  in power_pmu_bhrb_disable() argument
    131  static inline void power_pmu_bhrb_read(struct perf_event *event, struct cpu_hw_events *cpuhw) {}  in power_pmu_bhrb_read() argument
    186  static inline void perf_get_data_addr(struct perf_event *event, struct pt_regs *regs, u64 *addrp)  in perf_get_data_addr() argument
    211  if (is_kernel_addr(mfspr(SPRN_SDAR)) && event->attr.exclude_kernel)  in perf_get_data_addr()
    [all …]
|
D | power5+-pmu.c |
    134  static int power5p_get_constraint(u64 event, unsigned long *maskp,  in power5p_get_constraint() argument
    141  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power5p_get_constraint()
    148  if (pmc >= 5 && !(event == 0x500009 || event == 0x600005))  in power5p_get_constraint()
    151  if (event & PM_BUSEVENT_MSK) {  in power5p_get_constraint()
    152  unit = (event >> PM_UNIT_SH) & PM_UNIT_MSK;  in power5p_get_constraint()
    159  byte = (event >> PM_BYTE_SH) & PM_BYTE_MSK;  in power5p_get_constraint()
    168  bit = event & 7;  in power5p_get_constraint()
    172  value |= (unsigned long)((event >> PM_GRS_SH) & fmask)  in power5p_get_constraint()
    189  static int power5p_limited_pmc_event(u64 event)  in power5p_limited_pmc_event() argument
    191  int pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power5p_limited_pmc_event()
    [all …]
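power5p_get_constraint() above decodes the raw 64-bit event code by shifting and masking fixed bit-fields out of it (PMC number, unit, byte lane, GRS bits). A short sketch of that decoding style; the shift/mask values below are invented placeholders, not the real PM_* constants:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative layout only; real PM_*_SH/PM_*_MSK values are CPU-specific. */
    #define PM_PMC_SH   20
    #define PM_PMC_MSK  0xfUL
    #define PM_UNIT_SH  16
    #define PM_UNIT_MSK 0xfUL
    #define PM_BYTE_SH  12
    #define PM_BYTE_MSK 0x3UL

    struct decoded_event {
        unsigned int pmc;    /* which PMC the event is hard-wired to (0 = any) */
        unsigned int unit;   /* functional unit being monitored */
        unsigned int byte;   /* bus byte lane, only meaningful for bus events */
    };

    static struct decoded_event decode_event(uint64_t event)
    {
        struct decoded_event d;

        d.pmc  = (event >> PM_PMC_SH)  & PM_PMC_MSK;
        d.unit = (event >> PM_UNIT_SH) & PM_UNIT_MSK;
        d.byte = (event >> PM_BYTE_SH) & PM_BYTE_MSK;
        return d;
    }

    int main(void)
    {
        struct decoded_event d = decode_event(0x36d052);

        printf("pmc=%u unit=%u byte=%u\n", d.pmc, d.unit, d.byte);
        return 0;
    }

The decoded fields are then turned into scheduling constraints (which counters and units the event can coexist with), which is what the maskp/valp outputs of the real function carry.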
|
D | hv-gpci.c |
    186  static u64 h_gpci_get_value(struct perf_event *event)  in h_gpci_get_value() argument
    189  unsigned long ret = single_gpci_request(event_get_request(event),  in h_gpci_get_value()
    190  event_get_starting_index(event),  in h_gpci_get_value()
    191  event_get_secondary_index(event),  in h_gpci_get_value()
    192  event_get_counter_info_version(event),  in h_gpci_get_value()
    193  event_get_offset(event),  in h_gpci_get_value()
    194  event_get_length(event),  in h_gpci_get_value()
    201  static void h_gpci_event_update(struct perf_event *event)  in h_gpci_event_update() argument
    204  u64 now = h_gpci_get_value(event);  in h_gpci_event_update()
    205  prev = local64_xchg(&event->hw.prev_count, now);  in h_gpci_event_update()
    [all …]
|
D | 8xx-pmu.c |
     49  static int event_type(struct perf_event *event)  in event_type() argument
     51  switch (event->attr.type) {  in event_type()
     53  if (event->attr.config == PERF_COUNT_HW_CPU_CYCLES)  in event_type()
     55  if (event->attr.config == PERF_COUNT_HW_INSTRUCTIONS)  in event_type()
     59  if (event->attr.config == ITLB_LOAD_MISS)  in event_type()
     61  if (event->attr.config == DTLB_LOAD_MISS)  in event_type()
     72  static int mpc8xx_pmu_event_init(struct perf_event *event)  in mpc8xx_pmu_event_init() argument
     74  int type = event_type(event);  in mpc8xx_pmu_event_init()
     81  static int mpc8xx_pmu_add(struct perf_event *event, int flags)  in mpc8xx_pmu_add() argument
     83  int type = event_type(event);  in mpc8xx_pmu_add()
    [all …]
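event_type() in 8xx-pmu.c has the usual shape of an event_init helper: dispatch on attr.type, then on attr.config, and return either a hardware counter id or a "not supported" marker. A hedged sketch of that shape; the enum and the cache-event config values are illustrative only:

    #include <stdint.h>

    enum hw_counter { CTR_UNSUPPORTED = -1, CTR_CYCLES, CTR_INSNS, CTR_ITLB_MISS, CTR_DTLB_MISS };

    /* Same dispatch shape as event_type(): first attr.type, then attr.config. */
    static enum hw_counter classify(uint32_t type, uint64_t config)
    {
        switch (type) {
        case 0:                         /* PERF_TYPE_HARDWARE */
            if (config == 0)            /* PERF_COUNT_HW_CPU_CYCLES */
                return CTR_CYCLES;
            if (config == 1)            /* PERF_COUNT_HW_INSTRUCTIONS */
                return CTR_INSNS;
            break;
        case 3:                         /* PERF_TYPE_HW_CACHE */
            if (config == 0x10004)      /* assumed ITLB read-miss encoding */
                return CTR_ITLB_MISS;
            if (config == 0x10003)      /* assumed DTLB read-miss encoding */
                return CTR_DTLB_MISS;
            break;
        }
        return CTR_UNSUPPORTED;
    }

Both mpc8xx_pmu_event_init() and mpc8xx_pmu_add() call the classifier, so an unsupported event is rejected once at init time and never reaches the add path.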
|
D | power5-pmu.c |
    138  static int power5_get_constraint(u64 event, unsigned long *maskp,  in power5_get_constraint() argument
    146  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power5_get_constraint()
    155  else if (event != 0x500009 && event != 0x600005)  in power5_get_constraint()
    158  if (event & PM_BUSEVENT_MSK) {  in power5_get_constraint()
    159  unit = (event >> PM_UNIT_SH) & PM_UNIT_MSK;  in power5_get_constraint()
    166  byte = (event >> PM_BYTE_SH) & PM_BYTE_MSK;  in power5_get_constraint()
    175  bit = event & 7;  in power5_get_constraint()
    179  value |= (unsigned long)((event >> PM_GRS_SH) & fmask)  in power5_get_constraint()
    225  static int find_alternative(u64 event)  in find_alternative() argument
    230  if (event < event_alternatives[i][0])  in find_alternative()
    [all …]
|
D | mpc7450-pmu.c |
     37  static int mpc7450_classify_event(u32 event)  in mpc7450_classify_event() argument
     41  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in mpc7450_classify_event()
     47  event &= PM_PMCSEL_MSK;  in mpc7450_classify_event()
     48  if (event <= 1)  in mpc7450_classify_event()
     50  if (event <= 7)  in mpc7450_classify_event()
     52  if (event <= 13)  in mpc7450_classify_event()
     54  if (event <= 22)  in mpc7450_classify_event()
     77  static int mpc7450_threshold_use(u32 event)  in mpc7450_threshold_use() argument
     81  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in mpc7450_threshold_use()
     82  sel = event & PM_PMCSEL_MSK;  in mpc7450_threshold_use()
    [all …]
|
D | power7-pmu.c |
     83  static int power7_get_constraint(u64 event, unsigned long *maskp,  in power7_get_constraint() argument
     89  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power7_get_constraint()
     96  if (pmc >= 5 && !(event == 0x500fa || event == 0x600f4))  in power7_get_constraint()
    105  unit = (event >> PM_UNIT_SH) & PM_UNIT_MSK;  in power7_get_constraint()
    108  int l2sel = (event >> PM_L2SEL_SH) & PM_L2SEL_MSK;  in power7_get_constraint()
    130  static int find_alternative(u64 event)  in find_alternative() argument
    135  if (event < event_alternatives[i][0])  in find_alternative()
    138  if (event == event_alternatives[i][j])  in find_alternative()
    144  static s64 find_alternative_decode(u64 event)  in find_alternative_decode() argument
    149  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in find_alternative_decode()
    [all …]
|
D | imc-pmu.c |
     58  static struct imc_pmu *imc_event_to_pmu(struct perf_event *event)  in imc_event_to_pmu() argument
     60  return container_of(event->pmu, struct imc_pmu, pmu);  in imc_event_to_pmu()
     63  PMU_FORMAT_ATTR(event, "config:0-61");
    153  u32 base, struct imc_events *event)  in imc_parse_event() argument
    161  event->value = base + reg;  in imc_parse_event()
    166  event->name = kasprintf(GFP_KERNEL, "%s%s", prefix, s);  in imc_parse_event()
    167  if (!event->name)  in imc_parse_event()
    174  event->scale = kstrdup(s, GFP_KERNEL);  in imc_parse_event()
    175  if (!event->scale)  in imc_parse_event()
    183  event->unit = kstrdup(s, GFP_KERNEL);  in imc_parse_event()
    [all …]
|
/arch/powerpc/kernel/ |
D | eeh_event.c |
     42  struct eeh_event *event;  in eeh_event_handler() local
     50  event = NULL;  in eeh_event_handler()
     52  event = list_entry(eeh_eventlist.next,  in eeh_event_handler()
     54  list_del(&event->list);  in eeh_event_handler()
     57  if (!event)  in eeh_event_handler()
     61  if (event->pe)  in eeh_event_handler()
     62  eeh_handle_normal_event(event->pe);  in eeh_event_handler()
     66  kfree(event);  in eeh_event_handler()
    105  struct eeh_event *event;  in __eeh_send_failure_event() local
    107  event = kzalloc(sizeof(*event), GFP_ATOMIC);  in __eeh_send_failure_event()
    [all …]
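eeh_event_handler() drains a list of heap-allocated eeh_event entries: detach the head, handle the PE it refers to, free it, repeat. A simplified sketch of that consumer with a plain singly linked list instead of the kernel's list_head (all names here are invented, and the real handler also sleeps on a semaphore and takes a lock around the list):

    #include <stdlib.h>

    struct pe;                              /* opaque "error domain" handle */
    void handle_normal_event(struct pe *pe);

    struct fail_event {
        struct fail_event *next;
        struct pe *pe;
    };

    static struct fail_event *event_list;   /* pending failure events */

    static void drain_failure_events(void)
    {
        for (;;) {
            struct fail_event *event = event_list;

            if (!event)
                break;
            event_list = event->next;       /* unlink the head, like list_del() */

            if (event->pe)
                handle_normal_event(event->pe);
            free(event);                    /* entries were allocated per event */
        }
    }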
|
/arch/sh/kernel/ |
D | perf_event.c |
     81  static void hw_perf_event_destroy(struct perf_event *event)  in hw_perf_event_destroy() argument
    118  static int __hw_perf_event_init(struct perf_event *event)  in __hw_perf_event_init() argument
    120  struct perf_event_attr *attr = &event->attr;  in __hw_perf_event_init()
    121  struct hw_perf_event *hwc = &event->hw;  in __hw_perf_event_init()
    149  event->destroy = hw_perf_event_destroy;  in __hw_perf_event_init()
    176  static void sh_perf_event_update(struct perf_event *event,  in sh_perf_event_update() argument
    214  local64_add(delta, &event->count);  in sh_perf_event_update()
    217  static void sh_pmu_stop(struct perf_event *event, int flags)  in sh_pmu_stop() argument
    220  struct hw_perf_event *hwc = &event->hw;  in sh_pmu_stop()
    223  if (!(event->hw.state & PERF_HES_STOPPED)) {  in sh_pmu_stop()
    [all …]
|
/arch/s390/kernel/ |
D | perf_cpum_cf.c |
     19  static enum cpumf_ctr_set get_counter_set(u64 event)  in get_counter_set() argument
     23  if (event < 32)  in get_counter_set()
     25  else if (event < 64)  in get_counter_set()
     27  else if (event < 128)  in get_counter_set()
     29  else if (event < 288)  in get_counter_set()
     31  else if (event >= 448 && event < 496)  in get_counter_set()
    171  static void hw_perf_event_destroy(struct perf_event *event)  in hw_perf_event_destroy() argument
    202  static int __hw_perf_event_init(struct perf_event *event, unsigned int type)  in __hw_perf_event_init() argument
    204  struct perf_event_attr *attr = &event->attr;  in __hw_perf_event_init()
    205  struct hw_perf_event *hwc = &event->hw;  in __hw_perf_event_init()
    [all …]
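get_counter_set() classifies a counter number purely by numeric range, reflecting how the CPU-measurement facility groups counters into sets. A tiny sketch of that mapping; the enum names only mimic the real cpumf_ctr_set values (the return statements are elided in the excerpt), but the ranges follow the lines shown:

    #include <stdint.h>

    enum ctr_set { SET_BASIC, SET_USER, SET_CRYPTO, SET_EXT, SET_MT_DIAG, SET_NONE };

    static enum ctr_set counter_to_set(uint64_t event)
    {
        if (event < 32)
            return SET_BASIC;
        else if (event < 64)
            return SET_USER;
        else if (event < 128)
            return SET_CRYPTO;
        else if (event < 288)
            return SET_EXT;
        else if (event >= 448 && event < 496)
            return SET_MT_DIAG;
        return SET_NONE;
    }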
|
D | perf_cpum_cf_diag.c |
    183  static void cf_diag_perf_event_destroy(struct perf_event *event)  in cf_diag_perf_event_destroy() argument
    187  __func__, event, event->cpu,  in cf_diag_perf_event_destroy()
    197  static int __hw_perf_event_init(struct perf_event *event)  in __hw_perf_event_init() argument
    199  struct perf_event_attr *attr = &event->attr;  in __hw_perf_event_init()
    205  event, event->cpu);  in __hw_perf_event_init()
    207  event->hw.config = attr->config;  in __hw_perf_event_init()
    208  event->hw.config_base = 0;  in __hw_perf_event_init()
    221  event->hw.config_base |= cpumf_ctr_ctl[i];  in __hw_perf_event_init()
    225  if (!event->hw.config_base) {  in __hw_perf_event_init()
    231  event->hw.sample_period = attr->sample_period;  in __hw_perf_event_init()
    [all …]
|
/arch/xtensa/kernel/ |
D | perf_event.c |
     61  struct perf_event *event[XCHAL_NUM_PERF_COUNTERS];  member
    145  static void xtensa_perf_event_update(struct perf_event *event,  in xtensa_perf_event_update() argument
    153  new_raw_count = xtensa_pmu_read_counter(event->hw.idx);  in xtensa_perf_event_update()
    159  local64_add(delta, &event->count);  in xtensa_perf_event_update()
    163  static bool xtensa_perf_event_set_period(struct perf_event *event,  in xtensa_perf_event_set_period() argument
    169  if (!is_sampling_event(event)) {  in xtensa_perf_event_set_period()
    192  perf_event_update_userpage(event);  in xtensa_perf_event_set_period()
    207  static int xtensa_pmu_event_init(struct perf_event *event)  in xtensa_pmu_event_init() argument
    211  switch (event->attr.type) {  in xtensa_pmu_event_init()
    213  if (event->attr.config >= ARRAY_SIZE(xtensa_hw_ctl) ||  in xtensa_pmu_event_init()
    [all …]
|
/arch/alpha/kernel/ |
D | perf_event.c |
     41  struct perf_event *event[MAX_HWEVENTS];  member
    152  static int ev67_check_constraints(struct perf_event **event,  in ev67_check_constraints() argument
    199  event[0]->hw.idx = idx0;  in ev67_check_constraints()
    200  event[0]->hw.config_base = config;  in ev67_check_constraints()
    202  event[1]->hw.idx = idx0 ^ 1;  in ev67_check_constraints()
    203  event[1]->hw.config_base = config;  in ev67_check_constraints()
    252  static int alpha_perf_event_set_period(struct perf_event *event,  in alpha_perf_event_set_period() argument
    287  perf_event_update_userpage(event);  in alpha_perf_event_set_period()
    307  static unsigned long alpha_perf_event_update(struct perf_event *event,  in alpha_perf_event_update() argument
    330  local64_add(delta, &event->count);  in alpha_perf_event_update()
    [all …]
|
/arch/x86/events/amd/ |
D | power.c |
     43  static void event_update(struct perf_event *event)  in event_update() argument
     45  struct hw_perf_event *hwc = &event->hw;  in event_update()
     68  local64_add(delta, &event->count);  in event_update()
     71  static void __pmu_event_start(struct perf_event *event)  in __pmu_event_start() argument
     73  if (WARN_ON_ONCE(!(event->hw.state & PERF_HES_STOPPED)))  in __pmu_event_start()
     76  event->hw.state = 0;  in __pmu_event_start()
     78  rdmsrl(MSR_F15H_PTSC, event->hw.ptsc);  in __pmu_event_start()
     79  rdmsrl(MSR_F15H_CU_PWR_ACCUMULATOR, event->hw.pwr_acc);  in __pmu_event_start()
     82  static void pmu_event_start(struct perf_event *event, int mode)  in pmu_event_start() argument
     84  __pmu_event_start(event);  in pmu_event_start()
    [all …]
|
D | iommu.c |
     83  const char *event;  member
     89  struct amd_iommu_event_desc *event =  in _iommu_event_show() local
     91  return sprintf(buf, "%s\n", event->event);  in _iommu_event_show()
     97  .event = _event, \
    152  static int get_next_avail_iommu_bnk_cntr(struct perf_event *event)  in get_next_avail_iommu_bnk_cntr() argument
    154  struct perf_amd_iommu *piommu = container_of(event->pmu, struct perf_amd_iommu, pmu);  in get_next_avail_iommu_bnk_cntr()
    170  event->hw.iommu_bank = bank;  in get_next_avail_iommu_bnk_cntr()
    171  event->hw.iommu_cntr = cntr;  in get_next_avail_iommu_bnk_cntr()
    205  static int perf_iommu_event_init(struct perf_event *event)  in perf_iommu_event_init() argument
    207  struct hw_perf_event *hwc = &event->hw;  in perf_iommu_event_init()
    [all …]
|
/arch/x86/events/ |
D | msr.c |
    164  PMU_FORMAT_ATTR(event, "config:0-63");
    191  static int msr_event_init(struct perf_event *event)  in msr_event_init() argument
    193  u64 cfg = event->attr.config;  in msr_event_init()
    195  if (event->attr.type != event->pmu->type)  in msr_event_init()
    199  if (event->attr.sample_period) /* no sampling */  in msr_event_init()
    210  event->hw.idx = -1;  in msr_event_init()
    211  event->hw.event_base = msr[cfg].msr;  in msr_event_init()
    212  event->hw.config = cfg;  in msr_event_init()
    217  static inline u64 msr_read_counter(struct perf_event *event)  in msr_read_counter() argument
    221  if (event->hw.event_base)  in msr_read_counter()
    [all …]
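msr_event_init() shows the defensive checks an event_init callback performs before accepting an event: the attr.type must match the PMU's own type, sampling is refused, and config must index a known MSR; only then is hw.event_base pointed at the MSR to read later. A userspace-flavoured sketch of those checks, with a stand-in msr[] table and field names that only approximate perf_event/hw_perf_event:

    #include <errno.h>
    #include <stdbool.h>
    #include <stdint.h>

    struct msr_desc {
        uint32_t msr;        /* MSR address to read for this event */
        bool     available;  /* does this CPU expose it? */
    };

    /* Stand-in for the driver's msr[] table; addresses are placeholders. */
    static const struct msr_desc msr[] = {
        { 0x00000010, true },
        { 0x000000e7, true },
    };
    #define NR_MSR_EVENTS (sizeof(msr) / sizeof(msr[0]))

    struct fake_event {
        uint32_t type, pmu_type;
        uint64_t config, sample_period;
        int      hw_idx;
        uint32_t hw_event_base;
    };

    static int msr_event_init_sketch(struct fake_event *event)
    {
        uint64_t cfg = event->config;

        if (event->type != event->pmu_type)   /* not addressed to this PMU */
            return -ENOENT;
        if (event->sample_period)             /* counting only, no sampling */
            return -EINVAL;
        if (cfg >= NR_MSR_EVENTS || !msr[cfg].available)
            return -EINVAL;

        event->hw_idx = -1;                   /* no general-purpose counter slot */
        event->hw_event_base = msr[cfg].msr;  /* remember which MSR to read */
        return 0;
    }

Returning -ENOENT rather than -EINVAL for a type mismatch matters in the real API: it tells the core to keep trying other PMUs instead of failing the syscall outright.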
|
D | core.c |
    100  u64 x86_perf_event_update(struct perf_event *event)  in x86_perf_event_update() argument
    102  struct hw_perf_event *hwc = &event->hw;  in x86_perf_event_update()
    110  if (unlikely(is_topdown_count(event)) && x86_pmu.update_topdown_event)  in x86_perf_event_update()
    111  return x86_pmu.update_topdown_event(event);  in x86_perf_event_update()
    139  local64_add(delta, &event->count);  in x86_perf_event_update()
    148  static int x86_pmu_extra_regs(u64 config, struct perf_event *event)  in x86_pmu_extra_regs() argument
    153  reg = &event->hw.extra_reg;  in x86_pmu_extra_regs()
    159  if (er->event != (config & er->config_mask))  in x86_pmu_extra_regs()
    161  if (event->attr.config1 & ~er->valid_mask)  in x86_pmu_extra_regs()
    168  reg->config = event->attr.config1;  in x86_pmu_extra_regs()
    [all …]
|
/arch/arc/kernel/ |
D | perf_event.c |
    118  static void arc_perf_event_update(struct perf_event *event,  in arc_perf_event_update() argument
    130  local64_add(delta, &event->count);  in arc_perf_event_update()
    134  static void arc_pmu_read(struct perf_event *event)  in arc_pmu_read() argument
    136  arc_perf_event_update(event, &event->hw, event->hw.idx);  in arc_pmu_read()
    167  static int arc_pmu_event_init(struct perf_event *event)  in arc_pmu_event_init() argument
    169  struct hw_perf_event *hwc = &event->hw;  in arc_pmu_event_init()
    172  if (!is_sampling_event(event)) {  in arc_pmu_event_init()
    182  if (event->attr.exclude_user)  in arc_pmu_event_init()
    186  if (event->attr.exclude_kernel)  in arc_pmu_event_init()
    190  switch (event->attr.type) {  in arc_pmu_event_init()
    [all …]
|
/arch/powerpc/oprofile/ |
D | op_model_7450.c |
     35  #define mmcr0_event1(event) \  argument
     36  ((event << MMCR0_PMC1_SHIFT) & MMCR0_PMC1SEL)
     37  #define mmcr0_event2(event) \  argument
     38  ((event << MMCR0_PMC2_SHIFT) & MMCR0_PMC2SEL)
     40  #define mmcr1_event3(event) \  argument
     41  ((event << MMCR1_PMC3_SHIFT) & MMCR1_PMC3SEL)
     42  #define mmcr1_event4(event) \  argument
     43  ((event << MMCR1_PMC4_SHIFT) & MMCR1_PMC4SEL)
     44  #define mmcr1_event5(event) \  argument
     45  ((event << MMCR1_PMC5_SHIFT) & MMCR1_PMC5SEL)
    [all …]
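Each mmcr{0,1}_eventN() macro above just shifts an event selector into its PMCn select field and masks it to the field width, so several selectors can be OR-ed into one MMCR value. A small worked sketch of the same packing; the shift amounts and field masks below are invented, not the 7450's real MMCR layout:

    #include <stdint.h>
    #include <stdio.h>

    /* Invented field positions, for illustration only. */
    #define PMC1_SHIFT  6
    #define PMC1SEL     (0x7fu << PMC1_SHIFT)
    #define PMC2_SHIFT  0
    #define PMC2SEL     0x3fu

    #define mmcr0_event1(event) (((event) << PMC1_SHIFT) & PMC1SEL)
    #define mmcr0_event2(event) (((event) << PMC2_SHIFT) & PMC2SEL)

    int main(void)
    {
        uint32_t mmcr0 = mmcr0_event1(0x23) | mmcr0_event2(0x11);

        /* 0x23 lands in the PMC1SEL field, 0x11 in PMC2SEL. */
        printf("MMCR0 = 0x%08x\n", (unsigned int)mmcr0);
        return 0;
    }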
|
/arch/nds32/kernel/ |
D | perf_event_cpu.c |
    110  nds32_pmu_map_event(struct perf_event *event,  in nds32_pmu_map_event() argument
    117  u64 config = event->attr.config;  in nds32_pmu_map_event()
    119  switch (event->attr.type) {  in nds32_pmu_map_event()
    131  static int nds32_spav3_map_event(struct perf_event *event)  in nds32_spav3_map_event() argument
    133  return nds32_pmu_map_event(event, &nds32_pfm_perf_map,  in nds32_spav3_map_event()
    183  int nds32_pmu_event_set_period(struct perf_event *event)  in nds32_pmu_event_set_period() argument
    185  struct nds32_pmu *nds32_pmu = to_nds32_pmu(event->pmu);  in nds32_pmu_event_set_period()
    186  struct hw_perf_event *hwc = &event->hw;  in nds32_pmu_event_set_period()
    218  nds32_pmu->write_counter(event, (u64)(-left) & nds32_pmu->max_period);  in nds32_pmu_event_set_period()
    220  perf_event_update_userpage(event);  in nds32_pmu_event_set_period()
    [all …]
|
/arch/arm/mm/ |
D | cache-l2x0-pmu.c |
    117  static void l2x0_pmu_event_read(struct perf_event *event)  in l2x0_pmu_event_read() argument
    119  struct hw_perf_event *hw = &event->hw;  in l2x0_pmu_event_read()
    128  local64_add((new_count - prev_count) & mask, &event->count);  in l2x0_pmu_event_read()
    133  static void l2x0_pmu_event_configure(struct perf_event *event)  in l2x0_pmu_event_configure() argument
    135  struct hw_perf_event *hw = &event->hw;  in l2x0_pmu_event_configure()
    160  struct perf_event *event = events[i];  in l2x0_pmu_poll() local
    162  if (!event)  in l2x0_pmu_poll()
    165  l2x0_pmu_event_read(event);  in l2x0_pmu_poll()
    166  l2x0_pmu_event_configure(event);  in l2x0_pmu_poll()
    177  static void __l2x0_pmu_event_enable(int idx, u32 event)  in __l2x0_pmu_event_enable() argument
    [all …]
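l2x0_pmu_event_read() copes with counter wrap-around by masking the difference of the raw values to the counter width before accumulating it. A quick worked check of why the mask is enough, provided the counter wraps at most once between reads (the 32-bit width here is an assumption for the example):

    #include <assert.h>
    #include <stdint.h>

    /* Delta of a free-running 'bits'-wide counter, tolerating a single wrap. */
    static uint64_t counter_delta(uint64_t prev, uint64_t now, unsigned int bits)
    {
        uint64_t mask = (bits == 64) ? ~0ull : (1ull << bits) - 1;

        return (now - prev) & mask;
    }

    int main(void)
    {
        /* Counter wrapped from near the top of the 32-bit range back to 5. */
        assert(counter_delta(0xfffffff0ull, 0x5ull, 32) == 0x15);
        /* No wrap: plain subtraction. */
        assert(counter_delta(100, 250, 32) == 150);
        return 0;
    }

This is also why l2x0_pmu_poll() re-reads and re-configures every live counter periodically: the polling interval must be short enough that no counter can wrap twice unseen.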
|
/arch/alpha/oprofile/ |
D | op_model_ev5.c |
     47  unsigned long event = ctr[i].event;  in common_reg_setup() local
     53  if (event == 0)  in common_reg_setup()
     54  event = 12+48;  in common_reg_setup()
     55  else if (event == 2+41)  in common_reg_setup()
     56  event = 4+65;  in common_reg_setup()
     60  if (event < 2)  in common_reg_setup()
     61  ctl |= event << 31;  in common_reg_setup()
     62  else if (event < 24)  in common_reg_setup()
     64  else if (event < 40)  in common_reg_setup()
     65  ctl |= (event - 24) << 4;  in common_reg_setup()
    [all …]
|