/arch/powerpc/perf/ |
D | core-fsl-emb.c |
     27  struct perf_event *event[MAX_HWEVENTS];  member
    179  static void fsl_emb_pmu_read(struct perf_event *event)  in fsl_emb_pmu_read() argument
    183  if (event->hw.state & PERF_HES_STOPPED)  in fsl_emb_pmu_read()
    192  prev = local64_read(&event->hw.prev_count);  in fsl_emb_pmu_read()
    194  val = read_pmc(event->hw.idx);  in fsl_emb_pmu_read()
    195  } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev);  in fsl_emb_pmu_read()
    199  local64_add(delta, &event->count);  in fsl_emb_pmu_read()
    200  local64_sub(delta, &event->hw.period_left);  in fsl_emb_pmu_read()
    272  struct perf_event *event;  in collect_events() local
    280  list_for_each_entry(event, &group->sibling_list, group_entry) {  in collect_events()
    [all …]
|
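The fsl_emb_pmu_read() excerpts above show the lock-free counter-update idiom shared by most entries in this listing: snapshot hw.prev_count, read the hardware counter, retry the compare-and-exchange until the snapshot was not overtaken, then fold only the delta into the 64-bit software count. Below is a minimal userspace sketch of that idiom; struct sw_event, read_hw_counter() and the C11 atomics are stand-ins for the kernel's struct perf_event, read_pmc() and local64_* helpers, and the counter-width masking of the delta is omitted.

/*
 * Sketch of the prev_count/cmpxchg update loop, assuming a free-running
 * hardware counter exposed here as an atomic variable. Illustrative only.
 */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

struct sw_event {
	_Atomic uint64_t prev_count;	/* last raw counter value folded in */
	_Atomic uint64_t count;		/* accumulated 64-bit event count */
};

/* Stand-in for the hardware counter register (read_pmc() in the driver). */
static _Atomic uint64_t fake_pmc;

static uint64_t read_hw_counter(void)
{
	return atomic_load(&fake_pmc);
}

static void sw_event_read(struct sw_event *ev)
{
	uint64_t prev, val;

	/* Retry until prev_count still holds the value this read was based on. */
	do {
		prev = atomic_load(&ev->prev_count);
		val = read_hw_counter();
	} while (!atomic_compare_exchange_weak(&ev->prev_count, &prev, val));

	/* Only the delta since the last read is added to the software count. */
	atomic_fetch_add(&ev->count, val - prev);
}

int main(void)
{
	struct sw_event ev = { 0, 0 };

	atomic_store(&fake_pmc, 100);
	sw_event_read(&ev);
	atomic_store(&fake_pmc, 250);
	sw_event_read(&ev);
	printf("count = %llu\n", (unsigned long long)atomic_load(&ev.count));
	return 0;
}

The retry loop is what lets the read path race safely with an overflow interrupt that updates prev_count with the same discipline: whichever side wins the compare-and-exchange accounts for the delta, the other retries.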
D | isa207-common.c |
     15  static inline bool event_is_fab_match(u64 event)  in event_is_fab_match() argument
     18  event &= 0xff0fe;  in event_is_fab_match()
     21  return (event == 0x30056 || event == 0x4f052);  in event_is_fab_match()
     24  int isa207_get_constraint(u64 event, unsigned long *maskp, unsigned long *valp)  in isa207_get_constraint() argument
     31  if (event & ~EVENT_VALID_MASK)  in isa207_get_constraint()
     34  pmc = (event >> EVENT_PMC_SHIFT) & EVENT_PMC_MASK;  in isa207_get_constraint()
     35  unit = (event >> EVENT_UNIT_SHIFT) & EVENT_UNIT_MASK;  in isa207_get_constraint()
     36  cache = (event >> EVENT_CACHE_SEL_SHIFT) & EVENT_CACHE_SEL_MASK;  in isa207_get_constraint()
     37  ebb = (event >> EVENT_EBB_SHIFT) & EVENT_EBB_MASK;  in isa207_get_constraint()
     46  base_event = event & ~EVENT_LINUX_MASK;  in isa207_get_constraint()
    [all …]
|
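isa207_get_constraint() above, like the power5/6/7 constraint routines further down, begins by slicing the raw 64-bit event code into PMC, unit and selector fields with shifts and masks. A minimal sketch of that decode step follows; the field layout (the EV_* shifts and masks) is invented for illustration and is not the real ISA 2.07 encoding.

/*
 * Sketch of the shift-and-mask decode of a raw event code. The layout
 * below (PMC in bits 16-19, unit in bits 12-15, selector in bits 0-7)
 * is made up; real layouts are ISA and CPU specific.
 */
#include <stdint.h>
#include <stdio.h>

#define EV_PMC_SHIFT	16
#define EV_PMC_MASK	0xfULL
#define EV_UNIT_SHIFT	12
#define EV_UNIT_MASK	0xfULL
#define EV_PSEL_MASK	0xffULL

struct decoded_event {
	unsigned int pmc;	/* which counter the event is tied to (0 = any) */
	unsigned int unit;	/* functional unit feeding the counter */
	unsigned int psel;	/* raw selector programmed into the PMU */
};

static struct decoded_event decode_event(uint64_t event)
{
	struct decoded_event d = {
		.pmc  = (unsigned int)((event >> EV_PMC_SHIFT) & EV_PMC_MASK),
		.unit = (unsigned int)((event >> EV_UNIT_SHIFT) & EV_UNIT_MASK),
		.psel = (unsigned int)(event & EV_PSEL_MASK),
	};
	return d;
}

int main(void)
{
	struct decoded_event d = decode_event(0x23026);

	printf("pmc=%u unit=%u psel=0x%x\n", d.pmc, d.unit, d.psel);
	return 0;
}

The decoded fields then drive the constraint mask/value pair the caller builds, which is why the get_constraint routines above and below all start the same way.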
D | core-book3s.c |
     36  struct perf_event *event[MAX_HWEVENTS];  member
    116  static bool is_ebb_event(struct perf_event *event) { return false; }  in is_ebb_event() argument
    117  static int ebb_event_check(struct perf_event *event) { return 0; }  in ebb_event_check() argument
    118  static void ebb_event_add(struct perf_event *event) { }  in ebb_event_add() argument
    125  static inline void power_pmu_bhrb_enable(struct perf_event *event) {}  in power_pmu_bhrb_enable() argument
    126  static inline void power_pmu_bhrb_disable(struct perf_event *event) {}  in power_pmu_bhrb_disable() argument
    349  static void power_pmu_bhrb_enable(struct perf_event *event)  in power_pmu_bhrb_enable() argument
    357  if (event->ctx->task && cpuhw->bhrb_context != event->ctx) {  in power_pmu_bhrb_enable()
    359  cpuhw->bhrb_context = event->ctx;  in power_pmu_bhrb_enable()
    362  perf_sched_cb_inc(event->ctx->pmu);  in power_pmu_bhrb_enable()
    [all …]
|
D | hv-gpci.c |
    179  static u64 h_gpci_get_value(struct perf_event *event)  in h_gpci_get_value() argument
    182  unsigned long ret = single_gpci_request(event_get_request(event),  in h_gpci_get_value()
    183  event_get_starting_index(event),  in h_gpci_get_value()
    184  event_get_secondary_index(event),  in h_gpci_get_value()
    185  event_get_counter_info_version(event),  in h_gpci_get_value()
    186  event_get_offset(event),  in h_gpci_get_value()
    187  event_get_length(event),  in h_gpci_get_value()
    194  static void h_gpci_event_update(struct perf_event *event)  in h_gpci_event_update() argument
    197  u64 now = h_gpci_get_value(event);  in h_gpci_event_update()
    198  prev = local64_xchg(&event->hw.prev_count, now);  in h_gpci_event_update()
    [all …]
|
D | power5+-pmu.c |
    136  static int power5p_get_constraint(u64 event, unsigned long *maskp,  in power5p_get_constraint() argument
    143  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power5p_get_constraint()
    150  if (pmc >= 5 && !(event == 0x500009 || event == 0x600005))  in power5p_get_constraint()
    153  if (event & PM_BUSEVENT_MSK) {  in power5p_get_constraint()
    154  unit = (event >> PM_UNIT_SH) & PM_UNIT_MSK;  in power5p_get_constraint()
    161  byte = (event >> PM_BYTE_SH) & PM_BYTE_MSK;  in power5p_get_constraint()
    170  bit = event & 7;  in power5p_get_constraint()
    174  value |= (unsigned long)((event >> PM_GRS_SH) & fmask)  in power5p_get_constraint()
    191  static int power5p_limited_pmc_event(u64 event)  in power5p_limited_pmc_event() argument
    193  int pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power5p_limited_pmc_event()
    [all …]
|
D | power6-pmu.c |
    139  static int power6_marked_instr_event(u64 event)  in power6_marked_instr_event() argument
    145  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power6_marked_instr_event()
    146  psel = (event & PM_PMCSEL_MSK) >> 1;	/* drop edge/level bit */  in power6_marked_instr_event()
    165  if (!(event & PM_BUSEVENT_MSK) || bit == -1)  in power6_marked_instr_event()
    168  byte = (event >> PM_BYTE_SH) & PM_BYTE_MSK;  in power6_marked_instr_event()
    169  unit = (event >> PM_UNIT_SH) & PM_UNIT_MSK;  in power6_marked_instr_event()
    177  static int p6_compute_mmcr(u64 event[], int n_ev,  in p6_compute_mmcr() argument
    190  pmc = (event[i] >> PM_PMC_SH) & PM_PMC_MSK;  in p6_compute_mmcr()
    198  ev = event[i];  in p6_compute_mmcr()
    245  if (power6_marked_instr_event(event[i]))  in p6_compute_mmcr()
    [all …]
|
D | hv-24x7.c |
    319  static char *event_fmt(struct hv_24x7_event_data *event, unsigned domain)  in event_fmt() argument
    347  be16_to_cpu(event->event_counter_offs) +  in event_fmt()
    348  be16_to_cpu(event->event_group_record_offs),  in event_fmt()
    427  struct hv_24x7_event_data *event,  in event_to_attr() argument
    441  val = event_fmt(event, domain);  in event_to_attr()
    445  ev_name = event_name(event, &event_name_len);  in event_to_attr()
    468  static struct attribute *event_to_desc_attr(struct hv_24x7_event_data *event,  in event_to_desc_attr() argument
    472  char *name = event_name(event, &nl);  in event_to_desc_attr()
    473  char *desc = event_desc(event, &dl);  in event_to_desc_attr()
    483  event_to_long_desc_attr(struct hv_24x7_event_data *event, int nonce)  in event_to_long_desc_attr() argument
    [all …]
|
D | power5-pmu.c |
    140  static int power5_get_constraint(u64 event, unsigned long *maskp,  in power5_get_constraint() argument
    148  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power5_get_constraint()
    157  else if (event != 0x500009 && event != 0x600005)  in power5_get_constraint()
    160  if (event & PM_BUSEVENT_MSK) {  in power5_get_constraint()
    161  unit = (event >> PM_UNIT_SH) & PM_UNIT_MSK;  in power5_get_constraint()
    168  byte = (event >> PM_BYTE_SH) & PM_BYTE_MSK;  in power5_get_constraint()
    177  bit = event & 7;  in power5_get_constraint()
    181  value |= (unsigned long)((event >> PM_GRS_SH) & fmask)  in power5_get_constraint()
    227  static int find_alternative(u64 event)  in find_alternative() argument
    232  if (event < event_alternatives[i][0])  in find_alternative()
    [all …]
|
D | mpc7450-pmu.c |
     41  static int mpc7450_classify_event(u32 event)  in mpc7450_classify_event() argument
     45  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in mpc7450_classify_event()
     51  event &= PM_PMCSEL_MSK;  in mpc7450_classify_event()
     52  if (event <= 1)  in mpc7450_classify_event()
     54  if (event <= 7)  in mpc7450_classify_event()
     56  if (event <= 13)  in mpc7450_classify_event()
     58  if (event <= 22)  in mpc7450_classify_event()
     81  static int mpc7450_threshold_use(u32 event)  in mpc7450_threshold_use() argument
     85  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in mpc7450_threshold_use()
     86  sel = event & PM_PMCSEL_MSK;  in mpc7450_threshold_use()
    [all …]
|
D | power7-pmu.c |
     85  static int power7_get_constraint(u64 event, unsigned long *maskp,  in power7_get_constraint() argument
     91  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in power7_get_constraint()
     98  if (pmc >= 5 && !(event == 0x500fa || event == 0x600f4))  in power7_get_constraint()
    107  unit = (event >> PM_UNIT_SH) & PM_UNIT_MSK;  in power7_get_constraint()
    110  int l2sel = (event >> PM_L2SEL_SH) & PM_L2SEL_MSK;  in power7_get_constraint()
    132  static int find_alternative(u64 event)  in find_alternative() argument
    137  if (event < event_alternatives[i][0])  in find_alternative()
    140  if (event == event_alternatives[i][j])  in find_alternative()
    146  static s64 find_alternative_decode(u64 event)  in find_alternative_decode() argument
    151  pmc = (event >> PM_PMC_SH) & PM_PMC_MSK;  in find_alternative_decode()
    [all …]
|
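power5-pmu.c and power7-pmu.c above both carry a find_alternative() helper: a small table in which each row lists equivalent encodings of one event, sorted by the first column so the scan can stop as soon as that column exceeds the code being looked up. The sketch below reproduces only that lookup shape; the table contents are invented, and the real drivers pair the lookup with an alternatives-expansion step when scheduling events onto counters.

/*
 * Sketch of a sorted alternatives-table lookup. Rows are sorted by their
 * first entry; unused slots are zero. Returns the matching row or -1.
 */
#include <stdint.h>
#include <stdio.h>

#define MAX_ALT 3

static const uint64_t event_alternatives[][MAX_ALT] = {
	{ 0x0f2, 0x2000f2 },		/* same event available on two PMCs */
	{ 0x0fa, 0x2000fa },
	{ 0x100f2, 0x300f2, 0x400f2 },
};

static int find_alternative(uint64_t event)
{
	int i, j;

	for (i = 0; i < (int)(sizeof(event_alternatives) /
			      sizeof(event_alternatives[0])); ++i) {
		if (event < event_alternatives[i][0])
			break;			/* rows are sorted: give up early */
		for (j = 0; j < MAX_ALT && event_alternatives[i][j]; ++j)
			if (event == event_alternatives[i][j])
				return i;	/* row index of the match */
	}
	return -1;
}

int main(void)
{
	printf("0x2000fa -> row %d\n", find_alternative(0x2000fa));
	return 0;
}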
/arch/powerpc/kernel/ |
D | eeh_event.c |
     54  struct eeh_event *event;  in eeh_event_handler() local
     63  event = NULL;  in eeh_event_handler()
     65  event = list_entry(eeh_eventlist.next,  in eeh_event_handler()
     67  list_del(&event->list);  in eeh_event_handler()
     70  if (!event)  in eeh_event_handler()
     74  pe = event->pe;  in eeh_event_handler()
     90  kfree(event);  in eeh_event_handler()
    132  struct eeh_event *event;  in eeh_send_failure_event() local
    134  event = kzalloc(sizeof(*event), GFP_ATOMIC);  in eeh_send_failure_event()
    135  if (!event) {  in eeh_send_failure_event()
    [all …]
|
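eeh_send_failure_event() and eeh_event_handler() above are the two ends of a locked event queue: the reporter allocates an eeh_event with GFP_ATOMIC and appends it to eeh_eventlist, and the handler thread detaches one entry at a time, processes its PE and frees it. A userspace sketch of the same discipline, with a pthread mutex standing in for the kernel spinlock and a LIFO push instead of the kernel's tail-append, purely for brevity:

/*
 * Minimal producer/consumer event queue sketch. Not the kernel API:
 * calloc/free and pthread locking replace kzalloc/kfree and spinlocks.
 */
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

struct fail_event {
	struct fail_event *next;
	int dev_id;			/* which device reported the failure */
};

static struct fail_event *event_list;
static pthread_mutex_t event_lock = PTHREAD_MUTEX_INITIALIZER;

/* Producer side: queue a failure report (cf. eeh_send_failure_event()). */
static int send_failure_event(int dev_id)
{
	struct fail_event *event = calloc(1, sizeof(*event));

	if (!event)
		return -1;
	event->dev_id = dev_id;

	pthread_mutex_lock(&event_lock);
	event->next = event_list;	/* LIFO is enough for a sketch */
	event_list = event;
	pthread_mutex_unlock(&event_lock);
	return 0;
}

/* Consumer side: pop and handle one event (cf. eeh_event_handler()). */
static int handle_one_event(void)
{
	struct fail_event *event;

	pthread_mutex_lock(&event_lock);
	event = event_list;
	if (event)
		event_list = event->next;
	pthread_mutex_unlock(&event_lock);

	if (!event)
		return 0;		/* nothing queued */

	printf("handling failure on device %d\n", event->dev_id);
	free(event);
	return 1;
}

int main(void)
{
	send_failure_event(3);
	send_failure_event(7);
	while (handle_one_event())
		;
	return 0;
}

Keeping the allocation on the reporting side and the free on the handling side is the usual reason for this split: the report path can run from a context that cannot sleep, while the recovery work happens later in a dedicated thread.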
/arch/sh/kernel/ |
D | perf_event.c |
     84  static void hw_perf_event_destroy(struct perf_event *event)  in hw_perf_event_destroy() argument
    121  static int __hw_perf_event_init(struct perf_event *event)  in __hw_perf_event_init() argument
    123  struct perf_event_attr *attr = &event->attr;  in __hw_perf_event_init()
    124  struct hw_perf_event *hwc = &event->hw;  in __hw_perf_event_init()
    152  event->destroy = hw_perf_event_destroy;  in __hw_perf_event_init()
    179  static void sh_perf_event_update(struct perf_event *event,  in sh_perf_event_update() argument
    217  local64_add(delta, &event->count);  in sh_perf_event_update()
    220  static void sh_pmu_stop(struct perf_event *event, int flags)  in sh_pmu_stop() argument
    223  struct hw_perf_event *hwc = &event->hw;  in sh_pmu_stop()
    226  if (!(event->hw.state & PERF_HES_STOPPED)) {  in sh_pmu_stop()
    [all …]
|
/arch/x86/events/ |
D | msr.c |
    116  PMU_FORMAT_ATTR(event, "config:0-63");
    132  static int msr_event_init(struct perf_event *event)  in msr_event_init() argument
    134  u64 cfg = event->attr.config;  in msr_event_init()
    136  if (event->attr.type != event->pmu->type)  in msr_event_init()
    143  if (event->attr.exclude_user ||  in msr_event_init()
    144  event->attr.exclude_kernel ||  in msr_event_init()
    145  event->attr.exclude_hv ||  in msr_event_init()
    146  event->attr.exclude_idle ||  in msr_event_init()
    147  event->attr.exclude_host ||  in msr_event_init()
    148  event->attr.exclude_guest ||  in msr_event_init()
    [all …]
|
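msr_event_init() above (and perf_iommu_event_init() further down) follow the usual .event_init contract: return -ENOENT when the event's type is not this PMU's type, so the core can offer the event to another PMU, and -EINVAL for attributes the hardware cannot honour, such as the exclude_* privilege filters or sampling. A stripped-down sketch of that filtering, using a fake attribute struct rather than the real struct perf_event_attr:

/*
 * Sketch of .event_init-style validation. MY_PMU_TYPE, MY_MAX_CONFIG and
 * struct fake_attr are invented for illustration.
 */
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>

struct fake_attr {
	int type;		/* PMU type the caller asked for */
	unsigned long config;	/* event selector */
	bool exclude_user;	/* privilege filters this PMU cannot honour */
	bool exclude_kernel;
	bool sampling;		/* counting-only PMUs reject sampling */
};

#define MY_PMU_TYPE	42
#define MY_MAX_CONFIG	7

static int my_event_init(const struct fake_attr *attr)
{
	if (attr->type != MY_PMU_TYPE)
		return -ENOENT;		/* not ours: let another PMU claim it */

	if (attr->config > MY_MAX_CONFIG)
		return -EINVAL;		/* unknown event selector */

	if (attr->exclude_user || attr->exclude_kernel || attr->sampling)
		return -EINVAL;		/* no privilege filtering or sampling */

	return 0;
}

int main(void)
{
	struct fake_attr ok  = { .type = MY_PMU_TYPE, .config = 3 };
	struct fake_attr bad = { .type = MY_PMU_TYPE, .config = 3,
				 .exclude_user = true };

	printf("ok: %d, bad: %d\n", my_event_init(&ok), my_event_init(&bad));
	return 0;
}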
/arch/x86/events/amd/ |
D | power.c |
     50  static void event_update(struct perf_event *event)  in event_update() argument
     52  struct hw_perf_event *hwc = &event->hw;  in event_update()
     75  local64_add(delta, &event->count);  in event_update()
     78  static void __pmu_event_start(struct perf_event *event)  in __pmu_event_start() argument
     80  if (WARN_ON_ONCE(!(event->hw.state & PERF_HES_STOPPED)))  in __pmu_event_start()
     83  event->hw.state = 0;  in __pmu_event_start()
     85  rdmsrl(MSR_F15H_PTSC, event->hw.ptsc);  in __pmu_event_start()
     86  rdmsrl(MSR_F15H_CU_PWR_ACCUMULATOR, event->hw.pwr_acc);  in __pmu_event_start()
     89  static void pmu_event_start(struct perf_event *event, int mode)  in pmu_event_start() argument
     91  __pmu_event_start(event);  in pmu_event_start()
    [all …]
|
D | iommu.c |
     84  const char *event;  member
     90  struct amd_iommu_event_desc *event =  in _iommu_event_show() local
     92  return sprintf(buf, "%s\n", event->event);  in _iommu_event_show()
     98  .event = _event, \
    202  static int perf_iommu_event_init(struct perf_event *event)  in perf_iommu_event_init() argument
    204  struct hw_perf_event *hwc = &event->hw;  in perf_iommu_event_init()
    209  if (event->attr.type != event->pmu->type)  in perf_iommu_event_init()
    217  if (is_sampling_event(event) || event->attach_state & PERF_ATTACH_TASK)  in perf_iommu_event_init()
    221  if (event->attr.exclude_user || event->attr.exclude_kernel ||  in perf_iommu_event_init()
    222  event->attr.exclude_host || event->attr.exclude_guest)  in perf_iommu_event_init()
    [all …]
|
D | uncore.c |
     56  static bool is_nb_event(struct perf_event *event)  in is_nb_event() argument
     58  return event->pmu->type == amd_nb_pmu.type;  in is_nb_event()
     61  static bool is_l2_event(struct perf_event *event)  in is_l2_event() argument
     63  return event->pmu->type == amd_l2_pmu.type;  in is_l2_event()
     66  static struct amd_uncore *event_to_amd_uncore(struct perf_event *event)  in event_to_amd_uncore() argument
     68  if (is_nb_event(event) && amd_uncore_nb)  in event_to_amd_uncore()
     69  return *per_cpu_ptr(amd_uncore_nb, event->cpu);  in event_to_amd_uncore()
     70  else if (is_l2_event(event) && amd_uncore_l2)  in event_to_amd_uncore()
     71  return *per_cpu_ptr(amd_uncore_l2, event->cpu);  in event_to_amd_uncore()
     76  static void amd_uncore_read(struct perf_event *event)  in amd_uncore_read() argument
    [all …]
|
/arch/xtensa/kernel/ |
D | perf_event.c |
     55  struct perf_event *event[XCHAL_NUM_PERF_COUNTERS];  member
    139  static void xtensa_perf_event_update(struct perf_event *event,  in xtensa_perf_event_update() argument
    147  new_raw_count = xtensa_pmu_read_counter(event->hw.idx);  in xtensa_perf_event_update()
    153  local64_add(delta, &event->count);  in xtensa_perf_event_update()
    157  static bool xtensa_perf_event_set_period(struct perf_event *event,  in xtensa_perf_event_set_period() argument
    163  if (!is_sampling_event(event)) {  in xtensa_perf_event_set_period()
    186  perf_event_update_userpage(event);  in xtensa_perf_event_set_period()
    201  static int xtensa_pmu_event_init(struct perf_event *event)  in xtensa_pmu_event_init() argument
    205  switch (event->attr.type) {  in xtensa_pmu_event_init()
    207  if (event->attr.config >= ARRAY_SIZE(xtensa_hw_ctl) ||  in xtensa_pmu_event_init()
    [all …]
|
/arch/alpha/kernel/ |
D | perf_event.c |
     40  struct perf_event *event[MAX_HWEVENTS];  member
    151  static int ev67_check_constraints(struct perf_event **event,  in ev67_check_constraints() argument
    198  event[0]->hw.idx = idx0;  in ev67_check_constraints()
    199  event[0]->hw.config_base = config;  in ev67_check_constraints()
    201  event[1]->hw.idx = idx0 ^ 1;  in ev67_check_constraints()
    202  event[1]->hw.config_base = config;  in ev67_check_constraints()
    251  static int alpha_perf_event_set_period(struct perf_event *event,  in alpha_perf_event_set_period() argument
    286  perf_event_update_userpage(event);  in alpha_perf_event_set_period()
    306  static unsigned long alpha_perf_event_update(struct perf_event *event,  in alpha_perf_event_update() argument
    329  local64_add(delta, &event->count);  in alpha_perf_event_update()
    [all …]
|
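alpha_perf_event_set_period() and xtensa_perf_event_set_period() above both arm a sampling counter so that it overflows after roughly period_left more events, clamping to the counter width and re-adding the sample period when the previous period has already been consumed. A rough sketch of that arithmetic for a pretend 32-bit up-counter; write_counter() and COUNTER_MAX are stand-ins, and the real drivers also update prev_count and call perf_event_update_userpage() as shown in the excerpts.

/*
 * Sketch of sampling-period arming: start the counter at max - left + 1
 * so it wraps (and interrupts) after 'left' more events.
 */
#include <stdint.h>
#include <stdio.h>

#define COUNTER_MAX	0xffffffffULL	/* pretend 32-bit hardware counter */

static uint64_t programmed;		/* last value written to the counter */

static void write_counter(uint64_t val)
{
	programmed = val & COUNTER_MAX;
}

/* Returns the number of events actually armed for, after clamping. */
static uint64_t set_period(int64_t period_left, uint64_t sample_period)
{
	int64_t left = period_left;

	if (left <= 0)			/* previous period already consumed */
		left += sample_period;
	if (left > (int64_t)COUNTER_MAX)
		left = COUNTER_MAX;	/* cannot count past the width */

	/* Counter counts up and interrupts on wrap, so start near the top. */
	write_counter(COUNTER_MAX - (uint64_t)left + 1);
	return (uint64_t)left;
}

int main(void)
{
	uint64_t left = set_period(-5, 100000);

	printf("armed for %llu events, counter=0x%llx\n",
	       (unsigned long long)left, (unsigned long long)programmed);
	return 0;
}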
/arch/arm/mm/ |
D | cache-l2x0-pmu.c |
    129  static void l2x0_pmu_event_read(struct perf_event *event)  in l2x0_pmu_event_read() argument
    131  struct hw_perf_event *hw = &event->hw;  in l2x0_pmu_event_read()
    140  local64_add((new_count - prev_count) & mask, &event->count);  in l2x0_pmu_event_read()
    145  static void l2x0_pmu_event_configure(struct perf_event *event)  in l2x0_pmu_event_configure() argument
    147  struct hw_perf_event *hw = &event->hw;  in l2x0_pmu_event_configure()
    172  struct perf_event *event = events[i];  in l2x0_pmu_poll() local
    174  if (!event)  in l2x0_pmu_poll()
    177  l2x0_pmu_event_read(event);  in l2x0_pmu_poll()
    178  l2x0_pmu_event_configure(event);  in l2x0_pmu_poll()
    189  static void __l2x0_pmu_event_enable(int idx, u32 event)  in __l2x0_pmu_event_enable() argument
    [all …]
|
/arch/arc/kernel/ |
D | perf_event.c |
    103  static void arc_perf_event_update(struct perf_event *event,  in arc_perf_event_update() argument
    115  local64_add(delta, &event->count);  in arc_perf_event_update()
    119  static void arc_pmu_read(struct perf_event *event)  in arc_pmu_read() argument
    121  arc_perf_event_update(event, &event->hw, event->hw.idx);  in arc_pmu_read()
    152  static int arc_pmu_event_init(struct perf_event *event)  in arc_pmu_event_init() argument
    154  struct hw_perf_event *hwc = &event->hw;  in arc_pmu_event_init()
    157  if (!is_sampling_event(event)) {  in arc_pmu_event_init()
    167  if (event->attr.exclude_user)  in arc_pmu_event_init()
    171  if (event->attr.exclude_kernel)  in arc_pmu_event_init()
    175  switch (event->attr.type) {  in arc_pmu_event_init()
    [all …]
|
/arch/blackfin/kernel/ |
D | perf_event.c |
    260  static void bfin_perf_event_update(struct perf_event *event,  in bfin_perf_event_update() argument
    298  local64_add(delta, &event->count);  in bfin_perf_event_update()
    301  static void bfin_pmu_stop(struct perf_event *event, int flags)  in bfin_pmu_stop() argument
    304  struct hw_perf_event *hwc = &event->hw;  in bfin_pmu_stop()
    307  if (!(event->hw.state & PERF_HES_STOPPED)) {  in bfin_pmu_stop()
    310  event->hw.state |= PERF_HES_STOPPED;  in bfin_pmu_stop()
    313  if ((flags & PERF_EF_UPDATE) && !(event->hw.state & PERF_HES_UPTODATE)) {  in bfin_pmu_stop()
    314  bfin_perf_event_update(event, &event->hw, idx);  in bfin_pmu_stop()
    315  event->hw.state |= PERF_HES_UPTODATE;  in bfin_pmu_stop()
    319  static void bfin_pmu_start(struct perf_event *event, int flags)  in bfin_pmu_start() argument
    [all …]
|
/arch/powerpc/oprofile/ |
D | op_model_7450.c |
     39  #define mmcr0_event1(event) \  argument
     40  ((event << MMCR0_PMC1_SHIFT) & MMCR0_PMC1SEL)
     41  #define mmcr0_event2(event) \  argument
     42  ((event << MMCR0_PMC2_SHIFT) & MMCR0_PMC2SEL)
     44  #define mmcr1_event3(event) \  argument
     45  ((event << MMCR1_PMC3_SHIFT) & MMCR1_PMC3SEL)
     46  #define mmcr1_event4(event) \  argument
     47  ((event << MMCR1_PMC4_SHIFT) & MMCR1_PMC4SEL)
     48  #define mmcr1_event5(event) \  argument
     49  ((event << MMCR1_PMC5_SHIFT) & MMCR1_PMC5SEL)
    [all …]
|
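The mmcr0_event1()/mmcr1_event3() macros above are the encode direction of the shift-and-mask pattern seen earlier in the listing: each counter's event selector is shifted into its field of an MMCR control register and masked so it cannot spill into a neighbouring field. Sketch with invented shift and mask values; the real MMCR0/MMCR1 field positions differ.

/*
 * Sketch of packing two event selectors into one control register.
 * PMC1_SHIFT/PMC1_SEL and PMC2_SHIFT/PMC2_SEL are illustrative only.
 */
#include <stdint.h>
#include <stdio.h>

#define PMC1_SHIFT	6
#define PMC1_SEL	(0x7fu << PMC1_SHIFT)	/* selector field, bits 6-12 */
#define PMC2_SHIFT	0
#define PMC2_SEL	(0x3fu << PMC2_SHIFT)	/* selector field, bits 0-5 */

static uint32_t build_mmcr0(unsigned int ev1, unsigned int ev2)
{
	return ((ev1 << PMC1_SHIFT) & PMC1_SEL) |
	       ((ev2 << PMC2_SHIFT) & PMC2_SEL);
}

int main(void)
{
	printf("mmcr0 = 0x%08x\n", (unsigned int)build_mmcr0(0x23, 0x05));
	return 0;
}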
/arch/s390/kernel/ |
D | perf_cpum_cf.c |
     89  static int get_counter_set(u64 event)  in get_counter_set() argument
     93  if (event < 32)  in get_counter_set()
     95  else if (event < 64)  in get_counter_set()
     97  else if (event < 128)  in get_counter_set()
     99  else if (event < 256)  in get_counter_set()
    298  static void hw_perf_event_destroy(struct perf_event *event)  in hw_perf_event_destroy() argument
    329  static int __hw_perf_event_init(struct perf_event *event)  in __hw_perf_event_init() argument
    331  struct perf_event_attr *attr = &event->attr;  in __hw_perf_event_init()
    332  struct hw_perf_event *hwc = &event->hw;  in __hw_perf_event_init()
    402  event->destroy = hw_perf_event_destroy;  in __hw_perf_event_init()
    [all …]
|
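get_counter_set() above maps a raw counter number onto one of the s390 counter-facility sets purely by range comparison, with the boundaries visible in the excerpt (32, 64, 128, 256). The sketch below mirrors that shape; the set names are illustrative rather than the kernel's CPUMF definitions, which the excerpt truncates anyway.

/*
 * Sketch of mapping a counter number to its set by range. Set names and
 * their meanings are invented for illustration.
 */
#include <stdint.h>
#include <stdio.h>

enum counter_set {
	SET_BASIC,	/* counters   0..31  */
	SET_USER,	/* counters  32..63  */
	SET_CRYPTO,	/* counters  64..127 */
	SET_EXT,	/* counters 128..255 */
	SET_NONE,
};

static enum counter_set get_counter_set(uint64_t event)
{
	if (event < 32)
		return SET_BASIC;
	if (event < 64)
		return SET_USER;
	if (event < 128)
		return SET_CRYPTO;
	if (event < 256)
		return SET_EXT;
	return SET_NONE;
}

int main(void)
{
	printf("event 70 -> set %d\n", get_counter_set(70));
	return 0;
}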
/arch/alpha/oprofile/ |
D | op_model_ev5.c |
     47  unsigned long event = ctr[i].event;  in common_reg_setup() local
     53  if (event == 0)  in common_reg_setup()
     54  event = 12+48;  in common_reg_setup()
     55  else if (event == 2+41)  in common_reg_setup()
     56  event = 4+65;  in common_reg_setup()
     60  if (event < 2)  in common_reg_setup()
     61  ctl |= event << 31;  in common_reg_setup()
     62  else if (event < 24)  in common_reg_setup()
     64  else if (event < 40)  in common_reg_setup()
     65  ctl |= (event - 24) << 4;  in common_reg_setup()
    [all …]
|
/arch/x86/events/intel/ |
D | uncore_snb.c |
     76  DEFINE_UNCORE_FORMAT_ATTR(event, event, "config:0-7");
     84  static void snb_uncore_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)  in snb_uncore_msr_enable_event() argument
     86  struct hw_perf_event *hwc = &event->hw;  in snb_uncore_msr_enable_event()
     94  static void snb_uncore_msr_disable_event(struct intel_uncore_box *box, struct perf_event *event)  in snb_uncore_msr_disable_event() argument
     96  wrmsrl(event->hw.config_base, 0);  in snb_uncore_msr_disable_event()
    329  static void snb_uncore_imc_enable_event(struct intel_uncore_box *box, struct perf_event *event)  in snb_uncore_imc_enable_event() argument
    332  static void snb_uncore_imc_disable_event(struct intel_uncore_box *box, struct perf_event *event)  in snb_uncore_imc_disable_event() argument
    335  static u64 snb_uncore_imc_read_counter(struct intel_uncore_box *box, struct perf_event *event)  in snb_uncore_imc_read_counter() argument
    337  struct hw_perf_event *hwc = &event->hw;  in snb_uncore_imc_read_counter()
    347  static int snb_uncore_imc_event_init(struct perf_event *event)  in snb_uncore_imc_event_init() argument
    [all …]
|