
Searched refs:hwc (Results 1 – 25 of 52) sorted by relevance


/arch/s390/include/asm/
perf_event.h
68 #define OVERFLOW_REG(hwc) ((hwc)->extra_reg.config) argument
69 #define SFB_ALLOC_REG(hwc) ((hwc)->extra_reg.alloc) argument
70 #define TEAR_REG(hwc) ((hwc)->last_tag) argument
71 #define SAMPL_RATE(hwc) ((hwc)->event_base) argument
72 #define SAMPL_FLAGS(hwc) ((hwc)->config_base) argument
73 #define SAMPL_DIAG_MODE(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_DIAG_MODE) argument
74 #define SDB_FULL_BLOCKS(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_FULL_BLOCKS) argument
75 #define SAMPLE_FREQ_MODE(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_FREQ_MODE) argument
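These s390 macros repurpose otherwise-generic struct hw_perf_event fields for the CPU-measurement sampling facility: event_base holds the sampling rate, config_base the sampling flags, last_tag the table-entry address, and so on. A minimal sketch of how a driver might go through the aliases (sf_init_event() is a hypothetical helper, not code from this header):

/* Hedged sketch: set sampling state through the field aliases above
 * rather than touching hwc->event_base/config_base directly.
 * sf_init_event() is illustrative, not part of perf_event.h. */
static void sf_init_event(struct hw_perf_event *hwc, u64 rate, u64 flags)
{
	SAMPL_RATE(hwc)  = rate;	/* stored in hwc->event_base */
	SAMPL_FLAGS(hwc) = flags;	/* stored in hwc->config_base */

	if (SAMPL_DIAG_MODE(hwc))	/* flag test is a plain bit-and */
		pr_debug("diagnostic-sampling mode requested\n");
}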
/arch/alpha/kernel/
perf_event.c
253 struct hw_perf_event *hwc, int idx) in alpha_perf_event_set_period() argument
255 long left = local64_read(&hwc->period_left); in alpha_perf_event_set_period()
256 long period = hwc->sample_period; in alpha_perf_event_set_period()
261 local64_set(&hwc->period_left, left); in alpha_perf_event_set_period()
262 hwc->last_period = period; in alpha_perf_event_set_period()
268 local64_set(&hwc->period_left, left); in alpha_perf_event_set_period()
269 hwc->last_period = period; in alpha_perf_event_set_period()
283 local64_set(&hwc->prev_count, (unsigned long)(-left)); in alpha_perf_event_set_period()
308 struct hw_perf_event *hwc, int idx, long ovf) in alpha_perf_event_update() argument
314 prev_raw_count = local64_read(&hwc->prev_count); in alpha_perf_event_update()
[all …]
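alpha_perf_event_set_period() follows the reload pattern shared by nearly every driver in these results: clamp period_left back into range, record last_period, then program the counter with -left so it overflows after exactly left more events. A hedged sketch of that pattern (write_counter() and MAX_PERIOD are placeholders for the arch-specific parts, not the actual alpha code):

/* Sketch of the common set_period pattern. */
static int sketch_event_set_period(struct hw_perf_event *hwc, int idx)
{
	long left = local64_read(&hwc->period_left);
	long period = hwc->sample_period;
	int overflow = 0;

	if (unlikely(left <= -period)) {	/* missed whole periods */
		left = period;
		local64_set(&hwc->period_left, left);
		hwc->last_period = period;
		overflow = 1;
	}
	if (unlikely(left <= 0)) {		/* period expired */
		left += period;
		local64_set(&hwc->period_left, left);
		hwc->last_period = period;
		overflow = 1;
	}
	if (left > MAX_PERIOD)			/* placeholder limit */
		left = MAX_PERIOD;

	/* Program -left so the hardware counter overflows after
	 * 'left' more events. */
	local64_set(&hwc->prev_count, (unsigned long)(-left));
	write_counter(idx, (unsigned long)(-left));	/* placeholder */
	return overflow;
}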
/arch/arc/kernel/
perf_event.c
119 struct hw_perf_event *hwc, int idx) in arc_perf_event_update() argument
121 u64 prev_raw_count = local64_read(&hwc->prev_count); in arc_perf_event_update()
129 local64_set(&hwc->prev_count, new_raw_count); in arc_perf_event_update()
131 local64_sub(delta, &hwc->period_left); in arc_perf_event_update()
169 struct hw_perf_event *hwc = &event->hw; in arc_pmu_event_init() local
173 hwc->sample_period = arc_pmu->max_period; in arc_pmu_event_init()
174 hwc->last_period = hwc->sample_period; in arc_pmu_event_init()
175 local64_set(&hwc->period_left, hwc->sample_period); in arc_pmu_event_init()
178 hwc->config = 0; in arc_pmu_event_init()
183 hwc->config |= ARC_REG_PCT_CONFIG_KERN; in arc_pmu_event_init()
[all …]
/arch/x86/events/amd/
iommu.c
208 struct hw_perf_event *hwc = &event->hw; in perf_iommu_event_init() local
226 hwc->conf = event->attr.config; in perf_iommu_event_init()
227 hwc->conf1 = event->attr.config1; in perf_iommu_event_init()
240 struct hw_perf_event *hwc = &ev->hw; in perf_iommu_enable_event() local
241 u8 bank = hwc->iommu_bank; in perf_iommu_enable_event()
242 u8 cntr = hwc->iommu_cntr; in perf_iommu_enable_event()
245 reg = GET_CSOURCE(hwc); in perf_iommu_enable_event()
248 reg = GET_DEVID_MASK(hwc); in perf_iommu_enable_event()
249 reg = GET_DEVID(hwc) | (reg << 32); in perf_iommu_enable_event()
254 reg = GET_PASID_MASK(hwc); in perf_iommu_enable_event()
[all …]
ibs.c
105 perf_event_set_period(struct hw_perf_event *hwc, u64 min, u64 max, u64 *hw_period) in perf_event_set_period() argument
107 s64 left = local64_read(&hwc->period_left); in perf_event_set_period()
108 s64 period = hwc->sample_period; in perf_event_set_period()
116 local64_set(&hwc->period_left, left); in perf_event_set_period()
117 hwc->last_period = period; in perf_event_set_period()
123 local64_set(&hwc->period_left, left); in perf_event_set_period()
124 hwc->last_period = period; in perf_event_set_period()
150 struct hw_perf_event *hwc = &event->hw; in perf_event_try_update() local
162 prev_raw_count = local64_read(&hwc->prev_count); in perf_event_try_update()
163 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in perf_event_try_update()
[all …]
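perf_event_try_update() is one instance of the lock-free readout used across these drivers: snapshot prev_count, read the hardware counter, then publish the new snapshot with a compare-and-exchange so an NMI that fires mid-update cannot double-count the delta. A hedged sketch of the looping variant (read_counter() and COUNTER_BITS are assumed stand-ins, not the real IBS definitions):

/* Sketch of the lock-free counter-update pattern. */
#define COUNTER_BITS 48			/* assumed counter width */

static u64 sketch_event_update(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	u64 prev_raw_count, new_raw_count;
	s64 delta;

	do {
		prev_raw_count = local64_read(&hwc->prev_count);
		new_raw_count = read_counter(hwc->idx);	/* placeholder */
	} while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
				 new_raw_count) != prev_raw_count);

	/* Sign-extend the delta to the counter width before accumulating. */
	delta = (new_raw_count << (64 - COUNTER_BITS)) -
		(prev_raw_count << (64 - COUNTER_BITS));
	delta >>= (64 - COUNTER_BITS);

	local64_add(delta, &event->count);
	local64_sub(delta, &hwc->period_left);
	return new_raw_count;
}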
uncore.c
84 struct hw_perf_event *hwc = &event->hw; in amd_uncore_read() local
93 prev = local64_read(&hwc->prev_count); in amd_uncore_read()
94 rdpmcl(hwc->event_base_rdpmc, new); in amd_uncore_read()
95 local64_set(&hwc->prev_count, new); in amd_uncore_read()
103 struct hw_perf_event *hwc = &event->hw; in amd_uncore_start() local
106 wrmsrl(hwc->event_base, (u64)local64_read(&hwc->prev_count)); in amd_uncore_start()
108 hwc->state = 0; in amd_uncore_start()
109 wrmsrl(hwc->config_base, (hwc->config | ARCH_PERFMON_EVENTSEL_ENABLE)); in amd_uncore_start()
115 struct hw_perf_event *hwc = &event->hw; in amd_uncore_stop() local
117 wrmsrl(hwc->config_base, hwc->config); in amd_uncore_stop()
[all …]
power.c
45 struct hw_perf_event *hwc = &event->hw; in event_update() local
49 prev_pwr_acc = hwc->pwr_acc; in event_update()
50 prev_ptsc = hwc->ptsc; in event_update()
89 struct hw_perf_event *hwc = &event->hw; in pmu_event_stop() local
92 if (!(hwc->state & PERF_HES_STOPPED)) in pmu_event_stop()
93 hwc->state |= PERF_HES_STOPPED; in pmu_event_stop()
96 if ((mode & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) { in pmu_event_stop()
102 hwc->state |= PERF_HES_UPTODATE; in pmu_event_stop()
108 struct hw_perf_event *hwc = &event->hw; in pmu_event_add() local
110 hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED; in pmu_event_add()
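pmu_event_stop() and pmu_event_add() above follow the perf core's hardware-event state convention: PERF_HES_STOPPED marks the counter disabled, PERF_HES_UPTODATE marks event->count current, and PERF_EF_UPDATE asks stop() to fold in a final reading. A hedged sketch of the convention (disable_counter() and event_update() are placeholder helpers):

/* Sketch of the PERF_HES_* convention shared by these drivers. */
static void sketch_pmu_stop(struct perf_event *event, int mode)
{
	struct hw_perf_event *hwc = &event->hw;

	if (!(hwc->state & PERF_HES_STOPPED)) {
		disable_counter(hwc->idx);	/* placeholder: stop counting */
		hwc->state |= PERF_HES_STOPPED;
	}
	if ((mode & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
		event_update(event);		/* placeholder: fold in value */
		hwc->state |= PERF_HES_UPTODATE;
	}
}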
core.c
312 static inline unsigned int amd_get_event_code(struct hw_perf_event *hwc) in amd_get_event_code() argument
314 return ((hwc->config >> 24) & 0x0f00) | (hwc->config & 0x00ff); in amd_get_event_code()
317 static inline bool amd_is_pair_event_code(struct hw_perf_event *hwc) in amd_is_pair_event_code() argument
322 switch (amd_get_event_code(hwc)) { in amd_is_pair_event_code()
349 static inline int amd_is_nb_event(struct hw_perf_event *hwc) in amd_is_nb_event() argument
351 return (hwc->config & 0xe0) == 0xe0; in amd_is_nb_event()
442 struct hw_perf_event *hwc = &event->hw; in __amd_get_nb_event_constraints() local
464 if (new == -1 || hwc->idx == idx) in __amd_get_nb_event_constraints()
808 struct hw_perf_event *hwc = &event->hw; in amd_get_event_constraints_f15h() local
809 unsigned int event_code = amd_get_event_code(hwc); in amd_get_event_constraints_f15h()
[all …]
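For reference, amd_get_event_code() reassembles the 12-bit AMD event code that PERFCTL splits across the config word: bits [35:32] of hwc->config supply event-code bits [11:8], and bits [7:0] the low byte. A worked example (the config value is made up for illustration):

/* config = 0x4000000e0: field [35:32] = 0x4, low byte = 0xe0.
 * amd_get_event_code(): ((0x4000000e0 >> 24) & 0x0f00) | 0xe0
 *                     = 0x0400 | 0x00e0 = 0x4e0.
 * amd_is_nb_event():    (0xe0 & 0xe0) == 0xe0, so this config would
 *                       also classify as a northbridge event. */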
/arch/xtensa/kernel/
perf_event.c
146 struct hw_perf_event *hwc, int idx) in xtensa_perf_event_update() argument
152 prev_raw_count = local64_read(&hwc->prev_count); in xtensa_perf_event_update()
154 } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in xtensa_perf_event_update()
160 local64_sub(delta, &hwc->period_left); in xtensa_perf_event_update()
164 struct hw_perf_event *hwc, int idx) in xtensa_perf_event_set_period() argument
172 s64 period = hwc->sample_period; in xtensa_perf_event_set_period()
174 left = local64_read(&hwc->period_left); in xtensa_perf_event_set_period()
177 local64_set(&hwc->period_left, left); in xtensa_perf_event_set_period()
178 hwc->last_period = period; in xtensa_perf_event_set_period()
182 local64_set(&hwc->period_left, left); in xtensa_perf_event_set_period()
[all …]
/arch/riscv/kernel/
perf_event.c
216 struct hw_perf_event *hwc = &event->hw; in riscv_pmu_read() local
219 int idx = hwc->idx; in riscv_pmu_read()
223 prev_raw_count = local64_read(&hwc->prev_count); in riscv_pmu_read()
226 oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count, in riscv_pmu_read()
253 struct hw_perf_event *hwc = &event->hw; in riscv_pmu_stop() local
255 WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED); in riscv_pmu_stop()
256 hwc->state |= PERF_HES_STOPPED; in riscv_pmu_stop()
258 if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) { in riscv_pmu_stop()
260 hwc->state |= PERF_HES_UPTODATE; in riscv_pmu_stop()
269 struct hw_perf_event *hwc = &event->hw; in riscv_pmu_start() local
[all …]
/arch/sh/kernel/
perf_event.c
103 struct hw_perf_event *hwc = &event->hw; in __hw_perf_event_init() local
153 hwc->config |= config; in __hw_perf_event_init()
159 struct hw_perf_event *hwc, int idx) in sh_perf_event_update() argument
178 prev_raw_count = local64_read(&hwc->prev_count); in sh_perf_event_update()
181 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in sh_perf_event_update()
202 struct hw_perf_event *hwc = &event->hw; in sh_pmu_stop() local
203 int idx = hwc->idx; in sh_pmu_stop()
206 sh_pmu->disable(hwc, idx); in sh_pmu_stop()
220 struct hw_perf_event *hwc = &event->hw; in sh_pmu_start() local
221 int idx = hwc->idx; in sh_pmu_start()
[all …]
/arch/nds32/kernel/
perf_event_cpu.c
186 struct hw_perf_event *hwc = &event->hw; in nds32_pmu_event_set_period() local
187 s64 left = local64_read(&hwc->period_left); in nds32_pmu_event_set_period()
188 s64 period = hwc->sample_period; in nds32_pmu_event_set_period()
192 if (unlikely(period != hwc->last_period)) in nds32_pmu_event_set_period()
193 left = period - (hwc->last_period - left); in nds32_pmu_event_set_period()
197 local64_set(&hwc->period_left, left); in nds32_pmu_event_set_period()
198 hwc->last_period = period; in nds32_pmu_event_set_period()
204 local64_set(&hwc->period_left, left); in nds32_pmu_event_set_period()
205 hwc->last_period = period; in nds32_pmu_event_set_period()
216 local64_set(&hwc->prev_count, (u64)(-left)); in nds32_pmu_event_set_period()
[all …]
/arch/arm/kernel/
perf_event_xscale.c
175 struct hw_perf_event *hwc; in xscale1pmu_handle_irq() local
183 hwc = &event->hw; in xscale1pmu_handle_irq()
185 perf_sample_data_init(&data, 0, hwc->last_period); in xscale1pmu_handle_irq()
208 struct hw_perf_event *hwc = &event->hw; in xscale1pmu_enable_event() local
210 int idx = hwc->idx; in xscale1pmu_enable_event()
219 evt = (hwc->config_base << XSCALE1_COUNT0_EVT_SHFT) | in xscale1pmu_enable_event()
224 evt = (hwc->config_base << XSCALE1_COUNT1_EVT_SHFT) | in xscale1pmu_enable_event()
244 struct hw_perf_event *hwc = &event->hw; in xscale1pmu_disable_event() local
246 int idx = hwc->idx; in xscale1pmu_disable_event()
278 struct hw_perf_event *hwc = &event->hw; in xscale1pmu_get_event_idx() local
[all …]
perf_event_v6.c
238 struct hw_perf_event *hwc = &event->hw; in armv6pmu_read_counter() local
239 int counter = hwc->idx; in armv6pmu_read_counter()
256 struct hw_perf_event *hwc = &event->hw; in armv6pmu_write_counter() local
257 int counter = hwc->idx; in armv6pmu_write_counter()
273 struct hw_perf_event *hwc = &event->hw; in armv6pmu_enable_event() local
275 int idx = hwc->idx; in armv6pmu_enable_event()
282 evt = (hwc->config_base << ARMV6_PMCR_EVT_COUNT0_SHIFT) | in armv6pmu_enable_event()
286 evt = (hwc->config_base << ARMV6_PMCR_EVT_COUNT1_SHIFT) | in armv6pmu_enable_event()
328 struct hw_perf_event *hwc; in armv6pmu_handle_irq() local
341 hwc = &event->hw; in armv6pmu_handle_irq()
[all …]
perf_event_v7.c
749 struct hw_perf_event *hwc = &event->hw; in armv7pmu_read_counter() local
750 int idx = hwc->idx; in armv7pmu_read_counter()
769 struct hw_perf_event *hwc = &event->hw; in armv7pmu_write_counter() local
770 int idx = hwc->idx; in armv7pmu_write_counter()
874 struct hw_perf_event *hwc = &event->hw; in armv7pmu_enable_event() local
877 int idx = hwc->idx; in armv7pmu_enable_event()
902 armv7_pmnc_write_evtsel(idx, hwc->config_base); in armv7pmu_enable_event()
920 struct hw_perf_event *hwc = &event->hw; in armv7pmu_disable_event() local
923 int idx = hwc->idx; in armv7pmu_disable_event()
975 struct hw_perf_event *hwc; in armv7pmu_handle_irq() local
[all …]
/arch/x86/events/intel/
uncore_nhmex.c
247 struct hw_perf_event *hwc = &event->hw; in nhmex_uncore_msr_enable_event() local
249 if (hwc->idx == UNCORE_PMC_IDX_FIXED) in nhmex_uncore_msr_enable_event()
250 wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0); in nhmex_uncore_msr_enable_event()
252 wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22); in nhmex_uncore_msr_enable_event()
254 wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0); in nhmex_uncore_msr_enable_event()
352 struct hw_perf_event *hwc = &event->hw; in nhmex_bbox_hw_config() local
353 struct hw_perf_event_extra *reg1 = &hwc->extra_reg; in nhmex_bbox_hw_config()
354 struct hw_perf_event_extra *reg2 = &hwc->branch_reg; in nhmex_bbox_hw_config()
357 ctr = (hwc->config & NHMEX_B_PMON_CTR_MASK) >> in nhmex_bbox_hw_config()
359 ev_sel = (hwc->config & NHMEX_B_PMON_CTL_EV_SEL_MASK) >> in nhmex_bbox_hw_config()
[all …]
p4.c
856 static inline int p4_pmu_clear_cccr_ovf(struct hw_perf_event *hwc) in p4_pmu_clear_cccr_ovf() argument
861 rdmsrl(hwc->config_base, v); in p4_pmu_clear_cccr_ovf()
863 wrmsrl(hwc->config_base, v & ~P4_CCCR_OVF); in p4_pmu_clear_cccr_ovf()
874 rdmsrl(hwc->event_base, v); in p4_pmu_clear_cccr_ovf()
906 struct hw_perf_event *hwc = &event->hw; in p4_pmu_disable_event() local
913 (void)wrmsrl_safe(hwc->config_base, in p4_pmu_disable_event()
914 p4_config_unpack_cccr(hwc->config) & ~P4_CCCR_ENABLE & ~P4_CCCR_OVF & ~P4_CCCR_RESERVED); in p4_pmu_disable_event()
952 struct hw_perf_event *hwc = &event->hw; in __p4_pmu_enable_event() local
953 int thread = p4_ht_config_thread(hwc->config); in __p4_pmu_enable_event()
954 u64 escr_conf = p4_config_unpack_escr(p4_clear_ht_bit(hwc->config)); in __p4_pmu_enable_event()
[all …]
uncore_discovery.c
370 struct hw_perf_event *hwc = &event->hw; in intel_generic_uncore_msr_enable_event() local
372 wrmsrl(hwc->config_base, hwc->config); in intel_generic_uncore_msr_enable_event()
378 struct hw_perf_event *hwc = &event->hw; in intel_generic_uncore_msr_disable_event() local
380 wrmsrl(hwc->config_base, 0); in intel_generic_uncore_msr_disable_event()
421 struct hw_perf_event *hwc = &event->hw; in intel_generic_uncore_pci_enable_event() local
423 pci_write_config_dword(pdev, hwc->config_base, hwc->config); in intel_generic_uncore_pci_enable_event()
430 struct hw_perf_event *hwc = &event->hw; in intel_generic_uncore_pci_disable_event() local
432 pci_write_config_dword(pdev, hwc->config_base, 0); in intel_generic_uncore_pci_disable_event()
439 struct hw_perf_event *hwc = &event->hw; in intel_generic_uncore_pci_read_counter() local
442 pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count); in intel_generic_uncore_pci_read_counter()
[all …]
knc.c
179 struct hw_perf_event *hwc = &event->hw; in knc_pmu_disable_event() local
182 val = hwc->config; in knc_pmu_disable_event()
185 (void)wrmsrl_safe(hwc->config_base + hwc->idx, val); in knc_pmu_disable_event()
190 struct hw_perf_event *hwc = &event->hw; in knc_pmu_enable_event() local
193 val = hwc->config; in knc_pmu_enable_event()
196 (void)wrmsrl_safe(hwc->config_base + hwc->idx, val); in knc_pmu_enable_event()
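knc_pmu_{enable,disable}_event() show the MSR-based on/off idiom that also appears in uncore.c above: write hwc->config back to the event-select register with the enable bit set or cleared, using wrmsrl_safe() so a missing MSR fails gracefully instead of faulting. A condensed hedged sketch (the enable-bit name is the generic x86 define; treat the helper as illustrative):

/* Hedged sketch of the enable/disable idiom, not the exact KNC code. */
static void sketch_pmu_set_enabled(struct hw_perf_event *hwc, bool on)
{
	u64 val = hwc->config;

	if (on)
		val |= ARCH_PERFMON_EVENTSEL_ENABLE;
	else
		val &= ~ARCH_PERFMON_EVENTSEL_ENABLE;

	/* wrmsrl_safe() tolerates a non-existent MSR. */
	(void)wrmsrl_safe(hwc->config_base + hwc->idx, val);
}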
/arch/s390/kernel/
perf_cpum_sf.c
332 static unsigned long sfb_max_limit(struct hw_perf_event *hwc) in sfb_max_limit() argument
334 return SAMPL_DIAG_MODE(hwc) ? CPUM_SF_MAX_SDB * CPUM_SF_SDB_DIAG_FACTOR in sfb_max_limit()
339 struct hw_perf_event *hwc) in sfb_pending_allocs() argument
342 return SFB_ALLOC_REG(hwc); in sfb_pending_allocs()
343 if (SFB_ALLOC_REG(hwc) > sfb->num_sdb) in sfb_pending_allocs()
344 return SFB_ALLOC_REG(hwc) - sfb->num_sdb; in sfb_pending_allocs()
349 struct hw_perf_event *hwc) in sfb_has_pending_allocs() argument
351 return sfb_pending_allocs(sfb, hwc) > 0; in sfb_has_pending_allocs()
354 static void sfb_account_allocs(unsigned long num, struct hw_perf_event *hwc) in sfb_account_allocs() argument
357 num = min_t(unsigned long, num, sfb_max_limit(hwc) - SFB_ALLOC_REG(hwc)); in sfb_account_allocs()
[all …]
/arch/x86/events/
core.c
109 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_update() local
114 if (unlikely(!hwc->event_base)) in x86_perf_event_update()
128 prev_raw_count = local64_read(&hwc->prev_count); in x86_perf_event_update()
129 rdpmcl(hwc->event_base_rdpmc, new_raw_count); in x86_perf_event_update()
131 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in x86_perf_event_update()
147 local64_sub(delta, &hwc->period_left); in x86_perf_event_update()
357 set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event) in set_ext_hw_attr() argument
387 hwc->config |= val; in set_ext_hw_attr()
474 struct hw_perf_event *hwc = &event->hw; in x86_setup_perfctr() local
478 hwc->sample_period = x86_pmu.max_period; in x86_setup_perfctr()
[all …]
/arch/mips/kernel/
perf_event_mipsxx.c
315 struct hw_perf_event *hwc) in mipsxx_pmu_alloc_counter() argument
325 cntr_mask = (hwc->event_base >> 10) & 0xffff; in mipsxx_pmu_alloc_counter()
327 cntr_mask = (hwc->event_base >> 8) & 0xffff; in mipsxx_pmu_alloc_counter()
410 struct hw_perf_event *hwc, in mipspmu_event_set_period() argument
413 u64 left = local64_read(&hwc->period_left); in mipspmu_event_set_period()
414 u64 period = hwc->sample_period; in mipspmu_event_set_period()
420 local64_set(&hwc->period_left, left); in mipspmu_event_set_period()
421 hwc->last_period = period; in mipspmu_event_set_period()
426 local64_set(&hwc->period_left, left); in mipspmu_event_set_period()
427 hwc->last_period = period; in mipspmu_event_set_period()
[all …]
/arch/x86/events/zhaoxin/
core.c
289 static void zhaoxin_pmu_disable_fixed(struct hw_perf_event *hwc) in zhaoxin_pmu_disable_fixed() argument
291 int idx = hwc->idx - INTEL_PMC_IDX_FIXED; in zhaoxin_pmu_disable_fixed()
296 rdmsrl(hwc->config_base, ctrl_val); in zhaoxin_pmu_disable_fixed()
298 wrmsrl(hwc->config_base, ctrl_val); in zhaoxin_pmu_disable_fixed()
303 struct hw_perf_event *hwc = &event->hw; in zhaoxin_pmu_disable_event() local
305 if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) { in zhaoxin_pmu_disable_event()
306 zhaoxin_pmu_disable_fixed(hwc); in zhaoxin_pmu_disable_event()
313 static void zhaoxin_pmu_enable_fixed(struct hw_perf_event *hwc) in zhaoxin_pmu_enable_fixed() argument
315 int idx = hwc->idx - INTEL_PMC_IDX_FIXED; in zhaoxin_pmu_enable_fixed()
324 if (hwc->config & ARCH_PERFMON_EVENTSEL_USR) in zhaoxin_pmu_enable_fixed()
[all …]
/arch/csky/kernel/
perf_event.c
880 struct hw_perf_event *hwc = &event->hw; in csky_pmu_event_set_period() local
881 s64 left = local64_read(&hwc->period_left); in csky_pmu_event_set_period()
882 s64 period = hwc->sample_period; in csky_pmu_event_set_period()
887 local64_set(&hwc->period_left, left); in csky_pmu_event_set_period()
888 hwc->last_period = period; in csky_pmu_event_set_period()
894 local64_set(&hwc->period_left, left); in csky_pmu_event_set_period()
895 hwc->last_period = period; in csky_pmu_event_set_period()
906 local64_set(&hwc->prev_count, (u64)(-left)); in csky_pmu_event_set_period()
908 if (hw_raw_write_mapping[hwc->idx] != NULL) in csky_pmu_event_set_period()
909 hw_raw_write_mapping[hwc->idx]((u64)(-left) & in csky_pmu_event_set_period()
[all …]
/arch/sparc/kernel/
perf_event.c
827 static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in… in sparc_pmu_enable_event() argument
845 static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i… in sparc_pmu_disable_event() argument
864 struct hw_perf_event *hwc, int idx) in sparc_perf_event_update() argument
871 prev_raw_count = local64_read(&hwc->prev_count); in sparc_perf_event_update()
874 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in sparc_perf_event_update()
882 local64_sub(delta, &hwc->period_left); in sparc_perf_event_update()
888 struct hw_perf_event *hwc, int idx) in sparc_perf_event_set_period() argument
890 s64 left = local64_read(&hwc->period_left); in sparc_perf_event_set_period()
891 s64 period = hwc->sample_period; in sparc_perf_event_set_period()
895 if (unlikely(period != hwc->last_period)) in sparc_perf_event_set_period()
[all …]
