
Searched refs:ev (Results 1 – 18 of 18) sorted by relevance

/arch/x86/kernel/cpu/
perf_event_amd_iommu.c
24 #define _GET_BANK(ev) ((u8)(ev->hw.extra_reg.reg >> 8)) argument
25 #define _GET_CNTR(ev) ((u8)(ev->hw.extra_reg.reg)) argument
28 #define _GET_CSOURCE(ev) ((ev->hw.config & 0xFFULL)) argument
29 #define _GET_DEVID(ev) ((ev->hw.config >> 8) & 0xFFFFULL) argument
30 #define _GET_PASID(ev) ((ev->hw.config >> 24) & 0xFFFFULL) argument
31 #define _GET_DOMID(ev) ((ev->hw.config >> 40) & 0xFFFFULL) argument
32 #define _GET_DEVID_MASK(ev) ((ev->hw.extra_reg.config) & 0xFFFFULL) argument
33 #define _GET_PASID_MASK(ev) ((ev->hw.extra_reg.config >> 16) & 0xFFFFULL) argument
34 #define _GET_DOMID_MASK(ev) ((ev->hw.extra_reg.config >> 32) & 0xFFFFULL) argument
253 static void perf_iommu_enable_event(struct perf_event *ev) in perf_iommu_enable_event() argument
[all …]
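The macros above decode a packed AMD IOMMU PMU event: ev->hw.config carries csource (bits 0-7), devid (8-23), pasid (24-39) and domid (40-55), while ev->hw.extra_reg.config holds the corresponding masks and ev->hw.extra_reg.reg the bank/counter selection. Below is a minimal user-space sketch of the same bit layout; struct fake_event and the get_* helpers are invented stand-ins for struct perf_event, not the kernel's API.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the fields the _GET_* macros read; the real code uses
 * struct perf_event (ev->hw.config and ev->hw.extra_reg.reg). */
struct fake_event {
	uint64_t config;     /* csource | devid | pasid | domid, packed as above */
	uint64_t extra_reg;  /* counter index in bits 0-7, bank index in bits 8-15 */
};

/* Same shifts and widths as the macros in perf_event_amd_iommu.c. */
static uint64_t get_csource(const struct fake_event *ev) { return ev->config & 0xFFULL; }
static uint64_t get_devid(const struct fake_event *ev)   { return (ev->config >> 8)  & 0xFFFFULL; }
static uint64_t get_pasid(const struct fake_event *ev)   { return (ev->config >> 24) & 0xFFFFULL; }
static uint64_t get_domid(const struct fake_event *ev)   { return (ev->config >> 40) & 0xFFFFULL; }
static uint8_t  get_bank(const struct fake_event *ev)    { return (uint8_t)(ev->extra_reg >> 8); }
static uint8_t  get_cntr(const struct fake_event *ev)    { return (uint8_t)ev->extra_reg; }

int main(void)
{
	/* Pack a hypothetical event: csource 0x12, devid 0xbeef, pasid 0x42,
	 * domid 0x7, counted on bank 3, counter 1. */
	struct fake_event ev = {
		.config    = 0x12ULL | (0xbeefULL << 8) | (0x42ULL << 24) | (0x7ULL << 40),
		.extra_reg = (3 << 8) | 1,
	};

	printf("csource=%#llx devid=%#llx pasid=%#llx domid=%#llx bank=%u cntr=%u\n",
	       (unsigned long long)get_csource(&ev),
	       (unsigned long long)get_devid(&ev),
	       (unsigned long long)get_pasid(&ev),
	       (unsigned long long)get_domid(&ev),
	       (unsigned)get_bank(&ev), (unsigned)get_cntr(&ev));
	return 0;
}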
/arch/s390/appldata/
appldata_mem.c
82 unsigned long ev[NR_VM_EVENT_ITEMS]; in appldata_get_mem_data() local
88 all_vm_events(ev); in appldata_get_mem_data()
89 mem_data->pgpgin = ev[PGPGIN] >> 1; in appldata_get_mem_data()
90 mem_data->pgpgout = ev[PGPGOUT] >> 1; in appldata_get_mem_data()
91 mem_data->pswpin = ev[PSWPIN]; in appldata_get_mem_data()
92 mem_data->pswpout = ev[PSWPOUT]; in appldata_get_mem_data()
93 mem_data->pgalloc = ev[PGALLOC_NORMAL]; in appldata_get_mem_data()
94 mem_data->pgalloc += ev[PGALLOC_DMA]; in appldata_get_mem_data()
95 mem_data->pgfault = ev[PGFAULT]; in appldata_get_mem_data()
96 mem_data->pgmajfault = ev[PGMAJFAULT]; in appldata_get_mem_data()
/arch/arm64/crypto/
sha1-ce-core.S
37 .macro add_only, op, ev, rc, s0, dg1
38 .ifc \ev, ev
55 .macro add_update, op, ev, rc, s0, s1, s2, s3, dg1
57 add_only \op, \ev, \rc, \s1, \dg1
102 add_update c, ev, k0, 8, 9, 10, 11, dgb
104 add_update c, ev, k0, 10, 11, 8, 9
106 add_update c, ev, k1, 8, 9, 10, 11
109 add_update p, ev, k1, 10, 11, 8, 9
111 add_update p, ev, k1, 8, 9, 10, 11
114 add_update m, ev, k2, 10, 11, 8, 9
[all …]
sha2-ce-core.S
32 .macro add_only, ev, rc, s0
34 .ifeq \ev
47 .macro add_update, ev, rc, s0, s1, s2, s3
49 add_only \ev, \rc, \s1
/arch/arm/crypto/
sha2-ce-core.S
34 .macro add_only, ev, s0
37 vld1.32 {k\ev}, [rk, :128]!
39 sha256h.32 dg0, dg1, tb\ev
40 sha256h2.32 dg1, dg2, tb\ev
42 vadd.u32 ta\ev, q\s0, k\ev
46 .macro add_update, ev, s0, s1, s2, s3
48 add_only \ev, \s1
/arch/powerpc/platforms/cell/
beat.c
214 struct beat_event_list *ev = &beat_event_list[i]; in beat_register_event() local
219 ev->typecode); in beat_register_event()
227 ev->typecode); in beat_register_event()
231 ev->virq = virq; in beat_register_event()
233 rc = request_irq(virq, ev->handler, 0, in beat_register_event()
234 ev->typecode, NULL); in beat_register_event()
238 ev->typecode); in beat_register_event()
246 strncpy((char *)&path[2], ev->typecode, 8); in beat_register_event()
/arch/powerpc/perf/
mpc7450-pmu.c
272 u32 ev, pmc, thresh; in mpc7450_compute_mmcr() local
291 ev = event[event_index[class][i]]; in mpc7450_compute_mmcr()
293 pmc = (ev >> PM_PMC_SH) & PM_PMC_MSK; in mpc7450_compute_mmcr()
305 tuse = mpc7450_threshold_use(ev); in mpc7450_compute_mmcr()
307 thresh = (ev >> PM_THRESH_SH) & PM_THRESH_MSK; in mpc7450_compute_mmcr()
309 if (tuse == 2 && (ev & PM_THRMULT_MSKS)) in mpc7450_compute_mmcr()
312 ev &= pmcsel_mask[pmc - 1]; in mpc7450_compute_mmcr()
313 ev <<= pmcsel_shift[pmc - 1]; in mpc7450_compute_mmcr()
315 mmcr0 |= ev; in mpc7450_compute_mmcr()
317 mmcr1 |= ev; in mpc7450_compute_mmcr()
core-fsl-emb.c
456 int ev; in hw_perf_cache_event() local
471 ev = (*ppmu->cache_events)[type][op][result]; in hw_perf_cache_event()
472 if (ev == 0) in hw_perf_cache_event()
474 if (ev == -1) in hw_perf_cache_event()
476 *eventp = ev; in hw_perf_cache_event()
482 u64 ev; in fsl_emb_pmu_event_init() local
497 ev = event->attr.config; in fsl_emb_pmu_event_init()
498 if (ev >= ppmu->n_generic || ppmu->generic_events[ev] == 0) in fsl_emb_pmu_event_init()
500 ev = ppmu->generic_events[ev]; in fsl_emb_pmu_event_init()
504 err = hw_perf_cache_event(event->attr.config, &ev); in fsl_emb_pmu_event_init()
[all …]
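The hw_perf_cache_event() hits here and in the sh, blackfin, metag and core-book3s.c results below all share one pattern: the (type, op, result) triple of a generic cache event indexes a per-PMU table, where 0 marks an unsupported combination, -1 an invalid one, and anything else is the raw event code. The following self-contained sketch mirrors that lookup; the table contents, dimensions and error codes are invented for illustration.

#include <errno.h>
#include <stdio.h>

/* Dimensions stand in for the perf cache-event axes (type x op x result);
 * the sizes and codes below are made up. */
#define N_TYPES   3
#define N_OPS     3
#define N_RESULTS 2

static const int cache_events[N_TYPES][N_OPS][N_RESULTS] = {
	/* type 0: read/write supported, third op not counted (0) */
	{ { 0x10, 0x11 }, { 0x12, 0x13 }, {  0,  0 } },
	/* type 1: second op is an invalid combination (-1) */
	{ { 0x20, 0x21 }, {  -1,  -1 }, { 0x24, 0x25 } },
	/* type 2: nothing supported on this made-up PMU */
	{ {  0,  0 }, {  0,  0 }, {  0,  0 } },
};

/* Mirrors the flow of the hw_perf_cache_event() hits: 0 -> not supported,
 * -1 -> invalid combination, anything else is the hardware event code. */
static int cache_event_to_code(unsigned int type, unsigned int op,
			       unsigned int result, int *codep)
{
	int ev;

	if (type >= N_TYPES || op >= N_OPS || result >= N_RESULTS)
		return -EINVAL;

	ev = cache_events[type][op][result];
	if (ev == 0)
		return -EOPNOTSUPP;
	if (ev == -1)
		return -EINVAL;

	*codep = ev;
	return 0;
}

int main(void)
{
	int code;

	if (cache_event_to_code(0, 1, 1, &code) == 0)
		printf("type 0, op 1, result 1 -> event code %#x\n", code);
	if (cache_event_to_code(0, 2, 0, &code) == -EOPNOTSUPP)
		printf("type 0, op 2: not supported\n");
	if (cache_event_to_code(1, 1, 0, &code) == -EINVAL)
		printf("type 1, op 1: invalid combination\n");
	return 0;
}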
power6-pmu.c
183 unsigned int pmc, ev, b, u, s, psel; in p6_compute_mmcr() local
198 ev = event[i]; in p6_compute_mmcr()
199 pmc = (ev >> PM_PMC_SH) & PM_PMC_MSK; in p6_compute_mmcr()
212 psel = ev & PM_PMCSEL_MSK; in p6_compute_mmcr()
213 if (ev & PM_BUSEVENT_MSK) { in p6_compute_mmcr()
215 b = (ev >> PM_BYTE_SH) & PM_BYTE_MSK; in p6_compute_mmcr()
216 u = (ev >> PM_UNIT_SH) & PM_UNIT_MSK; in p6_compute_mmcr()
224 s = (ev >> PM_SUBUNIT_SH) & PM_SUBUNIT_MSK; in p6_compute_mmcr()
240 if (ev & PM_LLA) { in p6_compute_mmcr()
242 if (ev & PM_LLAV) in p6_compute_mmcr()
core-book3s.c
1646 static int can_go_on_limited_pmc(struct perf_event *event, u64 ev, in can_go_on_limited_pmc() argument
1658 if (ppmu->limited_pmc_event(ev)) in can_go_on_limited_pmc()
1669 n = ppmu->get_alternatives(ev, flags, alt); in can_go_on_limited_pmc()
1679 static u64 normal_pmc_alternative(u64 ev, unsigned long flags) in normal_pmc_alternative() argument
1685 n = ppmu->get_alternatives(ev, flags, alt); in normal_pmc_alternative()
1715 int ev; in hw_perf_cache_event() local
1730 ev = (*ppmu->cache_events)[type][op][result]; in hw_perf_cache_event()
1731 if (ev == 0) in hw_perf_cache_event()
1733 if (ev == -1) in hw_perf_cache_event()
1735 *eventp = ev; in hw_perf_cache_event()
[all …]
/arch/s390/kernel/
perf_cpum_cf.c
328 u64 ev; in __hw_perf_event_init() local
337 ev = attr->config; in __hw_perf_event_init()
341 ev = attr->config; in __hw_perf_event_init()
344 if (ev >= ARRAY_SIZE(cpumf_generic_events_user)) in __hw_perf_event_init()
346 ev = cpumf_generic_events_user[ev]; in __hw_perf_event_init()
354 if (ev >= ARRAY_SIZE(cpumf_generic_events_basic)) in __hw_perf_event_init()
356 ev = cpumf_generic_events_basic[ev]; in __hw_perf_event_init()
364 if (ev == -1) in __hw_perf_event_init()
367 if (ev >= PERF_CPUM_CF_MAX_CTR) in __hw_perf_event_init()
375 hwc->config = ev; in __hw_perf_event_init()
[all …]
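These __hw_perf_event_init() hits show the other common mapping step: a generic event number taken from attr->config is bounds-checked against a per-PMU table, translated to a raw counter code, and rejected when the slot is empty or the code exceeds the counter range. A stand-alone sketch of that translation follows; the table contents, the limit and the specific error codes are invented, only the lookup shape comes from the hits above.

#include <errno.h>
#include <stdint.h>
#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

/* Invented per-PMU data: generic event id -> raw counter code, with
 * UINT64_MAX (i.e. (u64)-1) marking ids this PMU cannot count, and an
 * invented upper bound on valid counter codes. */
#define MAX_CTR_CODE 0x20
static const uint64_t generic_events[] = {
	0x0010,      /* id 0: countable            */
	0x0012,      /* id 1: countable            */
	UINT64_MAX,  /* id 2: hole in the table    */
	0x0040,      /* id 3: outside MAX_CTR_CODE */
};

static int map_generic_event(uint64_t config, uint64_t *hw_code)
{
	uint64_t ev = config;

	if (ev >= ARRAY_SIZE(generic_events))
		return -EOPNOTSUPP;           /* unknown generic id */
	ev = generic_events[ev];

	if (ev == UINT64_MAX)
		return -EOPNOTSUPP;           /* table hole */
	if (ev >= MAX_CTR_CODE)
		return -EINVAL;               /* outside the counter range */

	*hw_code = ev;
	return 0;
}

int main(void)
{
	uint64_t code;
	unsigned int i;

	for (i = 0; i < 5; i++) {
		if (map_generic_event(i, &code) == 0)
			printf("generic %u -> raw %#llx\n", i, (unsigned long long)code);
		else
			printf("generic %u -> rejected\n", i);
	}
	return 0;
}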
/arch/sh/kernel/
perf_event.c
97 int ev; in hw_perf_cache_event() local
112 ev = (*sh_pmu->cache_events)[type][op][result]; in hw_perf_cache_event()
113 if (ev == 0) in hw_perf_cache_event()
115 if (ev == -1) in hw_perf_cache_event()
117 *evp = ev; in hw_perf_cache_event()
/arch/x86/oprofile/
op_model_p4.c
512 struct p4_event_binding *ev = NULL; in pmc_setup_one_p4_counter() local
528 ev = &(p4_events[counter_config[ctr].event - 1]); in pmc_setup_one_p4_counter()
531 if (ev->bindings[i].virt_counter & counter_bit) { in pmc_setup_one_p4_counter()
534 rdmsr(ev->bindings[i].escr_address, escr, high); in pmc_setup_one_p4_counter()
543 ESCR_SET_EVENT_SELECT(escr, ev->event_select); in pmc_setup_one_p4_counter()
545 wrmsr(ev->bindings[i].escr_address, escr, high); in pmc_setup_one_p4_counter()
552 CCCR_SET_ESCR_SELECT(cccr, ev->escr_select); in pmc_setup_one_p4_counter()
/arch/blackfin/kernel/
perf_event.c
239 int ev; in hw_perf_cache_event() local
251 ev = cache_events[type][op][result]; in hw_perf_cache_event()
252 if (ev == 0) in hw_perf_cache_event()
254 if (ev == -1) in hw_perf_cache_event()
256 *evp = ev; in hw_perf_cache_event()
/arch/alpha/kernel/
perf_event.c
608 int ev; in __hw_perf_event_init() local
617 ev = alpha_pmu->event_map[attr->config]; in __hw_perf_event_init()
623 ev = attr->config; in __hw_perf_event_init()
628 if (ev < 0) { in __hw_perf_event_init()
629 return ev; in __hw_perf_event_init()
647 hwc->event_base = ev; in __hw_perf_event_init()
/arch/metag/kernel/perf/
perf_event.c
517 int ev; in _hw_perf_cache_event() local
532 ev = (*metag_pmu->cache_events)[type][op][result]; in _hw_perf_cache_event()
533 if (ev == 0) in _hw_perf_cache_event()
535 if (ev == -1) in _hw_perf_cache_event()
537 *evp = ev; in _hw_perf_cache_event()
/arch/parisc/kernel/
syscall.S
121 or,ev %r1,%r30,%r30
472 or,ev %r1,%r30,%r30
/arch/powerpc/platforms/
Kconfig.cputype
373 support SMP machines with 603/603e/603ev or PPC750 ("G3") processors