Searched refs:counts (Results 1 – 9 of 9) sorted by relevance

/arch/powerpc/platforms/pseries/
msi.c
231 struct msi_counts *counts = data; in count_non_bridge_devices() local
241 counts->num_devices++; in count_non_bridge_devices()
248 struct msi_counts *counts = data; in count_spare_msis() local
252 if (dn == counts->requestor) in count_spare_msis()
253 req = counts->request; in count_spare_msis()
267 if (req < counts->quota) in count_spare_msis()
268 counts->spare += counts->quota - req; in count_spare_msis()
269 else if (req > counts->quota) in count_spare_msis()
270 counts->over_quota++; in count_spare_msis()
278 struct msi_counts counts; in msi_quota_for_device() local
[all …]
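
The count_spare_msis() lines above show a small quota-bookkeeping pattern: a request below the quota donates its unused share to a spare pool, while a request above the quota is only tallied. A minimal standalone sketch of that accounting follows (plain C, with a hypothetical msi_counts_sketch struct mirroring the fields visible in the excerpt; it is not the pseries code itself):

#include <stdio.h>

/* Hypothetical mirror of the bookkeeping fields visible in the excerpt. */
struct msi_counts_sketch {
	int quota;       /* per-device MSI quota */
	int spare;       /* unused quota accumulated from frugal devices */
	int over_quota;  /* devices asking for more than their quota */
};

/* Fold one device's request into the running totals, following the
 * if/else structure shown in count_spare_msis(). */
static void account_request(struct msi_counts_sketch *counts, int req)
{
	if (req < counts->quota)
		counts->spare += counts->quota - req;
	else if (req > counts->quota)
		counts->over_quota++;
}

int main(void)
{
	struct msi_counts_sketch counts = { .quota = 4 };
	int requests[] = { 1, 4, 8, 2 };
	unsigned int i;

	for (i = 0; i < sizeof(requests) / sizeof(requests[0]); i++)
		account_request(&counts, requests[i]);

	/* Devices under quota left 3 + 2 = 5 spare vectors; one device was over. */
	printf("spare=%d over_quota=%d\n", counts.spare, counts.over_quota);
	return 0;
}
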
/arch/x86/kernel/cpu/resctrl/
pseudo_lock.c
938 struct residency_counts *counts) in measure_residency_fn() argument
1046 counts->miss_before = miss_before; in measure_residency_fn()
1047 counts->hits_before = hits_before; in measure_residency_fn()
1048 counts->miss_after = miss_after; in measure_residency_fn()
1049 counts->hits_after = hits_after; in measure_residency_fn()
1056 struct residency_counts counts = {0}; in measure_l2_residency() local
1078 measure_residency_fn(&perf_miss_attr, &perf_hit_attr, plr, &counts); in measure_l2_residency()
1083 trace_pseudo_lock_l2(counts.hits_after - counts.hits_before, in measure_l2_residency()
1084 counts.miss_after - counts.miss_before); in measure_l2_residency()
1094 struct residency_counts counts = {0}; in measure_l3_residency() local
[all …]
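
measure_l2_residency() and measure_l3_residency() both rely on the same before/after scheme: measure_residency_fn() fills a residency_counts struct with hit and miss readings taken around the measured run, and the callers trace only the deltas (see the trace_pseudo_lock_l2() call above). A simplified userspace sketch of that delta reporting (the global counters and run_measured_workload() are stand-ins for the perf event reads; this is not the resctrl code):

#include <stdio.h>
#include <stdint.h>

/* Mirrors the fields filled in by measure_residency_fn() in the excerpt. */
struct residency_counts_sketch {
	uint64_t miss_before, hits_before;
	uint64_t miss_after, hits_after;
};

/* Hypothetical stand-ins for the hardware hit/miss counters. */
static uint64_t hit_counter, miss_counter;

static void run_measured_workload(void)
{
	hit_counter += 950;     /* pretend the measured region mostly hit */
	miss_counter += 50;
}

int main(void)
{
	struct residency_counts_sketch counts = { 0 };

	counts.hits_before = hit_counter;
	counts.miss_before = miss_counter;
	run_measured_workload();
	counts.hits_after = hit_counter;
	counts.miss_after = miss_counter;

	/* Only the deltas are reported, as in the trace calls above. */
	printf("hits=%llu misses=%llu\n",
	       (unsigned long long)(counts.hits_after - counts.hits_before),
	       (unsigned long long)(counts.miss_after - counts.miss_before));
	return 0;
}
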
/arch/ia64/kernel/
smp.c
257 unsigned short *counts = __ia64_per_cpu_var(shadow_flush_counts); in smp_flush_tlb_cpumask() local
265 counts[cpu] = local_tlb_flush_counts[cpu].count & 0xffff; in smp_flush_tlb_cpumask()
279 while(counts[cpu] == (local_tlb_flush_counts[cpu].count & 0xffff)) in smp_flush_tlb_cpumask()
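
The smp_flush_tlb_cpumask() excerpt is a generation-counter handshake: the sender snapshots the low 16 bits of each target CPU's flush counter, triggers the remote flushes, then spins until every counter has moved past its snapshot. A single-threaded sketch of that wait loop (the counter bump is simulated directly instead of happening in an IPI handler):

#include <stdio.h>

#define NR_CPUS_SKETCH 4

/* Simulated per-CPU flush generation counters. */
static unsigned int local_flush_count[NR_CPUS_SKETCH];

static void simulate_remote_flush(int cpu)
{
	local_flush_count[cpu]++;   /* in the kernel, the flush handler bumps this */
}

int main(void)
{
	unsigned short counts[NR_CPUS_SKETCH];
	int cpu;

	/* Snapshot the low 16 bits of each counter before requesting the flush. */
	for (cpu = 0; cpu < NR_CPUS_SKETCH; cpu++)
		counts[cpu] = local_flush_count[cpu] & 0xffff;

	for (cpu = 0; cpu < NR_CPUS_SKETCH; cpu++)
		simulate_remote_flush(cpu);

	/* Wait until every CPU's counter has advanced past the snapshot. */
	for (cpu = 0; cpu < NR_CPUS_SKETCH; cpu++)
		while (counts[cpu] == (local_flush_count[cpu] & 0xffff))
			;  /* would spin forever if the flush never happened */

	printf("all %d simulated flushes observed\n", NR_CPUS_SKETCH);
	return 0;
}
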
/arch/x86/ras/
Kconfig
7 page PFN and counts their repeated occurrence. Once the counter for a
/arch/x86/events/intel/
ds.c
1852 short counts[INTEL_PMC_IDX_FIXED + MAX_FIXED_PEBS_EVENTS] = {}; in intel_pmu_drain_pebs_nhm() local
1887 counts[bit]++; in intel_pmu_drain_pebs_nhm()
1930 counts[bit]++; in intel_pmu_drain_pebs_nhm()
1934 if ((counts[bit] == 0) && (error[bit] == 0)) in intel_pmu_drain_pebs_nhm()
1952 if (counts[bit]) { in intel_pmu_drain_pebs_nhm()
1954 top, bit, counts[bit], in intel_pmu_drain_pebs_nhm()
1962 short counts[INTEL_PMC_IDX_FIXED + MAX_FIXED_PEBS_EVENTS] = {}; in intel_pmu_drain_pebs_icl() local
1994 counts[bit]++; in intel_pmu_drain_pebs_icl()
1998 if (counts[bit] == 0) in intel_pmu_drain_pebs_icl()
2009 top, bit, counts[bit], in intel_pmu_drain_pebs_icl()
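
Both PEBS drain routines keep a per-counter tally: the first pass increments counts[bit] for every decoded record, and the second pass skips any counter bit whose tally stayed at zero. A simplified userspace sketch of that two-pass bucketing (the array size and the record stream are invented for illustration):

#include <stdio.h>

/* Stand-in for INTEL_PMC_IDX_FIXED + MAX_FIXED_PEBS_EVENTS. */
#define MAX_COUNTERS_SKETCH 16

int main(void)
{
	short counts[MAX_COUNTERS_SKETCH] = { 0 };
	/* Pretend each drained PEBS record carries the counter bit it hit. */
	int record_bits[] = { 0, 3, 3, 0, 5, 3 };
	int i, bit;

	/* First pass: tally records per counter, as the drain loops do. */
	for (i = 0; i < (int)(sizeof(record_bits) / sizeof(record_bits[0])); i++)
		counts[record_bits[i]]++;

	/* Second pass: only counters that actually saw records get processed. */
	for (bit = 0; bit < MAX_COUNTERS_SKETCH; bit++) {
		if (counts[bit] == 0)
			continue;
		printf("counter %d: %d PEBS records\n", bit, counts[bit]);
	}
	return 0;
}
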
/arch/x86/kernel/
smpboot.c
1933 u64 ratios, counts; in skx_set_max_freq_ratio() local
1947 err = rdmsrl_safe(MSR_TURBO_RATIO_LIMIT1, &counts); in skx_set_max_freq_ratio()
1952 group_size = (counts >> i) & 0xFF; in skx_set_max_freq_ratio()
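
skx_set_max_freq_ratio() reads MSR_TURBO_RATIO_LIMIT1 into counts and walks it a byte at a time; each byte appears to be the core-count group size paired with a ratio byte from a companion MSR not shown in the excerpt. A userspace sketch of that byte-wise unpacking (the MSR value is hard-coded here instead of being read with rdmsrl_safe()):

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	/* Invented example value standing in for the core-count MSR contents. */
	uint64_t counts = 0x1C18141008040201ULL;
	int i;

	/* Pull out one 8-bit group size per iteration, as the excerpt does. */
	for (i = 0; i < 64; i += 8) {
		unsigned int group_size = (counts >> i) & 0xFF;
		printf("group %d: size %u\n", i / 8, group_size);
	}
	return 0;
}
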
/arch/sparc/lib/
M7memset.S
129 cmp %o2, 7 ! if small counts, just write bytes
/arch/sh/lib/
memcpy-sh4.S
197 ! cycle counts for different sizes using byte-at-a-time vs. optimised):
/arch/
Kconfig
1127 bool "Locking event counts collection"
1133 differences. The counts are reported via debugfs.