
Searched refs:eventsel (Results 1 – 13 of 13) sorted by relevance

/arch/x86/kvm/
pmu.c:319 static bool filter_contains_match(u64 *events, u64 nevents, u64 eventsel) in filter_contains_match() argument
321 u64 event_select = eventsel & kvm_pmu_ops.EVENTSEL_EVENT; in filter_contains_match()
322 u64 umask = eventsel & ARCH_PERFMON_EVENTSEL_UMASK; in filter_contains_match()
353 u64 eventsel) in is_gp_event_allowed() argument
355 if (filter_contains_match(f->includes, f->nr_includes, eventsel) && in is_gp_event_allowed()
356 !filter_contains_match(f->excludes, f->nr_excludes, eventsel)) in is_gp_event_allowed()
387 return is_gp_event_allowed(filter, pmc->eventsel); in check_pmu_event_filter()
402 u64 eventsel = pmc->eventsel; in reprogram_counter() local
403 u64 new_config = eventsel; in reprogram_counter()
414 if (eventsel & ARCH_PERFMON_EVENTSEL_PIN_CONTROL) in reprogram_counter()
[additional matches in pmu.c not shown]
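
The pmu.c matches above are KVM's PMU event filter: a guest eventsel is allowed when it matches an include entry and no exclude entry, comparing only the event-select and unit-mask bits. Below is a minimal userspace sketch of that check; the types, mask macros, and the linear scan are simplified stand-ins (the kernel masks via kvm_pmu_ops.EVENTSEL_EVENT and ARCH_PERFMON_EVENTSEL_UMASK, and its lookup and action handling are more involved):

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t u64;

    /* Stand-ins for kvm_pmu_ops.EVENTSEL_EVENT / ARCH_PERFMON_EVENTSEL_UMASK. */
    #define EVTSEL_EVENT_MASK 0x000000ffULL
    #define EVTSEL_UMASK_MASK 0x0000ff00ULL

    struct event_filter {
            u64 *includes;
            u64 *excludes;
            u64 nr_includes;
            u64 nr_excludes;
    };

    /* Compare only the event-select and unit-mask bits, as
     * filter_contains_match() above does. */
    static bool contains_match(const u64 *events, u64 nevents, u64 eventsel)
    {
            u64 key = eventsel & (EVTSEL_EVENT_MASK | EVTSEL_UMASK_MASK);

            for (u64 i = 0; i < nevents; i++)
                    if (events[i] == key)
                            return true;
            return false;
    }

    /* The shape of is_gp_event_allowed(): included and not excluded. */
    static bool gp_event_allowed(const struct event_filter *f, u64 eventsel)
    {
            return contains_match(f->includes, f->nr_includes, eventsel) &&
                   !contains_match(f->excludes, f->nr_excludes, eventsel);
    }
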
pmu.h:158 return pmc->eventsel & ARCH_PERFMON_EVENTSEL_ENABLE; in pmc_speculative_in_use()
/arch/x86/kvm/vmx/
pmu_intel.c:51 u8 eventsel; member
107 u8 event_select = pmc->eventsel & ARCH_PERFMON_EVENTSEL_EVENT; in intel_hw_event_available()
108 u8 unit_mask = (pmc->eventsel & ARCH_PERFMON_EVENTSEL_UMASK) >> 8; in intel_hw_event_available()
118 if (intel_arch_events[i].eventsel != event_select || in intel_hw_event_available()
380 msr_info->data = pmc->eventsel; in intel_pmu_get_msr()
454 if (data != pmc->eventsel) { in intel_pmu_set_msr()
455 pmc->eventsel = data; in intel_pmu_set_msr()
480 pmc->eventsel = (intel_arch_events[event].unit_mask << 8) | in setup_fixed_pmc_eventsel()
481 intel_arch_events[event].eventsel; in setup_fixed_pmc_eventsel()
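
intel_hw_event_available() above splits pmc->eventsel into its event select (bits 7:0) and unit mask (bits 15:8) and compares both against a table of architectural events; setup_fixed_pmc_eventsel() builds an eventsel the other way around. A self-contained sketch of that split/join follows; the mask positions match the ARCH_PERFMON_EVENTSEL_* layout, but the table entries are illustrative only, and the real function additionally checks what the guest's CPUID advertises as available:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    struct arch_event {
            uint8_t eventsel;        /* event select, eventsel bits 7:0  */
            uint8_t unit_mask;       /* unit mask,    eventsel bits 15:8 */
    };

    /* Illustrative entries; the real intel_arch_events[] table lists the
     * architectural events. */
    static const struct arch_event arch_events[] = {
            { 0x3c, 0x00 },          /* unhalted core cycles   */
            { 0xc0, 0x00 },          /* instructions retired   */
    };

    static bool hw_event_available(uint64_t eventsel)
    {
            uint8_t event = eventsel & 0xff;
            uint8_t umask = (eventsel >> 8) & 0xff;

            for (size_t i = 0; i < sizeof(arch_events) / sizeof(arch_events[0]); i++)
                    if (arch_events[i].eventsel == event &&
                        arch_events[i].unit_mask == umask)
                            return true;
            return false;
    }

    /* The inverse, as in setup_fixed_pmc_eventsel(): */
    static uint64_t make_eventsel(const struct arch_event *e)
    {
            return ((uint64_t)e->unit_mask << 8) | e->eventsel;
    }
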
/arch/x86/kvm/svm/
pmu.c:146 msr_info->data = pmc->eventsel; in amd_pmu_get_msr()
171 if (data != pmc->eventsel) { in amd_pmu_set_msr()
172 pmc->eventsel = data; in amd_pmu_set_msr()
/arch/arm64/kvm/
pmu-emul.c:586 u64 eventsel, reg, data; in kvm_pmu_create_perf_event() local
593 eventsel = ARMV8_PMUV3_PERFCTR_CPU_CYCLES; in kvm_pmu_create_perf_event()
595 eventsel = data & kvm_pmu_event_mask(vcpu->kvm); in kvm_pmu_create_perf_event()
601 if (eventsel == ARMV8_PMUV3_PERFCTR_SW_INCR || in kvm_pmu_create_perf_event()
602 eventsel == ARMV8_PMUV3_PERFCTR_CHAIN) in kvm_pmu_create_perf_event()
610 !test_bit(eventsel, vcpu->kvm->arch.pmu_filter)) in kvm_pmu_create_perf_event()
622 attr.config = eventsel; in kvm_pmu_create_perf_event()
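
The arm64 matches show how a guest counter becomes a host perf event: the guest-written value is masked down to the implemented event range, purely-emulated events are refused, the userspace event filter bitmap is consulted, and the surviving eventsel goes into attr.config. A simplified, userspace-compilable sketch of that decision; the helper name, the inline bitmap test, and the constants defined locally here are stand-ins for the kernel's kvm_pmu_event_mask(), test_bit(), and the ARMV8_PMUV3_PERFCTR_* definitions:

    #include <stdbool.h>
    #include <stdint.h>
    #include <linux/perf_event.h>   /* uapi: struct perf_event_attr, PERF_TYPE_RAW */

    /* Event numbers as in the arm64 PMUv3 headers. */
    #define PMUV3_PERFCTR_SW_INCR 0x00
    #define PMUV3_PERFCTR_CHAIN   0x1e

    static bool build_guest_event_attr(uint64_t data, uint64_t event_mask,
                                       const uint64_t *pmu_filter,
                                       struct perf_event_attr *attr)
    {
            uint64_t eventsel = data & event_mask;

            /* SW_INCR and CHAIN are emulated by KVM, never backed by hardware. */
            if (eventsel == PMUV3_PERFCTR_SW_INCR ||
                eventsel == PMUV3_PERFCTR_CHAIN)
                    return false;

            /* One bit per event number, as in vcpu->kvm->arch.pmu_filter. */
            if (pmu_filter &&
                !((pmu_filter[eventsel / 64] >> (eventsel % 64)) & 1))
                    return false;

            attr->type = PERF_TYPE_RAW;
            attr->config = eventsel;    /* attr.config = eventsel, as above */
            return true;
    }
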
/arch/x86/events/amd/
core.c:300 static inline int amd_pmu_addr_offset(int index, bool eventsel) in amd_pmu_addr_offset() argument
307 if (eventsel) in amd_pmu_addr_offset()
320 if (eventsel) in amd_pmu_addr_offset()
1265 .eventsel = MSR_K7_EVNTSEL0,
1367 x86_pmu.eventsel = MSR_F15H_PERF_CTL; in amd_core_pmu_init()
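
These AMD matches reflect two MSR layouts: pre-Fam15h parts place event-select registers contiguously from MSR_K7_EVNTSEL0, while the Fam15h+ core PMU interleaves PERF_CTL/PERF_CTR pairs from MSR_F15H_PERF_CTL, so the per-index offset doubles. A sketch of just that arithmetic; the bool in the real amd_pmu_addr_offset() signature selects between eventsel and counter offset caches (which this omits), whereas the flag here only models the family split:

    #include <stdbool.h>

    static int addr_offset(int index, bool fam15h_core_pmu)
    {
            if (!fam15h_core_pmu)
                    return index;        /* MSR_K7_EVNTSEL0 + index        */
            return index << 1;           /* MSR_F15H_PERF_CTL + 2 * index  */
    }
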
/arch/x86/events/intel/
p6.c:210 .eventsel = MSR_P6_EVNTSEL0,
knc.c:299 .eventsel = MSR_KNC_EVNTSEL0,
p4.c:1345 .eventsel = MSR_P4_BPU_CCCR0,
core.c:4919 .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
4972 .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
/arch/x86/events/
perf_event.h:758 unsigned eventsel; member
760 int (*addr_offset)(int index, bool eventsel);
1098 return x86_pmu.eventsel + (x86_pmu.addr_offset ? in x86_pmu_config_addr()
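
The truncated x86_pmu_config_addr() match ties the whole x86 side together: the per-vendor eventsel base MSR (the MSR_K7_EVNTSEL0 / MSR_F15H_PERF_CTL / MSR_ARCH_PERFMON_EVENTSEL0 initializers above) plus either the vendor's addr_offset() hook or a plain contiguous index. Reconstructed from the snippet, with a minimal stand-in for the x86_pmu descriptor so it is self-contained:

    #include <stdbool.h>

    /* Minimal stand-in for the two x86_pmu fields shown in perf_event.h. */
    struct pmu_desc {
            unsigned int eventsel;        /* base MSR, e.g. MSR_ARCH_PERFMON_EVENTSEL0 */
            int (*addr_offset)(int index, bool eventsel);
    };

    static struct pmu_desc x86_pmu;

    static inline unsigned int x86_pmu_config_addr(int index)
    {
            return x86_pmu.eventsel + (x86_pmu.addr_offset ?
                                       x86_pmu.addr_offset(index, true) : index);
    }
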
/arch/x86/events/zhaoxin/
core.c:468 .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
/arch/x86/include/asm/
kvm_host.h:497 u64 eventsel; member
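
This last match is the field the KVM snippets revolve around: struct kvm_pmc caches the guest's most recent event-select MSR write, and only a changed write (see amd_pmu_set_msr()/intel_pmu_set_msr() above) triggers reprogramming of the backing host event. An assumed, trimmed-down shape showing only the fields visible in these results; the real struct in kvm_host.h carries more state (counter index, type, owning vcpu, ...):

    #include <stdint.h>

    struct perf_event;                      /* backing host event, opaque here */

    struct kvm_pmc {
            uint64_t counter;               /* current counter value                   */
            uint64_t eventsel;              /* cached guest EVTSEL/PERF_CTL MSR write  */
            struct perf_event *perf_event;  /* host perf event backing this counter    */
    };
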