Lines matching references to hwc (Linux kernel, arch/x86/events/core.c):
109 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_update() local
114 if (unlikely(!hwc->event_base)) in x86_perf_event_update()
128 prev_raw_count = local64_read(&hwc->prev_count); in x86_perf_event_update()
129 rdpmcl(hwc->event_base_rdpmc, new_raw_count); in x86_perf_event_update()
131 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in x86_perf_event_update()
147 local64_sub(delta, &hwc->period_left); in x86_perf_event_update()
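The fragment above is the classic lockless counter-update loop: read the previously accounted raw count, read the live counter with rdpmc, and publish the new raw value with a compare-and-exchange so a concurrent NMI handler cannot account the same window twice; the delta is then shifted up and back down to sign-extend from the hardware counter width. A minimal userspace sketch of the same retry pattern, using C11 atomics in place of local64_* (read_counter(), fake_pmc, and the 48-bit width are stand-ins, not the kernel's names):

    #include <stdatomic.h>
    #include <stdint.h>

    #define CNTVAL_BITS 48                    /* stand-in for x86_pmu.cntval_bits */

    struct event {
        _Atomic uint64_t prev_count;          /* last raw value already accounted */
        _Atomic int64_t  count;               /* total accumulated events */
        _Atomic int64_t  period_left;         /* remaining sample period */
    };

    static uint64_t fake_pmc;                 /* stand-in for the hardware counter */
    static uint64_t read_counter(void) { return fake_pmc += 123; }

    static void event_update(struct event *ev)
    {
        const int shift = 64 - CNTVAL_BITS;
        uint64_t prev, now;
        int64_t delta;

        do {
            prev = atomic_load(&ev->prev_count);
            now = read_counter();
            /* another context may have updated prev_count meanwhile: retry */
        } while (!atomic_compare_exchange_strong(&ev->prev_count, &prev, now));

        /* sign-extend the CNTVAL_BITS-wide difference to 64 bits */
        delta = (int64_t)((now << shift) - (prev << shift)) >> shift;

        atomic_fetch_add(&ev->count, delta);
        atomic_fetch_sub(&ev->period_left, delta);
    }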
357 set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event) in set_ext_hw_attr() argument
387 hwc->config |= val; in set_ext_hw_attr()
474 struct hw_perf_event *hwc = &event->hw; in x86_setup_perfctr() local
478 hwc->sample_period = x86_pmu.max_period; in x86_setup_perfctr()
479 hwc->last_period = hwc->sample_period; in x86_setup_perfctr()
480 local64_set(&hwc->period_left, hwc->sample_period); in x86_setup_perfctr()
487 return set_ext_hw_attr(hwc, event); in x86_setup_perfctr()
505 hwc->config |= config; in x86_setup_perfctr()
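Lines 474-505 show the setup path: an event created without a sample period is given the PMU's maximum period so it behaves as a pure counter, period_left starts at the full period, and hardware cache events are translated by set_ext_hw_attr(), which ORs the decoded per-event bits into hwc->config (line 387). A hedged sketch of the default-period initialization; the struct, function name, and MAX_PERIOD value are illustrative (the real limit is the per-PMU x86_pmu.max_period):

    #include <stdint.h>

    #define MAX_PERIOD ((1ULL << 47) - 1)    /* illustrative stand-in */

    struct hw_event {
        uint64_t sample_period;
        uint64_t last_period;
        int64_t  period_left;
        uint64_t config;
    };

    static void setup_perfctr(struct hw_event *hwc, uint64_t requested_period)
    {
        /* counting mode (no period requested): run with the widest period */
        hwc->sample_period = requested_period ? requested_period : MAX_PERIOD;
        hwc->last_period   = hwc->sample_period;
        hwc->period_left   = hwc->sample_period;
    }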
679 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_disable_all() local
689 if (is_counter_pair(hwc)) in x86_pmu_disable_all()
736 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_enable_all() local
741 __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE); in x86_pmu_enable_all()
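x86_pmu_disable_all() and x86_pmu_enable_all() walk every counter index, skip slots with no active event, and clear or set the per-counter enable bit; on the disable side a counter-pair event (line 689) must also have its paired sibling cleared. A compact sketch of the loop shape, assuming a fixed counter count and a plain bitmap (names are stand-ins; the enable-bit value matches ARCH_PERFMON_EVENTSEL_ENABLE, bit 22 of IA32_PERFEVTSELx):

    #include <stdint.h>

    #define NUM_COUNTERS 8
    #define EVENTSEL_ENABLE (1ULL << 22)     /* ARCH_PERFMON_EVENTSEL_ENABLE */

    struct cpu_state {
        uint64_t active_mask;                /* bit i set => counter i is in use */
        uint64_t eventsel[NUM_COUNTERS];     /* shadow of per-counter config MSRs */
    };

    static void pmu_disable_all(struct cpu_state *cpuc)
    {
        for (int idx = 0; idx < NUM_COUNTERS; idx++) {
            if (!(cpuc->active_mask & (1ULL << idx)))
                continue;
            /* the real code also clears the paired counter here for
             * is_counter_pair() events */
            cpuc->eventsel[idx] &= ~EVENTSEL_ENABLE;
        }
    }

    static void pmu_enable_all(struct cpu_state *cpuc)
    {
        for (int idx = 0; idx < NUM_COUNTERS; idx++) {
            if (!(cpuc->active_mask & (1ULL << idx)))
                continue;
            cpuc->eventsel[idx] |= EVENTSEL_ENABLE;
        }
    }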
978 struct hw_perf_event *hwc; in x86_schedule_events() local
1023 hwc = &cpuc->event_list[i]->hw; in x86_schedule_events()
1027 if (hwc->idx == -1) in x86_schedule_events()
1031 if (!test_bit(hwc->idx, c->idxmsk)) in x86_schedule_events()
1034 mask = BIT_ULL(hwc->idx); in x86_schedule_events()
1035 if (is_counter_pair(hwc)) in x86_schedule_events()
1045 assign[i] = hwc->idx; in x86_schedule_events()
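The x86_schedule_events() fragment is the scheduler's fast path: for each queued event, if it already holds a counter (hwc->idx != -1), that index still satisfies the event's constraint mask (c->idxmsk), and the counter (plus its sibling for counter-pair events) has not been claimed this round, the old assignment is reused and the full constraint solver is avoided. A sketch of that reuse test over a used-counter bitmap, with the constraint reduced to a plain bitmask and all names hypothetical:

    #include <stdbool.h>
    #include <stdint.h>

    struct sched_event {
        int      idx;                        /* previous counter, -1 if none */
        uint64_t constraint_mask;            /* counters this event may use */
        bool     is_pair;                    /* also occupies idx + 1 */
    };

    /* Try to keep the previous assignment; on success, claims the counter
     * bits in *used_mask and returns true. */
    static bool try_fastpath(struct sched_event *ev, uint64_t *used_mask)
    {
        uint64_t mask;

        if (ev->idx == -1)
            return false;                    /* never scheduled: slow path */
        if (!(ev->constraint_mask & (1ULL << ev->idx)))
            return false;                    /* constraint no longer allows it */

        mask = 1ULL << ev->idx;
        if (ev->is_pair)
            mask |= mask << 1;               /* pair events claim the next one too */
        if (*used_mask & mask)
            return false;                    /* already taken this round */

        *used_mask |= mask;
        return true;
    }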
1211 struct hw_perf_event *hwc = &event->hw; in x86_assign_hw_event() local
1214 idx = hwc->idx = cpuc->assign[i]; in x86_assign_hw_event()
1215 hwc->last_cpu = smp_processor_id(); in x86_assign_hw_event()
1216 hwc->last_tag = ++cpuc->tags[i]; in x86_assign_hw_event()
1218 switch (hwc->idx) { in x86_assign_hw_event()
1221 hwc->config_base = 0; in x86_assign_hw_event()
1222 hwc->event_base = 0; in x86_assign_hw_event()
1230 hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL; in x86_assign_hw_event()
1231 hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + in x86_assign_hw_event()
1233 hwc->event_base_rdpmc = (idx - INTEL_PMC_IDX_FIXED) | in x86_assign_hw_event()
1238 hwc->config_base = x86_pmu_config_addr(hwc->idx); in x86_assign_hw_event()
1239 hwc->event_base = x86_pmu_event_addr(hwc->idx); in x86_assign_hw_event()
1240 hwc->event_base_rdpmc = x86_pmu_rdpmc_index(hwc->idx); in x86_assign_hw_event()
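x86_assign_hw_event() translates the scheduled counter index into the MSR addresses everything else uses: pseudo-events get no bases at all, fixed counters get MSR_ARCH_PERFMON_FIXED_CTR_CTRL / MSR_ARCH_PERFMON_FIXED_CTR0+n plus an rdpmc index carrying the fixed-counter flag (bit 30 of ECX), and general counters get the per-index config/event MSRs. A sketch of that switch with the Intel MSR constants written out; the idx < 0 case is a simplification of the BTS/metric pseudo-indices in the real code, and AMD's interleaved MSR layout differs:

    #include <stdint.h>

    #define MSR_FIXED_CTR_CTRL 0x38d
    #define MSR_FIXED_CTR0     0x309
    #define MSR_EVNTSEL0       0x186
    #define MSR_PMC0           0x0c1
    #define IDX_FIXED          32            /* INTEL_PMC_IDX_FIXED */
    #define RDPMC_FIXED_FLAG   (1u << 30)    /* rdpmc ECX flag for fixed counters */

    struct hw_assignment {
        uint64_t config_base;
        uint64_t event_base;
        uint32_t event_base_rdpmc;
    };

    static void assign_hw_event(struct hw_assignment *hwc, int idx)
    {
        if (idx < 0) {
            /* pseudo-events (BTS, topdown metrics in the real code):
             * no counter MSRs to program */
            hwc->config_base = 0;
            hwc->event_base = 0;
        } else if (idx >= IDX_FIXED) {
            hwc->config_base = MSR_FIXED_CTR_CTRL;
            hwc->event_base = MSR_FIXED_CTR0 + (idx - IDX_FIXED);
            hwc->event_base_rdpmc = (idx - IDX_FIXED) | RDPMC_FIXED_FLAG;
        } else {
            /* general-purpose counter */
            hwc->config_base = MSR_EVNTSEL0 + idx;
            hwc->event_base = MSR_PMC0 + idx;
            hwc->event_base_rdpmc = idx;
        }
    }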
1266 static inline int match_prev_assignment(struct hw_perf_event *hwc, in match_prev_assignment() argument
1270 return hwc->idx == cpuc->assign[i] && in match_prev_assignment()
1271 hwc->last_cpu == smp_processor_id() && in match_prev_assignment()
1272 hwc->last_tag == cpuc->tags[i]; in match_prev_assignment()
1281 struct hw_perf_event *hwc; in x86_pmu_enable() local
1300 hwc = &event->hw; in x86_pmu_enable()
1308 if (hwc->idx == -1 || in x86_pmu_enable()
1309 match_prev_assignment(hwc, cpuc, i)) in x86_pmu_enable()
1316 if (hwc->state & PERF_HES_STOPPED) in x86_pmu_enable()
1317 hwc->state |= PERF_HES_ARCH; in x86_pmu_enable()
1327 hwc = &event->hw; in x86_pmu_enable()
1329 if (!match_prev_assignment(hwc, cpuc, i)) in x86_pmu_enable()
1334 if (hwc->state & PERF_HES_ARCH) in x86_pmu_enable()
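x86_pmu_enable() reprograms the PMU in two passes over the event list: pass one stops every event whose previous counter assignment no longer matches (first tagging already-stopped events with PERF_HES_ARCH so pass two leaves them stopped), and pass two installs the new assignments and restarts everything else. A schematic of the two passes, assuming hypothetical stop_event()/start_event() helpers and eliding the n_running fast-path detail of the real code:

    #define HES_STOPPED (1 << 0)
    #define HES_ARCH    (1 << 2)

    struct sched_slot {
        int idx;            /* current hardware counter, -1 if none */
        int new_idx;        /* counter chosen by the scheduler */
        int state;          /* PERF_HES_* style flags */
    };

    extern void stop_event(struct sched_slot *ev);   /* hypothetical */
    extern void start_event(struct sched_slot *ev);  /* hypothetical */

    static void pmu_reprogram(struct sched_slot *evs, int n)
    {
        /* pass 1: stop events that moved to a different counter */
        for (int i = 0; i < n; i++) {
            if (evs[i].idx == -1 || evs[i].idx == evs[i].new_idx)
                continue;
            if (evs[i].state & HES_STOPPED)
                evs[i].state |= HES_ARCH;    /* was already stopped: stay stopped */
            stop_event(&evs[i]);
        }

        /* pass 2: (re)start events on their new counters */
        for (int i = 0; i < n; i++) {
            if (evs[i].idx != -1 && evs[i].idx == evs[i].new_idx)
                continue;                    /* untouched: still running */
            evs[i].idx = evs[i].new_idx;
            if (evs[i].state & HES_ARCH)
                continue;                    /* user had it stopped: leave it */
            start_event(&evs[i]);
        }
    }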
1357 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_set_period() local
1358 s64 left = local64_read(&hwc->period_left); in x86_perf_event_set_period()
1359 s64 period = hwc->sample_period; in x86_perf_event_set_period()
1360 int ret = 0, idx = hwc->idx; in x86_perf_event_set_period()
1362 if (unlikely(!hwc->event_base)) in x86_perf_event_set_period()
1374 local64_set(&hwc->period_left, left); in x86_perf_event_set_period()
1375 hwc->last_period = period; in x86_perf_event_set_period()
1381 local64_set(&hwc->period_left, left); in x86_perf_event_set_period()
1382 hwc->last_period = period; in x86_perf_event_set_period()
1403 local64_set(&hwc->prev_count, (u64)-left); in x86_perf_event_set_period()
1405 wrmsrl(hwc->event_base, (u64)(-left) & x86_pmu.cntval_mask); in x86_perf_event_set_period()
1411 if (is_counter_pair(hwc)) in x86_perf_event_set_period()
1420 wrmsrl(hwc->event_base, in x86_perf_event_set_period()
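x86_perf_event_set_period() turns the remaining period into a counter reload value: a counter that has overflowed (left <= 0) has the full period folded back in, the result is clamped to the PMU maximum, and the counter is programmed with -left truncated to the counter width so it overflows after exactly `left` more events; prev_count is set to the same value so the next update starts from a clean baseline. A worked sketch of the reload computation, assuming a 48-bit counter and made-up names:

    #include <stdint.h>
    #include <stdio.h>

    #define CNTVAL_MASK ((1ULL << 48) - 1)   /* 48-bit counter, illustrative */
    #define MAX_PERIOD  ((1ULL << 47) - 1)

    /* Returns the raw value to program into the counter so that it
     * overflows after "left" more events. */
    static uint64_t reload_value(int64_t *period_left, int64_t period)
    {
        int64_t left = *period_left;

        if (left <= -period) {               /* hopelessly behind: restart */
            left = period;
            *period_left = left;
        }
        if (left <= 0) {                     /* overflowed: fold overshoot in */
            left += period;
            *period_left = left;
        }
        if (left > (int64_t)MAX_PERIOD)      /* hardware limit */
            left = MAX_PERIOD;

        /* the counter counts up and overflows past 2^48: start at -left */
        return (uint64_t)(-left) & CNTVAL_MASK;
    }

    int main(void)
    {
        int64_t period_left = 1000, period = 1000;
        /* prints 0xfffffffffc18 == 2^48 - 1000: overflow after 1000 events */
        printf("reload = %#llx\n",
               (unsigned long long)reload_value(&period_left, period));
        return 0;
    }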
1445 struct hw_perf_event *hwc; in x86_pmu_add() local
1449 hwc = &event->hw; in x86_pmu_add()
1456 hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED; in x86_pmu_add()
1458 hwc->state |= PERF_HES_ARCH; in x86_pmu_add()
1590 struct hw_perf_event *hwc = &event->hw; in x86_pmu_stop() local
1592 if (test_bit(hwc->idx, cpuc->active_mask)) { in x86_pmu_stop()
1594 __clear_bit(hwc->idx, cpuc->active_mask); in x86_pmu_stop()
1595 cpuc->events[hwc->idx] = NULL; in x86_pmu_stop()
1596 WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED); in x86_pmu_stop()
1597 hwc->state |= PERF_HES_STOPPED; in x86_pmu_stop()
1600 if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) { in x86_pmu_stop()
1606 hwc->state |= PERF_HES_UPTODATE; in x86_pmu_stop()
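Lines 1456-1458 and 1590-1606 show the PERF_HES_* state machine around add/stop: a newly added event starts STOPPED and UPTODATE (plus ARCH when the caller did not ask for an immediate start), and x86_pmu_stop() releases the counter slot, marks the event STOPPED, and folds the final hardware value into the count exactly once, with UPTODATE guarding against a second read. A small sketch of those transitions; the flag values follow <linux/perf_event.h>, the helper names are made up:

    #define PERF_HES_STOPPED  0x01
    #define PERF_HES_UPTODATE 0x02
    #define PERF_HES_ARCH     0x04

    #define PERF_EF_START  0x01
    #define PERF_EF_UPDATE 0x04

    struct hw_state { int state; };

    /* stand-in for the update loop sketched earlier */
    static void event_update(struct hw_state *hw) { (void)hw; }

    static void pmu_add(struct hw_state *hw, int flags)
    {
        /* fresh events are stopped and need no counter read yet */
        hw->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
        if (!(flags & PERF_EF_START))
            hw->state |= PERF_HES_ARCH;      /* caller will start it explicitly */
    }

    static void pmu_stop(struct hw_state *hw, int flags)
    {
        if (!(hw->state & PERF_HES_STOPPED)) {
            /* the real code frees the counter slot here, then: */
            hw->state |= PERF_HES_STOPPED;
        }
        if ((flags & PERF_EF_UPDATE) && !(hw->state & PERF_HES_UPTODATE)) {
            event_update(hw);                /* read the counter one last time */
            hw->state |= PERF_HES_UPTODATE;
        }
    }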
2537 struct hw_perf_event *hwc = &event->hw; in x86_pmu_event_idx() local
2539 if (!(hwc->flags & PERF_X86_EVENT_RDPMC_ALLOWED)) in x86_pmu_event_idx()
2542 if (is_metric_idx(hwc->idx)) in x86_pmu_event_idx()
2545 return hwc->event_base_rdpmc + 1; in x86_pmu_event_idx()
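x86_pmu_event_idx() is what mmap'd self-monitoring sees: perf exports event_base_rdpmc + 1 through perf_event_mmap_page->index, reserving 0 for "rdpmc not permitted" (and for metric pseudo-events, line 2542), so userspace executes rdpmc with index - 1. A hedged userspace sketch of consuming that field with the standard seqlock protocol from perf_event_open(2); error handling and the pmc_width sign-extension step are trimmed:

    #define _GNU_SOURCE
    #include <linux/perf_event.h>
    #include <sys/mman.h>
    #include <sys/syscall.h>
    #include <unistd.h>
    #include <stdint.h>
    #include <stdio.h>

    /* rdpmc with the (index - 1) cookie perf hands to userspace */
    static inline uint64_t rdpmc(uint32_t ecx)
    {
        uint32_t lo, hi;
        __asm__ volatile("rdpmc" : "=a"(lo), "=d"(hi) : "c"(ecx));
        return (uint64_t)hi << 32 | lo;
    }

    int main(void)
    {
        struct perf_event_attr attr = {
            .type = PERF_TYPE_HARDWARE,
            .config = PERF_COUNT_HW_INSTRUCTIONS,
            .size = sizeof(attr),
            .exclude_kernel = 1,
        };
        int fd = syscall(SYS_perf_event_open, &attr, 0, -1, -1, 0);
        struct perf_event_mmap_page *pc = mmap(NULL, sysconf(_SC_PAGESIZE),
                                               PROT_READ, MAP_SHARED, fd, 0);
        uint64_t count;
        uint32_t seq, idx;

        do {
            seq = pc->lock;                  /* seqlock: snapshot version */
            __sync_synchronize();
            idx = pc->index;                 /* x86_pmu_event_idx() result */
            count = pc->offset;              /* kernel-accumulated portion */
            if (pc->cap_user_rdpmc && idx)   /* index 0 => rdpmc forbidden */
                count += rdpmc(idx - 1);     /* undo the +1 from the kernel */
            __sync_synchronize();
        } while (pc->lock != seq);           /* retry on concurrent update */

        printf("instructions so far: %llu\n", (unsigned long long)count);
        return 0;
    }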