Lines matching refs: hw (uses of the event->hw field, struct hw_perf_event, in the PowerPC perf PMU code)
865 idx = cpuhw->event[i]->hw.idx; in any_pmc_overflown()
1132 if (event->hw.state & PERF_HES_STOPPED) in power_pmu_read()
1135 if (!event->hw.idx) in power_pmu_read()
1139 val = read_pmc(event->hw.idx); in power_pmu_read()
1140 local64_set(&event->hw.prev_count, val); in power_pmu_read()
1150 prev = local64_read(&event->hw.prev_count); in power_pmu_read()
1152 val = read_pmc(event->hw.idx); in power_pmu_read()
1156 } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev); in power_pmu_read()
1170 prev = local64_read(&event->hw.period_left); in power_pmu_read()
1174 } while (local64_cmpxchg(&event->hw.period_left, prev, val) != prev); in power_pmu_read()
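
The power_pmu_read() matches above all follow one pattern: sample the raw PMC, fold the delta against hw.prev_count into the event count, and update hw.prev_count and hw.period_left with compare-and-swap loops so a concurrent interrupt-time update is never lost. Below is a minimal userspace sketch of that pattern; C11 atomics stand in for the kernel's local64_* helpers, and read_pmc() is a hypothetical stub rather than the real PMC accessor.

/* Sketch of the prev_count/period_left update pattern seen in
 * power_pmu_read().  C11 atomics stand in for the kernel's local64_t;
 * read_pmc() is a stub that pretends the 32-bit PMC keeps ticking. */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

struct hw_counter {
	_Atomic uint64_t prev_count;   /* last raw PMC value already accounted for */
	_Atomic uint64_t period_left;  /* events remaining until the next sample   */
	_Atomic uint64_t count;        /* accumulated event count                  */
};

static uint64_t read_pmc(void)
{
	static uint64_t fake = 1000;               /* stand-in counter */
	return (fake += 123) & 0xfffffffful;       /* PMCs are 32 bits wide */
}

static void counter_read(struct hw_counter *c)
{
	uint64_t prev, val, delta;

	/* Retry until prev_count advances from the value the delta was
	 * based on; mirrors the local64_cmpxchg() loop at lines 1150-1156. */
	do {
		prev = atomic_load(&c->prev_count);
		val = read_pmc();
		delta = (val - prev) & 0xfffffffful;
	} while (!atomic_compare_exchange_weak(&c->prev_count, &prev, val));

	atomic_fetch_add(&c->count, delta);

	/* period_left shrinks by the same delta, again under a CAS loop
	 * (cf. lines 1170-1174). */
	do {
		prev = atomic_load(&c->period_left);
		val = prev - delta;
	} while (!atomic_compare_exchange_weak(&c->period_left, &prev, val));
}

int main(void)
{
	struct hw_counter c = { .period_left = 1 << 20 };

	counter_read(&c);
	counter_read(&c);
	printf("count=%llu period_left=%llu\n",
	       (unsigned long long)atomic_load(&c.count),
	       (unsigned long long)atomic_load(&c.period_left));
	return 0;
}
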
1197 if (!event->hw.idx) in freeze_limited_counters()
1199 val = (event->hw.idx == 5) ? pmc5 : pmc6; in freeze_limited_counters()
1200 prev = local64_read(&event->hw.prev_count); in freeze_limited_counters()
1201 event->hw.idx = 0; in freeze_limited_counters()
1217 event->hw.idx = cpuhw->limited_hwidx[i]; in thaw_limited_counters()
1218 val = (event->hw.idx == 5) ? pmc5 : pmc6; in thaw_limited_counters()
1219 prev = local64_read(&event->hw.prev_count); in thaw_limited_counters()
1221 local64_set(&event->hw.prev_count, val); in thaw_limited_counters()
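
The freeze_limited_counters()/thaw_limited_counters() matches cover the two limited PMCs (PMC5/PMC6): freezing accounts one final delta and clears hw.idx so the event is treated as off the hardware, while thawing restores hw.idx and re-seeds hw.prev_count from the current raw value so nothing that ticked while frozen is attributed to the event. A simplified sketch of that bookkeeping follows; plain fields stand in for local64_t and the struct and helper names are invented for illustration.

/* Sketch of the limited-PMC (PMC5/PMC6) freeze/thaw bookkeeping.
 * pmc5/pmc6 are the raw counter values snapshotted around the freeze. */
#include <stdint.h>
#include <stdio.h>

struct limited_event {
	int idx;              /* 5 or 6 while counting, 0 while frozen */
	uint64_t prev_count;  /* raw PMC value already accounted for   */
	uint64_t count;       /* accumulated event count               */
};

static void freeze_limited(struct limited_event *ev, uint64_t pmc5, uint64_t pmc6)
{
	uint64_t val, delta;

	if (!ev->idx)
		return;
	val = (ev->idx == 5) ? pmc5 : pmc6;
	delta = (val - ev->prev_count) & 0xfffffffful;
	ev->count += delta;   /* account everything counted up to the freeze */
	ev->idx = 0;          /* event no longer considered on a PMC         */
}

static void thaw_limited(struct limited_event *ev, int hwidx, uint64_t pmc5, uint64_t pmc6)
{
	ev->idx = hwidx;      /* back onto its limited PMC */
	/* Re-seed prev_count with the current raw value so the frozen
	 * window is not attributed to the event. */
	ev->prev_count = (ev->idx == 5) ? pmc5 : pmc6;
}

int main(void)
{
	struct limited_event ev = { .idx = 5, .prev_count = 100 };

	freeze_limited(&ev, 150, 0);   /* 50 events accounted, idx cleared */
	thaw_limited(&ev, 5, 160, 0);  /* resumes counting from raw 160    */
	printf("count=%llu idx=%d\n", (unsigned long long)ev.count, ev.idx);
	return 0;
}
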
1495 if (event->hw.idx && event->hw.idx != hwc_index[i] + 1) { in power_pmu_enable()
1497 write_pmc(event->hw.idx, 0); in power_pmu_enable()
1498 event->hw.idx = 0; in power_pmu_enable()
1508 if (event->hw.idx) in power_pmu_enable()
1519 val = local64_read(&event->hw.prev_count); in power_pmu_enable()
1522 if (event->hw.sample_period) { in power_pmu_enable()
1523 left = local64_read(&event->hw.period_left); in power_pmu_enable()
1527 local64_set(&event->hw.prev_count, val); in power_pmu_enable()
1530 event->hw.idx = idx; in power_pmu_enable()
1531 if (event->hw.state & PERF_HES_STOPPED) in power_pmu_enable()
1575 flags[n] = group->hw.event_base; in collect_events()
1576 events[n++] = group->hw.config; in collect_events()
1584 flags[n] = event->hw.event_base; in collect_events()
1585 events[n++] = event->hw.config; in collect_events()
1616 cpuhw->events[n0] = event->hw.config; in power_pmu_add()
1617 cpuhw->flags[n0] = event->hw.event_base; in power_pmu_add()
1626 event->hw.state = PERF_HES_STOPPED | PERF_HES_UPTODATE; in power_pmu_add()
1628 event->hw.state = 0; in power_pmu_add()
1642 event->hw.config = cpuhw->events[n0]; in power_pmu_add()
1693 ppmu->disable_pmc(event->hw.idx - 1, &cpuhw->mmcr); in power_pmu_del()
1694 if (event->hw.idx) { in power_pmu_del()
1695 write_pmc(event->hw.idx, 0); in power_pmu_del()
1696 event->hw.idx = 0; in power_pmu_del()
1735 if (!event->hw.idx || !event->hw.sample_period) in power_pmu_start()
1738 if (!(event->hw.state & PERF_HES_STOPPED)) in power_pmu_start()
1742 WARN_ON_ONCE(!(event->hw.state & PERF_HES_UPTODATE)); in power_pmu_start()
1747 event->hw.state = 0; in power_pmu_start()
1748 left = local64_read(&event->hw.period_left); in power_pmu_start()
1754 write_pmc(event->hw.idx, val); in power_pmu_start()
1765 if (!event->hw.idx || !event->hw.sample_period) in power_pmu_stop()
1768 if (event->hw.state & PERF_HES_STOPPED) in power_pmu_stop()
1775 event->hw.state |= PERF_HES_STOPPED | PERF_HES_UPTODATE; in power_pmu_stop()
1776 write_pmc(event->hw.idx, 0); in power_pmu_stop()
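
The power_pmu_start()/power_pmu_stop() matches show the PERF_HES_* handshake: stop folds in the final count, parks the PMC at zero and sets PERF_HES_STOPPED | PERF_HES_UPTODATE; start refuses to run with a stale count, clears hw.state, and re-arms the counter from hw.period_left. A compressed sketch of that state machine follows; the flag values, struct and write_pmc() here are stand-ins, not the kernel definitions.

/* Sketch of the PERF_HES_* handshake between power_pmu_stop() and
 * power_pmu_start().  All names and values are illustrative stand-ins. */
#include <stdint.h>
#include <stdio.h>
#include <assert.h>

#define PERF_HES_STOPPED   0x1   /* event is not currently counting        */
#define PERF_HES_UPTODATE  0x2   /* event count already reflects the PMC   */

struct sw_event {
	int idx;                     /* 1-based PMC number, 0 = none */
	unsigned int state;
	int64_t period_left;
	uint64_t sample_period;
};

static void write_pmc(int idx, uint64_t val)
{
	printf("PMC%d <- 0x%llx\n", idx, (unsigned long long)val);
}

static void event_stop(struct sw_event *ev)
{
	if (!ev->idx || !ev->sample_period)
		return;
	if (ev->state & PERF_HES_STOPPED)
		return;
	/* ... fold the final delta into the event count here ... */
	ev->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
	write_pmc(ev->idx, 0);                   /* park the counter at zero */
}

static void event_start(struct sw_event *ev)
{
	uint64_t val = 0;

	if (!ev->idx || !ev->sample_period)
		return;
	if (!(ev->state & PERF_HES_STOPPED))
		return;
	assert(ev->state & PERF_HES_UPTODATE);   /* restarting a stale count loses events */

	ev->state = 0;
	/* Re-arm so the PMC's 2^31 carry fires after period_left more events. */
	if (ev->period_left > 0 && ev->period_left < 0x80000000ll)
		val = 0x80000000ull - ev->period_left;
	write_pmc(ev->idx, val);
}

int main(void)
{
	struct sw_event ev = { .idx = 1, .period_left = 4096, .sample_period = 4096 };

	event_stop(&ev);
	event_start(&ev);
	return 0;
}
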
1855 cpuhw->event[i]->hw.config = cpuhw->events[i]; in power_pmu_commit_txn()
2033 event->hw.config_base = ev; in power_pmu_event_init()
2034 event->hw.idx = 0; in power_pmu_event_init()
2119 event->hw.config = events[n]; in power_pmu_event_init()
2120 event->hw.event_base = cflags[n]; in power_pmu_event_init()
2121 event->hw.last_period = event->hw.sample_period; in power_pmu_event_init()
2122 local64_set(&event->hw.period_left, event->hw.last_period); in power_pmu_event_init()
2129 local64_set(&event->hw.prev_count, 0); in power_pmu_event_init()
2154 return event->hw.idx; in power_pmu_event_idx()
2194 u64 period = event->hw.sample_period; in record_and_restart()
2198 if (event->hw.state & PERF_HES_STOPPED) { in record_and_restart()
2199 write_pmc(event->hw.idx, 0); in record_and_restart()
2204 prev = local64_read(&event->hw.prev_count); in record_and_restart()
2213 left = local64_read(&event->hw.period_left) - delta; in record_and_restart()
2232 event->hw.last_period = event->hw.sample_period; in record_and_restart()
2238 write_pmc(event->hw.idx, val); in record_and_restart()
2239 local64_set(&event->hw.prev_count, val); in record_and_restart()
2240 local64_set(&event->hw.period_left, left); in record_and_restart()
2260 perf_sample_data_init(&data, ~0ULL, event->hw.last_period); in record_and_restart()
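
record_and_restart() is where sampling periods are reloaded: the delta since hw.prev_count is subtracted from hw.period_left, and once the period has expired the counter is re-armed at 0x80000000 - left so the PMC's 2^31 carry fires after left more events. A worked sketch of that arithmetic follows, with the sample-emission path reduced to a comment and the struct and helper names invented for illustration.

/* Sketch of the period-reload arithmetic in record_and_restart().
 * PowerPC PMCs raise the performance-monitor exception when bit 31
 * becomes set, so programming 0x80000000 - left makes the counter
 * overflow after left more events.  Types are simplified stand-ins. */
#include <stdint.h>
#include <stdio.h>

struct sample_event {
	uint64_t prev_count;
	int64_t  period_left;
	uint64_t sample_period;
	uint64_t last_period;
};

/* Returns the value to write back into the PMC, or 0 to leave it parked. */
static uint64_t reload(struct sample_event *ev, uint64_t raw_pmc)
{
	uint64_t delta = (raw_pmc - ev->prev_count) & 0xfffffffful;
	int64_t left = ev->period_left - delta;
	uint64_t val = 0;

	if (ev->sample_period) {
		if (left <= 0) {
			/* Period expired: this is where the sample would be
			 * emitted, then the period is re-armed. */
			left += ev->sample_period;
			if (left <= 0)
				left = ev->sample_period;
			ev->last_period = ev->sample_period;
		}
		if (left < 0x80000000ll)
			val = 0x80000000ull - left;
	}

	ev->prev_count = val;      /* next delta is measured from the reload value */
	ev->period_left = left;
	return val;
}

int main(void)
{
	struct sample_event ev = { .period_left = 100, .sample_period = 4096 };

	/* Counter advanced by 150 raw ticks: period expires, re-armed at 4096. */
	uint64_t val = reload(&ev, 150);
	printf("reload val=0x%llx period_left=%lld\n",
	       (unsigned long long)val, (long long)ev.period_left);
	return 0;
}
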
2380 if (event->hw.idx == (i + 1)) { in __perf_event_interrupt()
2402 if (!event->hw.idx || is_limited_pmc(event->hw.idx)) in __perf_event_interrupt()
2404 if (pmc_overflow_power7(cpuhw->pmcs[event->hw.idx - 1])) { in __perf_event_interrupt()
2408 cpuhw->pmcs[event->hw.idx - 1], in __perf_event_interrupt()
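
The __perf_event_interrupt() matches show the search for the overflowed counter: first any PMC whose overflow bit is still set is matched to the event with hw.idx == i + 1 (line 2380), and if none is found the handler falls back to a non-limited event whose saved PMC value looks freshly wrapped (pmc_overflow_power7(), lines 2402-2408). Since hw.idx is 1-based, the saved values are indexed with idx - 1. The sketch below reduces this to locating a single overflowed event; the struct, the pmc_rolled_over() heuristic and the array sizes are illustrative stand-ins.

/* Sketch of the two-pass search for the overflowed counter in
 * __perf_event_interrupt().  hw.idx is 1-based (0 = no PMC), so the
 * saved PMC array is indexed with idx - 1. */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define MAX_HWEVENTS 8

struct irq_event {
	int idx;          /* 1-based PMC number, 0 = none      */
	bool limited;     /* PMC5/PMC6-style limited counter   */
};

static bool pmc_overflowed(uint32_t val)
{
	return (val & 0x80000000u) != 0;   /* overflow bit still set */
}

static bool pmc_rolled_over(uint32_t val)
{
	return val < 0x10000;              /* looks freshly wrapped (stand-in heuristic) */
}

static struct irq_event *find_overflowed(struct irq_event **events, int n,
					 const uint32_t *pmcs, int n_pmcs)
{
	int i, j;

	/* Pass 1: a PMC whose overflow bit is still set, matched to the
	 * event with hw.idx == i + 1. */
	for (i = 0; i < n_pmcs; i++) {
		if (!pmc_overflowed(pmcs[i]))
			continue;
		for (j = 0; j < n; j++)
			if (events[j]->idx == i + 1)
				return events[j];
	}

	/* Pass 2: the bit may already have cleared, so fall back to a
	 * non-limited counter whose saved value looks freshly wrapped. */
	for (j = 0; j < n; j++) {
		if (!events[j]->idx || events[j]->limited)
			continue;
		if (pmc_rolled_over(pmcs[events[j]->idx - 1]))
			return events[j];
	}
	return NULL;
}

int main(void)
{
	struct irq_event a = { .idx = 2 }, b = { .idx = 4 };
	struct irq_event *events[] = { &a, &b };
	uint32_t pmcs[MAX_HWEVENTS] = { 0, 0x80001234u, 0, 42 };
	struct irq_event *hit = find_overflowed(events, 2, pmcs, 4);

	printf("overflowed event idx=%d\n", hit ? hit->idx : 0);
	return 0;
}
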