
Searched refs:avg (Results 1 – 12 of 12) sorted by relevance

/kernel/sched/
psi.c 286 static void calc_avgs(unsigned long avg[3], int missed_periods, in calc_avgs()
293 avg[0] = calc_load_n(avg[0], EXP_10s, 0, missed_periods); in calc_avgs()
294 avg[1] = calc_load_n(avg[1], EXP_60s, 0, missed_periods); in calc_avgs()
295 avg[2] = calc_load_n(avg[2], EXP_300s, 0, missed_periods); in calc_avgs()
301 avg[0] = calc_load(avg[0], EXP_10s, pct); in calc_avgs()
302 avg[1] = calc_load(avg[1], EXP_60s, pct); in calc_avgs()
303 avg[2] = calc_load(avg[2], EXP_300s, pct); in calc_avgs()
408 calc_avgs(group->avg[s], missed_periods, sample, period); in update_averages()
1208 unsigned long avg[3] = { 0, }; in psi_show() local
1215 avg[w] = group->avg[res * 2 + full][w]; in psi_show()
[all …]
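The psi.c hits show calc_avgs() maintaining the 10s/60s/300s pressure averages as fixed-point exponential decays, one calc_load() step per 2-second PSI period. A minimal userspace sketch of that recurrence, assuming the kernel's 11-bit fixed point and its EXP_10s constant of 1677 (~e^(-2s/10s) * 2048); rounding is simplified to nearest, where the kernel rounds up while rising:

    #include <stdio.h>

    #define FSHIFT  11                  /* bits of fixed-point fraction */
    #define FIXED_1 (1 << FSHIFT)       /* 1.0 in fixed point */

    /* avg' = avg * exp + active * (1 - exp), everything scaled by FIXED_1 */
    static unsigned long calc_load(unsigned long load, unsigned long exp,
                                   unsigned long active)
    {
        unsigned long newload = load * exp + active * (FIXED_1 - exp);
        return (newload + FIXED_1 / 2) / FIXED_1;
    }

    int main(void)
    {
        unsigned long exp_10s = 1677;   /* ~e^(-2s/10s) * 2048 */
        unsigned long avg = 0;

        /* ten 2-second periods of 100% pressure: avg climbs toward 2048 */
        for (int i = 0; i < 10; i++) {
            avg = calc_load(avg, exp_10s, FIXED_1);
            printf("after %2ds: avg = %4lu / 2048\n", (i + 1) * 2, avg);
        }
        return 0;
    }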
fair.c 796 struct sched_avg *sa = &se->avg; in init_entity_runnable_average()
842 struct sched_avg *sa = &se->avg; in post_init_entity_util_avg()
844 long cap = (long)(cpu_scale - cfs_rq->avg.util_avg) / 2; in post_init_entity_util_avg()
857 se->avg.last_update_time = cfs_rq_clock_pelt(cfs_rq); in post_init_entity_util_avg()
862 if (cfs_rq->avg.util_avg != 0) { in post_init_entity_util_avg()
863 sa->util_avg = cfs_rq->avg.util_avg * se->load.weight; in post_init_entity_util_avg()
864 sa->util_avg /= (cfs_rq->avg.load_avg + 1); in post_init_entity_util_avg()
2454 delta = p->se.avg.load_sum; in numa_get_avg_runtime()
3298 cfs_rq->avg.load_avg += se->avg.load_avg; in enqueue_load_avg()
3299 cfs_rq->avg.load_sum += se_weight(se) * se->avg.load_sum; in enqueue_load_avg()
[all …]
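Lines 862-864 seed a new entity's util_avg with a weight-proportional share of the runqueue's current utilization; the +1 in the divisor guards against a zero load_avg. A sketch of just that arithmetic, with struct and field names simplified from the kernel's sched_avg and the surrounding cap/clamp logic omitted:

    #include <stdio.h>

    /* Sketch of the seeding in post_init_entity_util_avg(): a new entity
     * inherits a share of the runqueue's utilization proportional to its
     * load weight. Names simplified. */
    struct sketch_avg { long util_avg, load_avg; };

    static long seed_util(const struct sketch_avg *rq_avg, long se_weight)
    {
        if (rq_avg->util_avg == 0)
            return 0;
        /* util_avg = rq_util * weight / (rq_load + 1); the +1 avoids a
         * division by zero on an otherwise idle queue */
        return rq_avg->util_avg * se_weight / (rq_avg->load_avg + 1);
    }

    int main(void)
    {
        struct sketch_avg rq = { .util_avg = 512, .load_avg = 2048 };
        printf("seeded util_avg = %ld\n", seed_util(&rq, 1024));  /* -> 255 */
        return 0;
    }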
pelt.h 42 static inline u32 get_pelt_divider(struct sched_avg *avg) in get_pelt_divider() argument
44 return PELT_MIN_DIVIDER + avg->period_contrib; in get_pelt_divider()
47 static inline void cfs_se_util_change(struct sched_avg *avg) in cfs_se_util_change() argument
55 enqueued = avg->util_est.enqueued; in cfs_se_util_change()
61 WRITE_ONCE(avg->util_est.enqueued, enqueued); in cfs_se_util_change()
165 u32 util_sum = rq->cfs.avg.util_sum; in update_idle_rq_clock_pelt()
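get_pelt_divider() returns the largest *_sum attainable at the current point inside a 1024us accumulation window, so that dividing a sum by it yields an average bounded by 1024. A hedged illustration; LOAD_AVG_MAX (47742) is the kernel's geometric-series limit, and the sample numbers are hypothetical:

    #include <stdio.h>

    #define LOAD_AVG_MAX     47742                  /* limit of the PELT geometric series */
    #define PELT_MIN_DIVIDER (LOAD_AVG_MAX - 1024)

    /* Sketch of get_pelt_divider(): period_contrib (0..1023) is how far the
     * entity is into the current 1024us segment, so the largest attainable
     * *_sum, and therefore the divider, grows with it. */
    static unsigned int get_pelt_divider(unsigned int period_contrib)
    {
        return PELT_MIN_DIVIDER + period_contrib;
    }

    int main(void)
    {
        /* hypothetical numbers; util_sum is kept scaled by 1024 in the kernel */
        unsigned long util_sum = 20UL * 1024 * 1024;
        unsigned int contrib = 512;

        printf("util_avg = %lu\n", util_sum / get_pelt_divider(contrib));
        return 0;
    }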
pelt.c 303 if (___update_load_sum(now, &se->avg, 0, 0, 0)) { in __update_load_avg_blocked_se()
304 ___update_load_avg(&se->avg, se_weight(se)); in __update_load_avg_blocked_se()
315 if (___update_load_sum(now, &se->avg, !!se->on_rq, se_runnable(se), in __update_load_avg_se()
318 ___update_load_avg(&se->avg, se_weight(se)); in __update_load_avg_se()
319 cfs_se_util_change(&se->avg); in __update_load_avg_se()
329 if (___update_load_sum(now, &cfs_rq->avg, in __update_load_avg_cfs_rq()
334 ___update_load_avg(&cfs_rq->avg, 1); in __update_load_avg_cfs_rq()
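___update_load_sum() ages history geometrically: each elapsed 1024us period multiplies the running sum by y, where y^32 = 1/2, before the new period's contribution is added. A standalone sketch restricted to whole periods (0xfa83b2da is y * 2^32 from the kernel's decay table; the kernel additionally handles partial segments, so the saturation point differs slightly):

    #include <stdio.h>
    #include <stdint.h>

    #define PELT_Y_FP 0xfa83b2daULL     /* y * 2^32, with y^32 = 1/2 */

    static uint64_t decay_one_period(uint64_t sum, int running)
    {
        sum = (sum * PELT_Y_FP) >> 32;  /* old contributions decay by y */
        if (running)
            sum += 1024;                /* a fully-run period contributes 1024 */
        return sum;
    }

    int main(void)
    {
        uint64_t sum = 0;

        /* an always-running task: sum saturates near LOAD_AVG_MAX (47742) */
        for (int p = 1; p <= 345; p++)
            sum = decay_one_period(sum, 1);
        printf("load_sum after 345 periods: %llu\n", (unsigned long long)sum);
        return 0;
    }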
debug.c 485 P(se->avg.load_avg); in print_cfs_group_stats()
486 P(se->avg.util_avg); in print_cfs_group_stats()
487 P(se->avg.runnable_avg); in print_cfs_group_stats()
633 cfs_rq->avg.load_avg); in print_cfs_rq()
635 cfs_rq->avg.runnable_avg); in print_cfs_rq()
637 cfs_rq->avg.util_avg); in print_cfs_rq()
639 cfs_rq->avg.util_est.enqueued); in print_cfs_rq()
1030 P(se.avg.load_sum); in proc_sched_show_task()
1031 P(se.avg.runnable_sum); in proc_sched_show_task()
1032 P(se.avg.util_sum); in proc_sched_show_task()
[all …]
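The P() calls here are the usual stringize-and-print debug idiom: emit a field's name next to its value. A userspace rendition of the pattern; the kernel's macro routes through SEQ_printf()/seq_file, so plain printf() stands in:

    #include <stdio.h>

    /* Print a field's name (via the # stringize operator) and its value */
    #define P(F) printf("  .%-30s: %lld\n", #F, (long long)(F))

    struct sched_avg_sketch { long load_avg, util_avg, runnable_avg; };

    int main(void)
    {
        struct sched_avg_sketch avg = { 1024, 512, 768 };
        P(avg.load_avg);
        P(avg.util_avg);
        P(avg.runnable_avg);
        return 0;
    }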
sched.h 225 static inline void update_avg(u64 *avg, u64 sample) in update_avg() argument
227 s64 diff = sample - *avg; in update_avg()
228 *avg += diff / 8; in update_avg()
598 struct sched_avg avg; member
3013 util = READ_ONCE(cfs_rq->avg.util_avg); in cpu_util_cfs()
3017 READ_ONCE(cfs_rq->avg.util_est.enqueued)); in cpu_util_cfs()
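update_avg() at line 225 is the cheapest exponential moving average in the file: each sample pulls the estimate 1/8 of the way toward itself using only a subtraction and an integer division. A standalone copy with a driver loop:

    #include <stdio.h>
    #include <stdint.h>

    /* sched.h's update_avg(): an EWMA that closes 1/8 of the gap per
     * sample. Integer truncation leaves the estimate a few counts shy
     * of a constant input, as in the kernel. */
    static void update_avg(uint64_t *avg, uint64_t sample)
    {
        int64_t diff = sample - *avg;
        *avg += diff / 8;
    }

    int main(void)
    {
        uint64_t avg = 0;

        for (int i = 0; i < 20; i++) {
            update_avg(&avg, 1000);     /* constant input: avg -> ~1000 */
            printf("step %2d: avg = %llu\n", i + 1, (unsigned long long)avg);
        }
        return 0;
    }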
/kernel/trace/
trace_benchmark.c 45 unsigned int avg; in trace_do_benchmark() local
109 avg = delta; in trace_do_benchmark()
124 seed = avg; in trace_do_benchmark()
140 bm_last, bm_first, bm_max, bm_min, avg, std, stddev); in trace_do_benchmark()
143 bm_avg = avg; in trace_do_benchmark()
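trace_do_benchmark() folds each measured delta into a running average and standard deviation. A hedged standalone sketch of that bookkeeping, using a sum-of-squares variance and a Newton-iteration integer square root in place of the kernel's bit-by-bit approximation; the deltas are borrowed from the Kconfig excerpt below, so the printed numbers will not match it exactly:

    #include <stdio.h>
    #include <stdint.h>

    /* Integer square root by Newton's method */
    static uint64_t isqrt(uint64_t n)
    {
        uint64_t x = n, y = (x + 1) / 2;
        while (y < x) {
            x = y;
            y = (x + n / x) / 2;
        }
        return x;
    }

    int main(void)
    {
        uint64_t deltas[6] = { 632, 278, 277, 273, 273, 281 };  /* ns */
        uint64_t total = 0, total_sq = 0;

        for (int i = 0; i < 6; i++) {
            total += deltas[i];
            total_sq += deltas[i] * deltas[i];
            uint64_t cnt = i + 1;
            uint64_t avg = total / cnt;
            uint64_t var = total_sq / cnt - avg * avg;  /* E[x^2] - E[x]^2 */
            printf("n=%llu avg=%llu std=%llu\n", (unsigned long long)cnt,
                   (unsigned long long)avg, (unsigned long long)isqrt(var));
        }
        return 0;
    }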
ring_buffer_benchmark.c 238 unsigned long avg; in ring_buffer_producer() local
348 avg = NSEC_PER_MSEC / hit; in ring_buffer_producer()
349 trace_printk("%ld ns per entry\n", avg); in ring_buffer_producer()
366 avg = NSEC_PER_MSEC / (hit + missed); in ring_buffer_producer()
367 trace_printk("%ld ns per entry\n", avg); in ring_buffer_producer()
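Lines 348 and 366 invert a rate to get a unit cost: with hit entries recorded per millisecond, NSEC_PER_MSEC / hit is the nanoseconds spent per entry. The same arithmetic with hypothetical counts:

    #include <stdio.h>

    #define NSEC_PER_MSEC 1000000UL

    int main(void)
    {
        /* hypothetical run: 5,000,000 entries written over 2,000 ms */
        unsigned long entries = 5000000UL, time_ms = 2000UL;
        unsigned long hit = entries / time_ms;    /* entries per millisecond */
        unsigned long avg = NSEC_PER_MSEC / hit;  /* nanoseconds per entry */

        printf("%lu entries/ms, %lu ns per entry\n", hit, avg);
        return 0;
    }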
Kconfig 845 last=632 first=3672 max=632 min=632 avg=316 std=446 std^2=199712
846 last=278 first=3672 max=632 min=278 avg=303 std=316 std^2=100337
847 last=277 first=3672 max=632 min=277 avg=296 std=258 std^2=67064
848 last=273 first=3672 max=632 min=273 avg=292 std=224 std^2=50411
849 last=273 first=3672 max=632 min=273 avg=288 std=200 std^2=40389
850 last=281 first=3672 max=632 min=273 avg=287 std=183 std^2=33666
ftrace.c 508 unsigned long long avg; in function_stat_show() local
520 avg = div64_ul(rec->time, rec->counter); in function_stat_show()
521 if (tracing_thresh && (avg < tracing_thresh)) in function_stat_show()
553 trace_print_graph_duration(avg, &s); in function_stat_show()
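function_stat_show() computes the mean time per call with div64_ul() and skips functions whose average falls under tracing_thresh. A userspace sketch of the filter; plain '/' stands in for div64_ul(), and the numbers are hypothetical:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint64_t rec_time = 987654321;     /* total ns spent in the function */
        unsigned long rec_counter = 123456; /* number of calls */
        uint64_t tracing_thresh = 500;      /* ns; 0 disables the filter */

        uint64_t avg = rec_time / rec_counter;  /* mean ns per call */
        if (tracing_thresh && avg < tracing_thresh) {
            printf("below threshold, skipped\n");
            return 0;
        }
        printf("avg = %llu ns/call\n", (unsigned long long)avg);
        return 0;
    }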
/kernel/time/
test_udelay.c 31 uint64_t avg; in udelay_test_single() local
55 avg = sum; in udelay_test_single()
56 do_div(avg, iters); in udelay_test_single()
59 (usecs * 1000) - allowed_error_ns, min, avg, max); in udelay_test_single()
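udelay_test_single() averages the summed measurements with do_div(), the kernel's 64-by-32 division macro for targets without native 64-bit division: it divides its first argument in place and evaluates to the remainder. A sketch of the idiom with do_div() emulated as a GNU C statement expression, the same language extension the kernel itself relies on:

    #include <stdio.h>
    #include <stdint.h>

    /* Userspace stand-in for do_div(): divides n in place by base and
     * yields the remainder, mirroring the kernel macro's contract. */
    #define do_div(n, base) ({              \
        uint32_t __rem = (n) % (base);      \
        (n) /= (base);                      \
        __rem;                              \
    })

    int main(void)
    {
        uint64_t sum = 1234567;   /* hypothetical total measured ns */
        uint32_t iters = 100;
        uint64_t avg = sum;

        do_div(avg, iters);       /* avg now holds sum / iters */
        printf("avg = %llu ns over %u iterations\n",
               (unsigned long long)avg, iters);
        return 0;
    }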
/kernel/rcu/
refscale.c 724 u64 avg; in main_func() local
727 avg = div_u64_rem(result_avg[exp], 1000, &rem); in main_func()
728 sprintf(buf1, "%d\t%llu.%03u\n", exp + 1, avg, rem); in main_func()
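main_func() keeps its per-experiment averages scaled by 1000 and splits them into whole and thousandth parts with div_u64_rem() for printing. A sketch of that fixed-point formatting with div_u64_rem() emulated; the kernel helper has this same shape, returning the quotient and storing the remainder:

    #include <stdio.h>
    #include <stdint.h>

    /* Userspace stand-in for the kernel's div_u64_rem() */
    static uint64_t div_u64_rem(uint64_t dividend, uint32_t divisor,
                                uint32_t *remainder)
    {
        *remainder = dividend % divisor;
        return dividend / divisor;
    }

    int main(void)
    {
        uint64_t result_avg = 1234567;   /* hypothetical average, scaled by 1000 */
        uint32_t rem;
        uint64_t avg = div_u64_rem(result_avg, 1000, &rem);
        char buf[32];

        sprintf(buf, "%d\t%llu.%03u\n", 1, (unsigned long long)avg, rem);
        printf("%s", buf);               /* -> "1    1234.567" */
        return 0;
    }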