/kernel/sched/
fair.c
   699  s64 avg = cfs_rq->avg_vruntime;  in avg_vruntime() local
   705  avg += entity_key(cfs_rq, curr) * weight;  in avg_vruntime()
   711  if (avg < 0)  in avg_vruntime()
   712  avg -= (load - 1);  in avg_vruntime()
   713  avg = div_s64(avg, load);  in avg_vruntime()
   716  return cfs_rq->min_vruntime + avg;  in avg_vruntime()
   772  s64 avg = cfs_rq->avg_vruntime;  in vruntime_eligible() local
   778  avg += entity_key(cfs_rq, curr) * weight;  in vruntime_eligible()
   782  return avg >= (s64)(vruntime - cfs_rq->min_vruntime) * load;  in vruntime_eligible()
  1064  struct sched_avg *sa = &se->avg;  in init_entity_runnable_average()
  [all …]
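The avg_vruntime()/vruntime_eligible() hits above maintain a load-weighted sum of entity keys relative to cfs_rq->min_vruntime and divide it by the total load; the "if (avg < 0) avg -= (load - 1)" step before div_s64() makes the signed division round toward negative infinity instead of toward zero. A minimal stand-alone sketch of that floor-division trick (illustrative names, not the kernel's):

    #include <stdint.h>
    #include <stdio.h>

    /* Divide a signed weighted sum by a positive total weight, rounding
     * toward negative infinity, mirroring the avg -= (load - 1) pattern. */
    static int64_t weighted_avg_floor(int64_t weighted_sum, int64_t total_weight)
    {
        if (weighted_sum < 0)
            weighted_sum -= (total_weight - 1);   /* bias so '/' rounds down */
        return weighted_sum / total_weight;
    }

    int main(void)
    {
        /* -5 / 2 truncates to -2 in C; the biased version gives floor(-2.5) = -3. */
        printf("%lld\n", (long long)weighted_avg_floor(-5, 2));
        return 0;
    }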
pelt.h
    42  static inline u32 get_pelt_divider(struct sched_avg *avg)  in get_pelt_divider() argument
    44  return PELT_MIN_DIVIDER + avg->period_contrib;  in get_pelt_divider()
    47  static inline void cfs_se_util_change(struct sched_avg *avg)  in cfs_se_util_change() argument
    55  enqueued = avg->util_est;  in cfs_se_util_change()
    61  WRITE_ONCE(avg->util_est, enqueued);  in cfs_se_util_change()
   165  u32 util_sum = rq->cfs.avg.util_sum;  in update_idle_rq_clock_pelt()
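get_pelt_divider() above returns a fixed base plus avg->period_contrib, and dividing a PELT running sum by that divider is what produces the corresponding average, so an always-running entity converges toward avg == weight. A rough stand-alone sketch of that relationship; MAX_PELT_SUM is an illustrative placeholder, not the kernel's PELT_MIN_DIVIDER value:

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_PELT_SUM 46720u   /* assumption: stand-in for the real constant */

    /* Divider = saturated maximum sum + partially accumulated current period. */
    static uint32_t pelt_divider_sketch(uint32_t period_contrib)
    {
        return MAX_PELT_SUM + period_contrib;
    }

    /* avg = weight * sum / divider; as sum saturates, avg approaches weight. */
    static uint64_t pelt_avg_sketch(uint64_t sum, uint32_t period_contrib,
                                    uint64_t weight)
    {
        return (weight * sum) / pelt_divider_sketch(period_contrib);
    }

    int main(void)
    {
        /* A nearly saturated sum with weight 1024 yields an avg close to 1024. */
        printf("%llu\n",
               (unsigned long long)pelt_avg_sketch(MAX_PELT_SUM, 512, 1024));
        return 0;
    }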
psi.c
   317  static void calc_avgs(unsigned long avg[3], int missed_periods,  in calc_avgs()
   324  avg[0] = calc_load_n(avg[0], EXP_10s, 0, missed_periods);  in calc_avgs()
   325  avg[1] = calc_load_n(avg[1], EXP_60s, 0, missed_periods);  in calc_avgs()
   326  avg[2] = calc_load_n(avg[2], EXP_300s, 0, missed_periods);  in calc_avgs()
   332  avg[0] = calc_load(avg[0], EXP_10s, pct);  in calc_avgs()
   333  avg[1] = calc_load(avg[1], EXP_60s, pct);  in calc_avgs()
   334  avg[2] = calc_load(avg[2], EXP_300s, pct);  in calc_avgs()
   563  calc_avgs(group->avg[s], missed_periods, sample, period);  in update_averages()
  1274  unsigned long avg[3] = { 0, };  in psi_show() local
  1281  avg[w] = group->avg[res * 2 + full][w];  in psi_show()
  [all …]
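calc_avgs() above folds each sample into the 10s/60s/300s windows with the kernel's fixed-point moving-average helpers; the calc_load_n() calls decay the averages across missed periods with a zero sample. A stand-alone sketch of the fixed-point EWMA shape; the shift width and the exp constant here are assumptions, not necessarily the exact kernel values:

    #include <stdio.h>

    #define FSHIFT  11                    /* assumed fixed-point shift */
    #define FIXED_1 (1UL << FSHIFT)

    /* One EWMA step: new = old*exp + sample*(1 - exp), all in fixed point. */
    static unsigned long ewma_step(unsigned long old, unsigned long exp,
                                   unsigned long sample)
    {
        return (old * exp + sample * (FIXED_1 - exp)) >> FSHIFT;
    }

    int main(void)
    {
        unsigned long avg = 0;
        unsigned long exp = 1677;         /* assumed ~FIXED_1 * e^(-2s/10s) */

        /* Feed a constant 50% pressure sample; avg converges toward FIXED_1/2. */
        for (int i = 0; i < 20; i++)
            avg = ewma_step(avg, exp, FIXED_1 / 2);
        printf("avg = %lu of %lu\n", avg, FIXED_1);
        return 0;
    }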
pelt.c
   299  if (___update_load_sum(now, &se->avg, 0, 0, 0)) {  in __update_load_avg_blocked_se()
   300  ___update_load_avg(&se->avg, se_weight(se));  in __update_load_avg_blocked_se()
   311  if (___update_load_sum(now, &se->avg, !!se->on_rq, se_runnable(se),  in __update_load_avg_se()
   314  ___update_load_avg(&se->avg, se_weight(se));  in __update_load_avg_se()
   315  cfs_se_util_change(&se->avg);  in __update_load_avg_se()
   325  if (___update_load_sum(now, &cfs_rq->avg,  in __update_load_avg_cfs_rq()
   330  ___update_load_avg(&cfs_rq->avg, 1);  in __update_load_avg_cfs_rq()
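The ___update_load_sum()/___update_load_avg() calls above drive PELT's geometrically decayed sums: each ~1024us period the old sum is scaled by y, with y chosen so that y^32 == 1/2, before the new period's contribution is added; the kernel does this in fixed point with precomputed tables. A floating-point toy model of the decay, purely illustrative:

    #include <math.h>
    #include <stdio.h>

    int main(void)
    {
        const double y = pow(0.5, 1.0 / 32.0);   /* y^32 == 0.5 */
        double sum = 0.0;

        /* An always-running entity contributes a full 1024 each period. */
        for (int period = 0; period < 1000; period++)
            sum = sum * y + 1024.0;

        /* The sum saturates near 1024 / (1 - y); dividing by that maximum
         * (plus the partial current period) is what yields the average. */
        printf("saturated sum ~= %.0f\n", sum);
        return 0;
    }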
debug.c
   529  P(se->avg.load_avg);  in print_cfs_group_stats()
   530  P(se->avg.util_avg);  in print_cfs_group_stats()
   531  P(se->avg.runnable_avg);  in print_cfs_group_stats()
   684  cfs_rq->avg.load_avg);  in print_cfs_rq()
   686  cfs_rq->avg.runnable_avg);  in print_cfs_rq()
   688  cfs_rq->avg.util_avg);  in print_cfs_rq()
   690  cfs_rq->avg.util_est);  in print_cfs_rq()
  1078  P(se.avg.load_sum);  in proc_sched_show_task()
  1079  P(se.avg.runnable_sum);  in proc_sched_show_task()
  1080  P(se.avg.util_sum);  in proc_sched_show_task()
  [all …]
sched.h
   226  static inline void update_avg(u64 *avg, u64 sample)  in update_avg() argument
   228  s64 diff = sample - *avg;  in update_avg()
   229  *avg += diff / 8;  in update_avg()
   596  struct sched_avg avg;  member
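update_avg() above is an exponentially weighted moving average with weight 1/8: each sample pulls the stored value an eighth of the way toward itself. A stand-alone sketch showing how it converges:

    #include <stdint.h>
    #include <stdio.h>

    /* Same shape as the update_avg() hit above: new = old + (sample - old) / 8. */
    static void update_avg_sketch(uint64_t *avg, uint64_t sample)
    {
        int64_t diff = (int64_t)(sample - *avg);
        *avg += diff / 8;
    }

    int main(void)
    {
        uint64_t avg = 0;

        for (int i = 0; i < 4; i++) {
            update_avg_sketch(&avg, 800);
            printf("avg = %llu\n", (unsigned long long)avg);  /* 100, 187, 263, 330 */
        }
        return 0;
    }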
/kernel/trace/
trace_benchmark.c
    45  unsigned int avg;  in trace_do_benchmark() local
   109  avg = delta;  in trace_do_benchmark()
   124  seed = avg;  in trace_do_benchmark()
   140  bm_last, bm_first, bm_max, bm_min, avg, std, stddev);  in trace_do_benchmark()
   143  bm_avg = avg;  in trace_do_benchmark()
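trace_do_benchmark() keeps running statistics over the measured deltas and prints last/first/max/min/avg/std/std^2 (the Kconfig hits below quote sample output in that format). A sketch of deriving such figures from a running sum and sum of squares; this shows the general technique, not the file's exact bookkeeping:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* A few delta samples, borrowed from the "last" column of the
         * sample output quoted below, reused purely as example inputs. */
        uint64_t samples[] = { 632, 278, 277, 273, 273, 281 };
        uint64_t n = 0, sum = 0, sum_sq = 0;

        for (unsigned int i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
            n++;
            sum += samples[i];
            sum_sq += samples[i] * samples[i];
        }

        uint64_t avg = sum / n;
        uint64_t var = sum_sq / n - avg * avg;   /* "std^2"-style variance */

        printf("avg=%llu std^2=%llu\n",
               (unsigned long long)avg, (unsigned long long)var);
        return 0;
    }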
ring_buffer_benchmark.c
   238  unsigned long avg;  in ring_buffer_producer() local
   348  avg = NSEC_PER_MSEC / hit;  in ring_buffer_producer()
   349  trace_printk("%ld ns per entry\n", avg);  in ring_buffer_producer()
   366  avg = NSEC_PER_MSEC / (hit + missed);  in ring_buffer_producer()
   367  trace_printk("%ld ns per entry\n", avg);  in ring_buffer_producer()
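The two avg assignments above are throughput-to-latency conversions: given how many entries the producer managed per millisecond (and optionally how many were missed), NSEC_PER_MSEC divided by that count gives nanoseconds per entry. A worked example with made-up counts:

    #include <stdio.h>

    #define NSEC_PER_MSEC 1000000UL

    int main(void)
    {
        unsigned long hit = 2000, missed = 500;   /* hypothetical per-millisecond counts */

        printf("%lu ns per entry (hits only)\n", NSEC_PER_MSEC / hit);               /* 500 */
        printf("%lu ns per entry (incl. missed)\n", NSEC_PER_MSEC / (hit + missed)); /* 400 */
        return 0;
    }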
Kconfig
   900  last=632 first=3672 max=632 min=632 avg=316 std=446 std^2=199712
   901  last=278 first=3672 max=632 min=278 avg=303 std=316 std^2=100337
   902  last=277 first=3672 max=632 min=277 avg=296 std=258 std^2=67064
   903  last=273 first=3672 max=632 min=273 avg=292 std=224 std^2=50411
   904  last=273 first=3672 max=632 min=273 avg=288 std=200 std^2=40389
   905  last=281 first=3672 max=632 min=273 avg=287 std=183 std^2=33666
ftrace.c
   539  unsigned long long avg;  in function_stat_show() local
   551  avg = div64_ul(rec->time, rec->counter);  in function_stat_show()
   552  if (tracing_thresh && (avg < tracing_thresh))  in function_stat_show()
   584  trace_print_graph_duration(avg, &s);  in function_stat_show()
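function_stat_show() above divides a function's accumulated time by its call count to get an average duration, and the record is filtered out when tracing_thresh is set and the average falls below it. A plain-C sketch of that filter with hypothetical numbers (div64_ul() replaced by ordinary division):

    #include <stdio.h>

    int main(void)
    {
        unsigned long long total_ns = 150000, calls = 300, thresh_ns = 1000;
        unsigned long long avg = total_ns / calls;   /* 500 ns per call */

        if (thresh_ns && avg < thresh_ns)
            printf("below threshold, record would be skipped\n");
        else
            printf("avg = %llu ns per call\n", avg);
        return 0;
    }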
/kernel/time/
test_udelay.c
    31  uint64_t avg;  in udelay_test_single() local
    55  avg = sum;  in udelay_test_single()
    56  do_div(avg, iters);  in udelay_test_single()
    59  (usecs * 1000) - allowed_error_ns, min, avg, max);  in udelay_test_single()
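Here the summed delay measurements are turned into a mean with do_div(), which divides a 64-bit value in place by a 32-bit divisor and returns the remainder. A stand-in sketch of the same pattern; do_div_sketch() is illustrative, not the kernel macro:

    #include <stdint.h>
    #include <stdio.h>

    /* Divide *n in place, hand back the remainder (the do_div() contract). */
    static uint32_t do_div_sketch(uint64_t *n, uint32_t base)
    {
        uint32_t rem = (uint32_t)(*n % base);

        *n /= base;
        return rem;
    }

    int main(void)
    {
        uint64_t sum_ns = 1234567;       /* total of all measured delays */
        uint32_t iters = 100;

        do_div_sketch(&sum_ns, iters);   /* sum_ns now holds the average */
        printf("avg = %llu ns\n", (unsigned long long)sum_ns);   /* 12345 */
        return 0;
    }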
/kernel/rcu/
refscale.c
   992  u64 avg;  in main_func() local
   995  avg = div_u64_rem(result_avg[exp], 1000, &rem);  in main_func()
   996  sprintf(buf1, "%d\t%llu.%03u\n", exp + 1, avg, rem);  in main_func()
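main_func() stores its per-experiment averages scaled by 1000 and splits them with div_u64_rem() (quotient returned, remainder written through the pointer) so they print with three decimal places. A worked example of that fixed-point formatting:

    #include <stdio.h>

    int main(void)
    {
        unsigned long long scaled = 12345;               /* average stored as value * 1000 */
        unsigned int rem = (unsigned int)(scaled % 1000);
        unsigned long long whole = scaled / 1000;

        printf("%llu.%03u\n", whole, rem);               /* prints "12.345" */
        return 0;
    }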