
Searched refs:delta (Results 1 – 25 of 38) sorted by relevance


/kernel/sched/
loadavg.c
81 long nr_active, delta = 0; in calc_load_fold_active() local
87 delta = nr_active - this_rq->calc_load_active; in calc_load_fold_active()
91 return delta; in calc_load_fold_active()
236 long delta; in calc_load_nohz_fold() local
238 delta = calc_load_fold_active(rq, 0); in calc_load_nohz_fold()
239 if (delta) { in calc_load_nohz_fold()
242 atomic_long_add(delta, &calc_load_nohz[idx]); in calc_load_nohz_fold()
287 long delta = 0; in calc_load_nohz_read() local
290 delta = atomic_long_xchg(&calc_load_nohz[idx], 0); in calc_load_nohz_read()
292 return delta; in calc_load_nohz_read()
[all …]
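
The loadavg.c matches show the load-average fold: each CPU computes how much its active task count changed since the last fold, and only a non-zero delta is pushed into a shared accumulator (or a NO_HZ bucket). A minimal userspace sketch of that pattern, with illustrative names (cpu_state, global_active) rather than kernel symbols:

    #include <stdatomic.h>

    struct cpu_state {
        long nr_running;
        long nr_uninterruptible;
        long calc_load_active;          /* value folded last time */
    };

    static atomic_long global_active;   /* stands in for the shared counter */

    static long fold_active(struct cpu_state *cs)
    {
        long nr_active = cs->nr_running + cs->nr_uninterruptible;
        long delta = 0;

        if (nr_active != cs->calc_load_active) {
            delta = nr_active - cs->calc_load_active;
            cs->calc_load_active = nr_active;
        }
        return delta;
    }

    static void fold_into_global(struct cpu_state *cs)
    {
        long delta = fold_active(cs);

        if (delta)                      /* skip the atomic when nothing changed */
            atomic_fetch_add(&global_active, delta);
    }
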
stats.h
9 rq_sched_info_arrive(struct rq *rq, unsigned long long delta) in rq_sched_info_arrive() argument
12 rq->rq_sched_info.run_delay += delta; in rq_sched_info_arrive()
21 rq_sched_info_depart(struct rq *rq, unsigned long long delta) in rq_sched_info_depart() argument
24 rq->rq_cpu_time += delta; in rq_sched_info_depart()
28 rq_sched_info_dequeue(struct rq *rq, unsigned long long delta) in rq_sched_info_dequeue() argument
31 rq->rq_sched_info.run_delay += delta; in rq_sched_info_dequeue()
45 static inline void rq_sched_info_arrive (struct rq *rq, unsigned long long delta) { } in rq_sched_info_arrive() argument
46 static inline void rq_sched_info_dequeue(struct rq *rq, unsigned long long delta) { } in rq_sched_info_dequeue() argument
47 static inline void rq_sched_info_depart (struct rq *rq, unsigned long long delta) { } in rq_sched_info_depart() argument
183 unsigned long long delta = 0; in sched_info_dequeue() local
[all …]
pelt.c
107 accumulate_sum(u64 delta, struct sched_avg *sa, in accumulate_sum() argument
110 u32 contrib = (u32)delta; /* p == 0 -> delta < 1024 */ in accumulate_sum()
113 delta += sa->period_contrib; in accumulate_sum()
114 periods = delta / 1024; /* A period is 1024us (~1ms) */ in accumulate_sum()
128 delta %= 1024; in accumulate_sum()
141 1024 - sa->period_contrib, delta); in accumulate_sum()
144 sa->period_contrib = delta; in accumulate_sum()
187 u64 delta; in ___update_load_sum() local
189 delta = now - sa->last_update_time; in ___update_load_sum()
194 if ((s64)delta < 0) { in ___update_load_sum()
[all …]
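
accumulate_sum() above splits the elapsed time into whole 1024 us PELT periods plus a remainder that is carried in period_contrib for the next update. A small standalone model of just that split (the decay of the running sums is omitted):

    #include <stdint.h>
    #include <stdio.h>

    struct avg { uint32_t period_contrib; };

    static uint64_t split_periods(struct avg *sa, uint64_t delta)
    {
        uint64_t periods;

        delta += sa->period_contrib;        /* include the unfinished period */
        periods = delta / 1024;             /* a period is 1024 us (~1 ms)   */
        delta %= 1024;                      /* remainder starts a new period */
        sa->period_contrib = delta;

        return periods;
    }

    int main(void)
    {
        struct avg sa = { .period_contrib = 900 };
        uint64_t periods = split_periods(&sa, 300);

        /* 900 us carried over + 300 us elapsed -> 1 full period, 176 us remain */
        printf("periods=%llu contrib=%u\n",
               (unsigned long long)periods, sa.period_contrib);
        return 0;
    }
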
cputime.c
37 static void irqtime_account_delta(struct irqtime *irqtime, u64 delta, in irqtime_account_delta() argument
43 cpustat[idx] += delta; in irqtime_account_delta()
44 irqtime->total += delta; in irqtime_account_delta()
45 irqtime->tick_delta += delta; in irqtime_account_delta()
57 s64 delta; in irqtime_account_irq() local
65 delta = sched_clock_cpu(cpu) - irqtime->irq_start_time; in irqtime_account_irq()
66 irqtime->irq_start_time += delta; in irqtime_account_irq()
76 irqtime_account_delta(irqtime, delta, CPUTIME_IRQ); in irqtime_account_irq()
79 irqtime_account_delta(irqtime, delta, CPUTIME_SOFTIRQ); in irqtime_account_irq()
83 trace_android_rvh_account_irq(curr, cpu, delta); in irqtime_account_irq()
[all …]
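
irqtime_account_irq() above measures how long the CPU has spent in hard or soft interrupt context since the last sample and advances irq_start_time by the same delta so no interval is counted twice. An illustrative userspace model of that sampling pattern (the bucket names are made up, not the kernel's CPUTIME_* indices):

    #include <stdint.h>
    #include <time.h>

    enum { BUCKET_IRQ, BUCKET_SOFTIRQ, NR_BUCKETS };

    struct irqtime {
        uint64_t start_time_ns;
        uint64_t total_ns;
        uint64_t bucket_ns[NR_BUCKETS];
    };

    static uint64_t now_ns(void)
    {
        struct timespec ts;

        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
    }

    static void account(struct irqtime *it, int bucket)
    {
        uint64_t delta = now_ns() - it->start_time_ns;

        it->start_time_ns += delta;     /* consume the interval just accounted */
        it->bucket_ns[bucket] += delta;
        it->total_ns += delta;
    }
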
pelt.h
78 static inline void update_rq_clock_pelt(struct rq *rq, s64 delta) in update_rq_clock_pelt() argument
80 delta <<= READ_ONCE(sched_pelt_lshift); in update_rq_clock_pelt()
82 rq->clock_task_mult += delta; in update_rq_clock_pelt()
106 delta = cap_scale(delta, arch_scale_cpu_capacity(cpu_of(rq))); in update_rq_clock_pelt()
107 delta = cap_scale(delta, arch_scale_freq_capacity(cpu_of(rq))); in update_rq_clock_pelt()
109 rq->clock_pelt += delta; in update_rq_clock_pelt()
209 update_rq_clock_pelt(struct rq *rq, s64 delta) { } in update_rq_clock_pelt() argument
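
update_rq_clock_pelt() above scales the raw time delta by the CPU's compute capacity and by its current frequency before adding it to the PELT clock, so load tracking sees "idealized" full-speed time. A minimal model of the two cap_scale() steps, assuming the usual 1024-based capacity scale; the example capacity values are made up:

    #include <stdint.h>

    #define CAPACITY_SHIFT  10
    #define CAPACITY_SCALE  (1u << CAPACITY_SHIFT)     /* 1024 */

    static inline int64_t cap_scale(int64_t delta, uint32_t cap)
    {
        return (delta * cap) >> CAPACITY_SHIFT;
    }

    static int64_t scale_pelt_delta(int64_t delta, uint32_t cpu_cap, uint32_t freq_cap)
    {
        delta = cap_scale(delta, cpu_cap);     /* e.g. 446/1024 for a little core */
        delta = cap_scale(delta, freq_cap);    /* e.g. 512/1024 at half max freq  */
        return delta;
    }

    /* scale_pelt_delta(1000, 446, 512) is about 217: 1000 ns of wall time
     * advances the PELT clock by only ~217 ns on a half-speed little core. */
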
clock.c
268 s64 delta; in sched_clock_local() local
272 delta = now - scd->tick_raw; in sched_clock_local()
273 if (unlikely(delta < 0)) in sched_clock_local()
274 delta = 0; in sched_clock_local()
285 clock = gtod + delta; in sched_clock_local()
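
sched_clock_local() above clamps a negative delta to zero so a raw clock that steps backwards cannot make sched_clock() jump back. The same idiom in a tiny standalone form:

    #include <stdint.h>

    static uint64_t monotonic_sample(uint64_t gtod, uint64_t now, uint64_t tick_raw)
    {
        int64_t delta = (int64_t)(now - tick_raw);

        if (delta < 0)          /* raw clock went backwards: treat as no progress */
            delta = 0;
        return gtod + (uint64_t)delta;
    }
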
psi.c
280 u32 delta; in get_recent_times() local
293 delta = times[s] - groupc->times_prev[aggregator][s]; in get_recent_times()
296 times[s] = delta; in get_recent_times()
297 if (delta) in get_recent_times()
705 u32 delta; in record_times() local
707 delta = now - groupc->state_start; in record_times()
711 groupc->times[PSI_IO_SOME] += delta; in record_times()
713 groupc->times[PSI_IO_FULL] += delta; in record_times()
717 groupc->times[PSI_MEM_SOME] += delta; in record_times()
719 groupc->times[PSI_MEM_FULL] += delta; in record_times()
[all …]
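
record_times() above charges the time since state_start to every pressure state that was active during that window. A simplified sketch of that bookkeeping; the state names are illustrative, and resetting state_start inside the same function is part of this simplified model (the real code updates it in the caller):

    #include <stdint.h>

    enum { SOME_IO, FULL_IO, SOME_MEM, FULL_MEM, NR_STATES };

    struct group {
        uint64_t times[NR_STATES];
        uint64_t state_start;
        unsigned int state_mask;        /* bit N set => state N currently active */
    };

    static void record_times(struct group *g, uint64_t now)
    {
        uint32_t delta = (uint32_t)(now - g->state_start);

        for (int s = 0; s < NR_STATES; s++)
            if (g->state_mask & (1u << s))
                g->times[s] += delta;

        g->state_start = now;
    }
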
/kernel/trace/
trace_benchmark.c
41 u64 delta; in trace_do_benchmark() local
60 delta = stop - start; in trace_do_benchmark()
67 bm_first = delta; in trace_do_benchmark()
73 bm_last = delta; in trace_do_benchmark()
75 if (delta > bm_max) in trace_do_benchmark()
76 bm_max = delta; in trace_do_benchmark()
77 if (!bm_min || delta < bm_min) in trace_do_benchmark()
78 bm_min = delta; in trace_do_benchmark()
92 bm_total += delta; in trace_do_benchmark()
93 bm_totalsq += delta * delta; in trace_do_benchmark()
[all …]
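
trace_do_benchmark() above keeps first/last/min/max plus a running total and total-of-squares of each iteration's delta, which is enough to derive the mean and standard deviation later. A compact model of that bookkeeping:

    #include <stdint.h>

    struct bm_stats {
        uint64_t first, last, min, max;
        uint64_t total, totalsq;
        uint64_t count;
    };

    static void bm_record(struct bm_stats *s, uint64_t start, uint64_t stop)
    {
        uint64_t delta = stop - start;

        if (!s->count)
            s->first = delta;
        s->last = delta;

        if (delta > s->max)
            s->max = delta;
        if (!s->min || delta < s->min)
            s->min = delta;             /* treat 0 as "not yet set", as above */

        s->total   += delta;
        s->totalsq += delta * delta;    /* for variance = E[x^2] - E[x]^2 */
        s->count++;
    }
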
ring_buffer.c
382 u64 delta; member
2748 rb_add_time_stamp(struct ring_buffer_event *event, u64 delta, bool abs) in rb_add_time_stamp() argument
2757 event->time_delta = delta & TS_MASK; in rb_add_time_stamp()
2758 event->array[0] = delta >> TS_SHIFT; in rb_add_time_stamp()
2782 (unsigned long long)info->delta, in rb_check_timestamp()
2797 u64 *delta, in rb_add_timestamp() argument
2803 if (unlikely(info->delta > (1ULL << 59))) { in rb_add_timestamp()
2821 info->delta = 0; in rb_add_timestamp()
2823 *event = rb_add_time_stamp(*event, info->delta, abs); in rb_add_timestamp()
2825 *delta = 0; in rb_add_timestamp()
[all …]
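
rb_add_time_stamp() above stores an oversized timestamp delta by splitting it across the event's small time_delta field and its first array slot; the 1ULL << 59 check visible at line 2803 is consistent with a 27 + 32 bit split. A sketch of that encoding, assuming the 27-bit TS_SHIFT layout and a simplified event structure:

    #include <stdint.h>

    #define TS_SHIFT  27
    #define TS_MASK   ((1ull << TS_SHIFT) - 1)

    struct ts_event {
        uint32_t time_delta;    /* low 27 bits of the delta (a bitfield in the real event) */
        uint32_t array0;        /* remaining high bits */
    };

    static void encode_time_stamp(struct ts_event *ev, uint64_t delta)
    {
        /* Deltas up to 2^59 fit: 27 bits here plus 32 bits in array0. */
        ev->time_delta = (uint32_t)(delta & TS_MASK);
        ev->array0     = (uint32_t)(delta >> TS_SHIFT);
    }

    static uint64_t decode_time_stamp(const struct ts_event *ev)
    {
        return ((uint64_t)ev->array0 << TS_SHIFT) | ev->time_delta;
    }
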
trace_irqsoff.c
309 static bool report_latency(struct trace_array *tr, u64 delta) in report_latency() argument
312 if (delta < tracing_thresh) in report_latency()
315 if (delta <= tr->max_latency) in report_latency()
327 u64 T0, T1, delta; in check_critical_timing() local
333 delta = T1-T0; in check_critical_timing()
337 if (!report_latency(tr, delta)) in check_critical_timing()
343 if (!report_latency(tr, delta)) in check_critical_timing()
356 tr->max_latency = delta; in check_critical_timing()
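
report_latency() above only lets a latency through when it reaches an explicitly configured threshold, or, with no threshold set, when it beats the current maximum. The same filter as a standalone function (thresh and max_latency stand in for tracing_thresh and tr->max_latency):

    #include <stdbool.h>
    #include <stdint.h>

    static bool report_latency(uint64_t delta, uint64_t thresh, uint64_t max_latency)
    {
        if (thresh) {
            if (delta < thresh)
                return false;       /* below the explicit threshold: ignore */
        } else {
            if (delta <= max_latency)
                return false;       /* not a new maximum: ignore */
        }
        return true;
    }
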
trace_sched_wakeup.c
352 static bool report_latency(struct trace_array *tr, u64 delta) in report_latency() argument
355 if (delta < tracing_thresh) in report_latency()
358 if (delta <= tr->max_latency) in report_latency()
434 u64 T0, T1, delta; in probe_wakeup_sched_switch() local
481 delta = T1-T0; in probe_wakeup_sched_switch()
483 if (!report_latency(wakeup_trace, delta)) in probe_wakeup_sched_switch()
487 wakeup_trace->max_latency = delta; in probe_wakeup_sched_switch()
/kernel/
tsacct.c
26 u64 delta; in bacct_add_tsk() local
32 delta = ktime_get_ns() - tsk->start_time; in bacct_add_tsk()
34 do_div(delta, NSEC_PER_USEC); in bacct_add_tsk()
35 stats->ac_etime = delta; in bacct_add_tsk()
37 btime = ktime_get_real_seconds() - div_u64(delta, USEC_PER_SEC); in bacct_add_tsk()
121 u64 time, delta; in __acct_update_integrals() local
127 delta = time - tsk->acct_timexpd; in __acct_update_integrals()
129 if (delta < TICK_NSEC) in __acct_update_integrals()
138 tsk->acct_rss_mem1 += delta * get_mm_rss(tsk->mm) >> 10; in __acct_update_integrals()
139 tsk->acct_vm_mem1 += delta * tsk->mm->total_vm >> 10; in __acct_update_integrals()
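
bacct_add_tsk() above converts the task's lifetime from nanoseconds to microseconds for ac_etime and derives the begin time by subtracting the elapsed seconds from the current wall-clock seconds. A plain-C sketch of that arithmetic:

    #include <stdint.h>

    #define NSEC_PER_USEC  1000ull
    #define USEC_PER_SEC   1000000ull

    struct acct_out { uint64_t ac_etime_usec; uint64_t ac_btime_sec; };

    static void fill_times(struct acct_out *out, uint64_t now_ns,
                           uint64_t start_ns, uint64_t realtime_sec)
    {
        uint64_t delta = now_ns - start_ns;     /* task lifetime in ns */

        delta /= NSEC_PER_USEC;                 /* -> microseconds (do_div above) */
        out->ac_etime_usec = delta;
        out->ac_btime_sec  = realtime_sec - delta / USEC_PER_SEC;
    }
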
torture.c
199 unsigned long delta; in torture_offline() local
235 delta = jiffies - starttime; in torture_offline()
236 *sum_offl += delta; in torture_offline()
238 *min_offl = delta; in torture_offline()
239 *max_offl = delta; in torture_offline()
241 if (*min_offl > delta) in torture_offline()
242 *min_offl = delta; in torture_offline()
243 if (*max_offl < delta) in torture_offline()
244 *max_offl = delta; in torture_offline()
261 unsigned long delta; in torture_online() local
[all …]
watchdog_hld.c
78 ktime_t delta, now = ktime_get_mono_fast_ns(); in watchdog_check_timestamp() local
80 delta = now - __this_cpu_read(last_timestamp); in watchdog_check_timestamp()
81 if (delta < watchdog_hrtimer_sample_threshold) { in watchdog_check_timestamp()
/kernel/time/
timekeeping.c
169 static inline void tk_update_sleep_time(struct timekeeper *tk, ktime_t delta) in tk_update_sleep_time() argument
171 tk->offs_boot = ktime_add(tk->offs_boot, delta); in tk_update_sleep_time()
244 u64 now, last, mask, max, delta; in timekeeping_get_delta() local
262 delta = clocksource_delta(now, last, mask); in timekeeping_get_delta()
268 if (unlikely((~delta & mask) < (mask >> 3))) { in timekeeping_get_delta()
270 delta = 0; in timekeeping_get_delta()
274 if (unlikely(delta > max)) { in timekeeping_get_delta()
276 delta = tkr->clock->max_cycles; in timekeeping_get_delta()
279 return delta; in timekeeping_get_delta()
287 u64 cycle_now, delta; in timekeeping_get_delta() local
[all …]
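
timekeeping_get_delta() above sanity-checks the masked cycle delta: a value in the top eighth of the mask range is treated as a small negative reading (the clocksource went backwards) and dropped, and anything beyond the clocksource's safe maximum is capped. A standalone model of those two checks:

    #include <stdint.h>

    static uint64_t clocksource_delta(uint64_t now, uint64_t last, uint64_t mask)
    {
        return (now - last) & mask;
    }

    static uint64_t get_delta_checked(uint64_t now, uint64_t last,
                                      uint64_t mask, uint64_t max_cycles)
    {
        uint64_t delta = clocksource_delta(now, last, mask);

        /* A huge masked delta most likely means a small negative one. */
        if ((~delta & mask) < (mask >> 3))
            return 0;

        /* Never convert more cycles than mult/shift can handle safely. */
        if (delta > max_cycles)
            return max_cycles;

        return delta;
    }
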
clockevents.c
236 int64_t delta; in clockevents_program_min_delta() local
240 delta = dev->min_delta_ns; in clockevents_program_min_delta()
241 dev->next_event = ktime_add_ns(ktime_get(), delta); in clockevents_program_min_delta()
247 clc = ((unsigned long long) delta * dev->mult) >> dev->shift; in clockevents_program_min_delta()
275 int64_t delta = 0; in clockevents_program_min_delta() local
279 delta += dev->min_delta_ns; in clockevents_program_min_delta()
280 dev->next_event = ktime_add_ns(ktime_get(), delta); in clockevents_program_min_delta()
286 clc = ((unsigned long long) delta * dev->mult) >> dev->shift; in clockevents_program_min_delta()
307 int64_t delta; in clockevents_program_event() local
326 delta = ktime_to_ns(ktime_sub(expires, ktime_get())); in clockevents_program_event()
[all …]
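
clockevents_program_min_delta() and clockevents_program_event() above convert a nanosecond delta into device cycles with the device's mult/shift pair before programming the hardware, i.e. cycles is roughly delta * (device frequency / NSEC_PER_SEC). A minimal sketch of that conversion with a simplified device structure and only the minimum-delta clamp:

    #include <stdint.h>

    struct clock_event_dev { uint32_t mult, shift; uint64_t min_delta_ns; };

    static uint64_t ns_to_device_cycles(const struct clock_event_dev *dev,
                                        int64_t delta_ns)
    {
        if (delta_ns < (int64_t)dev->min_delta_ns)
            delta_ns = dev->min_delta_ns;   /* never program below the minimum */

        return ((uint64_t)delta_ns * dev->mult) >> dev->shift;
    }
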
tick-sched.c
61 ktime_t delta, nextp; in tick_do_update_jiffies64() local
104 delta = ktime_sub(now, tick_next_period); in tick_do_update_jiffies64()
105 if (unlikely(delta >= TICK_NSEC)) { in tick_do_update_jiffies64()
109 ticks += ktime_divns(delta, incr); in tick_do_update_jiffies64()
670 ktime_t delta; in update_ts_time_stats() local
673 delta = ktime_sub(now, ts->idle_entrytime); in update_ts_time_stats()
675 ts->iowait_sleeptime = ktime_add(ts->iowait_sleeptime, delta); in update_ts_time_stats()
677 ts->idle_sleeptime = ktime_add(ts->idle_sleeptime, delta); in update_ts_time_stats()
729 ktime_t delta = ktime_sub(now, ts->idle_entrytime); in get_cpu_idle_time_us() local
731 idle = ktime_add(ts->idle_sleeptime, delta); in get_cpu_idle_time_us()
[all …]
timecounter.c
82 u64 delta = (cycle_tstamp - tc->cycle_last) & tc->cc->mask; in timecounter_cyc2time() local
90 if (delta > tc->cc->mask / 2) { in timecounter_cyc2time()
91 delta = (tc->cycle_last - cycle_tstamp) & tc->cc->mask; in timecounter_cyc2time()
92 nsec -= cc_cyc2ns_backwards(tc->cc, delta, tc->mask, frac); in timecounter_cyc2time()
94 nsec += cyclecounter_cyc2ns(tc->cc, delta, tc->mask, &frac); in timecounter_cyc2time()
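
timecounter_cyc2time() above uses the masked distance from cycle_last to decide whether the timestamp lies ahead of or behind the last update, and converts forwards or backwards accordingly. A simplified model that drops the fractional-nanosecond bookkeeping of the real helpers:

    #include <stdint.h>

    struct tc { uint64_t cycle_last, nsec_last, mask; uint32_t mult, shift; };

    static uint64_t cyc2ns(const struct tc *tc, uint64_t cycles)
    {
        return (cycles * tc->mult) >> tc->shift;
    }

    static uint64_t cyc2time(const struct tc *tc, uint64_t cycle_tstamp)
    {
        uint64_t delta = (cycle_tstamp - tc->cycle_last) & tc->mask;
        uint64_t nsec = tc->nsec_last;

        if (delta > tc->mask / 2) {
            /* Timestamp is older than cycle_last: measure the other way round. */
            delta = (tc->cycle_last - cycle_tstamp) & tc->mask;
            nsec -= cyc2ns(tc, delta);
        } else {
            nsec += cyc2ns(tc, delta);
        }
        return nsec;
    }
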
alarmtimer.c
266 ktime_t delta; in alarmtimer_suspend() local
273 delta = ktime_sub(next->expires, base->get_ktime()); in alarmtimer_suspend()
274 if (!min || (delta < min)) { in alarmtimer_suspend()
276 min = delta; in alarmtimer_suspend()
444 ktime_t delta; in alarm_forward() local
446 delta = ktime_sub(now, alarm->node.expires); in alarm_forward()
448 if (delta < 0) in alarm_forward()
451 if (unlikely(delta >= interval)) { in alarm_forward()
454 overrun = ktime_divns(delta, incr); in alarm_forward()
511 ktime_t delta; in alarmtimer_freezerset() local
[all …]
clocksource.c
333 s64 delta; in clocksource_verify_percpu() local
356 delta = (s64)((csnow_mid - csnow_begin) & cs->mask); in clocksource_verify_percpu()
357 if (delta < 0) in clocksource_verify_percpu()
359 delta = (csnow_end - csnow_mid) & cs->mask; in clocksource_verify_percpu()
360 if (delta < 0) in clocksource_verify_percpu()
362 delta = clocksource_delta(csnow_end, csnow_begin, cs->mask); in clocksource_verify_percpu()
363 cs_nsec = clocksource_cyc2ns(delta, cs->mult, cs->shift); in clocksource_verify_percpu()
394 u64 csnow, wdnow, cslast, wdlast, delta; in clocksource_watchdog() local
452 delta = clocksource_delta(wdnow, cs->wd_last, watchdog->mask); in clocksource_watchdog()
453 wd_nsec = clocksource_cyc2ns(delta, watchdog->mult, in clocksource_watchdog()
[all …]
ntp.c
402 s64 delta; in second_overflow() local
467 delta = ntp_offset_chunk(time_offset); in second_overflow()
468 time_offset -= delta; in second_overflow()
469 tick_length += delta; in second_overflow()
934 long delta, delta_mod; in hardpps_update_freq() local
954 delta = shift_right(ftemp - pps_freq, NTP_SCALE_SHIFT); in hardpps_update_freq()
956 if (delta > PPS_MAXWANDER || delta < -PPS_MAXWANDER) { in hardpps_update_freq()
958 "hardpps: PPSWANDER: change=%ld\n", delta); in hardpps_update_freq()
970 delta_mod = delta; in hardpps_update_freq()
984 return delta; in hardpps_update_freq()
hrtimer.c
1040 ktime_t delta; in hrtimer_forward() local
1042 delta = ktime_sub(now, hrtimer_get_expires(timer)); in hrtimer_forward()
1044 if (delta < 0) in hrtimer_forward()
1053 if (unlikely(delta >= interval)) { in hrtimer_forward()
1056 orun = ktime_divns(delta, incr); in hrtimer_forward()
1784 ktime_t expires_next, now, entry_time, delta; in hrtimer_interrupt() local
1857 delta = ktime_sub(now, entry_time); in hrtimer_interrupt()
1858 if ((unsigned int)delta > cpu_base->max_hang_time) in hrtimer_interrupt()
1859 cpu_base->max_hang_time = (unsigned int) delta; in hrtimer_interrupt()
1864 if (delta > 100 * NSEC_PER_MSEC) in hrtimer_interrupt()
[all …]
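
hrtimer_forward() above advances an expired timer by whole intervals: the overrun count is the delta divided by the interval, with one extra period added when the expiry would still not land in the future. A sketch of the same arithmetic on plain signed nanosecond values:

    #include <stdint.h>

    static uint64_t forward_expiry(int64_t *expires, int64_t now, int64_t interval)
    {
        int64_t delta = now - *expires;
        uint64_t orun = 0;

        if (delta < 0)
            return 0;                        /* expiry still in the future */

        if (delta >= interval) {
            orun = (uint64_t)(delta / interval);      /* missed intervals */
            *expires += (int64_t)orun * interval;
            if (*expires > now)
                return orun;
            orun++;                          /* count the extra period added below */
        }
        *expires += interval;                /* always end up after 'now' */
        return orun;
    }
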
timer.c
519 unsigned long delta = expires - clk; in calc_wheel_index() local
522 if (delta < LVL_START(1)) { in calc_wheel_index()
524 } else if (delta < LVL_START(2)) { in calc_wheel_index()
526 } else if (delta < LVL_START(3)) { in calc_wheel_index()
528 } else if (delta < LVL_START(4)) { in calc_wheel_index()
530 } else if (delta < LVL_START(5)) { in calc_wheel_index()
532 } else if (delta < LVL_START(6)) { in calc_wheel_index()
534 } else if (delta < LVL_START(7)) { in calc_wheel_index()
536 } else if (LVL_DEPTH > 8 && delta < LVL_START(8)) { in calc_wheel_index()
538 } else if ((long) delta < 0) { in calc_wheel_index()
[all …]
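
calc_wheel_index() above picks a timer-wheel level from the distance to the expiry in jiffies; each level spans a range eight times larger (and eight times coarser) than the one below it. A model of that cascade using the 64-bucket, shift-by-3 wheel geometry; the already-expired (negative delta) case handled in the original is left out:

    #include <stdint.h>

    #define LVL_CLK_SHIFT   3                       /* each level is 8x coarser */
    #define LVL_BITS        6
    #define LVL_SIZE        (1ul << LVL_BITS)       /* 64 buckets per level     */
    #define LVL_START(n)    ((LVL_SIZE - 1) << (((n) - 1) * LVL_CLK_SHIFT))

    static int wheel_level(unsigned long delta)
    {
        for (int lvl = 1; lvl <= 8; lvl++)
            if (delta < LVL_START(lvl))
                return lvl - 1;                     /* fits in level lvl-1 */
        return 8;               /* anything farther out lands in the last level */
    }
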
/kernel/cgroup/
rstat.c
321 struct cgroup_base_stat cur, delta; in cgroup_base_stat_flush() local
335 delta = cur; in cgroup_base_stat_flush()
336 cgroup_base_stat_sub(&delta, &rstatc->last_bstat); in cgroup_base_stat_flush()
337 cgroup_base_stat_add(&cgrp->bstat, &delta); in cgroup_base_stat_flush()
338 cgroup_base_stat_add(&rstatc->last_bstat, &delta); in cgroup_base_stat_flush()
342 delta = cgrp->bstat; in cgroup_base_stat_flush()
343 cgroup_base_stat_sub(&delta, &cgrp->last_bstat); in cgroup_base_stat_flush()
344 cgroup_base_stat_add(&parent->bstat, &delta); in cgroup_base_stat_flush()
345 cgroup_base_stat_add(&cgrp->last_bstat, &delta); in cgroup_base_stat_flush()
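
cgroup_base_stat_flush() above propagates only the change since the last flush, first from the per-CPU counters into the cgroup's own totals and then from the cgroup up to its parent, updating the corresponding last_* snapshot each time so nothing is counted twice. A single-field sketch of that scheme (the struct and field names are simplifications, not kernel symbols):

    #include <stdint.h>

    struct stat { uint64_t cputime; };

    struct node {
        struct node *parent;
        struct stat cur;        /* freshly read per-cpu snapshot        */
        struct stat bstat;      /* accumulated totals                   */
        struct stat last;       /* what was folded in last time         */
        struct stat last_up;    /* what was pushed to the parent so far */
    };

    static void flush_one(struct node *n)
    {
        /* Fold the per-cpu change into this node's totals. */
        uint64_t delta = n->cur.cputime - n->last.cputime;
        n->bstat.cputime += delta;
        n->last.cputime  += delta;

        /* Propagate this node's change up to the parent. */
        if (n->parent) {
            delta = n->bstat.cputime - n->last_up.cputime;
            n->parent->bstat.cputime += delta;
            n->last_up.cputime       += delta;
        }
    }
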
/kernel/bpf/
tnum.c
23 u64 chi = min ^ max, delta; in tnum_range() local
33 delta = (1ULL << bits) - 1; in tnum_range()
34 return TNUM(min & ~delta, delta); in tnum_range()
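
tnum_range() above builds a tristate number covering [min, max]: the highest bit where min and max differ determines how many low-order bits are unknown, delta is a mask of those bits, and the known value is min with them cleared. A standalone version using a GCC/Clang builtin in place of the kernel's fls64():

    #include <stdint.h>

    struct tnum { uint64_t value, mask; };

    static struct tnum tnum_range(uint64_t min, uint64_t max)
    {
        uint64_t chi = min ^ max;
        /* index of the highest differing bit, plus one (fls64 semantics) */
        int bits = chi ? 64 - __builtin_clzll(chi) : 0;
        uint64_t delta;

        if (bits > 63)                  /* top bit differs: everything is unknown */
            return (struct tnum){ .value = 0, .mask = ~0ull };

        delta = (1ull << bits) - 1;     /* mask of all bits up to the highest differing one */
        return (struct tnum){ .value = min & ~delta, .mask = delta };
    }
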
