Searched refs:now (Results 1 – 25 of 39) sorted by relevance

/kernel/time/
tick-sched.c
58 static void tick_do_update_jiffies64(ktime_t now) in tick_do_update_jiffies64() argument
73 if (ktime_before(now, smp_load_acquire(&tick_next_period))) in tick_do_update_jiffies64()
87 if (ktime_before(now, nextp)) in tick_do_update_jiffies64()
97 if (ktime_before(now, tick_next_period)) { in tick_do_update_jiffies64()
104 delta = ktime_sub(now, tick_next_period); in tick_do_update_jiffies64()
186 static void tick_sched_do_timer(struct tick_sched *ts, ktime_t now) in tick_sched_do_timer() argument
211 tick_do_update_jiffies64(now); in tick_sched_do_timer()
224 tick_do_update_jiffies64(now); in tick_sched_do_timer()
651 static void tick_nohz_update_jiffies(ktime_t now) in tick_nohz_update_jiffies() argument
655 __this_cpu_write(tick_cpu_sched.idle_waketime, now); in tick_nohz_update_jiffies()
[all …]
timer_list.c
23 u64 now; member
47 int idx, u64 now) in print_timer() argument
55 (long long)(ktime_to_ns(hrtimer_get_softexpires(timer)) - now), in print_timer()
56 (long long)(ktime_to_ns(hrtimer_get_expires(timer)) - now)); in print_timer()
61 u64 now) in print_active_timers() argument
91 print_timer(m, timer, &tmp, i, now); in print_active_timers()
99 print_base(struct seq_file *m, struct hrtimer_clock_base *base, u64 now) in print_base() argument
112 print_active_timers(m, base, now + ktime_to_ns(base->offset)); in print_base()
115 static void print_cpu(struct seq_file *m, int cpu, u64 now) in print_cpu() argument
123 print_base(m, cpu_base->clock_base + i, now); in print_cpu()
[all …]
alarmtimer.c
244 ktime_t min, now, expires; in alarmtimer_suspend() local
293 now = rtc_tm_to_ktime(tm); in alarmtimer_suspend()
294 now = ktime_add(now, min); in alarmtimer_suspend()
297 ret = rtc_timer_start(rtc, &rtctimer, now, 0); in alarmtimer_suspend()
441 u64 alarm_forward(struct alarm *alarm, ktime_t now, ktime_t interval) in alarm_forward() argument
446 delta = ktime_sub(now, alarm->node.expires); in alarm_forward()
459 if (alarm->node.expires > now) in alarm_forward()
476 ktime_t now = base->get_ktime(); in __alarm_forward_now() local
493 now = ktime_add(now, kj); in __alarm_forward_now()
496 return alarm_forward(alarm, now, interval); in __alarm_forward_now()
[all …]
timekeeping_internal.h
19 static inline u64 clocksource_delta(u64 now, u64 last, u64 mask) in clocksource_delta() argument
21 u64 ret = (now - last) & mask; in clocksource_delta()
30 static inline u64 clocksource_delta(u64 now, u64 last, u64 mask) in clocksource_delta() argument
32 return (now - last) & mask; in clocksource_delta()
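The timekeeping_internal.h hits above contain the whole of clocksource_delta(): the elapsed cycle count is simply (now - last) & mask, so a clocksource whose counter is narrower than 64 bits still yields the right delta across a wrap. A minimal userspace sketch of the same arithmetic (the 24-bit mask and the sample values are made up for illustration):

#include <stdint.h>
#include <stdio.h>

/* Same arithmetic as the clocksource_delta() hit above: unsigned
 * subtraction followed by a mask handles counter wrap-around. */
static uint64_t masked_delta(uint64_t now, uint64_t last, uint64_t mask)
{
	return (now - last) & mask;
}

int main(void)
{
	uint64_t mask = 0xffffff;	/* hypothetical 24-bit counter */
	uint64_t last = 0xfffff0;	/* read just before the wrap */
	uint64_t now  = 0x000010;	/* read just after the wrap */

	/* 0x10 - 0xfffff0 underflows, but masking recovers the 0x20 ticks. */
	printf("elapsed cycles: 0x%llx\n",
	       (unsigned long long)masked_delta(now, last, mask));
	return 0;
}
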
hrtimer.c
631 ktime_t now = ktime_get_update_offsets_now(&base->clock_was_set_seq, in hrtimer_update_base() local
638 return now; in hrtimer_update_base()
1037 u64 hrtimer_forward(struct hrtimer *timer, ktime_t now, ktime_t interval) in hrtimer_forward() argument
1042 delta = ktime_sub(now, hrtimer_get_expires(timer)); in hrtimer_forward()
1058 if (hrtimer_get_expires_tv64(timer) > now) in hrtimer_forward()
1644 struct hrtimer *timer, ktime_t *now, in __run_hrtimer() argument
1682 trace_hrtimer_expire_entry(timer, now); in __run_hrtimer()
1717 static void __hrtimer_run_queues(struct hrtimer_cpu_base *cpu_base, ktime_t now, in __hrtimer_run_queues() argument
1727 basenow = ktime_add(now, base->offset); in __hrtimer_run_queues()
1760 ktime_t now; in hrtimer_run_softirq() local
[all …]
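The hrtimer.c hits include hrtimer_forward(), which advances a timer's expiry by whole multiples of its interval until it lies beyond now and returns how many intervals were skipped (the overrun count). A standalone sketch of that forwarding arithmetic, using plain nanosecond counters instead of ktime_t and made-up values:

#include <stdint.h>
#include <stdio.h>

/* Push "expires" past "now" in whole multiples of "interval" and
 * report how many intervals were skipped. */
static uint64_t forward_expiry(uint64_t *expires, uint64_t now, uint64_t interval)
{
	uint64_t overruns;

	if (*expires > now)		/* already in the future: nothing to do */
		return 0;

	overruns = (now - *expires) / interval + 1;
	*expires += overruns * interval;
	return overruns;
}

int main(void)
{
	uint64_t expires = 1000, interval = 250, now = 1600;
	uint64_t o = forward_expiry(&expires, now, interval);

	printf("overruns=%llu, new expiry=%llu\n",
	       (unsigned long long)o, (unsigned long long)expires);
	/* prints: overruns=3, new expiry=1750 */
	return 0;
}
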
posix-cpu-timers.c
115 static u64 bump_cpu_timer(struct k_itimer *timer, u64 now) in bump_cpu_timer() argument
123 if (now < expires) in bump_cpu_timer()
127 delta = now + incr - expires; in bump_cpu_timer()
786 u64 now, expires = cpu_timer_getexpires(ctmr); in posix_cpu_timer_get() local
806 now = cpu_clock_sample(clkid, p); in posix_cpu_timer_get()
808 now = cpu_clock_sample_group(clkid, p, false); in posix_cpu_timer_get()
810 if (now < expires) { in posix_cpu_timer_get()
811 itp->it_value = ns_to_timespec64(expires - now); in posix_cpu_timer_get()
827 struct list_head *firing, u64 now) in collect_timerqueue() argument
839 if (++i == MAX_COLLECTED || now < expires) in collect_timerqueue()
[all …]
timekeeping.c
244 u64 now, last, mask, max, delta; in timekeeping_get_delta() local
256 now = tk_clock_read(tkr); in timekeeping_get_delta()
262 delta = clocksource_delta(now, last, mask); in timekeeping_get_delta()
439 u64 now; in __ktime_get_fast_ns() local
444 now = ktime_to_ns(tkr->base); in __ktime_get_fast_ns()
446 now += timekeeping_delta_to_ns(tkr, in __ktime_get_fast_ns()
453 return now; in __ktime_get_fast_ns()
1049 u64 now; in ktime_get_snapshot() local
1055 now = tk_clock_read(&tk->tkr_mono); in ktime_get_snapshot()
1064 nsec_real = timekeeping_cycles_to_ns(&tk->tkr_mono, now); in ktime_get_snapshot()
[all …]
posix-timers.c
387 ktime_t now = hrtimer_cb_get_time(timer); in posix_timer_fn() local
416 now = ktime_add(now, kj); in posix_timer_fn()
419 timr->it_overrun += hrtimer_forward(timer, now, in posix_timer_fn()
640 static ktime_t common_hrtimer_remaining(struct k_itimer *timr, ktime_t now) in common_hrtimer_remaining() argument
644 return __hrtimer_expires_remaining_adjusted(timer, now); in common_hrtimer_remaining()
647 static s64 common_hrtimer_forward(struct k_itimer *timr, ktime_t now) in common_hrtimer_forward() argument
651 return hrtimer_forward(timer, now, timr->it_interval); in common_hrtimer_forward()
673 ktime_t now, remaining, iv; in common_timer_get() local
691 now = kc->clock_get_ktime(timr->it_clock); in common_timer_get()
698 timr->it_overrun += kc->timer_forward(timr, now); in common_timer_get()
[all …]
posix-timers.h
25 s64 (*timer_forward)(struct k_itimer *timr, ktime_t now);
26 ktime_t (*timer_remaining)(struct k_itimer *timr, ktime_t now);
tick-broadcast.c
689 ktime_t now, next_event; in tick_handle_oneshot_broadcast() local
697 now = ktime_get(); in tick_handle_oneshot_broadcast()
709 if (td->evtdev->next_event <= now) { in tick_handle_oneshot_broadcast()
798 ktime_t now; in ___tick_broadcast_oneshot_control() local
913 now = ktime_get(); in ___tick_broadcast_oneshot_control()
914 if (dev->next_event <= now) { in ___tick_broadcast_oneshot_control()
ntp.c
545 const struct timespec64 *now) in rtc_tv_nsec_ok() argument
552 *to_set = timespec64_add(*now, delay); in rtc_tv_nsec_ok()
630 struct timespec64 now, to_set; in sync_hw_clock() local
641 ktime_get_real_ts64(&now); in sync_hw_clock()
643 if (!rtc_tv_nsec_ok(offset_nsec, &to_set, &now)) in sync_hw_clock()
/kernel/sched/
psi.c
262 u64 now, state_start; in get_recent_times() local
272 now = cpu_clock(cpu); in get_recent_times()
291 times[s] += now - state_start; in get_recent_times()
377 static u64 update_averages(struct psi_group *group, u64 now) in update_averages() argument
386 if (now - expires >= psi_period) in update_averages()
387 missed_periods = div_u64(now - expires, psi_period); in update_averages()
397 period = now - (group->avg_last_update + (missed_periods * psi_period)); in update_averages()
398 group->avg_last_update = now; in update_averages()
436 u64 now; in psi_avgs_work() local
443 now = sched_clock(); in psi_avgs_work()
[all …]
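The psi.c hits show how update_averages() catches up when the averaging work runs late: whole missed periods are (now - expires) / psi_period, and the sample window being folded in is whatever time has passed since the last on-schedule update plus those missed periods. A small sketch of that catch-up arithmetic with hypothetical values:

#include <stdint.h>
#include <stdio.h>

#define PERIOD_NS 2000000000ULL		/* hypothetical 2 s averaging period */

int main(void)
{
	uint64_t avg_last_update = 10000000000ULL;	/* last update at t = 10 s */
	uint64_t expires = avg_last_update + PERIOD_NS;	/* next update due at t = 12 s */
	uint64_t now = 17000000000ULL;			/* but the work only runs at t = 17 s */
	uint64_t missed_periods = 0, period;

	if (now - expires >= PERIOD_NS)
		missed_periods = (now - expires) / PERIOD_NS;

	/* Length of the window actually being averaged in this pass. */
	period = now - (avg_last_update + missed_periods * PERIOD_NS);

	printf("missed_periods=%llu, window=%llu ns\n",
	       (unsigned long long)missed_periods, (unsigned long long)period);
	/* prints: missed_periods=2, window=3000000000 ns */
	return 0;
}
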
pelt.c
184 int ___update_load_sum(u64 now, struct sched_avg *sa, in ___update_load_sum() argument
189 delta = now - sa->last_update_time; in ___update_load_sum()
195 sa->last_update_time = now; in ___update_load_sum()
302 int __update_load_avg_blocked_se(u64 now, struct sched_entity *se) in __update_load_avg_blocked_se() argument
304 if (___update_load_sum(now, &se->avg, 0, 0, 0)) { in __update_load_avg_blocked_se()
314 int __update_load_avg_se(u64 now, struct cfs_rq *cfs_rq, struct sched_entity *se) in __update_load_avg_se() argument
316 if (___update_load_sum(now, &se->avg, !!se->on_rq, se_runnable(se), in __update_load_avg_se()
328 int __update_load_avg_cfs_rq(u64 now, struct cfs_rq *cfs_rq) in __update_load_avg_cfs_rq() argument
330 if (___update_load_sum(now, &cfs_rq->avg, in __update_load_avg_cfs_rq()
354 int update_rt_rq_load_avg(u64 now, struct rq *rq, int running) in update_rt_rq_load_avg() argument
[all …]
pelt.h
4 int __update_load_avg_blocked_se(u64 now, struct sched_entity *se);
5 int __update_load_avg_se(u64 now, struct cfs_rq *cfs_rq, struct sched_entity *se);
6 int __update_load_avg_cfs_rq(u64 now, struct cfs_rq *cfs_rq);
7 int update_rt_rq_load_avg(u64 now, struct rq *rq, int running);
8 int update_dl_rq_load_avg(u64 now, struct rq *rq, int running);
11 int update_thermal_load_avg(u64 now, struct rq *rq, u64 capacity);
19 update_thermal_load_avg(u64 now, struct rq *rq, u64 capacity) in update_thermal_load_avg() argument
169 update_cfs_rq_load_avg(u64 now, struct cfs_rq *cfs_rq) in update_cfs_rq_load_avg() argument
175 update_rt_rq_load_avg(u64 now, struct rq *rq, int running) in update_rt_rq_load_avg() argument
181 update_dl_rq_load_avg(u64 now, struct rq *rq, int running) in update_dl_rq_load_avg() argument
[all …]
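The pelt.c and pelt.h hits all follow one pattern: compute delta = now - sa->last_update_time, fold that delta into a geometrically decayed running sum, and write now back as last_update_time. The kernel does this in fixed point with a half-life of 32 periods of 1024 us; the toy below is only meant to show the shape of such an update, in floating point and with made-up numbers, not the actual PELT math:

#include <stdio.h>
#include <math.h>

#define PERIOD_US	1024.0	/* accounting period */
#define HALF_LIFE	32.0	/* periods after which old history weighs half */

struct toy_avg {
	double load_sum;
	double last_update_time;	/* microseconds */
};

/* Decay the existing sum by the elapsed time, then add the new
 * contribution if the entity was running over that window. */
static void toy_update(struct toy_avg *sa, double now_us, int running)
{
	double periods = (now_us - sa->last_update_time) / PERIOD_US;
	double decay = pow(0.5, periods / HALF_LIFE);

	sa->load_sum = sa->load_sum * decay + (running ? periods : 0.0);
	sa->last_update_time = now_us;
}

int main(void)
{
	struct toy_avg sa = { 0.0, 0.0 };

	toy_update(&sa, 10 * 1024.0, 1);	/* ran for 10 periods */
	toy_update(&sa, 20 * 1024.0, 0);	/* then idle for 10 periods */
	printf("decayed load_sum: %.2f periods\n", sa.load_sum);
	return 0;
}
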
wait_bit.c
219 unsigned long now = READ_ONCE(jiffies); in bit_wait_timeout() local
221 if (time_after_eq(now, word->timeout)) in bit_wait_timeout()
223 schedule_timeout(word->timeout - now); in bit_wait_timeout()
233 unsigned long now = READ_ONCE(jiffies); in bit_wait_io_timeout() local
235 if (time_after_eq(now, word->timeout)) in bit_wait_io_timeout()
237 io_schedule_timeout(word->timeout - now); in bit_wait_io_timeout()
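The wait_bit.c hits compare a jiffies snapshot against a timeout with time_after_eq(), which stays correct across a jiffies wrap because it is built on a signed difference. A minimal sketch of that comparison outside the kernel:

#include <stdio.h>

/* Wrap-safe "a is at or after b" for a free-running unsigned counter,
 * the same signed-difference trick time_after_eq() relies on. */
static int after_eq(unsigned long a, unsigned long b)
{
	return (long)(a - b) >= 0;
}

int main(void)
{
	unsigned long timeout = (unsigned long)-5;	/* just before the counter wraps */
	unsigned long now = 10;				/* just after the wrap */

	/* A plain "now >= timeout" says no; the wrap-safe form says yes. */
	printf("plain: %d, wrap-safe: %d\n", now >= timeout, after_eq(now, timeout));
	return 0;
}
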
stats.h
202 unsigned long long now, delta = 0; in sched_info_arrive() local
207 now = rq_clock(rq); in sched_info_arrive()
208 delta = now - t->sched_info.last_queued; in sched_info_arrive()
211 t->sched_info.last_arrival = now; in sched_info_arrive()
clock.c
267 u64 now, clock, old_clock, min_clock, max_clock, gtod; in sched_clock_local() local
271 now = sched_clock(); in sched_clock_local()
272 delta = now - scd->tick_raw; in sched_clock_local()
/kernel/trace/
trace_clock.c
98 u64 now, prev_time; in trace_clock_global() local
116 now = sched_clock_cpu(this_cpu); in trace_clock_global()
119 if ((s64)(now - prev_time) < 0) in trace_clock_global()
120 now = prev_time; in trace_clock_global()
133 if ((s64)(now - prev_time) < 0) in trace_clock_global()
134 now = prev_time; in trace_clock_global()
136 trace_clock_struct.prev_time = now; in trace_clock_global()
144 return now; in trace_clock_global()
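The trace_clock.c hits show how trace_clock_global() keeps its timestamps monotonic: if the freshly read per-CPU clock is behind the last published prev_time (detected with a signed difference), the older value is reused. A standalone sketch of that clamp:

#include <stdint.h>
#include <stdio.h>

/* Clamp a possibly-backwards clock reading so published timestamps
 * never decrease. */
static uint64_t monotonic_read(uint64_t raw, uint64_t *prev_time)
{
	uint64_t now = raw;

	if ((int64_t)(now - *prev_time) < 0)	/* raw reading went backwards */
		now = *prev_time;

	*prev_time = now;
	return now;
}

int main(void)
{
	uint64_t prev = 1000;

	printf("%llu\n", (unsigned long long)monotonic_read(1100, &prev));	/* 1100 */
	printf("%llu\n", (unsigned long long)monotonic_read(1050, &prev));	/* clamped to 1100 */
	return 0;
}
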
trace_osnoise.c
580 u64 int_counter, now; in get_int_safe_duration() local
588 now = time_get(); in get_int_safe_duration()
589 duration = (now - *delta_start); in get_int_safe_duration()
1330 u64 now; in timerlat_irq() local
1340 now = ktime_to_ns(hrtimer_cb_get_time(&tlat->timer)); in timerlat_irq()
1389 diff = now - tlat->abs_period; in timerlat_irq()
1421 ktime_t next_abs_period, now; in wait_next_period() local
1424 now = hrtimer_cb_get_time(&tlat->timer); in wait_next_period()
1435 while (ktime_compare(now, next_abs_period) > 0) { in wait_next_period()
1456 u64 now, diff; in timerlat_main() local
[all …]
/kernel/
smp.c
247 unsigned int *n_data, unsigned int now) in cfd_seq_data_add() argument
256 if (new[i].u.cnt <= now) in cfd_seq_data_add()
291 unsigned int now; in csd_lock_print_extended() local
296 now = data[0].u.cnt; in csd_lock_print_extended()
298 cfd_seq_data_add(pcpu->seq_queue, srccpu, cpu, CFD_SEQ_QUEUE, data, &n_data, now); in csd_lock_print_extended()
299 cfd_seq_data_add(pcpu->seq_ipi, srccpu, cpu, CFD_SEQ_IPI, data, &n_data, now); in csd_lock_print_extended()
300 cfd_seq_data_add(pcpu->seq_noipi, srccpu, cpu, CFD_SEQ_NOIPI, data, &n_data, now); in csd_lock_print_extended()
302 …dd(per_cpu(cfd_seq_local.ping, srccpu), srccpu, CFD_SEQ_NOCPU, CFD_SEQ_PING, data, &n_data, now); in csd_lock_print_extended()
303 …(per_cpu(cfd_seq_local.pinged, srccpu), srccpu, CFD_SEQ_NOCPU, CFD_SEQ_PINGED, data, &n_data, now); in csd_lock_print_extended()
305 cfd_seq_data_add(seq->idle, CFD_SEQ_NOCPU, cpu, CFD_SEQ_IDLE, data, &n_data, now); in csd_lock_print_extended()
[all …]
watchdog.c
316 unsigned long now) in is_softlockup() argument
320 if (time_after(now, period_ts + get_softlockup_thresh())) in is_softlockup()
321 return now - touch_ts; in is_softlockup()
365 unsigned long touch_ts, period_ts, now; in watchdog_timer_fn() local
392 now = get_timestamp(); in watchdog_timer_fn()
422 duration = is_softlockup(touch_ts, period_ts, now); in watchdog_timer_fn()
watchdog_hld.c
78 ktime_t delta, now = ktime_get_mono_fast_ns(); in watchdog_check_timestamp() local
80 delta = now - __this_cpu_read(last_timestamp); in watchdog_check_timestamp()
91 __this_cpu_write(last_timestamp, now); in watchdog_check_timestamp()
/kernel/irq/
timings.c
382 static u64 __irq_timings_next_event(struct irqt_stat *irqs, int irq, u64 now) in __irq_timings_next_event() argument
386 if ((now - irqs->last_ts) >= NSEC_PER_SEC) { in __irq_timings_next_event()
536 u64 irq_timings_next_event(u64 now) in irq_timings_next_event() argument
583 ts = __irq_timings_next_event(irqs, i, now); in irq_timings_next_event()
584 if (ts <= now) in irq_timings_next_event()
585 return now; in irq_timings_next_event()
/kernel/power/
suspend_test.c
73 time64_t now; in test_wakealarm() local
84 now = rtc_tm_to_time64(&alm.time); in test_wakealarm()
87 rtc_time64_to_tm(now + TEST_SUSPEND_SECONDS, &alm.time); in test_wakealarm()
wakelock.c
103 ktime_t now; in __wakelocks_gc() local
107 now = ktime_get(); in __wakelocks_gc()
113 idle_time_ns = ktime_to_ns(ktime_sub(now, wl->ws->last_time)); in __wakelocks_gc()
