/kernel/rcu/

D | tree_stall.h |
    94  WRITE_ONCE(rcu_state.jiffies_stall, jiffies + ULONG_MAX / 2);  in rcu_cpu_stall_reset()
    104  unsigned long j = jiffies;  in record_gp_stall_check_time()
    120  WRITE_ONCE(rdp->last_fqs_resched, jiffies);  in zero_cpu_stall_ticks()
    134  if (time_after(jiffies, j) && rcu_state.gp_kthread &&  in rcu_stall_kick_kthreads()
    269  rdp->last_accelerate & 0xffff, jiffies & 0xffff,  in print_cpu_stall_fast_no_hz()
    340  j = jiffies - READ_ONCE(rcu_state.gp_activity);  in rcu_check_gp_kthread_starvation()
    393  smp_processor_id(), (long)(jiffies - rcu_state.gp_start),  in print_other_cpu_stall()
    405  j = jiffies;  in print_other_cpu_stall()
    416  if (ULONG_CMP_GE(jiffies, READ_ONCE(rcu_state.jiffies_stall)))  in print_other_cpu_stall()
    418  jiffies + 3 * rcu_jiffies_till_stall_check() + 3);  in print_other_cpu_stall()
    [all …]

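The stall-warning code above follows the kernel's standard deadline idiom: store a future jiffies value with WRITE_ONCE() and test it later with a wrap-safe comparison (time_after() here, ULONG_CMP_GE() where RCU wants a full-range compare). A minimal sketch of that idiom, using hypothetical my_* names rather than RCU's actual state:

#include <linux/compiler.h>  /* READ_ONCE(), WRITE_ONCE() */
#include <linux/jiffies.h>   /* jiffies, HZ, time_after() */
#include <linux/types.h>     /* bool */

/* Hypothetical deadline, written by one context and read locklessly. */
static unsigned long my_stall_deadline;

static void my_arm_deadline(unsigned long secs)
{
        /* WRITE_ONCE() because readers check the deadline without a lock. */
        WRITE_ONCE(my_stall_deadline, jiffies + secs * HZ);
}

static bool my_deadline_passed(void)
{
        /* time_after() stays correct across jiffies wraparound. */
        return time_after(jiffies, READ_ONCE(my_stall_deadline));
}
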
D | rcutorture.c |
    238  return shutdown_secs && time_after(jiffies, shutdown_jiffies - 30 * HZ);  in shutdown_time_arrived()
    834  while (ULONG_CMP_LT(jiffies, oldstarttime)) {  in rcu_torture_boost()
    835  schedule_timeout_interruptible(oldstarttime - jiffies);  in rcu_torture_boost()
    843  call_rcu_time = jiffies;  in rcu_torture_boost()
    844  while (ULONG_CMP_LT(jiffies, endtime)) {  in rcu_torture_boost()
    853  jiffies);  in rcu_torture_boost()
    854  call_rcu_time = jiffies;  in rcu_torture_boost()
    867  rcu_torture_boost_failed(call_rcu_time, jiffies);  in rcu_torture_boost()
    879  boost_starttime = jiffies +  in rcu_torture_boost()
    914  fqs_resume_time = jiffies + fqs_stutter * HZ;  in rcu_torture_fqs()
    [all …]

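rcu_torture_boost() above sleeps until an absolute jiffies target by looping around schedule_timeout_interruptible(), because an interruptible sleep can end early. A rough, hypothetical equivalent that uses the generic time_before() helper in place of RCU's ULONG_CMP_LT():

#include <linux/jiffies.h>   /* jiffies, time_before() */
#include <linux/sched.h>     /* schedule_timeout_interruptible() */

/* Hypothetical helper: sleep until the absolute jiffies value @target. */
static void my_sleep_until(unsigned long target)
{
        /* Loop: the sleep may return early, e.g. when a signal arrives. */
        while (time_before(jiffies, target))
                schedule_timeout_interruptible(target - jiffies);
}
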
D | tree.c |
    1031  time_after(jiffies, rcu_state.gp_start + HZ)) {  in rcu_implicit_dynticks_qs()
    1065  (time_after(jiffies, rcu_state.gp_start + jtsq * 2) ||  in rcu_implicit_dynticks_qs()
    1066  time_after(jiffies, rcu_state.jiffies_resched))) {  in rcu_implicit_dynticks_qs()
    1070  } else if (time_after(jiffies, rcu_state.gp_start + jtsq)) {  in rcu_implicit_dynticks_qs()
    1083  time_after(jiffies,  in rcu_implicit_dynticks_qs()
    1086  WRITE_ONCE(rdp->last_fqs_resched, jiffies);  in rcu_implicit_dynticks_qs()
    1096  if (time_after(jiffies, rcu_state.jiffies_resched)) {  in rcu_implicit_dynticks_qs()
    1097  if (time_after(jiffies,  in rcu_implicit_dynticks_qs()
    1100  WRITE_ONCE(rdp->last_fqs_resched, jiffies);  in rcu_implicit_dynticks_qs()
    1192  rcu_state.gp_req_activity = jiffies;  in rcu_start_this_gp()
    [all …]

D | tree_plugin.h |
    710  time_after(jiffies, rcu_state.gp_start + HZ))  in rcu_flavor_sched_clock_irq()
    1080  ULONG_CMP_GE(jiffies, rnp->boost_time))) {  in rcu_initiate_boost()
    1107  rnp->boost_time = jiffies + RCU_BOOST_DELAY_JIFFIES;  in rcu_preempt_boost_start_gp()
    1307  if (jiffies == rdp->last_advance_all)  in rcu_try_advance_all_cbs()
    1309  rdp->last_advance_all = jiffies;  in rcu_try_advance_all_cbs()
    1357  rdp->last_accelerate = jiffies;  in rcu_needs_cpu()
    1362  dj = round_jiffies(rcu_idle_lazy_gp_delay + jiffies) - jiffies;  in rcu_needs_cpu()
    1364  dj = round_up(rcu_idle_gp_delay + jiffies,  in rcu_needs_cpu()
    1365  rcu_idle_gp_delay) - jiffies;  in rcu_needs_cpu()
    1418  if (rdp->last_accelerate == jiffies)  in rcu_prepare_for_idle()
    [all …]

D | update.c |
    719  lastreport = jiffies;  in rcu_tasks_kthread()
    741  time_after(jiffies, lastreport + rtst);  in rcu_tasks_kthread()
    743  lastreport = jiffies;  in rcu_tasks_kthread()

/kernel/

D | hung_task.c |
    109  t->last_switch_time = jiffies;  in check_hung_task()
    131  t->comm, t->pid, (jiffies - t->last_switch_time) / HZ);  in check_hung_task()
    176  unsigned long last_break = jiffies;  in check_hung_uninterruptible_tasks()
    191  if (time_after(jiffies, last_break + HUNG_TASK_LOCK_BREAK)) {  in check_hung_uninterruptible_tasks()
    194  last_break = jiffies;  in check_hung_uninterruptible_tasks()
    214  return timeout ? last_checked - jiffies + timeout * HZ :  in hung_timeout_jiffies()
    273  unsigned long hung_last_checked = jiffies;  in watchdog()
    290  hung_last_checked = jiffies;  in watchdog()

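hung_timeout_jiffies() (line 214 above) turns a timeout given in seconds into the number of jiffies the watchdog may still sleep, measured from the time of the last check. A hedged re-sketch of that calculation with hypothetical names:

#include <linux/jiffies.h>   /* jiffies, HZ */
#include <linux/sched.h>     /* MAX_SCHEDULE_TIMEOUT */

/*
 * Hypothetical version of the calculation: how many jiffies may we still
 * sleep if the last check ran at @last_checked and checks are due every
 * @timeout_secs seconds? A zero timeout means "never check", sleep forever.
 */
static long my_timeout_jiffies(unsigned long last_checked,
                               unsigned long timeout_secs)
{
        /* Unsigned jiffies arithmetic stays correct across wraparound. */
        return timeout_secs ? last_checked - jiffies + timeout_secs * HZ
                            : MAX_SCHEDULE_TIMEOUT;
}
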
D | torture.c |
    98  starttime = jiffies;  in torture_offline()
    114  delta = jiffies - starttime;  in torture_offline()
    149  starttime = jiffies;  in torture_online()
    163  delta = jiffies - starttime;  in torture_online()

D | workqueue.c |
    1166  pwq->pool->watchdog_ts = jiffies;  in pwq_activate_delayed_work()
    1506  pwq->pool->watchdog_ts = jiffies;  in __queue_work()
    1668  timer->expires = jiffies + delay;  in __queue_delayed_work()
    1801  worker->last_active = jiffies;  in worker_enter_idle()
    1807  mod_timer(&pool->idle_timer, jiffies + IDLE_WORKER_TIMEOUT);  in worker_enter_idle()
    2021  if (time_before(jiffies, expires)) {  in idle_worker_timeout()
    2077  mod_timer(&pool->mayday_timer, jiffies + MAYDAY_INTERVAL);  in pool_mayday_timeout()
    2106  mod_timer(&pool->mayday_timer, jiffies + MAYDAY_INITIAL_TIMEOUT);  in maybe_create_worker()
    2432  pool->watchdog_ts = jiffies;  in worker_thread()
    2536  pool->watchdog_ts = jiffies;  in rescuer_thread()
    [all …]

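Several workqueue hits are the classic self-rearming timer pattern: mod_timer(timer, jiffies + delay) pushes the expiry a fixed interval past "now". A small self-contained sketch of that pattern (hypothetical my_idle_timer with an assumed 30-second period, not workqueue's real IDLE_WORKER_TIMEOUT):

#include <linux/jiffies.h>   /* jiffies, HZ */
#include <linux/timer.h>     /* timer_setup(), mod_timer() */

/* Hypothetical housekeeping timer with an assumed 30-second period. */
static struct timer_list my_idle_timer;

static void my_idle_timeout(struct timer_list *t)
{
        /* ... reap idle resources here ... */

        /* Re-arm relative to "now" for the next round. */
        mod_timer(&my_idle_timer, jiffies + 30 * HZ);
}

static void my_idle_timer_start(void)
{
        timer_setup(&my_idle_timer, my_idle_timeout, 0);
        mod_timer(&my_idle_timer, jiffies + 30 * HZ);
}
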
D | softirq.c |
    251  unsigned long end = jiffies + MAX_SOFTIRQ_TIME;  in __do_softirq()
    310  if (time_before(jiffies, end) && !need_resched() &&  in __do_softirq()

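__do_softirq() bounds its processing loop with a jiffies budget: it records an end time up front, then keeps going only while time_before(jiffies, end) still holds. A hypothetical sketch of the same shape (my_more_work(), my_handle_one() and the 2 ms budget are assumptions, not softirq's real helpers or constants):

#include <linux/jiffies.h>   /* jiffies, time_before(), msecs_to_jiffies() */
#include <linux/types.h>     /* bool */

static bool my_more_work(void);    /* hypothetical: is work still queued? */
static void my_handle_one(void);   /* hypothetical: process one unit of work */

static void my_process_bounded(void)
{
        /* Assumed budget of roughly 2 ms. */
        unsigned long end = jiffies + msecs_to_jiffies(2);

        while (my_more_work() && time_before(jiffies, end))
                my_handle_one();
}
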
D | acct.c |
    128  acct->needcheck = jiffies + ACCT_TIMEOUT*HZ;  in check_free_space()
    243  acct->needcheck = jiffies;  in acct_on()

/kernel/sched/

D | loadavg.c |
    224  if (!time_before(jiffies, READ_ONCE(calc_load_update)))  in calc_load_write_idx()
    273  if (time_before(jiffies, this_rq->calc_load_update))  in calc_load_nohz_stop()
    281  if (time_before(jiffies, this_rq->calc_load_update + 10))  in calc_load_nohz_stop()
    311  if (!time_before(jiffies, sample_window + 10)) {  in calc_global_nohz()
    315  delta = jiffies - sample_window - 10;  in calc_global_nohz()
    357  if (time_before(jiffies, sample_window + 10))  in calc_global_load()
    391  if (time_before(jiffies, this_rq->calc_load_update))  in calc_global_load_tick()

D | wait_bit.c |
    75  wq_entry.key.timeout = jiffies + timeout;  in out_of_line_wait_on_bit_timeout()
    218  unsigned long now = READ_ONCE(jiffies);  in bit_wait_timeout()
    232  unsigned long now = READ_ONCE(jiffies);  in bit_wait_io_timeout()

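bit_wait_timeout() and bit_wait_io_timeout() sample jiffies once with READ_ONCE() so the "already expired?" test and the remaining-sleep computation agree on the same value. A hedged sketch of that sample-once pattern (hypothetical helper; schedule_timeout_uninterruptible() sets the task state itself, whereas the wait-bit code relies on the surrounding wait machinery):

#include <linux/compiler.h>  /* READ_ONCE() */
#include <linux/errno.h>     /* EAGAIN */
#include <linux/jiffies.h>   /* jiffies, time_after_eq() */
#include <linux/sched.h>     /* schedule_timeout_uninterruptible() */

/* Hypothetical: sleep until @deadline (absolute jiffies), or fail if past. */
static long my_sleep_until_deadline(unsigned long deadline)
{
        unsigned long now = READ_ONCE(jiffies);  /* sample once */

        if (time_after_eq(now, deadline))
                return -EAGAIN;
        /* Returns the number of jiffies left if woken early. */
        return schedule_timeout_uninterruptible(deadline - now);
}
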
D | autogroup.c |
    226  if (!capable(CAP_SYS_ADMIN) && time_before(jiffies, next))  in proc_sched_autogroup_set_nice()
    229  next = HZ / 10 + jiffies;  in proc_sched_autogroup_set_nice()

D | fair.c |
    1881  p->numa_migrate_retry = jiffies + interval;  in numa_migrate_preferred()
    1955  p->mm->numa_next_scan = jiffies +  in update_task_scan_period()
    2453  if (time_after(jiffies, p->numa_migrate_retry)) {  in task_numa_fault()
    2488  unsigned long migrate, next_scan, now = jiffies;  in task_numa_work()
    2636  mm->numa_next_scan = jiffies + msecs_to_jiffies(sysctl_numa_balancing_scan_delay);  in init_numa_balancing()
    2700  if (!time_before(jiffies, curr->mm->numa_next_scan))  in task_tick_numa()
    5541  if (time_after(jiffies, current->wakee_flip_decay_ts + HZ)) {  in record_wakee()
    5543  current->wakee_flip_decay_ts = jiffies;  in record_wakee()
    7776  rq->last_blocked_load_update_tick = jiffies;  in update_blocked_load_status()
    7877  unsigned long now = jiffies;  in update_cfs_rq_h_load()
    [all …]

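The NUMA-balancing hits show the usual way a period configured in milliseconds becomes an absolute jiffies deadline: msecs_to_jiffies() for the conversion, time_before()/time_after() for the check. A minimal sketch with hypothetical names (not the scheduler's real bookkeeping):

#include <linux/jiffies.h>   /* jiffies, time_before(), msecs_to_jiffies() */
#include <linux/types.h>     /* bool */

/* Hypothetical "is the periodic scan due yet?" state. */
static unsigned long my_next_scan;

static bool my_scan_due(unsigned int period_msecs)
{
        if (time_before(jiffies, my_next_scan))
                return false;            /* not due yet */

        my_next_scan = jiffies + msecs_to_jiffies(period_msecs);
        return true;
}
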
D | stats.c |
    21  seq_printf(seq, "timestamp %lu\n", jiffies);  in show_schedstat()

/kernel/irq/

D | spurious.c |
    171  jiffies + POLL_SPURIOUS_IRQ_INTERVAL);  in poll_spurious_irqs()
    391  if (time_after(jiffies, desc->last_unhandled + HZ/10))  in note_interrupt()
    395  desc->last_unhandled = jiffies;  in note_interrupt()
    424  jiffies + POLL_SPURIOUS_IRQ_INTERVAL);  in note_interrupt()

/kernel/time/

D | timer.c |
    362  unsigned long j0 = jiffies;  in __round_jiffies_relative()
    439  unsigned long j0 = jiffies;  in __round_jiffies_up_relative()
    900  jnow = READ_ONCE(jiffies);  in forward_timer_base()
    1725  unsigned long now = READ_ONCE(jiffies);  in collect_expired_timers()
    1789  if (!time_after_eq(jiffies, base->clk))  in __run_timers()
    1811  while (time_after_eq(jiffies, base->clk)) {  in __run_timers()
    1844  if (time_before(jiffies, base->clk)) {  in run_local_timers()
    1849  if (time_before(jiffies, base->clk))  in run_local_timers()
    1934  expire = timeout + jiffies;  in schedule_timeout()
    1945  timeout = expire - jiffies;  in schedule_timeout()
    [all …]

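The __round_jiffies_relative() and __round_jiffies_up_relative() hits above are the implementation of the timer-rounding helpers; callers that do not need precise expiry round their deadlines so many coarse timers fire together and CPUs stay idle longer. A hypothetical caller might look like:

#include <linux/jiffies.h>   /* jiffies, HZ */
#include <linux/timer.h>     /* mod_timer(), round_jiffies() */

/* Hypothetical: arm @timer roughly 10s out, rounded so expiries batch up. */
static void my_arm_coarse_timer(struct timer_list *timer)
{
        mod_timer(timer, round_jiffies(jiffies + 10 * HZ));
}
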
D | jiffies.c |
    37  return (u64) jiffies;  in jiffies_read()
    79  EXPORT_SYMBOL(jiffies);

D | time.c |
    657  jiffies_to_timespec64(const unsigned long jiffies, struct timespec64 *value)  in jiffies_to_timespec64() argument
    664  value->tv_sec = div_u64_rem((u64)jiffies * TICK_NSEC,  in jiffies_to_timespec64()
    694  void jiffies_to_timeval(const unsigned long jiffies, struct timeval *value)  in jiffies_to_timeval() argument
    702  value->tv_sec = div_u64_rem((u64)jiffies * TICK_NSEC,  in jiffies_to_timeval()

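The conversions above rely on one jiffy lasting TICK_NSEC nanoseconds: multiply, then split the product into whole seconds and the nanosecond remainder. A sketch that rederives jiffies_to_timespec64() under that assumption (hypothetical my_jiffies_to_ts64, kept separate from the real exported function):

#include <linux/jiffies.h>   /* TICK_NSEC */
#include <linux/math64.h>    /* div_u64_rem() */
#include <linux/time64.h>    /* struct timespec64, NSEC_PER_SEC */
#include <linux/types.h>     /* u32, u64 */

/* Hypothetical re-derivation of the conversion used above. */
static struct timespec64 my_jiffies_to_ts64(unsigned long j)
{
        struct timespec64 ts;
        u32 rem;

        /* One jiffy is TICK_NSEC ns; split the product into s + ns. */
        ts.tv_sec = div_u64_rem((u64)j * TICK_NSEC, NSEC_PER_SEC, &rem);
        ts.tv_nsec = rem;
        return ts;
}
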
D | Makefile |
    3  obj-y += timekeeping.o ntp.o clocksource.o jiffies.o timer_list.o

/kernel/locking/

D | test-ww_mutex.c |
    70  unsigned long timeout = jiffies + TIMEOUT;  in __test_mutex()
    79  } while (time_before(jiffies, timeout));  in __test_mutex()
    426  } while (!time_after(jiffies, stress->timeout));  in stress_inorder_work()
    487  } while (!time_after(jiffies, stress->timeout));  in stress_reorder_work()
    512  } while (!time_after(jiffies, stress->timeout));  in stress_one_work()
    569  stress->timeout = jiffies + 2*HZ;  in stress()

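The stress workers above share one loop bound: stress->timeout is set to jiffies + 2*HZ once, then each body runs at least one pass and keeps iterating until time_after() says the budget is spent. A hypothetical sketch of that do/while shape:

#include <linux/jiffies.h>   /* jiffies, HZ, time_after() */

/* Hypothetical stress loop: at least one pass, stop after about 2 seconds. */
static void my_stress(void (*one_pass)(void))
{
        unsigned long timeout = jiffies + 2 * HZ;

        do {
                one_pass();
        } while (!time_after(jiffies, timeout));
}
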
D | rwsem.c |
    460  time_after(jiffies, waiter->timeout)) {  in rwsem_mark_wake()
    1046  waiter.timeout = jiffies + RWSEM_WAIT_TIMEOUT;  in rwsem_down_read_slowpath()
    1182  waiter.timeout = jiffies + RWSEM_WAIT_TIMEOUT;  in rwsem_down_write_slowpath()
    1281  time_after(jiffies, waiter.timeout))) {  in rwsem_down_write_slowpath()

/kernel/power/

D | process.c |
    44  end_time = jiffies + msecs_to_jiffies(freeze_timeout_msecs);  in try_to_freeze_tasks()
    66  if (!todo || time_after(jiffies, end_time))  in try_to_freeze_tasks()

D | suspend_test.c |
    33  suspend_test_start_time = jiffies;  in suspend_test_start()
    38  long nj = jiffies - suspend_test_start_time;  in suspend_test_finish()

/kernel/bpf/

D | cpumap.c |
    252  unsigned long last_qs = jiffies;  in cpu_map_kthread_run()
    275  last_qs = jiffies;  in cpu_map_kthread_run()