Searched refs:on_rq (Results 1 – 12 of 12) sorted by relevance
  56  __field(bool, on_rq)
  69  __entry->on_rq = task->on_rq;
  87  __entry->on_rq, __entry->on_cpu)
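The hits above look like a tracepoint that records a task's on_rq value alongside on_cpu. A minimal sketch of such an event follows; the event name (sched_task_snapshot) is hypothetical and the usual trace-header scaffolding (TRACE_SYSTEM, include guard, <trace/define_trace.h>) is omitted — only the on_rq/on_cpu fields come from the hits.

    #include <linux/tracepoint.h>

    TRACE_EVENT(sched_task_snapshot,        /* hypothetical event name */
            TP_PROTO(struct task_struct *task),
            TP_ARGS(task),

            TP_STRUCT__entry(
                    __field(bool, on_rq)    /* note: task->on_rq itself is an int */
                    __field(int,  on_cpu)
            ),

            TP_fast_assign(
                    __entry->on_rq  = task->on_rq;
                    __entry->on_cpu = task->on_cpu;
            ),

            TP_printk("on_rq=%d on_cpu=%d",
                      __entry->on_rq, __entry->on_cpu)
    );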
312 if (___update_load_sum(now, &se->avg, !!se->on_rq, se_runnable(se), in __update_load_avg_se()
126 if (p->on_rq || (p->last_sleep_ts < rq->window_start)) in walt_inc_cumulative_runnable_avg()
1657  p->on_rq = TASK_ON_RQ_QUEUED;                                    in activate_task()
1662  p->on_rq = (flags & DEQUEUE_SLEEP) ? 0 : TASK_ON_RQ_MIGRATING;   in deactivate_task()
2056  !p->on_rq);                                                      in set_task_cpu()
2065  (p->on_rq && !task_on_rq_migrating(p)));                         in set_task_cpu()
3054  if (READ_ONCE(p->on_rq) && ttwu_runnable(p, wake_flags))         in try_to_wake_up()
3178  if (p->on_rq) {                                                  in try_invoke_on_locked_down_task()
3190  if (!p->on_rq)                                                   in try_invoke_on_locked_down_task()
3228  p->on_rq = 0;                                                    in __sched_fork()
3230  p->se.on_rq = 0;                                                 in __sched_fork()
3255  p->rt.on_rq = 0;                                                 in __sched_fork()
[all …]
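The activate_task()/deactivate_task() hits are where the task-level on_rq state machine is driven. A minimal sketch, assuming the assignments sit next to the enqueue_task()/dequeue_task() calls as in mainline kernel/sched/core.c, with all other bookkeeping elided:

    void activate_task(struct rq *rq, struct task_struct *p, int flags)
    {
            enqueue_task(rq, p, flags);
            p->on_rq = TASK_ON_RQ_QUEUED;
    }

    void deactivate_task(struct rq *rq, struct task_struct *p, int flags)
    {
            /*
             * A task dequeued for migration is still logically runnable, so it
             * is marked TASK_ON_RQ_MIGRATING rather than 0; only a real sleep
             * (DEQUEUE_SLEEP) clears the field.
             */
            p->on_rq = (flags & DEQUEUE_SLEEP) ? 0 : TASK_ON_RQ_MIGRATING;
            dequeue_task(rq, p, flags);
    }

The __sched_fork() hits (3228, 3230, 3255) then show all three on_rq fields — the task itself, its CFS entity and its RT entity — being zeroed for a freshly forked task.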
 577  if (curr->on_rq)                                   in update_min_vruntime()
 738  slice = __sched_period(nr_running + !se->on_rq);   in sched_slice()
 747  if (unlikely(!se->on_rq)) {                        in sched_slice()
3125  if (se->on_rq) {                                   in reweight_entity()
3144  if (se->on_rq)                                     in reweight_entity()
4480  se->on_rq = 1;                                     in enqueue_entity()
4566  se->on_rq = 0;                                     in dequeue_entity()
4637  if (se->on_rq) {                                   in set_next_entity()
4735  if (prev->on_rq)                                   in put_prev_entity()
4743  if (prev->on_rq) {                                 in put_prev_entity()
[all …]
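Inside CFS the per-entity flag is a plain 0/1 toggled only by enqueue_entity() and dequeue_entity() (hits 4480 and 4566); every other fair.c hit merely tests it. A heavily reduced sketch, with all of the real enqueue/dequeue work elided:

    static void enqueue_entity(struct cfs_rq *cfs_rq, struct sched_entity *se, int flags)
    {
            /* ... update_curr(), load tracking, placement, rbtree insertion ... */
            se->on_rq = 1;
    }

    static void dequeue_entity(struct cfs_rq *cfs_rq, struct sched_entity *se, int flags)
    {
            /* ... update_curr(), stats, rbtree removal ... */
            se->on_rq = 0;
            /* ... */
    }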
 828  return !!se->on_rq;                                   in se_runnable()
 840  return !!se->on_rq;                                   in se_runnable()
1844  return p->on_rq == TASK_ON_RQ_QUEUED;                 in task_on_rq_queued()
1849  return READ_ONCE(p->on_rq) == TASK_ON_RQ_MIGRATING;   in task_on_rq_migrating()
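The hits at 1844/1849 are the bodies of the two query helpers the other results call; reconstructed here with the static inline wrappers as they appear in mainline kernel/sched/sched.h:

    static inline int task_on_rq_queued(struct task_struct *p)
    {
            return p->on_rq == TASK_ON_RQ_QUEUED;
    }

    static inline int task_on_rq_migrating(struct task_struct *p)
    {
            /* READ_ONCE: the field may be updated concurrently by another CPU. */
            return READ_ONCE(p->on_rq) == TASK_ON_RQ_MIGRATING;
    }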
 326  return p->on_rq ? SCHED_ACCOUNT_WAIT_TIME : 0;        in account_busy_for_task_demand()
 489  if (!p->on_rq && p->state != TASK_WAKING)              in fixup_busy_time()
 901  return p->on_rq ? SCHED_FREQ_ACCOUNT_WAIT_TIME : 0;   in account_busy_for_cpu_time()
 452  return rt_se->on_rq;   in on_rt_rq()
1328  rt_se->on_rq = 1;      in __enqueue_rt_entity()
1342  rt_se->on_rq = 0;      in __dequeue_rt_entity()
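The RT class mirrors CFS: on_rt_rq() only reads the flag, and the enqueue/dequeue helpers flip it. Reconstructed from the hits above (the exact signature is assumed):

    static inline int on_rt_rq(struct sched_rt_entity *rt_se)
    {
            return rt_se->on_rq;
    }

    /*
     * __enqueue_rt_entity() sets rt_se->on_rq = 1 once the entity is linked
     * into its priority queue; __dequeue_rt_entity() clears it again
     * (hits 1328 and 1342 above).
     */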
1585  if (p->on_rq == TASK_ON_RQ_MIGRATING || flags & ENQUEUE_RESTORE) {   in enqueue_task_dl()
1626  if (p->on_rq == TASK_ON_RQ_MIGRATING || flags & DEQUEUE_SAVE) {      in dequeue_task_dl()
 407  if (t != current && READ_ONCE(t->on_rq) && !is_idle_task(t)) {   in rcu_tasks_pertask()
 436  !READ_ONCE(t->on_rq) ||                                          in check_holdout_task()
 519  unsigned int on_rq;     member
 608  unsigned short on_rq;   member
 808  int on_rq;              member
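The three member hits are three distinct on_rq fields; judging by the types and line numbers these are most likely sched_entity, sched_rt_entity and task_struct in include/linux/sched.h. A sketch of just those fields, with all surrounding members elided:

    struct sched_entity {
            /* ... */
            unsigned int            on_rq;  /* 0/1: entity is queued on a cfs_rq */
            /* ... */
    };

    struct sched_rt_entity {
            /* ... */
            unsigned short          on_rq;  /* 0/1: entity is queued on an rt_rq */
            /* ... */
    };

    struct task_struct {
            /* ... */
            int                     on_rq;  /* 0, TASK_ON_RQ_QUEUED or TASK_ON_RQ_MIGRATING */
            /* ... */
    };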
1122 while (p->on_rq) { in trace_selftest_startup_wakeup()