Lines matching refs: task_rq
221 if (prio_less(b, a, !!task_rq(a)->core->core_forceidle_count)) in __sched_core_less()
632 rq = task_rq(p); in __task_rq_lock()
634 if (likely(rq == task_rq(p) && !task_on_rq_migrating(p))) { in __task_rq_lock()
657 rq = task_rq(p); in task_rq_lock()
676 if (likely(rq == task_rq(p) && !task_on_rq_migrating(p))) { in task_rq_lock()
2311 rq = task_rq(p); in wait_task_inactive()
2640 if (task_rq(p) == rq) { in migration_cpu_stop()
2717 if (task_rq(p) != rq) in push_cpu_stop()
2734 if (task_rq(p) == rq) { in push_cpu_stop()
2772 struct rq *rq = task_rq(p); in __do_set_cpus_allowed()
3373 lockdep_is_held(__rq_lockp(task_rq(p))))); in set_task_cpu()
3404 src_rq = task_rq(p); in __migrate_swap_task()
3827 atomic_dec(&task_rq(p)->nr_iowait); in ttwu_do_activate()
4359 atomic_dec(&task_rq(p)->nr_iowait); in try_to_wake_up()
5560 struct sched_entity *curr = (&task_rq(p)->cfs)->curr; in prefetch_curr_exec_start()
6066 return (task_rq(t)->idle == t); in is_task_rq_idle()
8330 if (!cpumask_subset(task_rq(p)->rd->span, mask)) in dl_task_check_affinity()
8900 p_rq = task_rq(p); in yield_to()
8911 if (task_rq(p) != p_rq) { in yield_to()
9408 if (task_rq(p) == rq && task_on_rq_queued(p)) { in __balance_push_cpu_stop()
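The matches at 632/634 and 657/676 (apparently from kernel/sched/core.c) show the recurring lock-and-revalidate pattern around task_rq(): the task's runqueue is read without protection, its lock is taken, and then task_rq() is re-read under the lock, because the task may have migrated (or be mid-migration) in between. Below is a minimal sketch of that retry loop, modeled on the __task_rq_lock() matches above; task_rq_lock_sketch is a hypothetical name for illustration, and details such as rq_flags pinning and lockdep assertions are omitted.

/*
 * Sketch of the lock-and-revalidate loop visible in the matches at
 * lines 632 and 634 above (simplified; not the verbatim source).
 */
static struct rq *task_rq_lock_sketch(struct task_struct *p)
{
	struct rq *rq;

	for (;;) {
		rq = task_rq(p);		/* unlocked read of p's runqueue */
		raw_spin_rq_lock(rq);

		/*
		 * Re-check under the lock: if p has moved, or a migration
		 * is in flight, the lock we hold is the wrong one.
		 */
		if (likely(rq == task_rq(p) && !task_on_rq_migrating(p)))
			return rq;		/* lock held on p's current rq */

		raw_spin_rq_unlock(rq);

		while (unlikely(task_on_rq_migrating(p)))
			cpu_relax();		/* wait out the migration, then retry */
	}
}

The same double-check shows up in the stopper callbacks listed above (migration_cpu_stop, push_cpu_stop, __balance_push_cpu_stop, yield_to), which take a runqueue lock first and only act if task_rq(p) still equals that runqueue.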