
Searched refs: task_rq (Results 1 – 5 of 5) sorted by relevance

/kernel/sched/
stats.h:64 unsigned long long now = task_rq(t)->clock, delta = 0; in sched_info_dequeued()
72 rq_sched_info_dequeued(task_rq(t), delta); in sched_info_dequeued()
82 unsigned long long now = task_rq(t)->clock, delta = 0; in sched_info_arrive()
91 rq_sched_info_arrive(task_rq(t), delta); in sched_info_arrive()
103 t->sched_info.last_queued = task_rq(t)->clock; in sched_info_queued()
115 unsigned long long delta = task_rq(t)->clock - in sched_info_depart()
118 rq_sched_info_depart(task_rq(t), delta); in sched_info_depart()
132 struct rq *rq = task_rq(prev); in __sched_info_switch()
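All the stats.h hits follow one pattern: read the current runqueue clock through task_rq(t)->clock, subtract a timestamp saved earlier in t->sched_info, and charge the resulting delta to the per-runqueue schedstat counters. Below is a compressed, user-space sketch of that delta bookkeeping; the array, the task_rq() stand-in and the field layout are simplified for the example and are not the kernel's real definitions.

    /* Simplified stand-ins, not the kernel's real layout. */
    struct rq         { unsigned long long clock, run_delay; };
    struct sched_info { unsigned long long last_queued; };
    struct task       { int cpu; struct sched_info sched_info; };

    static struct rq runqueues[4];
    #define task_rq(t) (&runqueues[(t)->cpu])   /* stand-in for the sched.h macro */

    /* Charge the time the task spent waiting on its runqueue since it was queued. */
    static void sched_info_dequeue_sketch(struct task *t)
    {
            unsigned long long now = task_rq(t)->clock, delta = 0;

            if (t->sched_info.last_queued) {
                    delta = now - t->sched_info.last_queued;
                    t->sched_info.last_queued = 0;
            }
            task_rq(t)->run_delay += delta;     /* what rq_sched_info_dequeued() accounts */
    }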
rt.c:217 struct rq *rq = task_rq(p); in rt_rq_of_se()
1491 if (!cpupri_find(&task_rq(task)->rd->cpupri, task, lowest_mask)) in find_lowest_rq()
1574 if (unlikely(task_rq(task) != rq || in find_lock_lowest_rq()
1839 rq = task_rq(p); in set_cpus_allowed_rt()
1931 rq != task_rq(p)) in switched_to_rt()
core.c:312 rq = task_rq(p); in __task_rq_lock()
314 if (likely(rq == task_rq(p))) in __task_rq_lock()
331 rq = task_rq(p); in task_rq_lock()
333 if (likely(rq == task_rq(p))) in task_rq_lock()
519 assert_raw_spin_locked(&task_rq(p)->lock); in resched_task()
697 assert_raw_spin_locked(&task_rq(p)->lock); in resched_task()
1007 lockdep_is_held(&task_rq(p)->lock))); in set_task_cpu()
1068 rq = task_rq(p); in wait_task_inactive()
1545 struct rq *rq = task_rq(p); in try_to_wake_up_local()
4460 p_rq = task_rq(p); in yield_to()
[all …]
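The core.c hits at 312/314 and 331/333 show why task_rq() is almost always paired with a lock-and-recheck loop: a task can migrate to another CPU between the moment task_rq(p) is evaluated and the moment that runqueue's lock is taken, so the locking helpers re-read task_rq(p) under the lock and retry on a mismatch (other hits, such as 519, 697 and 1007, merely assert that this lock is already held). A user-space sketch of the retry pattern follows, with pthread mutexes standing in for the kernel's raw spinlocks and an invented struct layout.

    #include <pthread.h>

    struct rq   { pthread_mutex_t lock; };      /* stand-in for the kernel's struct rq */
    struct task { int cpu; };

    static struct rq runqueues[4];              /* each lock needs pthread_mutex_init() */
    #define task_rq(p) (&runqueues[(p)->cpu])   /* stand-in for the sched.h macro */

    /* Lock the runqueue the task is on, retrying if the task migrates meanwhile. */
    static struct rq *lock_task_rq(struct task *p)
    {
            struct rq *rq;

            for (;;) {
                    rq = task_rq(p);                    /* snapshot; may go stale */
                    pthread_mutex_lock(&rq->lock);
                    if (rq == task_rq(p))               /* still on this runqueue? */
                            return rq;                  /* yes: return with the lock held */
                    pthread_mutex_unlock(&rq->lock);    /* no: it moved, try again */
            }
    }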
sched.h:539 #define task_rq(p) cpu_rq(task_cpu(p)) macro
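This sched.h hit is the definition every other result expands to: task_rq(p) is cpu_rq(task_cpu(p)), i.e. the per-CPU runqueue of whichever CPU the task is currently placed on. The hypothetical call site below only makes that expansion concrete; it assumes it sits inside kernel/sched/, where the macro and the struct definitions are in scope.

    /* Hypothetical example function, purely illustrative. */
    static void task_rq_example(struct task_struct *p)
    {
            struct rq *rq = task_rq(p);   /* expands to cpu_rq(task_cpu(p)) */

            (void)rq;                     /* real callers lock rq before using it */
    }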
fair.c:381 return &task_rq(p)->cfs; in task_cfs_rq()
387 struct rq *rq = task_rq(p); in cfs_rq_of()
2760 WARN_ON(task_rq(p) != rq); in hrtick_start_fair()
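The fair.c hits use task_rq() to reach the CFS bookkeeping embedded in the runqueue: task_cfs_rq() at 381 resolves to &task_rq(p)->cfs (the variant used when CONFIG_FAIR_GROUP_SCHED is off), and hrtick_start_fair() at 2760 only asserts that the rq it was handed really is task_rq(p). A stand-in sketch of that containment relationship, with invented, heavily simplified structures:

    struct cfs_rq { unsigned long nr_running; };   /* CFS bookkeeping, heavily simplified */
    struct rq     { struct cfs_rq cfs; };          /* each runqueue embeds a cfs_rq */
    struct task   { int cpu; };

    static struct rq runqueues[4];
    #define task_rq(p) (&runqueues[(p)->cpu])      /* stand-in for the sched.h macro */

    /* Without fair-group scheduling, a task's CFS runqueue is simply the
     * cfs member of whichever runqueue the task is currently on. */
    static struct cfs_rq *task_cfs_rq_sketch(struct task *p)
    {
            return &task_rq(p)->cfs;
    }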