
Searched refs:rb_node (Results 1 – 13 of 13) sorted by relevance

/kernel/power/
wakelock.c
29 struct rb_node node;
40 struct rb_node *node; in pm_show_wakelocks()
150 struct rb_node **node = &wakelocks_tree.rb_node; in wakelock_lookup_add()
151 struct rb_node *parent = *node; in wakelock_lookup_add()
swap.c
123 struct rb_node node;
132 struct rb_node **new = &(swsusp_extents.rb_node); in swsusp_extents_insert()
133 struct rb_node *parent = NULL; in swsusp_extents_insert()
198 struct rb_node *node; in free_all_swap_pages()
200 while ((node = swsusp_extents.rb_node)) { in free_all_swap_pages()
215 return (swsusp_extents.rb_node != NULL); in swsusp_swap_in_use()
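The wakelock.c and swap.c hits above (wakelock_lookup_add(), swsusp_extents_insert()) both use the classic open-coded descent from <linux/rbtree.h>: walk the rb_node pointers to a free slot, then rb_link_node() and rb_insert_color(). A minimal sketch of that pattern, assuming a hypothetical struct my_extent keyed on start rather than the kernel's real types:

#include <linux/rbtree.h>

/* Hypothetical node type; swsusp_extent and wakelock embed rb_node the same way. */
struct my_extent {
	struct rb_node node;
	unsigned long start;
};

static struct rb_root my_tree = RB_ROOT;

/* Walk down to the insertion point, then link the node and rebalance. */
static void my_extent_insert(struct my_extent *ext)
{
	struct rb_node **new = &my_tree.rb_node, *parent = NULL;

	while (*new) {
		struct my_extent *cur = rb_entry(*new, struct my_extent, node);

		parent = *new;
		if (ext->start < cur->start)
			new = &(*new)->rb_left;
		else
			new = &(*new)->rb_right;
	}
	rb_link_node(&ext->node, parent, new);
	rb_insert_color(&ext->node, &my_tree);
}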
/kernel/locking/
rtmutex_common.h
34 struct rb_node tree_entry;
35 struct rb_node pi_tree_entry;
106 struct rb_node *leftmost = rb_first_cached(&lock->waiters); in rt_mutex_waiter_is_top_waiter()
113 struct rb_node *leftmost = rb_first_cached(&lock->waiters); in rt_mutex_top_waiter()
ww_mutex.h
96 struct rb_node *n = rb_first(&lock->rtmutex.waiters.rb_root); in __ww_waiter_first()
105 struct rb_node *n = rb_next(&w->tree_entry); in __ww_waiter_next()
114 struct rb_node *n = rb_prev(&w->tree_entry); in __ww_waiter_prev()
123 struct rb_node *n = rb_last(&lock->rtmutex.waiters.rb_root); in __ww_waiter_last()
rtmutex.c
407 static __always_inline bool __waiter_less(struct rb_node *a, const struct rb_node *b) in __waiter_less()
453 __pi_waiter_less(struct rb_node *a, const struct rb_node *b) in __pi_waiter_less()
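The rtmutex and ww_mutex hits use the cached variant, struct rb_root_cached, so the top waiter can be read in O(1) via rb_first_cached() because the leftmost node is tracked at insertion time. A rough sketch of that pattern with a hypothetical waiter type ordered by prio (not the real rt_mutex_waiter):

#include <linux/types.h>
#include <linux/rbtree.h>

struct my_waiter {
	struct rb_node tree_entry;
	int prio;
};

static struct rb_root_cached my_waiters = RB_ROOT_CACHED;

/* 'less' callback, playing the role that __waiter_less() plays above. */
static bool my_waiter_less(struct rb_node *a, const struct rb_node *b)
{
	return rb_entry(a, struct my_waiter, tree_entry)->prio <
	       rb_entry(b, struct my_waiter, tree_entry)->prio;
}

static void my_enqueue(struct my_waiter *w)
{
	rb_add_cached(&w->tree_entry, &my_waiters, my_waiter_less);
}

/* The top waiter is simply the cached leftmost node. */
static struct my_waiter *my_top_waiter(void)
{
	struct rb_node *leftmost = rb_first_cached(&my_waiters);

	return leftmost ? rb_entry(leftmost, struct my_waiter, tree_entry) : NULL;
}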
/kernel/trace/
trace_stat.c
27 struct rb_node node;
77 struct rb_node **new = &(root->rb_node), *parent = NULL; in insert_stat()
174 struct rb_node *node; in stat_seq_start()
198 struct rb_node *node = p; in stat_seq_next()
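The stat_seq_start()/stat_seq_next() hits iterate the whole tree in key order with rb_first() and rb_next(). A minimal illustrative walk over a hypothetical stat node type:

#include <linux/rbtree.h>

struct my_stat {
	struct rb_node node;
	unsigned long value;
};

/* In-order walk: smallest key first, as a seq_file iterator would emit it. */
static void my_stat_walk(struct rb_root *root)
{
	struct rb_node *node;

	for (node = rb_first(root); node; node = rb_next(node)) {
		struct my_stat *st = rb_entry(node, struct my_stat, node);

		(void)st->value;	/* visit the entry */
	}
}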
/kernel/events/
uprobes.c
56 struct rb_node rb_node; /* node in the rb tree */ member
637 rb_entry((node), struct uprobe, rb_node)
644 static inline int __uprobe_cmp_key(const void *key, const struct rb_node *b) in __uprobe_cmp_key()
650 static inline int __uprobe_cmp(struct rb_node *a, const struct rb_node *b) in __uprobe_cmp()
662 struct rb_node *node = rb_find(&key, &uprobes_tree, __uprobe_cmp_key); in __find_uprobe()
687 struct rb_node *node; in __insert_uprobe()
689 node = rb_find_add(&uprobe->rb_node, &uprobes_tree, __uprobe_cmp); in __insert_uprobe()
932 return !RB_EMPTY_NODE(&uprobe->rb_node); in uprobe_is_active()
945 rb_erase(&uprobe->rb_node, &uprobes_tree); in delete_uprobe()
947 RB_CLEAR_NODE(&uprobe->rb_node); /* for uprobe_is_active() */ in delete_uprobe()
[all …]
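uprobes.c drives its tree through the rb_find()/rb_find_add() helpers, which take comparator callbacks (__uprobe_cmp_key(), __uprobe_cmp()) instead of an open-coded walk. A hedged sketch of that API with a hypothetical struct my_probe keyed on offset:

#include <linux/types.h>
#include <linux/rbtree.h>

struct my_probe {
	struct rb_node rb_node;
	loff_t offset;
};

static struct rb_root my_probes = RB_ROOT;

/* Key comparator for lookups, cf. __uprobe_cmp_key(). */
static int my_cmp_key(const void *key, const struct rb_node *b)
{
	loff_t offset = *(const loff_t *)key;
	const struct my_probe *p = rb_entry(b, struct my_probe, rb_node);

	if (offset < p->offset)
		return -1;
	if (offset > p->offset)
		return 1;
	return 0;
}

/* Node comparator for insertion, cf. __uprobe_cmp(). */
static int my_cmp(struct rb_node *a, const struct rb_node *b)
{
	return my_cmp_key(&rb_entry(a, struct my_probe, rb_node)->offset, b);
}

static struct my_probe *my_find(loff_t offset)
{
	struct rb_node *node = rb_find(&offset, &my_probes, my_cmp_key);

	return node ? rb_entry(node, struct my_probe, rb_node) : NULL;
}

/* rb_find_add() inserts and returns NULL, or returns the already-present node. */
static struct my_probe *my_insert(struct my_probe *p)
{
	struct rb_node *node = rb_find_add(&p->rb_node, &my_probes, my_cmp);

	return node ? rb_entry(node, struct my_probe, rb_node) : NULL;
}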
core.c
1737 static inline bool __group_less(struct rb_node *a, const struct rb_node *b) in __group_less()
1749 static inline int __group_cmp(const void *key, const struct rb_node *node) in __group_cmp()
1821 struct rb_node *node; in perf_event_groups_first()
1840 struct rb_node *next; in perf_event_groups_next()
/kernel/sched/
deadline.c
45 return !RB_EMPTY_NODE(&dl_se->rb_node); in on_dl_rq()
413 return dl_rq->root.rb_leftmost == &dl_se->rb_node; in is_leftmost()
525 static inline bool __pushable_less(struct rb_node *a, const struct rb_node *b) in __pushable_less()
536 struct rb_node *leftmost; in enqueue_pushable_dl_task()
551 struct rb_node *leftmost; in dequeue_pushable_dl_task()
1425 struct rb_node *leftmost = dl_rq->root.rb_leftmost; in dec_dl_deadline()
1428 entry = rb_entry(leftmost, struct sched_dl_entity, rb_node); in dec_dl_deadline()
1470 rb_entry((node), struct sched_dl_entity, rb_node)
1472 static inline bool __dl_less(struct rb_node *a, const struct rb_node *b) in __dl_less()
1481 BUG_ON(!RB_EMPTY_NODE(&dl_se->rb_node)); in __enqueue_dl_entity()
[all …]
core.c
167 static inline bool rb_sched_core_less(struct rb_node *a, const struct rb_node *b) in rb_sched_core_less()
172 static inline int rb_sched_core_cmp(const void *key, const struct rb_node *node) in rb_sched_core_cmp()
212 struct rb_node *node; in sched_core_find()
226 struct rb_node *node = &p->core_node; in sched_core_next()
4375 RB_CLEAR_NODE(&p->dl.rb_node); in __sched_fork()
fair.c
548 struct rb_node *leftmost = rb_first_cached(&cfs_rq->tasks_timeline); in update_min_vruntime()
576 static inline bool __entity_less(struct rb_node *a, const struct rb_node *b) in __entity_less()
598 struct rb_node *left = rb_first_cached(&cfs_rq->tasks_timeline); in __pick_first_entity()
608 struct rb_node *next = rb_next(&se->run_node); in __pick_next_entity()
619 struct rb_node *last = rb_last(&cfs_rq->tasks_timeline.rb_root); in __pick_last_entity()
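The deadline.c and sched/core.c hits also show the usual membership convention: RB_CLEAR_NODE() at init (__sched_fork()), RB_EMPTY_NODE() as the "is it enqueued?" test (on_dl_rq(), and uprobe_is_active() above), and a clear again after erase. A small sketch of that lifecycle, using a hypothetical entity type rather than sched_dl_entity:

#include <linux/types.h>
#include <linux/rbtree.h>

struct my_entity {
	struct rb_node run_node;
};

static struct rb_root_cached my_timeline = RB_ROOT_CACHED;

static void my_entity_init(struct my_entity *se)
{
	/* Mark the node as not linked, like RB_CLEAR_NODE(&p->dl.rb_node). */
	RB_CLEAR_NODE(&se->run_node);
}

/* Mirrors on_dl_rq(): a linked node is never "empty". */
static bool my_entity_queued(struct my_entity *se)
{
	return !RB_EMPTY_NODE(&se->run_node);
}

static void my_dequeue(struct my_entity *se)
{
	rb_erase_cached(&se->run_node, &my_timeline);
	RB_CLEAR_NODE(&se->run_node);
}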
/kernel/bpf/
local_storage.c
70 struct rb_node *node; in cgroup_storage_lookup()
75 node = root->rb_node; in cgroup_storage_lookup()
105 struct rb_node **new = &(root->rb_node), *parent = NULL; in cgroup_storage_insert()
/kernel/
fork.c
542 struct rb_node **rb_link, *rb_parent; in dup_mmap()
567 rb_link = &mm->mm_rb.rb_node; in dup_mmap()