Searched refs:rb_node (Results 1 – 25 of 104) sorted by relevance

/drivers/infiniband/sw/rdmavt/
mcast.c
109 struct rb_node *n; in rvt_mcast_find()
114 n = ibp->mcast_tree.rb_node; in rvt_mcast_find()
119 mcast = rb_entry(n, struct rvt_mcast, rb_node); in rvt_mcast_find()
154 struct rb_node **n = &ibp->mcast_tree.rb_node; in rvt_mcast_add()
155 struct rb_node *pn = NULL; in rvt_mcast_add()
165 tmcast = rb_entry(pn, struct rvt_mcast, rb_node); in rvt_mcast_add()
219 rb_link_node(&mcast->rb_node, pn, n); in rvt_mcast_add()
220 rb_insert_color(&mcast->rb_node, &ibp->mcast_tree); in rvt_mcast_add()
308 struct rb_node *n; in rvt_detach_mcast()
318 n = ibp->mcast_tree.rb_node; in rvt_detach_mcast()
[all …]
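
The rvt_mcast_find()/rvt_mcast_add() hits above show the open-coded pattern from <linux/rbtree.h>: descend from root->rb_node comparing keys, recover the containing structure with rb_entry() (a container_of() wrapper), and attach new nodes with rb_link_node() followed by rb_insert_color(). A minimal sketch of that pattern, assuming a hypothetical "struct item" keyed by a u64 (the type and function names are illustrative, not taken from rdmavt):

#include <linux/rbtree.h>
#include <linux/types.h>

struct item {
        struct rb_node rb_node;         /* embedded tree linkage */
        u64 key;
};

static struct item *item_find(struct rb_root *root, u64 key)
{
        struct rb_node *n = root->rb_node;

        while (n) {
                struct item *it = rb_entry(n, struct item, rb_node);

                if (key < it->key)
                        n = n->rb_left;
                else if (key > it->key)
                        n = n->rb_right;
                else
                        return it;
        }
        return NULL;
}

static void item_insert(struct rb_root *root, struct item *new)
{
        struct rb_node **link = &root->rb_node, *parent = NULL;

        while (*link) {
                struct item *it;

                parent = *link;
                it = rb_entry(parent, struct item, rb_node);
                link = new->key < it->key ? &parent->rb_left : &parent->rb_right;
        }
        /* attach the node, then let the rbtree core rebalance and recolour */
        rb_link_node(&new->rb_node, parent, link);
        rb_insert_color(&new->rb_node, root);
}
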
/drivers/android/
binder_alloc.c
72 struct rb_node **p = &alloc->free_buffers.rb_node; in binder_insert_free_buffer()
73 struct rb_node *parent = NULL; in binder_insert_free_buffer()
88 buffer = rb_entry(parent, struct binder_buffer, rb_node); in binder_insert_free_buffer()
98 rb_link_node(&new_buffer->rb_node, parent, p); in binder_insert_free_buffer()
99 rb_insert_color(&new_buffer->rb_node, &alloc->free_buffers); in binder_insert_free_buffer()
105 struct rb_node **p = &alloc->allocated_buffers.rb_node; in binder_insert_allocated_buffer_locked()
106 struct rb_node *parent = NULL; in binder_insert_allocated_buffer_locked()
113 buffer = rb_entry(parent, struct binder_buffer, rb_node); in binder_insert_allocated_buffer_locked()
123 rb_link_node(&new_buffer->rb_node, parent, p); in binder_insert_allocated_buffer_locked()
124 rb_insert_color(&new_buffer->rb_node, &alloc->allocated_buffers); in binder_insert_allocated_buffer_locked()
[all …]
binder_internal.h
242 struct rb_node rb_node; member
329 struct rb_node rb_node_desc;
330 struct rb_node rb_node_node;
533 struct rb_node rb_node; member
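
binder_internal.h embeds more than one struct rb_node in the same structure (rb_node_desc, rb_node_node above), which is how a single object is linked into several trees at once; rb_entry() just has to name the member that belongs to the tree being walked. A small illustration with a made-up "struct ref" (names are hypothetical, not binder's):

#include <linux/rbtree.h>
#include <linux/types.h>

struct ref {
        struct rb_node rb_node_desc;    /* linked into a tree sorted by desc */
        struct rb_node rb_node_addr;    /* linked into a tree sorted by addr */
        u32 desc;
        unsigned long addr;
};

static struct ref *ref_from_desc_node(struct rb_node *n)
{
        return rb_entry(n, struct ref, rb_node_desc);
}

static struct ref *ref_from_addr_node(struct rb_node *n)
{
        return rb_entry(n, struct ref, rb_node_addr);
}
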
/drivers/block/drbd/
drbd_interval.c
10 sector_t interval_end(struct rb_node *node) in interval_end()
27 struct rb_node **new = &root->rb_node, *parent = NULL; in drbd_insert_interval()
72 struct rb_node *node = root->rb_node; in drbd_contains_interval()
120 struct rb_node *node = root->rb_node; in drbd_find_overlap()
151 struct rb_node *node; in drbd_next_overlap()
/drivers/gpu/drm/
drm_vma_manager.c
145 struct rb_node *iter; in drm_vma_offset_lookup_locked()
148 iter = mgr->vm_addr_space_mm.interval_tree.rb_root.rb_node; in drm_vma_offset_lookup_locked()
246 struct rb_node **iter; in vma_node_allow()
247 struct rb_node *parent = NULL; in vma_node_allow()
259 iter = &node->vm_files.rb_node; in vma_node_allow()
361 struct rb_node *iter; in drm_vma_node_revoke()
365 iter = node->vm_files.rb_node; in drm_vma_node_revoke()
402 struct rb_node *iter; in drm_vma_node_is_allowed()
406 iter = node->vm_files.rb_node; in drm_vma_node_is_allowed()
drm_mm.c
168 struct rb_node **link, *rb; in drm_mm_interval_tree_add_node()
190 link = &mm->interval_tree.rb_root.rb_node; in drm_mm_interval_tree_add_node()
215 static u64 rb_to_hole_size(struct rb_node *rb) in rb_to_hole_size()
223 struct rb_node **link = &root->rb_root.rb_node, *rb = NULL; in insert_hole_size()
247 struct rb_node **link = &root->rb_node, *rb_parent = NULL; in RB_DECLARE_CALLBACKS_MAX()
295 static inline struct drm_mm_node *rb_hole_size_to_node(struct rb_node *rb) in rb_hole_size_to_node()
300 static inline struct drm_mm_node *rb_hole_addr_to_node(struct rb_node *rb) in rb_hole_addr_to_node()
307 struct rb_node *rb = mm->holes_size.rb_root.rb_node; in best_hole()
325 static bool usable_hole_addr(struct rb_node *rb, u64 size) in usable_hole_addr()
332 struct rb_node *rb = mm->holes_addr.rb_node; in find_hole_addr()
[all …]
drm_prime.c
92 struct rb_node dmabuf_rb;
93 struct rb_node handle_rb;
100 struct rb_node **p, *rb; in drm_prime_add_buf_handle()
111 p = &prime_fpriv->dmabufs.rb_node; in drm_prime_add_buf_handle()
126 p = &prime_fpriv->handles.rb_node; in drm_prime_add_buf_handle()
146 struct rb_node *rb; in drm_prime_lookup_buf_by_handle()
148 rb = prime_fpriv->handles.rb_node; in drm_prime_lookup_buf_by_handle()
168 struct rb_node *rb; in drm_prime_lookup_buf_handle()
170 rb = prime_fpriv->dmabufs.rb_node; in drm_prime_lookup_buf_handle()
191 struct rb_node *rb; in drm_prime_remove_buf_handle()
[all …]
/drivers/md/
dm-writecache.c
83 struct rb_node rb_node; member
611 struct rb_node *node = wc->tree.rb_node; in writecache_find_entry()
617 e = container_of(node, struct wc_entry, rb_node); in writecache_find_entry()
622 e->rb_node.rb_left : e->rb_node.rb_right); in writecache_find_entry()
629 node = rb_next(&e->rb_node); in writecache_find_entry()
633 e = container_of(node, struct wc_entry, rb_node); in writecache_find_entry()
642 node = rb_prev(&e->rb_node); in writecache_find_entry()
644 node = rb_next(&e->rb_node); in writecache_find_entry()
647 e2 = container_of(node, struct wc_entry, rb_node); in writecache_find_entry()
657 struct rb_node **node = &wc->tree.rb_node, *parent = NULL; in writecache_insert_entry()
[all …]
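
writecache_find_entry() above steps to neighbouring entries with rb_next()/rb_prev() after the initial descent; rb_first()/rb_next() also give a full in-order walk. A sketch of such a walk over the hypothetical "struct item" tree from the earlier sketch:

static void item_walk(struct rb_root *root)
{
        struct rb_node *node;

        for (node = rb_first(root); node; node = rb_next(node)) {
                struct item *it = rb_entry(node, struct item, rb_node);

                /* entries are visited in ascending key order */
                (void)it;
        }
}
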
dm-cache-background-tracker.c
16 struct rb_node node;
93 struct rb_node **new = &b->pending.rb_node, *parent = NULL; in __insert_pending()
122 struct rb_node **new = &b->pending.rb_node; in __find_pending()
dm-dust.c
18 struct rb_node node;
38 struct rb_node *node = root->rb_node; in dust_rb_search()
57 struct rb_node **link = &root->rb_node, *parent = NULL; in dust_rb_insert()
243 struct rb_node *node = NULL, *nnode = NULL; in __dust_clear_badblocks()
259 BUG_ON(tree->rb_node != NULL); in __dust_clear_badblocks()
292 struct rb_node *node; in dust_list_badblocks()
/drivers/gpu/drm/i915/gt/
intel_engine_user.c
19 struct rb_node *p = i915->uabi_engines.rb_node; in intel_engine_lookup_user()
60 container_of((struct rb_node *)A, typeof(*a), uabi_node); in engine_cmp()
62 container_of((struct rb_node *)B, typeof(*b), uabi_node); in engine_cmp()
89 container_of((struct rb_node *)pos, typeof(*engine), in sort_engines()
211 struct rb_node **p, *prev; in intel_engines_driver_register()
217 p = &i915->uabi_engines.rb_node; in intel_engines_driver_register()
220 container_of((struct rb_node *)it, typeof(*engine), in intel_engines_driver_register()
/drivers/base/regmap/
regcache-rbtree.c
31 struct rb_node node;
66 struct rb_node *node; in regcache_rbtree_lookup()
78 node = rbtree_ctx->root.rb_node; in regcache_rbtree_lookup()
99 struct rb_node **new, *parent; in regcache_rbtree_insert()
105 new = &root->rb_node; in regcache_rbtree_insert()
137 struct rb_node *node; in rbtree_show()
215 struct rb_node *next; in regcache_rbtree_exit()
373 struct rb_node *node; in regcache_rbtree_write()
402 node = rbtree_ctx->root.rb_node; in regcache_rbtree_write()
469 struct rb_node *node; in regcache_rbtree_sync()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/
pagealloc.c
58 struct rb_node rb_node; member
127 struct rb_node *parent = NULL; in insert_page()
129 struct rb_node **new; in insert_page()
138 new = &root->rb_node; in insert_page()
142 tfp = rb_entry(parent, struct fw_page, rb_node); in insert_page()
162 rb_link_node(&nfp->rb_node, parent, new); in insert_page()
163 rb_insert_color(&nfp->rb_node, root); in insert_page()
174 struct rb_node *tmp; in find_fw_page()
181 tmp = root->rb_node; in find_fw_page()
184 tfp = rb_entry(tmp, struct fw_page, rb_node); in find_fw_page()
[all …]
/drivers/mtd/ubi/
fastmap-wl.c
28 struct rb_node *p; in find_anchor_wl_entry()
79 if (!ubi->free.rb_node || (ubi->free_count - ubi->beb_rsvd_pebs < 1)) in ubi_wl_get_fm_peb()
117 if (!ubi->free.rb_node) in has_enough_free_count()
211 while (!ubi->free.rb_node && ubi->works_count) { in produce_free_peb()
311 if (!ubi->used.rb_node) in need_wear_leveling()
316 if (!ubi->free.rb_node) in need_wear_leveling()
322 if (ubi->free.rb_node) { in need_wear_leveling()
486 e = rb_entry(rb_next(root->rb_node), in may_reserve_for_fm()
wl.c
141 struct rb_node **p, *parent = NULL; in wl_tree_add()
143 p = &root->rb_node; in wl_tree_add()
238 struct rb_node *p; in in_wl_tree()
240 p = root->rb_node; in in_wl_tree()
321 struct rb_node *p; in find_wl_entry()
328 p = root->rb_node; in find_wl_entry()
362 e = rb_entry(root->rb_node, struct ubi_wl_entry, u.rb); in find_mean_wl_entry()
676 if (!ubi->free.rb_node ||
678 (!ubi->used.rb_node && !ubi->scrub.rb_node)) {
690 !ubi->free.rb_node, !ubi->used.rb_node);
[all …]
/drivers/iommu/
iova.c
47 static struct iova *to_iova(struct rb_node *node) in to_iova()
72 rb_link_node(&iovad->anchor.node, NULL, &iovad->rbroot.rb_node); in init_iova_domain()
78 static struct rb_node *
115 static struct rb_node *iova_find_limit(struct iova_domain *iovad, unsigned long limit_pfn) in iova_find_limit()
117 struct rb_node *node, *next; in iova_find_limit()
129 node = iovad->rbroot.rb_node; in iova_find_limit()
155 struct rb_node *start) in iova_insert_rbtree()
157 struct rb_node **new, *parent = NULL; in iova_insert_rbtree()
159 new = (start) ? &start : &(root->rb_node); in iova_insert_rbtree()
184 struct rb_node *curr, *prev; in __alloc_and_insert_iova_range()
[all …]
/drivers/infiniband/ulp/ipoib/
ipoib_multicast.c
161 struct rb_node *n = priv->multicast_tree.rb_node; in __ipoib_mcast_find()
167 mcast = rb_entry(n, struct ipoib_mcast, rb_node); in __ipoib_mcast_find()
185 struct rb_node **n = &priv->multicast_tree.rb_node, *pn = NULL; in __ipoib_mcast_add()
192 tmcast = rb_entry(pn, struct ipoib_mcast, rb_node); in __ipoib_mcast_add()
204 rb_link_node(&mcast->rb_node, pn, n); in __ipoib_mcast_add()
205 rb_insert_color(&mcast->rb_node, &priv->multicast_tree); in __ipoib_mcast_add()
719 rb_erase(&mcast->rb_node, &priv->multicast_tree); in ipoib_check_and_add_mcast_sendonly()
838 rb_erase(&mcast->rb_node, &priv->multicast_tree); in ipoib_mcast_dev_flush()
843 rb_erase(&priv->broadcast->rb_node, &priv->multicast_tree); in ipoib_mcast_dev_flush()
936 rb_replace_node(&mcast->rb_node, in ipoib_mcast_restart_task()
[all …]
/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
78 struct rb_node *node, unsigned int pgnr) in __bo_search_and_remove_from_free_rbtree()
131 struct rb_node *n = root->rb_node; in __bo_search_by_addr()
156 struct rb_node *n = root->rb_node; in __bo_search_by_addr_in_range()
181 struct rb_node **new = &root->rb_node; in __bo_insert_to_free_rbtree()
182 struct rb_node *parent = NULL; in __bo_insert_to_free_rbtree()
214 struct rb_node **new = &root->rb_node; in __bo_insert_to_alloc_rbtree()
215 struct rb_node *parent = NULL; in __bo_insert_to_alloc_rbtree()
409 bo = __bo_search_and_remove_from_free_rbtree(root->rb_node, pgnr); in hmm_bo_alloc()
516 rbtree_node_to_hmm_bo(bdev->allocated_rbtree.rb_node)); in hmm_bo_device_exit()
/drivers/fpga/
dfl-afu-dma-region.c
147 struct rb_node **new, *parent = NULL; in afu_dma_region_add()
152 new = &afu->dma_regions.rb_node; in afu_dma_region_add()
206 struct rb_node *node = rb_first(&afu->dma_regions); in afu_dma_region_destroy()
248 struct rb_node *node = afu->dma_regions.rb_node; in afu_dma_region_find()
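
afu_dma_region_destroy() above drains its tree by repeatedly taking rb_first() and removing that node. A sketch of the same teardown pattern for the hypothetical "struct item" (kfree() assumes the entries came from kmalloc(); <linux/slab.h> needed):

#include <linux/slab.h>

static void item_destroy_all(struct rb_root *root)
{
        struct rb_node *node;

        while ((node = rb_first(root))) {
                struct item *it = rb_entry(node, struct item, rb_node);

                rb_erase(node, root);   /* unlink before freeing */
                kfree(it);
        }
}
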
/drivers/virt/geniezone/
gzvm_mmu.c
8 static int cmp_ppages(struct rb_node *node, const struct rb_node *parent) in cmp_ppages()
32 static int rb_ppage_cmp(const void *key, const struct rb_node *node) in rb_ppage_cmp()
46 struct rb_node *node; in gzvm_remove_ppage()
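
gzvm_mmu.c uses the comparison-callback helpers rather than an open-coded descent: rb_find() takes a key comparator and rb_find_add() takes a node-vs-node comparator, so the walk itself lives in <linux/rbtree.h> (assuming a kernel recent enough to provide these helpers). A sketch with hypothetical comparators for the same "struct item":

static int item_cmp_key(const void *key, const struct rb_node *node)
{
        const u64 *k = key;
        const struct item *it = rb_entry(node, struct item, rb_node);

        if (*k < it->key)
                return -1;
        if (*k > it->key)
                return 1;
        return 0;
}

static int item_cmp_node(struct rb_node *a, const struct rb_node *b)
{
        const struct item *ia = rb_entry(a, struct item, rb_node);

        return item_cmp_key(&ia->key, b);
}

static struct item *item_lookup(struct rb_root *root, u64 key)
{
        struct rb_node *node = rb_find(&key, root, item_cmp_key);

        return node ? rb_entry(node, struct item, rb_node) : NULL;
}

static bool item_insert_once(struct rb_root *root, struct item *new)
{
        /* rb_find_add() returns a colliding node, or NULL when 'new' went in */
        return rb_find_add(&new->rb_node, root, item_cmp_node) == NULL;
}
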
/drivers/infiniband/sw/rxe/
rxe_mcast.c
66 struct rb_node **link = &tree->rb_node; in __rxe_insert_mcg()
67 struct rb_node *node = NULL; in __rxe_insert_mcg()
110 struct rb_node *node; in __rxe_lookup_mcg()
113 node = tree->rb_node; in __rxe_lookup_mcg()
/drivers/gpu/drm/nouveau/nvkm/core/
object.c
37 struct rb_node *node = client->objroot.rb_node; in nvkm_object_search()
76 struct rb_node **ptr; in nvkm_object_insert()
77 struct rb_node *parent = NULL; in nvkm_object_insert()
81 ptr = &object->client->objroot.rb_node; in nvkm_object_insert()
/drivers/gpu/drm/i915/
i915_active.c
27 struct rb_node node;
143 ref->cache = fetch_node(ref->tree.rb_node); in __active_retire()
152 rb_link_node(&ref->cache->node, NULL, &ref->tree.rb_node); in __active_retire()
154 GEM_BUG_ON(ref->tree.rb_node != &ref->cache->node); in __active_retire()
272 it = fetch_node(ref->tree.rb_node); in __active_lookup()
292 struct rb_node **p, *parent; in active_instance()
302 p = &ref->tree.rb_node; in active_instance()
776 struct rb_node *prev, *p; in reuse_idle_barrier()
796 p = ref->tree.rb_node; in reuse_idle_barrier()
955 struct rb_node **p, *parent; in i915_active_acquire_barrier()
[all …]
/drivers/xen/
evtchn.c
84 struct rb_node node;
110 struct rb_node **new = &(u->evtchns.rb_node), *parent = NULL; in add_evtchn()
145 struct rb_node *node = u->evtchns.rb_node; in find_evtchn()
671 struct rb_node *node; in evtchn_release()
673 while ((node = u->evtchns.rb_node)) { in evtchn_release()
/drivers/infiniband/hw/hfi1/
pin_system.c
97 struct mmu_rb_node *rb_node; in find_system_node() local
101 rb_node = hfi1_mmu_rb_get_first(handler, start, (end - start)); in find_system_node()
102 if (!rb_node) { in find_system_node()
108 kref_get(&rb_node->refcount); in find_system_node()
111 return container_of(rb_node, struct sdma_mmu_node, rb); in find_system_node()
