/lib/ |
D | radix-tree.c |
  109  static inline void tag_set(struct radix_tree_node *node, unsigned int tag,    in tag_set()    argument
  112  __set_bit(offset, node->tags[tag]);    in tag_set()
  115  static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,    in tag_clear()    argument
  118  __clear_bit(offset, node->tags[tag]);    in tag_clear()
  121  static inline int tag_get(const struct radix_tree_node *node, unsigned int tag,    in tag_get()    argument
  124  return test_bit(offset, node->tags[tag]);    in tag_get()
  161  static inline int any_tag_set(const struct radix_tree_node *node,    in any_tag_set()    argument
  166  if (node->tags[tag][idx])    in any_tag_set()
  172  static inline void all_tag_set(struct radix_tree_node *node, unsigned int tag)    in all_tag_set()    argument
  174  bitmap_fill(node->tags[tag], RADIX_TREE_MAP_SIZE);    in all_tag_set()
  [all …]
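
The tag helpers above back the public radix_tree_tag_* API. A minimal usage sketch in kernel C, assuming <linux/radix_tree.h>; the tree name, item pointer and tag number 0 are illustrative, not taken from the listing:

    #include <linux/radix_tree.h>
    #include <linux/gfp.h>
    #include <linux/errno.h>

    /* Illustrative sketch, not part of lib/radix-tree.c itself. */
    static RADIX_TREE(my_tree, GFP_KERNEL);     /* statically initialised tree */

    static int example_store_tagged(unsigned long index, void *item)
    {
        int err = radix_tree_insert(&my_tree, index, item);

        if (err)
            return err;
        /* Tag numbers must be below RADIX_TREE_MAX_TAGS (3). */
        radix_tree_tag_set(&my_tree, index, 0);
        return 0;
    }

    static bool example_is_tagged(unsigned long index)
    {
        return radix_tree_tag_get(&my_tree, index, 0);
    }
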
|
D | btree.c |
  95   unsigned long *node;    in btree_node_alloc()    local
  97   node = mempool_alloc(head->mempool, gfp);    in btree_node_alloc()
  98   if (likely(node))    in btree_node_alloc()
  99   memset(node, 0, NODESIZE);    in btree_node_alloc()
  100  return node;    in btree_node_alloc()
  148  static unsigned long *bkey(struct btree_geo *geo, unsigned long *node, int n)    in bkey()    argument
  150  return &node[n * geo->keylen];    in bkey()
  153  static void *bval(struct btree_geo *geo, unsigned long *node, int n)    in bval()    argument
  155  return (void *)node[geo->no_longs + n];    in bval()
  158  static void setkey(struct btree_geo *geo, unsigned long *node, int n,    in setkey()    argument
  [all …]
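
The node helpers above are internal to the simple B+tree; callers normally go through btree_init(), btree_insert() and btree_lookup() with one of the exported geometries. A hedged sketch using btree_geo64 (the key value and payload pointer are illustrative):

    #include <linux/btree.h>
    #include <linux/gfp.h>
    #include <linux/errno.h>

    /* Illustrative sketch, not part of lib/btree.c itself. */
    static int example_btree(void *payload)
    {
        struct btree_head head;
        unsigned long key = 42;
        void *found;
        int err;

        err = btree_init(&head);
        if (err)
            return err;

        /* The stored value must be non-NULL. */
        err = btree_insert(&head, &btree_geo64, &key, payload, GFP_KERNEL);
        if (!err) {
            found = btree_lookup(&head, &btree_geo64, &key);
            if (found != payload)
                err = -EINVAL;
        }

        btree_destroy(&head);
        return err;
    }
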
|
D | timerqueue.c |
  27  bool timerqueue_add(struct timerqueue_head *head, struct timerqueue_node *node)    in timerqueue_add()    argument
  35  WARN_ON_ONCE(!RB_EMPTY_NODE(&node->node));    in timerqueue_add()
  39  ptr = rb_entry(parent, struct timerqueue_node, node);    in timerqueue_add()
  40  if (node->expires < ptr->expires) {    in timerqueue_add()
  47  rb_link_node(&node->node, parent, p);    in timerqueue_add()
  48  rb_insert_color_cached(&node->node, &head->rb_root, leftmost);    in timerqueue_add()
  63  bool timerqueue_del(struct timerqueue_head *head, struct timerqueue_node *node)    in timerqueue_del()    argument
  65  WARN_ON_ONCE(RB_EMPTY_NODE(&node->node));    in timerqueue_del()
  67  rb_erase_cached(&node->node, &head->rb_root);    in timerqueue_del()
  68  RB_CLEAR_NODE(&node->node);    in timerqueue_del()
  [all …]
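
timerqueue keeps nodes sorted by ->expires on a cached rbtree so the earliest deadline is available in O(1). A minimal sketch, assuming <linux/timerqueue.h> (the 100ms expiry is arbitrary):

    #include <linux/timerqueue.h>
    #include <linux/ktime.h>
    #include <linux/bug.h>

    /* Illustrative sketch, not part of lib/timerqueue.c itself. */
    static void example_timerqueue(void)
    {
        struct timerqueue_head head;
        struct timerqueue_node node;

        timerqueue_init_head(&head);
        timerqueue_init(&node);
        node.expires = ktime_add_ms(ktime_get(), 100);  /* fire in ~100ms */

        timerqueue_add(&head, &node);
        /* The queue head is now the earliest-expiring node. */
        WARN_ON(timerqueue_getnext(&head) != &node);
        timerqueue_del(&head, &node);
    }
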
|
D | rbtree.c |
  85   __rb_insert(struct rb_node *node, struct rb_root *root,    in __rb_insert()    argument
  88   struct rb_node *parent = rb_red_parent(node), *gparent, *tmp;    in __rb_insert()
  100  rb_set_parent_color(node, NULL, RB_BLACK);    in __rb_insert()
  133  node = gparent;    in __rb_insert()
  134  parent = rb_parent(node);    in __rb_insert()
  135  rb_set_parent_color(node, parent, RB_RED);    in __rb_insert()
  140  if (node == tmp) {    in __rb_insert()
  154  tmp = node->rb_left;    in __rb_insert()
  156  WRITE_ONCE(node->rb_left, parent);    in __rb_insert()
  160  rb_set_parent_color(parent, node, RB_RED);    in __rb_insert()
  [all …]
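
__rb_insert() is only the rebalancing half; users first link the node with rb_link_node() and then call rb_insert_color(). A conventional keyed-insert sketch (struct my_item and its key field are illustrative, not from rbtree.c):

    #include <linux/rbtree.h>

    struct my_item {
        struct rb_node rb;
        unsigned long key;
    };

    /* Illustrative sketch of the usual insertion pattern. */
    static void my_item_insert(struct rb_root *root, struct my_item *item)
    {
        struct rb_node **link = &root->rb_node, *parent = NULL;

        /* Walk down to the leaf position the key belongs at. */
        while (*link) {
            struct my_item *cur = rb_entry(*link, struct my_item, rb);

            parent = *link;
            link = item->key < cur->key ? &parent->rb_left : &parent->rb_right;
        }

        rb_link_node(&item->rb, parent, link);  /* attach as a red leaf */
        rb_insert_color(&item->rb, root);       /* rebalance and recolour */
    }
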
|
D | xarray.c |
  78   static inline unsigned long *node_marks(struct xa_node *node, xa_mark_t mark)    in node_marks()    argument
  80   return node->marks[(__force unsigned)mark];    in node_marks()
  83   static inline bool node_get_mark(struct xa_node *node,    in node_get_mark()    argument
  86   return test_bit(offset, node_marks(node, mark));    in node_get_mark()
  90   static inline bool node_set_mark(struct xa_node *node, unsigned int offset,    in node_set_mark()    argument
  93   return __test_and_set_bit(offset, node_marks(node, mark));    in node_set_mark()
  97   static inline bool node_clear_mark(struct xa_node *node, unsigned int offset,    in node_clear_mark()    argument
  100  return __test_and_clear_bit(offset, node_marks(node, mark));    in node_clear_mark()
  103  static inline bool node_any_mark(struct xa_node *node, xa_mark_t mark)    in node_any_mark()    argument
  105  return !bitmap_empty(node_marks(node, mark), XA_CHUNK_SIZE);    in node_any_mark()
  [all …]
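
The node_*_mark() helpers implement per-slot marks inside a node; externally they are driven through xa_set_mark()/xa_get_mark(). A minimal sketch, assuming <linux/xarray.h> (the index 7 and payload are arbitrary):

    #include <linux/xarray.h>
    #include <linux/gfp.h>
    #include <linux/errno.h>

    /* Illustrative sketch, not part of lib/xarray.c itself. */
    static DEFINE_XARRAY(my_array);

    static int example_xarray(void *payload)
    {
        void *old = xa_store(&my_array, 7, payload, GFP_KERNEL);

        if (xa_is_err(old))
            return xa_err(old);

        xa_set_mark(&my_array, 7, XA_MARK_0);   /* tag the entry */
        return xa_get_mark(&my_array, 7, XA_MARK_0) ? 0 : -EINVAL;
    }
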
|
D | plist.c |
  73   void plist_add(struct plist_node *node, struct plist_head *head)    in plist_add()    argument
  79   WARN_ON(!plist_node_empty(node));    in plist_add()
  80   WARN_ON(!list_empty(&node->prio_list));    in plist_add()
  88   if (node->prio < iter->prio) {    in plist_add()
  98   if (!prev || prev->prio != node->prio)    in plist_add()
  99   list_add_tail(&node->prio_list, &iter->prio_list);    in plist_add()
  101  list_add_tail(&node->node_list, node_next);    in plist_add()
  112  void plist_del(struct plist_node *node, struct plist_head *head)    in plist_del()    argument
  116  if (!list_empty(&node->prio_list)) {    in plist_del()
  117  if (node->node_list.next != &head->node_list) {    in plist_del()
  [all …]
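
plist keeps one node per priority on prio_list and every node on node_list, so plist_first() stays O(1). A minimal sketch, assuming <linux/plist.h> (priorities 5 and 10 are arbitrary; a lower value sorts earlier):

    #include <linux/plist.h>
    #include <linux/bug.h>

    /* Illustrative sketch, not part of lib/plist.c itself. */
    static void example_plist(void)
    {
        struct plist_head head;
        struct plist_node a, b;

        plist_head_init(&head);
        plist_node_init(&a, 10);
        plist_node_init(&b, 5);                 /* sorts ahead of a */

        plist_add(&a, &head);
        plist_add(&b, &head);
        WARN_ON(plist_first(&head) != &b);      /* lowest prio value first */

        plist_del(&a, &head);
        plist_del(&b, &head);
    }
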
|
D | rbtree_test.c |
  32  static void insert(struct test_node *node, struct rb_root_cached *root)    in insert()    argument
  35  u32 key = node->key;    in insert()
  45  rb_link_node(&node->rb, parent, new);    in insert()
  46  rb_insert_color(&node->rb, &root->rb_root);    in insert()
  49  static void insert_cached(struct test_node *node, struct rb_root_cached *root)    in insert_cached()    argument
  52  u32 key = node->key;    in insert_cached()
  65  rb_link_node(&node->rb, parent, new);    in insert_cached()
  66  rb_insert_color_cached(&node->rb, root, leftmost);    in insert_cached()
  69  static inline void erase(struct test_node *node, struct rb_root_cached *root)    in erase()    argument
  71  rb_erase(&node->rb, &root->rb_root);    in erase()
  [all …]
|
D | assoc_array.c |
  26   const struct assoc_array_node *node;    in assoc_array_subtree_iterate()    local
  40   node = assoc_array_ptr_to_node(cursor);    in assoc_array_subtree_iterate()
  52   ptr = READ_ONCE(node->slots[slot]); /* Address dependency. */    in assoc_array_subtree_iterate()
  79   node = assoc_array_ptr_to_node(cursor);    in assoc_array_subtree_iterate()
  81   ptr = READ_ONCE(node->slots[slot]); /* Address dependency. */    in assoc_array_subtree_iterate()
  90   parent = READ_ONCE(node->back_pointer); /* Address dependency. */    in assoc_array_subtree_iterate()
  91   slot = node->parent_slot;    in assoc_array_subtree_iterate()
  153  struct assoc_array_node *node; /* Node in which leaf might be found */    member
  176  struct assoc_array_node *node;    in assoc_array_walk()    local
  206  node = assoc_array_ptr_to_node(cursor);    in assoc_array_walk()
  [all …]
|
D | cpumask.c |
  113  bool alloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node)    in alloc_cpumask_var_node()    argument
  115  *mask = kmalloc_node(cpumask_size(), flags, node);    in alloc_cpumask_var_node()
  128  bool zalloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node)    in zalloc_cpumask_var_node()    argument
  130  return alloc_cpumask_var_node(mask, flags | __GFP_ZERO, node);    in zalloc_cpumask_var_node()
  206  unsigned int cpumask_local_spread(unsigned int i, int node)    in cpumask_local_spread()    argument
  213  if (node == NUMA_NO_NODE) {    in cpumask_local_spread()
  219  for_each_cpu_and(cpu, cpumask_of_node(node), cpu_online_mask)    in cpumask_local_spread()
  225  if (cpumask_test_cpu(cpu, cpumask_of_node(node)))    in cpumask_local_spread()
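
alloc_cpumask_var_node() places the (possibly off-stack) mask on a given NUMA node, and cpumask_local_spread() picks the i-th online CPU while preferring that node. A hedged sketch (the queue index and return convention are illustrative):

    #include <linux/cpumask.h>
    #include <linux/gfp.h>
    #include <linux/errno.h>

    /* Illustrative sketch, not part of lib/cpumask.c itself. */
    static int example_cpumask(int node, unsigned int queue)
    {
        cpumask_var_t mask;
        unsigned int cpu;

        /* With CONFIG_CPUMASK_OFFSTACK this kmallocs on @node. */
        if (!zalloc_cpumask_var_node(&mask, GFP_KERNEL, node))
            return -ENOMEM;

        /* Pick a CPU for this queue, preferring CPUs local to @node. */
        cpu = cpumask_local_spread(queue, node);
        cpumask_set_cpu(cpu, mask);

        free_cpumask_var(mask);
        return cpu;
    }
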
|
D | interval_tree.c |
  7  #define START(node) ((node)->start)    argument
  8  #define LAST(node) ((node)->last)    argument
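
lib/interval_tree.c just instantiates INTERVAL_TREE_DEFINE() with these START()/LAST() accessors for struct interval_tree_node. A query sketch, assuming <linux/interval_tree.h> (the [100, 199] and [150, 300] ranges are arbitrary; bounds are inclusive):

    #include <linux/interval_tree.h>
    #include <linux/rbtree.h>

    /* Illustrative sketch, not part of lib/interval_tree.c itself. */
    static void example_interval_tree(struct interval_tree_node *n)
    {
        struct rb_root_cached itree = RB_ROOT_CACHED;
        struct interval_tree_node *hit;

        n->start = 100;
        n->last = 199;                          /* inclusive end */
        interval_tree_insert(n, &itree);

        /* Visit every node overlapping [150, 300]. */
        for (hit = interval_tree_iter_first(&itree, 150, 300);
             hit;
             hit = interval_tree_iter_next(hit, 150, 300))
            ;   /* hit->start and hit->last overlap the query here */

        interval_tree_remove(n, &itree);
    }
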
|
D | debugobjects.c |
  149  obj = hlist_entry(obj_to_free.first, typeof(*obj), node);    in fill_pool()
  150  hlist_del(&obj->node);    in fill_pool()
  152  hlist_add_head(&obj->node, &obj_pool);    in fill_pool()
  175  hlist_add_head(&new[--cnt]->node, &obj_pool);    in fill_pool()
  191  hlist_for_each_entry(obj, &b->list, node) {    in lookup_object()
  210  obj = hlist_entry(list->first, typeof(*obj), node);    in __alloc_object()
  211  hlist_del(&obj->node);    in __alloc_object()
  254  hlist_add_head(&obj2->node,    in alloc_object()
  276  hlist_add_head(&obj->node, &b->list);    in alloc_object()
  309  obj = hlist_entry(obj_to_free.first, typeof(*obj), node);    in free_obj_work()
  [all …]
|
D | generic-radix-tree.c |
  81  struct genradix_node *node;    in genradix_alloc_node()    local
  83  node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);    in genradix_alloc_node()
  90  kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);    in genradix_alloc_node()
  91  return node;    in genradix_alloc_node()
  94  static inline void genradix_free_node(struct genradix_node *node)    in genradix_free_node()    argument
  96  kmemleak_free(node);    in genradix_free_node()
  97  free_page((unsigned long)node);    in genradix_free_node()
|
D | interval_tree_test.c |
  32  struct interval_tree_node *node;    in search()    local
  35  for (node = interval_tree_iter_first(root, start, last); node;    in search()
  36  node = interval_tree_iter_next(node, start, last))    in search()
|
D | objagg.c |
  737  struct objagg_tmp_node *node = &graph->nodes[index];    in objagg_tmp_graph_node_weight()    local
  738  unsigned int weight = node->objagg_obj->stats.user_count;    in objagg_tmp_graph_node_weight()
  748  node = &graph->nodes[j];    in objagg_tmp_graph_node_weight()
  749  if (node->crossed_out)    in objagg_tmp_graph_node_weight()
  751  weight += node->objagg_obj->stats.user_count;    in objagg_tmp_graph_node_weight()
  758  struct objagg_tmp_node *node;    in objagg_tmp_graph_node_max_weight()    local
  765  node = &graph->nodes[i];    in objagg_tmp_graph_node_max_weight()
  766  if (node->crossed_out)    in objagg_tmp_graph_node_max_weight()
  781  struct objagg_tmp_node *node;    in objagg_tmp_graph_create()    local
  804  node = &graph->nodes[i++];    in objagg_tmp_graph_create()
  [all …]
|
D | nodemask.c |
  6  int __next_node_in(int node, const nodemask_t *srcp)    in __next_node_in()    argument
  8  int ret = __next_node(node, srcp);    in __next_node_in()
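
__next_node_in() backs the next_node_in() helper, which wraps around to the start of the mask instead of stopping at MAX_NUMNODES. A tiny sketch, assuming <linux/nodemask.h>:

    #include <linux/nodemask.h>

    /* Illustrative sketch: round-robin to the next online node after @node. */
    static int example_next_node(int node)
    {
        return next_node_in(node, node_online_map);
    }
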
|
D | idr.c |
  292  struct radix_tree_node *node;    in idr_replace()    local
  298  entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot);    in idr_replace()
  302  __radix_tree_replace(&idr->idr_rt, node, slot, ptr);    in idr_replace()
  564  struct xa_node *node = xa_to_node(entry);    in ida_dump_entry()    local
  565  unsigned int shift = node->shift + IDA_CHUNK_SHIFT +    in ida_dump_entry()
  569  xa_dump_node(node);    in ida_dump_entry()
  571  ida_dump_entry(node->slots[i],    in ida_dump_entry()
  572  index | (i << node->shift));    in ida_dump_entry()
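
idr_replace() swaps the pointer stored at an existing ID without changing the allocation state. A minimal lifecycle sketch, assuming <linux/idr.h> (the payload pointers are illustrative):

    #include <linux/idr.h>
    #include <linux/gfp.h>
    #include <linux/bug.h>

    /* Illustrative sketch, not part of lib/idr.c itself. */
    static DEFINE_IDR(my_idr);

    static int example_idr(void *obj, void *new_obj)
    {
        int id = idr_alloc(&my_idr, obj, 0, 0, GFP_KERNEL);  /* any free ID >= 0 */

        if (id < 0)
            return id;

        WARN_ON(idr_find(&my_idr, id) != obj);
        idr_replace(&my_idr, new_obj, id);      /* returns the old pointer */
        idr_remove(&my_idr, id);
        return 0;
    }
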
|
D | test_xarray.c |
  302   struct xa_node *node;    in check_xa_shrink()    local
  316   node = xas.xa_node;    in check_xa_shrink()
  317   XA_BUG_ON(xa, xa_entry_locked(xa, node, 0) != xa_mk_value(0));    in check_xa_shrink()
  321   XA_BUG_ON(xa, xa_entry_locked(xa, node, 0) != XA_RETRY_ENTRY);    in check_xa_shrink()
  334   node = xa_head(xa);    in check_xa_shrink()
  339   XA_BUG_ON(xa, xa_head(xa) == node);    in check_xa_shrink()
  343   XA_BUG_ON(xa, xa->xa_head != node);    in check_xa_shrink()
  1484  static void test_update_node(struct xa_node *node)    in test_update_node()    argument
  1486  if (node->count && node->count == node->nr_values) {    in test_update_node()
  1487  if (list_empty(&node->private_list))    in test_update_node()
  [all …]
|
D | klist.c |
  175  struct klist_node *node;    member
  192  if (waiter->node != n)    in klist_release()
  242  waiter.node = n;    in klist_remove()
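
klist pairs a list with get/put callbacks and the waiter machinery shown above, so removal can block until concurrent iterators drop the node. A hedged sketch, assuming <linux/klist.h> (NULL get/put callbacks mean no reference counting; the list and node are illustrative):

    #include <linux/klist.h>

    /* Illustrative sketch, not part of lib/klist.c itself. */
    static struct klist my_klist;

    static void example_klist(struct klist_node *n)
    {
        struct klist_iter iter;
        struct klist_node *pos;

        klist_init(&my_klist, NULL, NULL);      /* no get()/put() hooks */
        klist_add_tail(n, &my_klist);

        klist_iter_init(&my_klist, &iter);
        while ((pos = klist_next(&iter)))
            ;   /* inspect each node */
        klist_iter_exit(&iter);

        klist_remove(n);        /* may sleep until iterators release @n */
    }
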
|
D | sbitmap.c |
  45   gfp_t flags, int node)    in sbitmap_init_node()    argument
  76   sb->map = kcalloc_node(sb->map_nr, sizeof(*sb->map), flags, node);    in sbitmap_init_node()
  370  int shift, bool round_robin, gfp_t flags, int node)    in sbitmap_queue_init_node()    argument
  375  ret = sbitmap_init_node(&sbq->sb, depth, shift, flags, node);    in sbitmap_queue_init_node()
  395  sbq->ws = kzalloc_node(SBQ_WAIT_QUEUES * sizeof(*sbq->ws), flags, node);    in sbitmap_queue_init_node()
|
D | test_rhashtable.c |
  64   struct rhash_head node;    member
  95   .head_offset = offsetof(struct test_obj, node),
  123  err = rhashtable_insert_fast(ht, &obj->node, params);    in insert_retry()
  254  rhashtable_remove_fast(ht, &obj->node, test_rht_params);    in test_rhashtable()
  670  err = rhashtable_remove_fast(&ht, &tdata->objs[i].node,    in threadfunc()
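
test_rhashtable.c exercises the resizable hash table with a struct rhash_head embedded in each object. A condensed sketch of the same pattern, assuming <linux/rhashtable.h> (struct my_obj and my_params are illustrative, not the test's own definitions):

    #include <linux/rhashtable.h>
    #include <linux/stddef.h>
    #include <linux/bug.h>

    struct my_obj {
        u32 key;
        struct rhash_head node;
    };

    static const struct rhashtable_params my_params = {
        .key_len = sizeof(u32),
        .key_offset = offsetof(struct my_obj, key),
        .head_offset = offsetof(struct my_obj, node),
    };

    /* Illustrative insert/lookup/remove round trip. */
    static int example_rht(struct rhashtable *ht, struct my_obj *obj)
    {
        int err = rhashtable_init(ht, &my_params);

        if (err)
            return err;

        err = rhashtable_insert_fast(ht, &obj->node, my_params);
        if (!err) {
            struct my_obj *found = rhashtable_lookup_fast(ht, &obj->key, my_params);

            WARN_ON(found != obj);
            rhashtable_remove_fast(ht, &obj->node, my_params);
        }

        rhashtable_destroy(ht);
        return err;
    }
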
|
D | devres.c |
  187  void __iomem *devm_of_iomap(struct device *dev, struct device_node *node, int index,    in devm_of_iomap()    argument
  192  if (of_address_to_resource(node, index, &res))    in devm_of_iomap()
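
devm_of_iomap() maps the node's index-th "reg" region and ties the mapping lifetime to the device. A minimal probe-time sketch (using index 0 and the device's own of_node; both choices are illustrative):

    #include <linux/device.h>
    #include <linux/io.h>
    #include <linux/of.h>
    #include <linux/of_address.h>
    #include <linux/err.h>

    /* Illustrative sketch, not part of lib/devres.c itself. */
    static int example_probe_map(struct device *dev)
    {
        resource_size_t size;
        void __iomem *base;

        /* Unmapped automatically when the device is unbound. */
        base = devm_of_iomap(dev, dev->of_node, 0, &size);
        if (IS_ERR(base))
            return PTR_ERR(base);

        return 0;
    }
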
|
/lib/842/ |
D | 842_compress.c |
  68   struct hlist_node node;    member
  74   struct hlist_node node;    member
  80   struct hlist_node node;    member
  115  for (_i = 0; _i < ARRAY_SIZE((p)->node##b); _i++) { \
  116  (p)->node##b[_i].index = _i; \
  117  (p)->node##b[_i].data = 0; \
  118  INIT_HLIST_NODE(&(p)->node##b[_i].node); \
  125  hash_for_each_possible(p->htable##b, _n, node, p->data##b[n]) { \
  140  struct sw842_hlist_node##b *_n = &(p)->node##b[(i)+(d)]; \
  141  hash_del(&_n->node); \
  [all …]
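
The hash-list nodes above let the 842 compressor remember recently seen 2-, 4- and 8-byte words for template matching; callers use the exported sw842_compress()/sw842_decompress() pair. A hedged sketch, assuming <linux/sw842.h> (buffer handling is illustrative; the workspace must be SW842_MEM_COMPRESS bytes):

    #include <linux/sw842.h>
    #include <linux/slab.h>
    #include <linux/errno.h>

    /* Illustrative sketch, not part of lib/842/842_compress.c itself.
     * On entry *olen is the output buffer size; on success it holds the
     * compressed length. */
    static int example_842(const u8 *in, unsigned int ilen, u8 *out, unsigned int *olen)
    {
        void *wmem = kmalloc(SW842_MEM_COMPRESS, GFP_KERNEL);
        int err;

        if (!wmem)
            return -ENOMEM;

        err = sw842_compress(in, ilen, out, olen, wmem);
        kfree(wmem);
        return err;
    }
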
|
/lib/zlib_deflate/ |
D | deftree.c |
  548  int node; /* new node being created */    in build_tree()    local
  571  node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);    in build_tree()
  572  tree[node].Freq = 1;    in build_tree()
  573  s->depth[node] = 0;    in build_tree()
  574  s->opt_len--; if (stree) s->static_len -= stree[node].Len;    in build_tree()
  587  node = elems; /* next internal node of the tree */    in build_tree()
  596  tree[node].Freq = tree[n].Freq + tree[m].Freq;    in build_tree()
  597  s->depth[node] = (uch) (max(s->depth[n], s->depth[m]) + 1);    in build_tree()
  598  tree[n].Dad = tree[m].Dad = (ush)node;    in build_tree()
  602  node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);    in build_tree()
  [all …]
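
build_tree() is the Huffman-tree construction step inside the deflate engine; kernel users drive it indirectly through the zlib_deflate*() wrappers. A hedged sketch of single-shot compression (workspace sizing, level and error mapping are illustrative):

    #include <linux/zlib.h>
    #include <linux/vmalloc.h>
    #include <linux/errno.h>

    /* Illustrative sketch, not part of lib/zlib_deflate/deftree.c itself. */
    static int example_zlib_deflate(u8 *src, unsigned int slen, u8 *dst, unsigned int dlen)
    {
        struct z_stream_s strm = { };
        int ret;

        strm.workspace = vzalloc(zlib_deflate_workspacesize(MAX_WBITS, MAX_MEM_LEVEL));
        if (!strm.workspace)
            return -ENOMEM;

        ret = zlib_deflateInit(&strm, Z_DEFAULT_COMPRESSION);
        if (ret == Z_OK) {
            strm.next_in = src;
            strm.avail_in = slen;
            strm.next_out = dst;
            strm.avail_out = dlen;

            ret = zlib_deflate(&strm, Z_FINISH);        /* single-shot compress */
            zlib_deflateEnd(&strm);
        }

        vfree(strm.workspace);
        return ret == Z_STREAM_END ? 0 : -EIO;
    }
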
|