/lib/ |
D | radix-tree.c |
    98 static inline void tag_set(struct radix_tree_node *node, unsigned int tag, in tag_set() argument
    101 __set_bit(offset, node->tags[tag]); in tag_set()
    104 static inline void tag_clear(struct radix_tree_node *node, unsigned int tag, in tag_clear() argument
    107 __clear_bit(offset, node->tags[tag]); in tag_clear()
    110 static inline int tag_get(const struct radix_tree_node *node, unsigned int tag, in tag_get() argument
    113 return test_bit(offset, node->tags[tag]); in tag_get()
    150 static inline int any_tag_set(const struct radix_tree_node *node, in any_tag_set() argument
    155 if (node->tags[tag][idx]) in any_tag_set()
    161 static inline void all_tag_set(struct radix_tree_node *node, unsigned int tag) in all_tag_set() argument
    163 bitmap_fill(node->tags[tag], RADIX_TREE_MAP_SIZE); in all_tag_set()
    [all …]
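The tag helpers listed above maintain the per-node tag bitmaps behind the radix tree's public tag API. A minimal sketch of how a caller could exercise that API (kernel context; MY_TAG and mark_entry() are illustrative names, not part of lib/radix-tree.c):

#include <linux/radix-tree.h>
#include <linux/gfp.h>
#include <linux/errno.h>

#define MY_TAG 0    /* illustrative tag index; must be < RADIX_TREE_MAX_TAGS */

static RADIX_TREE(my_tree, GFP_KERNEL);    /* declares and initialises the root */

static int mark_entry(unsigned long index, void *item)
{
    int err = radix_tree_insert(&my_tree, index, item);

    if (err)
        return err;

    /* Ends up in tag_set() above for the leaf node holding 'index'. */
    radix_tree_tag_set(&my_tree, index, MY_TAG);

    /* tag_get() and any_tag_set() back these two queries. */
    if (!radix_tree_tag_get(&my_tree, index, MY_TAG) ||
        !radix_tree_tagged(&my_tree, MY_TAG))
        return -EINVAL;
    return 0;
}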
|
D | bootconfig.c |
    65 int __init xbc_node_index(struct xbc_node *node) in xbc_node_index() argument
    67 return node - &xbc_nodes[0]; in xbc_node_index()
    77 struct xbc_node * __init xbc_node_get_parent(struct xbc_node *node) in xbc_node_get_parent() argument
    79 return node->parent == XBC_NODE_MAX ? NULL : &xbc_nodes[node->parent]; in xbc_node_get_parent()
    89 struct xbc_node * __init xbc_node_get_child(struct xbc_node *node) in xbc_node_get_child() argument
    91 return node->child ? &xbc_nodes[node->child] : NULL; in xbc_node_get_child()
    103 struct xbc_node * __init xbc_node_get_next(struct xbc_node *node) in xbc_node_get_next() argument
    105 return node->next ? &xbc_nodes[node->next] : NULL; in xbc_node_get_next()
    115 const char * __init xbc_node_get_data(struct xbc_node *node) in xbc_node_get_data() argument
    117 int offset = node->data & ~XBC_VALUE; in xbc_node_get_data()
    [all …]
|
D | btree.c |
    95 unsigned long *node; in btree_node_alloc() local
    97 node = mempool_alloc(head->mempool, gfp); in btree_node_alloc()
    98 if (likely(node)) in btree_node_alloc()
    99 memset(node, 0, NODESIZE); in btree_node_alloc()
    100 return node; in btree_node_alloc()
    148 static unsigned long *bkey(struct btree_geo *geo, unsigned long *node, int n) in bkey() argument
    150 return &node[n * geo->keylen]; in bkey()
    153 static void *bval(struct btree_geo *geo, unsigned long *node, int n) in bval() argument
    155 return (void *)node[geo->no_longs + n]; in bval()
    158 static void setkey(struct btree_geo *geo, unsigned long *node, int n, in setkey() argument
    [all …]
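For context, lib/btree.c is driven through the btree_head/btree_geo API from <linux/btree.h>. A hedged sketch of typical usage (assuming a 64-bit build so a u64 key can be passed as the unsigned long key array; demo_btree and demo_btree_usage() are illustrative):

#include <linux/btree.h>
#include <linux/types.h>
#include <linux/gfp.h>
#include <linux/errno.h>

static struct btree_head demo_btree;

static int demo_btree_usage(void *item)
{
    u64 key = 42;
    int err;

    err = btree_init(&demo_btree);    /* sets up the mempool used by btree_node_alloc() */
    if (err)
        return err;

    err = btree_insert(&demo_btree, &btree_geo64,
                       (unsigned long *)&key, item, GFP_KERNEL);
    if (err)
        goto out;

    /* bkey()/bval() above index into the node found by this lookup. */
    if (btree_lookup(&demo_btree, &btree_geo64, (unsigned long *)&key) != item)
        err = -EINVAL;
out:
    btree_destroy(&demo_btree);
    return err;
}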
|
D | rbtree.c |
    85 __rb_insert(struct rb_node *node, struct rb_root *root, in __rb_insert() argument
    88 struct rb_node *parent = rb_red_parent(node), *gparent, *tmp; in __rb_insert()
    100 rb_set_parent_color(node, NULL, RB_BLACK); in __rb_insert()
    133 node = gparent; in __rb_insert()
    134 parent = rb_parent(node); in __rb_insert()
    135 rb_set_parent_color(node, parent, RB_RED); in __rb_insert()
    140 if (node == tmp) { in __rb_insert()
    154 tmp = node->rb_left; in __rb_insert()
    156 WRITE_ONCE(node->rb_left, parent); in __rb_insert()
    160 rb_set_parent_color(parent, node, RB_RED); in __rb_insert()
    [all …]
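__rb_insert() is the rebalancing half of the usual insert pattern: the caller walks to the leaf position, links the node, and then lets rb_insert_color() recolour and rotate. A sketch of that caller side, following the standard rbtree usage convention (struct mytype and mytype_insert() are illustrative):

#include <linux/rbtree.h>
#include <linux/errno.h>

struct mytype {
    struct rb_node node;
    unsigned long key;
};

static int mytype_insert(struct rb_root *root, struct mytype *data)
{
    struct rb_node **new = &root->rb_node, *parent = NULL;

    /* Walk down to the leaf position for 'key'. */
    while (*new) {
        struct mytype *this = rb_entry(*new, struct mytype, node);

        parent = *new;
        if (data->key < this->key)
            new = &(*new)->rb_left;
        else if (data->key > this->key)
            new = &(*new)->rb_right;
        else
            return -EEXIST;    /* duplicate key */
    }

    /* Link the new node, then let __rb_insert() (via rb_insert_color())
     * rebalance and recolour the path as shown above. */
    rb_link_node(&data->node, parent, new);
    rb_insert_color(&data->node, root);
    return 0;
}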
|
D | timerqueue.c |
    18 rb_entry((_n), struct timerqueue_node, node)
    35 bool timerqueue_add(struct timerqueue_head *head, struct timerqueue_node *node) in timerqueue_add() argument
    38 WARN_ON_ONCE(!RB_EMPTY_NODE(&node->node)); in timerqueue_add()
    40 return rb_add_cached(&node->node, &head->rb_root, __timerqueue_less); in timerqueue_add()
    53 bool timerqueue_del(struct timerqueue_head *head, struct timerqueue_node *node) in timerqueue_del() argument
    55 WARN_ON_ONCE(RB_EMPTY_NODE(&node->node)); in timerqueue_del()
    57 rb_erase_cached(&node->node, &head->rb_root); in timerqueue_del()
    58 RB_CLEAR_NODE(&node->node); in timerqueue_del()
    73 struct timerqueue_node *timerqueue_iterate_next(struct timerqueue_node *node) in timerqueue_iterate_next() argument
    77 if (!node) in timerqueue_iterate_next()
    [all …]
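A small sketch of how a user of this API might exercise the add/del/getnext entry points above (demo_timerqueue() and the locals are illustrative; real users keep the nodes inside longer-lived structures):

#include <linux/timerqueue.h>
#include <linux/ktime.h>

static void demo_timerqueue(void)
{
    struct timerqueue_head head;
    struct timerqueue_node a, b, *next;

    timerqueue_init_head(&head);
    timerqueue_init(&a);
    timerqueue_init(&b);

    a.expires = ms_to_ktime(10);
    b.expires = ms_to_ktime(5);

    timerqueue_add(&head, &a);    /* returns true if 'a' became the new earliest */
    timerqueue_add(&head, &b);

    next = timerqueue_getnext(&head);    /* 'b', the smallest ->expires */
    timerqueue_del(&head, next);         /* returns true while entries remain */
}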
|
D | xarray.c |
    78 static inline unsigned long *node_marks(struct xa_node *node, xa_mark_t mark) in node_marks() argument
    80 return node->marks[(__force unsigned)mark]; in node_marks()
    83 static inline bool node_get_mark(struct xa_node *node, in node_get_mark() argument
    86 return test_bit(offset, node_marks(node, mark)); in node_get_mark()
    90 static inline bool node_set_mark(struct xa_node *node, unsigned int offset, in node_set_mark() argument
    93 return __test_and_set_bit(offset, node_marks(node, mark)); in node_set_mark()
    97 static inline bool node_clear_mark(struct xa_node *node, unsigned int offset, in node_clear_mark() argument
    100 return __test_and_clear_bit(offset, node_marks(node, mark)); in node_clear_mark()
    103 static inline bool node_any_mark(struct xa_node *node, xa_mark_t mark) in node_any_mark() argument
    105 return !bitmap_empty(node_marks(node, mark), XA_CHUNK_SIZE); in node_any_mark()
    [all …]
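These node_*_mark() helpers sit underneath the public xa_*_mark() calls. A hedged sketch of that public-facing side (my_xa and demo_xa_marks() are illustrative names):

#include <linux/xarray.h>
#include <linux/gfp.h>
#include <linux/errno.h>

static DEFINE_XARRAY(my_xa);

static int demo_xa_marks(void)
{
    int err;

    /* Store a value entry at index 5, then mark it.  xa_set_mark()
     * reaches node_set_mark() above for the leaf node. */
    err = xa_err(xa_store(&my_xa, 5, xa_mk_value(5), GFP_KERNEL));
    if (err)
        return err;

    xa_set_mark(&my_xa, 5, XA_MARK_0);

    if (!xa_get_mark(&my_xa, 5, XA_MARK_0))
        return -EINVAL;

    /* node_any_mark() backs this "is anything marked at all?" query. */
    return xa_marked(&my_xa, XA_MARK_0) ? 0 : -EINVAL;
}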
|
D | plist.c |
    73 void plist_add(struct plist_node *node, struct plist_head *head) in plist_add() argument
    79 WARN_ON(!plist_node_empty(node)); in plist_add()
    80 WARN_ON(!list_empty(&node->prio_list)); in plist_add()
    88 if (node->prio < iter->prio) { in plist_add()
    98 if (!prev || prev->prio != node->prio) in plist_add()
    99 list_add_tail(&node->prio_list, &iter->prio_list); in plist_add()
    101 list_add_tail(&node->node_list, node_next); in plist_add()
    112 void plist_del(struct plist_node *node, struct plist_head *head) in plist_del() argument
    116 if (!list_empty(&node->prio_list)) { in plist_del()
    117 if (node->node_list.next != &head->node_list) { in plist_del()
    [all …]
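plist_add()/plist_del() expect the caller to initialise nodes and supply locking. A sketch of a tiny priority queue built on them (all names and the spinlock are illustrative; smaller prio values sort toward the head):

#include <linux/plist.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(demo_lock);    /* plist heads need external serialisation */
static PLIST_HEAD(demo_head);

struct demo_req {
    struct plist_node node;
    int data;
};

static void demo_enqueue(struct demo_req *req, int prio)
{
    plist_node_init(&req->node, prio);

    spin_lock(&demo_lock);
    plist_add(&req->node, &demo_head);    /* insertion point found by the prio scan above */
    spin_unlock(&demo_lock);
}

static struct demo_req *demo_dequeue(void)
{
    struct demo_req *req = NULL;

    spin_lock(&demo_lock);
    if (!plist_head_empty(&demo_head)) {
        req = plist_first_entry(&demo_head, struct demo_req, node);
        plist_del(&req->node, &demo_head);
    }
    spin_unlock(&demo_lock);
    return req;
}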
|
D | rbtree_test.c |
    32 static void insert(struct test_node *node, struct rb_root_cached *root) in insert() argument
    35 u32 key = node->key; in insert()
    45 rb_link_node(&node->rb, parent, new); in insert()
    46 rb_insert_color(&node->rb, &root->rb_root); in insert()
    49 static void insert_cached(struct test_node *node, struct rb_root_cached *root) in insert_cached() argument
    52 u32 key = node->key; in insert_cached()
    65 rb_link_node(&node->rb, parent, new); in insert_cached()
    66 rb_insert_color_cached(&node->rb, root, leftmost); in insert_cached()
    69 static inline void erase(struct test_node *node, struct rb_root_cached *root) in erase() argument
    71 rb_erase(&node->rb, &root->rb_root); in erase()
    [all …]
|
D | assoc_array.c |
    26 const struct assoc_array_node *node; in assoc_array_subtree_iterate() local
    40 node = assoc_array_ptr_to_node(cursor); in assoc_array_subtree_iterate()
    52 ptr = READ_ONCE(node->slots[slot]); /* Address dependency. */ in assoc_array_subtree_iterate()
    79 node = assoc_array_ptr_to_node(cursor); in assoc_array_subtree_iterate()
    81 ptr = READ_ONCE(node->slots[slot]); /* Address dependency. */ in assoc_array_subtree_iterate()
    90 parent = READ_ONCE(node->back_pointer); /* Address dependency. */ in assoc_array_subtree_iterate()
    91 slot = node->parent_slot; in assoc_array_subtree_iterate()
    153 struct assoc_array_node *node; /* Node in which leaf might be found */ member
    176 struct assoc_array_node *node; in assoc_array_walk() local
    206 node = assoc_array_ptr_to_node(cursor); in assoc_array_walk()
    [all …]
|
D | interval_tree.c |
    7 #define START(node) ((node)->start) argument
    8 #define LAST(node) ((node)->last) argument
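START()/LAST() feed INTERVAL_TREE_DEFINE(), which generates the interval_tree_insert()/iter API declared in <linux/interval_tree.h>; both endpoints are inclusive. A sketch of a caller (count_overlaps() and add_range() are illustrative helpers, not part of the library):

#include <linux/interval_tree.h>
#include <linux/rbtree.h>

/* Count how many stored intervals overlap [start, last]. */
static unsigned int count_overlaps(struct rb_root_cached *root,
                                   unsigned long start, unsigned long last)
{
    struct interval_tree_node *node;
    unsigned int hits = 0;

    for (node = interval_tree_iter_first(root, start, last); node;
         node = interval_tree_iter_next(node, start, last))
        hits++;

    return hits;
}

/* Insertion side, for completeness: fill start/last, then insert. */
static void add_range(struct rb_root_cached *root,
                      struct interval_tree_node *node,
                      unsigned long start, unsigned long last)
{
    node->start = start;
    node->last = last;
    interval_tree_insert(node, root);
}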
|
D | cpumask.c |
    113 bool alloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node) in alloc_cpumask_var_node() argument
    115 *mask = kmalloc_node(cpumask_size(), flags, node); in alloc_cpumask_var_node()
    128 bool zalloc_cpumask_var_node(cpumask_var_t *mask, gfp_t flags, int node) in zalloc_cpumask_var_node() argument
    130 return alloc_cpumask_var_node(mask, flags | __GFP_ZERO, node); in zalloc_cpumask_var_node()
    206 unsigned int cpumask_local_spread(unsigned int i, int node) in cpumask_local_spread() argument
    213 if (node == NUMA_NO_NODE) { in cpumask_local_spread()
    219 for_each_cpu_and(cpu, cpumask_of_node(node), cpu_online_mask) in cpumask_local_spread()
    225 if (cpumask_test_cpu(cpu, cpumask_of_node(node))) in cpumask_local_spread()
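A brief sketch of how the NUMA-aware allocator and cpumask_local_spread() above are typically combined (pick_nearby_cpu() is an illustrative helper):

#include <linux/cpumask.h>
#include <linux/gfp.h>
#include <linux/errno.h>

static int pick_nearby_cpu(int node)
{
    cpumask_var_t mask;
    unsigned int cpu;

    /* Allocate the scratch mask on (or near) the requested NUMA node. */
    if (!zalloc_cpumask_var_node(&mask, GFP_KERNEL, node))
        return -ENOMEM;

    /* 0th online CPU preferring 'node', falling back to other nodes. */
    cpu = cpumask_local_spread(0, node);

    cpumask_set_cpu(cpu, mask);
    /* ... use 'mask' ... */
    free_cpumask_var(mask);
    return cpu;
}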
|
D | debugobjects.c |
    154 obj = hlist_entry(obj_to_free.first, typeof(*obj), node); in fill_pool()
    155 hlist_del(&obj->node); in fill_pool()
    157 hlist_add_head(&obj->node, &obj_pool); in fill_pool()
    180 hlist_add_head(&new[--cnt]->node, &obj_pool); in fill_pool()
    196 hlist_for_each_entry(obj, &b->list, node) { in lookup_object()
    215 obj = hlist_entry(list->first, typeof(*obj), node); in __alloc_object()
    216 hlist_del(&obj->node); in __alloc_object()
    255 hlist_add_head(&obj2->node, in alloc_object()
    277 hlist_add_head(&obj->node, &b->list); in alloc_object()
    310 obj = hlist_entry(obj_to_free.first, typeof(*obj), node); in free_obj_work()
    [all …]
|
D | generic-radix-tree.c |
    81 struct genradix_node *node; in genradix_alloc_node() local
    83 node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO); in genradix_alloc_node()
    90 kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask); in genradix_alloc_node()
    91 return node; in genradix_alloc_node()
    94 static inline void genradix_free_node(struct genradix_node *node) in genradix_free_node() argument
    96 kmemleak_free(node); in genradix_free_node()
    97 free_page((unsigned long)node); in genradix_free_node()
|
D | objagg.c |
    737 struct objagg_tmp_node *node = &graph->nodes[index]; in objagg_tmp_graph_node_weight() local
    738 unsigned int weight = node->objagg_obj->stats.user_count; in objagg_tmp_graph_node_weight()
    748 node = &graph->nodes[j]; in objagg_tmp_graph_node_weight()
    749 if (node->crossed_out) in objagg_tmp_graph_node_weight()
    751 weight += node->objagg_obj->stats.user_count; in objagg_tmp_graph_node_weight()
    758 struct objagg_tmp_node *node; in objagg_tmp_graph_node_max_weight() local
    765 node = &graph->nodes[i]; in objagg_tmp_graph_node_max_weight()
    766 if (node->crossed_out) in objagg_tmp_graph_node_max_weight()
    781 struct objagg_tmp_node *node; in objagg_tmp_graph_create() local
    804 node = &graph->nodes[i++]; in objagg_tmp_graph_create()
    [all …]
|
D | interval_tree_test.c |
    32 struct interval_tree_node *node; in search() local
    35 for (node = interval_tree_iter_first(root, start, last); node; in search()
    36 node = interval_tree_iter_next(node, start, last)) in search()
|
D | nodemask.c |
    6 unsigned int __next_node_in(int node, const nodemask_t *srcp) in __next_node_in() argument
    8 unsigned int ret = __next_node(node, srcp); in __next_node_in()
|
D | idr.c |
    292 struct radix_tree_node *node; in idr_replace() local
    298 entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot); in idr_replace()
    302 __radix_tree_replace(&idr->idr_rt, node, slot, ptr); in idr_replace()
    569 struct xa_node *node = xa_to_node(entry); in ida_dump_entry() local
    570 unsigned int shift = node->shift + IDA_CHUNK_SHIFT + in ida_dump_entry()
    574 xa_dump_node(node); in ida_dump_entry()
    576 ida_dump_entry(node->slots[i], in ida_dump_entry()
    577 index | (i << node->shift)); in ida_dump_entry()
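idr_replace() above is normally reached through the basic IDR lifecycle: allocate an ID, swap the pointer, free the ID. A hedged sketch (demo_idr and demo_idr_usage() are illustrative names):

#include <linux/idr.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/errno.h>

static DEFINE_IDR(demo_idr);

static int demo_idr_usage(void *obj, void *newobj)
{
    int id;

    id = idr_alloc(&demo_idr, obj, 0, 0, GFP_KERNEL);    /* 0,0 == any ID >= 0 */
    if (id < 0)
        return id;

    /* Swap the stored pointer without changing the ID; this is the
     * lookup-and-replace sequence shown above. */
    if (IS_ERR(idr_replace(&demo_idr, newobj, id)))
        return -ENOENT;

    idr_remove(&demo_idr, id);
    return 0;
}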
|
D | test_xarray.c |
    327 struct xa_node *node; in check_xa_shrink() local
    341 node = xas.xa_node; in check_xa_shrink()
    342 XA_BUG_ON(xa, xa_entry_locked(xa, node, 0) != xa_mk_value(0)); in check_xa_shrink()
    346 XA_BUG_ON(xa, xa_entry_locked(xa, node, 0) != XA_RETRY_ENTRY); in check_xa_shrink()
    359 node = xa_head(xa); in check_xa_shrink()
    364 XA_BUG_ON(xa, xa_head(xa) == node); in check_xa_shrink()
    368 XA_BUG_ON(xa, xa->xa_head != node); in check_xa_shrink()
    1652 static void test_update_node(struct xa_node *node) in test_update_node() argument
    1654 if (node->count && node->count == node->nr_values) { in test_update_node()
    1655 if (list_empty(&node->private_list)) in test_update_node()
    [all …]
|
D | klist.c |
    175 struct klist_node *node; member
    192 if (waiter->node != n) in klist_release()
    242 waiter.node = n; in klist_remove()
|
D | sbitmap.c |
    84 gfp_t flags, int node, bool round_robin, in sbitmap_init_node() argument
    114 sb->map = kcalloc_node(sb->map_nr, sizeof(*sb->map), flags, node); in sbitmap_init_node()
    430 int shift, bool round_robin, gfp_t flags, int node) in sbitmap_queue_init_node() argument
    435 ret = sbitmap_init_node(&sbq->sb, depth, shift, flags, node, in sbitmap_queue_init_node()
    445 sbq->ws = kzalloc_node(SBQ_WAIT_QUEUES * sizeof(*sbq->ws), flags, node); in sbitmap_queue_init_node()
|
/lib/842/ |
D | 842_compress.c |
    68 struct hlist_node node; member
    74 struct hlist_node node; member
    80 struct hlist_node node; member
    115 for (_i = 0; _i < ARRAY_SIZE((p)->node##b); _i++) { \
    116 (p)->node##b[_i].index = _i; \
    117 (p)->node##b[_i].data = 0; \
    118 INIT_HLIST_NODE(&(p)->node##b[_i].node); \
    125 hash_for_each_possible(p->htable##b, _n, node, p->data##b[n]) { \
    140 struct sw842_hlist_node##b *_n = &(p)->node##b[(i)+(d)]; \
    141 hash_del(&_n->node); \
    [all …]
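The sw842 hash-node structs and macros above follow the generic <linux/hashtable.h> pattern. The same pattern with illustrative names, written out without the ##b token-pasting macros:

#include <linux/hashtable.h>
#include <linux/types.h>

struct demo_entry {
    u64 data;                  /* the hashed payload */
    struct hlist_node node;    /* links the entry into one bucket */
};

static DEFINE_HASHTABLE(demo_table, 8);    /* 2^8 buckets */

static void demo_insert(struct demo_entry *e)
{
    hash_add(demo_table, &e->node, e->data);
}

static struct demo_entry *demo_find(u64 data)
{
    struct demo_entry *e;

    /* Walks only the bucket that 'data' hashes to, like the
     * hash_for_each_possible() loop above. */
    hash_for_each_possible(demo_table, e, node, data) {
        if (e->data == data)
            return e;
    }
    return NULL;
}

static void demo_remove(struct demo_entry *e)
{
    hash_del(&e->node);    /* same call as in the replace/invalidate macro above */
}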
|
/lib/kunit/ |
D | string-stream.c |
    45 list_del(&frag->node); in string_stream_fragment_free()
    98 list_add_tail(&frag_container->node, &stream->fragments); in string_stream_vadd()
    124 node) { in string_stream_clear()
    142 list_for_each_entry(frag_container, &stream->fragments, node) in string_stream_get_string()
|
D | string-stream.h |
    18 struct list_head node; member
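The string_stream fragments are chained through this list_head member using the standard <linux/list.h> idiom seen in string-stream.c above. A self-contained sketch of that idiom (all demo names are illustrative):

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/errno.h>

struct demo_fragment {
    struct list_head node;    /* links the fragment into the stream */
    char *text;
};

static LIST_HEAD(demo_fragments);

static int demo_add_fragment(const char *text, gfp_t gfp)
{
    struct demo_fragment *frag = kzalloc(sizeof(*frag), gfp);

    if (!frag)
        return -ENOMEM;
    frag->text = kstrdup(text, gfp);
    if (!frag->text) {
        kfree(frag);
        return -ENOMEM;
    }
    list_add_tail(&frag->node, &demo_fragments);    /* append, preserving order */
    return 0;
}

static void demo_clear_fragments(void)
{
    struct demo_fragment *frag, *tmp;

    /* _safe variant because each iteration frees the current entry. */
    list_for_each_entry_safe(frag, tmp, &demo_fragments, node) {
        list_del(&frag->node);
        kfree(frag->text);
        kfree(frag);
    }
}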
|
D | test.c |
    233 list_for_each_entry(fragment, &stream->fragments, node) { in kunit_print_string_stream()
    618 list_add_tail(&res->node, &test->resources); in kunit_add_resource()
    681 list_del(&res->node); in kunit_remove_resource()
    783 node); in kunit_cleanup()
|
/lib/zlib_deflate/ |
D | deftree.c |
    494 int node; /* new node being created */ in build_tree() local
    517 node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0); in build_tree()
    518 tree[node].Freq = 1; in build_tree()
    519 s->depth[node] = 0; in build_tree()
    520 s->opt_len--; if (stree) s->static_len -= stree[node].Len; in build_tree()
    533 node = elems; /* next internal node of the tree */ in build_tree()
    542 tree[node].Freq = tree[n].Freq + tree[m].Freq; in build_tree()
    543 s->depth[node] = (uch) (max(s->depth[n], s->depth[m]) + 1); in build_tree()
    544 tree[n].Dad = tree[m].Dad = (ush)node; in build_tree()
    548 node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); in build_tree()
    [all …]
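build_tree() implements the classic Huffman construction: repeatedly merge the two lowest-frequency nodes under a new internal node until one root remains. A simplified, standalone illustration of just that merge loop (plain array scan instead of deflate's pqremove()/pqdownheap() heap, so it is O(n^2) and only shows the shape of the algorithm; all names are illustrative):

#include <stdio.h>

#define ELEMS 6

struct tnode {
    unsigned freq;    /* Freq in deftree.c */
    int dad;          /* Dad in deftree.c: parent index, -1 for a root */
    int live;         /* still a candidate for merging? */
};

int main(void)
{
    /* leaves 0..ELEMS-1; internal nodes are appended after them */
    struct tnode tree[2 * ELEMS] = {
        { 5, -1, 1 }, { 9, -1, 1 }, { 12, -1, 1 },
        { 13, -1, 1 }, { 16, -1, 1 }, { 45, -1, 1 },
    };
    int node = ELEMS;    /* next internal node, as at line 533 above */
    int remaining = ELEMS;

    while (remaining > 1) {
        int n = -1, m = -1, i;

        /* pick the two smallest live frequencies (n = smallest, m = second) */
        for (i = 0; i < node; i++) {
            if (!tree[i].live)
                continue;
            if (n < 0 || tree[i].freq < tree[n].freq) {
                m = n;
                n = i;
            } else if (m < 0 || tree[i].freq < tree[m].freq) {
                m = i;
            }
        }

        /* create the parent, mirroring tree[node].Freq = ... and
         * tree[n].Dad = tree[m].Dad = node in deftree.c */
        tree[node].freq = tree[n].freq + tree[m].freq;
        tree[node].dad = -1;
        tree[node].live = 1;
        tree[n].dad = tree[m].dad = node;
        tree[n].live = tree[m].live = 0;

        printf("node %d: freq %u = %u + %u\n",
               node, tree[node].freq, tree[n].freq, tree[m].freq);
        node++;
        remaining--;
    }
    return 0;
}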
|