
Searched refs:index (Results 1 – 25 of 26) sorted by relevance


/lib/
cpu_rmap.c
53 rmap->near[cpu].index = cpu % size; in alloc_cpu_rmap()
102 rmap->near[cpu].index = rmap->near[neigh].index; in cpu_rmap_copy_neigh()
113 unsigned index; in debug_print_rmap() local
119 index = rmap->near[cpu].index; in debug_print_rmap()
121 cpu, index, rmap->near[cpu].dist); in debug_print_rmap()
140 u16 index; in cpu_rmap_add() local
143 index = rmap->used++; in cpu_rmap_add()
144 rmap->obj[index] = obj; in cpu_rmap_add()
145 return index; in cpu_rmap_add()
155 int cpu_rmap_update(struct cpu_rmap *rmap, u16 index, in cpu_rmap_update() argument
[all …]
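
The matches above show cpu_rmap storing, for every CPU, the index of its nearest object. A minimal usage sketch, assuming the <linux/cpu_rmap.h> interface (alloc_cpu_rmap/cpu_rmap_add/cpu_rmap_update/cpu_rmap_put); example_queue and example_rmap() are hypothetical names:

#include <linux/cpu_rmap.h>

static int example_queue;	/* hypothetical object to index by CPU */

static int example_rmap(void)
{
	struct cpu_rmap *rmap;
	int index;

	rmap = alloc_cpu_rmap(1, GFP_KERNEL);	/* room for one object */
	if (!rmap)
		return -ENOMEM;

	index = cpu_rmap_add(rmap, &example_queue);	/* slot index, here 0 */
	/* Declare which CPU the object is near; every other CPU's
	 * near[] entry is then re-pointed at its closest object. */
	cpu_rmap_update(rmap, index, cpumask_of(0));

	cpu_rmap_put(rmap);
	return 0;
}
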
test_xarray.c
32 static void *xa_mk_index(unsigned long index) in xa_mk_index() argument
34 return xa_mk_value(index & LONG_MAX); in xa_mk_index()
37 static void *xa_store_index(struct xarray *xa, unsigned long index, gfp_t gfp) in xa_store_index() argument
39 return xa_store(xa, index, xa_mk_index(index), gfp); in xa_store_index()
42 static void xa_insert_index(struct xarray *xa, unsigned long index) in xa_insert_index() argument
44 XA_BUG_ON(xa, xa_insert(xa, index, xa_mk_index(index), in xa_insert_index()
48 static void xa_alloc_index(struct xarray *xa, unsigned long index, gfp_t gfp) in xa_alloc_index() argument
52 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(index), xa_limit_32b, in xa_alloc_index()
54 XA_BUG_ON(xa, id != index); in xa_alloc_index()
57 static void xa_erase_index(struct xarray *xa, unsigned long index) in xa_erase_index() argument
[all …]
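
xa_mk_index() above wraps xa_mk_value(), which tags a small integer so it can be stored in an XArray directly instead of as a pointer; the & LONG_MAX mask keeps it in the encodable range. A minimal sketch of the round trip, assuming the standard <linux/xarray.h> helpers (example_xa is a hypothetical array):

#include <linux/printk.h>
#include <linux/xarray.h>

static DEFINE_XARRAY(example_xa);

/* Store a small integer directly, as a tagged "value entry". */
static void example_value_entry(unsigned long index)
{
	void *entry;

	xa_store(&example_xa, index, xa_mk_value(index & LONG_MAX), GFP_KERNEL);

	entry = xa_load(&example_xa, index);
	if (xa_is_value(entry))		/* distinguishes tagged ints from pointers */
		pr_info("stored %lu\n", xa_to_value(entry));
}
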
iommu-helper.c
14 unsigned long index; in iommu_area_alloc() local
19 index = bitmap_find_next_zero_area(map, size, start, nr, align_mask); in iommu_area_alloc()
20 if (index < size) { in iommu_area_alloc()
21 if (iommu_is_span_boundary(index, nr, shift, boundary_size)) { in iommu_area_alloc()
22 start = ALIGN(shift + index, boundary_size) - shift; in iommu_area_alloc()
25 bitmap_set(map, index, nr); in iommu_area_alloc()
26 return index; in iommu_area_alloc()
radix-tree.c
95 struct radix_tree_node **nodep, unsigned long index) in radix_tree_descend() argument
97 unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK; in radix_tree_descend()
214 return iter->index & RADIX_TREE_MAP_MASK; in iter_offset()
230 static unsigned long next_index(unsigned long index, in next_index() argument
234 return (index & ~node_maxindex(node)) + (offset << node->shift); in next_index()
418 unsigned long index, unsigned int shift) in radix_tree_extend() argument
426 while (index > shift_maxindex(maxshift)) in radix_tree_extend()
608 unsigned long index, struct radix_tree_node **nodep, in __radix_tree_create() argument
615 unsigned long max = index; in __radix_tree_create()
645 offset = radix_tree_descend(node, &child, index); in __radix_tree_create()
[all …]
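
radix_tree_descend() and __radix_tree_create() above walk and grow the tree one level at a time, extracting a RADIX_TREE_MAP_MASK-wide chunk of the index at each node. From the caller's side the API is much simpler; a minimal sketch, assuming the classic <linux/radix-tree.h> interface (example_tree is a hypothetical name):

#include <linux/radix-tree.h>

static RADIX_TREE(example_tree, GFP_KERNEL);	/* declared and initialized */

static void example_radix(void *item)
{
	unsigned long index = 1000;

	/* Insert, look up, and remove one entry; item must be non-NULL. */
	if (radix_tree_insert(&example_tree, index, item) == 0) {
		WARN_ON(radix_tree_lookup(&example_tree, index) != item);
		radix_tree_delete(&example_tree, index);
	}
}
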
sbitmap.c
15 static inline bool sbitmap_deferred_clear(struct sbitmap *sb, int index) in sbitmap_deferred_clear() argument
21 spin_lock_irqsave(&sb->map[index].swap_lock, flags); in sbitmap_deferred_clear()
23 if (!sb->map[index].cleared) in sbitmap_deferred_clear()
29 mask = xchg(&sb->map[index].cleared, 0); in sbitmap_deferred_clear()
35 val = sb->map[index].word; in sbitmap_deferred_clear()
36 } while (cmpxchg(&sb->map[index].word, val, val & ~mask) != val); in sbitmap_deferred_clear()
40 spin_unlock_irqrestore(&sb->map[index].swap_lock, flags); in sbitmap_deferred_clear()
139 static int sbitmap_find_bit_in_index(struct sbitmap *sb, int index, in sbitmap_find_bit_in_index() argument
145 nr = __sbitmap_get_word(&sb->map[index].word, in sbitmap_find_bit_in_index()
146 sb->map[index].depth, alloc_hint, in sbitmap_find_bit_in_index()
[all …]
test_strscpy.c
37 int index, i; in tc() local
92 index = chars + terminator + i; in tc()
93 if (buf[index] != '\0') { in tc()
101 index = sizeof(buf) - 1 - i; /* Check from the end back */ in tc()
102 if (buf[index] != POISON) { in tc()
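
The test above poisons the buffer and then checks, index by index, that strscpy() NUL-terminated the copy and wrote nothing past it. Ordinary usage for reference (a sketch; strscpy returns the copied length, or -E2BIG on truncation):

#include <linux/printk.h>
#include <linux/string.h>

static void example_strscpy(const char *src)
{
	char buf[16];
	ssize_t len;

	len = strscpy(buf, src, sizeof(buf));
	if (len == -E2BIG)
		pr_warn("'%s' truncated to fit %zu bytes\n", src, sizeof(buf));
	/* buf is always NUL-terminated here, truncated or not. */
}
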
xarray.c
142 static unsigned int get_offset(unsigned long index, struct xa_node *node) in get_offset() argument
144 return (index >> node->shift) & XA_CHUNK_MASK; in get_offset()
702 unsigned long index = xas->xa_index; in xas_create_range() local
716 if (xas->xa_index <= (index | XA_CHUNK_MASK)) in xas_create_range()
732 xas->xa_index = index; in xas_create_range()
735 xas->xa_index = index; in xas_create_range()
1294 void *xa_load(struct xarray *xa, unsigned long index) in xa_load() argument
1296 XA_STATE(xas, xa, index); in xa_load()
1332 void *__xa_erase(struct xarray *xa, unsigned long index) in __xa_erase() argument
1334 XA_STATE(xas, xa, index); in __xa_erase()
[all …]
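
xa_load() and __xa_erase() above are thin wrappers that build an XA_STATE cursor on the stack for the given index and descend from there; callers normally never touch XA_STATE directly. A sketch of ordinary lookup and iteration, assuming <linux/xarray.h>:

#include <linux/printk.h>
#include <linux/xarray.h>

static void example_xa_walk(struct xarray *xa)
{
	unsigned long index;
	void *entry;

	entry = xa_load(xa, 7);		/* NULL if nothing is stored at 7 */
	if (entry)
		pr_info("slot 7 occupied\n");

	/* Visit every present entry; index is set to each slot in turn. */
	xa_for_each(xa, index, entry)
		pr_info("index %lu -> %p\n", index, entry);

	xa_erase(xa, 7);		/* takes the lock, returns the old entry */
}
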
parman.c
101 return parman_prio_first_item(prio)->index; in parman_prio_first_index()
112 return parman_prio_last_item(prio)->index; in parman_prio_last_index()
130 parman->ops->move(parman->priv, item->index, to_index, count); in __parman_prio_move()
145 item->index = to_index; in parman_prio_shift_down()
160 item->index = to_index; in parman_prio_shift_up()
175 to_index = item->index; in parman_prio_item_remove()
179 last_item->index = to_index; in parman_prio_item_remove()
202 item->index = new_index; in parman_lsort_item_add()
test_vmalloc.c
387 int index, i, j; in test_func() local
407 index = random_array[i]; in test_func()
412 if (!((run_test_mask & (1 << index)) >> index)) in test_func()
417 if (!test_case_array[index].test_func()) in test_func()
418 per_cpu_test_data[t->cpu][index].test_passed++; in test_func()
420 per_cpu_test_data[t->cpu][index].test_failed++; in test_func()
429 per_cpu_test_data[t->cpu][index].time = delta; in test_func()
string.c
690 int index; in match_string() local
693 for (index = 0; index < n; index++) { in match_string()
694 item = array[index]; in match_string()
698 return index; in match_string()
717 int index; in __sysfs_match_string() local
719 for (index = 0; index < n; index++) { in __sysfs_match_string()
720 item = array[index]; in __sysfs_match_string()
724 return index; in __sysfs_match_string()
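
match_string() and __sysfs_match_string() above do a linear scan and return the matching index, which makes them handy for mapping a user-supplied keyword to an enum value. A small sketch (modes and example_parse_mode() are hypothetical):

#include <linux/kernel.h>
#include <linux/string.h>

static const char * const modes[] = { "off", "on", "auto" };

static int example_parse_mode(const char *arg)
{
	/* Returns 0, 1 or 2 on a match, -EINVAL otherwise. */
	return match_string(modes, ARRAY_SIZE(modes), arg);
}

The negative return is already errno-shaped, so it can be passed straight back from, say, a sysfs store() handler.
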
idr.c
50 *nextid = iter.index + base; in idr_alloc_u32()
204 unsigned long id = iter.index + base; in idr_for_each()
249 *nextid = iter.index + base; in idr_get_next_ul()
553 extern void xa_dump_index(unsigned long index, unsigned int shift);
556 static void ida_dump_entry(void *entry, unsigned long index) in ida_dump_entry() argument
568 xa_dump_index(index * IDA_BITMAP_BITS, shift); in ida_dump_entry()
572 index | (i << node->shift)); in ida_dump_entry()
574 xa_dump_index(index * IDA_BITMAP_BITS, ilog2(BITS_PER_LONG)); in ida_dump_entry()
579 xa_dump_index(index * IDA_BITMAP_BITS, IDA_CHUNK_SHIFT); in ida_dump_entry()
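
idr_alloc_u32() above reports the chosen ID back as iter.index + base, the IDR being an XArray underneath. Driver-side usage normally goes through idr_alloc(); a sketch assuming <linux/idr.h> (example_idr and example_register() are hypothetical):

#include <linux/idr.h>
#include <linux/printk.h>

static DEFINE_IDR(example_idr);

static int example_register(void *obj)
{
	int id;

	/* Allocate the lowest free ID in [1, 100) and bind it to obj. */
	id = idr_alloc(&example_idr, obj, 1, 100, GFP_KERNEL);
	if (id < 0)
		return id;	/* -ENOMEM, or -ENOSPC if the range is full */
	pr_info("registered as id %d\n", id);
	return 0;
}
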
sg_pool.c
42 unsigned int index; in sg_pool_index() local
47 index = 0; in sg_pool_index()
49 index = get_count_order(nents) - 3; in sg_pool_index()
51 return index; in sg_pool_index()
bitmap.c
332 unsigned long index, end, i; in bitmap_find_next_zero_area_off() local
334 index = find_next_zero_bit(map, size, start); in bitmap_find_next_zero_area_off()
337 index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset; in bitmap_find_next_zero_area_off()
339 end = index + nr; in bitmap_find_next_zero_area_off()
342 i = find_next_bit(map, end, index); in bitmap_find_next_zero_area_off()
347 return index; in bitmap_find_next_zero_area_off()
1035 int index; /* index first long of region in bitmap */ in __reg_op() local
1048 index = pos / BITS_PER_LONG; in __reg_op()
1049 offset = pos - (index * BITS_PER_LONG); in __reg_op()
1064 if (bitmap[index + i] & mask) in __reg_op()
[all …]
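
bitmap_find_next_zero_area_off() above is the workhorse behind bitmap_find_next_zero_area(): find a zero run, align its start, then re-check the run is still clear. Callers allocate regions like this (a sketch; align_mask of 3 means the returned index is a multiple of 4):

#include <linux/bitmap.h>

static void example_bitmap_alloc(unsigned long *map, unsigned long size)
{
	unsigned long index;

	/* Find 4 consecutive clear bits, 4-bit aligned. */
	index = bitmap_find_next_zero_area(map, size, 0, 4, 3);
	if (index >= size)
		return;			/* no room: index lands past the end */
	bitmap_set(map, index, 4);	/* claim the region */
}
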
genalloc.c
752 unsigned long index; in gen_pool_best_fit() local
754 index = bitmap_find_next_zero_area(map, size, start, nr, 0); in gen_pool_best_fit()
756 while (index < size) { in gen_pool_best_fit()
757 int next_bit = find_next_bit(map, size, index + nr); in gen_pool_best_fit()
758 if ((next_bit - index) < len) { in gen_pool_best_fit()
759 len = next_bit - index; in gen_pool_best_fit()
760 start_bit = index; in gen_pool_best_fit()
764 index = bitmap_find_next_zero_area(map, size, in gen_pool_best_fit()
872 const char *propname, int index) in of_gen_pool_get() argument
879 np_pool = of_parse_phandle(np, propname, index); in of_gen_pool_get()
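
gen_pool_best_fit() above scans for the smallest zero run that still fits, and is plugged in via gen_pool_set_algo(). A minimal pool sketch, assuming <linux/genalloc.h> (example_pool() is a hypothetical name; nid of -1 means any NUMA node):

#include <linux/genalloc.h>

static int example_pool(unsigned long base, size_t size)
{
	struct gen_pool *pool;
	unsigned long addr;

	pool = gen_pool_create(3, -1);	/* order 3: 8-byte granularity */
	if (!pool)
		return -ENOMEM;
	if (gen_pool_add(pool, base, size, -1)) {
		gen_pool_destroy(pool);
		return -ENOMEM;
	}

	gen_pool_set_algo(pool, gen_pool_best_fit, NULL);	/* not first-fit */

	addr = gen_pool_alloc(pool, 64);	/* 0 on failure */
	if (addr)
		gen_pool_free(pool, addr, 64);
	gen_pool_destroy(pool);
	return 0;
}
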
objagg.c
711 int parent_index, int index) in objagg_tmp_graph_edge_index() argument
713 return index * graph->nodes_count + parent_index; in objagg_tmp_graph_edge_index()
717 int parent_index, int index) in objagg_tmp_graph_edge_set() argument
719 int edge_index = objagg_tmp_graph_edge_index(graph, index, in objagg_tmp_graph_edge_set()
726 int parent_index, int index) in objagg_tmp_graph_is_edge() argument
728 int edge_index = objagg_tmp_graph_edge_index(graph, index, in objagg_tmp_graph_is_edge()
735 unsigned int index) in objagg_tmp_graph_node_weight() argument
737 struct objagg_tmp_node *node = &graph->nodes[index]; in objagg_tmp_graph_node_weight()
746 if (!objagg_tmp_graph_is_edge(graph, index, j)) in objagg_tmp_graph_node_weight()
848 int index; in objagg_opt_simple_greedy_fillup_hints() local
[all …]
btree.c
679 unsigned long *key, size_t index, in __btree_for_each() argument
703 size_t index, void *func2) in empty() argument
708 size_t index, void *__func) in visitorl() argument
712 func(elem, opaque, *key, index); in visitorl()
717 size_t index, void *__func) in visitor32() argument
722 func(elem, opaque, *key, index); in visitor32()
727 size_t index, void *__func) in visitor64() argument
732 func(elem, opaque, *key, index); in visitor64()
737 size_t index, void *__func) in visitor128() argument
742 func(elem, opaque, key[0], key[1], index); in visitor128()
[all …]
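
visitorl/visitor32/visitor64/visitor128 above adapt one generic in-order walker to each key geometry, handing the callback the element, its key, and its position (index). Basic use of the generic API might look as follows (a sketch under the assumption that <linux/btree.h> exports btree_geo32 for single-word keys; example_head is hypothetical):

#include <linux/btree.h>

static struct btree_head example_head;

static int example_btree(void *val)
{
	unsigned long key = 123;
	int err;

	err = btree_init(&example_head);	/* sets up the node mempool */
	if (err)
		return err;

	err = btree_insert(&example_head, &btree_geo32, &key, val, GFP_KERNEL);
	if (!err) {
		WARN_ON(btree_lookup(&example_head, &btree_geo32, &key) != val);
		btree_remove(&example_head, &btree_geo32, &key);
	}
	btree_destroy(&example_head);
	return err;
}
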
test_parman.c
286 test_parman->prio_array[item->parman_item.index] = item; in test_parman_run()
289 test_parman->prio_array[item->parman_item.index] = NULL; in test_parman_run()
336 if (item->parman_item.index != i) { in test_parman_check_array()
338 item->parman_item.index, i); in test_parman_check_array()
rhashtable.c
1171 unsigned int index = hash & ((1 << tbl->nest) - 1); in __rht_bucket_nested() local
1177 ntbl = rht_dereference_bucket_rcu(ntbl[index].table, tbl, hash); in __rht_bucket_nested()
1181 index = subhash & ((1 << shift) - 1); in __rht_bucket_nested()
1182 ntbl = rht_dereference_bucket_rcu(ntbl[index].table, in __rht_bucket_nested()
1212 unsigned int index = hash & ((1 << tbl->nest) - 1); in rht_bucket_nested_insert() local
1218 ntbl = nested_table_alloc(ht, &ntbl[index].table, in rht_bucket_nested_insert()
1222 index = hash & ((1 << shift) - 1); in rht_bucket_nested_insert()
1225 ntbl = nested_table_alloc(ht, &ntbl[index].table, in rht_bucket_nested_insert()
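
__rht_bucket_nested() above peels hash bits shift by shift to index into nested bucket tables; users never see that machinery, only the flat API. A minimal sketch, assuming <linux/rhashtable.h> (example_obj and example_params are hypothetical):

#include <linux/rhashtable.h>

struct example_obj {
	u32 key;
	struct rhash_head node;
};

static const struct rhashtable_params example_params = {
	.key_len     = sizeof(u32),
	.key_offset  = offsetof(struct example_obj, key),
	.head_offset = offsetof(struct example_obj, node),
};

static int example_rht(struct rhashtable *ht, struct example_obj *obj)
{
	int err = rhashtable_init(ht, &example_params);

	if (err)
		return err;
	err = rhashtable_insert_fast(ht, &obj->node, example_params);
	if (!err) {
		/* Lookup takes a pointer to the key, not to the object. */
		WARN_ON(rhashtable_lookup_fast(ht, &obj->key, example_params) != obj);
		rhashtable_remove_fast(ht, &obj->node, example_params);
	}
	rhashtable_destroy(ht);
	return err;
}
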
lru_cache.c
606 void lc_set(struct lru_cache *lc, unsigned int enr, int index) in lc_set() argument
611 if (index < 0 || index >= lc->nr_elements) in lc_set()
614 e = lc_element_by_index(lc, index); in lc_set()
devres.c
187 void __iomem *devm_of_iomap(struct device *dev, struct device_node *node, int index, in devm_of_iomap() argument
192 if (of_address_to_resource(node, index, &res)) in devm_of_iomap()
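
devm_of_iomap() above resolves the node's index-th "reg" entry via of_address_to_resource() and maps it with device-managed lifetime. The caller just checks the returned pointer (a sketch; headers assumed, exact declaring header may differ by kernel version):

#include <linux/io.h>
#include <linux/of_address.h>

static int example_map(struct device *dev, struct device_node *node)
{
	void __iomem *base;

	base = devm_of_iomap(dev, node, 0, NULL);	/* first reg entry */
	if (IS_ERR(base))
		return PTR_ERR(base);
	/* base is unmapped automatically when dev is detached. */
	return 0;
}
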
ubsan.c
339 void __ubsan_handle_out_of_bounds(struct out_of_bounds_data *data, void *index) in __ubsan_handle_out_of_bounds() argument
348 val_to_string(index_str, sizeof(index_str), data->index_type, index); in __ubsan_handle_out_of_bounds()
/lib/xz/
xz_dec_stream.c
116 } index; member
284 s->index.size += in_used; in index_update()
307 switch (s->index.sequence) { in dec_index()
309 s->index.count = s->vli; in dec_index()
316 if (s->index.count != s->block.count) in dec_index()
319 s->index.sequence = SEQ_INDEX_UNPADDED; in dec_index()
323 s->index.hash.unpadded += s->vli; in dec_index()
324 s->index.sequence = SEQ_INDEX_UNCOMPRESSED; in dec_index()
328 s->index.hash.uncompressed += s->vli; in dec_index()
329 s->index.hash.crc32 = xz_crc32( in dec_index()
[all …]
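
dec_index() above re-derives the block count, unpadded size, and uncompressed size from the stream's index field and checks them against what was actually decoded. Consumers drive the whole decoder through xz_dec_run(); a sketch assuming the single-call mode of the <linux/xz.h> API (example_unxz() is a hypothetical name):

#include <linux/xz.h>

static int example_unxz(const u8 *in, size_t in_size, u8 *out, size_t out_size)
{
	struct xz_dec *s;
	struct xz_buf b = {
		.in = in, .in_size = in_size,	/* in_pos/out_pos start at 0 */
		.out = out, .out_size = out_size,
	};
	enum xz_ret ret;

	s = xz_dec_init(XZ_SINGLE, 0);	/* whole input present at once */
	if (!s)
		return -ENOMEM;
	ret = xz_dec_run(s, &b);	/* XZ_STREAM_END means success */
	xz_dec_end(s);
	return ret == XZ_STREAM_END ? 0 : -EINVAL;
}
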
/lib/842/
842_compress.c
70 u8 index; member
76 u16 index; member
82 u8 index; member
116 (p)->node##b[_i].index = _i; \
124 p->index##b[n] = INDEX_NOT_FOUND; \
127 p->index##b[n] = _n->index; \
131 p->index##b[n] >= 0; \
135 ((p)->index##b[n] == INDEX_NOT_CHECKED \
137 : (p)->index##b[n] >= 0)
144 (unsigned int)_n->index, \
842_decompress.c
165 u64 index, offset, total = round_down(p->out - p->ostart, 8); in __do_index() local
168 ret = next_bits(p, &index, bits); in __do_index()
172 offset = index * size; in __do_index()
200 size, (unsigned long)index, in __do_index()
201 (unsigned long)(index * size), (unsigned long)offset, in __do_index()
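
__do_index() above turns a short back-reference into a byte offset (index * size) into the already-produced output. From the outside the library is a pair of calls; a round-trip sketch assuming <linux/sw842.h> (buffer sizing here is a deliberately generous assumption, not a guaranteed bound):

#include <linux/slab.h>
#include <linux/sw842.h>

static int example_842(const u8 *in, unsigned int ilen, u8 *tmp, u8 *out)
{
	unsigned int clen = ilen * 2, dlen = ilen;	/* generous bounds */
	void *wmem = kmalloc(SW842_MEM_COMPRESS, GFP_KERNEL);
	int ret;

	if (!wmem)
		return -ENOMEM;
	/* wmem is the required scratch space for compression. */
	ret = sw842_compress(in, ilen, tmp, &clen, wmem);
	if (!ret)
		ret = sw842_decompress(tmp, clen, out, &dlen);
	kfree(wmem);
	return ret;
}
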
/lib/crypto/
sha256.c
250 unsigned int index, pad_len; in __sha256_final() local
258 index = sctx->count & 0x3f; in __sha256_final()
259 pad_len = (index < 56) ? (56 - index) : ((64+56) - index); in __sha256_final()
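
__sha256_final() above computes the classic Merkle-Damgård padding: index is the byte position within the current 64-byte block, and the message is padded out to 56 mod 64 so the 8-byte bit-length field fills the block (spilling into a second block when index >= 56, hence the 64+56 case). Callers use the lib/crypto helpers (a sketch; names as declared in <crypto/sha.h> of this era):

#include <crypto/sha.h>

static void example_sha256(const u8 *data, unsigned int len, u8 digest[32])
{
	struct sha256_state sctx;

	sha256_init(&sctx);		/* set IV, zero the byte count */
	sha256_update(&sctx, data, len);
	sha256_final(&sctx, digest);	/* pads, appends bit length, finishes */
}
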
