
Searched refs:index (Results 1 – 25 of 32) sorted by relevance


/lib/
cpu_rmap.c
53 rmap->near[cpu].index = cpu % size; in alloc_cpu_rmap()
102 rmap->near[cpu].index = rmap->near[neigh].index; in cpu_rmap_copy_neigh()
113 unsigned index; in debug_print_rmap() local
119 index = rmap->near[cpu].index; in debug_print_rmap()
121 cpu, index, rmap->near[cpu].dist); in debug_print_rmap()
140 u16 index; in cpu_rmap_add() local
143 index = rmap->used++; in cpu_rmap_add()
144 rmap->obj[index] = obj; in cpu_rmap_add()
145 return index; in cpu_rmap_add()
155 int cpu_rmap_update(struct cpu_rmap *rmap, u16 index, in cpu_rmap_update() argument
[all …]
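
The cpu_rmap hits above are the CPU affinity reverse-map library: cpu_rmap_add() stores an object at the next free index (the rmap->used++ step), and cpu_rmap_update() records which CPUs that object is near. A minimal usage sketch, assuming a kernel-module context; the two queue pointers are placeholders for illustration:

    #include <linux/cpu_rmap.h>

    static int rmap_example(void *queue0, void *queue1)
    {
            struct cpu_rmap *rmap;
            int idx0, idx1;

            rmap = alloc_cpu_rmap(2, GFP_KERNEL);   /* room for two objects */
            if (!rmap)
                    return -ENOMEM;

            /* cpu_rmap_add() returns the index the object was stored at. */
            idx0 = cpu_rmap_add(rmap, queue0);
            idx1 = cpu_rmap_add(rmap, queue1);

            /* Record each object's CPU affinity so lookups can pick the
             * nearest object for any CPU. */
            cpu_rmap_update(rmap, idx0, cpumask_of(0));
            cpu_rmap_update(rmap, idx1, cpumask_of(1));

            cpu_rmap_put(rmap);
            return 0;
    }
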
test_xarray.c
35 static void *xa_mk_index(unsigned long index) in xa_mk_index() argument
37 return xa_mk_value(index & LONG_MAX); in xa_mk_index()
40 static void *xa_store_index(struct xarray *xa, unsigned long index, gfp_t gfp) in xa_store_index() argument
42 return xa_store(xa, index, xa_mk_index(index), gfp); in xa_store_index()
45 static void xa_insert_index(struct xarray *xa, unsigned long index) in xa_insert_index() argument
47 XA_BUG_ON(xa, xa_insert(xa, index, xa_mk_index(index), in xa_insert_index()
51 static void xa_alloc_index(struct xarray *xa, unsigned long index, gfp_t gfp) in xa_alloc_index() argument
55 XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(index), xa_limit_32b, in xa_alloc_index()
57 XA_BUG_ON(xa, id != index); in xa_alloc_index()
60 static void xa_erase_index(struct xarray *xa, unsigned long index) in xa_erase_index() argument
[all …]
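
test_xarray.c wraps the XArray API so every stored entry encodes its own index: xa_mk_index() tags the index as a value entry, and xa_store_index()/xa_erase_index() check the round-trip. A minimal sketch of the same pattern, assuming a kernel context:

    #include <linux/xarray.h>

    static DEFINE_XARRAY(example_xa);

    static int xarray_example(void)
    {
            unsigned long index = 42;
            int err;

            /* Store the index itself as a tagged value entry, as
             * xa_store_index() does above. */
            err = xa_err(xa_store(&example_xa, index,
                                  xa_mk_value(index & LONG_MAX), GFP_KERNEL));
            if (err)
                    return err;

            /* Load it back and recover the integer with xa_to_value(). */
            WARN_ON(xa_to_value(xa_load(&example_xa, index)) != index);

            xa_erase(&example_xa, index);
            return 0;
    }
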
iommu-helper.c
14 unsigned long index; in iommu_area_alloc() local
19 index = bitmap_find_next_zero_area(map, size, start, nr, align_mask); in iommu_area_alloc()
20 if (index < size) { in iommu_area_alloc()
21 if (iommu_is_span_boundary(index, nr, shift, boundary_size)) { in iommu_area_alloc()
22 start = ALIGN(shift + index, boundary_size) - shift; in iommu_area_alloc()
25 bitmap_set(map, index, nr); in iommu_area_alloc()
26 return index; in iommu_area_alloc()
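
iommu_area_alloc() layers a span-boundary check on top of the bitmap search: when a candidate range would cross a boundary, it realigns the search start with start = ALIGN(shift + index, boundary_size) - shift and retries. The arithmetic worked through as a standalone sketch (all values hypothetical):

    #include <stdio.h>

    #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
            unsigned long index = 60, shift = 0, boundary_size = 64;

            /* A range found at slot 60 would cross the 64-slot boundary,
             * so the next search starts at the boundary itself. */
            unsigned long start = ALIGN_UP(shift + index, boundary_size) - shift;

            printf("retry from %lu\n", start);      /* 64 */
            return 0;
    }
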
radix-tree.c
84 struct radix_tree_node **nodep, unsigned long index) in radix_tree_descend() argument
86 unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK; in radix_tree_descend()
203 return iter->index & RADIX_TREE_MAP_MASK; in iter_offset()
219 static unsigned long next_index(unsigned long index, in next_index() argument
223 return (index & ~node_maxindex(node)) + (offset << node->shift); in next_index()
407 unsigned long index, unsigned int shift) in radix_tree_extend() argument
415 while (index > shift_maxindex(maxshift)) in radix_tree_extend()
597 unsigned long index, struct radix_tree_node **nodep, in __radix_tree_create() argument
604 unsigned long max = index; in __radix_tree_create()
634 offset = radix_tree_descend(node, &child, index); in __radix_tree_create()
[all …]
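
radix_tree_descend() peels one level's slot number off the index: each level consumes RADIX_TREE_MAP_SHIFT bits, so offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK. A standalone sketch of the decomposition, assuming the default 6-bit fanout:

    #include <stdio.h>

    #define MAP_SHIFT 6     /* default RADIX_TREE_MAP_SHIFT */
    #define MAP_MASK  ((1UL << MAP_SHIFT) - 1)

    int main(void)
    {
            unsigned long index = 0x12345;
            unsigned int shift;

            /* Walk a three-level tree from the root down, printing the
             * slot offset the descend step would use at each level. */
            for (shift = 2 * MAP_SHIFT; ; shift -= MAP_SHIFT) {
                    printf("shift %2u -> offset %lu\n",
                           shift, (index >> shift) & MAP_MASK);
                    if (shift == 0)
                            break;
            }
            return 0;
    }
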
test_maple_tree.c
35 unsigned long index, gfp_t gfp) in mtree_insert_index() argument
37 return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp); in mtree_insert_index()
40 static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index) in mtree_erase_index() argument
42 MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX)); in mtree_erase_index()
43 MT_BUG_ON(mt, mtree_load(mt, index) != NULL); in mtree_erase_index()
46 static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index, in mtree_test_insert() argument
49 return mtree_insert(mt, index, ptr, GFP_KERNEL); in mtree_test_insert()
70 static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index) in mtree_test_load() argument
72 return mtree_load(mt, index); in mtree_test_load()
75 static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index) in mtree_test_erase() argument
[all …]
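
The maple tree test helpers mirror the XArray ones: mtree_insert_index() stores xa_mk_value(index) at index, and mtree_erase_index() checks that erase returns it and a reload finds nothing. A minimal sketch of the underlying API, assuming a kernel context:

    #include <linux/maple_tree.h>
    #include <linux/xarray.h>       /* xa_mk_value() */

    static DEFINE_MTREE(example_mt);

    static int mtree_example(void)
    {
            unsigned long index = 7;
            int err;

            err = mtree_insert(&example_mt, index,
                               xa_mk_value(index & LONG_MAX), GFP_KERNEL);
            if (err)
                    return err;

            WARN_ON(mtree_load(&example_mt, index) != xa_mk_value(index));
            WARN_ON(mtree_erase(&example_mt, index) != xa_mk_value(index));
            return 0;
    }
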
sbitmap.c
170 static int sbitmap_find_bit_in_index(struct sbitmap *sb, int index, in sbitmap_find_bit_in_index() argument
173 struct sbitmap_word *map = &sb->map[index]; in sbitmap_find_bit_in_index()
177 nr = __sbitmap_get_word(&map->word, __map_depth(sb, index), in sbitmap_find_bit_in_index()
190 unsigned int i, index; in __sbitmap_get() local
193 index = SB_NR_TO_INDEX(sb, alloc_hint); in __sbitmap_get()
206 nr = sbitmap_find_bit_in_index(sb, index, alloc_hint); in __sbitmap_get()
208 nr += index << sb->shift; in __sbitmap_get()
214 if (++index >= sb->map_nr) in __sbitmap_get()
215 index = 0; in __sbitmap_get()
242 unsigned int i, index; in __sbitmap_get_shallow() local
[all …]
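
__sbitmap_get() searches one word at a time, then converts the per-word bit back to a global bit number with nr += index << sb->shift, wrapping index back to 0 when it runs off the end of the map. The mapping as a standalone sketch (values hypothetical):

    #include <stdio.h>

    int main(void)
    {
            unsigned int shift = 6; /* 64 bits per map word */
            unsigned int index = 3; /* word where a free bit was found */
            unsigned int nr = 17;   /* bit number within that word */

            /* Global bit = word index * bits-per-word + bit-in-word,
             * the nr += index << sb->shift step above. */
            printf("global bit = %u\n", nr + (index << shift));     /* 209 */
            return 0;
    }
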
maple_tree.c
1428 if (mas->index > 0) in mas_start()
2147 if (piv + 1 < mas->index) { in mas_store_b_node()
2151 b_node->gap[b_end] = mas->index - 1 - piv; in mas_store_b_node()
2152 b_node->pivot[b_end++] = mas->index - 1; in mas_store_b_node()
2223 MA_STATE(parent, mas->tree, mas->index, mas->last); in mas_next_sibling()
2266 unsigned long index, min, max; in mas_wr_node_walk() local
2269 wr_mas->r_max = wr_mas->r_min = mas->index; in mas_wr_node_walk()
2270 mas->offset = mas->index = mas->min; in mas_wr_node_walk()
2284 index = mas->index; in mas_wr_node_walk()
2285 if (unlikely(index <= max)) in mas_wr_node_walk()
[all …]
test_strscpy.c
37 int index, i; in tc() local
92 index = chars + terminator + i; in tc()
93 if (buf[index] != '\0') { in tc()
101 index = sizeof(buf) - 1 - i; /* Check from the end back */ in tc()
102 if (buf[index] != POISON) { in tc()
parman.c
100 return parman_prio_first_item(prio)->index; in parman_prio_first_index()
111 return parman_prio_last_item(prio)->index; in parman_prio_last_index()
129 parman->ops->move(parman->priv, item->index, to_index, count); in __parman_prio_move()
144 item->index = to_index; in parman_prio_shift_down()
159 item->index = to_index; in parman_prio_shift_up()
174 to_index = item->index; in parman_prio_item_remove()
178 last_item->index = to_index; in parman_prio_item_remove()
201 item->index = new_index; in parman_lsort_item_add()
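
parman packs items of all priorities into one contiguous array, so structural changes move an item and rewrite its index field; removal (parman_prio_item_remove() above) fills the hole with the region's last item. A standalone model of that step:

    #include <stdio.h>

    struct item { int index; };

    int main(void)
    {
            struct item a = { 0 }, b = { 1 }, c = { 2 };
            struct item *array[3] = { &a, &b, &c };

            /* Remove b: the region's last item fills the hole and its
             * index field is updated to track its new slot. */
            int to_index = b.index;
            array[to_index] = &c;
            c.index = to_index;

            printf("c now at index %d\n", c.index); /* 1 */
            return 0;
    }
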
xarray.c
142 static unsigned int get_offset(unsigned long index, struct xa_node *node) in get_offset() argument
144 return (index >> node->shift) & XA_CHUNK_MASK; in get_offset()
708 unsigned long index = xas->xa_index; in xas_create_range() local
722 if (xas->xa_index <= (index | XA_CHUNK_MASK)) in xas_create_range()
740 xas->xa_index = index; in xas_create_range()
743 xas->xa_index = index; in xas_create_range()
1456 void *xa_load(struct xarray *xa, unsigned long index) in xa_load() argument
1458 XA_STATE(xas, xa, index); in xa_load()
1494 void *__xa_erase(struct xarray *xa, unsigned long index) in __xa_erase() argument
1496 XA_STATE(xas, xa, index); in __xa_erase()
[all …]
string_helpers.c
931 int index; in match_string() local
934 for (index = 0; index < n; index++) { in match_string()
935 item = array[index]; in match_string()
939 return index; in match_string()
966 int index; in __sysfs_match_string() local
968 for (index = 0; index < n; index++) { in __sysfs_match_string()
969 item = array[index]; in __sysfs_match_string()
973 return index; in __sysfs_match_string()
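
match_string() and __sysfs_match_string() are linear scans returning the matching array index, so callers can map a user-supplied string straight to an enum value. Typical use, a minimal sketch assuming a kernel context:

    #include <linux/kernel.h>
    #include <linux/string.h>

    static const char * const modes[] = { "off", "auto", "on" };

    static int parse_mode(const char *arg)
    {
            /* Returns 0, 1 or 2 on a match, -EINVAL otherwise. */
            return match_string(modes, ARRAY_SIZE(modes), arg);
    }
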
test_vmalloc.c
405 int index, i, j; in test_func() local
422 index = random_array[i]; in test_func()
427 if (!((run_test_mask & (1 << index)) >> index)) in test_func()
432 if (!test_case_array[index].test_func()) in test_func()
433 t->data[index].test_passed++; in test_func()
435 t->data[index].test_failed++; in test_func()
444 t->data[index].time = delta; in test_func()
idr.c
50 *nextid = iter.index + base; in idr_alloc_u32()
204 unsigned long id = iter.index + base; in idr_for_each()
249 *nextid = iter.index + base; in idr_get_next_ul()
558 extern void xa_dump_index(unsigned long index, unsigned int shift);
561 static void ida_dump_entry(void *entry, unsigned long index) in ida_dump_entry() argument
573 xa_dump_index(index * IDA_BITMAP_BITS, shift); in ida_dump_entry()
577 index | (i << node->shift)); in ida_dump_entry()
579 xa_dump_index(index * IDA_BITMAP_BITS, ilog2(BITS_PER_LONG)); in ida_dump_entry()
584 xa_dump_index(index * IDA_BITMAP_BITS, IDA_CHUNK_SHIFT); in ida_dump_entry()
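
Every idr.c hit adds a base offset back onto the internal XArray index (id = iter.index + base), which is what lets an IDR hand out IDs starting above zero. A minimal sketch, assuming a kernel context:

    #include <linux/idr.h>

    static struct idr example_idr;

    static int idr_example(void *ptr)
    {
            idr_init_base(&example_idr, 1); /* IDs start at 1 */

            /* Internally the entry is stored at id - base; idr.c adds
             * the base back when reporting IDs, as in the hits above. */
            return idr_alloc(&example_idr, ptr, 1, 0, GFP_KERNEL);
    }
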
sg_pool.c
42 unsigned int index; in sg_pool_index() local
47 index = 0; in sg_pool_index()
49 index = get_count_order(nents) - 3; in sg_pool_index()
51 return index; in sg_pool_index()
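
sg_pool_index() buckets a scatterlist entry count into one of a few pools: counts up to 8 share pool 0, then each power-of-two band gets the next pool via get_count_order(nents) - 3. A standalone model with worked values (get_count_order() is ceil(log2(n))):

    #include <stdio.h>

    static unsigned int pool_index(unsigned int nents)
    {
            if (nents <= 8)
                    return 0;
            /* 32 - clz(n - 1) == ceil(log2(n)) for n >= 2. */
            return (32 - __builtin_clz(nents - 1)) - 3;
    }

    int main(void)
    {
            printf("%u %u %u\n",
                   pool_index(8),       /* 0 */
                   pool_index(9),       /* 1 */
                   pool_index(128));    /* 4 */
            return 0;
    }
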
genalloc.c
760 unsigned long index; in gen_pool_best_fit() local
762 index = bitmap_find_next_zero_area(map, size, start, nr, 0); in gen_pool_best_fit()
764 while (index < size) { in gen_pool_best_fit()
765 unsigned long next_bit = find_next_bit(map, size, index + nr); in gen_pool_best_fit()
766 if ((next_bit - index) < len) { in gen_pool_best_fit()
767 len = next_bit - index; in gen_pool_best_fit()
768 start_bit = index; in gen_pool_best_fit()
772 index = bitmap_find_next_zero_area(map, size, in gen_pool_best_fit()
880 const char *propname, int index) in of_gen_pool_get() argument
887 np_pool = of_parse_phandle(np, propname, index); in of_gen_pool_get()
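
gen_pool_best_fit() scans every free run and keeps the smallest one that still fits, trading search time for less fragmentation than the default first-fit. Selecting it on a pool, a minimal sketch assuming a kernel context (the backing address and sizes are hypothetical):

    #include <linux/genalloc.h>
    #include <linux/sizes.h>

    static int genpool_example(unsigned long backing_addr)
    {
            struct gen_pool *pool;
            unsigned long chunk;

            pool = gen_pool_create(4, -1);  /* 16-byte granularity */
            if (!pool)
                    return -ENOMEM;
            if (gen_pool_add(pool, backing_addr, SZ_4K, -1)) {
                    gen_pool_destroy(pool);
                    return -ENOMEM;
            }

            /* Switch the allocation algorithm to best-fit. */
            gen_pool_set_algo(pool, gen_pool_best_fit, NULL);

            chunk = gen_pool_alloc(pool, 64);
            if (chunk)
                    gen_pool_free(pool, chunk, 64);
            gen_pool_destroy(pool);
            return 0;
    }
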
bitmap.c
424 unsigned long index, end, i; in bitmap_find_next_zero_area_off() local
426 index = find_next_zero_bit(map, size, start); in bitmap_find_next_zero_area_off()
429 index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset; in bitmap_find_next_zero_area_off()
431 end = index + nr; in bitmap_find_next_zero_area_off()
434 i = find_next_bit(map, end, index); in bitmap_find_next_zero_area_off()
439 return index; in bitmap_find_next_zero_area_off()
1250 int index; /* index first long of region in bitmap */ in __reg_op() local
1263 index = pos / BITS_PER_LONG; in __reg_op()
1264 offset = pos - (index * BITS_PER_LONG); in __reg_op()
1279 if (bitmap[index + i] & mask) in __reg_op()
[all …]
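
bitmap_find_next_zero_area() is the workhorse behind several allocators above (iommu-helper, genalloc): find a run of nr zero bits at or after start, honouring an alignment mask, and return an index at or past size on failure. Typical claim-a-range use, a minimal sketch assuming a kernel context:

    #include <linux/bitmap.h>
    #include <linux/errno.h>

    #define SLOTS 128

    static DECLARE_BITMAP(slots, SLOTS);

    static int claim_slots(unsigned int nr)
    {
            unsigned long pos;

            /* Find nr consecutive free slots, no extra alignment. */
            pos = bitmap_find_next_zero_area(slots, SLOTS, 0, nr, 0);
            if (pos >= SLOTS)
                    return -ENOSPC; /* no free run long enough */

            bitmap_set(slots, pos, nr);     /* mark the run busy */
            return pos;
    }
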
objagg.c
711 int parent_index, int index) in objagg_tmp_graph_edge_index() argument
713 return index * graph->nodes_count + parent_index; in objagg_tmp_graph_edge_index()
717 int parent_index, int index) in objagg_tmp_graph_edge_set() argument
719 int edge_index = objagg_tmp_graph_edge_index(graph, index, in objagg_tmp_graph_edge_set()
726 int parent_index, int index) in objagg_tmp_graph_is_edge() argument
728 int edge_index = objagg_tmp_graph_edge_index(graph, index, in objagg_tmp_graph_is_edge()
735 unsigned int index) in objagg_tmp_graph_node_weight() argument
737 struct objagg_tmp_node *node = &graph->nodes[index]; in objagg_tmp_graph_node_weight()
746 if (!objagg_tmp_graph_is_edge(graph, index, j)) in objagg_tmp_graph_node_weight()
845 int index; in objagg_opt_simple_greedy_fillup_hints() local
[all …]
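
objagg's temporary graph keeps its adjacency matrix flattened in a single array: the edge (parent_index, index) lives at slot index * nodes_count + parent_index. The same row-major trick, standalone:

    #include <stdio.h>

    #define NODES 4

    static unsigned char edges[NODES * NODES];  /* flattened NxN matrix */

    static int edge_index(int parent, int node)
    {
            return node * NODES + parent;   /* row-major, as in objagg */
    }

    int main(void)
    {
            edges[edge_index(1, 3)] = 1;    /* edge parent 1 -> node 3 */
            printf("%d\n", edges[edge_index(1, 3)]);    /* 1 */
            return 0;
    }
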
btree.c
675 unsigned long *key, size_t index, in __btree_for_each() argument
699 size_t index, void *func2) in empty() argument
704 size_t index, void *__func) in visitorl() argument
708 func(elem, opaque, *key, index); in visitorl()
713 size_t index, void *__func) in visitor32() argument
718 func(elem, opaque, *key, index); in visitor32()
723 size_t index, void *__func) in visitor64() argument
728 func(elem, opaque, *key, index); in visitor64()
733 size_t index, void *__func) in visitor128() argument
738 func(elem, opaque, key[0], key[1], index); in visitor128()
[all …]
test_parman.c
286 test_parman->prio_array[item->parman_item.index] = item; in test_parman_run()
289 test_parman->prio_array[item->parman_item.index] = NULL; in test_parman_run()
336 if (item->parman_item.index != i) { in test_parman_check_array()
338 item->parman_item.index, i); in test_parman_check_array()
rhashtable.c
1178 unsigned int index = hash & ((1 << tbl->nest) - 1); in __rht_bucket_nested() local
1184 ntbl = rht_dereference_bucket_rcu(ntbl[index].table, tbl, hash); in __rht_bucket_nested()
1188 index = subhash & ((1 << shift) - 1); in __rht_bucket_nested()
1189 ntbl = rht_dereference_bucket_rcu(ntbl[index].table, in __rht_bucket_nested()
1218 unsigned int index = hash & ((1 << tbl->nest) - 1); in rht_bucket_nested_insert() local
1224 ntbl = nested_table_alloc(ht, &ntbl[index].table, in rht_bucket_nested_insert()
1228 index = hash & ((1 << shift) - 1); in rht_bucket_nested_insert()
1231 ntbl = nested_table_alloc(ht, &ntbl[index].table, in rht_bucket_nested_insert()
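
Nested bucket tables slice the hash into indices: the low tbl->nest bits pick a slot in the top-level table, then each deeper level consumes shift more bits of the remaining subhash. A standalone sketch of the split (field widths hypothetical):

    #include <stdio.h>

    int main(void)
    {
            unsigned int hash = 0x2f5;
            unsigned int nest = 3, shift = 4;

            unsigned int top = hash & ((1u << nest) - 1);   /* level 0 slot */
            unsigned int sub = hash >> nest;
            unsigned int next = sub & ((1u << shift) - 1);  /* level 1 slot */

            printf("top %u, next %u\n", top, next); /* top 5, next 14 */
            return 0;
    }
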
/lib/xz/
xz_dec_stream.c
116 } index; member
284 s->index.size += in_used; in index_update()
307 switch (s->index.sequence) { in dec_index()
309 s->index.count = s->vli; in dec_index()
316 if (s->index.count != s->block.count) in dec_index()
319 s->index.sequence = SEQ_INDEX_UNPADDED; in dec_index()
323 s->index.hash.unpadded += s->vli; in dec_index()
324 s->index.sequence = SEQ_INDEX_UNCOMPRESSED; in dec_index()
328 s->index.hash.uncompressed += s->vli; in dec_index()
329 s->index.hash.crc32 = xz_crc32( in dec_index()
[all …]
/lib/842/
842_compress.c
70 u8 index; member
76 u16 index; member
82 u8 index; member
116 (p)->node##b[_i].index = _i; \
124 p->index##b[n] = INDEX_NOT_FOUND; \
127 p->index##b[n] = _n->index; \
131 p->index##b[n] >= 0; \
135 ((p)->index##b[n] == INDEX_NOT_CHECKED \
137 : (p)->index##b[n] >= 0)
144 (unsigned int)_n->index, \
842_decompress.c
165 u64 index, offset, total = round_down(p->out - p->ostart, 8); in __do_index() local
168 ret = next_bits(p, &index, bits); in __do_index()
172 offset = index * size; in __do_index()
200 size, (unsigned long)index, in __do_index()
201 (unsigned long)(index * size), (unsigned long)offset, in __do_index()
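
__do_index() resolves a dictionary reference: the decoded index selects an entry of the template's size within the already-produced output, so the byte offset is simply index * size (then checked against the rolling window). The core arithmetic, standalone (values hypothetical):

    #include <stdio.h>

    int main(void)
    {
            unsigned long index = 5;    /* decoded dictionary index */
            unsigned long size = 2;     /* e.g. a 2-byte template entry */

            /* Entry n starts n * size bytes into the dictionary window. */
            printf("offset = %lu\n", index * size); /* 10 */
            return 0;
    }
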
/lib/pldmfw/
pldmfw.c
472 component->index = i; in pldm_parse_components()
744 u8 index = component->index, transfer_flag = 0; in pldm_send_component_tables() local
747 if (!test_bit(index, bitmap)) in pldm_send_component_tables()
753 if (index == find_first_bit(bitmap, data->component_bitmap_len)) in pldm_send_component_tables()
755 if (index == find_last_bit(bitmap, data->component_bitmap_len)) in pldm_send_component_tables()
786 u8 index = component->index; in pldm_flash_components() local
789 if (!test_bit(index, bitmap)) in pldm_flash_components()
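
pldm_send_component_tables() derives a component's transfer flag from its position in the requested-components bitmap: the first set bit marks the start of the transfer and the last set bit the end. A hedged sketch of that test, assuming a kernel context; the flag values here are placeholders, not the PLDM-defined constants:

    #include <linux/bitmap.h>

    #define XFER_FLAG_START 0x1     /* placeholder values */
    #define XFER_FLAG_END   0x2

    static u8 transfer_flag(const unsigned long *bitmap, unsigned long len,
                            u8 index)
    {
            u8 flag = 0;

            if (index == find_first_bit(bitmap, len))
                    flag |= XFER_FLAG_START;
            if (index == find_last_bit(bitmap, len))
                    flag |= XFER_FLAG_END;
            return flag;
    }
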
/lib/crypto/
sha256.c
165 unsigned int index, pad_len; in __sha256_final() local
173 index = sctx->count & 0x3f; in __sha256_final()
174 pad_len = (index < 56) ? (56 - index) : ((64+56) - index); in __sha256_final()
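
__sha256_final() sizes the padding so the 8-byte length field lands at the end of a 64-byte block: with index = count & 0x3f bytes already in the current block, it pads 56 - index bytes if the length still fits, else (64 + 56) - index to spill into a fresh block. Worked standalone:

    #include <stdio.h>

    static unsigned int sha256_pad_len(unsigned long long count)
    {
            unsigned int index = count & 0x3f;  /* bytes in current block */

            /* Leave exactly 8 bytes for the bit-length field. */
            return (index < 56) ? (56 - index) : ((64 + 56) - index);
    }

    int main(void)
    {
            printf("%u %u %u\n",
                   sha256_pad_len(0),   /* 56 */
                   sha256_pad_len(55),  /* 1  */
                   sha256_pad_len(56)); /* 64 */
            return 0;
    }
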
