/lib/ |
D | errseq.c |
      61  errseq_t cur, old;  in errseq_set() local
      90  cur = new;  in errseq_set()
      95  cur = cmpxchg(eseq, old, new);  in errseq_set()
     101  if (likely(cur == old || cur == new))  in errseq_set()
     105  old = cur;  in errseq_set()
     107  return cur;  in errseq_set()
     147  errseq_t cur = READ_ONCE(*eseq);  in errseq_check() local
     149  if (likely(cur == since))  in errseq_check()
     151  return -(cur & MAX_ERRNO);  in errseq_check()
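The errseq_set() fragments above follow a classic compare-and-swap retry loop. A minimal user-space sketch of that pattern (not the kernel source; seq_publish is an invented name and C11 atomics stand in for the kernel's cmpxchg()):

#include <stdint.h>
#include <stdatomic.h>

/* Illustrative only: seq_publish() is a made-up name, not a kernel API. */
static uint32_t seq_publish(_Atomic uint32_t *eseq, uint32_t new)
{
	uint32_t old = atomic_load(eseq);

	for (;;) {
		uint32_t cur = old;

		/* Strong CAS stands in for the kernel's cmpxchg(). */
		if (atomic_compare_exchange_strong(eseq, &cur, new))
			return new;		/* we stored it */
		if (cur == new)
			return cur;		/* another writer stored the same value */
		old = cur;			/* lost the race; retry from what we saw */
	}
}

int main(void)
{
	_Atomic uint32_t eseq = 0;
	seq_publish(&eseq, 5);
	return 0;
}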
|
D | test_list_sort.c |
      62  struct list_head *cur;  in list_sort_test() local
      84  for (cur = head.next; cur->next != &head; cur = cur->next) {  in list_sort_test()
      88  KUNIT_ASSERT_PTR_EQ_MSG(test, cur->next->prev, cur,  in list_sort_test()
      91  cmp_result = cmp(test, cur, cur->next);  in list_sort_test()
      94  el = container_of(cur, struct debug_el, list);  in list_sort_test()
      95  el1 = container_of(cur->next, struct debug_el, list);  in list_sort_test()
     104  KUNIT_EXPECT_PTR_EQ_MSG(test, head.prev, cur, "list is corrupted");  in list_sort_test()
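The test loop above validates both the back-pointers and the ordering of a freshly sorted circular list. A self-contained user-space sketch of the same shape (minimal stand-ins for list_head, debug_el and container_of; not the KUnit code):

#include <assert.h>
#include <stddef.h>

struct list_head { struct list_head *next, *prev; };
struct debug_el  { int value; struct list_head list; };

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Walk the ring headed by *head, checking links and non-decreasing order. */
static void check_sorted(struct list_head *head)
{
	struct list_head *cur;

	for (cur = head->next; cur->next != head; cur = cur->next) {
		struct debug_el *el  = container_of(cur, struct debug_el, list);
		struct debug_el *el1 = container_of(cur->next, struct debug_el, list);

		assert(cur->next->prev == cur);		/* back-pointer intact */
		assert(el->value <= el1->value);	/* sorted order */
	}
	assert(head->prev == cur);			/* last node closes the ring */
}

int main(void)
{
	struct debug_el a = { 1, { NULL, NULL } };
	struct debug_el b = { 2, { NULL, NULL } };
	struct debug_el c = { 3, { NULL, NULL } };
	struct list_head head;

	/* Hand-build the ring head -> a -> b -> c -> head. */
	head.next = &a.list;    a.list.prev = &head;
	a.list.next = &b.list;  b.list.prev = &a.list;
	b.list.next = &c.list;  c.list.prev = &b.list;
	c.list.next = &head;    head.prev = &c.list;

	check_sorted(&head);
	return 0;
}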
|
D | interval_tree.c |
      30  struct interval_tree_node *cur = state->nodes[1];  in interval_tree_span_iter_next_gap() local
      32  state->nodes[0] = cur;  in interval_tree_span_iter_next_gap()
      34  if (cur->last > state->nodes[0]->last)  in interval_tree_span_iter_next_gap()
      35  state->nodes[0] = cur;  in interval_tree_span_iter_next_gap()
      36  cur = interval_tree_iter_next(cur, state->first_index,  in interval_tree_span_iter_next_gap()
      38  } while (cur && (state->nodes[0]->last >= cur->start ||  in interval_tree_span_iter_next_gap()
      39  state->nodes[0]->last + 1 == cur->start));  in interval_tree_span_iter_next_gap()
      40  state->nodes[1] = cur;  in interval_tree_span_iter_next_gap()
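The condition in the do/while above extends a covered span while the next interval either overlaps it or starts exactly one past its end. A small sketch of that overlap-or-adjacent test, using a sorted array instead of the rbtree iterator (names and the array layout are illustrative, not the kernel code):

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

struct span { uint64_t start, last; };	/* inclusive [start, last] */

/*
 * Given intervals sorted by start, print the maximal covered spans.
 * The extension test mirrors the fragment above: keep absorbing the next
 * interval while it overlaps the running span or starts at last + 1.
 */
static void print_covered(const struct span *iv, size_t n)
{
	size_t i = 0;

	while (i < n) {
		struct span cover = iv[i++];

		while (i < n && (cover.last >= iv[i].start ||
				 cover.last + 1 == iv[i].start)) {
			if (iv[i].last > cover.last)
				cover.last = iv[i].last;
			i++;
		}
		printf("covered: [%llu, %llu]\n",
		       (unsigned long long)cover.start,
		       (unsigned long long)cover.last);
	}
}

int main(void)
{
	const struct span iv[] = { { 1, 3 }, { 4, 6 }, { 10, 12 } };

	print_covered(iv, 3);	/* prints [1, 6] and [10, 12] */
	return 0;
}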
|
D | list-test.c |
     387  struct list_head entries[3], *cur;  in list_test_list_cut_position() local
     400  list_for_each(cur, &list2) {  in list_test_list_cut_position()
     401  KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);  in list_test_list_cut_position()
     407  list_for_each(cur, &list1) {  in list_test_list_cut_position()
     408  KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);  in list_test_list_cut_position()
     415  struct list_head entries[3], *cur;  in list_test_list_cut_before() local
     428  list_for_each(cur, &list2) {  in list_test_list_cut_before()
     429  KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);  in list_test_list_cut_before()
     435  list_for_each(cur, &list1) {  in list_test_list_cut_before()
     436  KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);  in list_test_list_cut_before()
     [all …]
|
D | cmdline.c |
      58  char *cur = *str;  in get_option() local
      61  if (!cur || !(*cur))  in get_option()
      63  if (*cur == '-')  in get_option()
      64  value = -simple_strtoull(++cur, str, 0);  in get_option()
      66  value = simple_strtoull(cur, str, 0);  in get_option()
      69  if (cur == *str)  in get_option()
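The get_option() fragments show the usual option-parsing step: accept an optional leading '-', convert digits, and detect "no digits consumed" by comparing the cursor with the end pointer the conversion wrote back. A rough user-space analogue (strtoull in place of simple_strtoull; parse_int_option is an invented name):

#include <stdlib.h>
#include <stdio.h>

/* Returns 1 and advances *str past the number on success, 0 otherwise. */
static int parse_int_option(char **str, long long *val)
{
	char *cur = *str;

	if (!cur || !*cur)
		return 0;			/* nothing to parse */
	if (*cur == '-')
		*val = -(long long)strtoull(++cur, str, 0);
	else
		*val = (long long)strtoull(cur, str, 0);
	if (cur == *str)
		return 0;			/* no digits were consumed */
	return 1;
}

int main(void)
{
	char buf[] = "-42,next";
	char *s = buf;
	long long v;

	if (parse_int_option(&s, &v))
		printf("value %lld, rest \"%s\"\n", v, s);	/* value -42, rest ",next" */
	return 0;
}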
|
D | ts_fsm.c |
     137  struct ts_fsm_token *cur = NULL, *next;  in fsm_find() local
     166  cur = &fsm->tokens[tok_idx];  in fsm_find()
     173  switch (cur->recur) {  in fsm_find()
     178  if (!match_token(cur, data[block_idx]))  in fsm_find()
     184  !match_token(cur, data[block_idx]))  in fsm_find()
     192  if (!match_token(cur, data[block_idx]))  in fsm_find()
     206  if (!match_token(cur, data[block_idx]))  in fsm_find()
     229  if (!match_token(cur, data[block_idx]))  in fsm_find()
|
D | scatterlist.c |
     387  struct scatterlist *cur,  in get_next_sg() argument
     394  if (cur) {  in get_next_sg()
     395  next_sg = sg_next(cur);  in get_next_sg()
     406  if (cur) {  in get_next_sg()
|
D | rbtree_test.c |
     173  struct test_node *cur, *n;  in check_postorder_foreach() local
     175  rbtree_postorder_for_each_entry_safe(cur, n, &root.rb_root, rb)  in check_postorder_foreach()
|
D | kobject.c |
     130  int cur = strlen(kobject_name(parent));  in fill_kobj_path() local
     132  length -= cur;  in fill_kobj_path()
     135  memcpy(path + length, kobject_name(parent), cur);  in fill_kobj_path()
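The fill_kobj_path() fragments hint at the "fill the buffer from the back" pattern: walk towards the root and copy each ancestor's name in front of what has already been written. A sketch under invented names (fill_path, struct node) rather than the kobject code, assuming the buffer is large enough:

#include <stdio.h>
#include <string.h>

struct node { const char *name; struct node *parent; };

/* Build "/root/.../leaf" into path[] by filling from the end; assumes
 * path[] is large enough for the whole chain. */
static void fill_path(const struct node *n, char *path, int length)
{
	path[--length] = '\0';
	for (; n; n = n->parent) {
		int cur = (int)strlen(n->name);

		length -= cur;
		memcpy(path + length, n->name, cur);
		path[--length] = '/';	/* separator in front of the name */
	}
	/* Shift the assembled tail to the start of the buffer. */
	memmove(path, path + length, strlen(path + length) + 1);
}

int main(void)
{
	struct node root  = { "devices", NULL };
	struct node child = { "cpu0", &root };
	char buf[64];

	fill_path(&child, buf, sizeof(buf));
	printf("%s\n", buf);		/* prints /devices/cpu0 */
	return 0;
}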
|
/lib/zstd/compress/ |
D | zstd_opt.c |
    1078  U32 cur, last_pos = 0;  in ZSTD_compressBlock_opt_generic() local
    1111  cur = 0;  in ZSTD_compressBlock_opt_generic()
    1142  for (cur = 1; cur <= last_pos; cur++) {  in ZSTD_compressBlock_opt_generic()
    1143  const BYTE* const inr = ip + cur;  in ZSTD_compressBlock_opt_generic()
    1144  assert(cur < ZSTD_OPT_NUM);  in ZSTD_compressBlock_opt_generic()
    1145  DEBUGLOG(7, "cPos:%zi==rPos:%u", inr-istart, cur)  in ZSTD_compressBlock_opt_generic()
    1148  { U32 const litlen = (opt[cur-1].mlen == 0) ? opt[cur-1].litlen + 1 : 1;  in ZSTD_compressBlock_opt_generic()
    1149  int const price = opt[cur-1].price  in ZSTD_compressBlock_opt_generic()
    1150  + (int)ZSTD_rawLiteralsCost(ip+cur-1, 1, optStatePtr, optLevel)  in ZSTD_compressBlock_opt_generic()
    1154  if (price <= opt[cur].price) {  in ZSTD_compressBlock_opt_generic()
     [all …]
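The optimal-parser fragments above relax a per-position price table: the cost of reaching position cur is compared against "the previous position plus one more literal". A toy dynamic program in that spirit (not zstd itself; literal_cost() is a made-up stand-in for ZSTD_rawLiteralsCost(), and a real parser would also relax positions reachable through matches):

#include <limits.h>
#include <stdio.h>

static int literal_cost(unsigned char b) { return b < 128 ? 6 : 9; }	/* made-up cost model */

/* price[cur] = cheapest cost to encode the first cur bytes; assumes last_pos < 64. */
static int cheapest_price(const unsigned char *ip, unsigned last_pos)
{
	int price[64];
	unsigned cur;

	price[0] = 0;
	for (cur = 1; cur <= last_pos; cur++)
		price[cur] = INT_MAX;

	for (cur = 1; cur <= last_pos; cur++) {
		int via_literal = price[cur - 1] + literal_cost(ip[cur - 1]);

		if (via_literal <= price[cur])
			price[cur] = via_literal;
		/* a real parser would also relax cur + matchlength for each found match */
	}
	return price[last_pos];
}

int main(void)
{
	const unsigned char data[] = "example";

	printf("price: %d\n", cheapest_price(data, 7));
	return 0;
}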
|
D | zstd_ldm.c |
     388  ldmEntry_t const* cur;  in ZSTD_ldm_generateSequences_internal() local
     403  for (cur = bucket; cur < bucket + entsPerBucket; cur++) {  in ZSTD_ldm_generateSequences_internal()
     406  if (cur->checksum != checksum || cur->offset <= lowestIndex) {  in ZSTD_ldm_generateSequences_internal()
     411  cur->offset < dictLimit ? dictBase : base;  in ZSTD_ldm_generateSequences_internal()
     412  BYTE const* const pMatch = curMatchBase + cur->offset;  in ZSTD_ldm_generateSequences_internal()
     414  cur->offset < dictLimit ? dictEnd : iend;  in ZSTD_ldm_generateSequences_internal()
     416  cur->offset < dictLimit ? dictStart : lowPrefixPtr;  in ZSTD_ldm_generateSequences_internal()
     425  BYTE const* const pMatch = base + cur->offset;  in ZSTD_ldm_generateSequences_internal()
     439  bestEntry = cur;  in ZSTD_ldm_generateSequences_internal()
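The bucket scan above filters candidate entries by checksum and minimum offset before remembering the best one. A simplified sketch of that filter-and-track loop (struct entry and best_in_bucket are illustrative; here the highest offset stands in for "longest match", which the real code determines by comparing bytes):

#include <stddef.h>
#include <stdint.h>

struct entry { uint32_t checksum; uint32_t offset; };

static const struct entry *best_in_bucket(const struct entry *bucket,
					  size_t ents_per_bucket,
					  uint32_t checksum,
					  uint32_t lowest_index)
{
	const struct entry *cur, *best = NULL;

	for (cur = bucket; cur < bucket + ents_per_bucket; cur++) {
		if (cur->checksum != checksum || cur->offset <= lowest_index)
			continue;		/* wrong hash or too old to use */
		if (!best || cur->offset > best->offset)
			best = cur;		/* keep the best surviving candidate */
	}
	return best;
}

int main(void)
{
	const struct entry bucket[] = { { 7, 10 }, { 7, 40 }, { 9, 99 } };
	const struct entry *best = best_in_bucket(bucket, 3, 7, 20);

	return best ? 0 : 1;	/* best points at the { 7, 40 } entry */
}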
|