/lib/ |
D | bitmap.c |
      49  unsigned int k, lim = bits/BITS_PER_LONG;  in __bitmap_equal() local
      50  for (k = 0; k < lim; ++k)  in __bitmap_equal()
      51  if (bitmap1[k] != bitmap2[k])  in __bitmap_equal()
      55  if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))  in __bitmap_equal()
      64  unsigned int k, lim = bits/BITS_PER_LONG;  in __bitmap_complement() local
      65  for (k = 0; k < lim; ++k)  in __bitmap_complement()
      66  dst[k] = ~src[k];  in __bitmap_complement()
      69  dst[k] = ~src[k];  in __bitmap_complement()
      87  unsigned k, lim = BITS_TO_LONGS(nbits);  in __bitmap_shift_right() local
      90  for (k = 0; off + k < lim; ++k) {  in __bitmap_shift_right()
      [all …]
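The bitmap.c hits are the word-at-a-time comparison and complement loops: whole unsigned longs are handled directly and the trailing partial word is masked. A minimal user-space sketch of the __bitmap_equal() idea, assuming a simplified LAST_WORD_MASK() stand-in for the kernel's BITMAP_LAST_WORD_MASK() (the names here are illustrative, not the kernel's):

    #include <limits.h>
    #include <stdbool.h>

    #define WORD_BITS (sizeof(unsigned long) * CHAR_BIT)
    /* Mask covering the valid low bits of the last word; all bits when
     * nbits is an exact multiple of the word size. */
    #define LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (WORD_BITS - 1)))

    /* Compare whole words first, then only the in-range bits of the
     * trailing partial word. */
    static bool bitmap_eq(const unsigned long *a, const unsigned long *b,
                          unsigned int nbits)
    {
        unsigned int k, lim = nbits / WORD_BITS;

        for (k = 0; k < lim; ++k)
            if (a[k] != b[k])
                return false;

        if (nbits % WORD_BITS)
            if ((a[k] ^ b[k]) & LAST_WORD_MASK(nbits))
                return false;

        return true;
    }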
|
D | klist.c |
      85  void klist_init(struct klist *k, void (*get)(struct klist_node *),  in klist_init() argument
      88  INIT_LIST_HEAD(&k->k_list);  in klist_init()
      89  spin_lock_init(&k->k_lock);  in klist_init()
      90  k->get = get;  in klist_init()
      91  k->put = put;  in klist_init()
      95  static void add_head(struct klist *k, struct klist_node *n)  in add_head() argument
      97  spin_lock(&k->k_lock);  in add_head()
      98  list_add(&n->n_node, &k->k_list);  in add_head()
      99  spin_unlock(&k->k_lock);  in add_head()
     102  static void add_tail(struct klist *k, struct klist_node *n)  in add_tail() argument
      [all …]
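The klist hits show the locking discipline: every list mutation happens while holding the klist's spinlock. A tiny user-space sketch of the same add-under-lock pattern, with a pthread mutex standing in for the spinlock and a hand-rolled sentinel list standing in for list_head (all names are illustrative):

    #include <pthread.h>

    struct node {
        struct node *next, *prev;
    };

    struct locked_list {
        struct node head;            /* sentinel: head.next is the first element */
        pthread_mutex_t lock;
    };

    static void list_init(struct locked_list *l)
    {
        l->head.next = l->head.prev = &l->head;
        pthread_mutex_init(&l->lock, NULL);
    }

    static void insert_between(struct node *n, struct node *prev, struct node *next)
    {
        next->prev = n;
        n->next = next;
        n->prev = prev;
        prev->next = n;
    }

    static void add_head(struct locked_list *l, struct node *n)
    {
        pthread_mutex_lock(&l->lock);
        insert_between(n, &l->head, l->head.next);   /* right after the sentinel */
        pthread_mutex_unlock(&l->lock);
    }

    static void add_tail(struct locked_list *l, struct node *n)
    {
        pthread_mutex_lock(&l->lock);
        insert_between(n, l->head.prev, &l->head);   /* right before the sentinel */
        pthread_mutex_unlock(&l->lock);
    }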
|
D | ts_kmp.c |
      81  unsigned int k, q;  in compute_prefix_tbl() local
      84  for (k = 0, q = 1; q < len; q++) {  in compute_prefix_tbl()
      85  while (k > 0 && (icase ? toupper(pattern[k]) : pattern[k])  in compute_prefix_tbl()
      87  k = prefix_tbl[k-1];  in compute_prefix_tbl()
      88  if ((icase ? toupper(pattern[k]) : pattern[k])  in compute_prefix_tbl()
      90  k++;  in compute_prefix_tbl()
      91  prefix_tbl[q] = k;  in compute_prefix_tbl()
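These are the Knuth-Morris-Pratt failure-function loops: k tracks the length of the longest proper prefix of pattern[0..q] that is also a suffix of it. A case-sensitive sketch of the same construction (the kernel's compute_prefix_tbl() additionally folds case when icase is set; the function name below is mine):

    /* prefix_tbl[q] = length of the longest proper prefix of pattern[0..q]
     * that is also a suffix of it. */
    static void build_prefix_tbl(const unsigned char *pattern, unsigned int len,
                                 unsigned int *prefix_tbl)
    {
        unsigned int k, q;

        prefix_tbl[0] = 0;
        for (k = 0, q = 1; q < len; q++) {
            while (k > 0 && pattern[k] != pattern[q])
                k = prefix_tbl[k - 1];       /* fall back to a shorter border */
            if (pattern[k] == pattern[q])
                k++;                         /* extend the current border */
            prefix_tbl[q] = k;
        }
    }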
|
D | inflate.c |
     233  #define NEEDBITS(n) {while(k<(n)){b|=((ulg)NEXTBYTE())<<k;k+=8;}}
     234  #define DUMPBITS(n) {b>>=(n);k-=(n);}
     342  register int k;  /* number of bits in current code */  in huft_build() local
     396  k = j;  /* minimum code length */  in huft_build()
     453  for (; k <= g; k++)  in huft_build()
     456  a = c[k];  in huft_build()
     462  while (k > w + l)  in huft_build()
     470  if ((f = 1 << (j = k - w)) > a + 1)  /* try a k-w bit table */  in huft_build()
     474  xp = c + k;  in huft_build()
     517  r.b = (uch)(k - w);  in huft_build()
      [all …]
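In the two macros, k is the count of bits currently buffered in the accumulator b: NEEDBITS(n) tops the buffer up a byte at a time until at least n bits are available, and DUMPBITS(n) discards the n bits just consumed. A self-contained sketch of that discipline, assuming a plain byte-array input and names of my own choosing (valid for n up to 25 with a 32-bit accumulator):

    #include <stdint.h>
    #include <stddef.h>

    struct bitstream {
        const uint8_t *in;
        size_t pos, len;
        uint32_t b;          /* bit accumulator, least-significant bit first */
        unsigned int k;      /* number of valid bits currently in b */
    };

    static unsigned int get_bits(struct bitstream *s, unsigned int n)
    {
        unsigned int val;

        while (s->k < n) {                       /* NEEDBITS(n) */
            uint8_t byte = s->pos < s->len ? s->in[s->pos++] : 0;
            s->b |= (uint32_t)byte << s->k;
            s->k += 8;
        }
        val = s->b & ((1u << n) - 1);            /* the low n bits form the code */
        s->b >>= n;                              /* DUMPBITS(n) */
        s->k -= n;
        return val;
    }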
|
D | oid_registry.c |
      34  unsigned i, j, k, hash;  in look_up_OID() local
      50  k = OID__NR;  in look_up_OID()
      51  while (i < k) {  in look_up_OID()
      52  j = (i + k) / 2;  in look_up_OID()
      56  k = j;  in look_up_OID()
      67  k = j;  in look_up_OID()
      82  k = j;  in look_up_OID()
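In look_up_OID(), i and k bracket the still-possible range of the sorted OID index and j = (i + k) / 2 is the probe; each mismatch either raises i past j or lowers k to j. A minimal sketch of the same bracketing binary search over a plain sorted int array (names illustrative):

    static int binary_search(const int *table, unsigned int nr, int want)
    {
        unsigned int i = 0, j, k = nr;

        while (i < k) {
            j = (i + k) / 2;
            if (table[j] == want)
                return (int)j;               /* found: index of the entry */
            if (table[j] < want)
                i = j + 1;                   /* discard the lower half */
            else
                k = j;                       /* discard the upper half */
        }
        return -1;                           /* not present */
    }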
|
D | bch.c |
     385  int k, pp = -1;  in compute_error_locator_polynomial() local
     398  k = 2*i-pp;  in compute_error_locator_polynomial()
     405  elp->c[j+k] ^= a_pow(bch, tmp+l);  in compute_error_locator_polynomial()
     409  tmp = pelp->deg+k;  in compute_error_locator_polynomial()
     437  int rem, c, r, p, k, param[m];  in solve_linear_system() local
     439  k = 0;  in solve_linear_system()
     445  p = c-k;  in solve_linear_system()
     467  param[k++] = c;  in solve_linear_system()
     472  if (k > 0) {  in solve_linear_system()
     473  p = k;  in solve_linear_system()
      [all …]
|
D | kobject.c |
     768  void kset_init(struct kset *k)  in kset_init() argument
     770  kobject_init_internal(&k->kobj);  in kset_init()
     771  INIT_LIST_HEAD(&k->list);  in kset_init()
     772  spin_lock_init(&k->list_lock);  in kset_init()
     810  int kset_register(struct kset *k)  in kset_register() argument
     814  if (!k)  in kset_register()
     817  kset_init(k);  in kset_register()
     818  err = kobject_add_internal(&k->kobj);  in kset_register()
     821  kobject_uevent(&k->kobj, KOBJ_ADD);  in kset_register()
     830  void kset_unregister(struct kset *k)  in kset_unregister() argument
      [all …]
|
D | decompress_bunzip2.c |
     161  i, j, k, t, runPos, symCount, symTotal, nSelectors, *byteCount;  in get_next_block() local
     198  k = get_bits(bd, 16);  in get_next_block()
     200  if (k&(1 << (15-j)))  in get_next_block()
     257  k = get_bits(bd, 2);  in get_next_block()
     258  if (k < 2) {  in get_next_block()
     264  t += (((k+1)&2)-1);  in get_next_block()
     488  k = j+byteCount[i];  in get_next_block()
     490  j = k;  in get_next_block()
|
D | Kconfig |
     320  Constant value for Galois field order 'm'. If 'k' is the
     322  that (k + m*t) <= 2**m - 1.
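This help text states the usual BCH sizing constraint: a codeword over GF(2**m) holds at most 2**m - 1 bits, of which up to m*t are parity when correcting t errors, leaving k for data. As an illustrative check (numbers of my own choosing, not from this Kconfig entry): with m = 14 and t = 4 the codeword may hold 2**14 - 1 = 16383 bits, so at most k = 16383 - 14*4 = 16327 data bits can be protected.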
|
D | test_bpf.c |
      96  __u32 k = ~0;  in bpf_fill_maxinsns1() local
     103  for (i = 0; i < len; i++, k--)  in bpf_fill_maxinsns1()
     104  insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);  in bpf_fill_maxinsns1()
     145  __u32 k = prandom_u32_state(&rnd);  in bpf_fill_maxinsns3() local
     147  insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);  in bpf_fill_maxinsns3()
     393  int i = 0, j, k = 0;  in bpf_fill_ld_abs_vlan_push_pop() local
     424  if (++k < 5)  in bpf_fill_ld_abs_vlan_push_pop()
    5285  if (fp[len].code != 0 || fp[len].k != 0)  in filter_length()
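Here k doubles as the immediate operand of a classic BPF instruction (the .k field of struct sock_filter, as the filter_length() hit shows). A user-space sketch in the spirit of bpf_fill_maxinsns1(): fill an instruction array with BPF_RET | BPF_K statements whose immediate counts down from ~0. The function name is mine, the caller is assumed to have allocated the array, and the fields are assigned directly rather than through the test module's __BPF_STMT() helper:

    #include <linux/filter.h>        /* struct sock_filter, BPF_RET, BPF_K */

    static void fill_ret_k(struct sock_filter *insn, unsigned int len)
    {
        __u32 k = ~0;
        unsigned int i;

        for (i = 0; i < len; i++, k--) {
            insn[i].code = BPF_RET | BPF_K;  /* return the immediate operand */
            insn[i].jt = 0;
            insn[i].jf = 0;
            insn[i].k = k;                   /* immediate value, decrementing */
        }
    }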
|
D | Kconfig.debug |
     413  This enables checks whether a k/v free operation frees an area
    1787  This code (~1k) is freed after boot. By then, the firewire stack
|
/lib/raid6/ |
D | mktables.c |
      58  int i, j, k;  in main() local
      73  for (k = 0; k < 8; k++)  in main()
      74  printf("0x%02x,%c", gfmul(i, j + k),  in main()
      75  (k == 7) ? '\n' : ' ');  in main()
      92  for (k = 0; k < 8; k++)  in main()
      93  printf("0x%02x,%c", gfmul(i, j + k),  in main()
      94  (k == 7) ? '\n' : ' ');  in main()
      98  for (k = 0; k < 8; k++)  in main()
      99  printf("0x%02x,%c", gfmul(i, (j + k) << 4),  in main()
     100  (k == 7) ? '\n' : ' ');  in main()
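These loops print the RAID-6 lookup tables, calling gfmul() once per table entry. A sketch of shift-and-add ("Russian peasant") multiplication in GF(2^8) over the RAID-6 generator polynomial x^8 + x^4 + x^3 + x^2 + 1 (0x11d), i.e. the kind of multiply the table generator above relies on (the function name is mine):

    #include <stdint.h>

    static uint8_t gf256_mul(uint8_t a, uint8_t b)
    {
        uint8_t v = 0;

        while (b) {
            if (b & 1)
                v ^= a;                            /* add (xor) the current multiple */
            a = (a << 1) ^ ((a & 0x80) ? 0x1d : 0); /* multiply a by x, reduce mod 0x11d */
            b >>= 1;
        }
        return v;
    }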
|
/lib/reed_solomon/ |
D | decode_rs.c |
      21  int i, j, r, k, pad;  variable
     191  for (i = 1, k = iprim - 1; i <= nn; i++, k = rs_modnn(rs, k + iprim)) {
     203  loc[count] = k;
|
/lib/zlib_deflate/ |
D | deftree.c |
     138  static void pqdownheap (deflate_state *s, ct_data *tree, int k);
     374  int k  /* node to move down */  in pqdownheap() argument
     377  int v = s->heap[k];  in pqdownheap()
     378  int j = k << 1;  /* left son of k */  in pqdownheap()
     389  s->heap[k] = s->heap[j]; k = j;  in pqdownheap()
     394  s->heap[k] = v;  in pqdownheap()
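pqdownheap() restores the heap property by sifting the node at index k down: the array is a 1-based binary heap in which the sons of k sit at 2k and 2k+1. A generic sift-down sketch of that loop for a plain min-heap of ints (zlib's version compares symbol frequencies with a depth tie-break; the name below is mine):

    /* heap[1..n] is a binary min-heap; move the element at index k down
     * until neither son is smaller than it. */
    static void sift_down(int *heap, int n, int k)
    {
        int v = heap[k];
        int j = k << 1;                  /* left son of k */

        while (j <= n) {
            if (j < n && heap[j + 1] < heap[j])
                j++;                     /* pick the smaller of the two sons */
            if (v <= heap[j])
                break;                   /* already in place */
            heap[k] = heap[j];           /* pull the son up and descend */
            k = j;
            j = k << 1;
        }
        heap[k] = v;
    }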
|