/lib/zstd/

D | huf_compress.c |
    155  U32 n;  in HUF_writeCTable_wksp() local
    177  for (n = 1; n < huffLog + 1; n++)  in HUF_writeCTable_wksp()
    178  bitsToWeight[n] = (BYTE)(huffLog + 1 - n);  in HUF_writeCTable_wksp()
    179  for (n = 0; n < maxSymbolValue; n++)  in HUF_writeCTable_wksp()
    180  huffWeight[n] = bitsToWeight[CTable[n].nbBits];  in HUF_writeCTable_wksp()
    198  for (n = 0; n < maxSymbolValue; n += 2)  in HUF_writeCTable_wksp()
    199  op[(n / 2) + 1] = (BYTE)((huffWeight[n] << 4) + huffWeight[n + 1]);  in HUF_writeCTable_wksp()
    235  U32 n, nextRankStart = 0;  in HUF_readCTable_wksp() local
    236  for (n = 1; n <= tableLog; n++) {  in HUF_readCTable_wksp()
    238  nextRankStart += (rankVal[n] << (n - 1));  in HUF_readCTable_wksp()
    [all …]

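The loop at line 199 packs two 4-bit Huffman weights into each output byte, high nibble first (op[0] is reserved for a header byte, hence the + 1). A minimal userspace sketch of that nibble packing; the function name and the odd-count padding are illustrative, not part of the zstd API:

    #include <stdio.h>
    #include <stdint.h>
    #include <stddef.h>

    /* Pack two 4-bit weights per byte, high nibble first, as the
     * HUF_writeCTable_wksp() loop above does. Weights must be < 16. */
    static size_t pack_weights(const uint8_t *w, size_t count, uint8_t *out)
    {
        size_t n;

        for (n = 0; n < count; n += 2)
            out[n / 2] = (uint8_t)((w[n] << 4) |
                                   (n + 1 < count ? w[n + 1] : 0));
        return (count + 1) / 2;
    }

    int main(void)
    {
        uint8_t w[5] = { 3, 1, 4, 1, 5 }, out[3];
        size_t i, len = pack_weights(w, 5, out);

        for (i = 0; i < len; i++)
            printf("%02x ", out[i]);    /* prints: 31 41 50 */
        printf("\n");
        return 0;
    }
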
D | entropy_common.c |
    188  U32 n;  in HUF_readStats_wksp() local
    189  for (n = 0; n < oSize; n += 2) {  in HUF_readStats_wksp()
    190  huffWeight[n] = ip[n / 2] >> 4;  in HUF_readStats_wksp()
    191  huffWeight[n + 1] = ip[n / 2] & 15;  in HUF_readStats_wksp()
    206  U32 n;  in HUF_readStats_wksp() local
    207  for (n = 0; n < oSize; n++) {  in HUF_readStats_wksp()
    208  if (huffWeight[n] >= HUF_TABLELOG_MAX)  in HUF_readStats_wksp()
    210  rankStats[huffWeight[n]]++;  in HUF_readStats_wksp()
    211  weightTotal += (1 << huffWeight[n]) >> 1;  in HUF_readStats_wksp()

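HUF_readStats_wksp() reverses that packing and then validates the table by summing 2^(weight-1) over all symbols. A hedged sketch of both loops; TABLELOG_MAX and the error convention are stand-ins for the zstd names:

    #include <stdio.h>
    #include <stdint.h>
    #include <stddef.h>

    #define TABLELOG_MAX 12             /* stand-in for HUF_TABLELOG_MAX */

    /* Unpack one byte into two 4-bit weights, then accumulate the
     * weight total, mirroring the two HUF_readStats_wksp() loops. */
    static int read_weights(const uint8_t *ip, size_t osize,
                            uint8_t *w, uint32_t *total)
    {
        size_t n;

        for (n = 0; n + 1 < osize; n += 2) {
            w[n]     = ip[n / 2] >> 4;
            w[n + 1] = ip[n / 2] & 15;
        }
        *total = 0;
        for (n = 0; n < osize; n++) {
            if (w[n] >= TABLELOG_MAX)
                return -1;              /* corrupt header */
            *total += (1u << w[n]) >> 1; /* weight 0 contributes 0 */
        }
        return 0;
    }

    int main(void)
    {
        uint8_t ip[2] = { 0x31, 0x41 }, w[4];
        uint32_t total;

        if (read_weights(ip, 4, w, &total) == 0)
            printf("weights %u %u %u %u, total %u\n",
                   w[0], w[1], w[2], w[3], total);  /* 3 1 4 1, total 14 */
        return 0;
    }
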
/lib/

D | sort.c |
    58  static void swap_words_32(void *a, void *b, size_t n)  in swap_words_32() argument
    61  u32 t = *(u32 *)(a + (n -= 4));  in swap_words_32()
    62  *(u32 *)(a + n) = *(u32 *)(b + n);  in swap_words_32()
    63  *(u32 *)(b + n) = t;  in swap_words_32()
    64  } while (n);  in swap_words_32()
    83  static void swap_words_64(void *a, void *b, size_t n)  in swap_words_64() argument
    87  u64 t = *(u64 *)(a + (n -= 8));  in swap_words_64()
    88  *(u64 *)(a + n) = *(u64 *)(b + n);  in swap_words_64()
    89  *(u64 *)(b + n) = t;  in swap_words_64()
    92  u32 t = *(u32 *)(a + (n -= 4));  in swap_words_64()
    [all …]

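swap_words_32() exchanges two equal-sized objects one 32-bit word at a time, walking back from the end so `n` doubles as the cursor. A standalone sketch under the same assumptions (n a nonzero multiple of 4, suitably aligned pointers; the char casts replace the kernel's void-pointer arithmetic):

    #include <stdio.h>
    #include <stdint.h>
    #include <stddef.h>

    /* Swap n bytes between a and b, 4 bytes at a time, back to front. */
    static void swap_words_32(void *a, void *b, size_t n)
    {
        do {
            uint32_t t = *(uint32_t *)((char *)a + (n -= 4));
            *(uint32_t *)((char *)a + n) = *(uint32_t *)((char *)b + n);
            *(uint32_t *)((char *)b + n) = t;
        } while (n);
    }

    int main(void)
    {
        uint32_t x[2] = { 1, 2 }, y[2] = { 3, 4 };

        swap_words_32(x, y, sizeof(x));
        printf("%u %u | %u %u\n", x[0], x[1], y[0], y[1]); /* 3 4 | 1 2 */
        return 0;
    }
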
D | klist.c |
    94  static void add_head(struct klist *k, struct klist_node *n)  in add_head() argument
    97  list_add(&n->n_node, &k->k_list);  in add_head()
    101  static void add_tail(struct klist *k, struct klist_node *n)  in add_tail() argument
    104  list_add_tail(&n->n_node, &k->k_list);  in add_tail()
    108  static void klist_node_init(struct klist *k, struct klist_node *n)  in klist_node_init() argument
    110  INIT_LIST_HEAD(&n->n_node);  in klist_node_init()
    111  kref_init(&n->n_ref);  in klist_node_init()
    112  knode_set_klist(n, k);  in klist_node_init()
    114  k->get(n);  in klist_node_init()
    122  void klist_add_head(struct klist_node *n, struct klist *k)  in klist_add_head() argument
    [all …]

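klist's add_head()/add_tail() are thin wrappers over the kernel's intrusive circular list. A self-contained sketch of those two insertions, with the locking and kref accounting omitted:

    #include <stdio.h>

    /* Minimal intrusive circular doubly linked list, in the style of
     * the struct list_head that klist wraps. */
    struct list_head { struct list_head *next, *prev; };

    static void list_init(struct list_head *h) { h->next = h->prev = h; }

    static void __list_add(struct list_head *n,
                           struct list_head *prev, struct list_head *next)
    {
        next->prev = n;
        n->next = next;
        n->prev = prev;
        prev->next = n;
    }

    /* add_head(): insert right after the head sentinel. */
    static void list_add(struct list_head *n, struct list_head *h)
    { __list_add(n, h, h->next); }

    /* add_tail(): insert right before the head sentinel. */
    static void list_add_tail(struct list_head *n, struct list_head *h)
    { __list_add(n, h->prev, h); }

    int main(void)
    {
        struct list_head head, a, b;

        list_init(&head);
        list_add(&a, &head);        /* list: a */
        list_add_tail(&b, &head);   /* list: a b */
        printf("head -> %s -> %s\n",
               head.next == &a ? "a" : "?",
               head.next->next == &b ? "b" : "?");
        return 0;
    }
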
D | generic-radix-tree.c |
    57  struct genradix_node *n = genradix_root_to_node(r);  in __genradix_ptr() local
    64  if (!n)  in __genradix_ptr()
    71  n = n->children[offset >> genradix_depth_shift(level)];  in __genradix_ptr()
    75  return &n->data[offset];  in __genradix_ptr()
    108  struct genradix_node *n, *new_node = NULL;  in __genradix_ptr_alloc() local
    115  n = genradix_root_to_node(r);  in __genradix_ptr_alloc()
    118  if (n && ilog2(offset) < genradix_depth_shift(level))  in __genradix_ptr_alloc()
    127  new_node->children[0] = n;  in __genradix_ptr_alloc()
    129  ((unsigned long) new_node | (n ? level + 1 : 0)));  in __genradix_ptr_alloc()
    139  &n->children[offset >> genradix_depth_shift(level)];  in __genradix_ptr_alloc()
    [all …]

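__genradix_ptr() descends from the root, using the top bits of the byte offset as the child index at each level and the low bits as the position in the leaf. A toy-sized sketch of the same walk; SHIFT, LEAF_BYTES and the helper names are illustrative, chosen much smaller than the kernel's page-sized nodes:

    #include <stdio.h>
    #include <stdlib.h>
    #include <stddef.h>

    #define SHIFT 2                 /* 4 children per node (toy size) */
    #define LEAF_BYTES 4            /* toy leaf payload; log2 = 2 */

    struct node {
        union {
            struct node *children[1 << SHIFT];
            char data[LEAF_BYTES];
        };
    };

    /* Offset bits consumed below a node at the given level (level 0
     * is a leaf), standing in for genradix_depth_shift(). */
    static unsigned depth_shift(unsigned level)
    { return (level - 1) * SHIFT + 2; }

    /* Pointer to the byte at `offset`, or NULL if unpopulated.
     * Assumes offset is within the tree's capacity for this depth. */
    static char *genradix_ptr(struct node *n, unsigned level, size_t offset)
    {
        while (level) {
            if (!n)
                return NULL;
            n = n->children[(offset >> depth_shift(level)) &
                            ((1 << SHIFT) - 1)];
            level--;
        }
        return n ? &n->data[offset & (LEAF_BYTES - 1)] : NULL;
    }

    int main(void)
    {
        /* Two-level toy tree built by hand; look up offset 5. */
        struct node *leaf = calloc(1, sizeof(*leaf));
        struct node *root = calloc(1, sizeof(*root));

        root->children[1] = leaf;   /* covers offsets 4..7 */
        leaf->data[1] = 'x';        /* offset 5 -> leaf byte 1 */
        printf("%c\n", *genradix_ptr(root, 1, 5));  /* x */
        free(leaf);
        free(root);
        return 0;
    }
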
D | usercopy.c |
    8  unsigned long _copy_from_user(void *to, const void __user *from, unsigned long n)  in _copy_from_user() argument
    10  unsigned long res = n;  in _copy_from_user()
    12  if (likely(access_ok(from, n))) {  in _copy_from_user()
    13  kasan_check_write(to, n);  in _copy_from_user()
    14  res = raw_copy_from_user(to, from, n);  in _copy_from_user()
    17  memset(to + (n - res), 0, res);  in _copy_from_user()
    24  unsigned long _copy_to_user(void __user *to, const void *from, unsigned long n)  in _copy_to_user() argument
    27  if (likely(access_ok(to, n))) {  in _copy_to_user()
    28  kasan_check_read(from, n);  in _copy_to_user()
    29  n = raw_copy_to_user(to, from, n);  in _copy_to_user()
    [all …]

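The memset at line 17 is the security-relevant part: if raw_copy_from_user() faults partway, the uncopied tail of the kernel buffer is zeroed so no stale data leaks to the caller. A userspace model of that contract; raw_copy() here is a stand-in that fakes a fault over the last `fail` bytes:

    #include <stdio.h>
    #include <string.h>

    /* Stand-in for raw_copy_from_user(): copies n - fail bytes and
     * reports `fail` bytes as not copied, like a mid-copy fault. */
    static unsigned long raw_copy(void *to, const void *from,
                                  unsigned long n, unsigned long fail)
    {
        memcpy(to, from, n - fail);
        return fail;                    /* bytes NOT copied */
    }

    static unsigned long copy_from_user_model(void *to, const void *from,
                                              unsigned long n,
                                              unsigned long fail)
    {
        unsigned long res = raw_copy(to, from, n, fail);

        if (res)                        /* zero the uncopied tail */
            memset((char *)to + (n - res), 0, res);
        return res;
    }

    int main(void)
    {
        char src[8] = "ABCDEFG", dst[8];
        unsigned long left;

        memset(dst, '!', sizeof(dst));
        left = copy_from_user_model(dst, src, 8, 3);
        printf("left=%lu dst=%.5s + %d zero bytes\n",
               left, dst, (int)left);   /* left=3 dst=ABCDE + 3 zero bytes */
        return 0;
    }
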
D | cpumask.c |
    17  unsigned int cpumask_next(int n, const struct cpumask *srcp)  in cpumask_next() argument
    20  if (n != -1)  in cpumask_next()
    21  cpumask_check(n);  in cpumask_next()
    22  return find_next_bit(cpumask_bits(srcp), nr_cpumask_bits, n + 1);  in cpumask_next()
    34  int cpumask_next_and(int n, const struct cpumask *src1p,  in cpumask_next_and() argument
    38  if (n != -1)  in cpumask_next_and()
    39  cpumask_check(n);  in cpumask_next_and()
    41  nr_cpumask_bits, n + 1);  in cpumask_next_and()
    77  int cpumask_next_wrap(int n, const struct cpumask *mask, int start, bool wrap)  in cpumask_next_wrap() argument
    82  next = cpumask_next(n, mask);  in cpumask_next_wrap()
    [all …]

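cpumask_next() is a thin wrapper that asks find_next_bit() for the first set bit strictly after n (hence the n + 1, with n == -1 meaning "start from CPU 0"). A hedged sketch of both layers; the bit-by-bit scan stands in for the kernel's optimized word-at-a-time search:

    #include <stdio.h>
    #include <limits.h>

    #define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)

    /* First set bit at or above `start`; returns nbits if none. */
    static unsigned long find_next_bit(const unsigned long *map,
                                       unsigned long nbits,
                                       unsigned long start)
    {
        while (start < nbits) {
            unsigned long word = map[start / BITS_PER_LONG] >>
                                 (start % BITS_PER_LONG);
            if (word & 1)
                return start;
            start++;    /* simple scan, not the optimized kernel one */
        }
        return nbits;
    }

    /* cpumask_next()-style wrapper: first set bit strictly after n. */
    static unsigned long next_cpu(long n, const unsigned long *map,
                                  unsigned long nbits)
    { return find_next_bit(map, nbits, (unsigned long)(n + 1)); }

    int main(void)
    {
        unsigned long mask = (1UL << 3) | (1UL << 7);

        printf("%lu %lu %lu\n",
               next_cpu(-1, &mask, 16),     /* 3 */
               next_cpu(3, &mask, 16),      /* 7 */
               next_cpu(7, &mask, 16));     /* 16 == none */
        return 0;
    }
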
D | iov_iter.c |
    14  #define iterate_iovec(i, n, __v, __p, skip, STEP) { \  argument
    16  size_t wanted = n; \
    18  __v.iov_len = min(n, __p->iov_len - skip); \
    24  n -= __v.iov_len; \
    28  while (unlikely(!left && n)) { \
    30  __v.iov_len = min(n, __p->iov_len); \
    37  n -= __v.iov_len; \
    39  n = wanted - n; \
    42  #define iterate_kvec(i, n, __v, __p, skip, STEP) { \  argument
    43  size_t wanted = n; \
    [all …]

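iterate_iovec() clips each step to what remains of the current segment, carrying a `skip` offset into the first one and counting n down toward zero. A function-style sketch of the same walk, with the macro's STEP body turned into a callback; names and the return convention are illustrative:

    #include <stdio.h>
    #include <stddef.h>
    #include <sys/uio.h>    /* struct iovec */

    /* Visit up to n bytes spread across an iovec array, starting
     * `skip` bytes into the first segment. Returns bytes covered. */
    static size_t iterate_iovec(const struct iovec *p, size_t nsegs,
                                size_t skip, size_t n,
                                void (*step)(void *base, size_t len))
    {
        size_t wanted = n;

        while (n && nsegs) {
            size_t len = p->iov_len - skip;

            if (len > n)
                len = n;
            if (len)
                step((char *)p->iov_base + skip, len);
            n -= len;
            skip = 0;       /* only the first segment starts mid-way */
            p++;
            nsegs--;
        }
        return wanted - n;
    }

    static void print_step(void *base, size_t len)
    {
        (void)base;
        printf("step: %zu bytes\n", len);
    }

    int main(void)
    {
        char a[8], b[8];
        struct iovec iov[2] = { { a, sizeof(a) }, { b, sizeof(b) } };
        size_t done = iterate_iovec(iov, 2, 3, 10, print_step);

        printf("covered %zu bytes\n", done);    /* 5 + 5 = 10 */
        return 0;
    }
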
D | oid_registry.c |
    110  unsigned char n;  in sprint_oid() local
    117  n = *v++;  in sprint_oid()
    118  ret = count = snprintf(buffer, bufsize, "%u.%u", n / 40, n % 40);  in sprint_oid()
    126  n = *v++;  in sprint_oid()
    127  if (!(n & 0x80)) {  in sprint_oid()
    128  num = n;  in sprint_oid()
    130  num = n & 0x7f;  in sprint_oid()
    134  n = *v++;  in sprint_oid()
    136  num |= n & 0x7f;  in sprint_oid()
    137  } while (n & 0x80);  in sprint_oid()

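sprint_oid() decodes BER-encoded OID components: the first octet packs two arcs as 40*X + Y, and every later arc is base-128 with the top bit flagging continuation. A hedged decoder sketch; print_oid() is illustrative, not the kernel's interface:

    #include <stdio.h>
    #include <stddef.h>

    /* Print a BER-encoded OID as dotted decimal; returns 0 or -1. */
    static int print_oid(const unsigned char *v, size_t len)
    {
        const unsigned char *end = v + len;
        unsigned char n;
        unsigned long num;

        if (!len)
            return -1;
        n = *v++;
        printf("%u.%u", n / 40, n % 40);    /* first octet = two arcs */
        while (v < end) {
            num = 0;
            do {                    /* base-128, high bit continues */
                if (v >= end)
                    return -1;
                n = *v++;
                num = (num << 7) | (n & 0x7f);
            } while (n & 0x80);
            printf(".%lu", num);
        }
        printf("\n");
        return 0;
    }

    int main(void)
    {
        /* 2.5.4.3 (commonName): 0x55 = 2*40+5, then 0x04, 0x03 */
        const unsigned char oid[] = { 0x55, 0x04, 0x03 };

        return print_oid(oid, sizeof(oid));
    }
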
D | inflate.c |
    143  ush n; /* literal, length base, or distance base */  member
    234  #define NEEDBITS(n) {while(k<(n)){b|=((ulg)NEXTBYTE())<<k;k+=8;}}  argument
    235  #define DUMPBITS(n) {b>>=(n);k-=(n);}  argument
    324  unsigned n, /* number of codes (assumed <= N_MAX) */  in huft_build() argument
    375  p = b; i = n;  in huft_build()
    377  Tracecv(*p, (stderr, (n-i >= ' ' && n-i <= '~' ? "%c %d\n" : "0x%x %d\n"),  in huft_build()
    378  n-i, *p));  in huft_build()
    382  if (c[0] == n) /* null input--all zero length codes */  in huft_build()
    438  } while (++i < n);  in huft_build()
    439  n = x[g]; /* set n to length of v */  in huft_build()
    [all …]

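huft_build() begins like any canonical Huffman decoder: histogram the code lengths, reject oversubscribed sets (the Kraft inequality), and turn the counts into per-length starting offsets, its c[] and x[] arrays. A hedged sketch of just that first phase; the function name and return convention are illustrative:

    #include <stdio.h>

    #define MAXBITS 15

    /* Histogram code lengths b[0..n-1] into c[] and compute, per
     * length, the index of its first code in length-sorted order
     * (x[]). Returns 0, or -1 on an oversubscribed length set. */
    static int count_lengths(const unsigned *b, unsigned n,
                             unsigned c[MAXBITS + 1],
                             unsigned x[MAXBITS + 2])
    {
        unsigned i, j, left;

        for (i = 0; i <= MAXBITS; i++)
            c[i] = 0;
        for (i = 0; i < n; i++)
            c[b[i]]++;
        if (c[0] == n)              /* null input: all zero lengths */
            return 0;

        left = 1;                   /* Kraft check: codes must fit */
        for (j = 1; j <= MAXBITS; j++) {
            left <<= 1;
            if (c[j] > left)
                return -1;          /* oversubscribed */
            left -= c[j];
        }

        x[1] = 0;                   /* offsets in length-sorted order */
        for (j = 1; j <= MAXBITS; j++)
            x[j + 1] = x[j] + c[j];
        return 0;
    }

    int main(void)
    {
        unsigned b[4] = { 2, 1, 3, 3 }, c[MAXBITS + 1], x[MAXBITS + 2];

        if (count_lengths(b, 4, c, x) == 0)
            printf("len1 starts at %u, len2 at %u, len3 at %u\n",
                   x[1], x[2], x[3]);  /* 0, 1, 2 */
        return 0;
    }
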
D | kfifo.c |
    298  int n;  in setup_sgl_buf() local
    309  n = 0;  in setup_sgl_buf()
    323  if (++n == nents || sgl == NULL)  in setup_sgl_buf()
    324  return n;  in setup_sgl_buf()
    331  return n + 1;  in setup_sgl_buf()
    340  unsigned int n;  in setup_sgl() local
    350  n = setup_sgl_buf(sgl, fifo->data + off, nents, l);  in setup_sgl()
    351  n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l);  in setup_sgl()
    353  return n;  in setup_sgl()
    421  static void __kfifo_poke_n(struct __kfifo *fifo, unsigned int n, size_t recsize)  in __kfifo_poke_n() argument
    [all …]

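setup_sgl() deals with ring-buffer wraparound by describing the region as at most two linear chunks, which is why line 351 issues a second setup_sgl_buf() starting from the beginning of the buffer. A hedged sketch of the split itself, with a plain chunk struct standing in for scatterlist entries:

    #include <stdio.h>

    struct chunk { char *base; unsigned int len; };

    /* Describe `len` bytes at offset `off` in a circular buffer of
     * `size` bytes as one or two linear chunks; returns chunk count.
     * Assumes off < size and len <= size. */
    static unsigned int split_wrapped(char *buf, unsigned int size,
                                      unsigned int off, unsigned int len,
                                      struct chunk out[2])
    {
        unsigned int l = size - off;    /* room before the wrap point */

        if (len <= l) {                 /* no wrap: one chunk */
            out[0].base = buf + off;
            out[0].len  = len;
            return 1;
        }
        out[0].base = buf + off;        /* tail of the buffer... */
        out[0].len  = l;
        out[1].base = buf;              /* ...then its head */
        out[1].len  = len - l;
        return 2;
    }

    int main(void)
    {
        char buf[16];
        struct chunk c[2];
        unsigned int n = split_wrapped(buf, 16, 12, 10, c);

        printf("%u chunks: %u bytes @12, %u bytes @0\n",
               n, c[0].len, n > 1 ? c[1].len : 0); /* 2 chunks: 4, 6 */
        return 0;
    }
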
D | globtest.c |
    132  unsigned n = 0;  in glob_init() local
    150  n++;  in glob_init()
    153  n -= successes;  in glob_init()
    154  printk(message, successes, n);  in glob_init()
    157  return n ? -ECANCELED : 0;  in glob_init()

D | fault-inject.c |
    71  int n, nr_entries;  in fail_stacktrace() local
    78  for (n = 0; n < nr_entries; n++) {  in fail_stacktrace()
    79  if (attr->reject_start <= entries[n] &&  in fail_stacktrace()
    80  entries[n] < attr->reject_end)  in fail_stacktrace()
    82  if (attr->require_start <= entries[n] &&  in fail_stacktrace()
    83  entries[n] < attr->require_end)  in fail_stacktrace()

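fail_stacktrace() filters fault injection by address range: one stack frame inside the reject window vetoes the fault, and when a require window is configured at least one frame must fall inside it. A hedged sketch of that test; the default-allow behaviour for an unset require window is my reading of the surrounding code:

    #include <stdio.h>
    #include <stdbool.h>

    /* Decide whether a fault may be injected, given captured stack
     * entries and [start, end) reject/require address windows. */
    static bool stack_allows_fault(const unsigned long *entries, int nr,
                                   unsigned long reject_start,
                                   unsigned long reject_end,
                                   unsigned long require_start,
                                   unsigned long require_end)
    {
        /* No require window configured: allow by default. */
        bool found = (require_start == 0 && require_end == 0);
        int n;

        for (n = 0; n < nr; n++) {
            if (reject_start <= entries[n] && entries[n] < reject_end)
                return false;       /* frame in the reject window */
            if (require_start <= entries[n] && entries[n] < require_end)
                found = true;       /* frame in the require window */
        }
        return found;
    }

    int main(void)
    {
        unsigned long stack[3] = { 0x1000, 0x2040, 0x3000 };

        printf("%d\n", stack_allows_fault(stack, 3,
                                          0x9000, 0xa000,   /* reject */
                                          0x2000, 0x2100)); /* 1: allowed */
        return 0;
    }
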
D | asn1_decoder.c |
    62  size_t dp = *_dp, len, n;  in asn1_find_indefinite_length() local
    107  n = len - 0x80;  in asn1_find_indefinite_length()
    108  if (unlikely(n > sizeof(len) - 1))  in asn1_find_indefinite_length()
    110  if (unlikely(n > datalen - dp))  in asn1_find_indefinite_length()
    113  for (; n > 0; n--) {  in asn1_find_indefinite_length()
    270  int n = len - 0x80;  in asn1_ber_decoder() local
    271  if (unlikely(n > 2))  in asn1_ber_decoder()
    273  if (unlikely(n > datalen - dp))  in asn1_ber_decoder()
    275  hdr += n;  in asn1_ber_decoder()
    276  for (len = 0; n > 0; n--) {  in asn1_ber_decoder()

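Both functions parse BER long-form lengths the same way: an initial octet 0x80 + n announces n following length octets, accumulated big-endian, with bounds checks against the remaining data. A hedged standalone parser for definite-form lengths only (the indefinite form, a bare 0x80, is rejected here):

    #include <stdio.h>
    #include <stddef.h>

    /* Parse a BER length at data[*dp]; on success stores it in *out,
     * advances *dp and returns 0. Returns -1 on truncated, oversized
     * or indefinite-form input. */
    static int ber_read_length(const unsigned char *data, size_t datalen,
                               size_t *dp, size_t *out)
    {
        size_t dp_ = *dp, len, n;

        if (dp_ >= datalen)
            return -1;
        len = data[dp_++];
        if (len <= 0x7f) {          /* short form: the octet itself */
            *out = len;
        } else if (len == 0x80) {
            return -1;              /* indefinite form: not handled */
        } else {
            n = len - 0x80;         /* long form: n length octets */
            if (n > sizeof(len) - 1 || n > datalen - dp_)
                return -1;
            for (len = 0; n > 0; n--)
                len = (len << 8) | data[dp_++];
            *out = len;
        }
        *dp = dp_;
        return 0;
    }

    int main(void)
    {
        const unsigned char hdr[] = { 0x82, 0x01, 0x30 }; /* len 0x130 */
        size_t dp = 0, len;

        if (ber_read_length(hdr, sizeof(hdr), &dp, &len) == 0)
            printf("len = %zu, consumed %zu octets\n", len, dp); /* 304, 3 */
        return 0;
    }
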
D | rbtree.c |
    468  struct rb_node *n;  in rb_first() local
    470  n = root->rb_node;  in rb_first()
    471  if (!n)  in rb_first()
    473  while (n->rb_left)  in rb_first()
    474  n = n->rb_left;  in rb_first()
    475  return n;  in rb_first()
    481  struct rb_node *n;  in rb_last() local
    483  n = root->rb_node;  in rb_last()
    484  if (!n)  in rb_last()
    486  while (n->rb_right)  in rb_last()
    [all …]

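rb_first() and rb_last() ignore the red-black colouring entirely: the minimum is simply the leftmost node and the maximum the rightmost. A sketch of the same walk over a plain binary-tree node:

    #include <stdio.h>
    #include <stddef.h>

    struct node { int key; struct node *left, *right; };

    /* Smallest key = leftmost node; NULL on an empty tree. */
    static struct node *tree_first(struct node *root)
    {
        struct node *n = root;

        if (!n)
            return NULL;
        while (n->left)
            n = n->left;
        return n;
    }

    int main(void)
    {
        struct node a = { 1, NULL, NULL };
        struct node b = { 2, &a, NULL };
        struct node c = { 3, &b, NULL };

        printf("first = %d\n", tree_first(&c)->key);    /* 1 */
        return 0;
    }
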
D | bch.c |
    86  #define GF_N(_p) ((_p)->n)
    268  const unsigned int n = GF_N(bch);  in modulo() local
    269  while (v >= n) {  in modulo()
    270  v -= n;  in modulo()
    271  v = (v & n) + (v >> GF_M(bch));  in modulo()
    281  const unsigned int n = GF_N(bch);  in mod_s() local
    282  return (v < n) ? v : v-n;  in mod_s()
    390  const unsigned int n = GF_N(bch);  in compute_error_locator_polynomial() local
    411  tmp = a_log(bch, d)+n-a_log(bch, pd);  in compute_error_locator_polynomial()
    558  int n = 0;  in find_poly_deg1_roots() local
    [all …]

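GF_N(bch) is 2^m - 1, and modulo() exploits 2^m ≡ 1 (mod 2^m - 1): the bits above position m can be folded back into the low bits until the value is in range. A standalone sketch with m = 8:

    #include <stdio.h>

    #define GF_M 8
    #define GF_N ((1u << GF_M) - 1)     /* 255 */

    /* Reduce v modulo 2^m - 1 by folding: since 2^m = 1 (mod 2^m - 1),
     * the high bits can be added back into the low bits. */
    static unsigned int mod_n(unsigned int v)
    {
        while (v >= GF_N) {
            v -= GF_N;
            v = (v & GF_N) + (v >> GF_M);
        }
        return v;
    }

    int main(void)
    {
        printf("%u %u %u\n", mod_n(255), mod_n(256), mod_n(1000));
        /* 0 1 235, i.e. 255 mod 255, 256 mod 255, 1000 mod 255 */
        return 0;
    }
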
D | plist.c |
    33  struct list_head *n)  in plist_check_prev_next() argument
    35  WARN(n->prev != p || p->next != n,  in plist_check_prev_next()
    41  n, n->next, n->prev);  in plist_check_prev_next()

/lib/zlib_deflate/

D | deftree.c |
    228  int n; /* iterates over tree elements */  in tr_static_init() local
    242  for (n = 0; n < (1<<extra_lbits[code]); n++) {  in tr_static_init()
    257  for (n = 0; n < (1<<extra_dbits[code]); n++) {  in tr_static_init()
    265  for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {  in tr_static_init()
    273  n = 0;  in tr_static_init()
    274  while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;  in tr_static_init()
    275  while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;  in tr_static_init()
    276  while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;  in tr_static_init()
    277  while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;  in tr_static_init()
    285  for (n = 0; n < D_CODES; n++) {  in tr_static_init()
    [all …]

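Lines 273-277 assign the fixed deflate literal/length code lengths from RFC 1951 section 3.2.6 (8 bits for symbols 0-143, 9 for 144-255, 7 for 256-279, 8 for 280-287) while building the per-length census bl_count[]. A hedged sketch of just that step, with the tree struct flattened to a length array:

    #include <stdio.h>

    #define L_CODES 288
    #define MAX_BITS 15

    /* Fixed deflate literal/length code lengths plus the bl_count[]
     * census that deflate later turns into actual codes. */
    static void static_ltree_lengths(unsigned char len[L_CODES],
                                     unsigned bl_count[MAX_BITS + 1])
    {
        int n = 0, i;

        for (i = 0; i <= MAX_BITS; i++)
            bl_count[i] = 0;
        while (n <= 143) len[n++] = 8, bl_count[8]++;
        while (n <= 255) len[n++] = 9, bl_count[9]++;
        while (n <= 279) len[n++] = 7, bl_count[7]++;
        while (n <= 287) len[n++] = 8, bl_count[8]++;
    }

    int main(void)
    {
        unsigned char len[L_CODES];
        unsigned bl_count[MAX_BITS + 1];

        static_ltree_lengths(len, bl_count);
        printf("7-bit:%u 8-bit:%u 9-bit:%u codes\n",
               bl_count[7], bl_count[8], bl_count[9]); /* 24 152 112 */
        return 0;
    }
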
/lib/mpi/

D | mpi-bit.c |
    41  unsigned n;  in mpi_get_nbits() local
    48  n = count_leading_zeros(alimb);  in mpi_get_nbits()
    50  n = BITS_PER_MPI_LIMB;  in mpi_get_nbits()
    51  n = BITS_PER_MPI_LIMB - n + (a->nlimbs - 1) * BITS_PER_MPI_LIMB;  in mpi_get_nbits()
    53  n = 0;  in mpi_get_nbits()
    54  return n;  in mpi_get_nbits()

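mpi_get_nbits() only needs the most significant limb: its width is BITS_PER_MPI_LIMB minus the leading zeros, plus one full limb per remaining limb. A sketch using GCC's __builtin_clzl as a stand-in for the kernel's count_leading_zeros():

    #include <stdio.h>
    #include <limits.h>

    #define BITS_PER_LIMB (sizeof(unsigned long) * CHAR_BIT)

    /* Significant bits in a big integer stored as `nlimbs` unsigned
     * longs, least significant limb first. Zero limbs on top are
     * skipped (the kernel assumes a normalized value instead). */
    static unsigned mpi_nbits(const unsigned long *limbs, unsigned nlimbs)
    {
        unsigned long top;
        unsigned n;

        while (nlimbs && limbs[nlimbs - 1] == 0)
            nlimbs--;
        if (!nlimbs)
            return 0;
        top = limbs[nlimbs - 1];
        n = BITS_PER_LIMB - (unsigned)__builtin_clzl(top);
        return n + (nlimbs - 1) * BITS_PER_LIMB;
    }

    int main(void)
    {
        unsigned long v[2] = { 0, 5 };      /* 5 * 2^64 on LP64 */

        printf("%u\n", mpi_nbits(v, 2));    /* 3 + 64 = 67 on LP64 */
        return 0;
    }
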
D | mpi-internal.h |
    56  #define MPN_COPY(d, s, n) \  argument
    59  for (_i = 0; _i < (n); _i++) \
    63  #define MPN_COPY_DECR(d, s, n) \  argument
    66  for (_i = (n)-1; _i >= 0; _i--) \
    71  #define MPN_ZERO(d, n) \  argument
    74  for (_i = 0; _i < (n); _i++) \
    78  #define MPN_NORMALIZE(d, n) \  argument
    80  while ((n) > 0) { \
    81  if ((d)[(n)-1]) \
    83  (n)--; \

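MPN_NORMALIZE shrinks the limb count past high zero limbs so it reflects the true magnitude. A function form of the macro:

    #include <stdio.h>

    /* Function form of MPN_NORMALIZE: shrink n past trailing zero
     * limbs (the most significant ones, limbs stored little-endian). */
    static unsigned mpn_normalize(const unsigned long *d, unsigned n)
    {
        while (n > 0 && d[n - 1] == 0)
            n--;
        return n;
    }

    int main(void)
    {
        unsigned long d[4] = { 7, 9, 0, 0 };

        printf("%u\n", mpn_normalize(d, 4));    /* 2 */
        return 0;
    }
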
D | mpicoder.c |
    154  unsigned int n = mpi_get_size(a);  in mpi_read_buffer() local
    165  if (buf_len < n - lzeros) {  in mpi_read_buffer()
    166  *nbytes = n - lzeros;  in mpi_read_buffer()
    171  *nbytes = n - lzeros;  in mpi_read_buffer()
    206  unsigned int n;  in mpi_get_buffer() local
    212  n = mpi_get_size(a);  in mpi_get_buffer()
    214  if (!n)  in mpi_get_buffer()
    215  n++;  in mpi_get_buffer()
    217  buf = kmalloc(n, GFP_KERNEL);  in mpi_get_buffer()
    222  ret = mpi_read_buffer(a, buf, n, nbytes, sign);  in mpi_get_buffer()
    [all …]

/lib/842/

D | 842_decompress.c |
    65  static int next_bits(struct sw842_param *p, u64 *d, u8 n);
    67  static int __split_next_bits(struct sw842_param *p, u64 *d, u8 n, u8 s)  in __split_next_bits() argument
    72  if (n <= s) {  in __split_next_bits()
    73  pr_debug("split_next_bits invalid n %u s %u\n", n, s);  in __split_next_bits()
    77  ret = next_bits(p, &tmp, n - s);  in __split_next_bits()
    87  static int next_bits(struct sw842_param *p, u64 *d, u8 n)  in next_bits() argument
    89  u8 *in = p->in, b = p->bit, bits = b + n;  in next_bits()
    91  if (n > 64) {  in next_bits()
    92  pr_debug("next_bits invalid n %u\n", n);  in next_bits()
    100  return __split_next_bits(p, d, n, 32);  in next_bits()
    [all …]

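next_bits() rejects reads wider than its 64-bit accumulator and, past 32 bits, splits the read in two via __split_next_bits(): the high part first, then the low s bits. A hedged sketch of a byte-backed MSB-first bit reader with the same split; struct bitstream is illustrative, the (hi << s) | lo recombination is my assumption of the layout, and no bounds checking is done:

    #include <stdio.h>
    #include <stdint.h>

    /* Illustrative bitstream; the kernel's sw842_param carries the
     * same in-pointer/bit-offset pair. */
    struct bitstream {
        const uint8_t *in;  /* next input byte */
        uint8_t bit;        /* bits already consumed in *in (0..7) */
    };

    /* Read n <= 32 bits, MSB first, into *d. */
    static int next_bits(struct bitstream *p, uint64_t *d, uint8_t n)
    {
        uint64_t v = 0;

        if (n > 32)
            return -1;
        while (n--) {
            v = (v << 1) | ((*p->in >> (7 - p->bit)) & 1);
            if (++p->bit == 8) {    /* byte exhausted: advance */
                p->bit = 0;
                p->in++;
            }
        }
        *d = v;
        return 0;
    }

    /* Split a wide read in two: high part, then the low s bits. */
    static int split_next_bits(struct bitstream *p, uint64_t *d,
                               uint8_t n, uint8_t s)
    {
        uint64_t hi, lo;

        if (n <= s || next_bits(p, &hi, n - s) || next_bits(p, &lo, s))
            return -1;
        *d = (hi << s) | lo;
        return 0;
    }

    int main(void)
    {
        uint8_t data[5] = { 0xDE, 0xAD, 0xBE, 0xEF, 0x01 };
        struct bitstream bs = { data, 0 };
        uint64_t v;

        if (!split_next_bits(&bs, &v, 40, 32))
            printf("0x%010llx\n", (unsigned long long)v); /* 0xdeadbeef01 */
        return 0;
    }
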
D | 842_compress.c |
    122  #define find_index(p, b, n) ({ \  argument
    124  p->index##b[n] = INDEX_NOT_FOUND; \
    125  hash_for_each_possible(p->htable##b, _n, node, p->data##b[n]) { \
    126  if (p->data##b[n] == _n->data) { \
    127  p->index##b[n] = _n->index; \
    131  p->index##b[n] >= 0; \
    134  #define check_index(p, b, n) \  argument
    135  ((p)->index##b[n] == INDEX_NOT_CHECKED \
    136  ? find_index(p, b, n) \
    137  : (p)->index##b[n] >= 0)
    [all …]

/lib/math/

D | div64.c |
    29  uint32_t __attribute__((weak)) __div64_32(uint64_t *n, uint32_t base)  in __div64_32() argument
    31  uint64_t rem = *n;  in __div64_32()
    58  *n = res;  in __div64_32()
    112  int n = fls(high);  in div64_u64_rem() local
    113  quot = div_u64(dividend >> n, divisor >> n);  in div64_u64_rem()
    150  int n = fls(high);  in div64_u64() local
    151  quot = div_u64(dividend >> n, divisor >> n);  in div64_u64()

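div64_u64() (lines 150-151) avoids a full 64/64 division on 32-bit hosts: shift both operands right by n = fls(high 32 bits of the divisor) so the divisor fits in 32 bits, divide, and then correct the estimate, which can be at most one too large. A hedged sketch; the plain `/` stands in for the kernel's div_u64():

    #include <stdio.h>
    #include <stdint.h>

    /* fls(): index of the highest set bit, 1-based; 0 for x == 0. */
    static int fls32(uint32_t x)
    { return x ? 32 - __builtin_clz(x) : 0; }

    /* 64/64 division built from a 64/32 divide. After the shifted
     * divide and the decrement, quot is <= the true quotient, and
     * short of it by at most one, so a single correction suffices. */
    static uint64_t div64_u64_sketch(uint64_t dividend, uint64_t divisor)
    {
        uint32_t high = (uint32_t)(divisor >> 32);
        uint64_t quot;

        if (high == 0) {
            quot = dividend / (uint32_t)divisor;
        } else {
            int n = fls32(high);            /* divisor >> n fits 32 bits */

            quot = (dividend >> n) / (uint32_t)(divisor >> n);
            if (quot != 0)                  /* estimate may be 1 too big */
                quot--;
            if (dividend - quot * divisor >= divisor)
                quot++;
        }
        return quot;
    }

    int main(void)
    {
        uint64_t a = 0xDEADBEEFCAFEF00DULL, b = 0x100000001ULL;

        printf("%d\n", div64_u64_sketch(a, b) == a / b);    /* 1 */
        return 0;
    }
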
D | rational.c |
    36  unsigned long n, d, n0, d0, n1, d1;  in rational_best_approximation() local
    37  n = given_numerator;  in rational_best_approximation()
    51  a = n / d;  in rational_best_approximation()
    52  d = n % d;  in rational_best_approximation()
    53  n = t;  in rational_best_approximation()

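rational_best_approximation() runs the continued-fraction (Euclidean) expansion at lines 51-53, keeping the last two convergents and stopping before a denominator bound is exceeded. A hedged sketch of the core loop, without the kernel's semi-convergent refinement, so it returns the last full convergent (the kernel can do better); assumes den >= 1 and max_d >= 1:

    #include <stdio.h>

    /* Approximate num/den with denominator <= max_d by walking the
     * continued-fraction convergents: (n0/d0) is two steps back,
     * (n1/d1) the previous one. */
    static void best_approx(unsigned long num, unsigned long den,
                            unsigned long max_d,
                            unsigned long *out_n, unsigned long *out_d)
    {
        unsigned long n = num, d = den;
        unsigned long n0 = 0, d0 = 1, n1 = 1, d1 = 0;

        while (d) {
            unsigned long a = n / d;            /* next CF term */
            unsigned long t = n % d;            /* Euclid step */

            n = d;
            d = t;
            if (d1 * a + d0 > max_d)            /* next one too big */
                break;
            t = n0 + a * n1; n0 = n1; n1 = t;   /* advance numerators */
            t = d0 + a * d1; d0 = d1; d1 = t;   /* and denominators */
        }
        *out_n = n1;
        *out_d = d1;
    }

    int main(void)
    {
        unsigned long bn, bd;

        best_approx(3141593, 1000000, 100, &bn, &bd);
        printf("%lu/%lu\n", bn, bd);
        /* 22/7; with semi-convergents the kernel would find 311/99 */
        return 0;
    }
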