/lib/ |
D | klist.c |
     94  static void add_head(struct klist *k, struct klist_node *n)    in add_head() argument
     97      list_add(&n->n_node, &k->k_list);    in add_head()
    101  static void add_tail(struct klist *k, struct klist_node *n)    in add_tail() argument
    104      list_add_tail(&n->n_node, &k->k_list);    in add_tail()
    108  static void klist_node_init(struct klist *k, struct klist_node *n)    in klist_node_init() argument
    110      INIT_LIST_HEAD(&n->n_node);    in klist_node_init()
    111      kref_init(&n->n_ref);    in klist_node_init()
    112      knode_set_klist(n, k);    in klist_node_init()
    114      k->get(n);    in klist_node_init()
    122  void klist_add_head(struct klist_node *n, struct klist *k)    in klist_add_head() argument
    [all …]
|
D | sort.c |
     58  static void swap_words_32(void *a, void *b, size_t n)    in swap_words_32() argument
     61      u32 t = *(u32 *)(a + (n -= 4));    in swap_words_32()
     62      *(u32 *)(a + n) = *(u32 *)(b + n);    in swap_words_32()
     63      *(u32 *)(b + n) = t;    in swap_words_32()
     64      } while (n);    in swap_words_32()
     83  static void swap_words_64(void *a, void *b, size_t n)    in swap_words_64() argument
     87      u64 t = *(u64 *)(a + (n -= 8));    in swap_words_64()
     88      *(u64 *)(a + n) = *(u64 *)(b + n);    in swap_words_64()
     89      *(u64 *)(b + n) = t;    in swap_words_64()
     92      u32 t = *(u32 *)(a + (n -= 4));    in swap_words_64()
    [all …]
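The swap helpers above exchange two objects in place, one machine word at a time, counting n down to zero. A minimal standalone sketch of the same idea (stdint types and the _sketch name are mine, not the kernel's):

    #include <stdint.h>
    #include <stddef.h>

    /* Swap two n-byte regions 4 bytes at a time, walking n down to 0.
     * Assumes n is a non-zero multiple of 4 and both pointers are
     * suitably aligned, as the caller guarantees in the original. */
    static void swap_words_32_sketch(void *a, void *b, size_t n)
    {
        do {
            uint32_t t = *(uint32_t *)((char *)a + (n -= 4));
            *(uint32_t *)((char *)a + n) = *(uint32_t *)((char *)b + n);
            *(uint32_t *)((char *)b + n) = t;
        } while (n);
    }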
|
D | generic-radix-tree.c |
     57  struct genradix_node *n = genradix_root_to_node(r);    in __genradix_ptr() local
     64      if (!n)    in __genradix_ptr()
     71      n = n->children[offset >> genradix_depth_shift(level)];    in __genradix_ptr()
     75      return &n->data[offset];    in __genradix_ptr()
    108  struct genradix_node *n, *new_node = NULL;    in __genradix_ptr_alloc() local
    115      n = genradix_root_to_node(r);    in __genradix_ptr_alloc()
    118      if (n && ilog2(offset) < genradix_depth_shift(level))    in __genradix_ptr_alloc()
    127      new_node->children[0] = n;    in __genradix_ptr_alloc()
    129      ((unsigned long) new_node | (n ? level + 1 : 0)));    in __genradix_ptr_alloc()
    139      &n->children[offset >> genradix_depth_shift(level)];    in __genradix_ptr_alloc()
    [all …]
|
D | usercopy.c |
     11  unsigned long _copy_from_user(void *to, const void __user *from, unsigned long n)    in _copy_from_user() argument
     13      unsigned long res = n;    in _copy_from_user()
     15      if (!should_fail_usercopy() && likely(access_ok(from, n))) {    in _copy_from_user()
     22      instrument_copy_from_user_before(to, from, n);    in _copy_from_user()
     23      res = raw_copy_from_user(to, from, n);    in _copy_from_user()
     24      instrument_copy_from_user_after(to, from, n, res);    in _copy_from_user()
     27      memset(to + (n - res), 0, res);    in _copy_from_user()
     34  unsigned long _copy_to_user(void __user *to, const void *from, unsigned long n)    in _copy_to_user() argument
     38      return n;    in _copy_to_user()
     39      if (likely(access_ok(to, n))) {    in _copy_to_user()
    [all …]
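The _copy_from_user() hits show the return/cleanup convention: the raw copy reports how many bytes it could not fetch, and that tail is zero-filled in the destination (the memset at line 27 above). A toy sketch of just that convention, with copy_some() as a hypothetical stand-in for a primitive that may stop early:

    #include <string.h>
    #include <stddef.h>

    /* Toy stand-in for a copy primitive that may fault partway through:
     * pretend it always stops after LIMIT bytes and return the number of
     * bytes left uncopied (0 means full success). */
    enum { LIMIT = 8 };

    static size_t copy_some(void *dst, const void *src, size_t n)
    {
        size_t done = n < LIMIT ? n : LIMIT;

        memcpy(dst, src, done);
        return n - done;
    }

    /* Whatever could not be copied in is zero-filled in the destination,
     * so the caller never sees stale buffer contents in the uncopied tail. */
    static size_t copy_with_zero_tail(void *to, const void *from, size_t n)
    {
        size_t res = copy_some(to, from, n);    /* bytes left uncopied */

        if (res)
            memset((char *)to + (n - res), 0, res);
        return res;
    }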
|
D | group_cpus.c |
     88  int n, nodes = 0;    in get_nodes_in_cpumask() local
     91      for_each_node(n) {    in get_nodes_in_cpumask()
     92      if (cpumask_intersects(mask, node_to_cpumask[n])) {    in get_nodes_in_cpumask()
     93      node_set(n, *nodemsk);    in get_nodes_in_cpumask()
    137  unsigned n, remaining_ncpus = 0;    in alloc_nodes_groups() local
    139      for (n = 0; n < nr_node_ids; n++) {    in alloc_nodes_groups()
    140      node_groups[n].id = n;    in alloc_nodes_groups()
    141      node_groups[n].ncpus = UINT_MAX;    in alloc_nodes_groups()
    144      for_each_node_mask(n, nodemsk) {    in alloc_nodes_groups()
    147      cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]);    in alloc_nodes_groups()
    [all …]
|
D | dhry_run.c |
     35  int i, n;    in dhry_benchmark() local
     38      n = dhry(iterations);    in dhry_benchmark()
     43      n = dhry(i);    in dhry_benchmark()
     44      if (n != -EAGAIN)    in dhry_benchmark()
     50      if (n >= 0)    in dhry_benchmark()
     52      n, n / DHRY_VAX);    in dhry_benchmark()
     53      else if (n == -EAGAIN)    in dhry_benchmark()
     56      pr_err("Dhrystone benchmark failed error %pe\n", ERR_PTR(n));    in dhry_benchmark()
|
D | iov_iter.c |
     18  #define iterate_buf(i, n, base, len, off, __p, STEP) { \    argument
     20      len = n; \
     24      n = len; \
     28  #define iterate_iovec(i, n, base, len, off, __p, STEP) { \    argument
     32      len = min(n, __p->iov_len - skip); \
     38      n -= len; \
     44      } while (n); \
     46      n = off; \
     49  #define iterate_bvec(i, n, base, len, off, p, STEP) { \    argument
     52      while (n) { \
    [all …]
|
D | inflate.c |
    143  ush n; /* literal, length base, or distance base */    member
    234  #define NEEDBITS(n) {while(k<(n)){b|=((ulg)NEXTBYTE())<<k;k+=8;}}    argument
    235  #define DUMPBITS(n) {b>>=(n);k-=(n);}    argument
    324  unsigned n, /* number of codes (assumed <= N_MAX) */    in huft_build() argument
    375      p = b; i = n;    in huft_build()
    377      Tracecv(*p, (stderr, (n-i >= ' ' && n-i <= '~' ? "%c %d\n" : "0x%x %d\n"),    in huft_build()
    378      n-i, *p));    in huft_build()
    382      if (c[0] == n) /* null input--all zero length codes */    in huft_build()
    438      } while (++i < n);    in huft_build()
    439      n = x[g]; /* set n to length of v */    in huft_build()
    [all …]
|
D | oid_registry.c |
    134  unsigned char n;    in sprint_oid() local
    141      n = *v++;    in sprint_oid()
    142      ret = count = snprintf(buffer, bufsize, "%u.%u", n / 40, n % 40);    in sprint_oid()
    149      n = *v++;    in sprint_oid()
    150      if (!(n & 0x80)) {    in sprint_oid()
    151      num = n;    in sprint_oid()
    153      num = n & 0x7f;    in sprint_oid()
    157      n = *v++;    in sprint_oid()
    159      num |= n & 0x7f;    in sprint_oid()
    160      } while (n & 0x80);    in sprint_oid()
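The sprint_oid() hits show the BER OID byte layout: the first byte packs the first two components as 40*X+Y, and each later component is base-128 with the top bit marking "more bytes follow". An illustrative standalone decoder of that layout (function name mine; no overflow or truncation checks, unlike the original):

    #include <stdio.h>
    #include <stddef.h>

    static void print_oid_sketch(const unsigned char *v, size_t len)
    {
        size_t i = 0;
        unsigned char n = v[i++];
        unsigned long num;

        printf("%u.%u", n / 40, n % 40);     /* first byte: two components */
        while (i < len) {
            num = 0;
            do {                              /* base-128, MSB = continuation */
                n = v[i++];
                num = (num << 7) | (n & 0x7f);
            } while ((n & 0x80) && i < len);
            printf(".%lu", num);
        }
        printf("\n");
    }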
|
D | kfifo.c |
    298  int n;    in setup_sgl_buf() local
    309      n = 0;    in setup_sgl_buf()
    323      if (++n == nents || sgl == NULL)    in setup_sgl_buf()
    324      return n;    in setup_sgl_buf()
    331      return n + 1;    in setup_sgl_buf()
    340  unsigned int n;    in setup_sgl() local
    350      n = setup_sgl_buf(sgl, fifo->data + off, nents, l);    in setup_sgl()
    351      n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l);    in setup_sgl()
    353      return n;    in setup_sgl()
    421  static void __kfifo_poke_n(struct __kfifo *fifo, unsigned int n, size_t recsize)    in __kfifo_poke_n() argument
    [all …]
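The setup_sgl() hits (lines 350-351) show the wrap-around split: a range of len bytes starting at offset off in the ring is at most two contiguous pieces, one up to the end of the buffer and one from the start. A sketch of the same split with plain memcpy (the original builds scatterlist entries instead of copying; names are mine):

    #include <string.h>
    #include <stddef.h>

    /* Copy len bytes out of a ring buffer of `size` bytes starting at `off`.
     * Assumes len <= size and off < size. */
    static void ring_copy_out(void *dst, const void *ring, size_t size,
                              size_t off, size_t len)
    {
        size_t l = len < size - off ? len : size - off;  /* first piece */

        memcpy(dst, (const char *)ring + off, l);
        memcpy((char *)dst + l, ring, len - l);          /* wrapped piece */
    }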
|
D | bootconfig.c |
    748  static int __init xbc_parse_key(char **k, char *n)    in xbc_parse_key() argument
    760      *k = n;    in xbc_parse_key()
    765  static int __init xbc_open_brace(char **k, char *n)    in xbc_open_brace() argument
    772      *k = n;    in xbc_open_brace()
    774      return __xbc_open_brace(n - 1);    in xbc_open_brace()
    777  static int __init xbc_close_brace(char **k, char *n)    in xbc_close_brace() argument
    781      ret = xbc_parse_key(k, n);    in xbc_close_brace()
    786      return __xbc_close_brace(n - 1);    in xbc_close_brace()
    792  struct xbc_node *n, *m;    in xbc_verify_tree() local
    796      n = &xbc_nodes[open_brace[brace_index]];    in xbc_verify_tree()
    [all …]
|
D | globtest.c |
    132  unsigned n = 0;    in glob_init() local
    150      n++;    in glob_init()
    153      n -= successes;    in glob_init()
    154      printk(message, successes, n);    in glob_init()
    157      return n ? -ECANCELED : 0;    in glob_init()
|
/lib/zlib_deflate/ |
D | deftree.c |
    174  int n; /* iterates over tree elements */    in tr_static_init() local
    188      for (n = 0; n < (1<<extra_lbits[code]); n++) {    in tr_static_init()
    203      for (n = 0; n < (1<<extra_dbits[code]); n++) {    in tr_static_init()
    211      for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {    in tr_static_init()
    219      n = 0;    in tr_static_init()
    220      while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;    in tr_static_init()
    221      while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;    in tr_static_init()
    222      while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;    in tr_static_init()
    223      while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;    in tr_static_init()
    231      for (n = 0; n < D_CODES; n++) {    in tr_static_init()
    [all …]
|
/lib/crypto/mpi/ |
D | mpi-bit.c |
     42  unsigned n;    in mpi_get_nbits() local
     49      n = count_leading_zeros(alimb);    in mpi_get_nbits()
     51      n = BITS_PER_MPI_LIMB;    in mpi_get_nbits()
     52      n = BITS_PER_MPI_LIMB - n + (a->nlimbs - 1) * BITS_PER_MPI_LIMB;    in mpi_get_nbits()
     54      n = 0;    in mpi_get_nbits()
     55      return n;    in mpi_get_nbits()
     62  int mpi_test_bit(MPI a, unsigned int n)    in mpi_test_bit() argument
     67      limbno = n / BITS_PER_MPI_LIMB;    in mpi_test_bit()
     68      bitno = n % BITS_PER_MPI_LIMB;    in mpi_test_bit()
     80  void mpi_set_bit(MPI a, unsigned int n)    in mpi_set_bit() argument
    [all …]
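The mpi_get_nbits() hits compute the bit length of a multi-precision integer from its limb array: count the leading zeros of the top limb and add one full limb width per limb below it. A minimal sketch of that calculation, assuming limbs stored least-significant first and using the GCC/Clang builtin in place of the kernel's count_leading_zeros():

    #include <stddef.h>

    static unsigned mpi_nbits_sketch(const unsigned long *limbs, size_t nlimbs)
    {
        const unsigned limb_bits = 8 * sizeof(unsigned long);

        while (nlimbs && limbs[nlimbs - 1] == 0)   /* skip zero top limbs */
            nlimbs--;
        if (!nlimbs)
            return 0;
        return limb_bits - __builtin_clzl(limbs[nlimbs - 1])
               + (nlimbs - 1) * limb_bits;
    }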
|
D | mpicoder.c |
    261  unsigned int n = mpi_get_size(a);    in mpi_read_buffer() local
    272      if (buf_len < n - lzeros) {    in mpi_read_buffer()
    273      *nbytes = n - lzeros;    in mpi_read_buffer()
    278      *nbytes = n - lzeros;    in mpi_read_buffer()
    313  unsigned int n;    in mpi_get_buffer() local
    319      n = mpi_get_size(a);    in mpi_get_buffer()
    321      if (!n)    in mpi_get_buffer()
    322      n++;    in mpi_get_buffer()
    324      buf = kmalloc(n, GFP_KERNEL);    in mpi_get_buffer()
    329      ret = mpi_read_buffer(a, buf, n, nbytes, sign);    in mpi_get_buffer()
    [all …]
|
D | mpi-internal.h |
     62  #define MPN_COPY(d, s, n) \    argument
     65      for (_i = 0; _i < (n); _i++) \
     69  #define MPN_COPY_INCR(d, s, n) \    argument
     72      for (_i = 0; _i < (n); _i++) \
     77  #define MPN_COPY_DECR(d, s, n) \    argument
     80      for (_i = (n)-1; _i >= 0; _i--) \
     85  #define MPN_ZERO(d, n) \    argument
     88      for (_i = 0; _i < (n); _i++) \
     92  #define MPN_NORMALIZE(d, n) \    argument
     94      while ((n) > 0) { \
    [all …]
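MPN_COPY_INCR and MPN_COPY_DECR exist because limb arrays may overlap: the copy has to walk away from the overlap, exactly as memmove() does. A hypothetical helper sketching that direction choice (the kernel macros leave the choice to the caller):

    #include <stddef.h>

    static void limb_move(unsigned long *d, const unsigned long *s, size_t n)
    {
        size_t i;

        if (d < s) {                     /* dest below src: walk upward */
            for (i = 0; i < n; i++)
                d[i] = s[i];
        } else if (d > s) {              /* dest above src: walk downward */
            for (i = n; i-- > 0; )
                d[i] = s[i];
        }
    }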
|
/lib/zstd/compress/ |
D | huf_compress.c |
    175  U32 n;    in HUF_writeCTable_wksp() local
    184      for (n=1; n<huffLog+1; n++)    in HUF_writeCTable_wksp()
    185      wksp->bitsToWeight[n] = (BYTE)(huffLog + 1 - n);    in HUF_writeCTable_wksp()
    186      for (n=0; n<maxSymbolValue; n++)    in HUF_writeCTable_wksp()
    187      wksp->huffWeight[n] = wksp->bitsToWeight[HUF_getNbBits(ct[n])];    in HUF_writeCTable_wksp()
    202      for (n=0; n<maxSymbolValue; n+=2)    in HUF_writeCTable_wksp()
    203      op[(n/2)+1] = (BYTE)((wksp->huffWeight[n] << 4) + wksp->huffWeight[n+1]);    in HUF_writeCTable_wksp()
    237  { U32 n, nextRankStart = 0;    in HUF_readCTable() local
    238      for (n=1; n<=tableLog; n++) {    in HUF_readCTable()
    240      nextRankStart += (rankVal[n] << (n-1));    in HUF_readCTable()
    [all …]
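Line 203 above packs two 4-bit Huffman weights into each output byte, high nibble first. A minimal sketch of that packing (names mine; the original writes at op[(n/2)+1] because op[0] holds a header byte, and it assumes every weight fits in 4 bits):

    #include <stddef.h>

    /* Pack an even number of 4-bit weights, two per output byte. */
    static void pack_weights_sketch(unsigned char *out,
                                    const unsigned char *weights, size_t count)
    {
        size_t n;

        for (n = 0; n < count; n += 2)
            out[n / 2] = (unsigned char)((weights[n] << 4) | weights[n + 1]);
    }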
|
D | zstd_ldm.c |
     69  size_t n = 0;    in ZSTD_ldm_gear_reset() local
     72      hash = (hash << 1) + ZSTD_ldm_gearTab[data[n] & 0xff]; \    in ZSTD_ldm_gear_reset()
     73      n += 1; \    in ZSTD_ldm_gear_reset()
     75      while (n + 3 < minMatchLength) {    in ZSTD_ldm_gear_reset()
     81      while (n < minMatchLength) {    in ZSTD_ldm_gear_reset()
    100  size_t n;    in ZSTD_ldm_gear_feed() local
    105      n = 0;    in ZSTD_ldm_gear_feed()
    108      hash = (hash << 1) + ZSTD_ldm_gearTab[data[n] & 0xff]; \    in ZSTD_ldm_gear_feed()
    109      n += 1; \    in ZSTD_ldm_gear_feed()
    111      splits[*numSplits] = n; \    in ZSTD_ldm_gear_feed()
    [all …]
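The gear hash updated at lines 72 and 108 is a rolling hash: each new byte shifts the running value left by one and adds a per-byte-value constant, so bytes older than 64 steps have fully shifted out of the state. A sketch of just the update step (the table here is a placeholder; zstd uses a fixed 256-entry table of random constants):

    #include <stdint.h>

    static uint64_t gear_tab[256];   /* fill with random constants in real use */

    static uint64_t gear_update(uint64_t hash, unsigned char byte)
    {
        return (hash << 1) + gear_tab[byte];
    }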
|
/lib/zstd/common/ |
D | zstd_deps.h |
     32  #define ZSTD_memcpy(d,s,n) __builtin_memcpy((d),(s),(n))    argument
     33  #define ZSTD_memmove(d,s,n) __builtin_memmove((d),(s),(n))    argument
     34  #define ZSTD_memset(d,s,n) __builtin_memset((d),(s),(n))    argument
     52  #define ZSTD_calloc(n,s) ({ (void)(n); (void)(s); NULL; })    argument
|
D | entropy_common.c |
    281  { U32 n;    in HUF_readStats_body() local
    282      for (n=0; n<oSize; n+=2) {    in HUF_readStats_body()
    283      huffWeight[n] = ip[n/2] >> 4;    in HUF_readStats_body()
    284      huffWeight[n+1] = ip[n/2] & 15;    in HUF_readStats_body()
    296  { U32 n; for (n=0; n<oSize; n++) {    in HUF_readStats_body() local
    297      if (huffWeight[n] > HUF_TABLELOG_MAX) return ERROR(corruption_detected);    in HUF_readStats_body()
    298      rankStats[huffWeight[n]]++;    in HUF_readStats_body()
    299      weightTotal += (1 << huffWeight[n]) >> 1;    in HUF_readStats_body()
|
/lib/842/ |
D | 842_decompress.c |
     65  static int next_bits(struct sw842_param *p, u64 *d, u8 n);
     67  static int __split_next_bits(struct sw842_param *p, u64 *d, u8 n, u8 s)    in __split_next_bits() argument
     72      if (n <= s) {    in __split_next_bits()
     73      pr_debug("split_next_bits invalid n %u s %u\n", n, s);    in __split_next_bits()
     77      ret = next_bits(p, &tmp, n - s);    in __split_next_bits()
     87  static int next_bits(struct sw842_param *p, u64 *d, u8 n)    in next_bits() argument
     89      u8 *in = p->in, b = p->bit, bits = b + n;    in next_bits()
     91      if (n > 64) {    in next_bits()
     92      pr_debug("next_bits invalid n %u\n", n);    in next_bits()
    100      return __split_next_bits(p, d, n, 32);    in next_bits()
    [all …]
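next_bits() pulls the next n bits, most-significant first, from the input stream, with p->bit tracking how far into the current byte the reader is; larger requests are split across word boundaries. A simplified byte-at-a-time bit reader illustrating the same semantics (struct and function names are mine; the real code reads whole 16/32/64-bit words for speed):

    #include <stdint.h>

    struct bitstream {
        const unsigned char *in;
        unsigned bit;            /* bits already consumed from *in, 0..7 */
    };

    /* Read the next n (<= 64) bits, MSB-first. */
    static uint64_t next_bits_sketch(struct bitstream *p, unsigned n)
    {
        uint64_t d = 0;

        while (n) {
            unsigned avail = 8 - p->bit;            /* bits left in this byte */
            unsigned take = n < avail ? n : avail;
            unsigned shift = avail - take;

            d = (d << take) | ((*p->in >> shift) & ((1u << take) - 1));
            p->bit += take;
            if (p->bit == 8) {
                p->in++;
                p->bit = 0;
            }
            n -= take;
        }
        return d;
    }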
|
D | 842_compress.c |
    122  #define find_index(p, b, n) ({ \    argument
    124      p->index##b[n] = INDEX_NOT_FOUND; \
    125      hash_for_each_possible(p->htable##b, _n, node, p->data##b[n]) { \
    126      if (p->data##b[n] == _n->data) { \
    127      p->index##b[n] = _n->index; \
    131      p->index##b[n] >= 0; \
    134  #define check_index(p, b, n) \    argument
    135      ((p)->index##b[n] == INDEX_NOT_CHECKED \
    136      ? find_index(p, b, n) \
    137      : (p)->index##b[n] >= 0)
    [all …]
|
/lib/math/ |
D | div64.c |
     31  uint32_t __attribute__((weak)) __div64_32(uint64_t *n, uint32_t base)    in __div64_32() argument
     33      uint64_t rem = *n;    in __div64_32()
     60      *n = res;    in __div64_32()
    108      int n = fls(high);    in div64_u64_rem() local
    109      quot = div_u64(dividend >> n, divisor >> n);    in div64_u64_rem()
    146      int n = fls(high);    in div64_u64() local
    147      quot = div_u64(dividend >> n, divisor >> n);    in div64_u64()
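Lines 108-109 and 146-147 show the estimate-and-correct trick used when the divisor needs more than 32 bits: shift both operands right by the bit width of the divisor's high word, do a cheaper division, then fix the estimate against the real divisor. A hosted-C sketch of the same idea (a hosted compiler could of course just use '/'; the kernel helpers exist for 32-bit targets without native 64/64 division):

    #include <stdint.h>

    static uint64_t div64_u64_sketch(uint64_t dividend, uint64_t divisor,
                                     uint64_t *remainder)
    {
        uint32_t high = divisor >> 32;
        uint64_t quot;

        if (high == 0) {
            quot = dividend / divisor;
        } else {
            int n = 32 - __builtin_clz(high);   /* fls(high) */

            quot = (dividend >> n) / (divisor >> n);
            if (quot != 0)
                quot--;                         /* estimate may be one too high */
            if (dividend - quot * divisor >= divisor)
                quot++;                         /* ...then at most one too low */
        }
        *remainder = dividend - quot * divisor;
        return quot;
    }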
|
D | rational.c |
     51  unsigned long n, d, n0, d0, n1, d1, n2, d2;    in rational_best_approximation() local
     52      n = given_numerator;    in rational_best_approximation()
     66      a = n / d;    in rational_best_approximation()
     67      d = n % d;    in rational_best_approximation()
     68      n = dp;    in rational_best_approximation()
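rational_best_approximation() walks the continued-fraction expansion of the given ratio, building successive convergents n2/d2 until one would exceed the allowed numerator or denominator. A simplified sketch of that walk (names mine; the real code additionally checks a "semiconvergent" to return the truly closest fraction, a refinement omitted here):

    static void rational_approx_sketch(unsigned long num, unsigned long den,
                                       unsigned long max_num, unsigned long max_den,
                                       unsigned long *best_num, unsigned long *best_den)
    {
        unsigned long n = num, d = den;
        unsigned long n0 = 0, d0 = 1;      /* convergent k-2 */
        unsigned long n1 = 1, d1 = 0;      /* convergent k-1 */

        while (d) {
            unsigned long a = n / d;       /* next continued-fraction term */
            unsigned long t = n % d;
            unsigned long n2 = n0 + a * n1;
            unsigned long d2 = d0 + a * d1;

            if (n2 > max_num || d2 > max_den)
                break;                     /* next convergent would not fit */
            n = d; d = t;
            n0 = n1; d0 = d1;
            n1 = n2; d1 = d2;
        }
        if (d1 == 0) {                     /* even the integer part did not fit */
            n1 = max_num;
            d1 = 1;
        }
        *best_num = n1;
        *best_den = d1;
    }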
|
/lib/zlib_dfltcc/ |
D | dfltcc_util.h |
     75      int n    in is_bit_set() argument
     78      return bits[n / 8] & (1 << (7 - (n % 8)));    in is_bit_set()
     83      int n    in turn_bit_off() argument
     86      bits[n / 8] &= ~(1 << (7 - (n % 8)));    in turn_bit_off()
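These helpers use MSB-first bit addressing: bit 0 is the most significant bit of bits[0], bit 7 its least significant, bit 8 the MSB of bits[1], and so on. A standalone sketch of the same pair (suffixed names are mine):

    static int is_bit_set_sketch(const char *bits, int n)
    {
        return bits[n / 8] & (1 << (7 - (n % 8)));
    }

    static void turn_bit_off_sketch(char *bits, int n)
    {
        bits[n / 8] &= ~(1 << (7 - (n % 8)));
    }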
|