/lib/842/ |
D | 842_compress.c |
    109  #define get_input_data(p, o, b)                                        \
    110          be##b##_to_cpu(get_unaligned((__be##b *)((p)->in + (o))))
    112  #define init_hashtable_nodes(p, b)      do {                            \
    114          hash_init((p)->htable##b);                                      \
    115          for (_i = 0; _i < ARRAY_SIZE((p)->node##b); _i++) {             \
    116                  (p)->node##b[_i].index = _i;                            \
    117                  (p)->node##b[_i].data = 0;                              \
    118                  INIT_HLIST_NODE(&(p)->node##b[_i].node);                \
    122  #define find_index(p, b, n)     ({                                      \
    124          p->index##b[n] = INDEX_NOT_FOUND;                               \
         [all …]
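The get_input_data() macro above is an unaligned big-endian load parameterized over width (16/32/64 bits). A portable restatement of the 32-bit case, as a sketch (get_input_data32_sketch is an illustrative name, not a kernel symbol):

    #include <stddef.h>
    #include <stdint.h>

    /* Read a 32-bit big-endian value at byte offset o. Assembling the
     * word byte by byte is unaligned-safe and host-endian independent,
     * which is what get_unaligned() + be32_to_cpu() achieve in-kernel. */
    static uint32_t get_input_data32_sketch(const uint8_t *in, size_t o)
    {
            const uint8_t *p = in + o;

            return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
                   ((uint32_t)p[2] <<  8) |  (uint32_t)p[3];
    }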
|
D | 842_decompress.c |
     65  static int next_bits(struct sw842_param *p, u64 *d, u8 n);
     67  static int __split_next_bits(struct sw842_param *p, u64 *d, u8 n, u8 s)
     77          ret = next_bits(p, &tmp, n - s);
     80          ret = next_bits(p, d, s);
     87  static int next_bits(struct sw842_param *p, u64 *d, u8 n)
     89          u8 *in = p->in, b = p->bit, bits = b + n;
    100                  return __split_next_bits(p, d, n, 32);
    101          else if (p->ilen < 8 && bits > 32 && bits <= 56)
    102                  return __split_next_bits(p, d, n, 16);
    103          else if (p->ilen < 4 && bits > 16 && bits <= 24)
         [all …]
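next_bits() pulls n bits from a big-endian bitstream, splitting wide reads when too few input bytes remain. A byte-at-a-time sketch of the same idea (simplified state; the kernel instead uses word-sized loads plus __split_next_bits()):

    #include <stddef.h>
    #include <stdint.h>

    struct bitstream_sketch {
            const uint8_t *in;      /* input buffer */
            size_t byte;            /* current byte offset */
            unsigned int bit;       /* bits already consumed in that byte */
    };

    /* Extract n (<= 64) bits, MSB first, into *d. */
    static void next_bits_sketch(struct bitstream_sketch *s, uint64_t *d,
                                 unsigned int n)
    {
            uint64_t v = 0;

            while (n) {
                    unsigned int avail = 8 - s->bit;
                    unsigned int take = n < avail ? n : avail;
                    uint8_t chunk = s->in[s->byte] >> (avail - take);

                    v = (v << take) | (chunk & ((1u << take) - 1));
                    s->bit += take;
                    if (s->bit == 8) {
                            s->bit = 0;
                            s->byte++;
                    }
                    n -= take;
            }
            *d = v;
    }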
|
/lib/ |
D | xxhash.c |  (hits in xxh32())
    104          const uint8_t *p = (const uint8_t *)input;
    105          const uint8_t *b_end = p + len;
    116                          v1 = xxh32_round(v1, get_unaligned_le32(p));
    117                          p += 4;
    118                          v2 = xxh32_round(v2, get_unaligned_le32(p));
    119                          p += 4;
    120                          v3 = xxh32_round(v3, get_unaligned_le32(p));
    121                          p += 4;
    122                          v4 = xxh32_round(v4, get_unaligned_le32(p));
    123                          p += 4;
         [all …]
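The four v1..v4 accumulators each absorb one little-endian 32-bit word per 16-byte stripe. The round itself, restated standalone (the primes are the published XXH32 constants):

    #include <stdint.h>

    #define PRIME32_1 2654435761U
    #define PRIME32_2 2246822519U

    static uint32_t rotl32(uint32_t x, int r)
    {
            return (x << r) | (x >> (32 - r));
    }

    /* One xxh32 lane round: absorb, rotate, remix. */
    static uint32_t xxh32_round_sketch(uint32_t acc, uint32_t input)
    {
            acc += input * PRIME32_2;
            acc = rotl32(acc, 13);
            acc *= PRIME32_1;
            return acc;
    }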
|
D | flex_proportions.c |
     38  int fprop_global_init(struct fprop_global *p, gfp_t gfp)
     42          p->period = 0;
     44          err = percpu_counter_init(&p->events, 1, gfp);
     47          seqcount_init(&p->sequence);
     51  void fprop_global_destroy(struct fprop_global *p)
     53          percpu_counter_destroy(&p->events);
     64  bool fprop_new_period(struct fprop_global *p, int periods)
     70          events = percpu_counter_sum(&p->events);
     78          write_seqcount_begin(&p->sequence);
     82          percpu_counter_add(&p->events, -events);
         [all …]
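fprop_new_period() ages the global event counter so that older periods count exponentially less. A single-threaded sketch of that aging step, assuming the decay is one halving per elapsed period (the kernel version wraps this in a percpu counter and a seqcount; the field names here are illustrative):

    struct fprop_sketch {
            long long events;
            unsigned int period;
    };

    static int new_period_sketch(struct fprop_sketch *p, int periods)
    {
            long long events = p->events;

            if (events <= 1)
                    return 0;                       /* nothing to age */
            if (periods < 64)
                    events -= events >> periods;    /* keep events / 2^periods */
            p->events -= events;                    /* drop the aged-away part */
            p->period += periods;
            return 1;
    }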
|
D | parser.c |
     25  static int match_one(char *s, const char *p, substring_t args[])
     30          if (!p)
     35                  meta = strchr(p, '%');
     37                          return strcmp(p, s) == 0;
     39                  if (strncmp(p, s, meta-p))
     42                  s += meta - p;
     43                  p = meta + 1;
     45                  if (isdigit(*p))
     46                          len = simple_strtoul(p, (char **) &p, 10);
     47                  else if (*p == '%') {
         [all …]
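match_one() compares the literal part of the pattern up to the next '%', then parses the typed field. A reduced sketch handling only %d (the kernel also handles %s, %u, %o, %x, optional field widths, and records the match in args[]):

    #include <stdlib.h>
    #include <string.h>

    static int match_one_sketch(const char *s, const char *p, long *out)
    {
            const char *meta = strchr(p, '%');
            char *end;

            if (!meta)                              /* no wildcard: exact match */
                    return strcmp(p, s) == 0;
            if (strncmp(p, s, meta - p))            /* literal prefix must match */
                    return 0;
            s += meta - p;
            p = meta + 1;
            if (*p != 'd')
                    return 0;                       /* only %d in this sketch */
            *out = strtol(s, &end, 10);
            /* digits must be present, and the pattern tail must match */
            return end != s && strcmp(end, p + 1) == 0;
    }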
|
D | memcat_p.c |  (hits in __memcat_p())
     12          void **p = a, **new;
     16          for (nr = 0, p = a; *p; nr++, p++)
     18          for (p = b; *p; nr++, p++)
     28          for (nr--; nr >= 0; nr--, p = p == b ? &a[nr] : p - 1)
     29                  new[nr] = *p;
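__memcat_p() concatenates two NULL-terminated pointer arrays into one freshly allocated NULL-terminated array; the kernel copies backwards so a single cursor can walk both sources. A forward-copying restatement of the same result:

    #include <stdlib.h>

    static void **memcat_p_sketch(void * const *a, void * const *b)
    {
            size_t na = 0, nb = 0, i;
            void **new;

            while (a[na])
                    na++;
            while (b[nb])
                    nb++;
            new = malloc((na + nb + 1) * sizeof(*new));
            if (!new)
                    return NULL;
            for (i = 0; i < na; i++)
                    new[i] = a[i];
            for (i = 0; i < nb; i++)
                    new[na + i] = b[i];
            new[na + nb] = NULL;            /* keep the terminator */
            return new;
    }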
|
D | test_string.c |  (hits in memset16_selftest() and memset32_selftest())
     10          u16 v, *p;
     12          p = kmalloc(256 * 2 * 2, GFP_KERNEL);
     13          if (!p)
     18                          memset(p, 0xa1, 256 * 2 * sizeof(v));
     19                          memset16(p + i, 0xb1b2, j);
     21                                  v = p[k];
     37          kfree(p);
     46          u32 v, *p;
     48          p = kmalloc(256 * 2 * 4, GFP_KERNEL);
     49          if (!p)
         [all …]
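The selftest pre-fills the buffer with 0xa1 bytes, writes the 16-bit pattern at every (offset, length) pair, then reads every slot back to catch over- and under-runs. The operation under test, restated:

    #include <stddef.h>
    #include <stdint.h>

    /* memset16(): memset() for u16 values; stores v `count` times. */
    static void memset16_sketch(uint16_t *s, uint16_t v, size_t count)
    {
            while (count--)
                    *s++ = v;
    }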
|
D | bootconfig.c |  (lines 128-139 in xbc_node_match_prefix())
     37  static int __init xbc_parse_error(const char *msg, const char *p)
     40          xbc_err_pos = (int)(p - xbc_data);
    128          const char *p = xbc_node_get_data(node);
    129          int len = strlen(p);
    131          if (strncmp(*prefix, p, len))
    134          p = *prefix + len;
    135          if (*p == '.')
    136                  p++;
    137          else if (*p != '\0')
    139          *prefix = p;
         [all …]
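xbc_node_match_prefix() consumes one dotted-key component: the prefix must start with the node's data and be followed by '.' or end-of-string. Restated standalone:

    #include <stdbool.h>
    #include <string.h>

    static bool match_prefix_sketch(const char **prefix, const char *data)
    {
            size_t len = strlen(data);
            const char *p;

            if (strncmp(*prefix, data, len))
                    return false;
            p = *prefix + len;
            if (*p == '.')
                    p++;                    /* eat the separator */
            else if (*p != '\0')
                    return false;           /* partial component: no match */
            *prefix = p;                    /* advance past the component */
            return true;
    }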
|
D | earlycpio.c |  (hits in find_cpio_data())
     64          const char *p, *dptr, *nptr;
     70          p = data;
     73                  if (!*p) {
     75                          p += 4;
     86                          c = *p++;
    111                  dptr = PTR_ALIGN(p + ch[C_NAMESIZE], 4);
    114                  if (nptr > p + len || dptr < p || nptr < dptr)
    119                      !memcmp(p, path, mypathsize)) {
    127                                          p, MAX_CPIO_FILE_NAME);
    129                          strlcpy(cd.name, p + mypathsize, MAX_CPIO_FILE_NAME);
         [all …]
|
D | string_helpers.c |  (hits in unescape_space(), unescape_octal() and unescape_hex())
    135          char *p = *dst, *q = *src;
    139                  *p = '\n';
    142                  *p = '\r';
    145                  *p = '\t';
    148                  *p = '\v';
    151                  *p = '\f';
    163          char *p = *dst, *q = *src;
    174                  *p = num;
    182          char *p = *dst, *q = *src;
    198                  *p = num;
         [all …]
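Each unescape_*() helper looks at *src just past a backslash, writes the translated byte to *dst, and advances both cursors only on success. The whitespace case, restated in the same shape as the kernel helper:

    #include <stdbool.h>

    static bool unescape_space_sketch(char **src, char **dst)
    {
            char *p = *dst, *q = *src;

            switch (*q) {
            case 'n': *p = '\n'; break;
            case 'r': *p = '\r'; break;
            case 't': *p = '\t'; break;
            case 'v': *p = '\v'; break;
            case 'f': *p = '\f'; break;
            default:
                    return false;           /* not ours; try the next class */
            }
            *dst = p + 1;
            *src = q + 1;
            return true;
    }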
|
D | crc32.c |
    118          u8 *p = (u8 *)(b + 1) - 1;
    121                  DO_CRC(*++p); /* use pre increment for speed */
    124                  DO_CRC(*++p); /* use pre increment for speed */
    146  static inline u32 __pure crc32_le_generic(u32 crc, unsigned char const *p,
    153                  crc ^= *p++;
    159                          crc ^= *p++;
    167                  crc ^= *p++;
    174                          crc ^= *p++;
    179                  crc = crc32_body(crc, p, len, tab);
    186  u32 __pure __weak crc32_le(u32 crc, unsigned char const *p, size_t len)
         [all …]
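crc32_le_generic() is a table-driven unrolling of the plain bitwise CRC. The underlying computation over p[0..len), using the reflected CRC-32 polynomial:

    #include <stddef.h>
    #include <stdint.h>

    #define CRC32_POLY_LE 0xedb88320U       /* reflected CRC-32 polynomial */

    static uint32_t crc32_le_sketch(uint32_t crc, const unsigned char *p,
                                    size_t len)
    {
            int i;

            while (len--) {
                    crc ^= *p++;
                    for (i = 0; i < 8; i++)
                            crc = (crc >> 1) ^ ((crc & 1) ? CRC32_POLY_LE : 0);
            }
            return crc;
    }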
|
D | globtest.c |  (hits in glob_init())
    133          char const *p = glob_tests;
    143          while (*p) {
    144                  bool expected = *p++ & 1;
    145                  char const *pat = p;
    147                  p += strlen(p) + 1;
    148                  successes += test(pat, p, expected);
    149                  p += strlen(p) + 1;
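glob_tests is a packed table: each record is one flag byte (low bit = expected result), a NUL-terminated pattern, then a NUL-terminated test string; an empty record ends the table. A sketch of the walk, with `match` standing in for the glob matcher under test:

    #include <stdbool.h>
    #include <stdio.h>
    #include <string.h>

    static void walk_tests_sketch(const char *p,
                                  bool (*match)(const char *pat, const char *str))
    {
            while (*p) {
                    bool expected = *p++ & 1;
                    const char *pat = p;

                    p += strlen(p) + 1;             /* skip over the pattern */
                    if (match(pat, p) != expected)
                            printf("FAIL: pat=%s str=%s\n", pat, p);
                    p += strlen(p) + 1;             /* skip over the string */
            }
    }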
|
D | test_vmalloc.c |  (hits in random_size_alloc_test() and kvfree_rcu_1_arg_vmalloc_test())
    155          void *p;
    162                  p = vmalloc(n * PAGE_SIZE);
    164                  if (!p)
    167                  *((__u8 *)p) = 1;
    168                  vfree(p);
    333          struct test_kvfree_rcu *p;
    337          p = vmalloc(1 * PAGE_SIZE);
    338          if (!p)
    341          p->array[0] = 'a';
    342          kvfree_rcu(p);
         [all …]
|
D | kasprintf.c |  (hits in kvasprintf() and kasprintf())
     18          char *p;
     25          p = kmalloc_track_caller(first+1, gfp);
     26          if (!p)
     29          second = vsnprintf(p, first+1, fmt, ap);
     33          return p;
     56          char *p;
     59          p = kvasprintf(gfp, fmt, ap);
     62          return p;
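kvasprintf() is the classic two-pass vsnprintf() allocation: size the output first, allocate exactly, then format for real. The same pattern in portable C (the kernel additionally warns if the two passes disagree):

    #include <stdarg.h>
    #include <stdio.h>
    #include <stdlib.h>

    static char *kvasprintf_sketch(const char *fmt, va_list ap)
    {
            va_list aq;
            int first;
            char *p;

            va_copy(aq, ap);                        /* pass 1 consumes args */
            first = vsnprintf(NULL, 0, fmt, aq);    /* measuring pass */
            va_end(aq);
            if (first < 0)
                    return NULL;
            p = malloc(first + 1);
            if (!p)
                    return NULL;
            vsnprintf(p, first + 1, fmt, ap);       /* formatting pass */
            return p;
    }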
|
D | decompress_unlzma.c |
    148  static inline uint32_t INIT rc_is_bit_0_helper(struct rc *rc, uint16_t *p)
    151          rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
    154  static inline int INIT rc_is_bit_0(struct rc *rc, uint16_t *p)
    156          uint32_t t = rc_is_bit_0_helper(rc, p);
    161  static inline void INIT rc_update_bit_0(struct rc *rc, uint16_t *p)
    164          *p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
    166  static inline void INIT rc_update_bit_1(struct rc *rc, uint16_t *p)
    170          *p -= *p >> RC_MOVE_BITS;
    174  static int INIT rc_get_bit(struct rc *rc, uint16_t *p, int *symbol)
    176          if (rc_is_bit_0(rc, p)) {
         [all …]
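These helpers implement an adaptive binary range decoder: `*p` is an 11-bit probability that the next bit is 0, nudged toward each decoded value. A compact sketch of the whole decode step (the refill is inlined; the struct layout is illustrative, not the kernel's struct rc):

    #include <stdint.h>

    #define MODEL_TOTAL_BITS 11
    #define MOVE_BITS        5

    struct rc_sketch {
            uint32_t range, code;
            const uint8_t *in;              /* compressed input cursor */
    };

    static void rc_normalize_sketch(struct rc_sketch *rc)
    {
            if (rc->range < (1u << 24)) {   /* top byte fully decided */
                    rc->range <<= 8;
                    rc->code = (rc->code << 8) | *rc->in++;
            }
    }

    static int rc_get_bit_sketch(struct rc_sketch *rc, uint16_t *prob)
    {
            uint32_t bound = *prob * (rc->range >> MODEL_TOTAL_BITS);

            if (rc->code < bound) {         /* decoded a 0 */
                    rc->range = bound;
                    *prob += ((1 << MODEL_TOTAL_BITS) - *prob) >> MOVE_BITS;
                    rc_normalize_sketch(rc);
                    return 0;
            }
            rc->code -= bound;              /* decoded a 1 */
            rc->range -= bound;
            *prob -= *prob >> MOVE_BITS;
            rc_normalize_sketch(rc);
            return 1;
    }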
|
D | timerqueue.c |  (hits in timerqueue_add())
     29          struct rb_node **p = &head->rb_root.rb_root.rb_node;
     37          while (*p) {
     38                  parent = *p;
     41                          p = &(*p)->rb_left;
     43                          p = &(*p)->rb_right;
     47          rb_link_node(&node->node, parent, p);
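timerqueue_add() walks a `struct rb_node **` so that the final *p is exactly the empty child slot the new node links into. The same pointer-to-link idiom on a plain (unbalanced) binary search tree; the kernel follows rb_link_node() with rb_insert_color() to rebalance:

    struct tnode_sketch {
            unsigned long long expires;
            struct tnode_sketch *left, *right;
    };

    static void tree_insert_sketch(struct tnode_sketch **root,
                                   struct tnode_sketch *node)
    {
            struct tnode_sketch **p = root;

            while (*p) {
                    if (node->expires < (*p)->expires)
                            p = &(*p)->left;
                    else
                            p = &(*p)->right;
            }
            node->left = node->right = NULL;
            *p = node;                      /* link into the empty slot */
    }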
|
D | rhashtable.c |  (hits in head_hashfn(), rhashtable_shrink(), rht_deferred_worker(),
                     rhashtable_lookup_one(), rhashtable_try_insert() and the walk functions)
     41          return rht_head_hashfn(ht, tbl, he, ht->p);
    395          if (size < ht->p.min_size)
    396                  size = ht->p.min_size;
    421          else if (ht->p.automatic_shrinking && rht_shrink_below_30(ht, tbl))
    507                     (ht->p.obj_cmpfn ?
    508                      ht->p.obj_cmpfn(&arg, rht_obj(ht, head)) :
    600          hash = rht_head_hashfn(ht, tbl, obj, ht->p);
    668          iter->p = NULL;
    736          if (iter->p && !rhlist) {
    741                  struct rhash_head *p;
         [all …]
|
D | vsprintf.c |  (hits in dentry_name() and resource_string())
     923          const struct dentry *p;
     936          for (i = 0; i < depth; i++, d = p) {
     942                  p = READ_ONCE(d->d_parent);
     944                  if (p == d) {
    1101          char *p = sym, *pend = sym + sizeof(sym);
    1108          *p++ = '[';
    1110                  p = string_nocheck(p, pend, "io ", str_spec);
    1113                  p = string_nocheck(p, pend, "mem ", str_spec);
    1116                  p = string_nocheck(p, pend, "irq ", str_spec);
    1119                  p = string_nocheck(p, pend, "dma ", str_spec);
         [all …]
|
/lib/math/ |
D | prime_numbers.c |  (lines 91-102 in clear_multiples(), 111-154 in
                        expand_to_next_prime(), 163 in free_primes())
     91                             unsigned long *p,
    102                  __clear_bit(m, p);
    111          const struct primes *p;
    134          p = rcu_dereference_protected(primes, lockdep_is_held(&lock));
    135          if (x < p->last) {
    145          bitmap_copy(new->primes, p->primes, p->sz);
    147          new->last = clear_multiples(y, new->primes, p->sz, sz);
    153          if (p != &small_primes)
    154                  kfree_rcu((struct primes *)p, rcu);
    163          const struct primes *p;
         [all …]
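clear_multiples() is the sieve step: for a known prime x, clear the bit of every multiple of x in the newly extended bitmap, where bit n stands for the integer n. A standalone sketch (the kernel operates on word bitmaps via __clear_bit() and starts from the old bitmap's end):

    #include <limits.h>

    #define LONG_BITS (sizeof(unsigned long) * CHAR_BIT)

    static void clear_multiples_sketch(unsigned long x, unsigned long *bits,
                                       unsigned long start, unsigned long end)
    {
            unsigned long m = (start + x - 1) / x * x;  /* first multiple >= start */

            if (m == x)
                    m += x;                 /* never clear the prime itself */
            for (; m < end; m += x)
                    bits[m / LONG_BITS] &= ~(1UL << (m % LONG_BITS));
    }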
|
/lib/raid6/ |
D | avx512.c |  (hits in raid6_avx5121_gen_syndrome(), raid6_avx5121_xor_syndrome()
                and raid6_avx5122_gen_syndrome())
     47          u8 *p, *q;
     51          p = dptr[z0+1];         /* XOR parity */
     94                  : "m" (p[d]), "m" (q[d]));

    105          u8 *p, *q;
    109          p = dptr[disks-2];      /* XOR parity */
    122                  : "m" (dptr[z0][d]), "m" (p[d]));
    153                  : "m" (q[d]), "m" (p[d]));

    174          u8 *p, *q;
    178          p = dptr[z0+1];         /* XOR parity */
    225                  : "m" (p[d]), "m" (p[d+64]), "m" (q[d]),
         [all …]
|
D | sse2.c |  (hits in raid6_sse21_gen_syndrome(), raid6_sse21_xor_syndrome()
              and raid6_sse22_gen_syndrome())
     39          u8 *p, *q;
     43          p = dptr[z0+1];         /* XOR parity */
     76          asm volatile("movntdq %%xmm2,%0" : "=m" (p[d]));

     91          u8 *p, *q;
     95          p = dptr[disks-2];      /* XOR parity */
    104          asm volatile("movdqa %0,%%xmm2" : : "m" (p[d]));
    128          asm volatile("movdqa %%xmm2,%0" : "=m" (p[d]));

    149          u8 *p, *q;
    153          p = dptr[z0+1];         /* XOR parity */
    188          asm volatile("movntdq %%xmm2,%0" : "=m" (p[d]));
         [all …]
|
D | avx2.c |  (hits in raid6_avx21_gen_syndrome(), raid6_avx21_xor_syndrome()
              and raid6_avx22_gen_syndrome())
     37          u8 *p, *q;
     41          p = dptr[z0+1];         /* XOR parity */
     72          asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));

     86          u8 *p, *q;
     90          p = dptr[disks-2];      /* XOR parity */
     99          asm volatile("vmovdqa %0,%%ymm2" : : "m" (p[d]));
    123          asm volatile("vmovdqa %%ymm2,%0" : "=m" (p[d]));

    144          u8 *p, *q;
    148          p = dptr[z0+1];         /* XOR parity */
    182          asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));
         [all …]
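The three SIMD files above compute the same two syndromes per output byte, just on wider lanes. The scalar algorithm (after the portable lib/raid6/int.uc variant): P is the XOR of all data disks, and Q accumulates in GF(2^8), multiplying the running sum by {02} per step:

    #include <stddef.h>
    #include <stdint.h>

    /* Multiply a GF(2^8) element by {02} (polynomial 0x11d). */
    static uint8_t gf2_mul2(uint8_t v)
    {
            return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0));
    }

    static void gen_syndrome_sketch(int disks, size_t bytes, uint8_t **ptrs)
    {
            uint8_t *p = ptrs[disks - 2];   /* XOR parity */
            uint8_t *q = ptrs[disks - 1];   /* RS syndrome */
            int z0 = disks - 3;             /* highest data disk */
            size_t d;
            int z;

            for (d = 0; d < bytes; d++) {
                    uint8_t wp = ptrs[z0][d], wq = wp;

                    for (z = z0 - 1; z >= 0; z--) {
                            wp ^= ptrs[z][d];               /* P: plain XOR */
                            wq = gf2_mul2(wq) ^ ptrs[z][d]; /* Q: Horner step */
                    }
                    p[d] = wp;
                    q[d] = wq;
            }
    }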
|
D | recov_avx2.c |  (hits in raid6_2data_recov_avx2() and raid6_datap_recov_avx2())
     19          u8 *p, *q, *dp, *dq;
     24          p = (u8 *)ptrs[disks-2];
     42          ptrs[disks-2] = p;
     59          asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0]));
     60          asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32]));
    129                  p += 64;
    135          asm volatile("vmovdqa %0, %%ymm0" : : "m" (*p));
    176                  p += 32;

    189          u8 *p, *q, *dq;
    193          p = (u8 *)ptrs[disks-2];
         [all …]
|
D | recov_avx512.c |  (hits in raid6_2data_recov_avx512() and raid6_datap_recov_avx512())
     27          u8 *p, *q, *dp, *dq;
     32          p = (u8 *)ptrs[disks-2];
     53          ptrs[disks-2] = p;
     77                  : "m" (q[0]), "m" (q[64]), "m" (p[0]),
     78                    "m" (p[64]), "m" (dq[0]), "m" (dq[64]),
    156                  p += 128;
    166                  : "m" (*q), "m" (*p), "m"(*dq), "m" (*dp));
    217                  p += 64;

    230          u8 *p, *q, *dq;
    234          p = (u8 *)ptrs[disks-2];
         [all …]
|
D | recov_ssse3.c |  (hits in raid6_2data_recov_ssse3() and raid6_datap_recov_ssse3())
     19          u8 *p, *q, *dp, *dq;
     26          p = (u8 *)ptrs[disks-2];
     44          ptrs[disks-2] = p;
     69          asm volatile("movdqa %0,%%xmm0" : : "m" (p[0]));
     70          asm volatile("movdqa %0,%%xmm8" : : "m" (p[16]));
    133                  p += 32;
    139          asm volatile("movdqa %0,%%xmm0" : : "m" (*p));
    180                  p += 16;

    194          u8 *p, *q, *dq;
    200          p = (u8 *)ptrs[disks-2];
         [all …]
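The recov_* variants above differ only in vector width; the algebra is fixed. With the failed data disks at positions x < y treated as zero while recomputing syndromes Dp, Dq against the stored P, Q, the standard RAID-6 recovery gives

    Dx = A*(P ^ Dp) ^ B*(Q ^ Dq),        Dy = (P ^ Dp) ^ Dx

with GF(2^8) coefficients A = g^(y-x)/(g^(y-x) ^ {01}) and B = g^(-x)/(g^(y-x) ^ {01}). A per-byte sketch, with a slow bitwise multiply standing in for the kernel's lookup tables:

    #include <stdint.h>

    /* Slow GF(2^8) multiply, polynomial 0x11d. */
    static uint8_t gf_mul(uint8_t a, uint8_t b)
    {
            uint8_t r = 0;

            while (b) {
                    if (b & 1)
                            r ^= a;
                    a = (uint8_t)((a << 1) ^ ((a & 0x80) ? 0x1d : 0));
                    b >>= 1;
            }
            return r;
    }

    /* Recover one byte of each failed disk, given coefficients A and B
     * precomputed from the failed positions x and y. */
    static void recov_2data_byte_sketch(uint8_t p, uint8_t q,
                                        uint8_t dp, uint8_t dq,
                                        uint8_t A, uint8_t B,
                                        uint8_t *dx, uint8_t *dy)
    {
            uint8_t px = p ^ dp;            /* P syndrome delta */
            uint8_t qx = q ^ dq;            /* Q syndrome delta */

            *dx = gf_mul(A, px) ^ gf_mul(B, qx);
            *dy = px ^ *dx;
    }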
|