| /lib/842/ |
| D | 842_compress.c |
    109  #define get_input_data(p, o, b) \                                      argument
    110          be##b##_to_cpu(get_unaligned((__be##b *)((p)->in + (o))))
    112  #define init_hashtable_nodes(p, b) do { \                              argument
    114          hash_init((p)->htable##b); \
    115          for (_i = 0; _i < ARRAY_SIZE((p)->node##b); _i++) { \
    116                  (p)->node##b[_i].index = _i; \
    117                  (p)->node##b[_i].data = 0; \
    118                  INIT_HLIST_NODE(&(p)->node##b[_i].node); \
    122  #define find_index(p, b, n) ({ \                                       argument
    124          p->index##b[n] = INDEX_NOT_FOUND; \
    [all …]
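
    Sketch: these macros lean on preprocessor token pasting (##) to splice a bit
    width into type and helper names (__be##b, be##b##_to_cpu, htable##b). A
    standalone illustration of the same trick (DEFINE_READER and read16/read32
    are this sketch's names, not the kernel's):

        #include <stdint.h>
        #include <stdio.h>
        #include <string.h>

        /* ##b splices the width into both the type and the function name,
         * the way get_input_data() builds __be##b and be##b##_to_cpu(). */
        #define DEFINE_READER(b) \
                static uint##b##_t read##b(const uint8_t *p) \
                { \
                        uint##b##_t v; \
                        memcpy(&v, p, sizeof(v)); /* unaligned-safe load */ \
                        return v; \
                }

        DEFINE_READER(16)
        DEFINE_READER(32)

        int main(void)
        {
                uint8_t buf[4] = { 1, 2, 3, 4 };

                printf("%u %u\n", (unsigned)read16(buf), (unsigned)read32(buf));
                return 0;
        }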
|
| D | 842_decompress.c |
     65  static int next_bits(struct sw842_param *p, u64 *d, u8 n);
     67  static int __split_next_bits(struct sw842_param *p, u64 *d, u8 n, u8 s)    in __split_next_bits()  argument
     77          ret = next_bits(p, &tmp, n - s);                                    in __split_next_bits()
     80          ret = next_bits(p, d, s);                                           in __split_next_bits()
     87  static int next_bits(struct sw842_param *p, u64 *d, u8 n)                  in next_bits()  argument
     89          u8 *in = p->in, b = p->bit, bits = b + n;                           in next_bits()
    100                  return __split_next_bits(p, d, n, 32);                      in next_bits()
    101          else if (p->ilen < 8 && bits > 32 && bits <= 56)                    in next_bits()
    102                  return __split_next_bits(p, d, n, 16);                      in next_bits()
    103          else if (p->ilen < 4 && bits > 16 && bits <= 24)                    in next_bits()
    [all …]
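
    Sketch: next_bits() extracts n bits from a big-endian bit stream, recursing
    through __split_next_bits() when the remaining input can't service the read
    in one load. A minimal userspace version of the underlying bit-window
    extraction (peek_bits and its n <= 32 limit are this sketch's, not the
    kernel's):

        #include <stdint.h>
        #include <stddef.h>
        #include <stdio.h>

        /* Return n <= 32 bits starting `bit` bits into the byte at in[pos]. */
        static uint32_t peek_bits(const uint8_t *in, size_t pos,
                                  unsigned int bit, unsigned int n)
        {
                uint64_t window = 0;
                unsigned int got;

                for (got = 0; got < bit + n; got += 8)  /* gather covering bytes */
                        window = (window << 8) | in[pos++];

                window >>= got - (bit + n);             /* drop over-read bits */
                return (uint32_t)(window & ((1ULL << n) - 1));
        }

        int main(void)
        {
                const uint8_t buf[] = { 0xAB, 0xCD };   /* 10101011 11001101 */

                printf("0x%X\n", peek_bits(buf, 0, 4, 8));      /* 0xBC */
                return 0;
        }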
|
| /lib/ |
| D | xxhash.c |
    104          const uint8_t *p = (const uint8_t *)input;            in xxh32()  local
    105          const uint8_t *b_end = p + len;                       in xxh32()
    116                  v1 = xxh32_round(v1, get_unaligned_le32(p));  in xxh32()
    117                  p += 4;                                       in xxh32()
    118                  v2 = xxh32_round(v2, get_unaligned_le32(p));  in xxh32()
    119                  p += 4;                                       in xxh32()
    120                  v3 = xxh32_round(v3, get_unaligned_le32(p));  in xxh32()
    121                  p += 4;                                       in xxh32()
    122                  v4 = xxh32_round(v4, get_unaligned_le32(p));  in xxh32()
    123                  p += 4;                                       in xxh32()
    [all …]
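
    Sketch: the hot loop above consumes four little-endian 32-bit lanes per
    16-byte stripe, feeding one lane into each accumulator. The round itself
    is the published xxh32 mix (the two constants are the standard xxHash
    primes; xxh32_round_sketch is this sketch's name):

        #include <stdint.h>

        #define XXH_PRIME32_1 2654435761U
        #define XXH_PRIME32_2 2246822519U

        static uint32_t rotl32(uint32_t x, unsigned int r)
        {
                return (x << r) | (x >> (32 - r));
        }

        /* One accumulator round, as xxh32_round() applies per 4-byte lane. */
        static uint32_t xxh32_round_sketch(uint32_t acc, uint32_t lane)
        {
                acc += lane * XXH_PRIME32_2;
                acc = rotl32(acc, 13);
                acc *= XXH_PRIME32_1;
                return acc;
        }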
|
| D | parser.c |
     34  static int match_one(char *s, const char *p, substring_t args[])  in match_one()  argument
     39          if (!p)                                                    in match_one()
     44                  meta = strchr(p, '%');                             in match_one()
     46                          return strcmp(p, s) == 0;                  in match_one()
     48                  if (strncmp(p, s, meta-p))                         in match_one()
     51                  s += meta - p;                                     in match_one()
     52                  p = meta + 1;                                      in match_one()
     54                  if (isdigit(*p))                                   in match_one()
     55                          len = simple_strtoul(p, (char **) &p, 10); in match_one()
     56                  else if (*p == '%') {                              in match_one()
    [all …]
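
    Sketch: match_one() walks the pattern in literal runs, comparing text
    byte-for-byte up to each '%' and then capturing one conversion. A
    simplified userspace version handling only "%d" (match_simple is this
    sketch's name; the kernel also handles %s/%u/%o/%x and field widths):

        #include <stdbool.h>
        #include <stdlib.h>
        #include <string.h>

        static bool match_simple(const char *s, const char *p, long *out)
        {
                for (;;) {
                        const char *meta = strchr(p, '%');

                        if (!meta)              /* no conversions left */
                                return strcmp(p, s) == 0;
                        if (strncmp(p, s, (size_t)(meta - p)))
                                return false;   /* literal prefix differs */

                        s += meta - p;
                        p = meta + 1;
                        if (*p == 'd') {        /* capture a decimal field */
                                char *end;

                                *out = strtol(s, &end, 10);
                                if (end == s)
                                        return false;
                                s = end;
                                p++;
                        } else {
                                return false;   /* beyond this sketch */
                        }
                }
        }

    Usage: match_simple("mode=42", "mode=%d", &v) succeeds with v == 42.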
|
| D | flex_proportions.c |
     38  int fprop_global_init(struct fprop_global *p, gfp_t gfp)        in fprop_global_init()  argument
     42          p->period = 0;                                           in fprop_global_init()
     44          err = percpu_counter_init(&p->events, 1, gfp);           in fprop_global_init()
     47          seqcount_init(&p->sequence);                             in fprop_global_init()
     51  void fprop_global_destroy(struct fprop_global *p)               in fprop_global_destroy()  argument
     53          percpu_counter_destroy(&p->events);                      in fprop_global_destroy()
     64  bool fprop_new_period(struct fprop_global *p, int periods)      in fprop_new_period()  argument
     66          s64 events = percpu_counter_sum(&p->events);             in fprop_new_period()
     74          write_seqcount_begin(&p->sequence);                      in fprop_new_period()
     78          percpu_counter_add(&p->events, -events);                 in fprop_new_period()
    [all …]
|
| D | slub_kunit.c |
     33          u8 *p = kmem_cache_alloc(s, GFP_KERNEL);        in test_clobber_zone()  local
     36          p[64] = 0x12;                                   in test_clobber_zone()
     42          kmem_cache_free(s, p);                          in test_clobber_zone()
     51          u8 *p = kmem_cache_alloc(s, GFP_KERNEL);        in test_next_pointer()  local
     55          kmem_cache_free(s, p);                          in test_next_pointer()
     57          ptr_addr = (unsigned long *)(p + s->offset);    in test_next_pointer()
     59          p[s->offset] = ~p[s->offset];                   in test_next_pointer()
     96          u8 *p = kmem_cache_alloc(s, GFP_KERNEL);        in test_first_word()  local
     98          kmem_cache_free(s, p);                          in test_first_word()
     99          *p = 0x78;                                      in test_first_word()
    [all …]
|
| D | memcat_p.c |
     12          void **p = a, **new;                                    in __memcat_p()  local
     16          for (nr = 0, p = a; *p; nr++, p++)                      in __memcat_p()
     18          for (p = b; *p; nr++, p++)                              in __memcat_p()
     28          for (nr--; nr >= 0; nr--, p = p == b ? &a[nr] : p - 1)  in __memcat_p()
     29                  new[nr] = *p;                                   in __memcat_p()
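
    Sketch: __memcat_p() joins two NULL-terminated pointer arrays into one
    freshly allocated, NULL-terminated array (the kernel fills it backwards;
    this userspace version copies forwards for clarity, with malloc in place
    of the kernel allocator):

        #include <stdlib.h>

        static void **memcat_p_sketch(void **a, void **b)
        {
                size_t na = 0, nb = 0, i;
                void **res;

                while (a[na])
                        na++;
                while (b[nb])
                        nb++;

                res = malloc((na + nb + 1) * sizeof(*res));
                if (!res)
                        return NULL;

                for (i = 0; i < na; i++)
                        res[i] = a[i];
                for (i = 0; i < nb; i++)
                        res[na + i] = b[i];
                res[na + nb] = NULL;    /* keep the terminator */
                return res;
        }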
|
| D | bootconfig.c |
    103  static int __init xbc_parse_error(const char *msg, const char *p)  in xbc_parse_error()  argument
    106          xbc_err_pos = (int)(p - xbc_data);                          in xbc_parse_error()
    194          const char *p = xbc_node_get_data(node);                    in xbc_node_match_prefix()  local
    195          int len = strlen(p);                                        in xbc_node_match_prefix()
    197          if (strncmp(*prefix, p, len))                               in xbc_node_match_prefix()
    200          p = *prefix + len;                                          in xbc_node_match_prefix()
    201          if (*p == '.')                                              in xbc_node_match_prefix()
    202                  p++;                                                in xbc_node_match_prefix()
    203          else if (*p != '\0')                                        in xbc_node_match_prefix()
    205          *prefix = p;                                                in xbc_node_match_prefix()
    [all …]
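
    Sketch: xbc_node_match_prefix() consumes one dotted-key component from
    *prefix when it matches the node's key, stepping over the '.' separator
    and rejecting partial-component matches. The same step in isolation
    (match_prefix_step is this sketch's name):

        #include <stdbool.h>
        #include <string.h>

        static bool match_prefix_step(const char **prefix, const char *component)
        {
                size_t len = strlen(component);
                const char *p;

                if (strncmp(*prefix, component, len))
                        return false;

                p = *prefix + len;
                if (*p == '.')
                        p++;            /* step over the separator */
                else if (*p != '\0')
                        return false;   /* "ker" must not match "kernel..." */

                *prefix = p;
                return true;
        }

    Given prefix "kernel.sched.latency", matching component "kernel" leaves
    *prefix at "sched.latency".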
|
| D | earlycpio.c |
     64          const char *p, *dptr, *nptr;                            in find_cpio_data()  local
     70          p = data;                                               in find_cpio_data()
     73          if (!*p) {                                              in find_cpio_data()
     75                  p += 4;                                         in find_cpio_data()
     86                  c = *p++;                                       in find_cpio_data()
    111                  dptr = PTR_ALIGN(p + ch[C_NAMESIZE], 4);        in find_cpio_data()
    114                  if (nptr > p + len || dptr < p || nptr < dptr)  in find_cpio_data()
    119                      !memcmp(p, path, mypathsize)) {             in find_cpio_data()
    127                                  p, MAX_CPIO_FILE_NAME);         in find_cpio_data()
    129                          strscpy(cd.name, p + mypathsize, MAX_CPIO_FILE_NAME);  in find_cpio_data()
    [all …]
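
    Sketch: find_cpio_data() walks "newc"-format cpio headers, whose numeric
    fields are fixed-width 8-digit ASCII hex (the `c = *p++` loop above decodes
    them into ch[]). Decoding one such field in isolation (hex8 is this
    sketch's name; the real code folds this into its header loop):

        static unsigned long hex8(const char *p)
        {
                unsigned long v = 0;
                int i;

                for (i = 0; i < 8; i++) {
                        unsigned int c = (unsigned char)p[i];

                        v <<= 4;
                        if (c >= 'a')
                                v |= c - 'a' + 10;
                        else if (c >= 'A')
                                v |= c - 'A' + 10;
                        else
                                v |= c - '0';
                }
                return v;       /* hex8("0000A03F") == 41023 */
        }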
|
| D | crc32.c |
    118          u8 *p = (u8 *)(b + 1) - 1;                                                   local
    121                  DO_CRC(*++p); /* use pre increment for speed */
    124                  DO_CRC(*++p); /* use pre increment for speed */
    146  static inline u32 __pure crc32_le_generic(u32 crc, unsigned char const *p,  argument
    153                  crc ^= *p++;
    159                  crc ^= *p++;
    167                  crc ^= *p++;
    174                  crc ^= *p++;
    179          crc = crc32_body(crc, p, len, tab);
    186  u32 __pure __weak crc32_le(u32 crc, unsigned char const *p, size_t len)     argument
    [all …]
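
    Sketch: crc32_body() is the unrolled, table-driven form; the recurrence is
    easier to see one bit at a time. A standalone bit-serial CRC-32 over the
    same reflected polynomial 0xEDB88320 that the kernel tables encode, with
    the conventional pre/post inversion folded in (the kernel's crc32_le()
    leaves seeding and inversion to its callers):

        #include <stdint.h>
        #include <stddef.h>

        static uint32_t crc32_le_sketch(uint32_t crc, const unsigned char *p,
                                        size_t len)
        {
                int k;

                crc = ~crc;
                while (len--) {
                        crc ^= *p++;
                        for (k = 0; k < 8; k++)
                                crc = (crc >> 1) ^ ((crc & 1) ? 0xEDB88320u : 0);
                }
                return ~crc;
        }

    crc32_le_sketch(0, (const unsigned char *)"123456789", 9) yields
    0xCBF43926, the standard CRC-32 check value.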
|
| D | globtest.c |
    133          char const *p = glob_tests;                     in glob_init()  local
    143          while (*p) {                                    in glob_init()
    144                  bool expected = *p++ & 1;               in glob_init()
    145                  char const *pat = p;                    in glob_init()
    147                  p += strlen(p) + 1;                     in glob_init()
    148                  successes += test(pat, p, expected);    in glob_init()
    149                  p += strlen(p) + 1;                     in glob_init()
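
    Sketch: glob_tests is a packed table of "flag, pattern\0, string\0"
    records, where the low bit of the flag character encodes the expected
    verdict; the loop above walks it with strlen()+1 hops. The same walk over
    an illustrative table of this sketch's own making:

        #include <stdbool.h>
        #include <stdio.h>
        #include <string.h>

        static const char table[] =
                "1" "a*\0" "abc\0"      /* expect a match */
                "0" "a?\0" "abc\0";     /* expect no match */

        int main(void)
        {
                const char *p = table;

                while (*p) {
                        bool expected = *p++ & 1;       /* '1'/'0' low bit */
                        const char *pat = p;

                        p += strlen(p) + 1;             /* hop over pattern */
                        printf("pat=%s str=%s expect=%d\n", pat, p, expected);
                        p += strlen(p) + 1;             /* hop over string */
                }
                return 0;
        }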
|
| D | decompress_unlzma.c |
    150  static inline uint32_t INIT rc_is_bit_0_helper(struct rc *rc, uint16_t *p)  in rc_is_bit_0_helper()  argument
    153          rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);                 in rc_is_bit_0_helper()
    156  static inline int INIT rc_is_bit_0(struct rc *rc, uint16_t *p)              in rc_is_bit_0()  argument
    158          uint32_t t = rc_is_bit_0_helper(rc, p);                              in rc_is_bit_0()
    163  static inline void INIT rc_update_bit_0(struct rc *rc, uint16_t *p)         in rc_update_bit_0()  argument
    166          *p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;             in rc_update_bit_0()
    168  static inline void INIT rc_update_bit_1(struct rc *rc, uint16_t *p)         in rc_update_bit_1()  argument
    172          *p -= *p >> RC_MOVE_BITS;                                            in rc_update_bit_1()
    176  static int INIT rc_get_bit(struct rc *rc, uint16_t *p, int *symbol)         in rc_get_bit()  argument
    178          if (rc_is_bit_0(rc, p)) {                                            in rc_get_bit()
    [all …]
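
    Sketch: rc_update_bit_0/1() are the standard LZMA adaptive-probability
    update: an 11-bit probability (RC_MODEL_TOTAL_BITS) moves toward the
    observed bit by 1/32 of the remaining gap (RC_MOVE_BITS). The decision
    plus update in one step, with renormalisation of range/code omitted
    (decode_bit is this sketch's name):

        #include <stdint.h>

        #define MODEL_TOTAL_BITS 11     /* probabilities live in [0, 2048) */
        #define MOVE_BITS        5      /* adaptation rate: gap/32 */

        static int decode_bit(uint32_t *range, uint32_t *code, uint16_t *p)
        {
                uint32_t bound = (*range >> MODEL_TOTAL_BITS) * *p;

                if (*code < bound) {    /* bit 0: keep the low subrange */
                        *range = bound;
                        *p += ((1 << MODEL_TOTAL_BITS) - *p) >> MOVE_BITS;
                        return 0;
                }
                *code -= bound;         /* bit 1: keep the high subrange */
                *range -= bound;
                *p -= *p >> MOVE_BITS;
                return 1;
        }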
|
| D | kasprintf.c |
     18          char *p;                                        in kvasprintf()  local
     25          p = kmalloc_track_caller(first+1, gfp);         in kvasprintf()
     26          if (!p)                                         in kvasprintf()
     29          second = vsnprintf(p, first+1, fmt, ap);        in kvasprintf()
     33          return p;                                       in kvasprintf()
     56          char *p;                                        in kasprintf()  local
     59          p = kvasprintf(gfp, fmt, ap);                   in kasprintf()
     62          return p;                                       in kasprintf()
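
    Sketch: kvasprintf() is the classic two-pass vsnprintf pattern: measure
    with a NULL buffer, allocate exactly first+1 bytes, then format for real.
    A userspace equivalent (malloc stands in for kmalloc_track_caller; note
    the va_copy, since the argument list is consumed twice):

        #include <stdarg.h>
        #include <stdio.h>
        #include <stdlib.h>

        static char *vasprintf_sketch(const char *fmt, va_list ap)
        {
                va_list aq;
                int first, second;
                char *p;

                va_copy(aq, ap);
                first = vsnprintf(NULL, 0, fmt, aq);    /* pass 1: measure */
                va_end(aq);
                if (first < 0)
                        return NULL;

                p = malloc(first + 1);
                if (!p)
                        return NULL;

                second = vsnprintf(p, first + 1, fmt, ap); /* pass 2: fill */
                if (second != first) {          /* can't happen with same args */
                        free(p);
                        return NULL;
                }
                return p;
        }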
|
| D | rhashtable.c |
     41          return rht_head_hashfn(ht, tbl, he, ht->p);                           in head_hashfn()
    402          if (size < ht->p.min_size)                                            in rhashtable_shrink()
    403                  size = ht->p.min_size;                                        in rhashtable_shrink()
    428          else if (ht->p.automatic_shrinking && rht_shrink_below_30(ht, tbl))   in rht_deferred_worker()
    514                  (ht->p.obj_cmpfn ?                                            in rhashtable_lookup_one()
    515                          ht->p.obj_cmpfn(&arg, rht_obj(ht, head)) :            in rhashtable_lookup_one()
    604          hash = rht_head_hashfn(ht, tbl, obj, ht->p);                          in rhashtable_try_insert()
    680          iter->p = NULL;                                                       in rhashtable_walk_enter()
    748          if (iter->p && !rhlist) {                                             in rhashtable_walk_start_check()
    753                  struct rhash_head *p;                                         in rhashtable_walk_start_check()  local
    [all …]
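
    Sketch: ht->p above is the rhashtable_params block the table was created
    with. A hedged kernel-side view of the API this file implements, going by
    include/linux/rhashtable.h (struct item and demo() are this sketch's, and
    this is illustrative kernel code, not a drop-in module):

        #include <linux/rhashtable.h>
        #include <linux/slab.h>

        struct item {
                u32 key;
                struct rhash_head node; /* links the object into the table */
        };

        static const struct rhashtable_params item_params = {
                .key_len     = sizeof(u32),
                .key_offset  = offsetof(struct item, key),
                .head_offset = offsetof(struct item, node),
                .automatic_shrinking = true,    /* rht_shrink_below_30() path */
        };

        static int demo(void)
        {
                struct rhashtable ht;
                struct item *it;
                u32 key = 42;
                int err;

                err = rhashtable_init(&ht, &item_params);
                if (err)
                        return err;

                it = kzalloc(sizeof(*it), GFP_KERNEL);
                if (it) {
                        it->key = key;
                        err = rhashtable_insert_fast(&ht, &it->node, item_params);
                        if (err)
                                kfree(it);
                }

                it = rhashtable_lookup_fast(&ht, &key, item_params);
                if (it) {
                        rhashtable_remove_fast(&ht, &it->node, item_params);
                        kfree(it);
                }
                rhashtable_destroy(&ht);
                return err;
        }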
|
| D | vsprintf.c |
    904          const struct dentry *p;                                 in dentry_name()  local
    917          for (i = 0; i < depth; i++, d = p) {                    in dentry_name()
    923                  p = READ_ONCE(d->d_parent);                     in dentry_name()
    925                  if (p == d) {                                   in dentry_name()
   1086          char *p = sym, *pend = sym + sizeof(sym);               in resource_string()  local
   1093          *p++ = '[';                                             in resource_string()
   1095          p = string_nocheck(p, pend, "io ", str_spec);           in resource_string()
   1098          p = string_nocheck(p, pend, "mem ", str_spec);          in resource_string()
   1101          p = string_nocheck(p, pend, "irq ", str_spec);          in resource_string()
   1104          p = string_nocheck(p, pend, "dma ", str_spec);          in resource_string()
    [all …]
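
    Sketch: resource_string() builds its output with vsprintf.c's write-cursor
    convention: helpers take the current position p and the buffer end pend,
    write only what fits, and always return the advanced cursor so truncation
    is detected once at the end. The convention in isolation (append is this
    sketch's name, standing in for helpers like string_nocheck()):

        static char *append(char *p, char *pend, const char *src)
        {
                while (*src) {
                        if (p < pend)
                                *p = *src;      /* write only inside buffer */
                        p++;                    /* but always advance */
                        src++;
                }
                return p;
        }

    After a chain of append() calls, p - buf is the length the full string
    would have needed, whether or not it fit.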
|
| D | string_helpers.c |
    187          char *p = *dst, *q = *src;      in unescape_space()  local
    191                  *p = '\n';              in unescape_space()
    194                  *p = '\r';              in unescape_space()
    197                  *p = '\t';              in unescape_space()
    200                  *p = '\v';              in unescape_space()
    203                  *p = '\f';              in unescape_space()
    215          char *p = *dst, *q = *src;      in unescape_octal()  local
    226                  *p = num;               in unescape_octal()
    234          char *p = *dst, *q = *src;      in unescape_hex()  local
    250                  *p = num;               in unescape_hex()
    [all …]
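
    Sketch: each unescape_*() helper looks at the character after a backslash,
    writes the decoded byte through *dst, and advances both cursors only on
    success, so the caller can fall through to the next escape class. The
    whitespace class in isolation (mirroring the unescape_space() shape above):

        #include <stdbool.h>

        static bool unescape_space_sketch(char **src, char **dst)
        {
                char *p = *dst, *q = *src;

                switch (*q) {
                case 'n': *p = '\n'; break;
                case 'r': *p = '\r'; break;
                case 't': *p = '\t'; break;
                case 'v': *p = '\v'; break;
                case 'f': *p = '\f'; break;
                default:
                        return false;   /* not ours; try another class */
                }
                *dst += 1;
                *src += 1;
                return true;
        }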
|
| /lib/math/ |
| D | prime_numbers.c |
     89                             unsigned long *p,                            in clear_multiples()  argument
    100                  __clear_bit(m, p);                                      in clear_multiples()
    109          const struct primes *p;                                         in expand_to_next_prime()  local
    132          p = rcu_dereference_protected(primes, lockdep_is_held(&lock));  in expand_to_next_prime()
    133          if (x < p->last) {                                              in expand_to_next_prime()
    143          bitmap_copy(new->primes, p->primes, p->sz);                     in expand_to_next_prime()
    145          new->last = clear_multiples(y, new->primes, p->sz, sz);         in expand_to_next_prime()
    151          if (p != &small_primes)                                         in expand_to_next_prime()
    152                  kfree_rcu((struct primes *)p, rcu);                     in expand_to_next_prime()
    161          const struct primes *p;                                         in free_primes()  local
    [all …]
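
    Sketch: clear_multiples() is one strike pass of a bitmap sieve of
    Eratosthenes: knock out every multiple of a prime inside the window, and
    the lowest surviving bit is the next prime. The whole sieve over a
    one-word window (the layout and names here are this sketch's, not the
    kernel's bitmap API):

        #include <stdio.h>

        #define MAX 64  /* sieve window: one 64-bit word for the sketch */

        int main(void)
        {
                unsigned long bits = ~0UL & ~3UL;       /* 0 and 1 not prime */
                unsigned long x, m, i;

                for (x = 2; x * x < MAX; x++) {
                        if (!(bits & (1UL << x)))
                                continue;               /* already struck */
                        for (m = x * x; m < MAX; m += x)
                                bits &= ~(1UL << m);    /* __clear_bit(m, p) */
                }

                for (i = 0; i < MAX; i++)
                        if (bits & (1UL << i))
                                printf("%lu ", i);      /* 2 3 5 7 ... 61 */
                printf("\n");
                return 0;
        }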
|
| /lib/raid6/ |
| D | recov_loongarch_simd.c |
     32          u8 *p, *q, *dp, *dq;                            in raid6_2data_recov_lsx()  local
     36          p = (u8 *)ptrs[disks - 2];                      in raid6_2data_recov_lsx()
     56          ptrs[disks - 2] = p;                            in raid6_2data_recov_lsx()
     90          asm volatile("vld $vr0, %0" : : "m" (p[0]));    in raid6_2data_recov_lsx()
     91          asm volatile("vld $vr1, %0" : : "m" (p[16]));   in raid6_2data_recov_lsx()
     92          asm volatile("vld $vr2, %0" : : "m" (p[32]));   in raid6_2data_recov_lsx()
     93          asm volatile("vld $vr3, %0" : : "m" (p[48]));   in raid6_2data_recov_lsx()
    177                  p += 64;                                in raid6_2data_recov_lsx()
    189          u8 *p, *q, *dq;                                 in raid6_datap_recov_lsx()  local
    192          p = (u8 *)ptrs[disks - 2];                      in raid6_datap_recov_lsx()
    [all …]
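
    Sketch: this and the recov_* files below vectorise the same GF(2^8)
    algebra from lib/raid6/recov.c. With data disks x < y lost, the P and Q
    deltas give two equations, dx ^ dy = pxy and g^x*dx ^ g^y*dy = qxy,
    solved per byte lane. A scalar version with this sketch's own bitwise
    gf_mul/gf_inv helpers standing in for the kernel's lookup tables:

        #include <stdint.h>

        /* GF(2^8) multiply, RAID-6 polynomial 0x11d. */
        static uint8_t gf_mul(uint8_t a, uint8_t b)
        {
                uint8_t r = 0;

                while (b) {
                        if (b & 1)
                                r ^= a;
                        a = (a << 1) ^ ((a & 0x80) ? 0x1d : 0);
                        b >>= 1;
                }
                return r;
        }

        static uint8_t gf_pow(uint8_t a, unsigned int e)
        {
                uint8_t r = 1;

                for (; e; e >>= 1, a = gf_mul(a, a))
                        if (e & 1)
                                r = gf_mul(r, a);
                return r;
        }

        static uint8_t gf_inv(uint8_t a)
        {
                return gf_pow(a, 254);  /* a^254 == a^-1 in GF(2^8) */
        }

        /* Solve dx ^ dy = pxy, g^x*dx ^ g^y*dy = qxy for one byte lane. */
        static void recov_2data(unsigned int x, unsigned int y,
                                uint8_t pxy, uint8_t qxy,
                                uint8_t *dx, uint8_t *dy)
        {
                uint8_t gx = gf_pow(2, x), gy = gf_pow(2, y);
                uint8_t denom = gf_inv(gx ^ gy);

                *dx = gf_mul(denom, qxy ^ gf_mul(gy, pxy));
                *dy = pxy ^ *dx;
        }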
|
| D | avx512.c |
     47          u8 *p, *q;                                              in raid6_avx5121_gen_syndrome()  local
     51          p = dptr[z0+1];         /* XOR parity */                in raid6_avx5121_gen_syndrome()
     94                         : "m" (p[d]), "m" (q[d]));               in raid6_avx5121_gen_syndrome()
    105          u8 *p, *q;                                              in raid6_avx5121_xor_syndrome()  local
    109          p = dptr[disks-2];      /* XOR parity */                in raid6_avx5121_xor_syndrome()
    122                         : "m" (dptr[z0][d]), "m" (p[d]));        in raid6_avx5121_xor_syndrome()
    153                         : "m" (q[d]), "m" (p[d]));               in raid6_avx5121_xor_syndrome()
    174          u8 *p, *q;                                              in raid6_avx5122_gen_syndrome()  local
    178          p = dptr[z0+1];         /* XOR parity */                in raid6_avx5122_gen_syndrome()
    225                         : "m" (p[d]), "m" (p[d+64]), "m" (q[d]), in raid6_avx5122_gen_syndrome()
    [all …]
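
    Sketch: every gen_syndrome flavour (AVX-512 here, AVX2/SSE2/LoongArch
    below) computes the same thing per byte lane: P is a plain XOR across the
    data disks, and Q is evaluated Horner-style, multiplying the running value
    by the generator g = 2 in GF(2^8) on each step; the shift/mask asm blocks
    are that multiply applied 32 or 64 bytes at a time. A scalar version
    (gen_syndrome_sketch is this sketch's name):

        #include <stdint.h>
        #include <stddef.h>

        /* Multiply one GF(2^8) element by g = 2 (polynomial 0x11d). */
        static uint8_t gf_mul2(uint8_t v)
        {
                return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0));
        }

        /* dptr[0..disks-3] are data blocks, dptr[disks-2] is P and
         * dptr[disks-1] is Q, following the kernel's convention. */
        static void gen_syndrome_sketch(int disks, size_t bytes, uint8_t **dptr)
        {
                int z, z0 = disks - 3;          /* highest data disk */
                uint8_t *p = dptr[z0 + 1], *q = dptr[z0 + 2];
                size_t d;

                for (d = 0; d < bytes; d++) {
                        uint8_t wd, wp, wq;

                        wq = wp = dptr[z0][d];
                        for (z = z0 - 1; z >= 0; z--) {
                                wd = dptr[z][d];
                                wp ^= wd;               /* parity P */
                                wq = gf_mul2(wq) ^ wd;  /* Horner step for Q */
                        }
                        p[d] = wp;
                        q[d] = wq;
                }
        }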
|
| D | avx2.c |
     37          u8 *p, *q;                                              in raid6_avx21_gen_syndrome()  local
     41          p = dptr[z0+1];         /* XOR parity */                in raid6_avx21_gen_syndrome()
     72          asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));       in raid6_avx21_gen_syndrome()
     86          u8 *p, *q;                                              in raid6_avx21_xor_syndrome()  local
     90          p = dptr[disks-2];      /* XOR parity */                in raid6_avx21_xor_syndrome()
     99          asm volatile("vmovdqa %0,%%ymm2" : : "m" (p[d]));       in raid6_avx21_xor_syndrome()
    123          asm volatile("vmovdqa %%ymm2,%0" : "=m" (p[d]));        in raid6_avx21_xor_syndrome()
    144          u8 *p, *q;                                              in raid6_avx22_gen_syndrome()  local
    148          p = dptr[z0+1];         /* XOR parity */                in raid6_avx22_gen_syndrome()
    182          asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));       in raid6_avx22_gen_syndrome()
    [all …]
|
| D | sse2.c |
     39          u8 *p, *q;                                              in raid6_sse21_gen_syndrome()  local
     43          p = dptr[z0+1];         /* XOR parity */                in raid6_sse21_gen_syndrome()
     76          asm volatile("movntdq %%xmm2,%0" : "=m" (p[d]));        in raid6_sse21_gen_syndrome()
     91          u8 *p, *q;                                              in raid6_sse21_xor_syndrome()  local
     95          p = dptr[disks-2];      /* XOR parity */                in raid6_sse21_xor_syndrome()
    104          asm volatile("movdqa %0,%%xmm2" : : "m" (p[d]));        in raid6_sse21_xor_syndrome()
    128          asm volatile("movdqa %%xmm2,%0" : "=m" (p[d]));         in raid6_sse21_xor_syndrome()
    149          u8 *p, *q;                                              in raid6_sse22_gen_syndrome()  local
    153          p = dptr[z0+1];         /* XOR parity */                in raid6_sse22_gen_syndrome()
    188          asm volatile("movntdq %%xmm2,%0" : "=m" (p[d]));        in raid6_sse22_gen_syndrome()
    [all …]
|
| D | recov_avx2.c |
     19          u8 *p, *q, *dp, *dq;                                    in raid6_2data_recov_avx2()  local
     24          p = (u8 *)ptrs[disks-2];                                in raid6_2data_recov_avx2()
     42          ptrs[disks-2] = p;                                      in raid6_2data_recov_avx2()
     59          asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0]));      in raid6_2data_recov_avx2()
     60          asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32]));     in raid6_2data_recov_avx2()
    129                  p += 64;                                        in raid6_2data_recov_avx2()
    135          asm volatile("vmovdqa %0, %%ymm0" : : "m" (*p));        in raid6_2data_recov_avx2()
    176                  p += 32;                                        in raid6_2data_recov_avx2()
    189          u8 *p, *q, *dq;                                         in raid6_datap_recov_avx2()  local
    193          p = (u8 *)ptrs[disks-2];                                in raid6_datap_recov_avx2()
    [all …]
|
| D | recov_avx512.c |
     27          u8 *p, *q, *dp, *dq;                                      in raid6_2data_recov_avx512()  local
     32          p = (u8 *)ptrs[disks-2];                                  in raid6_2data_recov_avx512()
     53          ptrs[disks-2] = p;                                        in raid6_2data_recov_avx512()
     77                         : "m" (q[0]), "m" (q[64]), "m" (p[0]),     in raid6_2data_recov_avx512()
     78                           "m" (p[64]), "m" (dq[0]), "m" (dq[64]),  in raid6_2data_recov_avx512()
    156                  p += 128;                                         in raid6_2data_recov_avx512()
    166                         : "m" (*q), "m" (*p), "m"(*dq), "m" (*dp));  in raid6_2data_recov_avx512()
    217                  p += 64;                                          in raid6_2data_recov_avx512()
    230          u8 *p, *q, *dq;                                           in raid6_datap_recov_avx512()  local
    234          p = (u8 *)ptrs[disks-2];                                  in raid6_datap_recov_avx512()
    [all …]
|
| D | recov_ssse3.c |
     19          u8 *p, *q, *dp, *dq;                                    in raid6_2data_recov_ssse3()  local
     26          p = (u8 *)ptrs[disks-2];                                in raid6_2data_recov_ssse3()
     44          ptrs[disks-2] = p;                                      in raid6_2data_recov_ssse3()
     69          asm volatile("movdqa %0,%%xmm0" : : "m" (p[0]));        in raid6_2data_recov_ssse3()
     70          asm volatile("movdqa %0,%%xmm8" : : "m" (p[16]));       in raid6_2data_recov_ssse3()
    133                  p += 32;                                        in raid6_2data_recov_ssse3()
    139          asm volatile("movdqa %0,%%xmm0" : : "m" (*p));          in raid6_2data_recov_ssse3()
    180                  p += 16;                                        in raid6_2data_recov_ssse3()
    194          u8 *p, *q, *dq;                                         in raid6_datap_recov_ssse3()  local
    200          p = (u8 *)ptrs[disks-2];                                in raid6_datap_recov_ssse3()
    [all …]
|
| D | loongarch_simd.c |
     36          u8 *p, *q;                                              in raid6_lsx_gen_syndrome()  local
     40          p = dptr[z0+1];         /* XOR parity */                in raid6_lsx_gen_syndrome()
    100          asm volatile("vst $vr0, %0" : "=m"(p[d+NSIZE*0]));      in raid6_lsx_gen_syndrome()
    101          asm volatile("vst $vr1, %0" : "=m"(p[d+NSIZE*1]));      in raid6_lsx_gen_syndrome()
    102          asm volatile("vst $vr2, %0" : "=m"(p[d+NSIZE*2]));      in raid6_lsx_gen_syndrome()
    103          asm volatile("vst $vr3, %0" : "=m"(p[d+NSIZE*3]));      in raid6_lsx_gen_syndrome()
    118          u8 *p, *q;                                              in raid6_lsx_xor_syndrome()  local
    122          p = dptr[disks-2];      /* XOR parity */                in raid6_lsx_xor_syndrome()
    235                  : "+m"(p[d+NSIZE*0]), "+m"(p[d+NSIZE*1]),       in raid6_lsx_xor_syndrome()
    236                    "+m"(p[d+NSIZE*2]), "+m"(p[d+NSIZE*3]),       in raid6_lsx_xor_syndrome()
    [all …]
|