/lib/zlib_deflate/
D | deflate.c |
     69  typedef block_state (*compress_func) (deflate_state *s, int flush);
     72  static void fill_window (deflate_state *s);
     73  static block_state deflate_stored (deflate_state *s, int flush);
     74  static block_state deflate_fast (deflate_state *s, int flush);
     75  static block_state deflate_slow (deflate_state *s, int flush);
     76  static void lm_init (deflate_state *s);
     77  static void putShortMSB (deflate_state *s, uInt b);
     79  static uInt longest_match (deflate_state *s, IPos cur_match);
     82  static void check_match (deflate_state *s, IPos start, IPos match,
    163  #define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
    [all …]
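The UPDATE_HASH macro at line 163 is deflate's rolling hash over MIN_MATCH (3) consecutive bytes: each new byte is XORed in after a left shift, and hash_shift is sized so the oldest byte drops out of the masked result after three updates. A minimal user-space sketch of the same idea (HASH_BITS and the sample string are illustrative, not values taken from this file):

    #include <stdio.h>
    #include <string.h>

    #define HASH_BITS  15                    /* illustrative table size */
    #define HASH_MASK  ((1u << HASH_BITS) - 1)
    #define MIN_MATCH  3
    #define HASH_SHIFT ((HASH_BITS + MIN_MATCH - 1) / MIN_MATCH)

    /* Same shape as UPDATE_HASH(s,h,c): shift, mix in the new byte, mask. */
    #define UPDATE_HASH(h, c) ((h) = (((h) << HASH_SHIFT) ^ (unsigned)(c)) & HASH_MASK)

    int main(void)
    {
        const char *data = "abcabcabc";
        unsigned h = 0;

        for (size_t i = 0; i < strlen(data); i++) {
            UPDATE_HASH(h, (unsigned char)data[i]);
            /* From index MIN_MATCH-1 on, h depends only on the last 3 bytes,
             * so identical 3-byte windows print identical hashes. */
            if (i >= MIN_MATCH - 1)
                printf("window \"%.3s\" -> hash %u\n", &data[i - 2], h);
        }
        return 0;
    }

Equal three-byte windows therefore hash to the same bucket, which is what lets longest_match() walk a chain of earlier positions that might match.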
|
D | deftree.c |
    132  static void init_block (deflate_state *s);
    133  static void pqdownheap (deflate_state *s, ct_data *tree, int k);
    134  static void gen_bitlen (deflate_state *s, tree_desc *desc);
    136  static void build_tree (deflate_state *s, tree_desc *desc);
    137  static void scan_tree (deflate_state *s, ct_data *tree, int max_code);
    138  static void send_tree (deflate_state *s, ct_data *tree, int max_code);
    139  static int build_bl_tree (deflate_state *s);
    140  static void send_all_trees (deflate_state *s, int lcodes, int dcodes,
    142  static void compress_block (deflate_state *s, ct_data *ltree,
    144  static void set_data_type (deflate_state *s);
    [all …]
|
D | defutil.h |
    260  #define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
    268  #define MAX_DIST(s) ((s)->w_size-MIN_LOOKAHEAD)
    274  void zlib_tr_init (deflate_state *s);
    275  int zlib_tr_tally (deflate_state *s, unsigned dist, unsigned lc);
    276  ulg zlib_tr_flush_block (deflate_state *s, char *buf, ulg stored_len,
    278  void zlib_tr_align (deflate_state *s);
    279  void zlib_tr_stored_block (deflate_state *s, char *buf, ulg stored_len,
    288  #define put_short(s, w) { \
    289      put_byte(s, (uch)((w) & 0xff)); \
    290      put_byte(s, (uch)((ush)(w) >> 8)); \
    [all …]
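put_short() appends a 16-bit value to the pending output buffer least-significant byte first, as two put_byte() calls (putShortMSB() in deflate.c is the big-endian counterpart). A tiny stand-alone sketch of the same buffering pattern; the buffer size and values are illustrative:

    #include <stdio.h>

    static unsigned char pending_buf[64];
    static unsigned int  pending;

    #define put_byte(c)  (pending_buf[pending++] = (unsigned char)(c))
    #define put_short(w) do { put_byte((w) & 0xff); put_byte((unsigned short)(w) >> 8); } while (0)

    int main(void)
    {
        put_short(0x1234);               /* stored as 0x34 0x12: LSB first */
        put_short(0xbeef);
        for (unsigned int i = 0; i < pending; i++)
            printf("%02x ", pending_buf[i]);
        printf("\n");                    /* prints: 34 12 ef be */
        return 0;
    }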
|
/lib/xz/
D | xz_dec_stream.c |
    157  static bool fill_temp(struct xz_dec *s, struct xz_buf *b)
    160          b->in_size - b->in_pos, s->temp.size - s->temp.pos);
    162      memcpy(s->temp.buf + s->temp.pos, b->in + b->in_pos, copy_size);
    164      s->temp.pos += copy_size;
    166      if (s->temp.pos == s->temp.size) {
    167          s->temp.pos = 0;
    175  static enum xz_ret dec_vli(struct xz_dec *s, const uint8_t *in,
    180      if (s->pos == 0)
    181          s->vli = 0;
    187      s->vli |= (vli_type)(byte & 0x7F) << s->pos;
    [all …]
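dec_vli() decodes the variable-length integers used in XZ stream headers: each byte carries seven payload bits, least-significant group first, and the 0x80 bit means another byte follows. A minimal user-space round trip of that encoding (the helper names are made up for the sketch):

    #include <stdint.h>
    #include <stdio.h>

    /* Encode v, 7 bits per byte, 0x80 = "more bytes follow". Returns bytes written. */
    static size_t vli_encode(uint64_t v, uint8_t *out)
    {
        size_t n = 0;
        while (v >= 0x80) {
            out[n++] = (uint8_t)(v & 0x7F) | 0x80;
            v >>= 7;
        }
        out[n++] = (uint8_t)v;
        return n;
    }

    /* Decode in the same incremental style as dec_vli(): OR each group into place. */
    static uint64_t vli_decode(const uint8_t *in, size_t n)
    {
        uint64_t v = 0;
        unsigned pos = 0;
        for (size_t i = 0; i < n; i++, pos += 7)
            v |= (uint64_t)(in[i] & 0x7F) << pos;
        return v;
    }

    int main(void)
    {
        uint8_t buf[10];
        size_t n = vli_encode(123456789, buf);
        printf("%zu bytes, decoded %llu\n", n,
               (unsigned long long)vli_decode(buf, n));
        return 0;
    }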
|
D | xz_dec_lzma2.c |
    604  static uint16_t *lzma_literal_probs(struct xz_dec_lzma2 *s)
    606      uint32_t prev_byte = dict_get(&s->dict, 0);
    607      uint32_t low = prev_byte >> (8 - s->lzma.lc);
    608      uint32_t high = (s->dict.pos & s->lzma.literal_pos_mask) << s->lzma.lc;
    609      return s->lzma.literal[low + high];
    613  static void lzma_literal(struct xz_dec_lzma2 *s)
    622      probs = lzma_literal_probs(s);
    624      if (lzma_state_is_literal(s->lzma.state)) {
    625          symbol = rc_bittree(&s->rc, probs, 0x100);
    628          match_byte = dict_get(&s->dict, s->lzma.rep0) << 1;
    [all …]
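lzma_literal_probs() selects which probability table models the next literal byte: the lc high bits of the previous byte and the lp low bits of the current output position are folded into one table index. A sketch of just that index computation, assuming LZMA's default lc = 3, lp = 0 (the rest of the decoder is omitted):

    #include <stdint.h>
    #include <stdio.h>

    /* Context index for the next literal, as in lzma_literal_probs(). */
    static uint32_t literal_context(uint8_t prev_byte, uint32_t pos,
                                    unsigned lc, unsigned lp)
    {
        uint32_t literal_pos_mask = (1u << lp) - 1;
        uint32_t low  = prev_byte >> (8 - lc);           /* lc high bits of prev byte */
        uint32_t high = (pos & literal_pos_mask) << lc;  /* lp low bits of position   */
        return low + high;                               /* selects one prob table    */
    }

    int main(void)
    {
        /* With lc=3, lp=0 there are 8 contexts, chosen by the top 3 bits of prev_byte. */
        printf("%u %u\n", literal_context(0x00, 5, 3, 0),
                          literal_context(0xE0, 5, 3, 0));   /* prints: 0 7 */
        return 0;
    }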
|
D | xz_dec_bcj.c |
     88  static size_t bcj_x86(struct xz_dec_bcj *s, uint8_t *buf, size_t size)
     97      uint32_t prev_mask = s->x86_prev_mask;
    132          dest = src - (s->pos + (uint32_t)i + 5);
    154      s->x86_prev_mask = prev_pos > 3 ? 0 : prev_mask << (prev_pos - 1);
    160  static size_t bcj_powerpc(struct xz_dec_bcj *s, uint8_t *buf, size_t size)
    169          instr -= s->pos + (uint32_t)i;
    181  static size_t bcj_ia64(struct xz_dec_bcj *s, uint8_t *buf, size_t size)
    242          addr -= s->pos + (uint32_t)i;
    265  static size_t bcj_arm(struct xz_dec_bcj *s, uint8_t *buf, size_t size)
    275          addr -= s->pos + (uint32_t)i + 8;
    [all …]
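All of the BCJ filters do one thing: at compression time relative branch targets are rewritten as absolute addresses, so repeated calls to the same function become identical byte patterns, and the decoder undoes it by subtracting the instruction's position in the stream (the "-= s->pos + i" lines above). A toy version of that idea on an array of 32-bit displacements; real filters also recognize the instruction encodings, which is omitted here:

    #include <stdint.h>
    #include <stdio.h>

    /* encode=1: relative -> absolute (compressor side); encode=0: decoder side. */
    static void bcj_toy(uint32_t *disp, size_t n, uint32_t stream_pos, int encode)
    {
        for (size_t i = 0; i < n; i++) {
            uint32_t here = stream_pos + (uint32_t)i * 4;   /* "instruction" offset */
            if (encode)
                disp[i] += here;     /* absolute target */
            else
                disp[i] -= here;     /* back to a relative displacement */
        }
    }

    int main(void)
    {
        /* Two calls to the same target from different places... */
        uint32_t d[2] = { 0x1000, 0x1000 - 4 };
        bcj_toy(d, 2, 0, 1);
        printf("%#x %#x\n", (unsigned)d[0], (unsigned)d[1]);  /* ...become the same value */
        bcj_toy(d, 2, 0, 0);
        printf("%#x %#x\n", (unsigned)d[0], (unsigned)d[1]);  /* and round-trip back */
        return 0;
    }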
|
/lib/
D | seq_buf.c |
     28  static bool seq_buf_can_fit(struct seq_buf *s, size_t len)
     30      return s->len + len <= s->size;
     40  int seq_buf_print_seq(struct seq_file *m, struct seq_buf *s)
     42      unsigned int len = seq_buf_used(s);
     44      return seq_write(m, s->buffer, len);
     57  int seq_buf_vprintf(struct seq_buf *s, const char *fmt, va_list args)
     61      WARN_ON(s->size == 0);
     63      if (s->len < s->size) {
     64          len = vsnprintf(s->buffer + s->len, s->size - s->len, fmt, args);
     65          if (s->len + len < s->size) {
    [all …]
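seq_buf_vprintf() formats into the buffer at the current length and only commits the new length when vsnprintf() reports the text fit; otherwise the buffer is treated as full. A stand-alone sketch of that pattern (the struct name, buffer size, and helper are local to the sketch, not the kernel API):

    #include <stdarg.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct sbuf {
        char   buffer[32];
        size_t len;
    };

    /* Append printf-style text; return false (and leave len alone) if it would overflow. */
    static bool sbuf_printf(struct sbuf *s, const char *fmt, ...)
    {
        va_list args;
        int n;

        va_start(args, fmt);
        n = vsnprintf(s->buffer + s->len, sizeof(s->buffer) - s->len, fmt, args);
        va_end(args);

        if (n < 0 || s->len + (size_t)n >= sizeof(s->buffer)) {
            s->buffer[s->len] = '\0';   /* drop the truncated partial write */
            return false;
        }
        s->len += (size_t)n;
        return true;
    }

    int main(void)
    {
        struct sbuf s = { .len = 0 };

        bool ok1 = sbuf_printf(&s, "pid=%d ", 42);
        bool ok2 = sbuf_printf(&s, "comm=%s", "a-very-long-task-name-here");
        printf("%d %d \"%s\"\n", ok1, ok2, s.buffer);   /* 1 0 "pid=42 " */
        return 0;
    }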
|
D | win_minmax.c |
     32      u32 dt = val->t - m->s[0].t;
     42          m->s[0] = m->s[1];
     43          m->s[1] = m->s[2];
     44          m->s[2] = *val;
     45          if (unlikely(val->t - m->s[0].t > win)) {
     46              m->s[0] = m->s[1];
     47              m->s[1] = m->s[2];
     48              m->s[2] = *val;
     50      } else if (unlikely(m->s[1].t == m->s[0].t) && dt > win/4) {
     55          m->s[2] = m->s[1] = *val;
    [all …]
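win_minmax tracks a windowed maximum (or minimum) while keeping only the best three (value, time) samples spread across the window, so the estimate ages out gracefully in constant space. For contrast, the straightforward approach it replaces scans every sample still inside the window; a toy illustration of that baseline, not the kernel algorithm:

    #include <stdint.h>
    #include <stdio.h>

    #define NSAMP 128

    struct sample { uint32_t t, v; };

    /* Naive windowed max: O(window) memory and time per query. */
    static uint32_t windowed_max(const struct sample *s, int n, uint32_t now, uint32_t win)
    {
        uint32_t best = 0;
        for (int i = 0; i < n; i++)
            if (now - s[i].t <= win && s[i].v > best)
                best = s[i].v;
        return best;
    }

    int main(void)
    {
        struct sample s[NSAMP];
        for (int i = 0; i < NSAMP; i++)
            s[i] = (struct sample){ .t = (uint32_t)i, .v = (uint32_t)((i * 37) % 100) };
        /* Max over the last 16 time units, evaluated at t = 127. */
        printf("windowed max = %u\n", windowed_max(s, NSAMP, 127, 16));
        return 0;
    }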
|
D | slub_kunit.c |
     22      struct kmem_cache *s = kmem_cache_create(name, size, 0,
     24      s->flags |= SLAB_SKIP_KFENCE;
     25      return s;
     30      struct kmem_cache *s = test_kmem_cache_create("TestSlub_RZ_alloc", 64,
     32      u8 *p = kmem_cache_alloc(s, GFP_KERNEL);
     37      validate_slab_cache(s);
     41      kmem_cache_free(s, p);
     42      kmem_cache_destroy(s);
     48      struct kmem_cache *s = test_kmem_cache_create("TestSlub_next_ptr_free",
     50      u8 *p = kmem_cache_alloc(s, GFP_KERNEL);
    [all …]
|
D | kstrtox.c |
     26  const char *_parse_integer_fixup_radix(const char *s, unsigned int *base)
     29      if (s[0] == '0') {
     30          if (_tolower(s[1]) == 'x' && isxdigit(s[2]))
     37      if (*base == 16 && s[0] == '0' && _tolower(s[1]) == 'x')
     38          s += 2;
     39      return s;
     52  unsigned int _parse_integer_limit(const char *s, unsigned int base, unsigned long long *p,
     61          unsigned int c = *s;
     84          s++;
     91  unsigned int _parse_integer(const char *s, unsigned int base, unsigned long long *p)
    [all …]
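_parse_integer_fixup_radix() implements the base auto-detection used by the kstrto*() helpers: base 0 becomes 16 for a "0x" prefix, 8 for a bare leading zero, and 10 otherwise, and any "0x" prefix is skipped before digit parsing. A user-space sketch of the same policy:

    #include <ctype.h>
    #include <stdio.h>

    /* Same policy as _parse_integer_fixup_radix(): base 0 is auto-detected. */
    static const char *fixup_radix(const char *s, unsigned int *base)
    {
        if (*base == 0) {
            if (s[0] == '0') {
                if (tolower((unsigned char)s[1]) == 'x' && isxdigit((unsigned char)s[2]))
                    *base = 16;
                else
                    *base = 8;
            } else {
                *base = 10;
            }
        }
        if (*base == 16 && s[0] == '0' && tolower((unsigned char)s[1]) == 'x')
            s += 2;             /* skip the "0x" so digit parsing can start */
        return s;
    }

    int main(void)
    {
        const char *inputs[] = { "0x1f", "017", "42" };
        for (int i = 0; i < 3; i++) {
            unsigned int base = 0;
            const char *digits = fixup_radix(inputs[i], &base);
            printf("%-5s -> base %u, digits \"%s\"\n", inputs[i], base, digits);
        }
        return 0;
    }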
|
D | parser.c |
     34  static int match_one(char *s, const char *p, substring_t args[])
     46          return strcmp(p, s) == 0;
     48          if (strncmp(p, s, meta-p))
     51          s += meta - p;
     57          if (*s++ != '%')
     66          args[argc].from = s;
     69              size_t str_len = strlen(s);
     75              args[argc].to = s + len;
     79              simple_strtol(s, &args[argc].to, 0);
     82              simple_strtoul(s, &args[argc].to, 0);
    [all …]
|
D | string.c |
    327  char *strchr(const char *s, int c)
    329      for (; *s != (char)c; ++s)
    330          if (*s == '\0')
    332      return (char *)s;
    346  char *strchrnul(const char *s, int c)
    348      while (*s && *s != (char)c)
    349          s++;
    350      return (char *)s;
    365  char *strnchrnul(const char *s, size_t count, int c)
    367      while (count-- && *s && *s != (char)c)
    [all …]
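strchrnul() differs from strchr() only in the miss case: it returns a pointer to the terminating NUL instead of NULL, so callers can compute a token length without a separate NULL check. A small demonstration (strchrnul() is a GNU extension in user space, so the sketch carries its own copy):

    #include <stdio.h>
    #include <string.h>

    static char *my_strchrnul(const char *s, int c)
    {
        while (*s && *s != (char)c)
            s++;
        return (char *)s;          /* points at c, or at the '\0' if c is absent */
    }

    int main(void)
    {
        const char *opt  = "mode=ro";      /* has '=' */
        const char *flag = "verbose";      /* has no '=' */

        /* Length of the key part, with no NULL check needed on a miss. */
        printf("%zu %zu\n", (size_t)(my_strchrnul(opt, '=') - opt),
                            (size_t)(my_strchrnul(flag, '=') - flag));   /* 4 7 */
        return 0;
    }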
|
D | ashrdi3.c |
     22          w.s.high =
     23              uu.s.high >> 31;
     24          w.s.low = uu.s.high >> -bm;
     26          const unsigned int carries = (unsigned int) uu.s.high << bm;
     28          w.s.high = uu.s.high >> b;
     29          w.s.low = ((unsigned int) uu.s.low >> b) | carries;
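__ashrdi3() builds a 64-bit arithmetic right shift out of 32-bit halves: for shifts of 32 or more, the low word comes from the sign-extended high word and the high word becomes pure sign bits; for smaller shifts, the bits falling out of the high word are ORed into the low word as carries. A self-contained version of the same construction, checked against the native 64-bit shift:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* 64-bit arithmetic shift right from 32-bit halves, as in __ashrdi3(). */
    static int64_t ashr64(int64_t u, unsigned b)
    {
        int32_t  high = (int32_t)(u >> 32);
        uint32_t low  = (uint32_t)u;
        int      bm   = 32 - (int)b;
        int32_t  whigh;
        uint32_t wlow;

        if (b == 0)
            return u;
        if (bm <= 0) {                       /* shifting by 32..63 bits */
            whigh = high >> 31;              /* just the sign, replicated */
            wlow  = (uint32_t)(high >> -bm);
        } else {
            uint32_t carries = (uint32_t)high << bm;
            whigh = high >> b;
            wlow  = (low >> b) | carries;
        }
        return (int64_t)(((uint64_t)(uint32_t)whigh << 32) | wlow);
    }

    int main(void)
    {
        int64_t v = -0x123456789abcdef0LL;
        for (unsigned b = 0; b < 64; b++)
            assert(ashr64(v, b) == v >> b);
        printf("0x%llx\n", (unsigned long long)ashr64(v, 36));
        return 0;
    }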
|
D | ucmpdi2.c |
     13      if ((unsigned int) au.s.high < (unsigned int) bu.s.high)
     15      else if ((unsigned int) au.s.high > (unsigned int) bu.s.high)
     17      if ((unsigned int) au.s.low < (unsigned int) bu.s.low)
     19      else if ((unsigned int) au.s.low > (unsigned int) bu.s.low)
|
D | cmpdi2.c |
     18      if (au.s.high < bu.s.high)
     20      else if (au.s.high > bu.s.high)
     23      if ((unsigned int) au.s.low < (unsigned int) bu.s.low)
     25      else if ((unsigned int) au.s.low > (unsigned int) bu.s.low)
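__cmpdi2() (and the unsigned __ucmpdi2() above it) compares two 64-bit values half by half: the high words signed (unsigned for __ucmpdi2), the low words always unsigned, with the result encoded as 0, 1, or 2 for less, equal, greater. A stand-alone equivalent:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Returns 0 if a < b, 1 if a == b, 2 if a > b, as __cmpdi2() does. */
    static int cmp64(int64_t a, int64_t b)
    {
        int32_t  ah = (int32_t)(a >> 32), bh = (int32_t)(b >> 32);
        uint32_t al = (uint32_t)a,        bl = (uint32_t)b;

        if (ah < bh)               /* high halves: signed comparison */
            return 0;
        if (ah > bh)
            return 2;
        if (al < bl)               /* low halves: unsigned comparison */
            return 0;
        if (al > bl)
            return 2;
        return 1;
    }

    int main(void)
    {
        assert(cmp64(-1, 1) == 0);
        assert(cmp64(1LL << 40, 1LL << 40) == 1);
        assert(cmp64(-5, -6) == 2);
        printf("ok\n");
        return 0;
    }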
|
D | lshrdi3.c |
     21          w.s.high = 0;
     22          w.s.low = (unsigned int) uu.s.high >> -bm;
     24          const unsigned int carries = (unsigned int) uu.s.high << bm;
     26          w.s.high = (unsigned int) uu.s.high >> b;
     27          w.s.low = ((unsigned int) uu.s.low >> b) | carries;
|
D | ashldi3.c |
     21          w.s.low = 0;
     22          w.s.high = (unsigned int) uu.s.low << -bm;
     24          const unsigned int carries = (unsigned int) uu.s.low >> bm;
     26          w.s.low = (unsigned int) uu.s.low << b;
     27          w.s.high = ((unsigned int) uu.s.high << b) | carries;
|
D | net_utils.c |
      8  bool mac_pton(const char *s, u8 *mac)
     14      if (strnlen(s, maxlen) < maxlen)
     19          if (!isxdigit(s[i * 3]) || !isxdigit(s[i * 3 + 1]))
     21          if (i != ETH_ALEN - 1 && s[i * 3 + 2] != ':')
     25          mac[i] = (hex_to_bin(s[i * 3]) << 4) | hex_to_bin(s[i * 3 + 1]);
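mac_pton() parses a MAC address in the canonical "xx:xx:xx:xx:xx:xx" form: every octet sits at a fixed offset i * 3, so validation and conversion are two simple loops. A user-space sketch of the same approach (the helpers mirror the kernel's names but are local to the sketch):

    #include <ctype.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define ETH_ALEN 6

    static int hex_to_bin(char c)
    {
        if (c >= '0' && c <= '9')
            return c - '0';
        c = (char)tolower((unsigned char)c);
        if (c >= 'a' && c <= 'f')
            return c - 'a' + 10;
        return -1;
    }

    /* Parse "xx:xx:xx:xx:xx:xx"; every field sits at a fixed offset i * 3. */
    static bool my_mac_pton(const char *s, uint8_t *mac)
    {
        if (strlen(s) < ETH_ALEN * 3 - 1)
            return false;
        for (int i = 0; i < ETH_ALEN; i++) {
            if (!isxdigit((unsigned char)s[i * 3]) || !isxdigit((unsigned char)s[i * 3 + 1]))
                return false;
            if (i != ETH_ALEN - 1 && s[i * 3 + 2] != ':')
                return false;
        }
        for (int i = 0; i < ETH_ALEN; i++)
            mac[i] = (uint8_t)((hex_to_bin(s[i * 3]) << 4) | hex_to_bin(s[i * 3 + 1]));
        return true;
    }

    int main(void)
    {
        uint8_t mac[ETH_ALEN];
        if (my_mac_pton("00:1a:2b:3c:4d:5e", mac))
            printf("last octet: %02x\n", mac[ETH_ALEN - 1]);   /* 5e */
        return 0;
    }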
|
D | muldi3.c |
     44          umul_ppmm(__w.s.high, __w.s.low, u, v); \
     53      DWunion w = {.ll = __umulsidi3(uu.s.low, vv.s.low)};
     55      w.s.high += ((unsigned long) uu.s.low * (unsigned long) vv.s.high
     56               + (unsigned long) uu.s.high * (unsigned long) vv.s.low);
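__muldi3() multiplies two 64-bit values on 32-bit targets by taking the full product of the low halves and then folding the two cross products into the high word only; the high*high term lies entirely above bit 63 and is dropped. A self-contained version of the same decomposition, checked against the native multiply:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* 64x64 -> 64 multiply built from 32-bit halves, the scheme used by __muldi3(). */
    static uint64_t mul64(uint64_t a, uint64_t b)
    {
        uint32_t al = (uint32_t)a, ah = (uint32_t)(a >> 32);
        uint32_t bl = (uint32_t)b, bh = (uint32_t)(b >> 32);

        uint64_t w  = (uint64_t)al * bl;       /* full product of the low halves */
        uint32_t hi = (uint32_t)(w >> 32);

        /* The cross terms only reach the high word; ah*bh would overflow out entirely. */
        hi += (uint32_t)((uint64_t)al * bh + (uint64_t)ah * bl);
        return ((uint64_t)hi << 32) | (uint32_t)w;
    }

    int main(void)
    {
        uint64_t a = 0x123456789abcdef0ULL, b = 0xfedcba9876543210ULL;
        assert(mul64(a, b) == a * b);
        printf("0x%llx\n", (unsigned long long)mul64(a, b));
        return 0;
    }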
|
D | decompress_unxz.c |
    209      const uint8_t *s = src;
    212      if (d < s) {
    214              d[i] = s[i];
    215      } else if (d > s) {
    218              d[i] = s[i];
    260      struct xz_dec *s;
    272          s = xz_dec_init(XZ_SINGLE, 0);
    274          s = xz_dec_init(XZ_DYNALLOC, (uint32_t)-1);
    276      if (s == NULL)
    302          ret = xz_dec_run(s, &b);
    [all …]
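The decompressor carries its own minimal memmove(): copy forward when the destination lies below the source, backward when it lies above, which is all that overlapping ranges need. The same routine as a stand-alone program:

    #include <stdint.h>
    #include <stdio.h>

    /* Overlap-safe copy: forward when dest < src, backward when dest > src. */
    static void *my_memmove(void *dest, const void *src, size_t size)
    {
        uint8_t *d = dest;
        const uint8_t *s = src;

        if (d < s) {
            for (size_t i = 0; i < size; i++)
                d[i] = s[i];
        } else if (d > s) {
            for (size_t i = size; i-- > 0; )
                d[i] = s[i];
        }
        return dest;
    }

    int main(void)
    {
        char buf[16] = "abcdef";
        my_memmove(buf + 2, buf, 6);    /* overlapping shift right by two */
        printf("%s\n", buf);            /* prints: ababcdef */
        return 0;
    }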
|
/lib/zstd/compress/
D | fse_compress.c |
    123      U32 s;
    124      for (s=0; s<maxSV1; ++s, sv += add) {
    126          int const n = normalizedCounter[s];
    140      size_t s;
    143      for (s = 0; s < (size_t)tableSize; s += unroll) {
    147          tableSymbol[uPosition] = spread[s + u];
    170      …FSE_FUNCTION_TYPE s = tableSymbol[u];   /* note : static analyzer may not understand tableSymbol i…
    171      …tableU16[cumul[s]++] = (U16) (tableSize+u);   /* TableU16 : sorted by symbol order; gives next sta…
    176      unsigned s;
    177      for (s=0; s<=maxSymbolValue; s++) {
    [all …]
|
D | hist.c |
     48      { U32 s;
     49          for (s=0; s<=maxSymbolValue; s++)
     50              if (count[s] > largestCount) largestCount = count[s];
    120      { U32 s;
    121          for (s=0; s<256; s++) {
    122              Counting1[s] += Counting2[s] + Counting3[s] + Counting4[s];
    123              if (Counting1[s] > max) max = Counting1[s];
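HIST_count_parallel_wksp() keeps four separate counting arrays and feeds them interleaved bytes, so consecutive increments do not serialize on the same counter; the arrays are summed at the end while tracking the largest count. A compact user-space version of that technique:

    #include <stdint.h>
    #include <stdio.h>

    /* Histogram with four interleaved counter arrays (the idea behind
     * HIST_count_parallel_wksp), merged at the end; returns the largest count. */
    static unsigned hist256(const uint8_t *src, size_t n, unsigned count[256])
    {
        unsigned c1[256] = {0}, c2[256] = {0}, c3[256] = {0}, c4[256] = {0};
        unsigned max = 0;
        size_t i = 0;

        for (; i + 4 <= n; i += 4) {       /* independent updates pipeline better */
            c1[src[i]]++;
            c2[src[i + 1]]++;
            c3[src[i + 2]]++;
            c4[src[i + 3]]++;
        }
        for (; i < n; i++)
            c1[src[i]]++;

        for (unsigned s = 0; s < 256; s++) {
            count[s] = c1[s] + c2[s] + c3[s] + c4[s];
            if (count[s] > max)
                max = count[s];
        }
        return max;
    }

    int main(void)
    {
        const uint8_t data[] = "abracadabra";
        unsigned count[256];
        printf("largest count = %u\n", hist256(data, sizeof(data) - 1, count));  /* 'a' x 5 */
        return 0;
    }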
|
D | zstd_compress_sequences.c |
     87      unsigned s;
     90      for (s = 0; s <= max; ++s) {
     91          unsigned norm = (unsigned)((256 * count[s]) / total);
     92          if (count[s] != 0 && norm == 0)
     94          assert(count[s] < total);
     95          cost += count[s] * kInverseProbabilityLog256[norm];
    111      unsigned s;
    119      for (s = 0; s <= max; ++s) {
    122          unsigned const bitCost = FSE_bitCost(cstate.symbolTT, tableLog, s, kAccuracyLog);
    123          if (count[s] == 0)
    [all …]
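ZSTD_entropyCost() estimates how many bits it costs to code a histogram with its own distribution: each symbol contributes count[s] times -log2 of its probability, with probabilities quantized to multiples of 1/256 and the logarithms taken from the precomputed kInverseProbabilityLog256 table. The same estimate written directly with libm, as an illustration rather than the zstd code:

    #include <math.h>
    #include <stdio.h>

    /* Shannon cost in bits of coding count[] with its own empirical distribution:
     * sum over symbols of count[s] * -log2(count[s] / total). */
    static double entropy_cost_bits(const unsigned *count, unsigned max_sym, unsigned total)
    {
        double cost = 0.0;
        for (unsigned s = 0; s <= max_sym; s++) {
            if (count[s] == 0)
                continue;
            cost += count[s] * -log2((double)count[s] / total);
        }
        return cost;
    }

    int main(void)
    {
        unsigned count[4] = { 50, 25, 15, 10 };    /* 100 symbols total */
        printf("estimated size: %.1f bits\n", entropy_cost_bits(count, 3, 100));
        return 0;
    }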
|
/lib/zstd/common/
D | zstd_deps.h |
     32  #define ZSTD_memcpy(d,s,n) __builtin_memcpy((d),(s),(n))
     33  #define ZSTD_memmove(d,s,n) __builtin_memmove((d),(s),(n))
     34  #define ZSTD_memset(d,s,n) __builtin_memset((d),(s),(n))
     50  #define ZSTD_malloc(s) ({ (void)(s); NULL; })
     52  #define ZSTD_calloc(n,s) ({ (void)(n); (void)(s); NULL; })
|
D | fse_decompress.c |
     92      U32 s;
     93      for (s=0; s<maxSV1; s++) {
     94          if (normalizedCounter[s]==-1) {
     95              tableDecode[highThreshold--].symbol = (FSE_FUNCTION_TYPE)s;
     96              symbolNext[s] = 1;
     98          if (normalizedCounter[s] >= largeLimit) DTableH.fastMode=0;
     99          symbolNext[s] = normalizedCounter[s];
    118      U32 s;
    119      for (s=0; s<maxSV1; ++s, sv += add) {
    121          int const n = normalizedCounter[s];
    [all …]
|