/lib/lz4/
lz4_decompress.c:
  123  const BYTE *match;  in LZ4_decompress_generic()  [local]
  164  match = op - offset;  in LZ4_decompress_generic()
  165  assert(match <= op); /* check overflow */  in LZ4_decompress_generic()
  170  (dict == withPrefix64k || match >= lowPrefix)) {  in LZ4_decompress_generic()
  172  memcpy(op + 0, match + 0, 8);  in LZ4_decompress_generic()
  173  memcpy(op + 8, match + 8, 8);  in LZ4_decompress_generic()
  174  memcpy(op + 16, match + 16, 2);  in LZ4_decompress_generic()
  280  match = op - offset;  in LZ4_decompress_generic()
  286  if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) {  in LZ4_decompress_generic()
  326  if ((dict == usingExtDict) && (match < lowPrefix)) {  in LZ4_decompress_generic()
  [all …]
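The hits above show how the LZ4 decoder resolves a back-reference: the match source is simply op - offset inside the already-produced output (lines 164, 280), and on the fast path it is copied in fixed 8/8/2-byte chunks (lines 172-174). The following is a minimal standalone sketch of that idea, not the kernel code: LZ4_decompress_generic() additionally validates the offset and handles the dictionary modes, and the sketch uses the byte-by-byte form so that overlapping matches (offset smaller than the length) still work.

```c
#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

/*
 * Minimal sketch of an LZ4-style match copy, assuming the match has already
 * been decoded as (offset, length).  Bounds checks, dictionary handling and
 * the wide 8/8/2-byte copies of the real decoder are deliberately omitted.
 */
static void copy_match(uint8_t *op, size_t offset, size_t length)
{
    const uint8_t *match = op - offset;   /* back-reference into the output */

    /*
     * Byte-by-byte copy, so an overlapping match (offset < length) re-reads
     * the bytes written earlier within this very match.
     */
    while (length--)
        *op++ = *match++;
}

int main(void)
{
    /* Literal "abc", then a match with offset 3 and length 9. */
    uint8_t out[16] = "abc";

    copy_match(out + 3, 3, 9);
    out[12] = '\0';
    printf("%s\n", (char *)out);          /* prints "abcabcabcabc" */
    return 0;
}
```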
lz4_compress.c:
  246  const BYTE *match;  in LZ4_compress_generic()  [local]
  265  match = LZ4_getPositionOnHash(h,  in LZ4_compress_generic()
  270  if (match < (const BYTE *)source) {  in LZ4_compress_generic()
  284  ? (match < lowRefLimit)  in LZ4_compress_generic()
  288  : (match + MAX_DISTANCE < ip))  in LZ4_compress_generic()
  289  || (LZ4_read32(match + refDelta)  in LZ4_compress_generic()
  294  while (((ip > anchor) & (match + refDelta > lowLimit))  in LZ4_compress_generic()
  295  && (unlikely(ip[-1] == match[refDelta - 1]))) {  in LZ4_compress_generic()
  297  match--;  in LZ4_compress_generic()
  331  LZ4_writeLE16(op, (U16)(ip - match));  in LZ4_compress_generic()
  [all …]
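On the compression side, a candidate match is found through a hash of the upcoming input bytes (line 265), confirmed with a 32-bit compare (line 289), and the match distance is finally stored with LZ4_writeLE16(op, (U16)(ip - match)) (line 331). The sketch below illustrates that hash-and-verify lookup in isolation; the table size, hash constant and helper names are choices made for the example, not the kernel's.

```c
#include <stdint.h>
#include <string.h>
#include <stdio.h>
#include <stddef.h>

#define HASH_LOG  12
#define HASH_SIZE (1u << HASH_LOG)

/* Unaligned-safe 4-byte load, in the spirit of LZ4_read32(). */
static uint32_t read32(const uint8_t *p)
{
    uint32_t v;
    memcpy(&v, p, sizeof(v));
    return v;
}

static uint32_t hash4(uint32_t v)
{
    return (v * 2654435761u) >> (32 - HASH_LOG);
}

/*
 * Sketch of a hash-based match lookup: hash the next 4 bytes, fetch the last
 * position that hashed to the same slot, confirm the candidate with a 4-byte
 * compare, then extend it.  The caller must ensure at least 4 readable bytes
 * remain at `pos`.  Returns the match length, or 0 if there is no match.
 */
static size_t find_match(const uint8_t *base, size_t pos, size_t len,
                         uint32_t *table, size_t *match_pos)
{
    const uint8_t *ip = base + pos;
    uint32_t h = hash4(read32(ip));
    size_t candidate = table[h];

    table[h] = (uint32_t)pos;                 /* remember this position */

    if (candidate >= pos || read32(base + candidate) != read32(ip))
        return 0;                             /* empty slot or false hit */

    size_t ml = 4;                            /* extend past the verified 4 bytes */
    while (pos + ml < len && base[candidate + ml] == ip[ml])
        ml++;
    *match_pos = candidate;
    return ml;
}

int main(void)
{
    static uint32_t table[HASH_SIZE];         /* zero-initialized hash table */
    const uint8_t *data = (const uint8_t *)"abcdefgh_abcdefgh";
    size_t len = 17, mpos = 0;

    find_match(data, 0, len, table, &mpos);           /* seeds the table */
    size_t ml = find_match(data, 9, len, table, &mpos);
    printf("match at %zu, length %zu\n", mpos, ml);   /* match at 0, length 8 */
    return 0;
}
```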
lz4hc_compress.c:
  126  const BYTE * const match = base + matchIndex;  in LZ4HC_InsertAndFindBestMatch()  [local]
  128  if (*(match + ml) == *(ip + ml)  in LZ4HC_InsertAndFindBestMatch()
  129  && (LZ4_read32(match) == LZ4_read32(ip))) {  in LZ4HC_InsertAndFindBestMatch()
  131  match + MINMATCH, iLimit) + MINMATCH;  in LZ4HC_InsertAndFindBestMatch()
  135  *matchpos = match;  in LZ4HC_InsertAndFindBestMatch()
  139  const BYTE * const match = dictBase + matchIndex;  in LZ4HC_InsertAndFindBestMatch()  [local]
  141  if (LZ4_read32(match) == LZ4_read32(ip)) {  in LZ4HC_InsertAndFindBestMatch()
  149  match + MINMATCH, vLimit) + MINMATCH;  in LZ4HC_InsertAndFindBestMatch()
  267  const BYTE * const match,  in LZ4HC_encodeSequence()  [argument]
  300  LZ4_writeLE16(*op, (U16)(*ip - match));  in LZ4HC_encodeSequence()
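Line 128 shows a small but effective screen in the HC match finder: before measuring a candidate in full, the single byte at the current best length is compared (*(match + ml) == *(ip + ml)); if it differs, the candidate cannot beat the best match found so far, so the expensive count is skipped. A standalone illustration of that screening step, with invented buffers and helper names, assuming every candidate has at least ip_max readable bytes:

```c
#include <stdio.h>
#include <stddef.h>

/* Count how many leading bytes of a and b agree, up to max. */
static size_t match_len(const unsigned char *a, const unsigned char *b, size_t max)
{
    size_t n = 0;
    while (n < max && a[n] == b[n])
        n++;
    return n;
}

/* Pick the longest match among candidates, screening each one first. */
static size_t best_match(const unsigned char *ip, size_t ip_max,
                         const unsigned char **cand, size_t ncand,
                         size_t *best_idx)
{
    size_t best_len = 0;

    for (size_t i = 0; i < ncand; i++) {
        if (best_len == ip_max)
            break;                             /* cannot improve any further */
        /* Cheap screen: a longer match must agree at offset best_len. */
        if (best_len && cand[i][best_len] != ip[best_len])
            continue;
        size_t len = match_len(cand[i], ip, ip_max);
        if (len > best_len) {
            best_len = len;
            *best_idx = i;
        }
    }
    return best_len;
}

int main(void)
{
    const unsigned char *ip = (const unsigned char *)"abcdefgh";
    const unsigned char *cands[] = {
        (const unsigned char *)"abcxxxxx",
        (const unsigned char *)"abcdeyyy",
        (const unsigned char *)"abzzzzzz",
    };
    size_t idx = 0;
    size_t len = best_match(ip, 8, cands, 3, &idx);

    printf("best candidate %zu, length %zu\n", idx, len);   /* 1, 5 */
    return 0;
}
```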
/lib/zlib_deflate/
deflate.c:
   80  static void check_match (deflate_state *s, IPos start, IPos match,
  561  register Byte *match; /* matched string */  in longest_match()  [local]
  604  match = s->window + cur_match;  in longest_match()
  613  if (*(ush*)(match+best_len-1) != scan_end ||  in longest_match()
  614  *(ush*)match != scan_start) continue;  in longest_match()
  625  Assert(scan[2] == match[2], "scan[2]?");  in longest_match()
  626  scan++, match++;  in longest_match()
  628  } while (*(ush*)(scan+=2) == *(ush*)(match+=2) &&  in longest_match()
  629  *(ush*)(scan+=2) == *(ush*)(match+=2) &&  in longest_match()
  630  *(ush*)(scan+=2) == *(ush*)(match+=2) &&  in longest_match()
  [all …]
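longest_match() screens each candidate with two 16-bit loads (lines 613-614: the bytes at best_len-1 and the first two bytes) before running its unrolled compare loop. The candidates themselves come from deflate's hash chains: earlier positions with the same hash are linked through a prev[] array, and the walk is cut off after max_chain_length links. The sketch below shows only that chain walk in a much simplified form; the toy window size, the chain limit and the absence of zlib's screening and good_match/nice_match heuristics are all simplifications for the example.

```c
#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define WIN_SIZE  256        /* toy window; zlib uses a 32 KiB window */
#define MAX_CHAIN 16         /* stop the search after this many links */

/*
 * Simplified longest_match(): walk the prev[] chain of earlier positions
 * that shared a hash slot with `cur`, keep the longest match found, and
 * give up after MAX_CHAIN links.  A chain value of 0 terminates the walk.
 */
static size_t longest_match(const uint8_t *win, size_t cur, size_t avail,
                            const uint16_t *prev, size_t *match_pos)
{
    size_t best_len = 0;
    size_t candidate = prev[cur % WIN_SIZE];
    int chain = MAX_CHAIN;

    while (candidate && chain--) {
        const uint8_t *scan = win + cur;
        const uint8_t *match = win + candidate;
        size_t len = 0;

        while (len < avail && scan[len] == match[len])
            len++;
        if (len > best_len) {
            best_len = len;
            *match_pos = candidate;
        }
        candidate = prev[candidate % WIN_SIZE];
    }
    return best_len;
}

int main(void)
{
    static uint16_t prev[WIN_SIZE];                     /* 0 ends a chain */
    const uint8_t *win = (const uint8_t *)"xabcde__abcd____abcdef";
    size_t pos = 0;

    /* Pretend positions 1, 8 and 16 all hashed to the same slot. */
    prev[16] = 8;
    prev[8] = 1;

    size_t len = longest_match(win, 16, 6, prev, &pos);
    printf("longest match at %zu, length %zu\n", pos, len);  /* at 1, length 5 */
    return 0;
}
```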
/lib/
glob.c:
  71  bool match = false, inverted = (*pat == '!');  in glob_match()  [local]
  95  match |= (a <= c && c <= b);  in glob_match()
  98  if (match == inverted)  in glob_match()
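These three hits are the character-class handling inside glob_match(): every literal or a-b range inside a [...] class ORs into match, and the accumulated result is then compared against the '!' inversion flag. Below is a standalone fragment of just that class logic, assuming a well-formed class; it is not the kernel's full glob_match(), which also handles '*', '?' and further class edge cases.

```c
#include <stdbool.h>
#include <stdio.h>

/*
 * Does character c satisfy a glob character class such as "a-f]" or "!0-9]"?
 * `pat` points just past the opening '['; a leading '!' inverts the class.
 */
static bool class_match(const char *pat, char c)
{
    bool match = false, inverted = (*pat == '!');

    if (inverted)
        pat++;
    while (*pat && *pat != ']') {
        char a = *pat++;
        char b = a;

        if (*pat == '-' && pat[1] && pat[1] != ']') {
            b = pat[1];                   /* a-b range */
            pat += 2;
        }
        match |= (a <= c && c <= b);      /* literal char or range hit */
    }
    return match != inverted;             /* '!' flips the result */
}

int main(void)
{
    printf("%d\n", class_match("a-f]", 'c'));    /* 1 */
    printf("%d\n", class_match("!0-9]", 'c'));   /* 1 */
    printf("%d\n", class_match("!0-9]", '7'));   /* 0 */
    return 0;
}
```

The kernel-side entry point is the glob_match(pat, str) call visible in globtest.c below, which exercises the full pattern matcher rather than just the class logic sketched here.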
globtest.c:
  21  bool match = glob_match(pat, str);  in test()  [local]
  22  bool success = match == expected;  in test()
  39  printk(message, pat, str, mismatch + 3*match);  in test()
Kconfig:
  34  Drivers may use these helpers to match the bit indices as described
/lib/zstd/
decompress.c:
   865  const BYTE *match;  [member]
   888  const BYTE *match = oLitEnd - sequence.offset;  in ZSTD_execSequenceLast7()  [local]
   912  match = dictEnd - (base - match);  in ZSTD_execSequenceLast7()
   913  if (match + sequence.matchLength <= dictEnd) {  in ZSTD_execSequenceLast7()
   914  memmove(oLitEnd, match, sequence.matchLength);  in ZSTD_execSequenceLast7()
   919  size_t const length1 = dictEnd - match;  in ZSTD_execSequenceLast7()
   920  memmove(oLitEnd, match, length1);  in ZSTD_execSequenceLast7()
   923  match = base;  in ZSTD_execSequenceLast7()
   927  *op++ = *match++;  in ZSTD_execSequenceLast7()
  1001  seq.match = NULL;  in ZSTD_decodeSequence()
  [all …]
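Lines 912 to 927 handle a match that begins in an external dictionary: the pointer is remapped into the dictionary buffer (match = dictEnd - (base - match)), and if the match runs past the dictionary's end it is copied in two pieces, the dictionary tail first and then the start of the current (prefix) segment. The sketch below shows that split copy with invented buffer names; the real ZSTD_execSequenceLast7() additionally deals with output bounds and overlapping continuation.

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <stddef.h>

/*
 * Copy match_len bytes of a match that starts dict_off bytes into an
 * external dictionary segment.  If the match is longer than what remains of
 * the dictionary, finish the copy from the start of the prefix segment.
 */
static void copy_two_segments(uint8_t *out,
                              const uint8_t *dict, size_t dict_len,
                              size_t dict_off,
                              const uint8_t *prefix,
                              size_t match_len)
{
    size_t in_dict = dict_len - dict_off;

    if (match_len <= in_dict) {
        memcpy(out, dict + dict_off, match_len);        /* fits in the dictionary */
        return;
    }
    memcpy(out, dict + dict_off, in_dict);              /* dictionary tail ...   */
    memcpy(out + in_dict, prefix, match_len - in_dict); /* ... then the prefix   */
}

int main(void)
{
    const uint8_t dict[] = "0123456789";     /* external dictionary segment */
    const uint8_t prefix[] = "ABCDEFGH";     /* already-decoded current segment */
    uint8_t out[32] = { 0 };

    copy_two_segments(out, dict, 10, 7, prefix, 6);
    printf("%s\n", (const char *)out);       /* prints "789ABC" */
    return 0;
}
```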
compress.c:
   940  static size_t ZSTD_count_2segments(const BYTE *ip, const BYTE *match, const BYTE *iEnd, const BYTE …  in ZSTD_count_2segments()  [argument]
   942  const BYTE *const vEnd = MIN(ip + (mEnd - match), iEnd);  in ZSTD_count_2segments()
   943  size_t const matchLength = ZSTD_count(ip, match, vEnd);  in ZSTD_count_2segments()
   944  if (match + matchLength != mEnd)  in ZSTD_count_2segments()
  1040  const BYTE *match = base + matchIndex;  in ZSTD_compressBlock_fast_generic()  [local]
  1049  if ((matchIndex <= lowestIndex) || (ZSTD_read32(match) != ZSTD_read32(ip))) {  in ZSTD_compressBlock_fast_generic()
  1053  mLength = ZSTD_count(ip + 4, match + 4, iend) + 4;  in ZSTD_compressBlock_fast_generic()
  1054  offset = (U32)(ip - match);  in ZSTD_compressBlock_fast_generic()
  1055  while (((ip > anchor) & (match > lowest)) && (ip[-1] == match[-1])) {  in ZSTD_compressBlock_fast_generic()
  1057  match--;  in ZSTD_compressBlock_fast_generic()
  [all …]
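ZSTD_count_2segments() (lines 940 to 944) measures a match length that may cross from one segment (typically the dictionary) into the current prefix: it first counts up to the segment boundary and, only if that boundary was actually reached, keeps counting against the start of the next segment. A simplified standalone version follows; the pointer roles mirror the kernel source loosely, but the helper and the buffers in main() are invented for the example.

```c
#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

/* Count matching bytes of a vs. b, stopping when a reaches aEnd. */
static size_t count(const uint8_t *a, const uint8_t *b, const uint8_t *aEnd)
{
    size_t n = 0;
    while (a + n < aEnd && a[n] == b[n])
        n++;
    return n;
}

/*
 * Match length of ip against a reference starting at `match` in a segment
 * that ends at mEnd; if the count reaches mEnd, continue against the bytes
 * at prefixStart (the following segment), never reading ip past iEnd.
 */
static size_t count_2segments(const uint8_t *ip, const uint8_t *iEnd,
                              const uint8_t *match, const uint8_t *mEnd,
                              const uint8_t *prefixStart)
{
    const uint8_t *vEnd = ip + (mEnd - match);   /* stop at whichever ends first */
    if (vEnd > iEnd)
        vEnd = iEnd;

    size_t len = count(ip, match, vEnd);
    if (match + len != mEnd)
        return len;                  /* mismatch before the segment boundary */

    return len + count(ip + len, prefixStart, iEnd);
}

int main(void)
{
    const uint8_t dict[]   = "hello, wor";           /* first segment */
    const uint8_t prefix[] = "ld!hello, world!xyz";  /* current segment */
    const uint8_t *ip    = prefix + 3;               /* "hello, world!xyz" */
    const uint8_t *iEnd  = prefix + sizeof(prefix) - 1;
    const uint8_t *match = dict;
    const uint8_t *mEnd  = dict + sizeof(dict) - 1;

    size_t len = count_2segments(ip, iEnd, match, mEnd, prefix);
    printf("match length across segments: %zu\n", len);   /* 13 */
    return 0;
}
```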
zstd_opt.h:
  266  const BYTE *match;  in ZSTD_insertBtAndGetAllMatches()  [local]
  269  match = base + matchIndex3;  in ZSTD_insertBtAndGetAllMatches()
  270  if (match[bestLength] == ip[bestLength])  in ZSTD_insertBtAndGetAllMatches()
  271  currMl = ZSTD_count(ip, match, iLimit);  in ZSTD_insertBtAndGetAllMatches()
  273  match = dictBase + matchIndex3;  in ZSTD_insertBtAndGetAllMatches()
  274  if (ZSTD_readMINMATCH(match, MINMATCH) ==  in ZSTD_insertBtAndGetAllMatches()
  276  …currMl = ZSTD_count_2segments(ip + MINMATCH, match + MINMATCH, iLimit, dictEnd, prefixStart) + MIN…  in ZSTD_insertBtAndGetAllMatches()
  298  const BYTE *match;  in ZSTD_insertBtAndGetAllMatches()  [local]
  301  match = base + matchIndex;  in ZSTD_insertBtAndGetAllMatches()
  302  if (match[matchLength] == ip[matchLength]) {  in ZSTD_insertBtAndGetAllMatches()
  [all …]
/lib/842/
842_compress.c:
  383  int i, match, b = 0;  in check_template()  [local]
  391  match = check_index(p, 2, b >> 1);  in check_template()
  393  match = check_index(p, 4, b >> 2);  in check_template()
  395  match = check_index(p, 8, 0);  in check_template()
  398  if (!match)  in check_template()
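In check_template(), every index reference a template would emit (2-, 4- or 8-byte wide, per lines 391 to 395) is verified with check_index(); if any referenced entry does not hold the bytes currently being compressed, the template is rejected (if (!match)). The toy sketch below mirrors only that verify-before-use idea; the table layout and the index_matches() helper are invented for the example and do not reflect the real sw842 data structures.

```c
#include <stdbool.h>
#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define NIDX 8

/* Invented index table: previously seen 8-byte chunks, addressed by slot. */
struct toy_index {
    uint8_t entry[NIDX][8];
};

/* Does stored entry `idx` hold the next `width` bytes of input at p? */
static bool index_matches(const struct toy_index *t, const uint8_t *p,
                          int width, int idx)
{
    return memcmp(t->entry[idx], p, width) == 0;
}

int main(void)
{
    static struct toy_index t;                 /* zero-initialized */
    const uint8_t data[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };

    memcpy(t.entry[3], data, 4);               /* slot 3 knows the first 4 bytes */

    printf("4-byte reference usable: %d\n", index_matches(&t, data, 4, 3));  /* 1 */
    printf("8-byte reference usable: %d\n", index_matches(&t, data, 8, 3));  /* 0 */
    return 0;
}
```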