Searched refs:ip (Results 1 – 16 of 16) sorted by relevance

/lib/lzo/
lzo1x_decompress_safe.c
23 #define HAVE_IP(x) ((size_t)(ip_end - ip) >= (size_t)(x))
43 const unsigned char *ip; in lzo1x_decompress_safe() local
53 ip = in; in lzo1x_decompress_safe()
58 if (likely(in_len >= 5) && likely(*ip == 17)) { in lzo1x_decompress_safe()
59 bitstream_version = ip[1]; in lzo1x_decompress_safe()
60 ip += 2; in lzo1x_decompress_safe()
65 if (*ip > 17) { in lzo1x_decompress_safe()
66 t = *ip++ - 17; in lzo1x_decompress_safe()
75 t = *ip++; in lzo1x_decompress_safe()
80 const unsigned char *ip_last = ip; in lzo1x_decompress_safe()
[all …]
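
The HAVE_IP() macro at the top of this result is the input-side bounds check that makes the "safe" decompressor safe: before the decoder reads ahead through ip, it checks that enough bytes remain before ip_end. A minimal sketch of that usage pattern follows; the copy loop is illustrative only, not the kernel's actual decode path.

    #include <stddef.h>

    #define HAVE_IP(x) ((size_t)(ip_end - ip) >= (size_t)(x))

    /* Copy t literal bytes from the compressed input, refusing to read
     * past the end of the input buffer. Sketch only. */
    static int copy_literals(const unsigned char *in, size_t in_len,
                             unsigned char *out, size_t t)
    {
        const unsigned char *ip = in;
        const unsigned char *const ip_end = in + in_len;
        unsigned char *op = out;

        if (!HAVE_IP(t))        /* would overrun the input: reject */
            return -1;
        while (t--)
            *op++ = *ip++;
        return 0;
    }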
lzo1x_compress.c
27 const unsigned char *ip; in lzo1x_1_do_compress() local
35 ip = in; in lzo1x_1_do_compress()
36 ii = ip; in lzo1x_1_do_compress()
37 ip += ti < 4 ? 4 - ti : 0; in lzo1x_1_do_compress()
45 ip += 1 + ((ip - ii) >> 5); in lzo1x_1_do_compress()
47 if (unlikely(ip >= ip_end)) in lzo1x_1_do_compress()
49 dv = get_unaligned_le32(ip); in lzo1x_1_do_compress()
52 const unsigned char *ir = ip + 4; in lzo1x_1_do_compress()
54 < (ip + MAX_ZERO_RUN_LENGTH + 1) in lzo1x_1_do_compress()
55 ? ip_end : ip + MAX_ZERO_RUN_LENGTH + 1; in lzo1x_1_do_compress()
[all …]
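
Line 45 of lzo1x_compress.c shows the accelerating scan step, ip += 1 + ((ip - ii) >> 5): the farther ip moves past the last literal start ii without finding a match, the larger the stride, so poorly compressible data is skipped quickly. A self-contained sketch of that probing idea, with a plain 4-byte comparison standing in for the compressor's real dictionary lookup:

    #include <stddef.h>
    #include <string.h>

    /* Scan forward with a stride that grows by one for every 32 bytes of
     * misses. The needle comparison is a stand-in for the hash lookup. */
    static const unsigned char *probe_forward(const unsigned char *ip,
                                              const unsigned char *ii,
                                              const unsigned char *ip_end,
                                              const unsigned char needle[4])
    {
        while (ip + 4 <= ip_end) {
            if (memcmp(ip, needle, 4) == 0)
                return ip;                      /* candidate found */
            ip += 1 + ((ip - ii) >> 5);         /* accelerating stride */
        }
        return NULL;
    }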
/lib/lz4/
lz4hc_compress.c
75 const BYTE *ip) in LZ4HC_Insert() argument
80 U32 const target = (U32)(ip - base); in LZ4HC_Insert()
101 const BYTE *ip, in LZ4HC_InsertAndFindBestMatch() argument
111 const U32 lowLimit = (hc4->lowLimit + 64 * KB > (U32)(ip - base)) in LZ4HC_InsertAndFindBestMatch()
113 : (U32)(ip - base) - (64 * KB - 1); in LZ4HC_InsertAndFindBestMatch()
119 LZ4HC_Insert(hc4, ip); in LZ4HC_InsertAndFindBestMatch()
120 matchIndex = HashTable[LZ4HC_hashPtr(ip)]; in LZ4HC_InsertAndFindBestMatch()
128 if (*(match + ml) == *(ip + ml) in LZ4HC_InsertAndFindBestMatch()
129 && (LZ4_read32(match) == LZ4_read32(ip))) { in LZ4HC_InsertAndFindBestMatch()
130 size_t const mlt = LZ4_count(ip + MINMATCH, in LZ4HC_InsertAndFindBestMatch()
[all …]
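
LZ4HC_Insert() above records positions as 32-bit offsets from a base pointer, hashTable[LZ4HC_hashPtr(ip)] = (U32)(ip - base), rather than storing raw pointers, which keeps the match-finder tables compact. A hedged sketch of that convention; the hash constant and table size here are illustrative, not LZ4's actual parameters:

    #include <stdint.h>
    #include <string.h>

    #define HASH_LOG 15                             /* illustrative table size */

    /* Multiplicative hash over a 32-bit load at ip (stand-in for
     * LZ4HC_hashPtr()); memcpy keeps the load alignment-safe. */
    static uint32_t hash_ptr(const uint8_t *ip)
    {
        uint32_t v;

        memcpy(&v, ip, sizeof(v));
        return (v * 2654435761U) >> (32 - HASH_LOG);
    }

    /* Record the current position as an offset from base, so the table
     * holds compact 32-bit indices instead of pointers. */
    static void insert_position(uint32_t *hashTable, const uint8_t *ip,
                                const uint8_t *base)
    {
        hashTable[hash_ptr(ip)] = (uint32_t)(ip - base);
    }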
lz4_compress.c
188 const BYTE *ip = (const BYTE *) source; in LZ4_compress_generic() local
191 const BYTE * const lowRefLimit = ip - dictPtr->dictSize; in LZ4_compress_generic()
196 const BYTE * const iend = ip + inputSize; in LZ4_compress_generic()
240 LZ4_putPosition(ip, dictPtr->hashTable, tableType, base); in LZ4_compress_generic()
241 ip++; in LZ4_compress_generic()
242 forwardH = LZ4_hashPosition(ip, tableType); in LZ4_compress_generic()
251 const BYTE *forwardIp = ip; in LZ4_compress_generic()
258 ip = forwardIp; in LZ4_compress_generic()
281 LZ4_putPositionOnHash(ip, h, dictPtr->hashTable, in LZ4_compress_generic()
288 : (match + MAX_DISTANCE < ip)) in LZ4_compress_generic()
[all …]
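
The condition match + MAX_DISTANCE < ip in LZ4_compress_generic() rejects candidates that lie too far behind the current position: LZ4 encodes match offsets in 16 bits, so only the previous 64 KB window is reachable. A small sketch of that check; the helper name is made up, the constant follows the 16-bit offset field of the format:

    #include <stdbool.h>
    #include <stdint.h>

    #define MAX_DISTANCE ((1 << 16) - 1)    /* largest offset a 16-bit field can encode */

    /* A candidate match is usable only if it lies within the 64 KB
     * window behind ip; otherwise its offset would not fit the format. */
    static bool match_in_window(const uint8_t *match, const uint8_t *ip)
    {
        return ip - match <= MAX_DISTANCE;
    }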
lz4_decompress.c
83 const BYTE *ip = (const BYTE *) src; in LZ4_decompress_generic() local
84 const BYTE * const iend = ip + srcSize; in LZ4_decompress_generic()
112 return ((srcSize == 1) && (*ip == 0)) ? 0 : -1; in LZ4_decompress_generic()
115 return (*ip == 0 ? 1 : -1); in LZ4_decompress_generic()
127 unsigned int const token = *ip++; in LZ4_decompress_generic()
131 assert(!endOnInput || ip <= iend); in LZ4_decompress_generic()
150 && likely((endOnInput ? ip < shortiend : 1) & in LZ4_decompress_generic()
153 memcpy(op, ip, endOnInput ? 16 : 8); in LZ4_decompress_generic()
154 op += length; ip += length; in LZ4_decompress_generic()
162 offset = LZ4_readLE16(ip); in LZ4_decompress_generic()
[all …]
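
The lines above show the shape of the LZ4 sequence decoder: read a token byte, extend the literal length while 255-valued bytes follow, copy the literals, then read a little-endian 16-bit match offset. A hedged sketch of just the input-pointer side of that parsing, with copying and error handling reduced to the minimum:

    #include <stddef.h>
    #include <stdint.h>

    /* Each LZ4 sequence starts with a token byte: literal length in the
     * high nibble, match length in the low nibble. A nibble value of 15
     * means more length bytes follow (add each byte, stop at the first
     * one that is not 255). After the literals comes a little-endian
     * 16-bit match offset. This sketch only tracks how ip advances. */
    static size_t parse_sequence_header(const uint8_t *ip, const uint8_t *iend,
                                        size_t *lit_len, uint16_t *offset)
    {
        const uint8_t *const start = ip;
        uint8_t token, b;
        size_t len;

        if (ip >= iend)
            return 0;
        token = *ip++;
        len = token >> 4;                       /* literal length nibble */
        if (len == 15) {                        /* extended length follows */
            do {
                if (ip >= iend)
                    return 0;                   /* truncated input */
                b = *ip++;
                len += b;
            } while (b == 255);
        }
        if (len > (size_t)(iend - ip))
            return 0;
        *lit_len = len;
        ip += len;                              /* literals would be copied here */

        if (iend - ip < 2)
            return 0;
        *offset = (uint16_t)(ip[0] | (ip[1] << 8));     /* LE16 match offset */
        ip += 2;

        return (size_t)(ip - start);            /* input bytes consumed */
    }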
/lib/zstd/
entropy_common.c
63 const BYTE *ip = istart; in FSE_readNCount() local
74 bitStream = ZSTD_readLE32(ip); in FSE_readNCount()
90 if (ip < iend - 5) { in FSE_readNCount()
91 ip += 2; in FSE_readNCount()
92 bitStream = ZSTD_readLE32(ip) >> bitCount; in FSE_readNCount()
109 if ((ip <= iend - 7) || (ip + (bitCount >> 3) <= iend - 4)) { in FSE_readNCount()
110 ip += bitCount >> 3; in FSE_readNCount()
112 bitStream = ZSTD_readLE32(ip) >> bitCount; in FSE_readNCount()
140 if ((ip <= iend - 7) || (ip + (bitCount >> 3) <= iend - 4)) { in FSE_readNCount()
141 ip += bitCount >> 3; in FSE_readNCount()
[all …]
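
FSE_readNCount() refills its 32-bit bitstream window by flushing whole consumed bytes (ip += bitCount >> 3) and re-reading a little-endian word shifted by the leftover bits, provided the bounds checks against iend hold. A sketch of that refill idiom; names mirror the snippet, and the bounds check is left to the caller, as in the "ip <= iend - 7" test above:

    #include <stdint.h>
    #include <string.h>

    /* Little-endian 32-bit load; memcpy keeps it alignment-safe.
     * Assumes a little-endian host to keep the sketch short. */
    static uint32_t read_le32(const uint8_t *p)
    {
        uint32_t v;

        memcpy(&v, p, sizeof(v));
        return v;
    }

    /* Advance past fully consumed bytes, keep the sub-byte remainder,
     * and reload the bit window shifted by that remainder. */
    static void refill(const uint8_t **ip, unsigned *bitCount, uint32_t *bitStream)
    {
        *ip += *bitCount >> 3;
        *bitCount &= 7;
        *bitStream = read_le32(*ip) >> *bitCount;
    }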
compress.c
940 static size_t ZSTD_count_2segments(const BYTE *ip, const BYTE *match, const BYTE *iEnd, const BYTE … in ZSTD_count_2segments() argument
942 const BYTE *const vEnd = MIN(ip + (mEnd - match), iEnd); in ZSTD_count_2segments()
943 size_t const matchLength = ZSTD_count(ip, match, vEnd); in ZSTD_count_2segments()
946 return matchLength + ZSTD_count(ip + matchLength, iStart, iEnd); in ZSTD_count_2segments()
997 const BYTE *ip = base + zc->nextToUpdate; in ZSTD_fillHashTable() local
1001 while (ip <= iend) { in ZSTD_fillHashTable()
1002 hashTable[ZSTD_hashPtr(ip, hBits, mls)] = (U32)(ip - base); in ZSTD_fillHashTable()
1003 ip += fastHashFillStep; in ZSTD_fillHashTable()
1015 const BYTE *ip = istart; in ZSTD_compressBlock_fast_generic() local
1025 ip += (ip == lowest); in ZSTD_compressBlock_fast_generic()
[all …]
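
ZSTD_fillHashTable() above primes the fast match finder: it hashes every fastHashFillStep-th position up to the end of the block and stores each position as an offset from base. A hedged sketch with an illustrative hash and step, not zstd's actual constants; the caller is assumed to guarantee 8 readable bytes at every hashed position:

    #include <stdint.h>
    #include <string.h>

    #define HASH_LOG  17                /* illustrative */
    #define FILL_STEP 3                 /* illustrative fastHashFillStep */

    /* 64-bit multiplicative hash over the bytes at ip (stand-in for
     * ZSTD_hashPtr()). */
    static uint32_t hash_pos(const uint8_t *ip)
    {
        uint64_t v;

        memcpy(&v, ip, sizeof(v));
        return (uint32_t)((v * 0x9E3779B185EBCA87ULL) >> (64 - HASH_LOG));
    }

    /* Record every FILL_STEP-th position as a 32-bit offset from base. */
    static void fill_hash_table(uint32_t *hashTable, const uint8_t *base,
                                const uint8_t *ip, const uint8_t *iend)
    {
        while (ip <= iend) {
            hashTable[hash_pos(ip)] = (uint32_t)(ip - base);
            ip += FILL_STEP;
        }
    }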
zstd_opt.h
215 U32 ZSTD_insertAndFindFirstIndexHash3(ZSTD_CCtx *zc, const BYTE *ip) in ZSTD_insertAndFindFirstIndexHash3() argument
221 const U32 target = zc->nextToUpdate3 = (U32)(ip - base); in ZSTD_insertAndFindFirstIndexHash3()
222 const size_t hash3 = ZSTD_hash3Ptr(ip, hashLog3); in ZSTD_insertAndFindFirstIndexHash3()
235 static U32 ZSTD_insertBtAndGetAllMatches(ZSTD_CCtx *zc, const BYTE *const ip, const BYTE *const iLi… in ZSTD_insertBtAndGetAllMatches() argument
239 const U32 curr = (U32)(ip - base); in ZSTD_insertBtAndGetAllMatches()
241 const size_t h = ZSTD_hashPtr(ip, hashLog, mls); in ZSTD_insertBtAndGetAllMatches()
264 U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(zc, ip); in ZSTD_insertBtAndGetAllMatches()
270 if (match[bestLength] == ip[bestLength]) in ZSTD_insertBtAndGetAllMatches()
271 currMl = ZSTD_count(ip, match, iLimit); in ZSTD_insertBtAndGetAllMatches()
275 …ZSTD_readMINMATCH(ip, MINMATCH)) /* assumption : matchIndex3 <= dictLimit-4 (by table construction… in ZSTD_insertBtAndGetAllMatches()
[all …]
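
Two idioms are visible in ZSTD_insertBtAndGetAllMatches(): a common-prefix count between ip and a candidate (ZSTD_count), and the cheap pre-check "match[bestLength] == ip[bestLength]" that rejects candidates which cannot beat the current best length before paying for a full comparison. A hedged, byte-at-a-time sketch of both; the real ZSTD_count compares word-sized chunks:

    #include <stddef.h>
    #include <stdint.h>

    /* Byte-by-byte common-prefix length between ip and match, bounded by
     * iLimit (stand-in for ZSTD_count). */
    static size_t count_common_prefix(const uint8_t *ip, const uint8_t *match,
                                      const uint8_t *iLimit)
    {
        const uint8_t *const start = ip;

        while (ip < iLimit && *ip == *match) {
            ip++;
            match++;
        }
        return (size_t)(ip - start);
    }

    /* If the candidate cannot exceed bestLength, its byte at offset
     * bestLength must already differ; only then is the full count paid. */
    static size_t try_candidate(const uint8_t *ip, const uint8_t *match,
                                const uint8_t *iLimit, size_t bestLength)
    {
        if (ip + bestLength >= iLimit || match[bestLength] != ip[bestLength])
            return 0;                   /* cannot beat the current best */
        return count_common_prefix(ip, match, iLimit);
    }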
fse_compress.c
326 const BYTE *ip = (const BYTE *)src; in FSE_count_simple() local
327 const BYTE *const end = ip + srcSize; in FSE_count_simple()
337 while (ip < end) in FSE_count_simple()
338 count[*ip++]++; in FSE_count_simple()
360 const BYTE *ip = (const BYTE *)source; in FSE_count_parallel_wksp() local
361 const BYTE *const iend = ip + sourceSize; in FSE_count_parallel_wksp()
382 U32 cached = ZSTD_read32(ip); in FSE_count_parallel_wksp()
383 ip += 4; in FSE_count_parallel_wksp()
384 while (ip < iend - 15) { in FSE_count_parallel_wksp()
386 cached = ZSTD_read32(ip); in FSE_count_parallel_wksp()
[all …]
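
FSE_count_simple() is essentially the byte histogram visible above: walk the input once and count every byte value. The parallel variant in FSE_count_parallel_wksp() amortizes the loads with 32-bit reads and interleaved counters; only the simple form is sketched here.

    #include <stddef.h>
    #include <stdint.h>

    /* Count occurrences of every byte value in src. */
    static void count_bytes(uint32_t count[256], const uint8_t *src, size_t srcSize)
    {
        const uint8_t *ip = src;
        const uint8_t *const end = ip + srcSize;
        size_t i;

        for (i = 0; i < 256; i++)
            count[i] = 0;
        while (ip < end)
            count[*ip++]++;
    }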
decompress.c
209 const BYTE *ip = (const BYTE *)src; in ZSTD_getFrameParams() local
233 BYTE const fhdByte = ip[4]; in ZSTD_getFrameParams()
246 BYTE const wlByte = ip[pos++]; in ZSTD_getFrameParams()
258 dictID = ip[pos]; in ZSTD_getFrameParams()
262 dictID = ZSTD_readLE16(ip + pos); in ZSTD_getFrameParams()
266 dictID = ZSTD_readLE32(ip + pos); in ZSTD_getFrameParams()
274 frameContentSize = ip[pos]; in ZSTD_getFrameParams()
276 case 1: frameContentSize = ZSTD_readLE16(ip + pos) + 256; break; in ZSTD_getFrameParams()
277 case 2: frameContentSize = ZSTD_readLE32(ip + pos); break; in ZSTD_getFrameParams()
278 case 3: frameContentSize = ZSTD_readLE64(ip + pos); break; in ZSTD_getFrameParams()
[all …]
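
ZSTD_getFrameParams() reads variable-width header fields: a size code in the frame header byte selects how many bytes encode the frame content size, and the 2-byte form is biased by 256 (values 0-255 would have used the 1-byte form), exactly as the "+ 256" in the snippet shows. A sketch of that switch; the little-endian helper is illustrative and bounds checks are omitted:

    #include <stddef.h>
    #include <stdint.h>

    /* Read nbBytes little-endian bytes starting at p. */
    static uint64_t read_le(const uint8_t *p, unsigned nbBytes)
    {
        uint64_t v = 0;
        unsigned i;

        for (i = 0; i < nbBytes; i++)
            v |= (uint64_t)p[i] << (8 * i);
        return v;
    }

    /* Decode the frame content size field according to its 2-bit size code,
     * mirroring the switch visible above. */
    static uint64_t read_frame_content_size(const uint8_t *ip, size_t pos,
                                            unsigned fcsCode)
    {
        switch (fcsCode) {
        default:
        case 0: return ip[pos];                     /* single byte, as at line 274 */
        case 1: return read_le(ip + pos, 2) + 256;  /* 2 bytes, biased by 256 */
        case 2: return read_le(ip + pos, 4);
        case 3: return read_le(ip + pos, 8);
        }
    }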
huf_compress.c
540 const BYTE *ip = (const BYTE *)src; in HUF_compress1X_usingCTable() local
558 case 3: HUF_encodeSymbol(&bitC, ip[n + 2], CTable); HUF_FLUSHBITS_2(&bitC); in HUF_compress1X_usingCTable()
560 case 2: HUF_encodeSymbol(&bitC, ip[n + 1], CTable); HUF_FLUSHBITS_1(&bitC); in HUF_compress1X_usingCTable()
562 case 1: HUF_encodeSymbol(&bitC, ip[n + 0], CTable); HUF_FLUSHBITS(&bitC); in HUF_compress1X_usingCTable()
568 HUF_encodeSymbol(&bitC, ip[n - 1], CTable); in HUF_compress1X_usingCTable()
570 HUF_encodeSymbol(&bitC, ip[n - 2], CTable); in HUF_compress1X_usingCTable()
572 HUF_encodeSymbol(&bitC, ip[n - 3], CTable); in HUF_compress1X_usingCTable()
574 HUF_encodeSymbol(&bitC, ip[n - 4], CTable); in HUF_compress1X_usingCTable()
584 const BYTE *ip = (const BYTE *)src; in HUF_compress4X_usingCTable() local
585 const BYTE *const iend = ip + srcSize; in HUF_compress4X_usingCTable()
[all …]
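
HUF_compress1X_usingCTable() emits symbols back-to-front: a fall-through switch handles the srcSize % 4 leftover symbols first, then the main loop encodes four symbols per iteration. A sketch of just that loop structure; emit() stands in for HUF_encodeSymbol() plus its flush macros.

    #include <stddef.h>
    #include <stdint.h>

    /* Emit the symbols of ip[0..srcSize) in reverse order, tail first,
     * then four per iteration. */
    static void encode_reverse(const uint8_t *ip, size_t srcSize,
                               void (*emit)(uint8_t sym, void *ctx), void *ctx)
    {
        size_t n = srcSize & ~(size_t)3;    /* round down to a multiple of 4 */

        switch (srcSize & 3) {              /* leftovers, falling through */
        case 3: emit(ip[n + 2], ctx);
            /* fallthrough */
        case 2: emit(ip[n + 1], ctx);
            /* fallthrough */
        case 1: emit(ip[n + 0], ctx);
            /* fallthrough */
        case 0:
        default: break;
        }

        for (; n > 0; n -= 4) {             /* then 4 symbols per iteration */
            emit(ip[n - 1], ctx);
            emit(ip[n - 2], ctx);
            emit(ip[n - 3], ctx);
            emit(ip[n - 4], ctx);
        }
    }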
zstd_internal.h
137 const BYTE* ip = (const BYTE*)src; in ZSTD_wildcopy() local
148 ZSTD_copy8(op, ip); in ZSTD_wildcopy()
150 ip += 8; in ZSTD_wildcopy()
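
ZSTD_wildcopy() copies in unconditional 8-byte chunks and may overshoot the requested length by up to 7 bytes, which is what makes it faster than a byte loop; the caller must guarantee that much slack in the destination. A hedged sketch of the idiom:

    #include <stddef.h>
    #include <string.h>

    /* Copy length bytes in 8-byte chunks, possibly writing up to 7 bytes
     * past dst + length. Caller guarantees the slack. Sketch only. */
    static void wildcopy8(void *dst, const void *src, ptrdiff_t length)
    {
        const char *ip = (const char *)src;
        char *op = (char *)dst;
        char *const oend = op + length;

        do {
            memcpy(op, ip, 8);      /* stand-in for ZSTD_copy8() */
            op += 8;
            ip += 8;
        } while (op < oend);
    }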
huf_decompress.c
234 const BYTE *ip = (const BYTE *)cSrc; in HUF_decompress1X2_DCtx_wksp() local
241 ip += hSize; in HUF_decompress1X2_DCtx_wksp()
244 return HUF_decompress1X2_usingDTable_internal(dst, dstSize, ip, cSrcSize, DCtx); in HUF_decompress1X2_DCtx_wksp()
365 const BYTE *ip = (const BYTE *)cSrc; in HUF_decompress4X2_DCtx_wksp() local
372 ip += hSize; in HUF_decompress4X2_DCtx_wksp()
375 return HUF_decompress4X2_usingDTable_internal(dst, dstSize, ip, cSrcSize, dctx); in HUF_decompress4X2_DCtx_wksp()
694 const BYTE *ip = (const BYTE *)cSrc; in HUF_decompress1X4_DCtx_wksp() local
701 ip += hSize; in HUF_decompress1X4_DCtx_wksp()
704 return HUF_decompress1X4_usingDTable_internal(dst, dstSize, ip, cSrcSize, DCtx); in HUF_decompress1X4_DCtx_wksp()
827 const BYTE *ip = (const BYTE *)cSrc; in HUF_decompress4X4_DCtx_wksp() local
[all …]
fse_decompress.c
297 const BYTE *ip = istart; in FSE_decompress_wksp() local
326 ip += NCountLength; in FSE_decompress_wksp()
331 …return FSE_decompress_usingDTable(dst, dstCapacity, ip, cSrcSize, dt); /* always return, even if i… in FSE_decompress_wksp()
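
The HUF_decompress*_DCtx_wksp() results above and FSE_decompress_wksp() here share the same two-phase shape: parse the table description at the front of the compressed block, advance ip past it by the returned header size, then decode the remaining bytes with the freshly built table. A generic sketch of that shape; the callbacks stand in for the real read-table (HUF_readDTable / FSE_readNCount) and decode entry points:

    #include <stddef.h>
    #include <stdint.h>

    /* Parse the table header, skip it, then decode the rest. Sketch only. */
    static size_t decompress_with_header(
        void *dst, size_t dstSize,
        const uint8_t *cSrc, size_t cSrcSize,
        void *dtable,
        size_t (*read_table)(void *dtable, const void *src, size_t srcSize),
        size_t (*decode)(void *dst, size_t dstSize, const void *src,
                         size_t srcSize, const void *dtable))
    {
        const uint8_t *ip = cSrc;
        size_t hSize = read_table(dtable, ip, cSrcSize);

        if (hSize == 0 || hSize > cSrcSize)
            return 0;                   /* corrupt or truncated header */
        ip += hSize;                    /* skip the table description */
        cSrcSize -= hSize;

        return decode(dst, dstSize, ip, cSrcSize, dtable);
    }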
/lib/
test_printf.c
435 ip(void) in ip() function
626 ip(); in test_pointer()
Kconfig.debug
1534 # ip link set eth0 mtu 1024