/lib/zlib_inflate/

inffast.c
     89  unsigned op;  /* code bits, operation, extra bits, or */  in inflate_fast() local
    127  op = (unsigned)(this.bits);  in inflate_fast()
    128  hold >>= op;  in inflate_fast()
    129  bits -= op;  in inflate_fast()
    130  op = (unsigned)(this.op);  in inflate_fast()
    131  if (op == 0) {  /* literal */  in inflate_fast()
    134  else if (op & 16) {  /* length base */  in inflate_fast()
    136  op &= 15;  /* number of extra bits */  in inflate_fast()
    137  if (op) {  in inflate_fast()
    138  if (bits < op) {  in inflate_fast()
    [all …]

inftrees.c
    104  this.op = (unsigned char)64;  /* invalid code marker */  in zlib_inflate_table()
    206  this.op = (unsigned char)0;  in zlib_inflate_table()
    210  this.op = (unsigned char)(extra[work[sym]]);  in zlib_inflate_table()
    214  this.op = (unsigned char)(32 + 64);  /* end of block */  in zlib_inflate_table()
    271  (*table)[low].op = (unsigned char)curr;  in zlib_inflate_table()
    284  this.op = (unsigned char)64;  /* invalid code marker */  in zlib_inflate_table()

inflate.c
    591  if (this.op && (this.op & 0xf0) == 0) {  in zlib_inflate()
    595  (BITS(last.bits + last.op) >> last.bits)];  in zlib_inflate()
    603  if ((int)(this.op) == 0) {  in zlib_inflate()
    607  if (this.op & 32) {  in zlib_inflate()
    611  if (this.op & 64) {  in zlib_inflate()
    616  state->extra = (unsigned)(this.op) & 15;  in zlib_inflate()
    633  if ((this.op & 0xf0) == 0) {  in zlib_inflate()
    637  (BITS(last.bits + last.op) >> last.bits)];  in zlib_inflate()
    644  if (this.op & 64) {  in zlib_inflate()
    650  state->extra = (unsigned)(this.op) & 15;  in zlib_inflate()
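
In all three files, op is the first field of zlib's Huffman code-table entry: it says what kind of symbol a lookup produced and how to interpret val. The struct below matches zlib's inftrees.h; decode_step() and its parameters are an illustrative sketch of the dispatch the excerpts show, not the kernel's code.

    typedef struct {
        unsigned char  op;   /* 0: literal; 16: length base (low 4 bits give
                                the extra-bit count); 32: end of block;
                                64: invalid code; nonzero with the high
                                nibble clear: link to a second-level table
                                (the 0xf0 tests at lines 591/633) */
        unsigned char  bits; /* bits consumed by this table entry */
        unsigned short val;  /* literal byte, base value, or sub-table offset */
    } code;

    /* One table-driven decode step: index the table with the low bits of
     * the bit accumulator, drop the consumed bits, then dispatch on op. */
    static int decode_step(const code *table, unsigned mask,
                           unsigned long *hold, unsigned *bits,
                           unsigned char *out)
    {
        code this = table[*hold & mask];

        *hold >>= this.bits;          /* lines 127-129: consume the bits */
        *bits  -= this.bits;

        if (this.op == 0) {           /* literal: val is the output byte */
            *out = (unsigned char)this.val;
            return 1;
        }
        if (this.op & 16)             /* length base: op & 15 = extra bits */
            return 2;
        if (this.op & 32)             /* end-of-block marker */
            return 0;
        return -1;                    /* op & 64: invalid code */
    }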

/lib/lzo/

lzo1x_decompress_safe.c
     24  #define HAVE_OP(x) ((size_t)(op_end - op) >= (size_t)(x))
     42  unsigned char *op;  in lzo1x_decompress_safe() local
     52  op = out;  in lzo1x_decompress_safe()
     98  unsigned char *oe = op + t;  in lzo1x_decompress_safe()
    100  COPY8(op, ip);  in lzo1x_decompress_safe()
    101  op += 8;  in lzo1x_decompress_safe()
    103  COPY8(op, ip);  in lzo1x_decompress_safe()
    104  op += 8;  in lzo1x_decompress_safe()
    108  op = oe;  in lzo1x_decompress_safe()
    115  *op++ = *ip++;  in lzo1x_decompress_safe()
    [all …]
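
Here op is the output cursor of the safe decompressor and HAVE_OP() is the remaining-room guard consulted before stores. Below is a sketch of the guard-then-chunk-copy pattern of lines 98-108, under illustrative names; the real code also bounds-checks the input side (HAVE_IP) and chooses its slack a little differently.

    #include <stddef.h>
    #include <string.h>

    /* Copy a literal run of t bytes in 8-byte chunks (the COPY8 idea),
     * then snap the cursor back to the exact end. The check asks for
     * t + 7 bytes so the final, over-long chunk stays inside the buffer. */
    static int copy_literals(unsigned char **opp, unsigned char *op_end,
                             const unsigned char **ipp, size_t t)
    {
        unsigned char *op = *opp;
        const unsigned char *ip = *ipp;

        if ((size_t)(op_end - op) < t + 7) /* the HAVE_OP() idea, plus slack */
            return -1;                     /* would overrun the output */

        unsigned char *oe = op + t;        /* exact end of this run */
        const unsigned char *ie = ip + t;  /* exact end of the input run */

        do {
            memcpy(op, ip, 8);             /* may write a few bytes past oe */
            op += 8;
            ip += 8;
        } while (op < oe);

        *opp = oe;                         /* line 108: op = oe */
        *ipp = ie;
        return 0;
    }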

lzo1x_compress.c
     39  unsigned char *op;  in LZO_SAFE() local
     46  op = *out;  in LZO_SAFE()
    130  op[*state_offset] |= t;  in LZO_SAFE()
    132  COPY4(op, ii);  in LZO_SAFE()
    133  op += t;  in LZO_SAFE()
    136  *op++ = (t - 3);  in LZO_SAFE()
    137  COPY8(op, ii);  in LZO_SAFE()
    138  COPY8(op + 8, ii + 8);  in LZO_SAFE()
    139  op += t;  in LZO_SAFE()
    143  *op++ = (t - 3);  in LZO_SAFE()
    [all …]

/lib/lz4/

lz4_decompress.c
     86  BYTE *op = (BYTE *) dst;  in LZ4_decompress_generic() local
     87  BYTE * const oend = op + outputSize;  in LZ4_decompress_generic()
    107  assert(lowPrefix <= op);  in LZ4_decompress_generic()
    154  (op <= shortoend))) {  in LZ4_decompress_generic()
    156  LZ4_memcpy(op, ip, endOnInput ? 16 : 8);  in LZ4_decompress_generic()
    157  op += length; ip += length;  in LZ4_decompress_generic()
    167  match = op - offset;  in LZ4_decompress_generic()
    168  assert(match <= op);  /* check overflow */  in LZ4_decompress_generic()
    175  LZ4_memcpy(op + 0, match + 0, 8);  in LZ4_decompress_generic()
    176  LZ4_memcpy(op + 8, match + 8, 8);  in LZ4_decompress_generic()
    [all …]
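
At line 167 the match source is computed as op - offset, i.e. it points back into output the decoder has already written, so a match copy can overlap its own source when the offset is small. A simplified sketch of that copy, without the buffer-end guards of LZ4_decompress_generic(); copy_match() is an illustrative name.

    #include <string.h>

    static void copy_match(unsigned char *op, size_t offset, size_t length)
    {
        const unsigned char *match = op - offset;   /* line 167 */

        if (offset >= 8) {
            /* Source and destination stay at least 8 bytes apart, so
             * fixed 8-byte copies never read what they just wrote. */
            while (length >= 8) {
                memcpy(op, match, 8);
                op += 8; match += 8; length -= 8;
            }
        }
        while (length--)            /* overlap-safe tail, or small offsets */
            *op++ = *match++;
    }

With offset == 1 the byte loop degenerates into byte replication, which is exactly the run-length-style case the chunked path cannot handle.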

lz4_compress.c
    200  BYTE *op = (BYTE *) dest;  in LZ4_compress_generic() local
    201  BYTE * const olimit = op + maxOutputSize;  in LZ4_compress_generic()
    304  token = op++;  in LZ4_compress_generic()
    308  (unlikely(op + litLength +  in LZ4_compress_generic()
    319  *op++ = 255;  in LZ4_compress_generic()
    320  *op++ = (BYTE)len;  in LZ4_compress_generic()
    325  LZ4_wildCopy(op, anchor, op + litLength);  in LZ4_compress_generic()
    326  op += litLength;  in LZ4_compress_generic()
    331  LZ4_writeLE16(op, (U16)(ip - match));  in LZ4_compress_generic()
    332  op += 2;  in LZ4_compress_generic()
    [all …]
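
The token reserved at line 304 carries the literal length in its high nibble; lengths of 15 or more spill into continuation bytes of 255 plus a final remainder byte (lines 319-320). A minimal sketch of that encoding; write_literal_run() is an illustrative name, output-bound checks are omitted, and the match-length code that later fills the token's low nibble is left out.

    #include <string.h>

    static unsigned char *write_literal_run(unsigned char *op,
                                            const unsigned char *anchor,
                                            size_t litLength)
    {
        unsigned char *token = op++;        /* line 304: reserve the token */

        if (litLength >= 15) {
            size_t len = litLength - 15;

            *token = 15 << 4;               /* high nibble saturated */
            for (; len >= 255; len -= 255)
                *op++ = 255;                /* lines 319-320 */
            *op++ = (unsigned char)len;     /* final remainder, < 255 */
        } else {
            *token = (unsigned char)(litLength << 4);
        }
        memcpy(op, anchor, litLength);      /* then the literals themselves */
        return op + litLength;
    }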

lz4hc_compress.c
    264  BYTE **op,  in LZ4HC_encodeSequence() argument
    276  token = (*op)++;  in LZ4HC_encodeSequence()
    279  && ((*op + (length>>8)  in LZ4HC_encodeSequence()
    290  *(*op)++ = 255;  in LZ4HC_encodeSequence()
    291  *(*op)++ = (BYTE)len;  in LZ4HC_encodeSequence()
    296  LZ4_wildCopy(*op, *anchor, (*op) + length);  in LZ4HC_encodeSequence()
    297  *op += length;  in LZ4HC_encodeSequence()
    300  LZ4_writeLE16(*op, (U16)(*ip - match));  in LZ4HC_encodeSequence()
    301  *op += 2;  in LZ4HC_encodeSequence()
    307  && (*op + (length>>8)  in LZ4HC_encodeSequence()
    [all …]

/lib/zstd/decompress/

zstd_decompress_block.c
    759  HINT_INLINE void ZSTD_overlapCopy8(BYTE** op, BYTE const** ip, size_t offset) {  in ZSTD_overlapCopy8() argument
    760  assert(*ip <= *op);  in ZSTD_overlapCopy8()
    766  (*op)[0] = (*ip)[0];  in ZSTD_overlapCopy8()
    767  (*op)[1] = (*ip)[1];  in ZSTD_overlapCopy8()
    768  (*op)[2] = (*ip)[2];  in ZSTD_overlapCopy8()
    769  (*op)[3] = (*ip)[3];  in ZSTD_overlapCopy8()
    771  ZSTD_copy4(*op+4, *ip);  in ZSTD_overlapCopy8()
    774  ZSTD_copy8(*op, *ip);  in ZSTD_overlapCopy8()
    777  *op += 8;  in ZSTD_overlapCopy8()
    778  assert(*op - *ip >= 8);  in ZSTD_overlapCopy8()
    [all …]

zstd_decompress.c
    839  BYTE* op = ostart;  in ZSTD_decompressFrame() local
    871  if (ip >= op && ip < oBlockEnd) {  in ZSTD_decompressFrame()
    885  oBlockEnd = op + (ip - op);  in ZSTD_decompressFrame()
    891  …decodedSize = ZSTD_decompressBlock_internal(dctx, op, (size_t)(oBlockEnd-op), ip, cBlockSize, /* f…  in ZSTD_decompressFrame()
    895  decodedSize = ZSTD_copyRawBlock(op, (size_t)(oend-op), ip, cBlockSize);  in ZSTD_decompressFrame()
    898  … decodedSize = ZSTD_setRleBlock(op, (size_t)(oBlockEnd-op), *ip, blockProperties.origSize);  in ZSTD_decompressFrame()
    907  xxh64_update(&dctx->xxhState, op, decodedSize);  in ZSTD_decompressFrame()
    909  op += decodedSize;  in ZSTD_decompressFrame()
    917  RETURN_ERROR_IF((U64)(op-ostart) != dctx->fParams.frameContentSize,  in ZSTD_decompressFrame()
    931  ZSTD_DCtx_trace_end(dctx, (U64)(op-ostart), (U64)(ip-istart), /* streaming */ 0);  in ZSTD_decompressFrame()
    [all …]

huf_decompress.c
    148  BYTE* op[4];  member
    214  args->op[0] = (BYTE*)dst;  in HUF_DecompressAsmArgs_init()
    215  args->op[1] = args->op[0] + (dstSize+3)/4;  in HUF_DecompressAsmArgs_init()
    216  args->op[2] = args->op[1] + (dstSize+3)/4;  in HUF_DecompressAsmArgs_init()
    217  args->op[3] = args->op[2] + (dstSize+3)/4;  in HUF_DecompressAsmArgs_init()
    220  if (args->op[3] >= oend)  in HUF_DecompressAsmArgs_init()
    250  if (args->op[stream] > segmentEnd)  in HUF_initRemainingDStream()
    531  BYTE* op = (BYTE*)dst;  in HUF_decompress1X1_usingDTable_internal_body() local
    532  BYTE* const oend = op + dstSize;  in HUF_decompress1X1_usingDTable_internal_body()
    541  HUF_decodeStreamX1(op, &bitD, oend, dt, dtLog);  in HUF_decompress1X1_usingDTable_internal_body()
    [all …]
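
Lines 214-217 carve the destination into the four output segments of the four-stream Huffman decoder: each segment is ceil(dstSize/4) bytes, and the last stream takes whatever remains. A sketch of just that arithmetic; split_dst() is an illustrative name.

    #include <stddef.h>

    static void split_dst(unsigned char *dst, size_t dstSize,
                          unsigned char *op[4])
    {
        size_t segment = (dstSize + 3) / 4;   /* round up */

        op[0] = dst;
        op[1] = op[0] + segment;
        op[2] = op[1] + segment;
        op[3] = op[2] + segment;              /* last stream gets the rest */
    }

For dstSize = 10 the streams start at offsets 0, 3, 6 and 9, so the fourth stream decodes a single byte; the check at line 220 rejects splits where op[3] would start outside the buffer.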

/lib/zstd/compress/

zstd_compress_superblock.c
     51  BYTE* op = ostart + lhSize;  in ZSTD_compressSubBlock_literal() local
     73  ZSTD_memcpy(op, hufMetadata->hufDesBuffer, hufMetadata->hufDesSize);  in ZSTD_compressSubBlock_literal()
     74  op += hufMetadata->hufDesSize;  in ZSTD_compressSubBlock_literal()
     80  …{ const size_t cSize = singleStream ? HUF_compress1X_usingCTable(op, oend-op, literals, litSize,…  in ZSTD_compressSubBlock_literal()
     81  : HUF_compress4X_usingCTable(op, oend-op, literals, litSize, hufTable);  in ZSTD_compressSubBlock_literal()
     82  op += cSize;  in ZSTD_compressSubBlock_literal()
    125  DEBUGLOG(5, "Compressed literals: %u -> %u", (U32)litSize, (U32)(op-ostart));  in ZSTD_compressSubBlock_literal()
    126  return op-ostart;  in ZSTD_compressSubBlock_literal()
    170  BYTE* op = ostart;  in ZSTD_compressSubBlock_sequences() local
    177  RETURN_ERROR_IF((oend-op) < 3 /*max nbSeq Size*/ + 1 /*seqHead*/,  in ZSTD_compressSubBlock_sequences()
    [all …]

huf_compress.c
     92  BYTE* op = ostart;  in HUF_compressWeights() local
    114  …{ CHECK_V_F(hSize, FSE_writeNCount(op, (size_t)(oend-op), wksp->norm, maxSymbolValue, tableLog) …  in HUF_compressWeights()
    115  op += hSize;  in HUF_compressWeights()
    120  …{ CHECK_V_F(cSize, FSE_compress_usingCTable(op, (size_t)(oend - op), weightTable, wtSize, wksp->…  in HUF_compressWeights()
    122  op += cSize;  in HUF_compressWeights()
    125  return (size_t)(op-ostart);  in HUF_compressWeights()
    174  BYTE* op = (BYTE*)dst;  in HUF_writeCTable_wksp() local
    191  …{ CHECK_V_F(hSize, HUF_compressWeights(op+1, maxDstSize-1, wksp->huffWeight, maxSymbolValue, &wk…  in HUF_writeCTable_wksp()
    193  op[0] = (BYTE)hSize;  in HUF_writeCTable_wksp()
    200  op[0] = (BYTE)(128 /*special case*/ + (maxSymbolValue-1));  in HUF_writeCTable_wksp()
    [all …]

zstd_compress.c
    2367  BYTE* op = ostart;  in ZSTD_buildSequencesStatistics() local
    2379  assert(op <= oend);  in ZSTD_buildSequencesStatistics()
    2394  op, (size_t)(oend - op),  in ZSTD_buildSequencesStatistics()
    2408  op += countSize;  in ZSTD_buildSequencesStatistics()
    2409  assert(op <= oend);  in ZSTD_buildSequencesStatistics()
    2426  op, (size_t)(oend - op),  in ZSTD_buildSequencesStatistics()
    2440  op += countSize;  in ZSTD_buildSequencesStatistics()
    2441  assert(op <= oend);  in ZSTD_buildSequencesStatistics()
    2447  DEBUGLOG(5, "Building ML table (remaining space : %i)", (int)(oend-op));  in ZSTD_buildSequencesStatistics()
    2456  op, (size_t)(oend - op),  in ZSTD_buildSequencesStatistics()
    [all …]

/lib/

atomic64.c
     73  #define ATOMIC64_OP(op, c_op) \  argument
     74  void generic_atomic64_##op(s64 a, atomic64_t *v) \
     85  EXPORT_SYMBOL(generic_atomic64_##op);
     87  #define ATOMIC64_OP_RETURN(op, c_op) \  argument
     88  s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v) \
    101  EXPORT_SYMBOL(generic_atomic64_##op##_return);
    103  #define ATOMIC64_FETCH_OP(op, c_op) \  argument
    104  s64 generic_atomic64_fetch_##op(s64 a, atomic64_t *v) \
    118  EXPORT_SYMBOL(generic_atomic64_fetch_##op);
    120  #define ATOMIC64_OPS(op, c_op) \  argument
    [all …]
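
These macros stamp out one exported function per operation by token-pasting the op name and splicing the C operator in as c_op. The expansion below is a sketch of what ATOMIC64_OP(add, +=) produces, following the pattern in the excerpt; lib/atomic64.c serializes each operation with a spinlock picked by hashing the variable's address (the lock_addr() call shown here).

    #define ATOMIC64_OP(op, c_op)                                   \
    void generic_atomic64_##op(s64 a, atomic64_t *v)                \
    {                                                               \
        unsigned long flags;                                        \
        raw_spinlock_t *lock = lock_addr(v);                        \
                                                                    \
        raw_spin_lock_irqsave(lock, flags);                         \
        v->counter c_op a;              /* e.g. v->counter += a */  \
        raw_spin_unlock_irqrestore(lock, flags);                    \
    }                                                               \
    EXPORT_SYMBOL(generic_atomic64_##op);

    ATOMIC64_OP(add, +=)    /* defines generic_atomic64_add() */
    ATOMIC64_OP(and, &=)    /* defines generic_atomic64_and() */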

asn1_decoder.c
    175  enum asn1_opcode op;  in asn1_ber_decoder() local
    205  op = machine[pc];  in asn1_ber_decoder()
    206  if (unlikely(pc + asn1_op_lengths[op] > machlen))  in asn1_ber_decoder()
    212  if (op <= ASN1_OP__MATCHES_TAG) {  in asn1_ber_decoder()
    216  if ((op & ASN1_OP_MATCH__COND && flags & FLAG_MATCHED) ||  in asn1_ber_decoder()
    217  (op & ASN1_OP_MATCH__SKIP && dp == datalen)) {  in asn1_ber_decoder()
    219  pc += asn1_op_lengths[op];  in asn1_ber_decoder()
    233  if (op & ASN1_OP_MATCH__ANY) {  in asn1_ber_decoder()
    250  if (op & ASN1_OP_MATCH__SKIP) {  in asn1_ber_decoder()
    251  pc += asn1_op_lengths[op];  in asn1_ber_decoder()
    [all …]
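
asn1_ber_decoder() is a small bytecode interpreter: the compiled grammar is an opcode array, every opcode has a fixed length, and the program counter advances by a per-opcode table lookup after a bounds check (lines 205-219). A toy sketch of that interpreter shape; the opcode names and lengths are invented, not the kernel's ASN1_OP_* set.

    #include <stddef.h>

    enum demo_opcode { OP_MATCH, OP_SKIP, OP_END, OP__NR };

    static const unsigned char demo_op_lengths[OP__NR] = {
        [OP_MATCH] = 2,     /* opcode + one tag operand */
        [OP_SKIP]  = 1,
        [OP_END]   = 1,
    };

    static int run(const unsigned char *machine, size_t machlen)
    {
        size_t pc = 0;

        while (pc < machlen) {
            unsigned char op = machine[pc];

            /* line 206's idea: the whole instruction must fit */
            if (op >= OP__NR || pc + demo_op_lengths[op] > machlen)
                return -1;

            switch (op) {
            case OP_MATCH:
                /* would consume machine[pc + 1] as the expected tag */
                break;
            case OP_SKIP:
                break;
            case OP_END:
                return 0;
            }
            pc += demo_op_lengths[op];  /* advance by this opcode's size */
        }
        return -1;                      /* ran off the end of the program */
    }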

atomic64_test.c
     20  #define TEST(bit, op, c_op, val) \  argument
     24  atomic##bit##_##op(val, &v); \
     36  #define FAMILY_TEST(test, bit, op, args...) \  argument
     38  test(bit, op, ##args); \
     39  test(bit, op##_acquire, ##args); \
     40  test(bit, op##_release, ##args); \
     41  test(bit, op##_relaxed, ##args); \
     44  #define TEST_RETURN(bit, op, c_op, val) \  argument
     49  BUG_ON(atomic##bit##_##op(val, &v) != r); \
     53  #define TEST_FETCH(bit, op, c_op, val) \  argument
    [all …]

logic_iomem.c
    172  #define MAKE_FALLBACK(op, sz) \  argument
    173  static u##sz real_raw_read ## op(const volatile void __iomem *addr) \
    175  WARN(1, "Invalid read" #op " at address %llx\n", \
    180  static void real_raw_write ## op(u ## sz val, \
    183  WARN(1, "Invalid writeq" #op " of 0x%llx at address %llx\n", \
    218  #define MAKE_OP(op, sz) \  argument
    219  u##sz __raw_read ## op(const volatile void __iomem *addr) \
    224  return real_raw_read ## op(addr); \
    230  EXPORT_SYMBOL(__raw_read ## op); \
    232  void __raw_write ## op(u ## sz val, volatile void __iomem *addr) \
    [all …]

packing.c
     78  enum packing_op op, u8 quirks)  in packing() argument
    102  if (op == PACK && value_width < 64 && (*uval >= (1ull << value_width)))  in packing()
    110  if (op == UNPACK)  in packing()
    165  if (op == UNPACK) {  in packing()
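
packing() is a single entry point whose op argument selects the direction: PACK writes *uval into a bit range of the buffer, UNPACK reads it back out. A sketch of a call, assuming the signature declared in include/linux/packing.h; the bit positions and the zero quirks value are example choices, not anything the excerpt prescribes.

    #include <linux/packing.h>

    static int pack_example(void *buf, size_t buflen)
    {
        u64 val = 0x2a;

        /* Write val into bits 47..32 of buf; calling with UNPACK and
         * the same bounds would read those bits back into val. */
        return packing(buf, &val, 47, 32, buflen, PACK, 0 /* no quirks */);
    }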

smp_processor_id.c
     64  noinstr void __this_cpu_preempt_check(const char *op)  in __this_cpu_preempt_check() argument
     66  check_preemption_disabled("__this_cpu_", op);  in __this_cpu_preempt_check()

test_bpf.c
    502  int op = ops[(i >> 1) % ARRAY_SIZE(ops)];  in __bpf_fill_max_jmp() local
    505  insns[i++] = BPF_ALU32_REG(op, R0, R1);  in __bpf_fill_max_jmp()
    507  insns[i++] = BPF_ALU64_REG(op, R0, R1);  in __bpf_fill_max_jmp()
    543  static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)  in __bpf_alu_result() argument
    546  switch (op) {  in __bpf_alu_result()
    594  static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,  in __bpf_fill_alu_shift() argument
    628  insn[i++] = BPF_ALU32_IMM(op, R1, imm);  in __bpf_fill_alu_shift()
    630  insn[i++] = BPF_ALU32_REG(op, R1, R2);  in __bpf_fill_alu_shift()
    632  if (op == BPF_ARSH)  in __bpf_fill_alu_shift()
    636  __bpf_alu_result(&val, reg, imm, op);  in __bpf_fill_alu_shift()
    [all …]

/lib/zstd/common/

fse_decompress.c
    237  BYTE* op = ostart;  in FSE_decompress_usingDTable_generic() local
    238  BYTE* const omax = op + maxDstSize;  in FSE_decompress_usingDTable_generic()
    254  for ( ; (BIT_reloadDStream(&bitD)==BIT_DStream_unfinished) & (op<olimit) ; op+=4) {  in FSE_decompress_usingDTable_generic()
    255  op[0] = FSE_GETSYMBOL(&state1);  in FSE_decompress_usingDTable_generic()
    260  op[1] = FSE_GETSYMBOL(&state2);  in FSE_decompress_usingDTable_generic()
    263  { if (BIT_reloadDStream(&bitD) > BIT_DStream_unfinished) { op+=2; break; } }  in FSE_decompress_usingDTable_generic()
    265  op[2] = FSE_GETSYMBOL(&state1);  in FSE_decompress_usingDTable_generic()
    270  op[3] = FSE_GETSYMBOL(&state2);  in FSE_decompress_usingDTable_generic()
    276  if (op>(omax-2)) return ERROR(dstSize_tooSmall);  in FSE_decompress_usingDTable_generic()
    277  *op++ = FSE_GETSYMBOL(&state1);  in FSE_decompress_usingDTable_generic()
    [all …]

zstd_internal.h
    212  BYTE* op = (BYTE*)dst;  in ZSTD_wildcopy() local
    213  BYTE* const oend = op + length;  in ZSTD_wildcopy()
    218  COPY8(op, ip)  in ZSTD_wildcopy()
    219  } while (op < oend);  in ZSTD_wildcopy()
    230  COPY16(op, ip);  in ZSTD_wildcopy()
    232  while (op < oend);  in ZSTD_wildcopy()
    234  ZSTD_copy16(op, ip);  in ZSTD_wildcopy()
    236  op += 16;  in ZSTD_wildcopy()
    239  COPY16(op, ip);  in ZSTD_wildcopy()
    240  COPY16(op, ip);  in ZSTD_wildcopy()
    [all …]
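
ZSTD_wildcopy() copies in fixed-size chunks and deliberately overshoots the requested end, relying on the caller to guarantee slack after oend; for the short, hot copies inside a (de)compressor this beats an exact-length memcpy. A simplified stand-alone sketch of the idiom (the real function also has an 8-byte-step variant, line 218, for overlapping matches):

    #include <stddef.h>
    #include <string.h>

    /* Caller must guarantee the destination has >= 15 bytes of slack
     * past dst + length, since the last chunk may overshoot. */
    static void wildcopy(void *dst, const void *src, size_t length)
    {
        unsigned char *op = dst;
        const unsigned char *ip = src;
        unsigned char *const oend = op + length;

        do {
            memcpy(op, ip, 16);     /* may write up to 15 bytes past oend */
            op += 16;
            ip += 16;
        } while (op < oend);
    }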

/lib/842/

842_decompress.c
    234  u8 op = decomp_ops[o][i];  in do_op() local
    236  pr_debug("op is %x\n", op);  in do_op()
    238  switch (op & OP_ACTION) {  in do_op()
    240  ret = do_data(p, op & OP_AMOUNT);  in do_op()
    243  ret = do_index(p, op & OP_AMOUNT);  in do_op()
    248  pr_err("Internal error, invalid op %x\n", op);  in do_op()
    282  u64 op, rep, tmp, bytes, total;  in sw842_decompress() local
    297  ret = next_bits(&p, &op, OP_BITS);  in sw842_decompress()
    301  pr_debug("template is %lx\n", (unsigned long)op);  in sw842_decompress()
    303  switch (op) {  in sw842_decompress()
    [all …]
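
Each 842 template expands to a list of op bytes that pack an action and an amount into one value, and do_op() masks the two fields apart before dispatching (lines 238-248). A toy sketch of that dispatch; the mask values and the stub handlers are illustrative, not lib/842's actual constants.

    #define OP_AMOUNT        0x1f   /* low bits: how much data the action moves */
    #define OP_ACTION        0xe0   /* high bits: which action to take */
    #define OP_ACTION_DATA   0x20   /* emit literal bytes from the input */
    #define OP_ACTION_INDEX  0x40   /* re-emit earlier bytes via an index */

    static int do_data(unsigned int amount)  { /* copy amount input bytes */ return 0; }
    static int do_index(unsigned int amount) { /* copy via dictionary index */ return 0; }

    static int do_op(unsigned char op)
    {
        switch (op & OP_ACTION) {
        case OP_ACTION_DATA:
            return do_data(op & OP_AMOUNT);
        case OP_ACTION_INDEX:
            return do_index(op & OP_AMOUNT);
        default:
            return -1;              /* internal error: invalid op */
        }
    }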

/lib/kunit/

attributes.c
     90  static int int_filter(long val, const char *op, int input, int *err)  in int_filter() argument
     92  if (!strncmp(op, "<=", 2))  in int_filter()
     94  else if (!strncmp(op, ">=", 2))  in int_filter()
     96  else if (!strncmp(op, "!=", 2))  in int_filter()
     98  else if (!strncmp(op, ">", 1))  in int_filter()
    100  else if (!strncmp(op, "<", 1))  in int_filter()
    102  else if (!strncmp(op, "=", 1))  in int_filter()
    105  pr_err("kunit executor: invalid filter operation: %s\n", op);  in int_filter()
    341  char op;  in kunit_next_attr_filter() local
    363  op = input[op_index];  in kunit_next_attr_filter()
    [all …]
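
int_filter() parses the comparison operator out of a test-attribute filter string. The two-character operators have to be tested before their one-character prefixes ("<=" before "<"), which is why the strncmp chain is ordered this way. A stand-alone rendering of the same chain, with userspace error reporting substituted for pr_err() and the return values filled in as the comparisons the excerpt implies:

    #include <stdio.h>
    #include <string.h>

    static int int_filter(long val, const char *op, int input, int *err)
    {
        if (!strncmp(op, "<=", 2))
            return val <= input;
        if (!strncmp(op, ">=", 2))
            return val >= input;
        if (!strncmp(op, "!=", 2))
            return val != input;
        if (!strncmp(op, ">", 1))
            return val > input;
        if (!strncmp(op, "<", 1))
            return val < input;
        if (!strncmp(op, "=", 1))
            return val == input;

        fprintf(stderr, "invalid filter operation: %s\n", op);
        *err = -1;
        return 0;
    }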