/crypto/: matches for the identifier "dst"
authencesn.c
     39: struct scatterlist dst[2];  [member]
     98: struct scatterlist *dst = req->dst;  [in crypto_authenc_esn_genicv_tail(), local]
    102: scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);  [in crypto_authenc_esn_genicv_tail()]
    103: scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);  [in crypto_authenc_esn_genicv_tail()]
    104: scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);  [in crypto_authenc_esn_genicv_tail()]
    106: scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);  [in crypto_authenc_esn_genicv_tail()]
    132: struct scatterlist *dst = req->dst;  [in crypto_authenc_esn_genicv(), local]
    139: scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);  [in crypto_authenc_esn_genicv()]
    140: scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);  [in crypto_authenc_esn_genicv()]
    141: scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);  [in crypto_authenc_esn_genicv()]
    [all …]
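authencesn handles IPsec extended sequence numbers: before hashing, crypto_authenc_esn_genicv() parks one 32-bit header word behind the ciphertext so the MAC covers it, and genicv_tail() moves it back and appends the ICV. A flat-buffer sketch of those calls, assuming scatterwalk_map_and_copy(buf, sg, start, nbytes, out) copies nbytes at offset start (out == 1 writes into the scatterlist); a contiguous msg array stands in for req->dst, and the precise header layout is authencesn's business:

    #include <stdint.h>
    #include <string.h>

    /* Flat-buffer model of scatterwalk_map_and_copy(): out == 0 reads
     * nbytes at msg+start into buf, out == 1 writes buf back into msg. */
    static void map_and_copy(void *buf, uint8_t *msg, unsigned int start,
                             unsigned int nbytes, int out)
    {
        if (out)
            memcpy(msg + start, buf, nbytes);
        else
            memcpy(buf, msg + start, nbytes);
    }

    /* genicv: park one header word behind the ciphertext so the MAC
     * input is contiguous (mirrors snippet lines 139-141). */
    static void esn_shuffle(uint8_t *msg, unsigned int assoclen,
                            unsigned int cryptlen)
    {
        uint32_t tmp[2];

        map_and_copy(tmp, msg, 0, 8, 0);      /* save the first two words */
        map_and_copy(tmp, msg, 4, 4, 1);      /* word 0 also at offset 4 */
        map_and_copy(tmp + 1, msg, assoclen + cryptlen, 4, 1); /* word 1 to tail */
    }

    /* genicv_tail: restore the layout, then append the ICV
     * (mirrors snippet lines 102-106). */
    static void esn_unshuffle(uint8_t *msg, unsigned int assoclen,
                              unsigned int cryptlen, const uint8_t *hash,
                              unsigned int authsize)
    {
        uint32_t tmp[2];

        map_and_copy(tmp, msg, 4, 4, 0);
        map_and_copy(tmp + 1, msg, assoclen + cryptlen, 4, 0);
        map_and_copy(tmp, msg, 0, 8, 1);      /* original first 8 bytes back */
        memcpy(msg + assoclen + cryptlen, hash, authsize);
    }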
aegis.h
     36: void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,
     38: void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,
     46: static __always_inline void crypto_aegis_block_xor(union aegis_block *dst,  [in crypto_aegis_block_xor(), argument]
     49: dst->words64[0] ^= src->words64[0];  [in crypto_aegis_block_xor()]
     50: dst->words64[1] ^= src->words64[1];  [in crypto_aegis_block_xor()]
     53: static __always_inline void crypto_aegis_block_and(union aegis_block *dst,  [in crypto_aegis_block_and(), argument]
     56: dst->words64[0] &= src->words64[0];  [in crypto_aegis_block_and()]
     57: dst->words64[1] &= src->words64[1];  [in crypto_aegis_block_and()]
     60: static __always_inline void crypto_aegis_aesenc(union aegis_block *dst,  [in crypto_aegis_aesenc(), argument]
     73: dst->words32[0] = cpu_to_le32(d0) ^ key->words32[0];  [in crypto_aegis_aesenc()]
    [all …]
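union aegis_block lets one 16-byte AEGIS block be viewed as raw bytes or as two 64-bit lanes, so XOR and AND run as two word operations instead of sixteen byte-wise ones. A standalone sketch of the two helpers (field names mirror the kernel's union; the AES round helper is omitted since it needs S-box tables):

    #include <stdint.h>

    #define AEGIS_BLOCK_SIZE 16

    /* One 16-byte AEGIS block, viewable as bytes or two 64-bit lanes. */
    union aegis_block {
        uint8_t  bytes[AEGIS_BLOCK_SIZE];
        uint64_t words64[AEGIS_BLOCK_SIZE / 8];
    };

    /* dst ^= src, two lane-wide XORs. */
    static inline void aegis_block_xor(union aegis_block *dst,
                                       const union aegis_block *src)
    {
        dst->words64[0] ^= src->words64[0];
        dst->words64[1] ^= src->words64[1];
    }

    /* dst &= src, same two-lane pattern. */
    static inline void aegis_block_and(union aegis_block *dst,
                                       const union aegis_block *src)
    {
        dst->words64[0] &= src->words64[0];
        dst->words64[1] &= src->words64[1];
    }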
cfb.c
     37: const u8 *src, u8 *dst)  [in crypto_cfb_encrypt_one(), argument]
     39: crypto_cipher_encrypt_one(skcipher_cipher_simple(tfm), dst, src);  [in crypto_cfb_encrypt_one()]
     50: u8 *dst = walk->dst.virt.addr;  [in crypto_cfb_final(), local]
     55: crypto_xor_cpy(dst, stream, src, nbytes);  [in crypto_cfb_final()]
     64: u8 *dst = walk->dst.virt.addr;  [in crypto_cfb_encrypt_segment(), local]
     68: crypto_cfb_encrypt_one(tfm, iv, dst);  [in crypto_cfb_encrypt_segment()]
     69: crypto_xor(dst, src, bsize);  [in crypto_cfb_encrypt_segment()]
     70: iv = dst;  [in crypto_cfb_encrypt_segment()]
     73: dst += bsize;  [in crypto_cfb_encrypt_segment()]
    113: if (walk.src.virt.addr == walk.dst.virt.addr)  [in crypto_cfb_encrypt()]
    [all …]
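Lines 68-70 are the CFB feedback path: encrypt the IV, XOR in the plaintext, and the ciphertext just written becomes the next IV; crypto_cfb_final() handles the tail by XORing a keystream over the leftover bytes. A minimal sketch of the full-block loop, with block_encrypt as a toy stand-in for crypto_cipher_encrypt_one (not a real cipher):

    #include <stddef.h>
    #include <stdint.h>

    #define BSIZE 16  /* block size of the underlying cipher */

    /* Toy stand-in, NOT a real cipher: byte rotate plus a constant. */
    static void block_encrypt(uint8_t dst[BSIZE], const uint8_t src[BSIZE])
    {
        for (int i = 0; i < BSIZE; i++)
            dst[i] = (uint8_t)(src[(i + 1) % BSIZE] ^ 0xA5);
    }

    /* CFB encryption of whole blocks: C[i] = E(IV) ^ P[i]; IV = C[i]. */
    static void cfb_encrypt_blocks(uint8_t *dst, const uint8_t *src,
                                   size_t nblocks, uint8_t iv[BSIZE])
    {
        while (nblocks--) {
            block_encrypt(dst, iv);          /* dst = E(iv) */
            for (int i = 0; i < BSIZE; i++) {
                dst[i] ^= src[i];            /* XOR in the plaintext */
                iv[i] = dst[i];              /* ciphertext feeds back */
            }
            src += BSIZE;
            dst += BSIZE;
        }
    }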
lz4hc.c
     53: u8 *dst, unsigned int *dlen, void *ctx)  [in __lz4hc_compress_crypto(), argument]
     55: int out_len = LZ4_compress_HC(src, dst, slen,  [in __lz4hc_compress_crypto()]
     66: unsigned int slen, u8 *dst, unsigned int *dlen,  [in lz4hc_scompress(), argument]
     69: return __lz4hc_compress_crypto(src, slen, dst, dlen, ctx);  [in lz4hc_scompress()]
     73: unsigned int slen, u8 *dst,  [in lz4hc_compress_crypto(), argument]
     78: return __lz4hc_compress_crypto(src, slen, dst, dlen,  [in lz4hc_compress_crypto()]
     83: u8 *dst, unsigned int *dlen, void *ctx)  [in __lz4hc_decompress_crypto(), argument]
     85: int out_len = LZ4_decompress_safe(src, dst, slen, *dlen);  [in __lz4hc_decompress_crypto()]
     95: unsigned int slen, u8 *dst, unsigned int *dlen,  [in lz4hc_sdecompress(), argument]
     98: return __lz4hc_decompress_crypto(src, slen, dst, dlen, NULL);  [in lz4hc_sdecompress()]
    [all …]
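All of these wrappers funnel into the same pattern: hand src/dst straight to the LZ4 library, treat a non-positive return as failure, and report the produced length back through *dlen. A user-space sketch against liblz4's public API (LZ4_compress_HC and LZ4_decompress_safe as declared in <lz4hc.h> and <lz4.h>; the -EINVAL mapping mirrors the kernel glue):

    #include <errno.h>
    #include <lz4.h>
    #include <lz4hc.h>
    #include <stdint.h>

    /* Compress src[0..slen) into dst; *dlen is capacity in, size out.
     * Returns 0 on success, -EINVAL on failure, like the kernel wrappers. */
    static int lz4hc_compress_buf(const uint8_t *src, unsigned int slen,
                                  uint8_t *dst, unsigned int *dlen, int level)
    {
        int out_len = LZ4_compress_HC((const char *)src, (char *)dst,
                                      (int)slen, (int)*dlen, level);
        if (out_len <= 0)
            return -EINVAL;   /* dst too small, or bad input */
        *dlen = (unsigned int)out_len;
        return 0;
    }

    static int lz4_decompress_buf(const uint8_t *src, unsigned int slen,
                                  uint8_t *dst, unsigned int *dlen)
    {
        int out_len = LZ4_decompress_safe((const char *)src, (char *)dst,
                                          (int)slen, (int)*dlen);
        if (out_len < 0)
            return -EINVAL;   /* corrupt stream or dst too small */
        *dlen = (unsigned int)out_len;
        return 0;
    }

The plain-LZ4, LZO, LZO-RLE, zstd, and 842 entries below follow the same wrapper shape around their respective libraries.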
lz4.c
     54: u8 *dst, unsigned int *dlen, void *ctx)  [in __lz4_compress_crypto(), argument]
     56: int out_len = LZ4_compress_default(src, dst,  [in __lz4_compress_crypto()]
     67: unsigned int slen, u8 *dst, unsigned int *dlen,  [in lz4_scompress(), argument]
     70: return __lz4_compress_crypto(src, slen, dst, dlen, ctx);  [in lz4_scompress()]
     74: unsigned int slen, u8 *dst, unsigned int *dlen)  [in lz4_compress_crypto(), argument]
     78: return __lz4_compress_crypto(src, slen, dst, dlen, ctx->lz4_comp_mem);  [in lz4_compress_crypto()]
     82: u8 *dst, unsigned int *dlen, void *ctx)  [in __lz4_decompress_crypto(), argument]
     84: int out_len = LZ4_decompress_safe(src, dst, slen, *dlen);  [in __lz4_decompress_crypto()]
     94: unsigned int slen, u8 *dst, unsigned int *dlen,  [in lz4_sdecompress(), argument]
     97: return __lz4_decompress_crypto(src, slen, dst, dlen, NULL);  [in lz4_sdecompress()]
    [all …]
keywrap.c
    129: struct scatterlist *src, *dst;  [in crypto_kw_decrypt(), local]
    150: dst = req->dst;  [in crypto_kw_decrypt()]
    171: crypto_kw_scatterlist_ff(&dst_walk, dst, nbytes);  [in crypto_kw_decrypt()]
    180: src = req->dst;  [in crypto_kw_decrypt()]
    181: dst = req->dst;  [in crypto_kw_decrypt()]
    198: struct scatterlist *src, *dst;  [in crypto_kw_encrypt(), local]
    223: dst = req->dst;  [in crypto_kw_encrypt()]
    230: scatterwalk_start(&dst_walk, dst);  [in crypto_kw_encrypt()]
    252: src = req->dst;  [in crypto_kw_encrypt()]
    253: dst = req->dst;  [in crypto_kw_encrypt()]
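keywrap.c implements NIST SP 800-38F / RFC 3394 key wrapping; the repeated src = req->dst; dst = req->dst; assignments show that after the first pass the remaining rounds run in place over the output buffer. A sketch of the wrap core over 64-bit semiblocks (aes_encrypt_block128 is a hypothetical hook that must be bound to a real AES-128 ECB encrypt for the result to mean anything):

    #include <stdint.h>
    #include <string.h>

    /* Hypothetical stand-in for AES-128 ECB on one 16-byte block. */
    void aes_encrypt_block128(uint8_t out[16], const uint8_t in[16]);

    /* RFC 3394 wrap core: n 64-bit semiblocks in r[], wrapped in place.
     * a[] ends up holding the integrity check value. */
    static void kw_wrap(uint8_t a[8], uint8_t *r, unsigned int n)
    {
        uint8_t b[16];
        uint64_t t = 1;

        memset(a, 0xA6, 8);                      /* default IV */
        for (int j = 0; j < 6; j++) {
            for (unsigned int i = 0; i < n; i++, t++) {
                memcpy(b, a, 8);                 /* B = AES(A || R[i]) */
                memcpy(b + 8, r + 8 * i, 8);
                aes_encrypt_block128(b, b);
                for (int k = 0; k < 8; k++)      /* A = MSB64(B) ^ t, big endian */
                    a[k] = b[k] ^ (uint8_t)(t >> (8 * (7 - k)));
                memcpy(r + 8 * i, b + 8, 8);     /* R[i] = LSB64(B) */
            }
        }
    }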
lzo.c
     53: u8 *dst, unsigned int *dlen, void *ctx)  [in __lzo_compress(), argument]
     58: err = lzo1x_1_compress(src, slen, dst, &tmp_len, ctx);  [in __lzo_compress()]
     68: unsigned int slen, u8 *dst, unsigned int *dlen)  [in lzo_compress(), argument]
     72: return __lzo_compress(src, slen, dst, dlen, ctx->lzo_comp_mem);  [in lzo_compress()]
     76: unsigned int slen, u8 *dst, unsigned int *dlen,  [in lzo_scompress(), argument]
     79: return __lzo_compress(src, slen, dst, dlen, ctx);  [in lzo_scompress()]
     83: u8 *dst, unsigned int *dlen)  [in __lzo_decompress(), argument]
     88: err = lzo1x_decompress_safe(src, slen, dst, &tmp_len);  [in __lzo_decompress()]
     98: unsigned int slen, u8 *dst, unsigned int *dlen)  [in lzo_decompress(), argument]
    100: return __lzo_decompress(src, slen, dst, dlen);  [in lzo_decompress()]
    [all …]
lzo-rle.c
     53: u8 *dst, unsigned int *dlen, void *ctx)  [in __lzorle_compress(), argument]
     58: err = lzorle1x_1_compress(src, slen, dst, &tmp_len, ctx);  [in __lzorle_compress()]
     68: unsigned int slen, u8 *dst, unsigned int *dlen)  [in lzorle_compress(), argument]
     72: return __lzorle_compress(src, slen, dst, dlen, ctx->lzorle_comp_mem);  [in lzorle_compress()]
     76: unsigned int slen, u8 *dst, unsigned int *dlen,  [in lzorle_scompress(), argument]
     79: return __lzorle_compress(src, slen, dst, dlen, ctx);  [in lzorle_scompress()]
     83: u8 *dst, unsigned int *dlen)  [in __lzorle_decompress(), argument]
     88: err = lzo1x_decompress_safe(src, slen, dst, &tmp_len);  [in __lzorle_decompress()]
     98: unsigned int slen, u8 *dst, unsigned int *dlen)  [in lzorle_decompress(), argument]
    100: return __lzorle_decompress(src, slen, dst, dlen);  [in lzorle_decompress()]
    [all …]
ghash-generic.c
     83: u8 *dst = dctx->buffer;  [in ghash_update(), local]
     87: u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);  [in ghash_update()]
     96: gf128mul_4k_lle((be128 *)dst, ctx->gf128);  [in ghash_update()]
    100: crypto_xor(dst, src, GHASH_BLOCK_SIZE);  [in ghash_update()]
    101: gf128mul_4k_lle((be128 *)dst, ctx->gf128);  [in ghash_update()]
    109: *dst++ ^= *src++;  [in ghash_update()]
    117: u8 *dst = dctx->buffer;  [in ghash_flush(), local]
    120: u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);  [in ghash_flush()]
    125: gf128mul_4k_lle((be128 *)dst, ctx->gf128);  [in ghash_flush()]
    131: static int ghash_final(struct shash_desc *desc, u8 *dst)  [in ghash_final(), argument]
    [all …]
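Here dctx->buffer is the running digest Y itself: input bytes are XORed straight into it, dctx->bytes counts how many more are needed to complete a block, and each completed block triggers a multiply by H in GF(2^128). A user-space rendering of that update logic (gf128_mul_by_h is a hypothetical stand-in for the kernel's gf128mul_4k_lle and must be bound to a real GF(2^128) multiply):

    #include <stddef.h>
    #include <stdint.h>

    #define GHASH_BLOCK_SIZE 16

    struct ghash_desc_ctx {
        uint8_t buffer[GHASH_BLOCK_SIZE]; /* running digest Y, doubles as stash */
        unsigned int bytes;               /* bytes still needed to fill a block */
    };

    /* Hypothetical stand-in for gf128mul_4k_lle(): y = y * H in GF(2^128). */
    void gf128_mul_by_h(uint8_t y[GHASH_BLOCK_SIZE]);

    static void ghash_update_sketch(struct ghash_desc_ctx *d,
                                    const uint8_t *src, size_t len)
    {
        uint8_t *dst = d->buffer;

        if (d->bytes) {                       /* finish a straddling block */
            size_t n = len < d->bytes ? len : d->bytes;
            uint8_t *pos = dst + (GHASH_BLOCK_SIZE - d->bytes);

            d->bytes -= n;
            len -= n;
            while (n--)
                *pos++ ^= *src++;             /* fold bytes straight into Y */
            if (!d->bytes)
                gf128_mul_by_h(dst);          /* block complete: Y = Y * H */
        }
        while (len >= GHASH_BLOCK_SIZE) {     /* whole blocks */
            for (int i = 0; i < GHASH_BLOCK_SIZE; i++)
                dst[i] ^= src[i];
            gf128_mul_by_h(dst);
            src += GHASH_BLOCK_SIZE;
            len -= GHASH_BLOCK_SIZE;
        }
        if (len) {                            /* stash the tail */
            d->bytes = GHASH_BLOCK_SIZE - len;
            while (len--)
                *dst++ ^= *src++;
        }
    }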
pcbc.c
     27: u8 *dst = walk->dst.virt.addr;  [in crypto_pcbc_encrypt_segment(), local]
     32: crypto_cipher_encrypt_one(tfm, dst, iv);  [in crypto_pcbc_encrypt_segment()]
     33: crypto_xor_cpy(iv, dst, src, bsize);  [in crypto_pcbc_encrypt_segment()]
     36: dst += bsize;  [in crypto_pcbc_encrypt_segment()]
     75: if (walk.src.virt.addr == walk.dst.virt.addr)  [in crypto_pcbc_encrypt()]
     94: u8 *dst = walk->dst.virt.addr;  [in crypto_pcbc_decrypt_segment(), local]
     98: crypto_cipher_decrypt_one(tfm, dst, src);  [in crypto_pcbc_decrypt_segment()]
     99: crypto_xor(dst, iv, bsize);  [in crypto_pcbc_decrypt_segment()]
    100: crypto_xor_cpy(iv, dst, src, bsize);  [in crypto_pcbc_decrypt_segment()]
    103: dst += bsize;  [in crypto_pcbc_decrypt_segment()]
    [all …]
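PCBC chains both plaintext and ciphertext into the next IV: crypto_xor_cpy(iv, dst, src, bsize) computes exactly P ^ C. A sketch of the decrypt segment (block_decrypt is a placeholder for crypto_cipher_decrypt_one):

    #include <stdint.h>

    #define BSIZE 16

    /* Placeholder for the underlying block-cipher decrypt. */
    void block_decrypt(uint8_t dst[BSIZE], const uint8_t src[BSIZE]);

    /* PCBC decrypt: P[i] = D(C[i]) ^ IV; next IV = P[i] ^ C[i], so an
     * error in any block garbles everything after it, by design. */
    static void pcbc_decrypt_blocks(uint8_t *dst, const uint8_t *src,
                                    unsigned int nblocks, uint8_t iv[BSIZE])
    {
        while (nblocks--) {
            block_decrypt(dst, src);
            for (int i = 0; i < BSIZE; i++) {
                dst[i] ^= iv[i];             /* recover the plaintext */
                iv[i] = dst[i] ^ src[i];     /* chain P ^ C forward */
            }
            src += BSIZE;
            dst += BSIZE;
        }
    }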
zstd.c
    151: u8 *dst, unsigned int *dlen, void *ctx)  [in __zstd_compress(), argument]
    157: out_len = ZSTD_compressCCtx(zctx->cctx, dst, *dlen, src, slen, params);  [in __zstd_compress()]
    165: unsigned int slen, u8 *dst, unsigned int *dlen)  [in zstd_compress(), argument]
    169: return __zstd_compress(src, slen, dst, dlen, ctx);  [in zstd_compress()]
    173: unsigned int slen, u8 *dst, unsigned int *dlen,  [in zstd_scompress(), argument]
    176: return __zstd_compress(src, slen, dst, dlen, ctx);  [in zstd_scompress()]
    180: u8 *dst, unsigned int *dlen, void *ctx)  [in __zstd_decompress(), argument]
    185: out_len = ZSTD_decompressDCtx(zctx->dctx, dst, *dlen, src, slen);  [in __zstd_decompress()]
    193: unsigned int slen, u8 *dst, unsigned int *dlen)  [in zstd_decompress(), argument]
    197: return __zstd_decompress(src, slen, dst, dlen, ctx);  [in zstd_decompress()]
    [all …]
842.c
     67: u8 *dst, unsigned int *dlen)  [in crypto842_compress(), argument]
     71: return sw842_compress(src, slen, dst, dlen, ctx->wmem);  [in crypto842_compress()]
     76: u8 *dst, unsigned int *dlen, void *ctx)  [in crypto842_scompress(), argument]
     78: return sw842_compress(src, slen, dst, dlen, ctx);  [in crypto842_scompress()]
     83: u8 *dst, unsigned int *dlen)  [in crypto842_decompress(), argument]
     85: return sw842_decompress(src, slen, dst, dlen);  [in crypto842_decompress()]
     90: u8 *dst, unsigned int *dlen, void *ctx)  [in crypto842_sdecompress(), argument]
     92: return sw842_decompress(src, slen, dst, dlen);  [in crypto842_sdecompress()]
cipher.c
     60: u8 *dst, const u8 *src, bool enc)  [in cipher_crypt_one(), argument]
     67: if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {  [in cipher_crypt_one()]
     74: memcpy(dst, tmp, bs);  [in cipher_crypt_one()]
     76: fn(crypto_cipher_tfm(tfm), dst, src);  [in cipher_crypt_one()]
     81: u8 *dst, const u8 *src)  [in crypto_cipher_encrypt_one(), argument]
     83: cipher_crypt_one(tfm, dst, src, true);  [in crypto_cipher_encrypt_one()]
     88: u8 *dst, const u8 *src)  [in crypto_cipher_decrypt_one(), argument]
     90: cipher_crypt_one(tfm, dst, src, false);  [in crypto_cipher_decrypt_one()]
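cipher_crypt_one() bounces misaligned blocks through an aligned temporary so the cipher callback only ever sees pointers that satisfy the algorithm's alignmask; ORing the two addresses lets one branch test both. A sketch of the pattern (process_block stands in for the encrypt/decrypt callback; the constants are illustrative, and C11 alignas replaces the kernel's alignment attributes):

    #include <stdalign.h>
    #include <stdint.h>
    #include <string.h>

    #define BS 16            /* cipher block size */
    #define ALIGNMASK 0x0f   /* example: algorithm wants 16-byte alignment */

    void process_block(uint8_t dst[BS], const uint8_t src[BS]); /* stand-in */

    static void crypt_one_aligned(uint8_t *dst, const uint8_t *src)
    {
        /* One branch tests both pointers: OR the addresses, mask once. */
        if (((uintptr_t)dst | (uintptr_t)src) & ALIGNMASK) {
            alignas(16) uint8_t tmp[BS];

            memcpy(tmp, src, BS);        /* bounce in */
            process_block(tmp, tmp);
            memcpy(dst, tmp, BS);        /* bounce out */
        } else {
            process_block(dst, src);     /* fast path, no copies */
        }
    }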
scatterwalk.c
     21: void *dst = out ? sgdata : buf;  [in memcpy_dir(), local]
     23: memcpy(dst, src, nbytes);  [in memcpy_dir()]
     72: struct scatterlist *scatterwalk_ffwd(struct scatterlist dst[2],  [in scatterwalk_ffwd()]
     87: sg_init_table(dst, 2);  [in scatterwalk_ffwd()]
     88: sg_set_page(dst, sg_page(src), src->length - len, src->offset + len);  [in scatterwalk_ffwd()]
     89: scatterwalk_crypto_chain(dst, sg_next(src), 2);  [in scatterwalk_ffwd()]
     91: return dst;  [in scatterwalk_ffwd()]
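scatterwalk_ffwd() skips into a scatterlist without copying: it trims the entry where the offset lands and chains the remainder behind it in the caller-supplied two-entry table; when the skip falls exactly on an entry boundary it returns the original list, which is why callers such as ccm.c check whether their table was actually used. The same idea over a flat array of spans (a user-space analogy, not the sg API; the kernel additionally chains its table to the rest of the list, and like the kernel this assumes the list covers the skip):

    #include <stddef.h>
    #include <stdint.h>

    struct span {
        uint8_t *ptr;
        size_t   len;
    };

    /* Fast-forward spans[] by 'skip' bytes. If the skip lands on an
     * entry boundary, the original tail is returned and out[] is
     * untouched; otherwise out[0] holds the trimmed boundary entry. */
    static struct span *span_ffwd(struct span out[1], struct span *spans,
                                  size_t skip)
    {
        for (;;) {
            if (!skip)
                return spans;            /* boundary: no trim needed */
            if (spans->len > skip)
                break;
            skip -= spans->len;
            spans++;
        }
        out[0].ptr = spans->ptr + skip;  /* trim the straddled entry */
        out[0].len = spans->len - skip;
        return out;
    }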
cbc.c
     24: u8 *dst = walk->dst.virt.addr;  [in crypto_cbc_encrypt_segment(), local]
     35: fn(tfm, dst, iv);  [in crypto_cbc_encrypt_segment()]
     36: memcpy(iv, dst, bsize);  [in crypto_cbc_encrypt_segment()]
     39: dst += bsize;  [in crypto_cbc_encrypt_segment()]
     82: if (walk.src.virt.addr == walk.dst.virt.addr)  [in crypto_cbc_encrypt()]
     99: u8 *dst = walk->dst.virt.addr;  [in crypto_cbc_decrypt_segment(), local]
    109: fn(tfm, dst, src);  [in crypto_cbc_decrypt_segment()]
    110: crypto_xor(dst, iv, bsize);  [in crypto_cbc_decrypt_segment()]
    114: dst += bsize;  [in crypto_cbc_decrypt_segment()]
    164: if (walk.src.virt.addr == walk.dst.virt.addr)  [in crypto_cbc_decrypt()]
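The src/dst address comparisons at lines 82 and 164 pick in-place versus out-of-place loops: out-of-place decryption can use the still-intact src block as the next IV, while the in-place variant must save it first. A sketch of the out-of-place decrypt segment (block_decrypt stands in for the cipher callback fn):

    #include <stdint.h>
    #include <string.h>

    #define BSIZE 16

    void block_decrypt(uint8_t dst[BSIZE], const uint8_t src[BSIZE]); /* stand-in */

    /* Out-of-place CBC decrypt: P[i] = D(C[i]) ^ IV, then IV = C[i].
     * Safe only because src still holds the ciphertext afterwards. */
    static void cbc_decrypt_blocks(uint8_t *dst, const uint8_t *src,
                                   unsigned int nblocks, uint8_t iv[BSIZE])
    {
        while (nblocks--) {
            block_decrypt(dst, src);
            for (int i = 0; i < BSIZE; i++)
                dst[i] ^= iv[i];
            memcpy(iv, src, BSIZE);      /* ciphertext is the next IV */
            src += BSIZE;
            dst += BSIZE;
        }
    }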
des_generic.c
     37: static void crypto_des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)  [in crypto_des_encrypt(), argument]
     41: des_encrypt(dctx, dst, src);  [in crypto_des_encrypt()]
     44: static void crypto_des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)  [in crypto_des_decrypt(), argument]
     48: des_decrypt(dctx, dst, src);  [in crypto_des_decrypt()]
     69: static void crypto_des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst,  [in crypto_des3_ede_encrypt(), argument]
     74: des3_ede_encrypt(dctx, dst, src);  [in crypto_des3_ede_encrypt()]
     77: static void crypto_des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst,  [in crypto_des3_ede_decrypt(), argument]
     82: des3_ede_decrypt(dctx, dst, src);  [in crypto_des3_ede_decrypt()]
ccm.c
     36: struct scatterlist dst[3];  [member]
     46: struct scatterlist dst[3];  [member]
    235: scatterwalk_map_and_copy(odata, req->dst,  [in crypto_ccm_encrypt_done()]
    274: if (req->src != req->dst) {  [in crypto_ccm_init_crypt()]
    275: sg_init_table(pctx->dst, 3);  [in crypto_ccm_init_crypt()]
    276: sg_set_buf(pctx->dst, tag, 16);  [in crypto_ccm_init_crypt()]
    277: sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);  [in crypto_ccm_init_crypt()]
    278: if (sg != pctx->dst + 1)  [in crypto_ccm_init_crypt()]
    279: sg_chain(pctx->dst, 2, sg);  [in crypto_ccm_init_crypt()]
    291: struct scatterlist *dst;  [in crypto_ccm_encrypt(), local]
    [all …]
aegis128-neon.c
     13: void crypto_aegis128_encrypt_chunk_neon(void *state, void *dst, const void *src,
     15: void crypto_aegis128_decrypt_chunk_neon(void *state, void *dst, const void *src,
     49: void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,  [in crypto_aegis128_encrypt_chunk_simd(), argument]
     53: crypto_aegis128_encrypt_chunk_neon(state, dst, src, size);  [in crypto_aegis128_encrypt_chunk_simd()]
     57: void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,  [in crypto_aegis128_decrypt_chunk_simd(), argument]
     61: crypto_aegis128_decrypt_chunk_neon(state, dst, src, size);  [in crypto_aegis128_decrypt_chunk_simd()]
scompress.c
     30: void *dst;  [member]
     77: vfree(scratch->dst);  [in crypto_scomp_free_scratches()]
     79: scratch->dst = NULL;  [in crypto_scomp_free_scratches()]
    100: scratch->dst = mem;  [in crypto_scomp_alloc_scratches()]
    133: if (req->dst && !req->dlen)  [in scomp_acomp_comp_decomp()]
    147: scratch->dst, &req->dlen, *ctx);  [in scomp_acomp_comp_decomp()]
    150: scratch->dst, &req->dlen, *ctx);  [in scomp_acomp_comp_decomp()]
    152: if (!req->dst) {  [in scomp_acomp_comp_decomp()]
    153: req->dst = sgl_alloc(req->dlen, GFP_ATOMIC, NULL);  [in scomp_acomp_comp_decomp()]
    154: if (!req->dst) {  [in scomp_acomp_comp_decomp()]
    [all …]
deflate.c
    177: u8 *dst, unsigned int *dlen, void *ctx)  [in __deflate_compress(), argument]
    191: stream->next_out = (u8 *)dst;  [in __deflate_compress()]
    206: unsigned int slen, u8 *dst, unsigned int *dlen)  [in deflate_compress(), argument]
    210: return __deflate_compress(src, slen, dst, dlen, dctx);  [in deflate_compress()]
    214: unsigned int slen, u8 *dst, unsigned int *dlen,  [in deflate_scompress(), argument]
    217: return __deflate_compress(src, slen, dst, dlen, ctx);  [in deflate_scompress()]
    221: u8 *dst, unsigned int *dlen, void *ctx)  [in __deflate_decompress(), argument]
    236: stream->next_out = (u8 *)dst;  [in __deflate_decompress()]
    262: unsigned int slen, u8 *dst, unsigned int *dlen)  [in deflate_decompress(), argument]
    266: return __deflate_decompress(src, slen, dst, dlen, dctx);  [in deflate_decompress()]
    [all …]
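deflate.c points stream->next_in/next_out at the src/dst buffers and runs the kernel zlib in a single pass. The user-space zlib equivalent of the compress path (raw deflate via negative windowBits, as the kernel's -DEFLATE_DEF_WINBITS setup does; window size and error handling simplified):

    #include <string.h>
    #include <zlib.h>

    /* One-shot raw-deflate compress: 0 on success, -1 on failure;
     * the compressed size is reported back through *dlen. */
    static int deflate_buf(const unsigned char *src, unsigned int slen,
                           unsigned char *dst, unsigned int *dlen)
    {
        z_stream s;
        int ret = -1;

        memset(&s, 0, sizeof(s));
        /* Negative windowBits selects a raw stream with no zlib header. */
        if (deflateInit2(&s, Z_DEFAULT_COMPRESSION, Z_DEFLATED, -15, 8,
                         Z_DEFAULT_STRATEGY) != Z_OK)
            return -1;

        s.next_in   = (unsigned char *)src;
        s.avail_in  = slen;
        s.next_out  = dst;
        s.avail_out = *dlen;

        if (deflate(&s, Z_FINISH) == Z_STREAM_END) {
            *dlen = (unsigned int)s.total_out;   /* bytes produced */
            ret = 0;
        }
        deflateEnd(&s);
        return ret;
    }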
xctr.c
     39: u8 *dst = walk->dst.virt.addr;  [in crypto_xctr_crypt_final(), local]
     45: crypto_xor_cpy(dst, keystream, src, nbytes);  [in crypto_xctr_crypt_final()]
     55: u8 *dst = walk->dst.virt.addr;  [in crypto_xctr_crypt_segment(), local]
     61: fn(crypto_cipher_tfm(tfm), dst, walk->iv);  [in crypto_xctr_crypt_segment()]
     62: crypto_xor(dst, src, XCTR_BLOCKSIZE);  [in crypto_xctr_crypt_segment()]
     68: dst += XCTR_BLOCKSIZE;  [in crypto_xctr_crypt_segment()]
    112: if (walk.src.virt.addr == walk.dst.virt.addr)  [in crypto_xctr_crypt()]
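XCTR, the CTR variant used by HCTR2, XORs a little-endian block counter into the IV instead of incrementing a big-endian counter; that is what the XOR dance around the fn() call implements. A sketch (block_encrypt stands in for the underlying cipher; blocks count from 1):

    #include <stdint.h>
    #include <string.h>

    #define BSIZE 16

    void block_encrypt(uint8_t dst[BSIZE], const uint8_t src[BSIZE]); /* stand-in */

    /* XCTR keystream: block i (from 1) is E(IV ^ le32(i)), the counter
     * XORed little-endian into the first four IV bytes. XORing the
     * keystream makes encrypt and decrypt the same operation. */
    static void xctr_crypt_blocks(uint8_t *dst, const uint8_t *src,
                                  unsigned int nblocks,
                                  const uint8_t iv[BSIZE])
    {
        uint8_t ctrblk[BSIZE];

        for (uint32_t i = 1; i <= nblocks; i++) {
            memcpy(ctrblk, iv, BSIZE);
            ctrblk[0] ^= (uint8_t)i;
            ctrblk[1] ^= (uint8_t)(i >> 8);
            ctrblk[2] ^= (uint8_t)(i >> 16);
            ctrblk[3] ^= (uint8_t)(i >> 24);
            block_encrypt(dst, ctrblk);      /* keystream block */
            for (int k = 0; k < BSIZE; k++)
                dst[k] ^= src[k];
            src += BSIZE;
            dst += BSIZE;
        }
    }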
authenc.c
     36: struct scatterlist dst[2];  [member]
    124: scatterwalk_map_and_copy(ahreq->result, req->dst,  [in authenc_geniv_ahash_done()]
    148: ahash_request_set_crypt(ahreq, req->dst, hash,  [in crypto_authenc_genicv()]
    157: scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen,  [in crypto_authenc_genicv()]
    186: skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,  [in crypto_authenc_copy_assoc()]
    203: struct scatterlist *src, *dst;  [in crypto_authenc_encrypt(), local]
    207: dst = src;  [in crypto_authenc_encrypt()]
    209: if (req->src != req->dst) {  [in crypto_authenc_encrypt()]
    214: dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);  [in crypto_authenc_encrypt()]
    220: skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  [in crypto_authenc_encrypt()]
    [all …]
aegis128-core.c
    147: static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,  [in crypto_aegis128_wipe_chunk(), argument]
    150: memzero_explicit(dst, size);  [in crypto_aegis128_wipe_chunk()]
    153: static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,  [in crypto_aegis128_encrypt_chunk(), argument]
    158: if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {  [in crypto_aegis128_encrypt_chunk()]
    161: (union aegis_block *)dst;  [in crypto_aegis128_encrypt_chunk()]
    177: dst += AEGIS_BLOCK_SIZE;  [in crypto_aegis128_encrypt_chunk()]
    189: memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);  [in crypto_aegis128_encrypt_chunk()]
    193: dst += AEGIS_BLOCK_SIZE;  [in crypto_aegis128_encrypt_chunk()]
    210: memcpy(dst, msg.bytes, size);  [in crypto_aegis128_encrypt_chunk()]
    214: static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,  [in crypto_aegis128_decrypt_chunk(), argument]
    [all …]
tea.c
     57: static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)  [in tea_encrypt(), argument]
     63: __le32 *out = (__le32 *)dst;  [in tea_encrypt()]
     85: static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)  [in tea_decrypt(), argument]
     91: __le32 *out = (__le32 *)dst;  [in tea_decrypt()]
    130: static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)  [in xtea_encrypt(), argument]
    136: __le32 *out = (__le32 *)dst;  [in xtea_encrypt()]
    151: static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)  [in xtea_decrypt(), argument]
    156: __le32 *out = (__le32 *)dst;  [in xtea_decrypt()]
    174: static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)  [in xeta_encrypt(), argument]
    180: __le32 *out = (__le32 *)dst;  [in xeta_encrypt()]
    [all …]
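tea.c casts src/dst to __le32 because TEA operates on two little-endian 32-bit words; the same file also carries XTEA and the corrected XETA variant. The classic 32-cycle TEA encryption in portable C (byte-order conversion, which the kernel does with le32_to_cpu/cpu_to_le32, is left out, and the key is taken as four words):

    #include <stdint.h>

    #define TEA_DELTA  0x9e3779b9u
    #define TEA_ROUNDS 32

    /* TEA encryption of one 64-bit block v[0..1] under key k[0..3]. */
    static void tea_encrypt_block(uint32_t v[2], const uint32_t k[4])
    {
        uint32_t y = v[0], z = v[1], sum = 0;

        for (int i = 0; i < TEA_ROUNDS; i++) {
            sum += TEA_DELTA;
            y += ((z << 4) + k[0]) ^ (z + sum) ^ ((z >> 5) + k[1]);
            z += ((y << 4) + k[2]) ^ (y + sum) ^ ((y >> 5) + k[3]);
        }
        v[0] = y;
        v[1] = z;
    }

Decryption runs the same mixing in reverse with sum starting at TEA_DELTA * TEA_ROUNDS.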
compress.c
     14: u8 *dst, unsigned int *dlen)  [in crypto_comp_compress(), argument]
     18: return tfm->__crt_alg->cra_compress.coa_compress(tfm, src, slen, dst,  [in crypto_comp_compress()]
     25: u8 *dst, unsigned int *dlen)  [in crypto_comp_decompress(), argument]
     29: return tfm->__crt_alg->cra_compress.coa_decompress(tfm, src, slen, dst,  [in crypto_comp_decompress()]