
Searched refs:src (Results 1 – 25 of 51) sorted by relevance


/crypto/
cbc.c:50 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_segment() local
55 crypto_xor(iv, src, bsize); in crypto_cbc_encrypt_segment()
59 src += bsize; in crypto_cbc_encrypt_segment()
74 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_inplace() local
78 crypto_xor(src, iv, bsize); in crypto_cbc_encrypt_inplace()
79 fn(crypto_cipher_tfm(tfm), src, src); in crypto_cbc_encrypt_inplace()
80 iv = src; in crypto_cbc_encrypt_inplace()
82 src += bsize; in crypto_cbc_encrypt_inplace()
91 struct scatterlist *dst, struct scatterlist *src, in crypto_cbc_encrypt() argument
100 blkcipher_walk_init(&walk, dst, src, nbytes); in crypto_cbc_encrypt()
[all …]
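For orientation, here is a minimal userspace sketch of the CBC chaining the cbc.c hits above implement: each plaintext block is XOR-ed into the running IV, encrypted, and the resulting ciphertext becomes the next IV. toy_encrypt() and xor_block() are hypothetical stand-ins for the kernel's cipher callback and crypto_xor(); this is an illustration, not the kernel API.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define BSIZE 8

    /* Hypothetical block cipher: XOR with a fixed key (illustration only). */
    static void toy_encrypt(uint8_t *dst, const uint8_t *src)
    {
        static const uint8_t key[BSIZE] = { 1, 2, 3, 4, 5, 6, 7, 8 };
        for (int i = 0; i < BSIZE; i++)
            dst[i] = src[i] ^ key[i];
    }

    /* Equivalent of crypto_xor(). */
    static void xor_block(uint8_t *a, const uint8_t *b, size_t n)
    {
        while (n--)
            *a++ ^= *b++;
    }

    /* C[i] = E(P[i] ^ C[i-1]); mirrors crypto_cbc_encrypt_segment(). */
    static void cbc_encrypt(uint8_t *dst, const uint8_t *src,
                            size_t nbytes, uint8_t *iv)
    {
        while (nbytes >= BSIZE) {
            xor_block(iv, src, BSIZE);  /* iv ^= P[i]        */
            toy_encrypt(dst, iv);       /* C[i] = E(iv)      */
            memcpy(iv, dst, BSIZE);     /* chain: iv = C[i]  */
            src += BSIZE;
            dst += BSIZE;
            nbytes -= BSIZE;
        }
    }

    int main(void)
    {
        uint8_t iv[BSIZE] = { 0 }, out[16];

        cbc_encrypt(out, (const uint8_t *)"sixteen byte msg", 16, iv);
        for (int i = 0; i < 16; i++)
            printf("%02x", out[i]);
        printf("\n");
        return 0;
    }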
pcbc.c:53 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_segment() local
58 crypto_xor(iv, src, bsize); in crypto_pcbc_encrypt_segment()
61 crypto_xor(iv, src, bsize); in crypto_pcbc_encrypt_segment()
63 src += bsize; in crypto_pcbc_encrypt_segment()
78 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_inplace() local
83 memcpy(tmpbuf, src, bsize); in crypto_pcbc_encrypt_inplace()
84 crypto_xor(iv, src, bsize); in crypto_pcbc_encrypt_inplace()
85 fn(crypto_cipher_tfm(tfm), src, iv); in crypto_pcbc_encrypt_inplace()
87 crypto_xor(iv, src, bsize); in crypto_pcbc_encrypt_inplace()
89 src += bsize; in crypto_pcbc_encrypt_inplace()
[all …]
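The pcbc.c hits differ from CBC only in how the IV is chained: the next IV is P[i] xor C[i], which is why crypto_xor(iv, src, bsize) appears twice per block in the snippet. A sketch of that round, reusing toy_encrypt() and xor_block() from the CBC example above (same caveats apply):

    /* PCBC: C[i] = E(P[i] ^ IV[i]); IV[i+1] = P[i] ^ C[i]. */
    static void pcbc_encrypt(uint8_t *dst, const uint8_t *src,
                             size_t nbytes, uint8_t *iv)
    {
        while (nbytes >= BSIZE) {
            xor_block(iv, src, BSIZE);  /* iv ^= P[i]          */
            toy_encrypt(dst, iv);       /* C[i] = E(iv)        */
            memcpy(iv, dst, BSIZE);     /* IV[i+1] = C[i] ...  */
            xor_block(iv, src, BSIZE);  /* ... ^ P[i]          */
            src += BSIZE;
            dst += BSIZE;
            nbytes -= BSIZE;
        }
    }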
cipher.c:66 u8 *dst, const u8 *src) in cipher_crypt_unaligned() argument
73 memcpy(tmp, src, size); in cipher_crypt_unaligned()
79 u8 *dst, const u8 *src) in cipher_encrypt_unaligned() argument
84 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { in cipher_encrypt_unaligned()
85 cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src); in cipher_encrypt_unaligned()
89 cipher->cia_encrypt(tfm, dst, src); in cipher_encrypt_unaligned()
93 u8 *dst, const u8 *src) in cipher_decrypt_unaligned() argument
98 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { in cipher_decrypt_unaligned()
99 cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src); in cipher_decrypt_unaligned()
103 cipher->cia_decrypt(tfm, dst, src); in cipher_decrypt_unaligned()
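The cipher.c hits show the alignment fast path: the two pointers are OR-ed together so a single mask test catches misalignment in either buffer. A self-contained sketch of the same bounce-buffer pattern; crypt_one() is a hypothetical one-block cipher, not cia_encrypt:

    #include <stdint.h>
    #include <string.h>

    #define BLKSIZE 16

    /* Hypothetical one-block cipher (illustration only). */
    static void crypt_one(uint8_t *dst, const uint8_t *src)
    {
        for (int i = 0; i < BLKSIZE; i++)
            dst[i] = src[i] ^ 0x5a;
    }

    /* Mirror of cipher_encrypt_unaligned(): one mask test covers both
     * pointers; misaligned I/O bounces through an aligned temporary. */
    static void crypt_maybe_unaligned(uint8_t *dst, const uint8_t *src,
                                      unsigned long alignmask)
    {
        if (((uintptr_t)dst | (uintptr_t)src) & alignmask) {
            uint8_t tmp[BLKSIZE] __attribute__((aligned(16))); /* GCC/Clang */

            memcpy(tmp, src, BLKSIZE);  /* slow path: copy in,  */
            crypt_one(tmp, tmp);        /* crypt in place,      */
            memcpy(dst, tmp, BLKSIZE);  /* copy back out        */
            return;
        }
        crypt_one(dst, src);            /* fast path            */
    }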
xts.c:86 static inline void xts_round(struct sinfo *s, void *dst, const void *src) in xts_round() argument
88 be128_xor(dst, s->t, src); /* PP <- T xor P */ in xts_round()
115 wsrc = w->src.virt.addr; in crypt()
140 wsrc = w->src.virt.addr; in crypt()
148 struct scatterlist *src, unsigned int nbytes) in encrypt() argument
153 blkcipher_walk_init(&w, dst, src, nbytes); in encrypt()
159 struct scatterlist *src, unsigned int nbytes) in decrypt() argument
164 blkcipher_walk_init(&w, dst, src, nbytes); in decrypt()
177 be128 *src, *dst, *t; in xts_crypt() local
191 src = (be128 *)walk.src.virt.addr; in xts_crypt()
[all …]
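xts_round()'s comment "PP <- T xor P" is the first half of XTS's xor-encrypt-xor construction; between blocks the tweak T is multiplied by x in GF(2^128). A sketch under the standard little-endian tweak convention; toy_encrypt16() is a hypothetical block cipher stand-in:

    #include <stdint.h>

    #define XTS_BLK 16

    /* Hypothetical 16-byte block cipher (illustration only). */
    static void toy_encrypt16(uint8_t *dst, const uint8_t *src)
    {
        for (int i = 0; i < XTS_BLK; i++)
            dst[i] = src[i] ^ 0xa5;
    }

    /* Multiply the tweak by x in GF(2^128), little-endian convention:
     * the gf128mul step that advances T between blocks. */
    static void gf128_mul_x_ble(uint8_t t[XTS_BLK])
    {
        uint8_t carry = t[XTS_BLK - 1] >> 7;

        for (int i = XTS_BLK - 1; i > 0; i--)
            t[i] = (uint8_t)((t[i] << 1) | (t[i - 1] >> 7));
        t[0] = (uint8_t)((t[0] << 1) ^ (carry ? 0x87 : 0));
    }

    /* One XTS block, as in xts_round(): C = T xor E(T xor P). */
    static void xts_one_block(uint8_t *dst, const uint8_t *src,
                              uint8_t t[XTS_BLK])
    {
        uint8_t pp[XTS_BLK];

        for (int i = 0; i < XTS_BLK; i++)
            pp[i] = src[i] ^ t[i];   /* PP <- T xor P  */
        toy_encrypt16(dst, pp);      /* CC <- E(PP)    */
        for (int i = 0; i < XTS_BLK; i++)
            dst[i] ^= t[i];          /* C  <- T xor CC */
        gf128_mul_x_ble(t);          /* next tweak     */
    }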
842.c:77 static int nx842_crypto_compress(struct crypto_tfm *tfm, const u8 *src, in nx842_crypto_compress() argument
94 err = nx842_compress(src, slen, dst, &tmp_len, ctx->nx842_wmem); in nx842_crypto_compress()
110 err = lzo1x_1_compress(src, slen, dst, &lzodlen, ctx->nx842_wmem); in nx842_crypto_compress()
119 static int nx842_crypto_decompress(struct crypto_tfm *tfm, const u8 *src, in nx842_crypto_decompress() argument
129 hdr = (struct nx842_crypto_header *)src; in nx842_crypto_decompress()
134 src += sizeof(struct nx842_crypto_header); in nx842_crypto_decompress()
138 err = nx842_decompress(src, slen, dst, &tmp_len, in nx842_crypto_decompress()
145 err = lzo1x_decompress_safe(src, slen, dst, &lzodlen); in nx842_crypto_decompress()
crypto_null.c:32 static int null_compress(struct crypto_tfm *tfm, const u8 *src, in null_compress() argument
37 memcpy(dst, src, slen); in null_compress()
72 static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) in null_crypt() argument
74 memcpy(dst, src, NULL_BLOCK_SIZE); in null_crypt()
79 struct scatterlist *src, unsigned int nbytes) in skcipher_null_crypt() argument
84 blkcipher_walk_init(&walk, dst, src, nbytes); in skcipher_null_crypt()
88 if (walk.src.virt.addr != walk.dst.virt.addr) in skcipher_null_crypt()
89 memcpy(walk.dst.virt.addr, walk.src.virt.addr, in skcipher_null_crypt()
cts.c:76 struct scatterlist *src, in cts_cbc_encrypt() argument
96 scatterwalk_map_and_copy(s, src, offset, nbytes, 0); in cts_cbc_encrypt()
126 struct scatterlist *dst, struct scatterlist *src, in crypto_cts_encrypt() argument
141 err = crypto_blkcipher_encrypt_iv(&lcldesc, dst, src, bsize); in crypto_cts_encrypt()
143 err = cts_cbc_encrypt(ctx, desc, dst, src, 0, nbytes); in crypto_cts_encrypt()
146 err = crypto_blkcipher_encrypt_iv(&lcldesc, dst, src, in crypto_cts_encrypt()
150 err = cts_cbc_encrypt(ctx, desc, dst, src, in crypto_cts_encrypt()
162 struct scatterlist *src, in cts_cbc_decrypt() argument
181 scatterwalk_map_and_copy(s, src, offset, nbytes, 0); in cts_cbc_decrypt()
220 struct scatterlist *dst, struct scatterlist *src, in crypto_cts_decrypt() argument
[all …]
lrw.c:112 static inline void lrw_round(struct sinfo *s, void *dst, const void *src) in lrw_round() argument
114 be128_xor(dst, &s->t, src); /* PP <- T xor P */ in lrw_round()
157 wsrc = w->src.virt.addr; in crypt()
188 wsrc = w->src.virt.addr; in crypt()
196 struct scatterlist *src, unsigned int nbytes) in encrypt() argument
201 blkcipher_walk_init(&w, dst, src, nbytes); in encrypt()
207 struct scatterlist *src, unsigned int nbytes) in decrypt() argument
212 blkcipher_walk_init(&w, dst, src, nbytes); in decrypt()
226 be128 *iv, *src, *dst, *t; in lrw_crypt() local
240 src = (be128 *)walk.src.virt.addr; in lrw_crypt()
[all …]
tea.c:62 static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) in tea_encrypt() argument
67 const __le32 *in = (const __le32 *)src; in tea_encrypt()
90 static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) in tea_decrypt() argument
95 const __le32 *in = (const __le32 *)src; in tea_decrypt()
135 static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) in xtea_encrypt() argument
140 const __le32 *in = (const __le32 *)src; in xtea_encrypt()
156 static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) in xtea_decrypt() argument
160 const __le32 *in = (const __le32 *)src; in xtea_decrypt()
179 static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) in xeta_encrypt() argument
184 const __le32 *in = (const __le32 *)src; in xeta_encrypt()
[all …]
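tea.c casts its input through __le32 because the TEA reference algorithm works on 32-bit words. For context, the well-known TEA round structure (the published reference algorithm, not a copy of the kernel source):

    #include <stdint.h>

    #define TEA_DELTA  0x9e3779b9u
    #define TEA_ROUNDS 32

    /* Reference TEA encryption; the kernel wrapper additionally handles
     * the __le32 loads and stores shown in the snippet above. */
    static void tea_encrypt_block(uint32_t v[2], const uint32_t k[4])
    {
        uint32_t y = v[0], z = v[1], sum = 0;

        for (int i = 0; i < TEA_ROUNDS; i++) {
            sum += TEA_DELTA;
            y += ((z << 4) + k[0]) ^ (z + sum) ^ ((z >> 5) + k[1]);
            z += ((y << 4) + k[2]) ^ (y + sum) ^ ((y >> 5) + k[3]);
        }
        v[0] = y;
        v[1] = z;
    }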
seed.c:371 const __be32 *src = (const __be32 *)in; in seed_encrypt() local
376 x1 = be32_to_cpu(src[0]); in seed_encrypt()
377 x2 = be32_to_cpu(src[1]); in seed_encrypt()
378 x3 = be32_to_cpu(src[2]); in seed_encrypt()
379 x4 = be32_to_cpu(src[3]); in seed_encrypt()
409 const __be32 *src = (const __be32 *)in; in seed_decrypt() local
414 x1 = be32_to_cpu(src[0]); in seed_decrypt()
415 x2 = be32_to_cpu(src[1]); in seed_decrypt()
416 x3 = be32_to_cpu(src[2]); in seed_decrypt()
417 x4 = be32_to_cpu(src[3]); in seed_decrypt()
ctr.c:63 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_final() local
68 crypto_xor(keystream, src, nbytes); in crypto_ctr_crypt_final()
81 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_segment() local
88 crypto_xor(dst, src, bsize); in crypto_ctr_crypt_segment()
93 src += bsize; in crypto_ctr_crypt_segment()
109 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_inplace() local
116 crypto_xor(src, keystream, bsize); in crypto_ctr_crypt_inplace()
121 src += bsize; in crypto_ctr_crypt_inplace()
128 struct scatterlist *dst, struct scatterlist *src, in crypto_ctr_crypt() argument
138 blkcipher_walk_init(&walk, dst, src, nbytes); in crypto_ctr_crypt()
[all …]
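The ctr.c hits split into a block loop and a _final step because CTR is a pure stream construction: the keystream E(ctr) can be truncated to any tail length. A sketch folding both cases into one loop; toy_encrypt16() is again a hypothetical block cipher:

    #include <stdint.h>
    #include <stddef.h>

    #define BSIZE 16

    /* Hypothetical block cipher (illustration only). */
    static void toy_encrypt16(uint8_t *dst, const uint8_t *src)
    {
        for (int i = 0; i < BSIZE; i++)
            dst[i] = src[i] ^ 0xa5;
    }

    /* Big-endian counter increment, like crypto_inc(). */
    static void ctr_inc(uint8_t ctr[BSIZE])
    {
        for (int i = BSIZE - 1; i >= 0; i--)
            if (++ctr[i] != 0)
                break;
    }

    /* dst = src ^ E(ctr); the final partial block just XORs fewer
     * bytes, which is what crypto_ctr_crypt_final() does. */
    static void ctr_crypt(uint8_t *dst, const uint8_t *src, size_t nbytes,
                          uint8_t ctr[BSIZE])
    {
        uint8_t keystream[BSIZE];

        while (nbytes) {
            size_t n = nbytes < BSIZE ? nbytes : BSIZE;

            toy_encrypt16(keystream, ctr);
            for (size_t i = 0; i < n; i++)
                dst[i] = src[i] ^ keystream[i];
            ctr_inc(ctr);
            src += n;
            dst += n;
            nbytes -= n;
        }
    }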
sha1_generic.c:44 const u8 *src; in crypto_sha1_update() local
49 src = data; in crypto_sha1_update()
58 src = sctx->buffer; in crypto_sha1_update()
62 sha_transform(sctx->state, src, temp); in crypto_sha1_update()
64 src = data + done; in crypto_sha1_update()
70 memcpy(sctx->buffer + partial, src, len - done); in crypto_sha1_update()
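crypto_sha1_update() is an instance of the standard partial-block buffering pattern: top up a pending buffer, compress full blocks straight from the input, and stash the tail. A generic sketch; struct hctx and toy_transform() are hypothetical, not the kernel's sha1_state API:

    #include <stdint.h>
    #include <string.h>

    struct hctx {
        uint32_t state[5];
        uint64_t count;
        uint8_t buffer[64];
    };

    /* Stand-in for sha_transform(): any 64-byte compression step. */
    static void toy_transform(uint32_t *state, const uint8_t *block)
    {
        for (int i = 0; i < 64; i++)
            state[i % 5] ^= block[i];
    }

    static void hash_update(struct hctx *ctx, const uint8_t *data, size_t len)
    {
        size_t partial = ctx->count % 64;
        size_t done = 0;
        const uint8_t *src = data;

        ctx->count += len;
        if (partial + len >= 64) {
            if (partial) {              /* top up the pending block */
                done = 64 - partial;
                memcpy(ctx->buffer + partial, data, done);
                toy_transform(ctx->state, ctx->buffer);
                partial = 0;
            }
            while (done + 64 <= len) {  /* full blocks from input */
                toy_transform(ctx->state, data + done);
                done += 64;
            }
            src = data + done;
        }
        memcpy(ctx->buffer + partial, src, len - done); /* stash the tail */
    }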
compress.c:21 const u8 *src, unsigned int slen, in crypto_compress() argument
24 return tfm->__crt_alg->cra_compress.coa_compress(tfm, src, slen, dst, in crypto_compress()
29 const u8 *src, unsigned int slen, in crypto_decompress() argument
32 return tfm->__crt_alg->cra_compress.coa_decompress(tfm, src, slen, dst, in crypto_decompress()
michael_mic.c:66 const __le32 *src; in michael_update() local
80 src = (const __le32 *)mctx->pending; in michael_update()
81 mctx->l ^= le32_to_cpup(src); in michael_update()
86 src = (const __le32 *)data; in michael_update()
89 mctx->l ^= le32_to_cpup(src++); in michael_update()
96 memcpy(mctx->pending, src, len); in michael_update()
salsa20_generic.c:140 const u8 *src, unsigned int bytes) in salsa20_encrypt_bytes() argument
144 if (dst != src) in salsa20_encrypt_bytes()
145 memcpy(dst, src, bytes); in salsa20_encrypt_bytes()
178 struct scatterlist *dst, struct scatterlist *src, in encrypt() argument
186 blkcipher_walk_init(&walk, dst, src, nbytes); in encrypt()
194 walk.src.virt.addr, nbytes); in encrypt()
200 walk.src.virt.addr, in encrypt()
207 walk.src.virt.addr, walk.nbytes); in encrypt()
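salsa20_encrypt_bytes() copies src to dst once when the buffers differ, then XORs the keystream into dst, so in-place and out-of-place requests share a single code path. A sketch of that pattern; toy_keystream() is a deliberately fake generator, not the Salsa20 core:

    #include <stdint.h>
    #include <string.h>

    /* Hypothetical keystream generator (NOT a real cipher). */
    static void toy_keystream(uint8_t *ks, size_t n, uint64_t pos)
    {
        for (size_t i = 0; i < n; i++)
            ks[i] = (uint8_t)(pos + i);
    }

    /* Copy once if needed, then XOR the keystream in place. */
    static void stream_crypt(uint8_t *dst, const uint8_t *src, size_t n)
    {
        uint8_t ks[64];
        size_t done = 0;

        if (dst != src)
            memcpy(dst, src, n);
        while (done < n) {
            size_t chunk = (n - done) < sizeof(ks) ? (n - done) : sizeof(ks);

            toy_keystream(ks, chunk, done);
            for (size_t i = 0; i < chunk; i++)
                dst[done + i] ^= ks[i];
            done += chunk;
        }
    }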
khazad.c:805 const __be64 *src = (const __be64 *)plaintext; in khazad_crypt() local
810 state = be64_to_cpu(*src) ^ roundKey[0]; in khazad_crypt()
837 static void khazad_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) in khazad_encrypt() argument
840 khazad_crypt(ctx->E, dst, src); in khazad_encrypt()
843 static void khazad_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) in khazad_decrypt() argument
846 khazad_crypt(ctx->D, dst, src); in khazad_decrypt()
lzo.c:47 static int lzo_compress(struct crypto_tfm *tfm, const u8 *src, in lzo_compress() argument
54 err = lzo1x_1_compress(src, slen, dst, &tmp_len, ctx->lzo_comp_mem); in lzo_compress()
63 static int lzo_decompress(struct crypto_tfm *tfm, const u8 *src, in lzo_decompress() argument
69 err = lzo1x_decompress_safe(src, slen, dst, &tmp_len); in lzo_decompress()
gcm.c:62 struct scatterlist *src; member
69 struct scatterlist src[2]; member
202 sg_init_table(pctx->src, 2); in crypto_gcm_init_crypt()
203 sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag)); in crypto_gcm_init_crypt()
204 scatterwalk_sg_chain(pctx->src, 2, req->src); in crypto_gcm_init_crypt()
206 dst = pctx->src; in crypto_gcm_init_crypt()
207 if (req->src != req->dst) { in crypto_gcm_init_crypt()
215 ablkcipher_request_set_crypt(ablk_req, pctx->src, dst, in crypto_gcm_init_crypt()
232 struct scatterlist *src, in gcm_hash_update() argument
239 ahash_request_set_crypt(ahreq, src, NULL, len); in gcm_hash_update()
[all …]
cast6_generic.c:186 const __be32 *src = (const __be32 *)inbuf; in __cast6_encrypt() local
192 block[0] = be32_to_cpu(src[0]); in __cast6_encrypt()
193 block[1] = be32_to_cpu(src[1]); in __cast6_encrypt()
194 block[2] = be32_to_cpu(src[2]); in __cast6_encrypt()
195 block[3] = be32_to_cpu(src[3]); in __cast6_encrypt()
224 const __be32 *src = (const __be32 *)inbuf; in __cast6_decrypt() local
230 block[0] = be32_to_cpu(src[0]); in __cast6_decrypt()
231 block[1] = be32_to_cpu(src[1]); in __cast6_decrypt()
232 block[2] = be32_to_cpu(src[2]); in __cast6_decrypt()
233 block[3] = be32_to_cpu(src[3]); in __cast6_decrypt()
ccm.c:45 struct scatterlist src[2]; member
320 err = crypto_ccm_auth(req, req->src, cryptlen); in crypto_ccm_encrypt()
329 sg_init_table(pctx->src, 2); in crypto_ccm_encrypt()
330 sg_set_buf(pctx->src, odata, 16); in crypto_ccm_encrypt()
331 scatterwalk_sg_chain(pctx->src, 2, req->src); in crypto_ccm_encrypt()
333 dst = pctx->src; in crypto_ccm_encrypt()
334 if (req->src != req->dst) { in crypto_ccm_encrypt()
344 ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv); in crypto_ccm_encrypt()
396 scatterwalk_map_and_copy(authtag, req->src, cryptlen, authsize, 0); in crypto_ccm_decrypt()
400 sg_init_table(pctx->src, 2); in crypto_ccm_decrypt()
[all …]
ghash-generic.c:64 const u8 *src, unsigned int srclen) in ghash_update() argument
81 *pos++ ^= *src++; in ghash_update()
88 crypto_xor(dst, src, GHASH_BLOCK_SIZE); in ghash_update()
90 src += GHASH_BLOCK_SIZE; in ghash_update()
97 *dst++ ^= *src++; in ghash_update()
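ghash_update() has a three-phase shape: finish a pending 16-byte block byte-by-byte, absorb whole blocks with crypto_xor(), and leave any tail pending; after each full block the accumulator is multiplied by the hash key H. A structural sketch with gf128_mul() left as an empty stub (the real GF(2^128) multiply is GHASH's substance, but it is beside the point here):

    #include <stdint.h>
    #include <stddef.h>

    #define GHASH_BLOCK_SIZE 16

    struct ghash_sketch_ctx {
        uint8_t buf[GHASH_BLOCK_SIZE];  /* running accumulator            */
        unsigned int bytes;             /* space left in a pending block  */
    };

    /* Stub for the GF(2^128) multiply by the hash key H. */
    static void gf128_mul(uint8_t acc[GHASH_BLOCK_SIZE])
    {
        (void)acc;
    }

    static void ghash_update_sketch(struct ghash_sketch_ctx *ctx,
                                    const uint8_t *src, size_t srclen)
    {
        uint8_t *dst = ctx->buf;

        if (ctx->bytes) {               /* finish a pending block */
            size_t n = srclen < ctx->bytes ? srclen : ctx->bytes;
            uint8_t *pos = dst + (GHASH_BLOCK_SIZE - ctx->bytes);

            ctx->bytes -= n;
            srclen -= n;
            while (n--)
                *pos++ ^= *src++;
            if (!ctx->bytes)
                gf128_mul(dst);
        }
        while (srclen >= GHASH_BLOCK_SIZE) { /* whole blocks */
            for (int i = 0; i < GHASH_BLOCK_SIZE; i++)
                dst[i] ^= src[i];       /* crypto_xor() equivalent */
            gf128_mul(dst);
            src += GHASH_BLOCK_SIZE;
            srclen -= GHASH_BLOCK_SIZE;
        }
        if (srclen) {                   /* stash the tail */
            ctx->bytes = GHASH_BLOCK_SIZE - srclen;
            while (srclen--)
                *dst++ ^= *src++;
        }
    }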
eseqiv.c:31 struct scatterlist src[2]; member
92 osrc = req->creq.src; in eseqiv_givencrypt()
111 sg_init_table(reqctx->src, 2); in eseqiv_givencrypt()
112 sg_set_buf(reqctx->src, giv, ivsize); in eseqiv_givencrypt()
113 scatterwalk_crypto_chain(reqctx->src, osrc, vsrc == giv + ivsize, 2); in eseqiv_givencrypt()
115 dst = reqctx->src; in eseqiv_givencrypt()
124 ablkcipher_request_set_crypt(subreq, reqctx->src, dst, in eseqiv_givencrypt()
blkcipher.c:46 walk->src.virt.addr = scatterwalk_map(&walk->in); in blkcipher_map_src()
56 scatterwalk_unmap(walk->src.virt.addr); in blkcipher_unmap_src()
171 walk->src.virt.addr = blkcipher_get_spot(walk->dst.virt.addr + in blkcipher_next_slow()
174 scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0); in blkcipher_next_slow()
187 memcpy(tmp, walk->src.virt.addr, walk->nbytes); in blkcipher_next_copy()
190 walk->src.virt.addr = tmp; in blkcipher_next_copy()
201 walk->src.phys.page = scatterwalk_page(&walk->in); in blkcipher_next_fast()
202 walk->src.phys.offset = offset_in_page(walk->in.offset); in blkcipher_next_fast()
209 diff = walk->src.phys.offset - walk->dst.phys.offset; in blkcipher_next_fast()
210 diff |= walk->src.virt.page - walk->dst.virt.page; in blkcipher_next_fast()
[all …]
aes_generic.c:1332 const __le32 *src = (const __le32 *)in; in aes_encrypt() local
1338 b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0]; in aes_encrypt()
1339 b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1]; in aes_encrypt()
1340 b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2]; in aes_encrypt()
1341 b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3]; in aes_encrypt()
1404 const __le32 *src = (const __le32 *)in; in aes_decrypt() local
1410 b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0]; in aes_decrypt()
1411 b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1]; in aes_decrypt()
1412 b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2]; in aes_decrypt()
1413 b0[3] = le32_to_cpu(src[3]) ^ ctx->key_dec[3]; in aes_decrypt()
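aes_generic.c reads the block through __le32 and whitens it with round key 0 before the rounds proper. A sketch of just that load-and-whiten step; load_le32() spells out what the le32_to_cpu() calls above resolve to on a big-endian host:

    #include <stdint.h>

    /* Assemble a 32-bit word from little-endian bytes: an explicit
     * le32_to_cpu() for a __le32 load. */
    static uint32_t load_le32(const uint8_t *p)
    {
        return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
               (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
    }

    /* b0[i] = src[i] ^ key[i]: the whitening step from aes_encrypt()
     * and aes_decrypt(); key is key_enc or key_dec respectively. */
    static void aes_load_and_whiten(uint32_t b0[4], const uint8_t *in,
                                    const uint32_t key[4])
    {
        for (int i = 0; i < 4; i++)
            b0[i] = load_le32(in + 4 * i) ^ key[i];
    }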
/crypto/async_tx/
async_memcpy.c:45 async_memcpy(struct page *dest, struct page *src, unsigned int dest_offset, in async_memcpy() argument
50 &dest, 1, &src, 1, len); in async_memcpy()
65 dma_src = dma_map_page(device->dev, src, src_offset, len, in async_memcpy()
89 src_buf = kmap_atomic(src) + src_offset; in async_memcpy()
