/crypto/ |
D | ecb.c |
    16  struct crypto_cipher *cipher,  in crypto_ecb_crypt() argument
    19  const unsigned int bsize = crypto_cipher_blocksize(cipher);  in crypto_ecb_crypt()
    31  fn(crypto_cipher_tfm(cipher), dst, src);  in crypto_ecb_crypt()
    46  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_ecb_encrypt() local
    48  return crypto_ecb_crypt(req, cipher,  in crypto_ecb_encrypt()
    49  crypto_cipher_alg(cipher)->cia_encrypt);  in crypto_ecb_encrypt()
    55  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_ecb_decrypt() local
    57  return crypto_ecb_crypt(req, cipher,  in crypto_ecb_decrypt()
    58  crypto_cipher_alg(cipher)->cia_decrypt);  in crypto_ecb_decrypt()
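Read in sequence, the ecb.c hits trace the whole mode: a shared crypto_ecb_crypt() walks the request block by block, and the encrypt/decrypt entry points differ only in which cia_* hook they pass in. Below is a hedged reconstruction of how those fragments fit together, pieced together from the hit lines rather than copied verbatim from crypto/ecb.c:

static int crypto_ecb_crypt(struct skcipher_request *req,
                            struct crypto_cipher *cipher,
                            void (*fn)(struct crypto_tfm *, u8 *, const u8 *))
{
        const unsigned int bsize = crypto_cipher_blocksize(cipher);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) != 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                do {
                        /* line 31: each block is independent; no IV, no chaining */
                        fn(crypto_cipher_tfm(cipher), dst, src);
                        src += bsize;
                        dst += bsize;
                } while ((nbytes -= bsize) >= bsize);

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int crypto_ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);

        /* lines 48-49; crypto_ecb_decrypt() is identical but passes cia_decrypt */
        return crypto_ecb_crypt(req, cipher,
                                crypto_cipher_alg(cipher)->cia_encrypt);
}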
|
D | cipher.c |
    78  struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;  in cipher_encrypt_unaligned() local
    81  cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);  in cipher_encrypt_unaligned()
    85  cipher->cia_encrypt(tfm, dst, src);  in cipher_encrypt_unaligned()
    92  struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;  in cipher_decrypt_unaligned() local
    95  cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);  in cipher_decrypt_unaligned()
    99  cipher->cia_decrypt(tfm, dst, src);  in cipher_decrypt_unaligned()
    105  struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;  in crypto_init_cipher_ops() local
    109  cipher_encrypt_unaligned : cipher->cia_encrypt;  in crypto_init_cipher_ops()
    111  cipher_decrypt_unaligned : cipher->cia_decrypt;  in crypto_init_cipher_ops()
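The cipher.c hits all revolve around one dispatch decision: crypto_init_cipher_ops() (lines 105-111) installs the *_unaligned wrappers so that misaligned buffers can be bounced through aligned scratch space before reaching the cia_* hooks. A hedged sketch of the encrypt side, reconstructed from the lines above:

static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
                                     u8 *dst, const u8 *src)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
                /* line 81: copy through an aligned buffer, then crypt */
                cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
                return;
        }

        /* line 85: buffers happen to be aligned, call straight through */
        cipher->cia_encrypt(tfm, dst, src);
}

The decrypt side (lines 92-99) is the mirror image with cia_decrypt.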
|
D | Kconfig |
    298  Support for the AEAD wrapper using the ChaCha20 stream cipher combined
    353  This block cipher algorithm is required for IPSec.
    361  This block cipher algorithm is required for TPM2 Cryptography.
    370  This block cipher algorithm is required for IPSec.
    393  This is the simplest block cipher algorithm. It simply encrypts
    403  narrow block cipher mode for dm-crypt. Use it with cipher
    406  rest is used to tie each cipher block to its logical position.
    413  OFB: the Output Feedback mode makes a block cipher into a synchronous
    414  stream cipher. It generates keystream blocks, which are then XORed
    426  This block cipher algorithm is required for RxRPC.
    [all …]
|
D | ctr.c |
    101  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_ctr_crypt() local
    102  const unsigned int bsize = crypto_cipher_blocksize(cipher);  in crypto_ctr_crypt()
    111  nbytes = crypto_ctr_crypt_inplace(&walk, cipher);  in crypto_ctr_crypt()
    113  nbytes = crypto_ctr_crypt_segment(&walk, cipher);  in crypto_ctr_crypt()
    119  crypto_ctr_crypt_final(&walk, cipher);  in crypto_ctr_crypt()
    228  struct crypto_skcipher *cipher;  in crypto_rfc3686_init_tfm() local
    232  cipher = crypto_spawn_skcipher(spawn);  in crypto_rfc3686_init_tfm()
    233  if (IS_ERR(cipher))  in crypto_rfc3686_init_tfm()
    234  return PTR_ERR(cipher);  in crypto_rfc3686_init_tfm()
    236  ctx->child = cipher;  in crypto_rfc3686_init_tfm()
    [all …]
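The second half of the ctr.c hits (lines 228-236) shows the template-instantiation idiom that recurs throughout this directory, in xcbc.c, cts.c, vmac.c, lrw.c, cmac.c, pcrypt.c, ccm.c and cryptd.c below: spawn the inner transform, propagate any error, and park the child in the instance context. A hedged, abridged sketch (the real crypto_rfc3686_init_tfm() also computes a request size, omitted here):

static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
        struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
        struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *cipher;

        /* line 232: instantiate the inner skcipher this template wraps */
        cipher = crypto_spawn_skcipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        /* line 236: setkey/encrypt/decrypt will delegate to ctx->child */
        ctx->child = cipher;
        return 0;
}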
|
D | ofb.c |
    20  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_ofb_crypt() local
    21  const unsigned int bsize = crypto_cipher_blocksize(cipher);  in crypto_ofb_crypt()
    34  crypto_cipher_encrypt_one(cipher, iv, iv);  in crypto_ofb_crypt()
    44  crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);  in crypto_ofb_crypt()
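The four ofb.c hits are essentially the whole mode: the IV buffer doubles as the keystream register, re-encrypted for every block and XORed into the data (the "synchronous stream cipher" behaviour the Kconfig help text above describes). A hedged reconstruction from the hit lines:

static int crypto_ofb_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
        const unsigned int bsize = crypto_cipher_blocksize(cipher);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= bsize) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                u8 * const iv = walk.iv;
                unsigned int nbytes = walk.nbytes;

                do {
                        /* line 34: next keystream block = E_K(previous one) */
                        crypto_cipher_encrypt_one(cipher, iv, iv);
                        crypto_xor_cpy(dst, src, iv, bsize);
                        dst += bsize;
                        src += bsize;
                } while ((nbytes -= bsize) >= bsize);

                err = skcipher_walk_done(&walk, nbytes);
        }

        if (walk.nbytes) {
                /* line 44: a trailing partial block uses one more keystream block */
                crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);
                crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr,
                               walk.iv, walk.nbytes);
                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}

Because only crypto_cipher_encrypt_one() ever runs, OFB decryption is the same function as encryption.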
|
D | pcbc.c |
    66  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_pcbc_encrypt() local
    76  cipher);  in crypto_pcbc_encrypt()
    79  cipher);  in crypto_pcbc_encrypt()
    133  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_pcbc_decrypt() local
    143  cipher);  in crypto_pcbc_decrypt()
    146  cipher);  in crypto_pcbc_decrypt()
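The pcbc.c hits only show the cipher plumbing, so here is the per-block step they feed, as a hedged sketch (the helper name and signature are mine, not from crypto/pcbc.c): PCBC chains with X = P_i XOR C_i of the previous block, i.e. C_i = E_K(P_i ^ X).

static inline void pcbc_encrypt_block(struct crypto_cipher *cipher,
                                      u8 *x, u8 *dst, const u8 *src,
                                      unsigned int bsize)
{
        crypto_xor(x, src, bsize);                  /* x ^= P_i */
        crypto_cipher_encrypt_one(cipher, dst, x);  /* C_i = E_K(x) */
        memcpy(x, dst, bsize);
        crypto_xor(x, src, bsize);                  /* next x = C_i ^ P_i */
}

An in-place caller (dst == src) must first copy P_i to scratch space, which is why pcbc.c keeps separate segment-wise and in-place paths (the call sites at lines 76/79 and 143/146).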
|
D | testmgr.c |
    133  struct cipher_test_suite cipher;  member
    2859  const struct cipher_test_suite *suite = &desc->suite.cipher;  in alg_test_skcipher()
    3250  const struct cipher_test_suite *suite = &desc->suite.cipher;  in alg_test_cipher()
    3886  .cipher = __VECS(adiantum_xchacha12_aes_tv_template)
    3893  .cipher = __VECS(adiantum_xchacha20_aes_tv_template)
    4094  .cipher = __VECS(aes_cbc_tv_template)
    4100  .cipher = __VECS(anubis_cbc_tv_template)
    4106  .cipher = __VECS(bf_cbc_tv_template)
    4112  .cipher = __VECS(camellia_cbc_tv_template)
    4118  .cipher = __VECS(cast5_cbc_tv_template)
    [all …]
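The testmgr.c hits show both halves of the cipher test plumbing: a cipher_test_suite member inside the per-algorithm suite union (line 133), consumed by alg_test_skcipher()/alg_test_cipher(), and a long table binding algorithm names to vector arrays via __VECS. A representative entry, shaped after the hit lines (hedged and abridged; real entries may carry more fields):

{
        .alg = "cbc(aes)",
        .test = alg_test_skcipher,
        .fips_allowed = 1,
        .suite = {
                .cipher = __VECS(aes_cbc_tv_template)
        }
}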
|
D | keywrap.c |
    126  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_kw_decrypt() local
    166  crypto_cipher_decrypt_one(cipher, (u8 *)&block,  in crypto_kw_decrypt()
    195  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_kw_encrypt() local
    237  crypto_cipher_encrypt_one(cipher, (u8 *)&block,  in crypto_kw_encrypt()
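Both keywrap.c paths run the bare cipher over a 128-bit (A | R[i]) unit, per RFC 3394. A hedged sketch of one unwrap step matching the decrypt hit at line 166 (the helper is mine; variable names follow the RFC, and A persists in block->A between calls):

struct crypto_kw_block {
        __be64 A;
        __be64 R;
};

static void kw_unwrap_step(struct crypto_cipher *cipher,
                           struct crypto_kw_block *block,
                           __be64 *Ri, u64 t)
{
        block->A ^= cpu_to_be64(t);     /* fold the step counter into A */
        block->R = *Ri;                 /* append semiblock R[i] */
        crypto_cipher_decrypt_one(cipher, (u8 *)block, (u8 *)block);
        *Ri = block->R;                 /* write back the updated semiblock */
}

The encrypt path (line 237) is the same walk in the opposite direction with crypto_cipher_encrypt_one().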
|
D | skcipher.c |
    801  struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);  in skcipher_setkey_unaligned() local
    813  ret = cipher->setkey(tfm, alignbuffer, keylen);  in skcipher_setkey_unaligned()
    821  struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);  in skcipher_setkey() local
    825  if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {  in skcipher_setkey()
    833  err = cipher->setkey(tfm, key, keylen);  in skcipher_setkey()
    1111  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in skcipher_setkey_simple() local
    1114  crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);  in skcipher_setkey_simple()
    1115  crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &  in skcipher_setkey_simple()
    1117  err = crypto_cipher_setkey(cipher, key, keylen);  in skcipher_setkey_simple()
    1118  crypto_skcipher_set_flags(tfm, crypto_cipher_get_flags(cipher) &  in skcipher_setkey_simple()
    [all …]
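The skcipher.c hits trace the generic setkey path, and the same shape recurs in blkcipher.c and ablkcipher.c below: reject keys outside the algorithm's advertised window, and bounce misaligned key buffers through aligned scratch memory. A hedged, abridged reconstruction from the hit lines (flag bookkeeping around line 833 omitted):

static int skcipher_setkey_unaligned(struct crypto_skcipher *tfm,
                                     const u8 *key, unsigned int keylen)
{
        unsigned long alignmask = crypto_skcipher_alignmask(tfm);
        struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
        u8 *buffer, *alignbuffer;
        unsigned long absize;
        int ret;

        absize = keylen + alignmask;
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        ret = cipher->setkey(tfm, alignbuffer, keylen);     /* line 813 */
        kzfree(buffer);         /* key material: zeroed before freeing */
        return ret;
}

static int skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
{
        struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
        unsigned long alignmask = crypto_skcipher_alignmask(tfm);

        /* line 825: enforce [min_keysize, max_keysize] */
        if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
                crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        if ((unsigned long)key & alignmask)
                return skcipher_setkey_unaligned(tfm, key, keylen);

        return cipher->setkey(tfm, key, keylen);
}

skcipher_setkey_simple() (lines 1111-1118) is the variant used by the single-block-cipher templates: it forwards the key to the inner crypto_cipher and copies the request flags back and forth.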
|
D | xcbc.c |
    168  struct crypto_cipher *cipher;  in xcbc_init_tfm() local
    173  cipher = crypto_spawn_cipher(spawn);  in xcbc_init_tfm()
    174  if (IS_ERR(cipher))  in xcbc_init_tfm()
    175  return PTR_ERR(cipher);  in xcbc_init_tfm()
    177  ctx->child = cipher;  in xcbc_init_tfm()
|
D | cts.c |
    292  struct crypto_skcipher *cipher;  in crypto_cts_init_tfm() local
    297  cipher = crypto_spawn_skcipher(spawn);  in crypto_cts_init_tfm()
    298  if (IS_ERR(cipher))  in crypto_cts_init_tfm()
    299  return PTR_ERR(cipher);  in crypto_cts_init_tfm()
    301  ctx->child = cipher;  in crypto_cts_init_tfm()
    304  bsize = crypto_skcipher_blocksize(cipher);  in crypto_cts_init_tfm()
    306  crypto_skcipher_reqsize(cipher),  in crypto_cts_init_tfm()
|
D | vmac.c |
    52  struct crypto_cipher *cipher;  member
    443  err = crypto_cipher_setkey(tctx->cipher, key, keylen);  in vmac_setkey()
    450  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);  in vmac_setkey()
    460  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);  in vmac_setkey()
    471  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);  in vmac_setkey()
    589  crypto_cipher_encrypt_one(tctx->cipher, dctx->nonce.bytes,  in vmac_final()
    603  struct crypto_cipher *cipher;  in vmac_init_tfm() local
    605  cipher = crypto_spawn_cipher(spawn);  in vmac_init_tfm()
    606  if (IS_ERR(cipher))  in vmac_init_tfm()
    607  return PTR_ERR(cipher);  in vmac_init_tfm()
    [all …]
|
D | lrw.c |
    269  struct crypto_skcipher *cipher;  in init_tfm() local
    271  cipher = crypto_spawn_skcipher(spawn);  in init_tfm()
    272  if (IS_ERR(cipher))  in init_tfm()
    273  return PTR_ERR(cipher);  in init_tfm()
    275  ctx->child = cipher;  in init_tfm()
    277  crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(cipher) +  in init_tfm()
|
D | blkcipher.c |
    371  struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher;  in setkey_unaligned() local
    384  ret = cipher->setkey(tfm, alignbuffer, keylen);  in setkey_unaligned()
    392  struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher;  in setkey() local
    395  if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {  in setkey()
    403  return cipher->setkey(tfm, key, keylen);  in setkey()
    442  struct blkcipher_alg *cipher = &alg->cra_blkcipher;  in crypto_blkcipher_ctxsize() local
    446  cipher->ivsize) {  in crypto_blkcipher_ctxsize()
    448  len += cipher->ivsize;  in crypto_blkcipher_ctxsize()
|
D | cmac.c |
    202  struct crypto_cipher *cipher;  in cmac_init_tfm() local
    207  cipher = crypto_spawn_cipher(spawn);  in cmac_init_tfm()
    208  if (IS_ERR(cipher))  in cmac_init_tfm()
    209  return PTR_ERR(cipher);  in cmac_init_tfm()
    211  ctx->child = cipher;  in cmac_init_tfm()
|
D | pcrypt.c |
    182  struct crypto_aead *cipher;  in pcrypt_aead_init_tfm() local
    191  cipher = crypto_spawn_aead(&ictx->spawn);  in pcrypt_aead_init_tfm()
    193  if (IS_ERR(cipher))  in pcrypt_aead_init_tfm()
    194  return PTR_ERR(cipher);  in pcrypt_aead_init_tfm()
    196  ctx->child = cipher;  in pcrypt_aead_init_tfm()
    199  crypto_aead_reqsize(cipher));  in pcrypt_aead_init_tfm()
|
D | ablkcipher.c |
    298  struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm);  in setkey_unaligned() local
    311  ret = cipher->setkey(tfm, alignbuffer, keylen);  in setkey_unaligned()
    320  struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm);  in setkey() local
    323  if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {  in setkey()
    331  return cipher->setkey(tfm, key, keylen);  in setkey()
|
D | crypto_user_stat.c |
    56  rcipher.stat_encrypt_cnt = atomic64_read(&alg->stats.cipher.encrypt_cnt);  in crypto_report_cipher()
    57  rcipher.stat_encrypt_tlen = atomic64_read(&alg->stats.cipher.encrypt_tlen);  in crypto_report_cipher()
    58  rcipher.stat_decrypt_cnt = atomic64_read(&alg->stats.cipher.decrypt_cnt);  in crypto_report_cipher()
    59  rcipher.stat_decrypt_tlen = atomic64_read(&alg->stats.cipher.decrypt_tlen);  in crypto_report_cipher()
    60  rcipher.stat_err_cnt = atomic64_read(&alg->stats.cipher.err_cnt);  in crypto_report_cipher()
|
D | algapi.c |
    1070  atomic64_inc(&alg->stats.cipher.err_cnt);  in crypto_stats_ablkcipher_encrypt()
    1072  atomic64_inc(&alg->stats.cipher.encrypt_cnt);  in crypto_stats_ablkcipher_encrypt()
    1073  atomic64_add(nbytes, &alg->stats.cipher.encrypt_tlen);  in crypto_stats_ablkcipher_encrypt()
    1083  atomic64_inc(&alg->stats.cipher.err_cnt);  in crypto_stats_ablkcipher_decrypt()
    1085  atomic64_inc(&alg->stats.cipher.decrypt_cnt);  in crypto_stats_ablkcipher_decrypt()
    1086  atomic64_add(nbytes, &alg->stats.cipher.decrypt_tlen);  in crypto_stats_ablkcipher_decrypt()
    1269  atomic64_inc(&alg->stats.cipher.err_cnt);  in crypto_stats_skcipher_encrypt()
    1271  atomic64_inc(&alg->stats.cipher.encrypt_cnt);  in crypto_stats_skcipher_encrypt()
    1272  atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);  in crypto_stats_skcipher_encrypt()
    1282  atomic64_inc(&alg->stats.cipher.err_cnt);  in crypto_stats_skcipher_decrypt()
    [all …]
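The algapi.c hits are the writer side of the counters that crypto_report_cipher() in crypto_user_stat.c above snapshots with atomic64_read() for the netlink interface. A hedged sketch of one writer, following the pattern visible in the hit lines (reference handling abridged):

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                /* line 1269: anything but async-pending counts as an error */
                atomic64_inc(&alg->stats.cipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.cipher.encrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
        }
        crypto_alg_put(alg);    /* drop the ref taken for the stats call */
}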
|
D | cryptd.c |
    368  struct crypto_skcipher *cipher;  in cryptd_skcipher_init_tfm() local
    370  cipher = crypto_spawn_skcipher(spawn);  in cryptd_skcipher_init_tfm()
    371  if (IS_ERR(cipher))  in cryptd_skcipher_init_tfm()
    372  return PTR_ERR(cipher);  in cryptd_skcipher_init_tfm()
    374  ctx->child = (struct crypto_sync_skcipher *)cipher;  in cryptd_skcipher_init_tfm()
    832  struct crypto_aead *cipher;  in cryptd_aead_init_tfm() local
    834  cipher = crypto_spawn_aead(spawn);  in cryptd_aead_init_tfm()
    835  if (IS_ERR(cipher))  in cryptd_aead_init_tfm()
    836  return PTR_ERR(cipher);  in cryptd_aead_init_tfm()
    838  ctx->child = cipher;  in cryptd_aead_init_tfm()
    [all …]
|
D | des_generic.c |
    99  .cra_u = { .cipher = {
    113  .cra_u = { .cipher = {
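The remaining hits in des_generic.c, tea.c, blowfish_generic.c and crypto_null.c all come from the same declaration idiom: a single-block cipher publishes its hooks through the .cipher member of the cra_u union. A hedged sketch of the shape, with DES-flavoured values (abridged; fields such as cra_priority and cra_alignmask are omitted):

static struct crypto_alg des_alg = {
        .cra_name               = "des",
        .cra_driver_name        = "des-generic",
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = DES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct des_ctx),
        .cra_module             = THIS_MODULE,
        .cra_u                  = { .cipher = {
                .cia_min_keysize        = DES_KEY_SIZE,
                .cia_max_keysize        = DES_KEY_SIZE,
                .cia_setkey             = des_setkey,
                .cia_encrypt            = des_encrypt,
                .cia_decrypt            = des_decrypt } }
};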
|
D | tea.c |
    225  .cra_u = { .cipher = {
    239  .cra_u = { .cipher = {
    253  .cra_u = { .cipher = {
|
D | ccm.c |
    897  struct crypto_cipher *cipher;  in cbcmac_init_tfm() local
    902  cipher = crypto_spawn_cipher(spawn);  in cbcmac_init_tfm()
    903  if (IS_ERR(cipher))  in cbcmac_init_tfm()
    904  return PTR_ERR(cipher);  in cbcmac_init_tfm()
    906  ctx->child = cipher;  in cbcmac_init_tfm()
|
D | blowfish_generic.c |
    113  .cra_u = { .cipher = {
|
D | crypto_null.c |
    131  .cra_u = { .cipher = {
|