/crypto/
cbc.c
     25  struct crypto_cipher *cipher;  in crypto_cbc_encrypt_segment() local
     29  cipher = skcipher_cipher_simple(skcipher);  in crypto_cbc_encrypt_segment()
     30  tfm = crypto_cipher_tfm(cipher);  in crypto_cbc_encrypt_segment()
     31  fn = crypto_cipher_alg(cipher)->cia_encrypt;  in crypto_cbc_encrypt_segment()
     52  struct crypto_cipher *cipher;  in crypto_cbc_encrypt_inplace() local
     56  cipher = skcipher_cipher_simple(skcipher);  in crypto_cbc_encrypt_inplace()
     57  tfm = crypto_cipher_tfm(cipher);  in crypto_cbc_encrypt_inplace()
     58  fn = crypto_cipher_alg(cipher)->cia_encrypt;  in crypto_cbc_encrypt_inplace()
    100  struct crypto_cipher *cipher;  in crypto_cbc_decrypt_segment() local
    104  cipher = skcipher_cipher_simple(skcipher);  in crypto_cbc_decrypt_segment()
    [all …]
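The cbc.c hits all come from the per-block helpers of the generic CBC template: each helper pulls the underlying single-block cipher out of the "simple" skcipher context and calls its raw cia_encrypt callback directly. A minimal sketch of the encrypt-segment helper, reconstructed from the lines shown (a sketch of the in-file fragment, not a verbatim quote):

static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk,
                                      struct crypto_skcipher *skcipher)
{
        unsigned int bsize = crypto_skcipher_blocksize(skcipher);
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        struct crypto_cipher *cipher;
        struct crypto_tfm *tfm;
        u8 *iv = walk->iv;

        /* Fetch the single-block cipher the template was instantiated over
         * and the raw per-block encrypt callback it exposes. */
        cipher = skcipher_cipher_simple(skcipher);
        tfm = crypto_cipher_tfm(cipher);
        fn = crypto_cipher_alg(cipher)->cia_encrypt;

        do {
                crypto_xor(iv, src, bsize);     /* P_i ^ C_{i-1}           */
                fn(tfm, dst, iv);               /* C_i = E_k(P_i ^ C_{i-1}) */
                memcpy(iv, dst, bsize);         /* chain the new ciphertext */

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}

The in-place and decrypt variants seen in the other hits differ only in how they keep the chaining value around.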
ecb.c
     17  struct crypto_cipher *cipher,  in crypto_ecb_crypt() argument
     20  const unsigned int bsize = crypto_cipher_blocksize(cipher);  in crypto_ecb_crypt()
     32  fn(crypto_cipher_tfm(cipher), dst, src);  in crypto_ecb_crypt()
     47  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_ecb_encrypt() local
     49  return crypto_ecb_crypt(req, cipher,  in crypto_ecb_encrypt()
     50  crypto_cipher_alg(cipher)->cia_encrypt);  in crypto_ecb_encrypt()
     56  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_ecb_decrypt() local
     58  return crypto_ecb_crypt(req, cipher,  in crypto_ecb_decrypt()
     59  crypto_cipher_alg(cipher)->cia_decrypt);  in crypto_ecb_decrypt()
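ecb.c is the simplest user of this interface: one walk over the request, one cia_encrypt/cia_decrypt call per block, no IV. A hedged sketch of the loop these hits belong to, assuming the standard skcipher_walk API:

static int crypto_ecb_crypt(struct skcipher_request *req,
                            struct crypto_cipher *cipher,
                            void (*fn)(struct crypto_tfm *, u8 *, const u8 *))
{
        const unsigned int bsize = crypto_cipher_blocksize(cipher);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) != 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                do {
                        /* one raw block-cipher call per block, no chaining */
                        fn(crypto_cipher_tfm(cipher), dst, src);
                        src += bsize;
                        dst += bsize;
                } while ((nbytes -= bsize) >= bsize);

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int crypto_ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);

        return crypto_ecb_crypt(req, cipher,
                                crypto_cipher_alg(cipher)->cia_encrypt);
}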
Kconfig
    353  Support for the AEAD wrapper using the ChaCha20 stream cipher combined
    407  This block cipher algorithm is required for IPSec.
    415  This block cipher algorithm is required for TPM2 Cryptography.
    423  This block cipher algorithm is required for IPSec.
    446  This is the simplest block cipher algorithm. It simply encrypts
    456  narrow block cipher mode for dm-crypt. Use it with cipher
    459  rest is used to tie each cipher block to its logical position.
    466  OFB: the Output Feedback mode makes a block cipher into a synchronous
    467  stream cipher. It generates keystream blocks, which are then XORed
    479  This block cipher algorithm is required for RxRPC.
    [all …]
ctr.c
    102  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_ctr_crypt() local
    103  const unsigned int bsize = crypto_cipher_blocksize(cipher);  in crypto_ctr_crypt()
    112  nbytes = crypto_ctr_crypt_inplace(&walk, cipher);  in crypto_ctr_crypt()
    114  nbytes = crypto_ctr_crypt_segment(&walk, cipher);  in crypto_ctr_crypt()
    120  crypto_ctr_crypt_final(&walk, cipher);  in crypto_ctr_crypt()
    223  struct crypto_skcipher *cipher;  in crypto_rfc3686_init_tfm() local
    227  cipher = crypto_spawn_skcipher(spawn);  in crypto_rfc3686_init_tfm()
    228  if (IS_ERR(cipher))  in crypto_rfc3686_init_tfm()
    229  return PTR_ERR(cipher);  in crypto_rfc3686_init_tfm()
    231  ctx->child = cipher;  in crypto_rfc3686_init_tfm()
    [all …]
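The first group of ctr.c hits is the top-level CTR walk: full blocks go through segment/in-place helpers, and a trailing partial block goes through crypto_ctr_crypt_final(), which only ever encrypts the counter, so no padding is needed. Roughly, under the same skcipher_walk assumptions as the sketches above (not a verbatim quote of the file):

static int crypto_ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
        const unsigned int bsize = crypto_cipher_blocksize(cipher);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= bsize) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
                        nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
                else
                        nbytes = crypto_ctr_crypt_segment(&walk, cipher);

                err = skcipher_walk_done(&walk, nbytes);
        }

        if (walk.nbytes) {
                /* partial tail: XOR with one last encrypted counter block */
                crypto_ctr_crypt_final(&walk, cipher);
                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}

The second group of hits, crypto_rfc3686_init_tfm(), is the same spawn-and-store wrapper pattern sketched under the cryptd.c entry further down.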
D | ofb.c | 21 struct crypto_cipher *cipher = skcipher_cipher_simple(tfm); in crypto_ofb_crypt() local 22 const unsigned int bsize = crypto_cipher_blocksize(cipher); in crypto_ofb_crypt() 35 crypto_cipher_encrypt_one(cipher, iv, iv); in crypto_ofb_crypt() 45 crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv); in crypto_ofb_crypt()
|
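ofb.c generates its keystream by encrypting the IV in place over and over, then XORs that keystream into the data; the two crypto_cipher_encrypt_one() hits are the full-block loop and the final partial block. A compact sketch of the whole function under the same assumptions (a reconstruction, not a quote):

static int crypto_ofb_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
        const unsigned int bsize = crypto_cipher_blocksize(cipher);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= bsize) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                u8 * const iv = walk.iv;
                unsigned int nbytes = walk.nbytes;

                do {
                        /* next keystream block = E_k(previous keystream block) */
                        crypto_cipher_encrypt_one(cipher, iv, iv);
                        crypto_xor_cpy(dst, src, iv, bsize);
                        dst += bsize;
                        src += bsize;
                } while ((nbytes -= bsize) >= bsize);

                err = skcipher_walk_done(&walk, nbytes);
        }

        if (walk.nbytes) {      /* final partial block */
                crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);
                crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr,
                               walk.iv, walk.nbytes);
                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}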
pcbc.c
     67  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_pcbc_encrypt() local
     77  cipher);  in crypto_pcbc_encrypt()
     80  cipher);  in crypto_pcbc_encrypt()
    134  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_pcbc_decrypt() local
    144  cipher);  in crypto_pcbc_decrypt()
    147  cipher);  in crypto_pcbc_decrypt()
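The pcbc.c hits only show the top-level encrypt/decrypt functions handing the cipher down to their helpers. PCBC differs from CBC in its chaining value, which is plaintext XOR ciphertext rather than the ciphertext alone; a hedged sketch of what the encrypt-segment helper presumably looks like (the helper itself is not in the hits, so treat this purely as an illustration of the mode):

static int crypto_pcbc_encrypt_segment(struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 * const iv = walk->iv;

        do {
                crypto_xor(iv, src, bsize);             /* P_i ^ chain        */
                crypto_cipher_encrypt_one(tfm, dst, iv);
                crypto_xor_cpy(iv, dst, src, bsize);    /* chain = C_i ^ P_i  */

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}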
skcipher.c
    583  struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);  in skcipher_setkey_unaligned() local
    595  ret = cipher->setkey(tfm, alignbuffer, keylen);  in skcipher_setkey_unaligned()
    603  struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);  in crypto_skcipher_setkey() local
    607  if (keylen < cipher->min_keysize || keylen > cipher->max_keysize)  in crypto_skcipher_setkey()
    613  err = cipher->setkey(tfm, key, keylen);  in crypto_skcipher_setkey()
    883  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in skcipher_setkey_simple() local
    885  crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);  in skcipher_setkey_simple()
    886  crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &  in skcipher_setkey_simple()
    888  return crypto_cipher_setkey(cipher, key, keylen);  in skcipher_setkey_simple()
    896  struct crypto_cipher *cipher;  in skcipher_init_tfm_simple() local
    [all …]
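The skcipher.c hits are the key-handling path: crypto_skcipher_setkey() rejects keys outside the algorithm's [min_keysize, max_keysize] range before calling the algorithm's setkey, and for the "simple" single-block-cipher templates that setkey just forwards the key, plus the request flags, to the underlying cipher. A sketch of the simple variant, matching the lines shown:

static int skcipher_setkey_simple(struct crypto_skcipher *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);

        /* Propagate the caller's request flags (e.g. weak-key checking)
         * to the child cipher before keying it. */
        crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &
                                        CRYPTO_TFM_REQ_MASK);
        return crypto_cipher_setkey(cipher, key, keylen);
}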
testmgr.c
     151  struct cipher_test_suite cipher;  member
    3135  const struct cipher_test_suite *suite = &desc->suite.cipher;  in alg_test_skcipher()
    3526  const struct cipher_test_suite *suite = &desc->suite.cipher;  in alg_test_cipher()
    4171  .cipher = __VECS(adiantum_xchacha12_aes_tv_template)
    4178  .cipher = __VECS(adiantum_xchacha20_aes_tv_template)
    4379  .cipher = __VECS(aes_cbc_tv_template)
    4385  .cipher = __VECS(anubis_cbc_tv_template)
    4391  .cipher = __VECS(bf_cbc_tv_template)
    4397  .cipher = __VECS(camellia_cbc_tv_template)
    4403  .cipher = __VECS(cast5_cbc_tv_template)
    [all …]
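In testmgr.c, every `.cipher = __VECS(...)` entry attaches an array of known-answer test vectors to an algorithm's test description. The two pieces it relies on are small; roughly (field details are a best-effort reconstruction, not quoted from the file):

struct cipher_test_suite {
        const struct cipher_testvec *vecs;
        unsigned int count;
};

#define __VECS(tv)      { .vecs = tv, .count = ARRAY_SIZE(tv) }

alg_test_skcipher() and alg_test_cipher() then walk suite->vecs[0 .. count-1], running each vector through the algorithm and comparing the result against the expected output.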
keywrap.c
    127  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_kw_decrypt() local
    167  crypto_cipher_decrypt_one(cipher, (u8 *)&block,  in crypto_kw_decrypt()
    196  struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);  in crypto_kw_encrypt() local
    238  crypto_cipher_encrypt_one(cipher, (u8 *)&block,  in crypto_kw_encrypt()
cts.c
    288  struct crypto_skcipher *cipher;  in crypto_cts_init_tfm() local
    293  cipher = crypto_spawn_skcipher(spawn);  in crypto_cts_init_tfm()
    294  if (IS_ERR(cipher))  in crypto_cts_init_tfm()
    295  return PTR_ERR(cipher);  in crypto_cts_init_tfm()
    297  ctx->child = cipher;  in crypto_cts_init_tfm()
    300  bsize = crypto_skcipher_blocksize(cipher);  in crypto_cts_init_tfm()
    302  crypto_skcipher_reqsize(cipher),  in crypto_cts_init_tfm()
lrw.c
    270  struct crypto_skcipher *cipher;  in lrw_init_tfm() local
    272  cipher = crypto_spawn_skcipher(spawn);  in lrw_init_tfm()
    273  if (IS_ERR(cipher))  in lrw_init_tfm()
    274  return PTR_ERR(cipher);  in lrw_init_tfm()
    276  ctx->child = cipher;  in lrw_init_tfm()
    278  crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(cipher) +  in lrw_init_tfm()
pcrypt.c
    181  struct crypto_aead *cipher;  in pcrypt_aead_init_tfm() local
    190  cipher = crypto_spawn_aead(&ictx->spawn);  in pcrypt_aead_init_tfm()
    192  if (IS_ERR(cipher))  in pcrypt_aead_init_tfm()
    193  return PTR_ERR(cipher);  in pcrypt_aead_init_tfm()
    195  ctx->child = cipher;  in pcrypt_aead_init_tfm()
    198  crypto_aead_reqsize(cipher));  in pcrypt_aead_init_tfm()
vmac.c
     53  struct crypto_cipher *cipher;  member
    442  err = crypto_cipher_setkey(tctx->cipher, key, keylen);  in vmac_setkey()
    449  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);  in vmac_setkey()
    459  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);  in vmac_setkey()
    470  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);  in vmac_setkey()
    588  crypto_cipher_encrypt_one(tctx->cipher, dctx->nonce.bytes,  in vmac_final()
    602  struct crypto_cipher *cipher;  in vmac_init_tfm() local
    604  cipher = crypto_spawn_cipher(spawn);  in vmac_init_tfm()
    605  if (IS_ERR(cipher))  in vmac_init_tfm()
    606  return PTR_ERR(cipher);  in vmac_init_tfm()
    [all …]
xcbc.c
    169  struct crypto_cipher *cipher;  in xcbc_init_tfm() local
    174  cipher = crypto_spawn_cipher(spawn);  in xcbc_init_tfm()
    175  if (IS_ERR(cipher))  in xcbc_init_tfm()
    176  return PTR_ERR(cipher);  in xcbc_init_tfm()
    178  ctx->child = cipher;  in xcbc_init_tfm()
cmac.c
    203  struct crypto_cipher *cipher;  in cmac_init_tfm() local
    208  cipher = crypto_spawn_cipher(spawn);  in cmac_init_tfm()
    209  if (IS_ERR(cipher))  in cmac_init_tfm()
    210  return PTR_ERR(cipher);  in cmac_init_tfm()
    212  ctx->child = cipher;  in cmac_init_tfm()
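vmac.c, xcbc.c, cmac.c and ccm.c (its cbcmac helper, below) all repeat the same init_tfm shape visible in these hits: instantiate the cipher the template was built around via crypto_spawn_cipher() and stash the handle in the per-tfm context. A generic sketch of that pattern; the context struct and the mac_* names are hypothetical, not the names used in those files:

#include <crypto/algapi.h>

struct mac_tfm_ctx {
        struct crypto_cipher *child;    /* hypothetical layout */
};

static int mac_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct mac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_cipher *cipher;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        return 0;
}

static void mac_exit_tfm(struct crypto_tfm *tfm)
{
        struct mac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(ctx->child);
}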
cryptd.c
    341  struct crypto_skcipher *cipher;  in cryptd_skcipher_init_tfm() local
    343  cipher = crypto_spawn_skcipher(spawn);  in cryptd_skcipher_init_tfm()
    344  if (IS_ERR(cipher))  in cryptd_skcipher_init_tfm()
    345  return PTR_ERR(cipher);  in cryptd_skcipher_init_tfm()
    347  ctx->child = cipher;  in cryptd_skcipher_init_tfm()
    350  crypto_skcipher_reqsize(cipher));  in cryptd_skcipher_init_tfm()
    795  struct crypto_aead *cipher;  in cryptd_aead_init_tfm() local
    797  cipher = crypto_spawn_aead(spawn);  in cryptd_aead_init_tfm()
    798  if (IS_ERR(cipher))  in cryptd_aead_init_tfm()
    799  return PTR_ERR(cipher);  in cryptd_aead_init_tfm()
    [all …]
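cryptd.c, cts.c, lrw.c and ctr.c's rfc3686 wrapper do the analogous thing for a child skcipher (pcrypt.c and cryptd's aead path do it for a child AEAD): spawn it, store it, and grow the request size so a sub-request for the child fits behind the wrapper's own request context. A generic sketch, again with hypothetical wrap_* names:

#include <crypto/internal/skcipher.h>

struct wrap_ctx {
        struct crypto_skcipher *child;  /* hypothetical layout */
};

static int wrap_init_tfm(struct crypto_skcipher *tfm)
{
        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
        struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
        struct wrap_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *cipher;

        cipher = crypto_spawn_skcipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;

        /* Reserve room for our own state plus a sub-request for the child. */
        crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
                                         crypto_skcipher_reqsize(cipher));
        return 0;
}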
crypto_user_stat.c
     56  rcipher.stat_encrypt_cnt = atomic64_read(&alg->stats.cipher.encrypt_cnt);  in crypto_report_cipher()
     57  rcipher.stat_encrypt_tlen = atomic64_read(&alg->stats.cipher.encrypt_tlen);  in crypto_report_cipher()
     58  rcipher.stat_decrypt_cnt = atomic64_read(&alg->stats.cipher.decrypt_cnt);  in crypto_report_cipher()
     59  rcipher.stat_decrypt_tlen = atomic64_read(&alg->stats.cipher.decrypt_tlen);  in crypto_report_cipher()
     60  rcipher.stat_err_cnt = atomic64_read(&alg->stats.cipher.err_cnt);  in crypto_report_cipher()
des_generic.c
     93  .cra_u = { .cipher = {
    107  .cra_u = { .cipher = {
tea.c
    225  .cra_u = { .cipher = {
    239  .cra_u = { .cipher = {
    253  .cra_u = { .cipher = {
algapi.c
    1262  atomic64_inc(&alg->stats.cipher.err_cnt);  in crypto_stats_skcipher_encrypt()
    1264  atomic64_inc(&alg->stats.cipher.encrypt_cnt);  in crypto_stats_skcipher_encrypt()
    1265  atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);  in crypto_stats_skcipher_encrypt()
    1275  atomic64_inc(&alg->stats.cipher.err_cnt);  in crypto_stats_skcipher_decrypt()
    1277  atomic64_inc(&alg->stats.cipher.decrypt_cnt);  in crypto_stats_skcipher_decrypt()
    1278  atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);  in crypto_stats_skcipher_decrypt()
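The algapi.c hits are the CRYPTO_STATS bookkeeping that crypto_user_stat.c (above) later reports over netlink: per-algorithm atomic64 counters bumped on every skcipher request. Roughly, treating the usual -EINPROGRESS/-EBUSY async returns as non-errors and omitting the reference-count handling of the alg:

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
                                   struct crypto_alg *alg)
{
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                atomic64_inc(&alg->stats.cipher.err_cnt);
        } else {
                atomic64_inc(&alg->stats.cipher.encrypt_cnt);
                atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
        }
}

crypto_stats_skcipher_decrypt() mirrors this with the decrypt_cnt and decrypt_tlen counters.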
ccm.c
    843  struct crypto_cipher *cipher;  in cbcmac_init_tfm() local
    848  cipher = crypto_spawn_cipher(spawn);  in cbcmac_init_tfm()
    849  if (IS_ERR(cipher))  in cbcmac_init_tfm()
    850  return PTR_ERR(cipher);  in cbcmac_init_tfm()
    852  ctx->child = cipher;  in cbcmac_init_tfm()
sm4_generic.c
    214  .cipher = {
blowfish_generic.c
    113  .cra_u = { .cipher = {
twofish_generic.c
    177  .cra_u = { .cipher = {
khazad.c
    857  .cra_u = { .cipher = {
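The single hits in des_generic.c, tea.c, sm4_generic.c, blowfish_generic.c, twofish_generic.c and khazad.c all mark the same thing: the .cra_u.cipher union member through which a generic single-block cipher registers its key sizes and per-block callbacks. A skeletal, entirely hypothetical registration (all example_* names and sizes are placeholders, not taken from any of those files):

#include <linux/crypto.h>
#include <linux/module.h>

struct example_ctx { u32 round_keys[32]; };        /* placeholder state */

static int example_setkey(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen);
static void example_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
static void example_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src);

static struct crypto_alg example_alg = {
        .cra_name               = "example",
        .cra_driver_name        = "example-generic",
        .cra_priority           = 100,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = 16,
        .cra_ctxsize            = sizeof(struct example_ctx),
        .cra_module             = THIS_MODULE,
        .cra_u                  = { .cipher = {
                .cia_min_keysize        = 16,
                .cia_max_keysize        = 32,
                .cia_setkey             = example_setkey,
                .cia_encrypt            = example_encrypt,
                .cia_decrypt            = example_decrypt,
        } },
};

Such an alg is registered with crypto_register_alg() from the module's init function; the templates listed earlier (cbc, ecb, ctr, ofb, pcbc, the MACs) are then instantiated on top of it by name.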