Searched refs:base (Results 1 – 25 of 56) sorted by relevance
/crypto/
aead.c
98 aead->base.exit = crypto_aead_exit_tfm; in crypto_aead_init_tfm()
110 struct aead_alg *aead = container_of(alg, struct aead_alg, base); in crypto_aead_report()
138 struct aead_alg *aead = container_of(alg, struct aead_alg, base); in crypto_aead_show()
172 .tfmsize = offsetof(struct crypto_aead, base),
238 if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, in aead_geniv_alloc()
239 "%s(%s)", tmpl->name, alg->base.cra_name) >= in aead_geniv_alloc()
242 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, in aead_geniv_alloc()
243 "%s(%s)", tmpl->name, alg->base.cra_driver_name) >= in aead_geniv_alloc()
247 inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; in aead_geniv_alloc()
248 inst->alg.base.cra_priority = alg->base.cra_priority; in aead_geniv_alloc()
[all …]
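
These aead.c hits illustrate the kernel crypto API's inheritance idiom: struct aead_alg embeds a struct crypto_alg member named base, container_of() recovers the outer type from a pointer to that member, and .tfmsize = offsetof(struct crypto_aead, base) records where the embedded member lives so the framework can convert between generic and typed handles. A minimal userspace sketch of the same idiom (the *_like struct names are hypothetical stand-ins):

#include <stddef.h>
#include <stdio.h>

/* container_of(): recover the enclosing struct from a pointer to one of
 * its members, exactly as the kernel macro does. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct crypto_alg_like { int cra_priority; };	/* stands in for struct crypto_alg */
struct aead_alg_like {				/* stands in for struct aead_alg */
	int ivsize;
	struct crypto_alg_like base;		/* embedded "base class" */
};

int main(void)
{
	struct aead_alg_like alg = { .ivsize = 12, .base = { .cra_priority = 300 } };
	struct crypto_alg_like *b = &alg.base;	/* generic code sees only this */

	/* Recover the derived type from the base pointer, as in
	 * crypto_aead_report()/crypto_aead_show() above. */
	struct aead_alg_like *a = container_of(b, struct aead_alg_like, base);
	printf("ivsize=%d cra_priority=%d\n", a->ivsize, a->base.cra_priority);
	return 0;
}
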
akcipher.c
71 akcipher->base.exit = crypto_akcipher_exit_tfm; in crypto_akcipher_init_tfm()
97 .tfmsize = offsetof(struct crypto_akcipher, base),
103 spawn->base.frontend = &crypto_akcipher_type; in crypto_grab_akcipher()
104 return crypto_grab_spawn(&spawn->base, name, type, mask); in crypto_grab_akcipher()
117 struct crypto_alg *base = &alg->base; in akcipher_prepare_alg() local
119 base->cra_type = &crypto_akcipher_type; in akcipher_prepare_alg()
120 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; in akcipher_prepare_alg()
121 base->cra_flags |= CRYPTO_ALG_TYPE_AKCIPHER; in akcipher_prepare_alg()
126 struct crypto_alg *base = &alg->base; in crypto_register_akcipher() local
129 return crypto_register_alg(base); in crypto_register_akcipher()
[all …]
kpp.c
71 kpp->base.exit = crypto_kpp_exit_tfm; in crypto_kpp_init_tfm()
89 .tfmsize = offsetof(struct crypto_kpp, base),
100 struct crypto_alg *base = &alg->base; in kpp_prepare_alg() local
102 base->cra_type = &crypto_kpp_type; in kpp_prepare_alg()
103 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; in kpp_prepare_alg()
104 base->cra_flags |= CRYPTO_ALG_TYPE_KPP; in kpp_prepare_alg()
109 struct crypto_alg *base = &alg->base; in crypto_register_kpp() local
112 return crypto_register_alg(base); in crypto_register_kpp()
118 crypto_unregister_alg(&alg->base); in crypto_unregister_kpp()
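
akcipher.c, kpp.c, and (below) rng.c and shash.c share one registration pattern: a *_prepare_alg() helper takes the embedded struct crypto_alg, clears the type bits in cra_flags, sets its own type, and then hands the base struct to crypto_register_alg(). A hedged standalone sketch of just the flag masking (the constants match the kernel's definitions; the program itself is illustrative):

#include <stdio.h>

#define CRYPTO_ALG_TYPE_MASK	0x0000000f	/* low bits select the algorithm type */
#define CRYPTO_ALG_TYPE_KPP	0x00000008
#define CRYPTO_ALG_ASYNC	0x00000080

int main(void)
{
	unsigned int cra_flags = 0x3 | CRYPTO_ALG_ASYNC;	/* stale type + other flags */

	/* The kpp_prepare_alg() idiom: wipe only the type bits, keep the rest. */
	cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	cra_flags |= CRYPTO_ALG_TYPE_KPP;

	printf("cra_flags=0x%08x\n", cra_flags);	/* 0x00000088: ASYNC preserved */
	return 0;
}
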
cts.c
123 skcipher_request_set_callback(subreq, req->base.flags & in cts_cbc_encrypt()
139 (err == -EBUSY && req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) in crypto_cts_encrypt_done()
160 skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_encrypt()
161 req->base.complete, in crypto_cts_encrypt()
162 req->base.data); in crypto_cts_encrypt()
171 skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_encrypt()
215 skcipher_request_set_callback(subreq, req->base.flags & in cts_cbc_decrypt()
232 (err == -EBUSY && req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) in crypto_cts_decrypt_done()
254 skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_decrypt()
255 req->base.complete, in crypto_cts_decrypt()
[all …]
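
The cts.c hits show how a template forwards the caller's completion context to an inner request: skcipher_request_set_callback(subreq, req->base.flags, req->base.complete, req->base.data) copies flags, callback, and callback argument from the outer request's base, so completing the subrequest notifies the original caller. A minimal userspace analog (all names hypothetical):

#include <stdio.h>

/* Stripped-down async request, mirroring struct crypto_async_request. */
struct req_base {
	unsigned int flags;
	void (*complete)(struct req_base *req, int err);
	void *data;
};

/* Analog of skcipher_request_set_callback(): install caller context. */
static void set_callback(struct req_base *r, unsigned int flags,
			 void (*compl)(struct req_base *, int), void *data)
{
	r->flags = flags;
	r->complete = compl;
	r->data = data;
}

static void user_done(struct req_base *r, int err)
{
	printf("outer request done: err=%d data=%s\n", err, (char *)r->data);
}

int main(void)
{
	struct req_base outer, sub;

	set_callback(&outer, 0, user_done, "ctx");

	/* The cts.c pattern: the subrequest inherits the outer request's
	 * flags/complete/data, so its completion reaches the caller. */
	set_callback(&sub, outer.flags, outer.complete, outer.data);
	sub.complete(&sub, 0);
	return 0;
}
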
ahash.c
145 walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK; in crypto_hash_walk_first()
163 walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK; in crypto_ahash_walk_first()
231 (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? in ahash_save_req()
262 priv->complete = req->base.complete; in ahash_save_req()
263 priv->data = req->base.data; in ahash_save_req()
264 priv->flags = req->base.flags; in ahash_save_req()
273 req->base.complete = cplt; in ahash_save_req()
274 req->base.data = req; in ahash_save_req()
331 areq->base.complete(&areq->base, err); in ahash_op_unaligned_done()
398 areq->base.complete(&areq->base, err); in ahash_def_finup_done2()
[all …]
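
ahash_save_req() goes one step further than plain forwarding: it saves the caller's complete/data/flags into a private context, then repoints req->base.complete and req->base.data so the framework's own handler runs first and restores the originals when the operation finishes (ahash_op_unaligned_done() then invokes areq->base.complete). A simplified, hypothetical userspace sketch of that hijack-and-restore shape:

#include <stdio.h>

struct areq {
	unsigned int flags;
	void (*complete)(struct areq *req, int err);
	void *data;
};

/* Saved caller context, like the priv struct in ahash_save_req(). */
struct priv_ctx {
	void (*complete)(struct areq *req, int err);
	void *data;
	unsigned int flags;
};

static void caller_done(struct areq *r, int err)
{
	printf("caller notified: err=%d\n", err);
}

/* Framework hook that runs before the caller's callback. */
static void framework_done(struct areq *r, int err)
{
	struct priv_ctx *priv = r->data;	/* data was repointed at priv */

	printf("framework post-processing\n");
	/* Restore and invoke the original completion. */
	r->complete = priv->complete;
	r->data = priv->data;
	r->complete(r, err);
}

int main(void)
{
	struct priv_ctx priv;
	struct areq req = { .flags = 0, .complete = caller_done, .data = NULL };

	/* The ahash_save_req() move: remember, then hijack. */
	priv.complete = req.complete;
	priv.data = req.data;
	priv.flags = req.flags;
	req.complete = framework_done;
	req.data = &priv;

	req.complete(&req, 0);	/* completion flows through the framework first */
	return 0;
}
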
seqiv.c
46 static void seqiv_aead_encrypt_complete(struct crypto_async_request *base, in seqiv_aead_encrypt_complete() argument
49 struct aead_request *req = base->data; in seqiv_aead_encrypt_complete()
71 compl = req->base.complete; in seqiv_aead_encrypt()
72 data = req->base.data; in seqiv_aead_encrypt()
79 skcipher_request_set_callback(nreq, req->base.flags, in seqiv_aead_encrypt()
92 info = kmalloc(ivsize, req->base.flags & in seqiv_aead_encrypt()
103 aead_request_set_callback(subreq, req->base.flags, compl, data); in seqiv_aead_encrypt()
131 compl = req->base.complete; in seqiv_aead_decrypt()
132 data = req->base.data; in seqiv_aead_decrypt()
134 aead_request_set_callback(subreq, req->base.flags, compl, data); in seqiv_aead_decrypt()
[all …]
cryptd.c
247 req->base.complete = rctx->complete; in cryptd_blkcipher_crypt()
255 rctx->complete(&req->base, err); in cryptd_blkcipher_crypt()
288 rctx->complete = req->base.complete; in cryptd_blkcipher_enqueue()
289 req->base.complete = compl; in cryptd_blkcipher_enqueue()
291 return cryptd_enqueue_request(queue, &req->base); in cryptd_blkcipher_enqueue()
485 rctx->complete = req->base.complete; in cryptd_hash_enqueue()
486 req->base.complete = compl; in cryptd_hash_enqueue()
488 return cryptd_enqueue_request(queue, &req->base); in cryptd_hash_enqueue()
499 rctx->complete(&req->base, err); in cryptd_hash_complete()
522 req->base.complete = rctx->complete; in cryptd_hash_init()
[all …]
skcipher.c
67 .flags = req->base.flags, in skcipher_crypt_blkcipher()
159 req->base.complete, req->base.data); in skcipher_crypt_ablkcipher()
289 skcipher->base.exit = crypto_skcipher_exit_tfm; in crypto_skcipher_init_tfm()
300 container_of(inst, struct skcipher_instance, s.base); in crypto_skcipher_free_instance()
310 base); in crypto_skcipher_show()
327 base); in crypto_skcipher_report()
363 .tfmsize = offsetof(struct crypto_skcipher, base),
369 spawn->base.frontend = &crypto_skcipher_type2; in crypto_grab_skcipher()
370 return crypto_grab_spawn(&spawn->base, name, type, mask); in crypto_grab_skcipher()
390 struct crypto_alg *base = &alg->base; in skcipher_prepare_alg() local
[all …]
rng.c
37 return container_of(tfm, struct crypto_rng, base); in __crypto_rng_cast()
68 struct rng_alg *ralg = container_of(alg, struct rng_alg, base); in seedsize()
115 .tfmsize = offsetof(struct crypto_rng, base),
187 struct crypto_alg *base = &alg->base; in crypto_register_rng() local
192 base->cra_type = &crypto_rng_type; in crypto_register_rng()
193 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; in crypto_register_rng()
194 base->cra_flags |= CRYPTO_ALG_TYPE_RNG; in crypto_register_rng()
196 return crypto_register_alg(base); in crypto_register_rng()
202 crypto_unregister_alg(&alg->base); in crypto_unregister_rng()
dh.c
38 static int _compute_val(const struct dh_ctx *ctx, MPI base, MPI val) in _compute_val() argument
41 return mpi_powm(val, base, ctx->xa, ctx->p); in _compute_val()
102 MPI base, val = mpi_alloc(0); in dh_compute_value() local
115 base = mpi_read_raw_from_sgl(req->src, req->src_len); in dh_compute_value()
116 if (!base) { in dh_compute_value()
121 base = ctx->g; in dh_compute_value()
124 ret = _compute_val(ctx, base, val); in dh_compute_value()
136 mpi_free(base); in dh_compute_value()
162 .base = {
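
The dh.c hits are Diffie-Hellman itself: _compute_val() wraps mpi_powm(), computing val = base^xa mod p, where base is either the peer's public value read from the scatterlist or, for key generation, the generator g from the context. A toy fixed-width analog of that modular exponentiation (square-and-multiply; real DH needs the arbitrary-precision MPI arithmetic these calls provide, and these demo parameters are nowhere near real key sizes):

#include <stdio.h>
#include <stdint.h>

/* Toy analog of mpi_powm(): returns base^exp mod mod by square-and-multiply.
 * 64-bit only and overflow-prone for large moduli; illustration, not crypto. */
static uint64_t powm(uint64_t base, uint64_t exp, uint64_t mod)
{
	uint64_t val = 1;

	base %= mod;
	while (exp) {
		if (exp & 1)
			val = (val * base) % mod;	/* multiply step */
		base = (base * base) % mod;		/* square step */
		exp >>= 1;
	}
	return val;
}

int main(void)
{
	uint64_t p = 23, g = 5;			/* tiny demo group */
	uint64_t xa = 6, xb = 15;		/* private keys */
	uint64_t ya = powm(g, xa, p);		/* our public value */
	uint64_t yb = powm(g, xb, p);		/* peer's public value */

	/* Both sides derive the same shared secret: yb^xa == ya^xb (mod p). */
	printf("shared: %llu == %llu\n",
	       (unsigned long long)powm(yb, xa, p),
	       (unsigned long long)powm(ya, xb, p));
	return 0;
}
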
gcm.c
678 if (ctr->base.cra_blocksize != 1) in crypto_gcm_create_common()
682 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, in crypto_gcm_create_common()
683 "gcm_base(%s,%s)", ctr->base.cra_driver_name, in crypto_gcm_create_common()
688 memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME); in crypto_gcm_create_common()
690 inst->alg.base.cra_flags = (ghash->base.cra_flags | in crypto_gcm_create_common()
691 ctr->base.cra_flags) & CRYPTO_ALG_ASYNC; in crypto_gcm_create_common()
692 inst->alg.base.cra_priority = (ghash->base.cra_priority + in crypto_gcm_create_common()
693 ctr->base.cra_priority) / 2; in crypto_gcm_create_common()
694 inst->alg.base.cra_blocksize = 1; in crypto_gcm_create_common()
695 inst->alg.base.cra_alignmask = ghash->base.cra_alignmask | in crypto_gcm_create_common()
[all …]
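
gcm.c (and ccm.c, ctr.c, authenc.c further down) composes the instance's driver name with snprintf() and treats a return value >= CRYPTO_MAX_ALG_NAME as an error: snprintf() returns the length the full string would have had, so that comparison is the standard truncation check. A hedged standalone sketch (CRYPTO_MAX_ALG_NAME is the kernel's limit, 64 in older trees and 128 in newer ones; 128 is assumed here):

#include <stdio.h>

#define CRYPTO_MAX_ALG_NAME 128	/* assumed; check your tree's crypto.h */

/* Compose a "gcm_base(ctr,ghash)"-style driver name, rejecting truncation
 * the way crypto_gcm_create_common() does. Returns 0 on success. */
static int make_driver_name(char *out, const char *ctr_name, const char *ghash_name)
{
	if (snprintf(out, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
		     ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
		return -1;	/* would not fit; the kernel returns -ENAMETOOLONG */
	return 0;
}

int main(void)
{
	char name[CRYPTO_MAX_ALG_NAME];

	if (make_driver_name(name, "ctr(aes-generic)", "ghash-generic") == 0)
		printf("%s\n", name);	/* gcm_base(ctr(aes-generic),ghash-generic) */
	return 0;
}
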
echainiv.c
53 skcipher_request_set_callback(nreq, req->base.flags, in echainiv_encrypt()
64 aead_request_set_callback(subreq, req->base.flags, in echainiv_encrypt()
65 req->base.complete, req->base.data); in echainiv_encrypt()
104 compl = req->base.complete; in echainiv_decrypt()
105 data = req->base.data; in echainiv_decrypt()
107 aead_request_set_callback(subreq, req->base.flags, compl, data); in echainiv_decrypt()
143 inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx); in echainiv_aead_create()
144 inst->alg.base.cra_ctxsize += inst->alg.ivsize; in echainiv_aead_create()
rsa-pkcs1pad.c
230 async_req.data = req->base.data; in pkcs1pad_encrypt_sign_complete_cb()
233 req->base.complete(&async_req, in pkcs1pad_encrypt_sign_complete_cb()
280 akcipher_request_set_callback(&req_ctx->child_req, req->base.flags, in pkcs1pad_encrypt()
290 !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))) in pkcs1pad_encrypt()
359 async_req.data = req->base.data; in pkcs1pad_decrypt_complete_cb()
362 req->base.complete(&async_req, pkcs1pad_decrypt_complete(req, err)); in pkcs1pad_decrypt_complete_cb()
383 akcipher_request_set_callback(&req_ctx->child_req, req->base.flags, in pkcs1pad_decrypt()
394 !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))) in pkcs1pad_decrypt()
441 akcipher_request_set_callback(&req_ctx->child_req, req->base.flags, in pkcs1pad_sign()
451 !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))) in pkcs1pad_sign()
[all …]
ccm.c
559 if (ctr->base.cra_blocksize != 1) in crypto_ccm_create_common()
567 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, in crypto_ccm_create_common()
568 "ccm_base(%s,%s)", ctr->base.cra_driver_name, in crypto_ccm_create_common()
572 memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME); in crypto_ccm_create_common()
574 inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC; in crypto_ccm_create_common()
575 inst->alg.base.cra_priority = (cipher->cra_priority + in crypto_ccm_create_common()
576 ctr->base.cra_priority) / 2; in crypto_ccm_create_common()
577 inst->alg.base.cra_blocksize = 1; in crypto_ccm_create_common()
578 inst->alg.base.cra_alignmask = cipher->cra_alignmask | in crypto_ccm_create_common()
579 ctr->base.cra_alignmask | in crypto_ccm_create_common()
[all …]
shash.c
226 desc->flags = req->base.flags; in shash_async_init()
281 desc->flags = req->base.flags; in shash_async_finup()
317 desc->flags = req->base.flags; in shash_async_digest()
333 desc->flags = req->base.flags; in shash_async_import()
393 !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY)) in crypto_shash_init_tfm()
446 .tfmsize = offsetof(struct crypto_shash, base),
458 struct crypto_alg *base = &alg->base; in shash_prepare_alg() local
465 base->cra_type = &crypto_shash_type; in shash_prepare_alg()
466 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; in shash_prepare_alg()
467 base->cra_flags |= CRYPTO_ALG_TYPE_SHASH; in shash_prepare_alg()
[all …]
mcryptd.c
324 rctx->complete = req->base.complete; in mcryptd_hash_enqueue()
325 req->base.complete = complete; in mcryptd_hash_enqueue()
327 ret = mcryptd_enqueue_request(queue, &req->base, rctx); in mcryptd_hash_enqueue()
352 rctx->complete(&req->base, err); in mcryptd_hash_init()
372 req->base.complete = rctx->complete; in mcryptd_hash_update()
379 rctx->complete(&req->base, err); in mcryptd_hash_update()
399 req->base.complete = rctx->complete; in mcryptd_hash_final()
406 rctx->complete(&req->base, err); in mcryptd_hash_final()
426 req->base.complete = rctx->complete; in mcryptd_hash_finup()
433 rctx->complete(&req->base, err); in mcryptd_hash_finup()
[all …]
ctr.c
298 skcipher_request_set_callback(subreq, req->base.flags, in crypto_rfc3686_crypt()
299 req->base.complete, req->base.data); in crypto_rfc3686_crypt()
387 if (alg->base.cra_blocksize != 1) in crypto_rfc3686_create()
391 if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, in crypto_rfc3686_create()
392 "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME) in crypto_rfc3686_create()
394 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, in crypto_rfc3686_create()
395 "rfc3686(%s)", alg->base.cra_driver_name) >= in crypto_rfc3686_create()
399 inst->alg.base.cra_priority = alg->base.cra_priority; in crypto_rfc3686_create()
400 inst->alg.base.cra_blocksize = 1; in crypto_rfc3686_create()
401 inst->alg.base.cra_alignmask = alg->base.cra_alignmask; in crypto_rfc3686_create()
[all …]
authenc.c
263 req->base.complete, req->base.data); in crypto_authenc_decrypt_tail()
403 auth_base = &auth->base; in crypto_authenc_create()
435 if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, in crypto_authenc_create()
437 enc->base.cra_name) >= in crypto_authenc_create()
441 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, in crypto_authenc_create()
443 enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) in crypto_authenc_create()
446 inst->alg.base.cra_flags = (auth_base->cra_flags | in crypto_authenc_create()
447 enc->base.cra_flags) & CRYPTO_ALG_ASYNC; in crypto_authenc_create()
448 inst->alg.base.cra_priority = enc->base.cra_priority * 10 + in crypto_authenc_create()
450 inst->alg.base.cra_blocksize = enc->base.cra_blocksize; in crypto_authenc_create()
[all …]
authencesn.c
269 req->base.complete, req->base.data); in crypto_authenc_esn_decrypt_tail()
429 auth_base = &auth->base; in crypto_authenc_esn_create()
458 if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, in crypto_authenc_esn_create()
460 enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME) in crypto_authenc_esn_create()
463 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, in crypto_authenc_esn_create()
465 enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) in crypto_authenc_esn_create()
468 inst->alg.base.cra_flags = (auth_base->cra_flags | in crypto_authenc_esn_create()
469 enc->base.cra_flags) & CRYPTO_ALG_ASYNC; in crypto_authenc_esn_create()
470 inst->alg.base.cra_priority = enc->base.cra_priority * 10 + in crypto_authenc_esn_create()
472 inst->alg.base.cra_blocksize = enc->base.cra_blocksize; in crypto_authenc_esn_create()
[all …]
hmac.c
197 alg = &salg->base; in hmac_create()
220 inst->alg.base.cra_priority = alg->cra_priority; in hmac_create()
221 inst->alg.base.cra_blocksize = alg->cra_blocksize; in hmac_create()
222 inst->alg.base.cra_alignmask = alg->cra_alignmask; in hmac_create()
228 inst->alg.base.cra_ctxsize = sizeof(struct hmac_ctx) + in hmac_create()
231 inst->alg.base.cra_init = hmac_init_tfm; in hmac_create()
232 inst->alg.base.cra_exit = hmac_exit_tfm; in hmac_create()
ablkcipher.c
108 struct crypto_tfm *tfm = req->base.tfm; in ablkcipher_walk_done()
134 crypto_yield(req->base.flags); in ablkcipher_walk_done()
154 void *src, *dst, *base; in ablkcipher_next_slow() local
165 base = p + 1; in ablkcipher_next_slow()
167 dst = (u8 *)ALIGN((unsigned long)base, alignmask + 1); in ablkcipher_next_slow()
225 struct crypto_tfm *tfm = req->base.tfm; in ablkcipher_walk_next()
233 req->base.flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; in ablkcipher_walk_next()
270 struct crypto_tfm *tfm = req->base.tfm; in ablkcipher_walk_first()
299 walk->blocksize = crypto_tfm_alg_blocksize(req->base.tfm); in ablkcipher_walk_phys()
361 crt->base = __crypto_ablkcipher_cast(tfm); in crypto_init_ablkcipher_ops()
[all …]
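
The ablkcipher.c walk code shows the kernel's alignment idiom: a scratch buffer's base pointer is rounded up with ALIGN((unsigned long)base, alignmask + 1), where cra_alignmask is one less than the required power-of-two alignment. A small illustrative sketch of the macro and the rounding:

#include <stdio.h>

/* Kernel-style ALIGN: round x up to a multiple of a (a must be a power of two). */
#define ALIGN(x, a) (((x) + ((a) - 1)) & ~((unsigned long)(a) - 1))

int main(void)
{
	unsigned long alignmask = 15;	/* alignmask for 16-byte alignment */
	char buf[64];
	unsigned long base = (unsigned long)buf + 1;	/* deliberately misaligned */
	unsigned long dst = ALIGN(base, alignmask + 1);

	printf("base=%#lx dst=%#lx dst%%16=%lu\n", base, dst, dst % 16);
	return 0;
}
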
xcbc.c
234 inst->alg.base.cra_alignmask = alignmask; in xcbc_create()
235 inst->alg.base.cra_priority = alg->cra_priority; in xcbc_create()
236 inst->alg.base.cra_blocksize = alg->cra_blocksize; in xcbc_create()
245 inst->alg.base.cra_ctxsize = ALIGN(sizeof(struct xcbc_tfm_ctx), in xcbc_create()
248 inst->alg.base.cra_init = xcbc_init_tfm; in xcbc_create()
249 inst->alg.base.cra_exit = xcbc_exit_tfm; in xcbc_create()
ablk_helper.c
41 struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base; in ablk_set_key()
80 ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base); in ablk_encrypt()
100 ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base); in ablk_decrypt()
136 crypto_ablkcipher_reqsize(&cryptd_tfm->base); in ablk_init_common()
cmac.c
261 inst->alg.base.cra_alignmask = alignmask; in cmac_create()
262 inst->alg.base.cra_priority = alg->cra_priority; in cmac_create()
263 inst->alg.base.cra_blocksize = alg->cra_blocksize; in cmac_create()
271 inst->alg.base.cra_ctxsize = in cmac_create()
275 inst->alg.base.cra_init = cmac_init_tfm; in cmac_create()
276 inst->alg.base.cra_exit = cmac_exit_tfm; in cmac_create()
crypto_engine.c
268 if (engine->cur_req == &req->base) in crypto_finalize_cipher_request()
285 req->base.complete(&req->base, err); in crypto_finalize_cipher_request()
305 if (engine->cur_req == &req->base) in crypto_finalize_hash_request()
322 req->base.complete(&req->base, err); in crypto_finalize_hash_request()
