
Searched refs:shash (Results 1 – 24 of 24) sorted by relevance

/drivers/nvme/target/
auth.c:267 struct shash_desc *shash; in nvmet_auth_host_hash() local
324 shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(shash_tfm), in nvmet_auth_host_hash()
326 if (!shash) { in nvmet_auth_host_hash()
330 shash->tfm = shash_tfm; in nvmet_auth_host_hash()
331 ret = crypto_shash_init(shash); in nvmet_auth_host_hash()
334 ret = crypto_shash_update(shash, challenge, shash_len); in nvmet_auth_host_hash()
338 ret = crypto_shash_update(shash, buf, 4); in nvmet_auth_host_hash()
342 ret = crypto_shash_update(shash, buf, 2); in nvmet_auth_host_hash()
346 ret = crypto_shash_update(shash, buf, 1); in nvmet_auth_host_hash()
349 ret = crypto_shash_update(shash, "HostHost", 8); in nvmet_auth_host_hash()
[all …]
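
The nvmet snippet above is the typical heap-allocated descriptor flow: size the shash_desc for the transform with crypto_shash_descsize(), attach the tfm, then drive init/update/finup. A minimal sketch of that flow follows; "tfm" is assumed to be the already-keyed HMAC transform set up elsewhere in nvmet_auth_host_hash(), and the update sequence is a shortened stand-in for the full DH-HMAC-CHAP field order shown above.

#include <crypto/hash.h>
#include <linux/slab.h>
#include <linux/string.h>

/* Sketch only: heap-allocated shash_desc driven through init/update/finup. */
static int sketch_host_response(struct crypto_shash *tfm,
                                const u8 *challenge, unsigned int challenge_len,
                                const char *hostnqn, u8 *response)
{
        struct shash_desc *shash;
        int ret;

        shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(tfm), GFP_KERNEL);
        if (!shash)
                return -ENOMEM;
        shash->tfm = tfm;

        ret = crypto_shash_init(shash);
        if (ret)
                goto out;
        ret = crypto_shash_update(shash, challenge, challenge_len);
        if (ret)
                goto out;
        ret = crypto_shash_update(shash, "HostHost", 8);
        if (ret)
                goto out;
        ret = crypto_shash_finup(shash, hostnqn, strlen(hostnqn), response);
out:
        kfree_sensitive(shash);
        return ret;
}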
/drivers/crypto/vmx/
ghash.c:41 u64 shash[2]; member
51 memset(dctx->shash, 0, GHASH_DIGEST_SIZE); in p8_ghash_init()
83 gcm_ghash_p8(dctx->shash, ctx->htable, in __ghash_block()
89 crypto_xor((u8 *)dctx->shash, dctx->buffer, GHASH_BLOCK_SIZE); in __ghash_block()
90 gf128mul_lle((be128 *)dctx->shash, &ctx->key); in __ghash_block()
102 gcm_ghash_p8(dctx->shash, ctx->htable, in __ghash_blocks()
109 crypto_xor((u8 *)dctx->shash, src, GHASH_BLOCK_SIZE); in __ghash_blocks()
110 gf128mul_lle((be128 *)dctx->shash, &ctx->key); in __ghash_blocks()
165 memcpy(out, dctx->shash, GHASH_DIGEST_SIZE); in p8_ghash_final()
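
The vmx ghash driver keeps the running GHASH state in dctx->shash; the P8 path hands it to gcm_ghash_p8(), while the fallback folds each 16-byte block in with an XOR followed by a GF(2^128) multiply against the hash subkey. A rough sketch of that fallback step using the generic helpers; the context layout here is invented for illustration.

#include <crypto/algapi.h>      /* crypto_xor() */
#include <crypto/gf128mul.h>

#define GHASH_SKETCH_BLOCK_SIZE 16

/* Illustrative context: running state plus the hash subkey H. */
struct ghash_sketch_ctx {
        be128 shash;    /* running GHASH state */
        be128 key;      /* hash subkey H */
};

static void ghash_sketch_block(struct ghash_sketch_ctx *ctx, const u8 *src)
{
        /* state ^= block; state = state * H in GF(2^128) */
        crypto_xor((u8 *)&ctx->shash, src, GHASH_SKETCH_BLOCK_SIZE);
        gf128mul_lle(&ctx->shash, &ctx->key);
}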
/drivers/infiniband/sw/rxe/
rxe_icrc.c:48 SHASH_DESC_ON_STACK(shash, rxe->tfm); in rxe_crc32()
50 shash->tfm = rxe->tfm; in rxe_crc32()
51 *(__be32 *)shash_desc_ctx(shash) = crc; in rxe_crc32()
52 err = crypto_shash_update(shash, next, len); in rxe_crc32()
58 icrc = *(__be32 *)shash_desc_ctx(shash); in rxe_crc32()
59 barrier_data(shash_desc_ctx(shash)); in rxe_crc32()
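
rxe_crc32() leans on the fact that the crc32 shash keeps its running CRC directly in the descriptor context: the caller seeds it through shash_desc_ctx(), runs one update, reads the result back out of the context, and then applies barrier_data() to the on-stack state. A hedged sketch of that idiom, assuming tfm is a "crc32" shash allocated elsewhere:

#include <crypto/hash.h>

static __be32 sketch_crc32(struct crypto_shash *tfm, __be32 seed,
                           const void *data, unsigned int len)
{
        SHASH_DESC_ON_STACK(shash, tfm);
        __be32 crc;

        shash->tfm = tfm;
        /* the crc32 driver stores its running CRC in the desc context */
        *(__be32 *)shash_desc_ctx(shash) = seed;

        if (crypto_shash_update(shash, data, len))
                return seed;    /* sketch: error handling trimmed */

        crc = *(__be32 *)shash_desc_ctx(shash);
        /* keep the compiler from caching/eliding the on-stack state */
        barrier_data(shash_desc_ctx(shash));
        return crc;
}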
/drivers/nvme/common/
auth.c:236 struct shash_desc *shash; in nvme_auth_transform_key() local
258 shash = kmalloc(sizeof(struct shash_desc) + in nvme_auth_transform_key()
261 if (!shash) { in nvme_auth_transform_key()
272 shash->tfm = key_tfm; in nvme_auth_transform_key()
276 ret = crypto_shash_init(shash); in nvme_auth_transform_key()
279 ret = crypto_shash_update(shash, nqn, strlen(nqn)); in nvme_auth_transform_key()
282 ret = crypto_shash_update(shash, "NVMe-over-Fabrics", 17); in nvme_auth_transform_key()
285 ret = crypto_shash_final(shash, transformed_key); in nvme_auth_transform_key()
289 kfree(shash); in nvme_auth_transform_key()
297 kfree(shash); in nvme_auth_transform_key()
/drivers/nvme/host/
auth.c:427 SHASH_DESC_ON_STACK(shash, chap->shash_tfm); in nvme_auth_dhchap_setup_host_response()
470 shash->tfm = chap->shash_tfm; in nvme_auth_dhchap_setup_host_response()
471 ret = crypto_shash_init(shash); in nvme_auth_dhchap_setup_host_response()
474 ret = crypto_shash_update(shash, challenge, chap->hash_len); in nvme_auth_dhchap_setup_host_response()
478 ret = crypto_shash_update(shash, buf, 4); in nvme_auth_dhchap_setup_host_response()
482 ret = crypto_shash_update(shash, buf, 2); in nvme_auth_dhchap_setup_host_response()
486 ret = crypto_shash_update(shash, buf, 1); in nvme_auth_dhchap_setup_host_response()
489 ret = crypto_shash_update(shash, "HostHost", 8); in nvme_auth_dhchap_setup_host_response()
492 ret = crypto_shash_update(shash, ctrl->opts->host->nqn, in nvme_auth_dhchap_setup_host_response()
496 ret = crypto_shash_update(shash, buf, 1); in nvme_auth_dhchap_setup_host_response()
[all …]
/drivers/net/ppp/
ppp_mppe.c:174 struct crypto_shash *shash; in mppe_alloc() local
187 shash = crypto_alloc_shash("sha1", 0, 0); in mppe_alloc()
188 if (IS_ERR(shash)) in mppe_alloc()
192 crypto_shash_descsize(shash), in mppe_alloc()
195 crypto_free_shash(shash); in mppe_alloc()
198 state->sha1->tfm = shash; in mppe_alloc()
200 digestsize = crypto_shash_digestsize(shash); in mppe_alloc()
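
mppe_alloc() shows the setup half of the same API: allocate the "sha1" transform once, size a matching shash_desc with crypto_shash_descsize(), and keep both for the life of the state. A hedged sketch of that setup and the matching teardown; the struct and field names are illustrative.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

struct sketch_mppe_state {
        struct shash_desc *sha1;
        unsigned int digestsize;
};

static int sketch_mppe_alloc(struct sketch_mppe_state *state)
{
        struct crypto_shash *shash;

        shash = crypto_alloc_shash("sha1", 0, 0);
        if (IS_ERR(shash))
                return PTR_ERR(shash);

        state->sha1 = kmalloc(sizeof(*state->sha1) + crypto_shash_descsize(shash),
                              GFP_KERNEL);
        if (!state->sha1) {
                crypto_free_shash(shash);
                return -ENOMEM;
        }

        state->sha1->tfm = shash;
        state->digestsize = crypto_shash_digestsize(shash);
        return 0;
}

static void sketch_mppe_free(struct sketch_mppe_state *state)
{
        crypto_free_shash(state->sha1->tfm);
        kfree(state->sha1);
}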
/drivers/crypto/bcm/
util.c:189 struct shash_desc shash; member
233 sdesc->shash.tfm = hash; in do_shash()
243 rc = crypto_shash_init(&sdesc->shash); in do_shash()
248 rc = crypto_shash_update(&sdesc->shash, data1, data1_len); in do_shash()
254 rc = crypto_shash_update(&sdesc->shash, data2, data2_len); in do_shash()
260 rc = crypto_shash_final(&sdesc->shash, result); in do_shash()
cipher.c:2041 ctx->shash = kmalloc(sizeof(*ctx->shash) + in ahash_init()
2043 if (!ctx->shash) { in ahash_init()
2047 ctx->shash->tfm = hash; in ahash_init()
2058 ret = crypto_shash_init(ctx->shash); in ahash_init()
2069 kfree(ctx->shash); in ahash_init()
2124 ret = crypto_shash_update(ctx->shash, tmpbuf, req->nbytes); in ahash_update()
2157 ret = crypto_shash_final(ctx->shash, req->result); in ahash_final()
2160 crypto_free_shash(ctx->shash->tfm); in ahash_final()
2161 kfree(ctx->shash); in ahash_final()
2222 ret = crypto_shash_finup(ctx->shash, tmpbuf, req->nbytes, in ahash_finup()
[all …]
cipher.h:236 struct shash_desc *shash; member
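
The bcm driver (and the octeontx/octeontx2 drivers further down) wraps the descriptor header and its per-transform state in one heap object so a single allocation covers both. A sketch of that wrapper and its allocator, matching the struct sdesc shape implied by util.c and cipher.h:

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* One allocation carrying the shash_desc header plus the transform state. */
struct sdesc_sketch {
        struct shash_desc shash;
        char ctx[];
};

static struct sdesc_sketch *sketch_alloc_sdesc(struct crypto_shash *tfm)
{
        struct sdesc_sketch *sdesc;

        sdesc = kmalloc(sizeof(*sdesc) + crypto_shash_descsize(tfm), GFP_KERNEL);
        if (!sdesc)
                return ERR_PTR(-ENOMEM);

        sdesc->shash.tfm = tfm;
        return sdesc;
}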
/drivers/thunderbolt/
domain.c:707 struct shash_desc *shash; in tb_domain_challenge_switch_key() local
731 shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(tfm), in tb_domain_challenge_switch_key()
733 if (!shash) { in tb_domain_challenge_switch_key()
738 shash->tfm = tfm; in tb_domain_challenge_switch_key()
741 ret = crypto_shash_digest(shash, challenge, sizeof(hmac), hmac); in tb_domain_challenge_switch_key()
752 kfree(shash); in tb_domain_challenge_switch_key()
757 kfree(shash); in tb_domain_challenge_switch_key()
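
tb_domain_challenge_switch_key() does the whole HMAC in one crypto_shash_digest() call: key the transform, allocate a descriptor, digest the challenge, free everything. A condensed sketch, assuming tfm is an "hmac(sha256)" shash allocated by the caller:

#include <crypto/hash.h>
#include <linux/slab.h>

static int sketch_hmac_challenge(struct crypto_shash *tfm,
                                 const u8 *key, unsigned int keylen,
                                 const u8 *challenge, unsigned int clen,
                                 u8 *hmac)
{
        struct shash_desc *shash;
        int ret;

        ret = crypto_shash_setkey(tfm, key, keylen);
        if (ret)
                return ret;

        shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(tfm), GFP_KERNEL);
        if (!shash)
                return -ENOMEM;
        shash->tfm = tfm;

        ret = crypto_shash_digest(shash, challenge, clen, hmac);
        kfree(shash);
        return ret;
}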
/drivers/crypto/
sa2ul.c:437 SHASH_DESC_ON_STACK(shash, data->ctx->shash); in sa_prepare_iopads()
438 int block_size = crypto_shash_blocksize(data->ctx->shash); in sa_prepare_iopads()
439 int digest_size = crypto_shash_digestsize(data->ctx->shash); in sa_prepare_iopads()
446 shash->tfm = data->ctx->shash; in sa_prepare_iopads()
450 crypto_shash_init(shash); in sa_prepare_iopads()
451 crypto_shash_update(shash, sha.k_pad, block_size); in sa_prepare_iopads()
452 sa_export_shash(&sha, shash, digest_size, ipad); in sa_prepare_iopads()
456 crypto_shash_init(shash); in sa_prepare_iopads()
457 crypto_shash_update(shash, sha.k_pad, block_size); in sa_prepare_iopads()
459 sa_export_shash(&sha, shash, digest_size, opad); in sa_prepare_iopads()
[all …]
omap-sham.c:165 struct crypto_shash *shash; member
1104 int bs = crypto_shash_blocksize(bctx->shash); in omap_sham_finish_hmac()
1105 int ds = crypto_shash_digestsize(bctx->shash); in omap_sham_finish_hmac()
1106 SHASH_DESC_ON_STACK(shash, bctx->shash); in omap_sham_finish_hmac()
1108 shash->tfm = bctx->shash; in omap_sham_finish_hmac()
1110 return crypto_shash_init(shash) ?: in omap_sham_finish_hmac()
1111 crypto_shash_update(shash, bctx->opad, bs) ?: in omap_sham_finish_hmac()
1112 crypto_shash_finup(shash, req->result, ds, req->result); in omap_sham_finish_hmac()
1289 int bs = crypto_shash_blocksize(bctx->shash); in omap_sham_setkey()
1290 int ds = crypto_shash_digestsize(bctx->shash); in omap_sham_setkey()
[all …]
sa2ul.h:315 struct crypto_shash *shash; member
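
sa2ul and omap-sham push the bulk hashing to hardware but use a software shash for the HMAC bookkeeping: deriving the ipad/opad blocks from the key and finishing the outer hash over the inner digest (the ?: chain in omap_sham_finish_hmac()). A hedged sketch of both halves, using the standard RFC 2104 pad values from <crypto/hmac.h>:

#include <crypto/hash.h>
#include <crypto/hmac.h>        /* HMAC_IPAD_VALUE / HMAC_OPAD_VALUE */
#include <linux/string.h>

/* Derive ipad/opad from a key already no longer than the block size. */
static void sketch_hmac_pads(const u8 *key, unsigned int keylen,
                             unsigned int bs, u8 *ipad, u8 *opad)
{
        unsigned int i;

        memcpy(ipad, key, keylen);
        memset(ipad + keylen, 0, bs - keylen);
        memcpy(opad, ipad, bs);

        for (i = 0; i < bs; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }
}

/* Finish the outer hash once the inner digest sits in result[]. */
static int sketch_hmac_finish(struct crypto_shash *tfm, const u8 *opad,
                              u8 *result)
{
        int bs = crypto_shash_blocksize(tfm);
        int ds = crypto_shash_digestsize(tfm);
        SHASH_DESC_ON_STACK(shash, tfm);

        shash->tfm = tfm;

        return crypto_shash_init(shash) ?:
               crypto_shash_update(shash, opad, bs) ?:
               crypto_shash_finup(shash, result, ds, result);
}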
/drivers/crypto/qat/qat_common/
qat_algs.c:117 SHASH_DESC_ON_STACK(shash, ctx->hash_tfm); in qat_alg_do_precomputes()
126 shash->tfm = ctx->hash_tfm; in qat_alg_do_precomputes()
129 int ret = crypto_shash_digest(shash, auth_key, in qat_alg_do_precomputes()
147 if (crypto_shash_init(shash)) in qat_alg_do_precomputes()
150 if (crypto_shash_update(shash, ctx->ipad, block_size)) in qat_alg_do_precomputes()
158 if (crypto_shash_export(shash, &ctx->sha1)) in qat_alg_do_precomputes()
164 if (crypto_shash_export(shash, &ctx->sha256)) in qat_alg_do_precomputes()
170 if (crypto_shash_export(shash, &ctx->sha512)) in qat_alg_do_precomputes()
179 if (crypto_shash_init(shash)) in qat_alg_do_precomputes()
182 if (crypto_shash_update(shash, ctx->opad, block_size)) in qat_alg_do_precomputes()
[all …]
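
qat_alg_do_precomputes() takes the same ipad/opad idea one step further: it hashes a single pad block and then exports the transform's partial state with crypto_shash_export(), so the hardware can resume the HMAC from that point. A hedged sketch for the SHA-1 case; struct sha1_state is the export format of the generic sha1 driver.

#include <crypto/hash.h>
#include <crypto/sha1.h>
#include <linux/errno.h>

/* Hash one pad block and export the partial state for a hardware engine
 * to resume from. "tfm" is assumed to be a plain "sha1" shash.
 */
static int sketch_export_pad_state(struct crypto_shash *tfm, const u8 *pad,
                                   unsigned int block_size,
                                   struct sha1_state *state)
{
        SHASH_DESC_ON_STACK(shash, tfm);

        shash->tfm = tfm;

        if (crypto_shash_init(shash))
                return -EFAULT;
        if (crypto_shash_update(shash, pad, block_size))
                return -EFAULT;
        return crypto_shash_export(shash, state);
}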
/drivers/crypto/aspeed/
aspeed-hace-hash.c:622 SHASH_DESC_ON_STACK(shash, tfm); in aspeed_sham_shash_digest()
624 shash->tfm = tfm; in aspeed_sham_shash_digest()
626 return crypto_shash_digest(shash, data, len, out); in aspeed_sham_shash_digest()
820 int ds = crypto_shash_digestsize(bctx->shash); in aspeed_sham_setkey()
821 int bs = crypto_shash_blocksize(bctx->shash); in aspeed_sham_setkey()
829 err = aspeed_sham_shash_digest(bctx->shash, in aspeed_sham_setkey()
830 crypto_shash_get_flags(bctx->shash), in aspeed_sham_setkey()
869 bctx->shash = crypto_alloc_shash(ast_alg->alg_base, 0, in aspeed_sham_cra_init()
871 if (IS_ERR(bctx->shash)) { in aspeed_sham_cra_init()
875 return PTR_ERR(bctx->shash); in aspeed_sham_cra_init()
[all …]
aspeed-hace.h:175 struct crypto_shash *shash; member
/drivers/md/
dm-ima.c:188 SHASH_DESC_ON_STACK(shash, NULL); in dm_ima_measure_on_table_load()
220 shash->tfm = tfm; in dm_ima_measure_on_table_load()
226 r = crypto_shash_init(shash); in dm_ima_measure_on_table_load()
275 r = crypto_shash_update(shash, (const u8 *)ima_buf, l); in dm_ima_measure_on_table_load()
316 r = crypto_shash_update(shash, (const u8 *)ima_buf, l); in dm_ima_measure_on_table_load()
326 r = crypto_shash_final(shash, digest); in dm_ima_measure_on_table_load()
/drivers/tee/
tee_core.c:148 struct crypto_shash *shash = NULL; in uuid_v5() local
152 shash = crypto_alloc_shash("sha1", 0, 0); in uuid_v5()
153 if (IS_ERR(shash)) { in uuid_v5()
154 rc = PTR_ERR(shash); in uuid_v5()
159 desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(shash), in uuid_v5()
166 desc->tfm = shash; in uuid_v5()
194 crypto_free_shash(shash); in uuid_v5()
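
uuid_v5() in tee_core.c builds an RFC 4122 version-5 UUID: SHA-1 over the namespace UUID followed by the name, then the first 16 bytes of the digest with the version and variant bits stamped in. The hashing part is the same heap-allocated descriptor flow as above; the distinctive finishing step looks roughly like the sketch below, which follows the standard v5 construction rather than the driver verbatim.

#include <linux/string.h>
#include <linux/uuid.h>

/* "hash" is the 20-byte SHA-1 of (namespace UUID || name). */
static void sketch_uuid_v5_finish(uuid_t *uuid, const u8 *hash)
{
        memcpy(uuid->b, hash, UUID_SIZE);
        uuid->b[6] = (hash[6] & 0x0F) | 0x50;   /* version 5 */
        uuid->b[8] = (hash[8] & 0x3F) | 0x80;   /* RFC 4122 variant */
}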
/drivers/crypto/ccp/
ccp-crypto-sha.c:276 struct crypto_shash *shash = ctx->u.sha.hmac_tfm; in ccp_sha_setkey() local
277 unsigned int block_size = crypto_shash_blocksize(shash); in ccp_sha_setkey()
278 unsigned int digest_size = crypto_shash_digestsize(shash); in ccp_sha_setkey()
291 ret = crypto_shash_tfm_digest(shash, key, key_len, in ccp_sha_setkey()
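
ccp_sha_setkey() uses the one-shot crypto_shash_tfm_digest() helper, which needs no caller-managed descriptor, to shrink an HMAC key that is longer than the block size down to a single digest before padding. A hedged sketch of that keylen check; tfm is the plain (non-HMAC) digest transform, as in the driver.

#include <crypto/hash.h>
#include <linux/string.h>

static int sketch_hmac_prepare_key(struct crypto_shash *tfm,
                                   const u8 *key, unsigned int keylen,
                                   u8 *block, unsigned int block_size)
{
        int ret;

        memset(block, 0, block_size);

        if (keylen > block_size) {
                /* key too long: replace it with its digest */
                ret = crypto_shash_tfm_digest(tfm, key, keylen, block);
                if (ret)
                        return ret;
        } else {
                memcpy(block, key, keylen);
        }

        return 0;
}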
/drivers/crypto/marvell/octeontx/
otx_cptvf_algs.c:687 sdesc->shash.tfm = alg; in alloc_sdesc()
779 ret = crypto_shash_digest(&ctx->sdesc->shash, ctx->key, in aead_hmac_init()
803 crypto_shash_init(&ctx->sdesc->shash); in aead_hmac_init()
804 crypto_shash_update(&ctx->sdesc->shash, ipad, bs); in aead_hmac_init()
805 crypto_shash_export(&ctx->sdesc->shash, ipad); in aead_hmac_init()
811 crypto_shash_init(&ctx->sdesc->shash); in aead_hmac_init()
812 crypto_shash_update(&ctx->sdesc->shash, opad, bs); in aead_hmac_init()
813 crypto_shash_export(&ctx->sdesc->shash, opad); in aead_hmac_init()
otx_cptvf_algs.h:166 struct shash_desc shash; member
/drivers/crypto/marvell/octeontx2/
otx2_cptvf_algs.c:743 sdesc->shash.tfm = alg; in alloc_sdesc()
834 ret = crypto_shash_digest(&ctx->sdesc->shash, ctx->key, in aead_hmac_init()
858 crypto_shash_init(&ctx->sdesc->shash); in aead_hmac_init()
859 crypto_shash_update(&ctx->sdesc->shash, ipad, bs); in aead_hmac_init()
860 crypto_shash_export(&ctx->sdesc->shash, ipad); in aead_hmac_init()
866 crypto_shash_init(&ctx->sdesc->shash); in aead_hmac_init()
867 crypto_shash_update(&ctx->sdesc->shash, opad, bs); in aead_hmac_init()
868 crypto_shash_export(&ctx->sdesc->shash, opad); in aead_hmac_init()
otx2_cptvf_algs.h:156 struct shash_desc shash; member
/drivers/crypto/chelsio/
chcr_algo.c:2202 SHASH_DESC_ON_STACK(shash, hmacctx->base_hash); in chcr_ahash_setkey()
2208 shash->tfm = hmacctx->base_hash; in chcr_ahash_setkey()
2210 err = crypto_shash_digest(shash, key, keylen, in chcr_ahash_setkey()
2231 err = chcr_compute_partial_hash(shash, hmacctx->ipad, in chcr_ahash_setkey()
2237 err = chcr_compute_partial_hash(shash, hmacctx->opad, in chcr_ahash_setkey()
3613 SHASH_DESC_ON_STACK(shash, base_hash); in chcr_authenc_setkey()
3615 shash->tfm = base_hash; in chcr_authenc_setkey()
3621 err = crypto_shash_digest(shash, keys.authkey, in chcr_authenc_setkey()
3638 if (chcr_compute_partial_hash(shash, pad, actx->h_iopad, in chcr_authenc_setkey()
3647 if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize)) in chcr_authenc_setkey()