Lines matching refs: walk
317 struct blkcipher_walk *walk) in ecb_aes_crypt() argument
319 int ret = blkcipher_walk_virt(desc, walk); in ecb_aes_crypt()
322 while ((nbytes = walk->nbytes)) { in ecb_aes_crypt()
325 u8 *out = walk->dst.virt.addr; in ecb_aes_crypt()
326 u8 *in = walk->src.virt.addr; in ecb_aes_crypt()
333 ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_aes_crypt()
344 struct blkcipher_walk walk; in ecb_aes_encrypt() local
349 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_encrypt()
350 return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk); in ecb_aes_encrypt()
358 struct blkcipher_walk walk; in ecb_aes_decrypt() local
363 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_decrypt()
364 return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk); in ecb_aes_decrypt()
446 struct blkcipher_walk *walk) in cbc_aes_crypt() argument
449 int ret = blkcipher_walk_virt(desc, walk); in cbc_aes_crypt()
450 unsigned int nbytes = walk->nbytes; in cbc_aes_crypt()
459 memcpy(param.iv, walk->iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
464 u8 *out = walk->dst.virt.addr; in cbc_aes_crypt()
465 u8 *in = walk->src.virt.addr; in cbc_aes_crypt()
472 ret = blkcipher_walk_done(desc, walk, nbytes); in cbc_aes_crypt()
473 } while ((nbytes = walk->nbytes)); in cbc_aes_crypt()
474 memcpy(walk->iv, param.iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
485 struct blkcipher_walk walk; in cbc_aes_encrypt() local
490 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_aes_encrypt()
491 return cbc_aes_crypt(desc, sctx->enc, &walk); in cbc_aes_encrypt()
499 struct blkcipher_walk walk; in cbc_aes_decrypt() local
504 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_aes_decrypt()
505 return cbc_aes_crypt(desc, sctx->dec, &walk); in cbc_aes_decrypt()
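
The CBC matches at 446-505 wrap the same loop around the KMC instruction, whose parameter block carries the chaining value next to the key; that is why the IV is copied in before the loop (line 459) and copied back out afterwards (line 474). A sketch under the same includes and helper assumptions as the ECB sketch above, with the param layout and the crypt_s390_kmc() wrapper treated as reconstructed details:

static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;				/* assumed layout: chaining value, then key */

	if (!nbytes)
		return ret;

	/* seed the chaining value from the walker's IV */
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only complete blocks go to the KMC instruction */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));

	/* hand the final chaining value back so chained requests continue correctly */
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}
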
619 struct blkcipher_walk *walk) in xts_aes_crypt() argument
622 int ret = blkcipher_walk_virt(desc, walk); in xts_aes_crypt()
623 unsigned int nbytes = walk->nbytes; in xts_aes_crypt()
638 memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak)); in xts_aes_crypt()
649 out = walk->dst.virt.addr; in xts_aes_crypt()
650 in = walk->src.virt.addr; in xts_aes_crypt()
657 ret = blkcipher_walk_done(desc, walk, nbytes); in xts_aes_crypt()
658 } while ((nbytes = walk->nbytes)); in xts_aes_crypt()
668 struct blkcipher_walk walk; in xts_aes_encrypt() local
673 blkcipher_walk_init(&walk, dst, src, nbytes); in xts_aes_encrypt()
674 return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk); in xts_aes_encrypt()
682 struct blkcipher_walk walk; in xts_aes_decrypt() local
687 blkcipher_walk_init(&walk, dst, src, nbytes); in xts_aes_decrypt()
688 return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk); in xts_aes_decrypt()
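
The XTS matches at 619-688 add a tweak step in front of the loop: walk->iv seeds a PCC parameter block (line 638) so the hardware can compute the initial tweak, and the data blocks then go through KM much as in the ECB case. In the sketch below the tweak computation is only indicated by a comment and the KM parameter block is simplified to xts_ctx->key; treat both, and the struct s390_xts_ctx fields, as placeholders for the real parameter handling.

static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	u8 *out, *in;

	if (!nbytes)
		return ret;

	/*
	 * Real driver: copy walk->iv into the PCC parameter block (the memcpy
	 * at line 638 of the matches), run PCC to compute the initial XTS
	 * tweak, then build the KM parameter block from the data key plus
	 * that tweak. Elided in this sketch.
	 */
	do {
		/* only complete blocks go to the KM instruction */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);

		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		/* parameter block simplified; see the comment above */
		ret = crypt_s390_km(func, xts_ctx->key, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));

	return ret;
}
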
779 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk) in ctr_aes_crypt() argument
781 int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE); in ctr_aes_crypt()
786 if (!walk->nbytes) in ctr_aes_crypt()
792 memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE); in ctr_aes_crypt()
793 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ctr_aes_crypt()
794 out = walk->dst.virt.addr; in ctr_aes_crypt()
795 in = walk->src.virt.addr; in ctr_aes_crypt()
816 ret = blkcipher_walk_done(desc, walk, nbytes); in ctr_aes_crypt()
822 memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE); in ctr_aes_crypt()
826 memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE); in ctr_aes_crypt()
832 out = walk->dst.virt.addr; in ctr_aes_crypt()
833 in = walk->src.virt.addr; in ctr_aes_crypt()
840 ret = blkcipher_walk_done(desc, walk, 0); in ctr_aes_crypt()
841 memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE); in ctr_aes_crypt()
852 struct blkcipher_walk walk; in ctr_aes_encrypt() local
854 blkcipher_walk_init(&walk, dst, src, nbytes); in ctr_aes_encrypt()
855 return ctr_aes_crypt(desc, sctx->enc, sctx, &walk); in ctr_aes_encrypt()
863 struct blkcipher_walk walk; in ctr_aes_decrypt() local
865 blkcipher_walk_init(&walk, dst, src, nbytes); in ctr_aes_decrypt()
866 return ctr_aes_crypt(desc, sctx->dec, sctx, &walk); in ctr_aes_decrypt()
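
The CTR matches at 779-866 use blkcipher_walk_virt_block() so the walker never hands back less than one block until the very end; full blocks run through KMCTR with the counter carried in walk->iv across calls, and a trailing partial block is produced by encrypting one more counter block into a scratch buffer. The sketch below processes one counter block per KMCTR call; the real driver batches counters in a shared page under a lock (which is why the matches show both ctrptr and ctrbuf), and the crypt_s390_kmctr() signature follows the historic crypt_s390.h wrappers as an assumption.

static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	u8 ctrbuf[AES_BLOCK_SIZE], buf[AES_BLOCK_SIZE];
	unsigned int nbytes;
	u8 *out, *in;

	if (!walk->nbytes)
		return ret;

	memcpy(ctrbuf, walk->iv, AES_BLOCK_SIZE);	/* running counter */
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			/* one counter block per KMCTR call in this sketch */
			ret = crypt_s390_kmctr(func, sctx->key, out, in,
					       AES_BLOCK_SIZE, ctrbuf);
			if (ret < 0 || ret != AES_BLOCK_SIZE)
				return -EIO;
			crypto_inc(ctrbuf, AES_BLOCK_SIZE);
			out += AES_BLOCK_SIZE;
			in += AES_BLOCK_SIZE;
			nbytes -= AES_BLOCK_SIZE;
		}
		/* leftover (< one block) stays with the walker for the tail path */
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);

	/* final partial block: encrypt one full counter block into a scratch
	 * buffer and copy out only the bytes actually present */
	if ((nbytes = walk->nbytes)) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrbuf);
		if (ret < 0 || ret != AES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrbuf, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
		memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
	}
	return ret;
}
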