Lines matching full:cc (identifier "cc", struct crypt_config, in drivers/md/dm-crypt.c)
65 struct crypt_config *cc; member
90 int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
92 void (*dtr)(struct crypt_config *cc);
93 int (*init)(struct crypt_config *cc);
94 int (*wipe)(struct crypt_config *cc);
95 int (*generator)(struct crypt_config *cc, u8 *iv,
97 int (*post)(struct crypt_config *cc, u8 *iv,
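Lines 90-97 outline the crypt_iv_operations table: each IV mode supplies optional ctr/dtr/init/wipe hooks plus a per-request generator and an optional post callback, all taking the crypt_config. As a rough, self-contained illustration of that dispatch pattern (the demo_* names and the trivial generator below are mine, not dm-crypt's), a minimal sketch:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-ins for the dm-crypt types referenced above. */
struct demo_config {
    unsigned int iv_size;
};

struct demo_request {
    uint64_t iv_sector;
};

/* Same shape as the ops table at lines 90-97: optional setup/teardown
 * hooks plus a per-request generator and post hook. */
struct demo_iv_operations {
    int (*init)(struct demo_config *cfg);
    int (*wipe)(struct demo_config *cfg);
    int (*generator)(struct demo_config *cfg, uint8_t *iv,
                     struct demo_request *req);
    int (*post)(struct demo_config *cfg, uint8_t *iv,
                struct demo_request *req);
};

/* A "null"-style generator: just zero the IV (cf. crypt_iv_null_gen()). */
static int demo_null_gen(struct demo_config *cfg, uint8_t *iv,
                         struct demo_request *req)
{
    (void)req;
    memset(iv, 0, cfg->iv_size);
    return 0;
}

static const struct demo_iv_operations demo_null_ops = {
    .generator = demo_null_gen,
};

int main(void)
{
    struct demo_config cfg = { .iv_size = 16 };
    struct demo_request req = { .iv_sector = 42 };
    uint8_t iv[16];
    const struct demo_iv_operations *ops = &demo_null_ops;

    /* Callers check each hook for NULL before dispatching, as dm-crypt
     * does with cc->iv_gen_ops->generator / ->post. */
    if (ops->init)
        ops->init(&cfg);
    if (ops->generator)
        ops->generator(&cfg, iv, &req);
    printf("iv[0] = %02x (zeroed)\n", iv[0]);
    return 0;
}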
232 static struct scatterlist *crypt_get_sg_data(struct crypt_config *cc,
238 static struct crypto_skcipher *any_tfm(struct crypt_config *cc) in any_tfm() argument
240 return cc->cipher_tfm.tfms[0]; in any_tfm()
243 static struct crypto_aead *any_tfm_aead(struct crypt_config *cc) in any_tfm_aead() argument
245 return cc->cipher_tfm.tfms_aead[0]; in any_tfm_aead()
298 static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain_gen() argument
301 memset(iv, 0, cc->iv_size); in crypt_iv_plain_gen()
307 static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain64_gen() argument
310 memset(iv, 0, cc->iv_size); in crypt_iv_plain64_gen()
316 static int crypt_iv_plain64be_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain64be_gen() argument
319 memset(iv, 0, cc->iv_size); in crypt_iv_plain64be_gen()
321 *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector); in crypt_iv_plain64be_gen()
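Lines 298-321 show the three "plain" IV flavours: each zeroes the IV buffer, then plain stores the low 32 bits of the sector, plain64 the full 64-bit sector, and plain64be the sector big-endian at the end of the IV (line 321). The little-endian stores for plain and plain64 are not among the matched lines; the sketch below assumes the usual dm-crypt behaviour there, and the store_* helpers are mine:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define IV_SIZE 16

/* Byte-order helpers; the kernel uses cpu_to_le32/le64/be64 instead. */
static void store_le32(uint8_t *p, uint32_t v)
{
    for (int i = 0; i < 4; i++)
        p[i] = (uint8_t)(v >> (8 * i));
}

static void store_le64(uint8_t *p, uint64_t v)
{
    for (int i = 0; i < 8; i++)
        p[i] = (uint8_t)(v >> (8 * i));
}

static void store_be64(uint8_t *p, uint64_t v)
{
    for (int i = 0; i < 8; i++)
        p[i] = (uint8_t)(v >> (8 * (7 - i)));
}

/* plain: zero IV, low 32 bits of the sector, little-endian, at the start. */
static void plain_iv(uint8_t *iv, uint64_t sector)
{
    memset(iv, 0, IV_SIZE);
    store_le32(iv, (uint32_t)(sector & 0xffffffff));
}

/* plain64: zero IV, full 64-bit sector, little-endian, at the start. */
static void plain64_iv(uint8_t *iv, uint64_t sector)
{
    memset(iv, 0, IV_SIZE);
    store_le64(iv, sector);
}

/* plain64be: zero IV, full 64-bit sector, big-endian, at the *end* of
 * the IV (line 321: &iv[cc->iv_size - sizeof(u64)]). */
static void plain64be_iv(uint8_t *iv, uint64_t sector)
{
    memset(iv, 0, IV_SIZE);
    store_be64(iv + IV_SIZE - 8, sector);
}

static void dump(const char *name, const uint8_t *iv)
{
    printf("%-10s", name);
    for (int i = 0; i < IV_SIZE; i++)
        printf(" %02x", iv[i]);
    printf("\n");
}

int main(void)
{
    uint8_t iv[IV_SIZE];
    uint64_t sector = 0x0102030405060708ULL;

    plain_iv(iv, sector);     dump("plain", iv);
    plain64_iv(iv, sector);   dump("plain64", iv);
    plain64be_iv(iv, sector); dump("plain64be", iv);
    return 0;
}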
327 static int crypt_iv_essiv_init(struct crypt_config *cc) in crypt_iv_essiv_init() argument
329 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_init()
337 err = crypto_shash_digest(desc, cc->key, cc->key_size, essiv->salt); in crypt_iv_essiv_init()
342 essiv_tfm = cc->iv_private; in crypt_iv_essiv_init()
353 static int crypt_iv_essiv_wipe(struct crypt_config *cc) in crypt_iv_essiv_wipe() argument
355 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_wipe()
362 essiv_tfm = cc->iv_private; in crypt_iv_essiv_wipe()
371 static struct crypto_cipher *alloc_essiv_cipher(struct crypt_config *cc, in alloc_essiv_cipher() argument
380 essiv_tfm = crypto_alloc_cipher(cc->cipher, 0, CRYPTO_ALG_ASYNC); in alloc_essiv_cipher()
386 if (crypto_cipher_blocksize(essiv_tfm) != cc->iv_size) { in alloc_essiv_cipher()
403 static void crypt_iv_essiv_dtr(struct crypt_config *cc) in crypt_iv_essiv_dtr() argument
406 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_dtr()
414 essiv_tfm = cc->iv_private; in crypt_iv_essiv_dtr()
419 cc->iv_private = NULL; in crypt_iv_essiv_dtr()
422 static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_essiv_ctr() argument
450 cc->iv_gen_private.essiv.salt = salt; in crypt_iv_essiv_ctr()
451 cc->iv_gen_private.essiv.hash_tfm = hash_tfm; in crypt_iv_essiv_ctr()
453 essiv_tfm = alloc_essiv_cipher(cc, ti, salt, in crypt_iv_essiv_ctr()
456 crypt_iv_essiv_dtr(cc); in crypt_iv_essiv_ctr()
459 cc->iv_private = essiv_tfm; in crypt_iv_essiv_ctr()
470 static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_essiv_gen() argument
473 struct crypto_cipher *essiv_tfm = cc->iv_private; in crypt_iv_essiv_gen()
475 memset(iv, 0, cc->iv_size); in crypt_iv_essiv_gen()
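Lines 327-475 sketch the ESSIV flow: at init the volume key is hashed into a salt (line 337), the salt becomes the key of a separate ESSIV block cipher (alloc_essiv_cipher(), line 380), and the per-request generator zeroes the IV, stores the sector, then encrypts that block with the salt-keyed cipher; the hash setkey and the final encrypt call fall outside the matched lines. A conceptual userspace sketch, with toy placeholders standing in for the real crypto_shash/crypto_cipher:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 16

/* Placeholder "block cipher": XOR with the key. NOT a real cipher;
 * dm-crypt uses the cipher named in cc->cipher via crypto_alloc_cipher(). */
static void toy_encrypt_block(const uint8_t key[BLOCK_SIZE],
                              uint8_t block[BLOCK_SIZE])
{
    for (int i = 0; i < BLOCK_SIZE; i++)
        block[i] ^= key[i];
}

/* Placeholder "hash": fold the key into BLOCK_SIZE bytes. dm-crypt uses
 * the hash named in the essiv options (e.g. sha256) via
 * crypto_shash_digest(), see line 337. */
static void toy_hash(const uint8_t *key, size_t key_len,
                     uint8_t salt[BLOCK_SIZE])
{
    memset(salt, 0, BLOCK_SIZE);
    for (size_t i = 0; i < key_len; i++)
        salt[i % BLOCK_SIZE] ^= key[i];
}

/* ESSIV: IV = E_salt(sector), where salt = H(volume key). */
static void essiv_iv(const uint8_t *key, size_t key_len,
                     uint64_t sector, uint8_t iv[BLOCK_SIZE])
{
    uint8_t salt[BLOCK_SIZE];

    toy_hash(key, key_len, salt);          /* crypt_iv_essiv_init()  */
    memset(iv, 0, BLOCK_SIZE);             /* crypt_iv_essiv_gen()   */
    for (int i = 0; i < 8; i++)            /* store sector, LE       */
        iv[i] = (uint8_t)(sector >> (8 * i));
    toy_encrypt_block(salt, iv);           /* encrypt with essiv_tfm */
}

int main(void)
{
    const uint8_t key[32] = { 1, 2, 3, 4 };
    uint8_t iv[BLOCK_SIZE];

    essiv_iv(key, sizeof(key), 42, iv);
    for (int i = 0; i < BLOCK_SIZE; i++)
        printf("%02x", iv[i]);
    printf("\n");
    return 0;
}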
482 static int crypt_iv_benbi_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_benbi_ctr() argument
488 if (test_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags)) in crypt_iv_benbi_ctr()
489 bs = crypto_aead_blocksize(any_tfm_aead(cc)); in crypt_iv_benbi_ctr()
491 bs = crypto_skcipher_blocksize(any_tfm(cc)); in crypt_iv_benbi_ctr()
507 cc->iv_gen_private.benbi.shift = 9 - log; in crypt_iv_benbi_ctr()
512 static void crypt_iv_benbi_dtr(struct crypt_config *cc) in crypt_iv_benbi_dtr() argument
516 static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_benbi_gen() argument
521 memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */ in crypt_iv_benbi_gen()
523 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
524 put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64))); in crypt_iv_benbi_gen()
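The benbi constructor (lines 488-507) derives a shift from the cipher block size, shift = 9 - log2(blocksize), so that each 512-byte sector maps to a count of cipher blocks, and the generator (lines 521-524) writes the big-endian 64-bit value (sector << shift) + 1 into the last 8 bytes of the IV. A small sketch of just that arithmetic (helper names are mine):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define IV_SIZE 16
#define SECTOR_SHIFT 9   /* 512-byte sectors */

static int ilog2u(unsigned int v)
{
    int log = -1;
    while (v) {
        v >>= 1;
        log++;
    }
    return log;
}

/* benbi: the IV is a 1-based big-endian count of cipher blocks.
 * shift = 9 - log2(blocksize), cf. line 507. */
static void benbi_iv(uint8_t iv[IV_SIZE], uint64_t sector,
                     unsigned int blocksize)
{
    int shift = SECTOR_SHIFT - ilog2u(blocksize);
    uint64_t val = (sector << shift) + 1;   /* cf. line 523 */

    memset(iv, 0, IV_SIZE);
    for (int i = 0; i < 8; i++)             /* big-endian, last 8 bytes */
        iv[IV_SIZE - 8 + i] = (uint8_t)(val >> (8 * (7 - i)));
}

int main(void)
{
    uint8_t iv[IV_SIZE];

    benbi_iv(iv, 3, 16);  /* sector 3, 16-byte blocks -> (3 << 5) + 1 = 97 */
    for (int i = 0; i < IV_SIZE; i++)
        printf("%02x", iv[i]);
    printf("\n");
    return 0;
}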
529 static int crypt_iv_null_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_null_gen() argument
532 memset(iv, 0, cc->iv_size); in crypt_iv_null_gen()
537 static void crypt_iv_lmk_dtr(struct crypt_config *cc) in crypt_iv_lmk_dtr() argument
539 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_dtr()
549 static int crypt_iv_lmk_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_lmk_ctr() argument
552 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_ctr()
554 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_lmk_ctr()
566 if (cc->key_parts == cc->tfms_count) { in crypt_iv_lmk_ctr()
573 crypt_iv_lmk_dtr(cc); in crypt_iv_lmk_ctr()
581 static int crypt_iv_lmk_init(struct crypt_config *cc) in crypt_iv_lmk_init() argument
583 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_init()
584 int subkey_size = cc->key_size / cc->key_parts; in crypt_iv_lmk_init()
588 memcpy(lmk->seed, cc->key + (cc->tfms_count * subkey_size), in crypt_iv_lmk_init()
594 static int crypt_iv_lmk_wipe(struct crypt_config *cc) in crypt_iv_lmk_wipe() argument
596 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_wipe()
604 static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_one() argument
608 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_one()
648 memcpy(iv, &md5state.hash, cc->iv_size); in crypt_iv_lmk_one()
653 static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_gen() argument
661 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_lmk_gen()
663 r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset); in crypt_iv_lmk_gen()
666 memset(iv, 0, cc->iv_size); in crypt_iv_lmk_gen()
671 static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_post() argument
681 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_lmk_post()
683 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset); in crypt_iv_lmk_post()
687 crypto_xor(dst + sg->offset, iv, cc->iv_size); in crypt_iv_lmk_post()
693 static void crypt_iv_tcw_dtr(struct crypt_config *cc) in crypt_iv_tcw_dtr() argument
695 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_dtr()
707 static int crypt_iv_tcw_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_tcw_ctr() argument
710 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_ctr()
712 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_tcw_ctr()
717 if (cc->key_size <= (cc->iv_size + TCW_WHITENING_SIZE)) { in crypt_iv_tcw_ctr()
728 tcw->iv_seed = kzalloc(cc->iv_size, GFP_KERNEL); in crypt_iv_tcw_ctr()
731 crypt_iv_tcw_dtr(cc); in crypt_iv_tcw_ctr()
739 static int crypt_iv_tcw_init(struct crypt_config *cc) in crypt_iv_tcw_init() argument
741 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_init()
742 int key_offset = cc->key_size - cc->iv_size - TCW_WHITENING_SIZE; in crypt_iv_tcw_init()
744 memcpy(tcw->iv_seed, &cc->key[key_offset], cc->iv_size); in crypt_iv_tcw_init()
745 memcpy(tcw->whitening, &cc->key[key_offset + cc->iv_size], in crypt_iv_tcw_init()
751 static int crypt_iv_tcw_wipe(struct crypt_config *cc) in crypt_iv_tcw_wipe() argument
753 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_wipe()
755 memset(tcw->iv_seed, 0, cc->iv_size); in crypt_iv_tcw_wipe()
761 static int crypt_iv_tcw_whitening(struct crypt_config *cc, in crypt_iv_tcw_whitening() argument
765 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_whitening()
800 static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_tcw_gen() argument
804 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_gen()
811 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_tcw_gen()
813 r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); in crypt_iv_tcw_gen()
819 if (cc->iv_size > 8) in crypt_iv_tcw_gen()
821 cc->iv_size - 8); in crypt_iv_tcw_gen()
826 static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv, in crypt_iv_tcw_post() argument
837 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_tcw_post()
839 r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset); in crypt_iv_tcw_post()
845 static int crypt_iv_random_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_random_gen() argument
849 get_random_bytes(iv, cc->iv_size); in crypt_iv_random_gen()
908 static bool crypt_integrity_aead(struct crypt_config *cc) in crypt_integrity_aead() argument
910 return test_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags); in crypt_integrity_aead()
913 static bool crypt_integrity_hmac(struct crypt_config *cc) in crypt_integrity_hmac() argument
915 return crypt_integrity_aead(cc) && cc->key_mac_size; in crypt_integrity_hmac()
919 static struct scatterlist *crypt_get_sg_data(struct crypt_config *cc, in crypt_get_sg_data() argument
922 if (unlikely(crypt_integrity_aead(cc))) in crypt_get_sg_data()
934 if (!bio_sectors(bio) || !io->cc->on_disk_tag_size) in dm_crypt_integrity_io_alloc()
941 tag_len = io->cc->on_disk_tag_size * (bio_sectors(bio) >> io->cc->sector_shift); in dm_crypt_integrity_io_alloc()
944 bip->bip_iter.bi_sector = io->cc->start + io->sector; in dm_crypt_integrity_io_alloc()
954 static int crypt_integrity_ctr(struct crypt_config *cc, struct dm_target *ti) in crypt_integrity_ctr() argument
957 struct blk_integrity *bi = blk_get_integrity(cc->dev->bdev->bd_disk); in crypt_integrity_ctr()
966 if (bi->tag_size != cc->on_disk_tag_size || in crypt_integrity_ctr()
967 bi->tuple_size != cc->on_disk_tag_size) { in crypt_integrity_ctr()
971 if (1 << bi->interval_exp != cc->sector_size) { in crypt_integrity_ctr()
976 if (crypt_integrity_aead(cc)) { in crypt_integrity_ctr()
977 cc->integrity_tag_size = cc->on_disk_tag_size - cc->integrity_iv_size; in crypt_integrity_ctr()
979 cc->integrity_tag_size, cc->integrity_iv_size); in crypt_integrity_ctr()
981 if (crypto_aead_setauthsize(any_tfm_aead(cc), cc->integrity_tag_size)) { in crypt_integrity_ctr()
985 } else if (cc->integrity_iv_size) in crypt_integrity_ctr()
987 cc->integrity_iv_size); in crypt_integrity_ctr()
989 if ((cc->integrity_tag_size + cc->integrity_iv_size) != bi->tag_size) { in crypt_integrity_ctr()
1001 static void crypt_convert_init(struct crypt_config *cc, in crypt_convert_init() argument
1012 ctx->cc_sector = sector + cc->iv_offset; in crypt_convert_init()
1016 static struct dm_crypt_request *dmreq_of_req(struct crypt_config *cc, in dmreq_of_req() argument
1019 return (struct dm_crypt_request *)((char *)req + cc->dmreq_start); in dmreq_of_req()
1022 static void *req_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq) in req_of_dmreq() argument
1024 return (void *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
1027 static u8 *iv_of_dmreq(struct crypt_config *cc, in iv_of_dmreq() argument
1030 if (crypt_integrity_aead(cc)) in iv_of_dmreq()
1032 crypto_aead_alignmask(any_tfm_aead(cc)) + 1); in iv_of_dmreq()
1035 crypto_skcipher_alignmask(any_tfm(cc)) + 1); in iv_of_dmreq()
1038 static u8 *org_iv_of_dmreq(struct crypt_config *cc, in org_iv_of_dmreq() argument
1041 return iv_of_dmreq(cc, dmreq) + cc->iv_size; in org_iv_of_dmreq()
1044 static uint64_t *org_sector_of_dmreq(struct crypt_config *cc, in org_sector_of_dmreq() argument
1047 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size; in org_sector_of_dmreq()
1051 static unsigned int *org_tag_of_dmreq(struct crypt_config *cc, in org_tag_of_dmreq() argument
1054 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + in org_tag_of_dmreq()
1055 cc->iv_size + sizeof(uint64_t); in org_tag_of_dmreq()
1059 static void *tag_from_dmreq(struct crypt_config *cc, in tag_from_dmreq() argument
1065 return &io->integrity_metadata[*org_tag_of_dmreq(cc, dmreq) * in tag_from_dmreq()
1066 cc->on_disk_tag_size]; in tag_from_dmreq()
1069 static void *iv_tag_from_dmreq(struct crypt_config *cc, in iv_tag_from_dmreq() argument
1072 return tag_from_dmreq(cc, dmreq) + cc->integrity_tag_size; in iv_tag_from_dmreq()
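The helpers at lines 1016-1072 all do pointer arithmetic inside one allocation from cc->req_pool: the skcipher/aead request comes first, a dm_crypt_request sits at cc->dmreq_start, and behind it (after alignment padding) live the working IV, the original IV, the original 64-bit sector, and the tag offset. A rough standalone picture of that layout, with made-up example sizes; the real offsets come from the padding computed in crypt_ctr() (lines 2730-2758):

#include <stdint.h>
#include <stdio.h>

/* Assumed example sizes; in dm-crypt these come from the crypto tfm's
 * reqsize/alignmask and the cipher's IV size (cc->iv_size). */
#define REQ_SIZE        128   /* skcipher_request + reqsize */
#define DMREQ_SIZE       48   /* struct dm_crypt_request    */
#define IV_SIZE          16   /* cc->iv_size                */
#define ALIGN_MASK       15   /* crypto alignmask           */

static size_t align_up(size_t off, size_t mask)
{
    return (off + mask) & ~mask;
}

int main(void)
{
    /* dmreq_start: crypto request rounded up to dm_crypt_request alignment */
    size_t dmreq_start = align_up(REQ_SIZE, 7);

    /* iv_of_dmreq(): first suitably aligned byte after the dm_crypt_request */
    size_t iv_off     = align_up(dmreq_start + DMREQ_SIZE, ALIGN_MASK);
    size_t org_iv_off = iv_off + IV_SIZE;                 /* org_iv_of_dmreq()     */
    size_t sector_off = org_iv_off + IV_SIZE;             /* org_sector_of_dmreq() */
    size_t tag_off    = sector_off + sizeof(uint64_t);    /* org_tag_of_dmreq()    */
    size_t total      = tag_off + sizeof(unsigned int);

    printf("request      @ 0\n");
    printf("dm_crypt_req @ %zu\n", dmreq_start);
    printf("iv           @ %zu\n", iv_off);
    printf("org_iv       @ %zu\n", org_iv_off);
    printf("org_sector   @ %zu\n", sector_off);
    printf("org_tag      @ %zu\n", tag_off);
    printf("pool element = %zu bytes\n", total);
    return 0;
}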
1075 static int crypt_convert_block_aead(struct crypt_config *cc, in crypt_convert_block_aead() argument
1087 BUG_ON(cc->integrity_iv_size && cc->integrity_iv_size != cc->iv_size); in crypt_convert_block_aead()
1090 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_aead()
1093 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_aead()
1095 if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) in crypt_convert_block_aead()
1096 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_aead()
1099 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_aead()
1101 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1102 *sector = cpu_to_le64(ctx->cc_sector - cc->iv_offset); in crypt_convert_block_aead()
1104 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1105 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1106 tag = tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1107 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1116 sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1117 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1118 sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1122 sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1123 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1124 sg_set_buf(&dmreq->sg_out[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1126 if (cc->iv_gen_ops) { in crypt_convert_block_aead()
1128 if (cc->integrity_iv_size && bio_data_dir(ctx->bio_in) != WRITE) { in crypt_convert_block_aead()
1129 memcpy(org_iv, tag_iv, cc->iv_size); in crypt_convert_block_aead()
1131 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_aead()
1135 if (cc->integrity_iv_size) in crypt_convert_block_aead()
1136 memcpy(tag_iv, org_iv, cc->iv_size); in crypt_convert_block_aead()
1139 memcpy(iv, org_iv, cc->iv_size); in crypt_convert_block_aead()
1142 aead_request_set_ad(req, sizeof(uint64_t) + cc->iv_size); in crypt_convert_block_aead()
1145 cc->sector_size, iv); in crypt_convert_block_aead()
1147 if (cc->integrity_tag_size + cc->integrity_iv_size != cc->on_disk_tag_size) in crypt_convert_block_aead()
1148 memset(tag + cc->integrity_tag_size + cc->integrity_iv_size, 0, in crypt_convert_block_aead()
1149 cc->on_disk_tag_size - (cc->integrity_tag_size + cc->integrity_iv_size)); in crypt_convert_block_aead()
1152 cc->sector_size + cc->integrity_tag_size, iv); in crypt_convert_block_aead()
1160 if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post) in crypt_convert_block_aead()
1161 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_aead()
1163 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_aead()
1164 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_aead()
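crypt_convert_block_aead() (lines 1075-1164) builds four-element scatterlists of the form [64-bit sector | org_iv | sector_size of data | tag] and declares the first two elements, sizeof(uint64_t) + cc->iv_size bytes, as associated data (line 1142). A tiny sketch of assembling that authenticated-but-unencrypted prefix in userspace; buffer and helper names are illustrative only:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define IV_SIZE 12   /* e.g. a GCM-style IV; cc->iv_size in dm-crypt */

/* The AEAD request authenticates, but does not encrypt, the little-endian
 * sector number followed by the original IV (aead_request_set_ad() at
 * line 1142: AAD length = sizeof(uint64_t) + cc->iv_size). */
static size_t build_aad(uint8_t *aad, uint64_t sector,
                        const uint8_t iv[IV_SIZE])
{
    for (int i = 0; i < 8; i++)
        aad[i] = (uint8_t)(sector >> (8 * i));   /* cpu_to_le64(sector) */
    memcpy(aad + 8, iv, IV_SIZE);
    return 8 + IV_SIZE;
}

int main(void)
{
    uint8_t iv[IV_SIZE] = { 0 };
    uint8_t aad[8 + IV_SIZE];
    size_t len = build_aad(aad, 42, iv);

    printf("AAD length = %zu bytes\n", len);
    return 0;
}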
1169 static int crypt_convert_block_skcipher(struct crypt_config *cc, in crypt_convert_block_skcipher() argument
1183 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_skcipher()
1186 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_skcipher()
1188 if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) in crypt_convert_block_skcipher()
1189 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_skcipher()
1192 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_skcipher()
1194 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1195 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1196 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1198 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1199 *sector = cpu_to_le64(ctx->cc_sector - cc->iv_offset); in crypt_convert_block_skcipher()
1206 sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_skcipher()
1209 sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_skcipher()
1211 if (cc->iv_gen_ops) { in crypt_convert_block_skcipher()
1213 if (cc->integrity_iv_size && bio_data_dir(ctx->bio_in) != WRITE) { in crypt_convert_block_skcipher()
1214 memcpy(org_iv, tag_iv, cc->integrity_iv_size); in crypt_convert_block_skcipher()
1216 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
1220 if (cc->integrity_iv_size) in crypt_convert_block_skcipher()
1221 memcpy(tag_iv, org_iv, cc->integrity_iv_size); in crypt_convert_block_skcipher()
1224 memcpy(iv, org_iv, cc->iv_size); in crypt_convert_block_skcipher()
1227 skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv); in crypt_convert_block_skcipher()
1234 if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post) in crypt_convert_block_skcipher()
1235 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
1237 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_skcipher()
1238 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_skcipher()
1246 static void crypt_alloc_req_skcipher(struct crypt_config *cc, in crypt_alloc_req_skcipher() argument
1249 unsigned key_index = ctx->cc_sector & (cc->tfms_count - 1); in crypt_alloc_req_skcipher()
1252 ctx->r.req = mempool_alloc(&cc->req_pool, GFP_NOIO); in crypt_alloc_req_skcipher()
1254 skcipher_request_set_tfm(ctx->r.req, cc->cipher_tfm.tfms[key_index]); in crypt_alloc_req_skcipher()
1262 kcryptd_async_done, dmreq_of_req(cc, ctx->r.req)); in crypt_alloc_req_skcipher()
1265 static void crypt_alloc_req_aead(struct crypt_config *cc, in crypt_alloc_req_aead() argument
1269 ctx->r.req_aead = mempool_alloc(&cc->req_pool, GFP_NOIO); in crypt_alloc_req_aead()
1271 aead_request_set_tfm(ctx->r.req_aead, cc->cipher_tfm.tfms_aead[0]); in crypt_alloc_req_aead()
1279 kcryptd_async_done, dmreq_of_req(cc, ctx->r.req_aead)); in crypt_alloc_req_aead()
1282 static void crypt_alloc_req(struct crypt_config *cc, in crypt_alloc_req() argument
1285 if (crypt_integrity_aead(cc)) in crypt_alloc_req()
1286 crypt_alloc_req_aead(cc, ctx); in crypt_alloc_req()
1288 crypt_alloc_req_skcipher(cc, ctx); in crypt_alloc_req()
1291 static void crypt_free_req_skcipher(struct crypt_config *cc, in crypt_free_req_skcipher() argument
1294 struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); in crypt_free_req_skcipher()
1297 mempool_free(req, &cc->req_pool); in crypt_free_req_skcipher()
1300 static void crypt_free_req_aead(struct crypt_config *cc, in crypt_free_req_aead() argument
1303 struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); in crypt_free_req_aead()
1306 mempool_free(req, &cc->req_pool); in crypt_free_req_aead()
1309 static void crypt_free_req(struct crypt_config *cc, void *req, struct bio *base_bio) in crypt_free_req() argument
1311 if (crypt_integrity_aead(cc)) in crypt_free_req()
1312 crypt_free_req_aead(cc, req, base_bio); in crypt_free_req()
1314 crypt_free_req_skcipher(cc, req, base_bio); in crypt_free_req()
1320 static blk_status_t crypt_convert(struct crypt_config *cc, in crypt_convert() argument
1324 unsigned int sector_step = cc->sector_size >> SECTOR_SHIFT; in crypt_convert()
1331 crypt_alloc_req(cc, ctx); in crypt_convert()
1334 if (crypt_integrity_aead(cc)) in crypt_convert()
1335 r = crypt_convert_block_aead(cc, ctx, ctx->r.req_aead, tag_offset); in crypt_convert()
1337 r = crypt_convert_block_skcipher(cc, ctx, ctx->r.req, tag_offset); in crypt_convert()
1384 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone);
1405 struct crypt_config *cc = io->cc; in crypt_alloc_buffer() local
1414 mutex_lock(&cc->bio_alloc_lock); in crypt_alloc_buffer()
1416 clone = bio_alloc_bioset(GFP_NOIO, nr_iovecs, &cc->bs); in crypt_alloc_buffer()
1425 page = mempool_alloc(&cc->page_pool, gfp_mask); in crypt_alloc_buffer()
1427 crypt_free_buffer_pages(cc, clone); in crypt_alloc_buffer()
1442 crypt_free_buffer_pages(cc, clone); in crypt_alloc_buffer()
1448 mutex_unlock(&cc->bio_alloc_lock); in crypt_alloc_buffer()
1453 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone) in crypt_free_buffer_pages() argument
1460 mempool_free(bv->bv_page, &cc->page_pool); in crypt_free_buffer_pages()
1464 static void crypt_io_init(struct dm_crypt_io *io, struct crypt_config *cc, in crypt_io_init() argument
1467 io->cc = cc; in crypt_io_init()
1488 struct crypt_config *cc = io->cc; in crypt_dec_pending() local
1496 crypt_free_req(cc, io->ctx.r.req, base_bio); in crypt_dec_pending()
1499 mempool_free(io->integrity_metadata, &io->cc->tag_pool); in crypt_dec_pending()
1527 struct crypt_config *cc = io->cc; in crypt_endio() local
1535 crypt_free_buffer_pages(cc, clone); in crypt_endio()
1553 struct crypt_config *cc = io->cc; in clone_init() local
1557 bio_set_dev(clone, cc->dev->bdev); in clone_init()
1563 struct crypt_config *cc = io->cc; in kcryptd_io_read() local
1572 clone = bio_clone_fast(io->base_bio, gfp, &cc->bs); in kcryptd_io_read()
1579 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_io_read()
1603 struct crypt_config *cc = io->cc; in kcryptd_queue_read() local
1606 queue_work(cc->io_queue, &io->work); in kcryptd_queue_read()
1620 struct crypt_config *cc = data; in dmcrypt_write() local
1627 spin_lock_irq(&cc->write_thread_lock); in dmcrypt_write()
1630 if (!RB_EMPTY_ROOT(&cc->write_tree)) in dmcrypt_write()
1635 spin_unlock_irq(&cc->write_thread_lock); in dmcrypt_write()
1645 spin_lock_irq(&cc->write_thread_lock); in dmcrypt_write()
1649 write_tree = cc->write_tree; in dmcrypt_write()
1650 cc->write_tree = RB_ROOT; in dmcrypt_write()
1651 spin_unlock_irq(&cc->write_thread_lock); in dmcrypt_write()
1673 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_io_submit() local
1679 crypt_free_buffer_pages(cc, clone); in kcryptd_crypt_write_io_submit()
1688 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_crypt_write_io_submit()
1690 if (likely(!async) && test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) { in kcryptd_crypt_write_io_submit()
1695 spin_lock_irqsave(&cc->write_thread_lock, flags); in kcryptd_crypt_write_io_submit()
1696 if (RB_EMPTY_ROOT(&cc->write_tree)) in kcryptd_crypt_write_io_submit()
1697 wake_up_process(cc->write_thread); in kcryptd_crypt_write_io_submit()
1698 rbp = &cc->write_tree.rb_node; in kcryptd_crypt_write_io_submit()
1709 rb_insert_color(&io->rb_node, &cc->write_tree); in kcryptd_crypt_write_io_submit()
1710 spin_unlock_irqrestore(&cc->write_thread_lock, flags); in kcryptd_crypt_write_io_submit()
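dmcrypt_write() and kcryptd_crypt_write_io_submit() (lines 1620-1710) keep encrypted-and-ready writes in an rbtree keyed by sector; a dedicated thread grabs the whole tree under write_thread_lock and submits the bios in sector order (unless DM_CRYPT_NO_OFFLOAD makes the submitter issue them directly). The ordering effect is illustrated by the much simpler sort below; dm-crypt uses an rbtree plus a wakeup, not qsort, and pending_write is a made-up stand-in:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Stand-in for a dm_crypt_io queued for write-out. */
struct pending_write {
    uint64_t sector;   /* io->sector: where the encrypted bio will land */
};

static int cmp_sector(const void *a, const void *b)
{
    const struct pending_write *x = a, *y = b;
    return (x->sector > y->sector) - (x->sector < y->sector);
}

int main(void)
{
    /* Writes finish encryption in arbitrary order... */
    struct pending_write tree[] = { {70}, {12}, {45}, {13}, {90} };
    size_t n = sizeof(tree) / sizeof(tree[0]);

    /* ...but are submitted sorted by sector, so the underlying device
     * sees mostly sequential I/O. */
    qsort(tree, n, sizeof(tree[0]), cmp_sector);
    for (size_t i = 0; i < n; i++)
        printf("submit bio at sector %llu\n",
               (unsigned long long)tree[i].sector);
    return 0;
}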
1715 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_convert() local
1725 crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector); in kcryptd_crypt_write_convert()
1739 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_write_convert()
1761 struct crypt_config *cc = io->cc; in kcryptd_crypt_read_convert() local
1766 crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio, in kcryptd_crypt_read_convert()
1769 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_read_convert()
1785 struct crypt_config *cc = io->cc; in kcryptd_async_done() local
1797 if (!error && cc->iv_gen_ops && cc->iv_gen_ops->post) in kcryptd_async_done()
1798 error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
1802 (unsigned long long)le64_to_cpu(*org_sector_of_dmreq(cc, dmreq))); in kcryptd_async_done()
1807 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
1830 struct crypt_config *cc = io->cc; in kcryptd_queue_crypt() local
1833 queue_work(cc->crypt_queue, &io->work); in kcryptd_queue_crypt()
1836 static void crypt_free_tfms_aead(struct crypt_config *cc) in crypt_free_tfms_aead() argument
1838 if (!cc->cipher_tfm.tfms_aead) in crypt_free_tfms_aead()
1841 if (cc->cipher_tfm.tfms_aead[0] && !IS_ERR(cc->cipher_tfm.tfms_aead[0])) { in crypt_free_tfms_aead()
1842 crypto_free_aead(cc->cipher_tfm.tfms_aead[0]); in crypt_free_tfms_aead()
1843 cc->cipher_tfm.tfms_aead[0] = NULL; in crypt_free_tfms_aead()
1846 kfree(cc->cipher_tfm.tfms_aead); in crypt_free_tfms_aead()
1847 cc->cipher_tfm.tfms_aead = NULL; in crypt_free_tfms_aead()
1850 static void crypt_free_tfms_skcipher(struct crypt_config *cc) in crypt_free_tfms_skcipher() argument
1854 if (!cc->cipher_tfm.tfms) in crypt_free_tfms_skcipher()
1857 for (i = 0; i < cc->tfms_count; i++) in crypt_free_tfms_skcipher()
1858 if (cc->cipher_tfm.tfms[i] && !IS_ERR(cc->cipher_tfm.tfms[i])) { in crypt_free_tfms_skcipher()
1859 crypto_free_skcipher(cc->cipher_tfm.tfms[i]); in crypt_free_tfms_skcipher()
1860 cc->cipher_tfm.tfms[i] = NULL; in crypt_free_tfms_skcipher()
1863 kfree(cc->cipher_tfm.tfms); in crypt_free_tfms_skcipher()
1864 cc->cipher_tfm.tfms = NULL; in crypt_free_tfms_skcipher()
1867 static void crypt_free_tfms(struct crypt_config *cc) in crypt_free_tfms() argument
1869 if (crypt_integrity_aead(cc)) in crypt_free_tfms()
1870 crypt_free_tfms_aead(cc); in crypt_free_tfms()
1872 crypt_free_tfms_skcipher(cc); in crypt_free_tfms()
1875 static int crypt_alloc_tfms_skcipher(struct crypt_config *cc, char *ciphermode) in crypt_alloc_tfms_skcipher() argument
1880 cc->cipher_tfm.tfms = kcalloc(cc->tfms_count, in crypt_alloc_tfms_skcipher()
1883 if (!cc->cipher_tfm.tfms) in crypt_alloc_tfms_skcipher()
1886 for (i = 0; i < cc->tfms_count; i++) { in crypt_alloc_tfms_skcipher()
1887 cc->cipher_tfm.tfms[i] = crypto_alloc_skcipher(ciphermode, 0, 0); in crypt_alloc_tfms_skcipher()
1888 if (IS_ERR(cc->cipher_tfm.tfms[i])) { in crypt_alloc_tfms_skcipher()
1889 err = PTR_ERR(cc->cipher_tfm.tfms[i]); in crypt_alloc_tfms_skcipher()
1890 crypt_free_tfms(cc); in crypt_alloc_tfms_skcipher()
1898 static int crypt_alloc_tfms_aead(struct crypt_config *cc, char *ciphermode) in crypt_alloc_tfms_aead() argument
1902 cc->cipher_tfm.tfms = kmalloc(sizeof(struct crypto_aead *), GFP_KERNEL); in crypt_alloc_tfms_aead()
1903 if (!cc->cipher_tfm.tfms) in crypt_alloc_tfms_aead()
1906 cc->cipher_tfm.tfms_aead[0] = crypto_alloc_aead(ciphermode, 0, 0); in crypt_alloc_tfms_aead()
1907 if (IS_ERR(cc->cipher_tfm.tfms_aead[0])) { in crypt_alloc_tfms_aead()
1908 err = PTR_ERR(cc->cipher_tfm.tfms_aead[0]); in crypt_alloc_tfms_aead()
1909 crypt_free_tfms(cc); in crypt_alloc_tfms_aead()
1916 static int crypt_alloc_tfms(struct crypt_config *cc, char *ciphermode) in crypt_alloc_tfms() argument
1918 if (crypt_integrity_aead(cc)) in crypt_alloc_tfms()
1919 return crypt_alloc_tfms_aead(cc, ciphermode); in crypt_alloc_tfms()
1921 return crypt_alloc_tfms_skcipher(cc, ciphermode); in crypt_alloc_tfms()
1924 static unsigned crypt_subkey_size(struct crypt_config *cc) in crypt_subkey_size() argument
1926 return (cc->key_size - cc->key_extra_size) >> ilog2(cc->tfms_count); in crypt_subkey_size()
1929 static unsigned crypt_authenckey_size(struct crypt_config *cc) in crypt_authenckey_size() argument
1931 return crypt_subkey_size(cc) + RTA_SPACE(sizeof(struct crypto_authenc_key_param)); in crypt_authenckey_size()
1937 * This function converts cc->key to this special format.
1956 static int crypt_setkey(struct crypt_config *cc) in crypt_setkey() argument
1962 subkey_size = crypt_subkey_size(cc); in crypt_setkey()
1964 if (crypt_integrity_hmac(cc)) { in crypt_setkey()
1965 if (subkey_size < cc->key_mac_size) in crypt_setkey()
1968 crypt_copy_authenckey(cc->authenc_key, cc->key, in crypt_setkey()
1969 subkey_size - cc->key_mac_size, in crypt_setkey()
1970 cc->key_mac_size); in crypt_setkey()
1973 for (i = 0; i < cc->tfms_count; i++) { in crypt_setkey()
1974 if (crypt_integrity_hmac(cc)) in crypt_setkey()
1975 r = crypto_aead_setkey(cc->cipher_tfm.tfms_aead[i], in crypt_setkey()
1976 cc->authenc_key, crypt_authenckey_size(cc)); in crypt_setkey()
1977 else if (crypt_integrity_aead(cc)) in crypt_setkey()
1978 r = crypto_aead_setkey(cc->cipher_tfm.tfms_aead[i], in crypt_setkey()
1979 cc->key + (i * subkey_size), in crypt_setkey()
1982 r = crypto_skcipher_setkey(cc->cipher_tfm.tfms[i], in crypt_setkey()
1983 cc->key + (i * subkey_size), in crypt_setkey()
1989 if (crypt_integrity_hmac(cc)) in crypt_setkey()
1990 memzero_explicit(cc->authenc_key, crypt_authenckey_size(cc)); in crypt_setkey()
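crypt_subkey_size() and crypt_setkey() (lines 1924-1990) split the supplied key evenly across tfms_count transforms after reserving key_extra_size bytes for IV material (lmk/tcw) or the HMAC key: transform i gets the slice starting at i * subkey_size. A small sketch of that slicing, with values chosen purely for illustration:

#include <stdio.h>

int main(void)
{
    /* Example: a two-key (aes:2-style) setup with a tcw-like extra chunk. */
    unsigned int key_size = 96;       /* total key bytes supplied          */
    unsigned int key_extra_size = 32; /* e.g. IV seed + whitening for tcw  */
    unsigned int tfms_count = 2;      /* power of two, cf. line 2488       */

    /* crypt_subkey_size(): (key_size - key_extra_size) >> ilog2(tfms_count) */
    unsigned int subkey_size = (key_size - key_extra_size) / tfms_count;

    for (unsigned int i = 0; i < tfms_count; i++)
        printf("tfm %u: key bytes [%u, %u)\n",
               i, i * subkey_size, (i + 1) * subkey_size);
    printf("extra (IV/whitening) bytes: [%u, %u)\n",
           tfms_count * subkey_size, key_size);
    return 0;
}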
2005 static int crypt_set_keyring_key(struct crypt_config *cc, const char *key_string) in crypt_set_keyring_key() argument
2051 if (cc->key_size != ukp->datalen) { in crypt_set_keyring_key()
2058 memcpy(cc->key, ukp->data, cc->key_size); in crypt_set_keyring_key()
2064 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_keyring_key()
2066 ret = crypt_setkey(cc); in crypt_set_keyring_key()
2069 set_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_keyring_key()
2070 kzfree(cc->key_string); in crypt_set_keyring_key()
2071 cc->key_string = new_key_string; in crypt_set_keyring_key()
2103 static int crypt_set_keyring_key(struct crypt_config *cc, const char *key_string) in crypt_set_keyring_key() argument
2115 static int crypt_set_key(struct crypt_config *cc, char *key) in crypt_set_key() argument
2121 if (!cc->key_size && strcmp(key, "-")) in crypt_set_key()
2126 r = crypt_set_keyring_key(cc, key + 1); in crypt_set_key()
2131 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_key()
2134 kzfree(cc->key_string); in crypt_set_key()
2135 cc->key_string = NULL; in crypt_set_key()
2138 if (cc->key_size && hex2bin(cc->key, key, cc->key_size) < 0) in crypt_set_key()
2141 r = crypt_setkey(cc); in crypt_set_key()
2143 set_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_key()
2152 static int crypt_wipe_key(struct crypt_config *cc) in crypt_wipe_key() argument
2156 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_wipe_key()
2157 get_random_bytes(&cc->key, cc->key_size); in crypt_wipe_key()
2158 kzfree(cc->key_string); in crypt_wipe_key()
2159 cc->key_string = NULL; in crypt_wipe_key()
2160 r = crypt_setkey(cc); in crypt_wipe_key()
2161 memset(&cc->key, 0, cc->key_size * sizeof(u8)); in crypt_wipe_key()
2181 struct crypt_config *cc = pool_data; in crypt_page_alloc() local
2184 if (unlikely(percpu_counter_compare(&cc->n_allocated_pages, dm_crypt_pages_per_client) >= 0) && in crypt_page_alloc()
2190 percpu_counter_add(&cc->n_allocated_pages, 1); in crypt_page_alloc()
2197 struct crypt_config *cc = pool_data; in crypt_page_free() local
2200 percpu_counter_sub(&cc->n_allocated_pages, 1); in crypt_page_free()
2205 struct crypt_config *cc = ti->private; in crypt_dtr() local
2209 if (!cc) in crypt_dtr()
2212 if (cc->write_thread) in crypt_dtr()
2213 kthread_stop(cc->write_thread); in crypt_dtr()
2215 if (cc->io_queue) in crypt_dtr()
2216 destroy_workqueue(cc->io_queue); in crypt_dtr()
2217 if (cc->crypt_queue) in crypt_dtr()
2218 destroy_workqueue(cc->crypt_queue); in crypt_dtr()
2220 crypt_free_tfms(cc); in crypt_dtr()
2222 bioset_exit(&cc->bs); in crypt_dtr()
2224 mempool_exit(&cc->page_pool); in crypt_dtr()
2225 mempool_exit(&cc->req_pool); in crypt_dtr()
2226 mempool_exit(&cc->tag_pool); in crypt_dtr()
2228 WARN_ON(percpu_counter_sum(&cc->n_allocated_pages) != 0); in crypt_dtr()
2229 percpu_counter_destroy(&cc->n_allocated_pages); in crypt_dtr()
2231 if (cc->iv_gen_ops && cc->iv_gen_ops->dtr) in crypt_dtr()
2232 cc->iv_gen_ops->dtr(cc); in crypt_dtr()
2234 if (cc->dev) in crypt_dtr()
2235 dm_put_device(ti, cc->dev); in crypt_dtr()
2237 kzfree(cc->cipher); in crypt_dtr()
2238 kzfree(cc->cipher_string); in crypt_dtr()
2239 kzfree(cc->key_string); in crypt_dtr()
2240 kzfree(cc->cipher_auth); in crypt_dtr()
2241 kzfree(cc->authenc_key); in crypt_dtr()
2243 mutex_destroy(&cc->bio_alloc_lock); in crypt_dtr()
2246 kzfree(cc); in crypt_dtr()
2257 struct crypt_config *cc = ti->private; in crypt_ctr_ivmode() local
2259 if (crypt_integrity_aead(cc)) in crypt_ctr_ivmode()
2260 cc->iv_size = crypto_aead_ivsize(any_tfm_aead(cc)); in crypt_ctr_ivmode()
2262 cc->iv_size = crypto_skcipher_ivsize(any_tfm(cc)); in crypt_ctr_ivmode()
2264 if (cc->iv_size) in crypt_ctr_ivmode()
2266 cc->iv_size = max(cc->iv_size, in crypt_ctr_ivmode()
2275 cc->iv_gen_ops = NULL; in crypt_ctr_ivmode()
2277 cc->iv_gen_ops = &crypt_iv_plain_ops; in crypt_ctr_ivmode()
2279 cc->iv_gen_ops = &crypt_iv_plain64_ops; in crypt_ctr_ivmode()
2281 cc->iv_gen_ops = &crypt_iv_plain64be_ops; in crypt_ctr_ivmode()
2283 cc->iv_gen_ops = &crypt_iv_essiv_ops; in crypt_ctr_ivmode()
2285 cc->iv_gen_ops = &crypt_iv_benbi_ops; in crypt_ctr_ivmode()
2287 cc->iv_gen_ops = &crypt_iv_null_ops; in crypt_ctr_ivmode()
2289 cc->iv_gen_ops = &crypt_iv_lmk_ops; in crypt_ctr_ivmode()
2296 if (cc->key_size % cc->key_parts) { in crypt_ctr_ivmode()
2297 cc->key_parts++; in crypt_ctr_ivmode()
2298 cc->key_extra_size = cc->key_size / cc->key_parts; in crypt_ctr_ivmode()
2301 cc->iv_gen_ops = &crypt_iv_tcw_ops; in crypt_ctr_ivmode()
2302 cc->key_parts += 2; /* IV + whitening */ in crypt_ctr_ivmode()
2303 cc->key_extra_size = cc->iv_size + TCW_WHITENING_SIZE; in crypt_ctr_ivmode()
2305 cc->iv_gen_ops = &crypt_iv_random_ops; in crypt_ctr_ivmode()
2307 cc->integrity_iv_size = cc->iv_size; in crypt_ctr_ivmode()
2318 * The cc->cipher is currently used only in ESSIV.
2321 static int crypt_ctr_blkdev_cipher(struct crypt_config *cc) in crypt_ctr_blkdev_cipher() argument
2326 if (crypt_integrity_aead(cc)) { in crypt_ctr_blkdev_cipher()
2327 alg_name = crypto_tfm_alg_name(crypto_aead_tfm(any_tfm_aead(cc))); in crypt_ctr_blkdev_cipher()
2330 if (crypt_integrity_hmac(cc)) { in crypt_ctr_blkdev_cipher()
2337 alg_name = crypto_tfm_alg_name(crypto_skcipher_tfm(any_tfm(cc))); in crypt_ctr_blkdev_cipher()
2346 cc->cipher = kstrdup(alg_name, GFP_KERNEL); in crypt_ctr_blkdev_cipher()
2347 return cc->cipher ? 0 : -ENOMEM; in crypt_ctr_blkdev_cipher()
2353 cc->cipher = kzalloc(end - start + 1, GFP_KERNEL); in crypt_ctr_blkdev_cipher()
2354 if (!cc->cipher) in crypt_ctr_blkdev_cipher()
2357 strncpy(cc->cipher, start, end - start); in crypt_ctr_blkdev_cipher()
2367 static int crypt_ctr_auth_cipher(struct crypt_config *cc, char *cipher_api) in crypt_ctr_auth_cipher() argument
2391 cc->key_mac_size = crypto_ahash_digestsize(mac); in crypt_ctr_auth_cipher()
2394 cc->authenc_key = kmalloc(crypt_authenckey_size(cc), GFP_KERNEL); in crypt_ctr_auth_cipher()
2395 if (!cc->authenc_key) in crypt_ctr_auth_cipher()
2404 struct crypt_config *cc = ti->private; in crypt_ctr_cipher_new() local
2408 cc->tfms_count = 1; in crypt_ctr_cipher_new()
2432 cc->tfms_count = 64; in crypt_ctr_cipher_new()
2434 cc->key_parts = cc->tfms_count; in crypt_ctr_cipher_new()
2437 ret = crypt_alloc_tfms(cc, cipher_api); in crypt_ctr_cipher_new()
2444 if (crypt_integrity_aead(cc)) { in crypt_ctr_cipher_new()
2445 ret = crypt_ctr_auth_cipher(cc, cipher_api); in crypt_ctr_cipher_new()
2450 cc->iv_size = crypto_aead_ivsize(any_tfm_aead(cc)); in crypt_ctr_cipher_new()
2452 cc->iv_size = crypto_skcipher_ivsize(any_tfm(cc)); in crypt_ctr_cipher_new()
2454 ret = crypt_ctr_blkdev_cipher(cc); in crypt_ctr_cipher_new()
2466 struct crypt_config *cc = ti->private; in crypt_ctr_cipher_old() local
2472 if (strchr(cipher_in, '(') || crypt_integrity_aead(cc)) { in crypt_ctr_cipher_old()
2486 cc->tfms_count = 1; in crypt_ctr_cipher_old()
2487 else if (sscanf(keycount, "%u%c", &cc->tfms_count, &dummy) != 1 || in crypt_ctr_cipher_old()
2488 !is_power_of_2(cc->tfms_count)) { in crypt_ctr_cipher_old()
2492 cc->key_parts = cc->tfms_count; in crypt_ctr_cipher_old()
2494 cc->cipher = kstrdup(cipher, GFP_KERNEL); in crypt_ctr_cipher_old()
2495 if (!cc->cipher) in crypt_ctr_cipher_old()
2528 ret = crypt_alloc_tfms(cc, cipher_api); in crypt_ctr_cipher_old()
2544 struct crypt_config *cc = ti->private; in crypt_ctr_cipher() local
2548 cc->cipher_string = kstrdup(cipher_in, GFP_KERNEL); in crypt_ctr_cipher()
2549 if (!cc->cipher_string) { in crypt_ctr_cipher()
2567 ret = crypt_set_key(cc, key); in crypt_ctr_cipher()
2574 if (cc->iv_gen_ops && cc->iv_gen_ops->ctr) { in crypt_ctr_cipher()
2575 ret = cc->iv_gen_ops->ctr(cc, ti, ivopts); in crypt_ctr_cipher()
2583 if (cc->iv_gen_ops && cc->iv_gen_ops->init) { in crypt_ctr_cipher()
2584 ret = cc->iv_gen_ops->init(cc); in crypt_ctr_cipher()
2592 if (cc->key_string) in crypt_ctr_cipher()
2593 memset(cc->key, 0, cc->key_size * sizeof(u8)); in crypt_ctr_cipher()
2600 struct crypt_config *cc = ti->private; in crypt_ctr_optional() local
2629 set_bit(DM_CRYPT_SAME_CPU, &cc->flags); in crypt_ctr_optional()
2632 set_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); in crypt_ctr_optional()
2638 cc->on_disk_tag_size = val; in crypt_ctr_optional()
2641 set_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags); in crypt_ctr_optional()
2647 cc->cipher_auth = kstrdup(sval, GFP_KERNEL); in crypt_ctr_optional()
2648 if (!cc->cipher_auth) in crypt_ctr_optional()
2650 } else if (sscanf(opt_string, "sector_size:%hu%c", &cc->sector_size, &dummy) == 1) { in crypt_ctr_optional()
2651 if (cc->sector_size < (1 << SECTOR_SHIFT) || in crypt_ctr_optional()
2652 cc->sector_size > 4096 || in crypt_ctr_optional()
2653 (cc->sector_size & (cc->sector_size - 1))) { in crypt_ctr_optional()
2657 if (ti->len & ((cc->sector_size >> SECTOR_SHIFT) - 1)) { in crypt_ctr_optional()
2661 cc->sector_shift = __ffs(cc->sector_size) - SECTOR_SHIFT; in crypt_ctr_optional()
2663 set_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags); in crypt_ctr_optional()
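The sector_size: option (lines 2650-2663) allows encryption units larger than 512 bytes: the value must be a power of two between 512 and 4096, and cc->sector_shift = __ffs(sector_size) - SECTOR_SHIFT is later used to scale 512-byte sector counts, e.g. when sizing the integrity tag area (tag_len = on_disk_tag_size * (bio_sectors >> sector_shift), lines 941 and 2903). A quick sketch of that validation and scaling, with example numbers:

#include <stdio.h>

#define SECTOR_SHIFT 9

/* __ffs()-style helper: index of the lowest set bit (non-zero input). */
static unsigned int ffs_index(unsigned int v)
{
    unsigned int i = 0;
    while (!(v & 1)) {
        v >>= 1;
        i++;
    }
    return i;
}

int main(void)
{
    unsigned int sector_size = 4096;
    unsigned int on_disk_tag_size = 28;  /* e.g. AEAD tag + integrity IV */
    unsigned int bio_sectors = 64;       /* 512-byte sectors in the bio  */

    /* Validation as in crypt_ctr_optional(): power of two in [512, 4096]. */
    if (sector_size < (1 << SECTOR_SHIFT) || sector_size > 4096 ||
        (sector_size & (sector_size - 1))) {
        fprintf(stderr, "invalid sector_size\n");
        return 1;
    }

    unsigned int sector_shift = ffs_index(sector_size) - SECTOR_SHIFT;
    unsigned int tag_len = on_disk_tag_size * (bio_sectors >> sector_shift);

    printf("sector_shift = %u, tag_len = %u bytes\n", sector_shift, tag_len);
    return 0;
}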
2679 struct crypt_config *cc; in crypt_ctr() local
2698 cc = kzalloc(sizeof(*cc) + key_size * sizeof(u8), GFP_KERNEL); in crypt_ctr()
2699 if (!cc) { in crypt_ctr()
2703 cc->key_size = key_size; in crypt_ctr()
2704 cc->sector_size = (1 << SECTOR_SHIFT); in crypt_ctr()
2705 cc->sector_shift = 0; in crypt_ctr()
2707 ti->private = cc; in crypt_ctr()
2714 ret = percpu_counter_init(&cc->n_allocated_pages, 0, GFP_KERNEL); in crypt_ctr()
2729 if (crypt_integrity_aead(cc)) { in crypt_ctr()
2730 cc->dmreq_start = sizeof(struct aead_request); in crypt_ctr()
2731 cc->dmreq_start += crypto_aead_reqsize(any_tfm_aead(cc)); in crypt_ctr()
2732 align_mask = crypto_aead_alignmask(any_tfm_aead(cc)); in crypt_ctr()
2734 cc->dmreq_start = sizeof(struct skcipher_request); in crypt_ctr()
2735 cc->dmreq_start += crypto_skcipher_reqsize(any_tfm(cc)); in crypt_ctr()
2736 align_mask = crypto_skcipher_alignmask(any_tfm(cc)); in crypt_ctr()
2738 cc->dmreq_start = ALIGN(cc->dmreq_start, __alignof__(struct dm_crypt_request)); in crypt_ctr()
2742 iv_size_padding = -(cc->dmreq_start + sizeof(struct dm_crypt_request)) in crypt_ctr()
2755 iv_size_padding + cc->iv_size + in crypt_ctr()
2756 cc->iv_size + in crypt_ctr()
2760 ret = mempool_init_kmalloc_pool(&cc->req_pool, MIN_IOS, cc->dmreq_start + additional_req_size); in crypt_ctr()
2766 cc->per_bio_data_size = ti->per_io_data_size = in crypt_ctr()
2767 ALIGN(sizeof(struct dm_crypt_io) + cc->dmreq_start + additional_req_size, in crypt_ctr()
2770 ret = mempool_init(&cc->page_pool, BIO_MAX_PAGES, crypt_page_alloc, crypt_page_free, cc); in crypt_ctr()
2776 ret = bioset_init(&cc->bs, MIN_IOS, 0, BIOSET_NEED_BVECS); in crypt_ctr()
2782 mutex_init(&cc->bio_alloc_lock); in crypt_ctr()
2786 (tmpll & ((cc->sector_size >> SECTOR_SHIFT) - 1))) { in crypt_ctr()
2790 cc->iv_offset = tmpll; in crypt_ctr()
2792 ret = dm_get_device(ti, argv[3], dm_table_get_mode(ti->table), &cc->dev); in crypt_ctr()
2803 cc->start = tmpll; in crypt_ctr()
2805 if (crypt_integrity_aead(cc) || cc->integrity_iv_size) { in crypt_ctr()
2806 ret = crypt_integrity_ctr(cc, ti); in crypt_ctr()
2810 cc->tag_pool_max_sectors = POOL_ENTRY_SIZE / cc->on_disk_tag_size; in crypt_ctr()
2811 if (!cc->tag_pool_max_sectors) in crypt_ctr()
2812 cc->tag_pool_max_sectors = 1; in crypt_ctr()
2814 ret = mempool_init_kmalloc_pool(&cc->tag_pool, MIN_IOS, in crypt_ctr()
2815 cc->tag_pool_max_sectors * cc->on_disk_tag_size); in crypt_ctr()
2821 cc->tag_pool_max_sectors <<= cc->sector_shift; in crypt_ctr()
2825 cc->io_queue = alloc_workqueue("kcryptd_io", WQ_HIGHPRI | WQ_CPU_INTENSIVE | WQ_MEM_RECLAIM, 1); in crypt_ctr()
2826 if (!cc->io_queue) { in crypt_ctr()
2831 if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) in crypt_ctr()
2832 cc->crypt_queue = alloc_workqueue("kcryptd", WQ_HIGHPRI | WQ_CPU_INTENSIVE | WQ_MEM_RECLAIM, 1); in crypt_ctr()
2834 cc->crypt_queue = alloc_workqueue("kcryptd", in crypt_ctr()
2837 if (!cc->crypt_queue) { in crypt_ctr()
2842 spin_lock_init(&cc->write_thread_lock); in crypt_ctr()
2843 cc->write_tree = RB_ROOT; in crypt_ctr()
2845 cc->write_thread = kthread_create(dmcrypt_write, cc, "dmcrypt_write"); in crypt_ctr()
2846 if (IS_ERR(cc->write_thread)) { in crypt_ctr()
2847 ret = PTR_ERR(cc->write_thread); in crypt_ctr()
2848 cc->write_thread = NULL; in crypt_ctr()
2852 wake_up_process(cc->write_thread); in crypt_ctr()
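crypt_ctr() (lines 2729-2760) sizes each req_pool element as the crypto request (cc->dmreq_start), a dm_crypt_request, alignment padding, two IVs, the original sector, and the tag offset. Line 2742 computes the padding with the negative-modulo idiom -(dmreq_start + sizeof(struct dm_crypt_request)) & align_mask; the trailing "& align_mask" is implied, as it falls outside the matched lines. That idiom in isolation, with example numbers:

#include <stdio.h>

/* Bytes needed to round `off` up to the next (mask + 1)-byte boundary:
 * (-off) & mask, equivalent to ALIGN(off, mask + 1) - off for
 * power-of-two alignments. */
static unsigned int pad_to(unsigned int off, unsigned int mask)
{
    return (unsigned int)(-(int)off) & mask;
}

int main(void)
{
    unsigned int dmreq_start = 136;  /* example: aligned request + reqsize     */
    unsigned int dmreq_size = 144;   /* example sizeof(struct dm_crypt_request) */
    unsigned int align_mask = 15;    /* example crypto alignmask               */

    unsigned int iv_size_padding = pad_to(dmreq_start + dmreq_size, align_mask);

    printf("iv_size_padding = %u\n", iv_size_padding);
    printf("IVs start at offset %u\n",
           dmreq_start + dmreq_size + iv_size_padding);
    return 0;
}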
2866 struct crypt_config *cc = ti->private; in crypt_map() local
2875 bio_set_dev(bio, cc->dev->bdev); in crypt_map()
2877 bio->bi_iter.bi_sector = cc->start + in crypt_map()
2886 (bio_data_dir(bio) == WRITE || cc->on_disk_tag_size)) in crypt_map()
2893 if (unlikely((bio->bi_iter.bi_sector & ((cc->sector_size >> SECTOR_SHIFT) - 1)) != 0)) in crypt_map()
2896 if (unlikely(bio->bi_iter.bi_size & (cc->sector_size - 1))) in crypt_map()
2899 io = dm_per_bio_data(bio, cc->per_bio_data_size); in crypt_map()
2900 crypt_io_init(io, cc, bio, dm_target_offset(ti, bio->bi_iter.bi_sector)); in crypt_map()
2902 if (cc->on_disk_tag_size) { in crypt_map()
2903 unsigned tag_len = cc->on_disk_tag_size * (bio_sectors(bio) >> cc->sector_shift); in crypt_map()
2908 if (bio_sectors(bio) > cc->tag_pool_max_sectors) in crypt_map()
2909 dm_accept_partial_bio(bio, cc->tag_pool_max_sectors); in crypt_map()
2910 io->integrity_metadata = mempool_alloc(&cc->tag_pool, GFP_NOIO); in crypt_map()
2915 if (crypt_integrity_aead(cc)) in crypt_map()
2932 struct crypt_config *cc = ti->private; in crypt_status() local
2942 DMEMIT("%s ", cc->cipher_string); in crypt_status()
2944 if (cc->key_size > 0) { in crypt_status()
2945 if (cc->key_string) in crypt_status()
2946 DMEMIT(":%u:%s", cc->key_size, cc->key_string); in crypt_status()
2948 for (i = 0; i < cc->key_size; i++) in crypt_status()
2949 DMEMIT("%02x", cc->key[i]); in crypt_status()
2953 DMEMIT(" %llu %s %llu", (unsigned long long)cc->iv_offset, in crypt_status()
2954 cc->dev->name, (unsigned long long)cc->start); in crypt_status()
2957 num_feature_args += test_bit(DM_CRYPT_SAME_CPU, &cc->flags); in crypt_status()
2958 num_feature_args += test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); in crypt_status()
2959 num_feature_args += cc->sector_size != (1 << SECTOR_SHIFT); in crypt_status()
2960 num_feature_args += test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags); in crypt_status()
2961 if (cc->on_disk_tag_size) in crypt_status()
2967 if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) in crypt_status()
2969 if (test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) in crypt_status()
2971 if (cc->on_disk_tag_size) in crypt_status()
2972 DMEMIT(" integrity:%u:%s", cc->on_disk_tag_size, cc->cipher_auth); in crypt_status()
2973 if (cc->sector_size != (1 << SECTOR_SHIFT)) in crypt_status()
2974 DMEMIT(" sector_size:%d", cc->sector_size); in crypt_status()
2975 if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) in crypt_status()
2985 struct crypt_config *cc = ti->private; in crypt_postsuspend() local
2987 set_bit(DM_CRYPT_SUSPENDED, &cc->flags); in crypt_postsuspend()
2992 struct crypt_config *cc = ti->private; in crypt_preresume() local
2994 if (!test_bit(DM_CRYPT_KEY_VALID, &cc->flags)) { in crypt_preresume()
3004 struct crypt_config *cc = ti->private; in crypt_resume() local
3006 clear_bit(DM_CRYPT_SUSPENDED, &cc->flags); in crypt_resume()
3016 struct crypt_config *cc = ti->private; in crypt_message() local
3023 if (!test_bit(DM_CRYPT_SUSPENDED, &cc->flags)) { in crypt_message()
3030 if (key_size < 0 || cc->key_size != key_size) { in crypt_message()
3035 ret = crypt_set_key(cc, argv[2]); in crypt_message()
3038 if (cc->iv_gen_ops && cc->iv_gen_ops->init) in crypt_message()
3039 ret = cc->iv_gen_ops->init(cc); in crypt_message()
3041 if (cc->key_string) in crypt_message()
3042 memset(cc->key, 0, cc->key_size * sizeof(u8)); in crypt_message()
3046 if (cc->iv_gen_ops && cc->iv_gen_ops->wipe) { in crypt_message()
3047 ret = cc->iv_gen_ops->wipe(cc); in crypt_message()
3051 return crypt_wipe_key(cc); in crypt_message()
3063 struct crypt_config *cc = ti->private; in crypt_iterate_devices() local
3065 return fn(ti, cc->dev, cc->start, ti->len, data); in crypt_iterate_devices()
3070 struct crypt_config *cc = ti->private; in crypt_io_hints() local
3081 max_t(unsigned, limits->logical_block_size, cc->sector_size); in crypt_io_hints()
3083 max_t(unsigned, limits->physical_block_size, cc->sector_size); in crypt_io_hints()
3084 limits->io_min = max_t(unsigned, limits->io_min, cc->sector_size); in crypt_io_hints()