/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
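/*
 * PCBC chains both the previous plaintext and the previous ciphertext
 * into each block:
 *
 *	C[i] = E(K, P[i] ^ P[i-1] ^ C[i-1]),  with P[0] ^ C[0] = IV
 *	P[i] = D(K, C[i]) ^ P[i-1] ^ C[i-1]
 *
 * so corrupting a single ciphertext block garbles every subsequent
 * plaintext block on decryption.
 */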

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};

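/*
 * Key the underlying cipher: request flags are propagated from the
 * wrapping transform down to the child, and the child's result flags
 * are reflected back up after the setkey call.
 */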
static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);
	return err;
}

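/*
 * Encrypt a walk chunk where source and destination buffers differ.
 * For each block the running IV (P[i-1] ^ C[i-1]) is XORed with the
 * plaintext, encrypted into the destination, and then refreshed to
 * P[i] ^ C[i] for the next block.
 */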
static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		fn(crypto_cipher_tfm(tfm), dst, iv);
		memcpy(iv, dst, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

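/*
 * As above, but for in-place operation: the plaintext block is saved
 * in tmpbuf before it is overwritten with ciphertext so it can still
 * feed into the next chaining value.
 */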
static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		fn(crypto_cipher_tfm(tfm), src, iv);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

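/*
 * Top-level encrypt: walk the scatterlists a chunk at a time and hand
 * each chunk to the in-place or out-of-place helper as appropriate.
 */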
static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(desc, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(desc, &walk,
							     child);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

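/*
 * Decrypt a walk chunk where source and destination buffers differ.
 * Each ciphertext block is decrypted and XORed with the running IV to
 * recover the plaintext, then the IV is updated to P[i] ^ C[i].
 */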
static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_decrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		fn(crypto_cipher_tfm(tfm), dst, src);
		crypto_xor(dst, iv, bsize);
		memcpy(iv, src, bsize);
		crypto_xor(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

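/*
 * As above, but in place: the ciphertext block is saved in tmpbuf
 * before being overwritten so it can be folded into the next chaining
 * value.
 */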
static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_decrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		fn(crypto_cipher_tfm(tfm), src, src);
		crypto_xor(src, iv, bsize);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

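/*
 * Top-level decrypt: mirror of crypto_pcbc_encrypt using the cipher's
 * decryption primitive.
 */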
static int crypto_pcbc_decrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(desc, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(desc, &walk,
							     child);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

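/* Instantiate the underlying block cipher when a pcbc tfm is created. */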
static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

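/*
 * Build a "pcbc(<cipher>)" blkcipher instance around a plain
 * single-block cipher algorithm.
 */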
static struct crypto_instance *crypto_pcbc_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = crypto_alloc_instance("pcbc", alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_blkcipher_type;

	/* We access the data as u32s when xoring. */
	inst->alg.cra_alignmask |= __alignof__(u32) - 1;

	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.cra_init = crypto_pcbc_init_tfm;
	inst->alg.cra_exit = crypto_pcbc_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_pcbc_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_pcbc_encrypt;
	inst->alg.cra_blkcipher.decrypt = crypto_pcbc_decrypt;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

static void crypto_pcbc_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.alloc = crypto_pcbc_alloc,
	.free = crypto_pcbc_free,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");