/*
 * Copyright (C)2006 USAGI/WIDE Project
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 *
 * Author:
 * 	Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */

#include <crypto/scatterwalk.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/hardirq.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/scatterlist.h>

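/*
 * Constant blocks from RFC 3566: the XCBC subkeys are derived from the
 * user key K as K1 = E_K(0x0101...01), K2 = E_K(0x0202...02) and
 * K3 = E_K(0x0303...03).  ks holds those three 16-byte constants back
 * to back; ctx->consts points at it.
 */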
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
			   0x02020202, 0x02020202, 0x02020202, 0x02020202,
			   0x03030303, 0x03030303, 0x03030303, 0x03030303};
/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | crypto_xcbc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 * | key (block size)
 * +------------------------
 * | consts (block size * 3)
 * +------------------------
 */
struct crypto_xcbc_ctx {
	struct crypto_cipher *child;
	u8 *odds;
	u8 *prev;
	u8 *key;
	u8 *consts;
	void (*xor)(u8 *a, const u8 *b, unsigned int bs);
	unsigned int keylen;
	unsigned int len;
};

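/* XOR one 16-byte block into another, a ^= b, one 32-bit word at a time. */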
static void xor_128(u8 *a, const u8 *b, unsigned int bs)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
	((u32 *)a)[2] ^= ((u32 *)b)[2];
	((u32 *)a)[3] ^= ((u32 *)b)[3];
}

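/*
 * Re-derive K1: key the child cipher with the cached user key, encrypt the
 * first constant block, and install the result as the child's key.  The
 * child therefore stays keyed with K1 between calls, while the original
 * user key remains in ctx->key so K2/K3 can be derived in final().
 */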
static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent,
				      struct crypto_xcbc_ctx *ctx)
{
	int bs = crypto_hash_blocksize(parent);
	int err = 0;
	u8 key1[bs];

	if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen)))
		return err;

	crypto_cipher_encrypt_one(ctx->child, key1, ctx->consts);

	return crypto_cipher_setkey(ctx->child, key1, bs);
}

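/* Set the MAC key: it must be exactly one cipher block (16 bytes) long. */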
static int crypto_xcbc_digest_setkey(struct crypto_hash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);

	if (keylen != crypto_cipher_blocksize(ctx->child))
		return -EINVAL;

	ctx->keylen = keylen;
	memcpy(ctx->key, inkey, keylen);
	ctx->consts = (u8 *)ks;

	return _crypto_xcbc_digest_setkey(parent, ctx);
}

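/* Reset the running state: no buffered bytes, zeroed chaining block. */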
static int crypto_xcbc_digest_init(struct hash_desc *pdesc)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(pdesc->tfm);
	int bs = crypto_hash_blocksize(pdesc->tfm);

	ctx->len = 0;
	memset(ctx->odds, 0, bs);
	memset(ctx->prev, 0, bs);

	return 0;
}

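/*
 * CBC-MAC core: walk the scatterlist page by page, buffering partial input
 * in ctx->odds and folding each completed block into the chaining value as
 * prev = E_K1(prev ^ block).  The last block of the message, whether full
 * or partial, is always left buffered in ctx->odds so that final() can
 * apply the K2/K3 tweak to it.
 */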
static int crypto_xcbc_digest_update2(struct hash_desc *pdesc,
				      struct scatterlist *sg,
				      unsigned int nbytes)
{
	struct crypto_hash *parent = pdesc->tfm;
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
	struct crypto_cipher *tfm = ctx->child;
	int bs = crypto_hash_blocksize(parent);

	for (;;) {
		struct page *pg = sg_page(sg);
		unsigned int offset = sg->offset;
		unsigned int slen = sg->length;

		if (unlikely(slen > nbytes))
			slen = nbytes;

		nbytes -= slen;

		while (slen > 0) {
			unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
			char *p = crypto_kmap(pg, 0) + offset;

			/* does the new data still fit into the buffered block? */
			if ((ctx->len + len) <= bs) {
				memcpy(ctx->odds + ctx->len, p, len);
				ctx->len += len;
				slen -= len;

				/* move on within, or past, the current page */
				if (len + offset >= PAGE_SIZE) {
					offset = 0;
					pg++;
				} else
					offset += len;

				crypto_kunmap(p, 0);
				crypto_yield(pdesc->flags);
				continue;
			}

			/* complete the buffered block and fold it into the chain */
			memcpy(ctx->odds + ctx->len, p, bs - ctx->len);
			len -= bs - ctx->len;
			p += bs - ctx->len;

			ctx->xor(ctx->prev, ctx->odds, bs);
			crypto_cipher_encrypt_one(tfm, ctx->prev, ctx->prev);

			/* the buffered block has been consumed */
			ctx->len = 0;

			/* process all but the last block of the remaining data */
			while (len > bs) {
				ctx->xor(ctx->prev, p, bs);
				crypto_cipher_encrypt_one(tfm, ctx->prev,
							  ctx->prev);
				p += bs;
				len -= bs;
			}

			/* buffer the tail as the new final block */
			if (len) {
				memcpy(ctx->odds, p, len);
				ctx->len = len;
			}
			crypto_kunmap(p, 0);
			crypto_yield(pdesc->flags);
			slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
			offset = 0;
			pg++;
		}

		if (!nbytes)
			break;
		sg = scatterwalk_sg_next(sg);
	}

	return 0;
}

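/* Entry point for update: refuse to run in hard interrupt context. */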
static int crypto_xcbc_digest_update(struct hash_desc *pdesc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	if (WARN_ON_ONCE(in_irq()))
		return -EDEADLK;
	return crypto_xcbc_digest_update2(pdesc, sg, nbytes);
}

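/*
 * Produce the MAC.  Per RFC 3566, a final block that is exactly one block
 * long is XORed with K2; a short final block is padded with a single 0x80
 * byte followed by zeroes and XORed with K3.  Either way the result is
 * folded into the chaining value and encrypted under K1 (restored via
 * _crypto_xcbc_digest_setkey) to give the tag.
 */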
static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
{
	struct crypto_hash *parent = pdesc->tfm;
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
	struct crypto_cipher *tfm = ctx->child;
	int bs = crypto_hash_blocksize(parent);
	int err = 0;

	if (ctx->len == bs) {
		u8 key2[bs];

		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
			return err;

		crypto_cipher_encrypt_one(tfm, key2,
					  (u8 *)(ctx->consts + bs));

		ctx->xor(ctx->prev, ctx->odds, bs);
		ctx->xor(ctx->prev, key2, bs);
		_crypto_xcbc_digest_setkey(parent, ctx);

		crypto_cipher_encrypt_one(tfm, out, ctx->prev);
	} else {
		u8 key3[bs];
		unsigned int rlen;
		u8 *p = ctx->odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
			return err;

		crypto_cipher_encrypt_one(tfm, key3,
					  (u8 *)(ctx->consts + bs * 2));

		ctx->xor(ctx->prev, ctx->odds, bs);
		ctx->xor(ctx->prev, key3, bs);

		_crypto_xcbc_digest_setkey(parent, ctx);

		crypto_cipher_encrypt_one(tfm, out, ctx->prev);
	}

	return 0;
}

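/* One-shot digest: init, feed the whole scatterlist, then finalize. */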
static int crypto_xcbc_digest(struct hash_desc *pdesc,
		  struct scatterlist *sg, unsigned int nbytes, u8 *out)
{
	if (WARN_ON_ONCE(in_irq()))
		return -EDEADLK;

	crypto_xcbc_digest_init(pdesc);
	crypto_xcbc_digest_update2(pdesc, sg, nbytes);
	return crypto_xcbc_digest_final(pdesc, out);
}

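/*
 * Instantiate the underlying block cipher and carve the odds/prev/key
 * scratch buffers out of the memory allocated after crypto_xcbc_ctx (see
 * the layout diagram above).  Only 16-byte-block ciphers are supported,
 * since xor_128 and the ks constants assume that block size.
 */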
static int xcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
	int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm));

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	switch (bs) {
	case 16:
		ctx->xor = xor_128;
		break;
	default:
		/* don't leak the cipher we just allocated */
		crypto_free_cipher(cipher);
		return -EINVAL;
	}

	ctx->child = cipher;
	ctx->odds = (u8 *)(ctx + 1);
	ctx->prev = ctx->odds + bs;
	ctx->key = ctx->prev + bs;

	return 0;
}

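/* Release the underlying block cipher when the transform is torn down. */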
static void xcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));

	crypto_free_cipher(ctx->child);
}

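/*
 * Template constructor for "xcbc(cipher)".  Only ciphers with a 16-byte
 * block are accepted; the instance is a hash whose digest size equals the
 * cipher block size, with room after the context for the three per-tfm
 * scratch blocks (odds, prev, key).
 */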
static struct crypto_instance *xcbc_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	switch (alg->cra_blocksize) {
	case 16:
		break;
	default:
		inst = ERR_PTR(-EINVAL);
		goto out_put_alg;
	}

	inst = crypto_alloc_instance("xcbc", alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_hash_type;

	inst->alg.cra_hash.digestsize = alg->cra_blocksize;
	inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
				ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
	inst->alg.cra_init = xcbc_init_tfm;
	inst->alg.cra_exit = xcbc_exit_tfm;

	inst->alg.cra_hash.init = crypto_xcbc_digest_init;
	inst->alg.cra_hash.update = crypto_xcbc_digest_update;
	inst->alg.cra_hash.final = crypto_xcbc_digest_final;
	inst->alg.cra_hash.digest = crypto_xcbc_digest;
	inst->alg.cra_hash.setkey = crypto_xcbc_digest_setkey;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

static void xcbc_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

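/* The "xcbc" template: users instantiate it as e.g. "xcbc(aes)". */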
static struct crypto_template crypto_xcbc_tmpl = {
	.name = "xcbc",
	.alloc = xcbc_alloc,
	.free = xcbc_free,
	.module = THIS_MODULE,
};

static int __init crypto_xcbc_module_init(void)
{
	return crypto_register_template(&crypto_xcbc_tmpl);
}

static void __exit crypto_xcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_xcbc_tmpl);
}

module_init(crypto_xcbc_module_init);
module_exit(crypto_xcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCBC keyed hash algorithm");