• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1commit d18c49a593df42023720499e2867660249346c5c
2Author: zhaoxc0502 <zhaoxc0502@thundersoft.com>
3Date:   Thu Jun 16 17:12:29 2022 +0800
4
5    linux_crypto
6
7    Change-Id: I2114b2e4bc24d3354e6a67213ac3e47c24d6260b
8
9diff --git a/crypto/Kconfig b/crypto/Kconfig
10index 774adc984..5b623f6f0 100644
11--- a/crypto/Kconfig
12+++ b/crypto/Kconfig
13@@ -365,6 +365,26 @@ config CRYPTO_ECHAINIV
14 	  a sequence number xored with a salt.  This is the default
15 	  algorithm for CBC.
16
17+config CRYPTO_TLS
18+	tristate "TLS support"
19+	select CRYPTO_AEAD
20+	select CRYPTO_BLKCIPHER
21+	select CRYPTO_MANAGER
22+	select CRYPTO_HASH
23+	select CRYPTO_NULL
24+	select CRYPTO_AUTHENC
25+	help
26+	  Support for TLS 1.0 record encryption and decryption
27+
28+	  This module adds support for encryption/decryption of TLS 1.0 frames
29+	  using blockcipher algorithms. The name of the resulting algorithm is
30+	  "tls10(hmac(<digest>),cbc(<cipher>))". By default, the generic base
31+	  algorithms are used (e.g. aes-generic, sha1-generic), but hardware
32+	  accelerated versions will be used automatically if available.
33+
34+	  User-space applications (OpenSSL, GnuTLS) can offload TLS 1.0
35+	  operations through AF_ALG or cryptodev interfaces.
36+
37 comment "Block modes"
38
39 config CRYPTO_CBC
40diff --git a/crypto/Makefile b/crypto/Makefile
41index b279483fb..802d9a5e0 100644
42--- a/crypto/Makefile
43+++ b/crypto/Makefile
44@@ -151,6 +151,7 @@ obj-$(CONFIG_CRYPTO_CRC32) += crc32_generic.o
45 obj-$(CONFIG_CRYPTO_CRCT10DIF) += crct10dif_common.o crct10dif_generic.o
46 obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o
47 obj-$(CONFIG_CRYPTO_LZO) += lzo.o lzo-rle.o
48+obj-$(CONFIG_CRYPTO_TLS) += tls.o
49 obj-$(CONFIG_CRYPTO_LZ4) += lz4.o
50 obj-$(CONFIG_CRYPTO_LZ4HC) += lz4hc.o
51 obj-$(CONFIG_CRYPTO_XXHASH) += xxhash_generic.o
52diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
53index 8609174e0..c0f5c91b1 100644
54--- a/crypto/tcrypt.c
55+++ b/crypto/tcrypt.c
56@@ -72,9 +72,8 @@ static const char *check[] = {
57 	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
58 	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta",  "fcrypt",
59 	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
60-	"lzo", "lzo-rle", "cts", "sha3-224", "sha3-256", "sha3-384",
61-	"sha3-512", "streebog256", "streebog512",
62-	NULL
63+	"lzo", "lzo-rle", "cts", "zlib", "sha3-224", "sha3-256", "sha3-384",
64+	"sha3-512", "streebog256", "streebog512", "rsa", NULL
65 };
66
67 static u32 block_sizes[] = { 16, 64, 256, 1024, 1472, 8192, 0 };
68@@ -258,7 +257,7 @@ static void test_mb_aead_speed(const char *algo, int enc, int secs,
69 	unsigned int i, j, iv_len;
70 	const char *key;
71 	const char *e;
72-	void *assoc;
73+	void *assoc, *assoc_out;
74 	u32 *b_size;
75 	char *iv;
76 	int ret;
77@@ -378,6 +377,8 @@ static void test_mb_aead_speed(const char *algo, int enc, int secs,
78
79 				assoc = cur->axbuf[0];
80 				memset(assoc, 0xff, aad_size);
81+				assoc_out = cur->axbuf[1];
82+				memset(assoc_out, 0xff, aad_size);
83
84 				sg_init_aead(cur->sg, cur->xbuf,
85 					     *b_size + (enc ? 0 : authsize),
86@@ -385,7 +386,7 @@ static void test_mb_aead_speed(const char *algo, int enc, int secs,
87
88 				sg_init_aead(cur->sgout, cur->xoutbuf,
89 					     *b_size + (enc ? authsize : 0),
90-					     assoc, aad_size);
91+					     assoc_out, aad_size);
92
93 				aead_request_set_ad(cur->req, aad_size);
94
95@@ -403,6 +404,9 @@ static void test_mb_aead_speed(const char *algo, int enc, int secs,
96 						       ret);
97 						break;
98 					}
99+
100+					memset(assoc, 0xff, aad_size);
101+					memset(assoc_out, 0xff, aad_size);
102 				}
103
104 				aead_request_set_crypt(cur->req, cur->sg,
105@@ -529,7 +533,7 @@ static void test_aead_speed(const char *algo, int enc, unsigned int secs,
106 	struct scatterlist *sg;
107 	struct scatterlist *sgout;
108 	const char *e;
109-	void *assoc;
110+	void *assoc, *assoc_out;
111 	char *iv;
112 	char *xbuf[XBUFSIZE];
113 	char *xoutbuf[XBUFSIZE];
114@@ -592,6 +596,8 @@ static void test_aead_speed(const char *algo, int enc, unsigned int secs,
115 		do {
116 			assoc = axbuf[0];
117 			memset(assoc, 0xff, aad_size);
118+			assoc_out = axbuf[1];
119+			memset(assoc_out, 0xff, aad_size);
120
121 			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
122 				pr_err("template (%u) too big for tvmem (%lu)\n",
123@@ -631,7 +637,7 @@ static void test_aead_speed(const char *algo, int enc, unsigned int secs,
124 				     assoc, aad_size);
125
126 			sg_init_aead(sgout, xoutbuf,
127-				     *b_size + (enc ? authsize : 0), assoc,
128+				     *b_size + (enc ? authsize : 0), assoc_out,
129 				     aad_size);
130
131 			aead_request_set_ad(req, aad_size);
132@@ -653,6 +659,9 @@ static void test_aead_speed(const char *algo, int enc, unsigned int secs,
133 					       ret);
134 					break;
135 				}
136+
137+				memset(assoc, 0xff, aad_size);
138+				memset(assoc_out, 0xff, aad_size);
139 			}
140
141 			aead_request_set_crypt(req, sg, sgout,
142@@ -1984,6 +1993,10 @@ static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
143 		ret += tcrypt_test("hmac(streebog512)");
144 		break;
145
146+	case 117:
147+		ret += tcrypt_test("rsa");
148+		break;
149+
150 	case 150:
151 		ret += tcrypt_test("ansi_cprng");
152 		break;
153@@ -2050,6 +2063,9 @@ static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
154 		ret += tcrypt_test("cbc(sm4)");
155 		ret += tcrypt_test("ctr(sm4)");
156 		break;
157+	case 192:
158+		ret += tcrypt_test("tls10(hmac(sha1),cbc(aes))");
159+		break;
160 	case 200:
161 		test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
162 				speed_template_16_24_32);
163diff --git a/crypto/testmgr.c b/crypto/testmgr.c
164index a64a639ed..d2bc27682 100644
165--- a/crypto/testmgr.c
166+++ b/crypto/testmgr.c
167@@ -126,6 +126,13 @@ struct drbg_test_suite {
168 	unsigned int count;
169 };
170
171+struct tls_test_suite {
172+	struct {
173+		struct tls_testvec *vecs;
174+		unsigned int count;
175+	} enc, dec;
176+};
177+
178 struct akcipher_test_suite {
179 	const struct akcipher_testvec *vecs;
180 	unsigned int count;
181@@ -150,6 +157,7 @@ struct alg_test_desc {
182 		struct hash_test_suite hash;
183 		struct cprng_test_suite cprng;
184 		struct drbg_test_suite drbg;
185+		struct tls_test_suite tls;
186 		struct akcipher_test_suite akcipher;
187 		struct kpp_test_suite kpp;
188 	} suite;
189@@ -2555,6 +2563,227 @@ static int test_aead(const char *driver, int enc,
190 	return 0;
191 }
192
193+static int __test_tls(struct crypto_aead *tfm, int enc,
194+		      struct tls_testvec *template, unsigned int tcount,
195+		      const bool diff_dst)
196+{
197+	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
198+	unsigned int i, k, authsize;
199+	char *q;
200+	struct aead_request *req;
201+	struct scatterlist *sg;
202+	struct scatterlist *sgout;
203+	const char *e, *d;
204+	struct crypto_wait wait;
205+	void *input;
206+	void *output;
207+	void *assoc;
208+	char *iv;
209+	char *key;
210+	char *xbuf[XBUFSIZE];
211+	char *xoutbuf[XBUFSIZE];
212+	char *axbuf[XBUFSIZE];
213+	int ret = -ENOMEM;
214+
215+	if (testmgr_alloc_buf(xbuf))
216+		goto out_noxbuf;
217+
218+	if (diff_dst && testmgr_alloc_buf(xoutbuf))
219+		goto out_nooutbuf;
220+
221+	if (testmgr_alloc_buf(axbuf))
222+		goto out_noaxbuf;
223+
224+	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
225+	if (!iv)
226+		goto out_noiv;
227+
228+	key = kzalloc(MAX_KEYLEN, GFP_KERNEL);
229+	if (!key)
230+		goto out_nokey;
231+
232+	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 2 : 1), GFP_KERNEL);
233+	if (!sg)
234+		goto out_nosg;
235+
236+	sgout = sg + 8;
237+
238+	d = diff_dst ? "-ddst" : "";
239+	e = enc ? "encryption" : "decryption";
240+
241+	crypto_init_wait(&wait);
242+
243+	req = aead_request_alloc(tfm, GFP_KERNEL);
244+	if (!req) {
245+		pr_err("alg: tls%s: Failed to allocate request for %s\n",
246+		       d, algo);
247+		goto out;
248+	}
249+
250+	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
251+				  crypto_req_done, &wait);
252+
253+	for (i = 0; i < tcount; i++) {
254+		input = xbuf[0];
255+		assoc = axbuf[0];
256+
257+		ret = -EINVAL;
258+		if (WARN_ON(template[i].ilen > PAGE_SIZE ||
259+			    template[i].alen > PAGE_SIZE))
260+			goto out;
261+
262+		memcpy(assoc, template[i].assoc, template[i].alen);
263+		memcpy(input, template[i].input, template[i].ilen);
264+
265+		if (template[i].iv)
266+			memcpy(iv, template[i].iv, MAX_IVLEN);
267+		else
268+			memset(iv, 0, MAX_IVLEN);
269+
270+		crypto_aead_clear_flags(tfm, ~0);
271+
272+		if (template[i].klen > MAX_KEYLEN) {
273+			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
274+			       d, i, algo, template[i].klen, MAX_KEYLEN);
275+			ret = -EINVAL;
276+			goto out;
277+		}
278+		memcpy(key, template[i].key, template[i].klen);
279+
280+		ret = crypto_aead_setkey(tfm, key, template[i].klen);
281+		if ((!ret) == template[i].fail) {
282+			pr_err("alg: tls%s: setkey failed on test %d for %s: flags=%x\n",
283+			       d, i, algo, crypto_aead_get_flags(tfm));
284+			goto out;
285+		} else if (ret)
286+			continue;
287+
288+		authsize = 20;
289+		ret = crypto_aead_setauthsize(tfm, authsize);
290+		if (ret) {
291+			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
292+			       d, authsize, i, algo);
293+			goto out;
294+		}
295+
296+		k = !!template[i].alen;
297+		sg_init_table(sg, k + 1);
298+		sg_set_buf(&sg[0], assoc, template[i].alen);
299+		sg_set_buf(&sg[k], input, (enc ? template[i].rlen :
300+					   template[i].ilen));
301+		output = input;
302+
303+		if (diff_dst) {
304+			sg_init_table(sgout, k + 1);
305+			sg_set_buf(&sgout[0], assoc, template[i].alen);
306+
307+			output = xoutbuf[0];
308+			sg_set_buf(&sgout[k], output,
309+				   (enc ? template[i].rlen : template[i].ilen));
310+		}
311+
312+		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
313+				       template[i].ilen, iv);
314+
315+		aead_request_set_ad(req, template[i].alen);
316+
317+		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
318+				      : crypto_aead_decrypt(req), &wait);
319+
320+		switch (ret) {
321+		case 0:
322+			if (template[i].novrfy) {
323+				/* verification was supposed to fail */
324+				pr_err("alg: tls%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
325+				       d, e, i, algo);
326+				/* so really, we got a bad message */
327+				ret = -EBADMSG;
328+				goto out;
329+			}
330+			break;
331+		case -EBADMSG:
332+			/* verification failure was expected */
333+			if (template[i].novrfy)
334+				continue;
335+			/* fall through */
336+		default:
337+			pr_err("alg: tls%s: %s failed on test %d for %s: ret=%d\n",
338+			       d, e, i, algo, -ret);
339+			goto out;
340+		}
341+
342+		q = output;
343+		if (memcmp(q, template[i].result, template[i].rlen)) {
344+			pr_err("alg: tls%s: Test %d failed on %s for %s\n",
345+			       d, i, e, algo);
346+			hexdump(q, template[i].rlen);
347+			pr_err("should be:\n");
348+			hexdump(template[i].result, template[i].rlen);
349+			ret = -EINVAL;
350+			goto out;
351+		}
352+	}
353+
354+out:
355+	aead_request_free(req);
356+
357+	kfree(sg);
358+out_nosg:
359+	kfree(key);
360+out_nokey:
361+	kfree(iv);
362+out_noiv:
363+	testmgr_free_buf(axbuf);
364+out_noaxbuf:
365+	if (diff_dst)
366+		testmgr_free_buf(xoutbuf);
367+out_nooutbuf:
368+	testmgr_free_buf(xbuf);
369+out_noxbuf:
370+	return ret;
371+}
372+
373+static int test_tls(struct crypto_aead *tfm, int enc,
374+		    struct tls_testvec *template, unsigned int tcount)
375+{
376+	int ret;
377+	/* test 'dst == src' case */
378+	ret = __test_tls(tfm, enc, template, tcount, false);
379+	if (ret)
380+		return ret;
381+	/* test 'dst != src' case */
382+	return __test_tls(tfm, enc, template, tcount, true);
383+}
384+
385+static int alg_test_tls(const struct alg_test_desc *desc, const char *driver,
386+			u32 type, u32 mask)
387+{
388+	struct crypto_aead *tfm;
389+	int err = 0;
390+
391+	tfm = crypto_alloc_aead(driver, type, mask);
392+	if (IS_ERR(tfm)) {
393+		pr_err("alg: aead: Failed to load transform for %s: %ld\n",
394+			driver, PTR_ERR(tfm));
395+		return PTR_ERR(tfm);
396+	}
397+
398+	if (desc->suite.tls.enc.vecs) {
399+		err = test_tls(tfm, ENCRYPT, desc->suite.tls.enc.vecs,
400+				desc->suite.tls.enc.count);
401+		if (err)
402+			goto out;
403+	}
404+
405+	if (!err && desc->suite.tls.dec.vecs)
406+		err = test_tls(tfm, DECRYPT, desc->suite.tls.dec.vecs,
407+			       desc->suite.tls.dec.count);
408+
409+out:
410+	crypto_free_aead(tfm);
411+	return err;
412+}
413+
414 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
415 			 u32 type, u32 mask)
416 {
417@@ -5427,6 +5656,15 @@ static const struct alg_test_desc alg_test_descs[] = {
418 		.suite = {
419 			.hash = __VECS(tgr192_tv_template)
420 		}
421+	}, {
422+		.alg = "tls10(hmac(sha1),cbc(aes))",
423+		.test = alg_test_tls,
424+		.suite = {
425+			.tls = {
426+				.enc = __VECS(tls_enc_tv_template),
427+				.dec = __VECS(tls_dec_tv_template)
428+			}
429+		}
430 	}, {
431 		.alg = "vmac64(aes)",
432 		.test = alg_test_hash,
433diff --git a/crypto/testmgr.h b/crypto/testmgr.h
434index 8c83811c0..16ae80824 100644
435--- a/crypto/testmgr.h
436+++ b/crypto/testmgr.h
437@@ -21,7 +21,12 @@
438 #define _CRYPTO_TESTMGR_H
439
440 #include <linux/oid_registry.h>
441+#include <linux/netlink.h>
442
443+#define MAX_DIGEST_SIZE		64
444+#define MAX_TAP			8
445+
446+#define MAX_KEYLEN		160
447 #define MAX_IVLEN		32
448
449 /*
450@@ -146,6 +151,20 @@ struct drbg_testvec {
451 	size_t expectedlen;
452 };
453
454+struct tls_testvec {
455+	char *key;	/* wrapped keys for encryption and authentication */
456+	char *iv;	/* initialization vector */
457+	char *input;	/* input data */
458+	char *assoc;	/* associated data: seq num, type, version, input len */
459+	char *result;	/* result data */
460+	unsigned char fail;	/* the test failure is expected */
461+	unsigned char novrfy;	/* dec verification failure expected */
462+	unsigned char klen;	/* key length */
463+	unsigned short ilen;	/* input data length */
464+	unsigned short alen;	/* associated data length */
465+	unsigned short rlen;	/* result length */
466+};
467+
468 struct akcipher_testvec {
469 	const unsigned char *key;
470 	const unsigned char *params;
471@@ -176,6 +195,211 @@ struct kpp_testvec {
472
473 static const char zeroed_string[48];
474
475+/*
476+ * TLS1.0 synthetic test vectors
477+ */
478+static struct tls_testvec tls_enc_tv_template[] = {
479+	{
480+#ifdef __LITTLE_ENDIAN
481+		.key	= "\x08\x00"		/* rta length */
482+			"\x01\x00"		/* rta type */
483+#else
484+		.key	= "\x00\x08"		/* rta length */
485+			"\x00\x01"		/* rta type */
486+#endif
487+			"\x00\x00\x00\x10"	/* enc key length */
488+			"authenticationkey20benckeyis16_bytes",
489+		.klen	= 8 + 20 + 16,
490+		.iv	= "iv0123456789abcd",
491+		.input	= "Single block msg",
492+		.ilen	= 16,
493+		.assoc	= "\x00\x01\x02\x03\x04\x05\x06\x07"
494+			"\x00\x03\x01\x00\x10",
495+		.alen	= 13,
496+		.result	= "\xd5\xac\xb\xd2\xac\xad\x3f\xb1"
497+			"\x59\x79\x1e\x91\x5f\x52\x14\x9c"
498+			"\xc0\x75\xd8\x4c\x97\x0f\x07\x73"
499+			"\xdc\x89\x47\x49\x49\xcb\x30\x6b"
500+			"\x1b\x45\x23\xa1\xd0\x51\xcf\x02"
501+			"\x2e\xa8\x5d\xa0\xfe\xca\x82\x61",
502+		.rlen	= 16 + 20 + 12,
503+	}, {
504+#ifdef __LITTLE_ENDIAN
505+		.key	= "\x08\x00"		/* rta length */
506+			"\x01\x00"		/* rta type */
507+#else
508+		.key	= "\x00\x08"		/* rta length */
509+			"\x00\x01"		/* rta type */
510+#endif
511+			"\x00\x00\x00\x10"	/* enc key length */
512+			"authenticationkey20benckeyis16_bytes",
513+		.klen	= 8 + 20 + 16,
514+		.iv	= "iv0123456789abcd",
515+		.input	= "",
516+		.ilen	= 0,
517+		.assoc	= "\x00\x01\x02\x03\x04\x05\x06\x07"
518+			"\x00\x03\x01\x00\x00",
519+		.alen	= 13,
520+		.result = "\x58\x2a\x11\xc\x86\x8e\x4b\x67"
521+			"\x2d\x16\x26\x1a\xac\x4b\xe2\x1a"
522+			"\xe9\x6a\xcc\x4d\x6f\x79\x8a\x45"
523+			"\x1f\x4e\x27\xf2\xa7\x59\xb4\x5a",
524+		.rlen	= 20 + 12,
525+	}, {
526+#ifdef __LITTLE_ENDIAN
527+		.key	= "\x08\x00"		/* rta length */
528+			"\x01\x00"		/* rta type */
529+#else
530+		.key	= "\x00\x08"		/* rta length */
531+			"\x00\x01"		/* rta type */
532+#endif
533+			"\x00\x00\x00\x10"	/* enc key length */
534+			"authenticationkey20benckeyis16_bytes",
535+		.klen	= 8 + 20 + 16,
536+		.iv	= "iv0123456789abcd",
537+		.input	= "285 bytes plaintext285 bytes plaintext285 bytes"
538+			" plaintext285 bytes plaintext285 bytes plaintext285"
539+			" bytes plaintext285 bytes plaintext285 bytes"
540+			" plaintext285 bytes plaintext285 bytes plaintext285"
541+			" bytes plaintext285 bytes plaintext285 bytes"
542+			" plaintext285 bytes plaintext285 bytes plaintext285"
543+			" bytes plaintext285 bytes plaintext",
544+		.ilen	= 285,
545+		.assoc	= "\x00\x01\x02\x03\x04\x05\x06\x07"
546+			"\x00\x03\x01\x01\x1d",
547+		.alen	= 13,
548+		.result = "\x80\x23\x82\x44\x14\x2a\x1d\x94\xc\xc2\x1d\xd"
549+			"\x3a\x32\x89\x4c\x57\x30\xa8\x89\x76\x46\xcc\x90"
550+			"\x1d\x88\xb8\xa6\x1a\x58\xe\x2d\xeb\x2c\xc7\x3a"
551+			"\x52\x4e\xdb\xb3\x1e\x83\x11\xf5\x3c\xce\x6e\x94"
552+			"\xd3\x26\x6a\x9a\xd\xbd\xc7\x98\xb9\xb3\x3a\x51"
553+			"\x1e\x4\x84\x8a\x8f\x54\x9a\x51\x69\x9c\xce\x31"
554+			"\x8d\x5d\x8b\xee\x5f\x70\xc\xc9\xb8\x50\x54\xf8"
555+			"\xb2\x4a\x7a\xcd\xeb\x7a\x82\x81\xc6\x41\xc8\x50"
556+			"\x91\x8d\xc8\xed\xcd\x40\x8f\x55\xd1\xec\xc9\xac"
557+			"\x15\x18\xf9\x20\xa0\xed\x18\xa1\xe3\x56\xe3\x14"
558+			"\xe5\xe8\x66\x63\x20\xed\xe4\x62\x9d\xa3\xa4\x1d"
559+			"\x81\x89\x18\xf2\x36\xae\xc8\x8a\x2b\xbc\xc3\xb8"
560+			"\x80\xf\x97\x21\x36\x39\x8\x84\x23\x18\x9e\x9c"
561+			"\x72\x32\x75\x2d\x2e\xf9\x60\xb\xe8\xcc\xd9\x74"
562+			"\x4\x1b\x8e\x99\xc1\x94\xee\xd0\xac\x4e\xfc\x7e"
563+			"\xf1\x96\xb3\xe7\x14\xb8\xf2\xc\x25\x97\x82\x6b"
564+			"\xbd\x0\x65\xab\x5c\xe3\x16\xfb\x68\xef\xea\x9d"
565+			"\xff\x44\x1d\x2a\x44\xf5\xc8\x56\x77\xb7\xbf\x13"
566+			"\xc8\x54\xdb\x92\xfe\x16\x4c\xbe\x18\xe9\xb\x8d"
567+			"\xb\xd4\x43\x58\x43\xaa\xf4\x3\x80\x97\x62\xd5"
568+			"\xdf\x3c\x28\xaa\xee\x48\x4b\x55\x41\x1b\x31\x2"
569+			"\xbe\xa0\x1c\xbd\xb7\x22\x2a\xe5\x53\x72\x73\x20"
570+			"\x44\x4f\xe6\x1\x2b\x34\x33\x11\x7d\xfb\x10\xc1"
571+			"\x66\x7c\xa6\xf4\x48\x36\x5e\x2\xda\x41\x4b\x3e"
572+			"\xe7\x80\x17\x17\xce\xf1\x3e\x6a\x8e\x26\xf3\xb7"
573+			"\x2b\x85\xd\x31\x8d\xba\x6c\x22\xb4\x28\x55\x7e"
574+			"\x2a\x9e\x26\xf1\x3d\x21\xac\x65",
575+		.rlen	= 285 + 20 + 15,
576+	}
577+};
578+
579+static struct tls_testvec tls_dec_tv_template[] = {
580+	{
581+#ifdef __LITTLE_ENDIAN
582+		.key	= "\x08\x00"		/* rta length */
583+			"\x01\x00"		/* rta type */
584+#else
585+		.key	= "\x00\x08"		/* rta length */
586+			"\x00\x01"		/* rta type */
587+#endif
588+			"\x00\x00\x00\x10"	/* enc key length */
589+			"authenticationkey20benckeyis16_bytes",
590+		.klen	= 8 + 20 + 16,
591+		.iv	= "iv0123456789abcd",
592+		.input	= "\xd5\xac\xb\xd2\xac\xad\x3f\xb1"
593+			"\x59\x79\x1e\x91\x5f\x52\x14\x9c"
594+			"\xc0\x75\xd8\x4c\x97\x0f\x07\x73"
595+			"\xdc\x89\x47\x49\x49\xcb\x30\x6b"
596+			"\x1b\x45\x23\xa1\xd0\x51\xcf\x02"
597+			"\x2e\xa8\x5d\xa0\xfe\xca\x82\x61",
598+		.ilen	= 16 + 20 + 12,
599+		.assoc	= "\x00\x01\x02\x03\x04\x05\x06\x07"
600+			"\x00\x03\x01\x00\x30",
601+		.alen	= 13,
602+		.result	= "Single block msg",
603+		.rlen	= 16,
604+	}, {
605+#ifdef __LITTLE_ENDIAN
606+		.key	= "\x08\x00"		/* rta length */
607+			"\x01\x00"		/* rta type */
608+#else
609+		.key	= "\x00\x08"		/* rta length */
610+			"\x00\x01"		/* rta type */
611+#endif
612+			"\x00\x00\x00\x10"	/* enc key length */
613+			"authenticationkey20benckeyis16_bytes",
614+		.klen	= 8 + 20 + 16,
615+		.iv	= "iv0123456789abcd",
616+		.input = "\x58\x2a\x11\xc\x86\x8e\x4b\x67"
617+			"\x2d\x16\x26\x1a\xac\x4b\xe2\x1a"
618+			"\xe9\x6a\xcc\x4d\x6f\x79\x8a\x45"
619+			"\x1f\x4e\x27\xf2\xa7\x59\xb4\x5a",
620+		.ilen	= 20 + 12,
621+		.assoc	= "\x00\x01\x02\x03\x04\x05\x06\x07"
622+			"\x00\x03\x01\x00\x20",
623+		.alen	= 13,
624+		.result	= "",
625+		.rlen	= 0,
626+	}, {
627+#ifdef __LITTLE_ENDIAN
628+		.key	= "\x08\x00"		/* rta length */
629+			"\x01\x00"		/* rta type */
630+#else
631+		.key	= "\x00\x08"		/* rta length */
632+			"\x00\x01"		/* rta type */
633+#endif
634+			"\x00\x00\x00\x10"	/* enc key length */
635+			"authenticationkey20benckeyis16_bytes",
636+		.klen	= 8 + 20 + 16,
637+		.iv	= "iv0123456789abcd",
638+		.input = "\x80\x23\x82\x44\x14\x2a\x1d\x94\xc\xc2\x1d\xd"
639+			"\x3a\x32\x89\x4c\x57\x30\xa8\x89\x76\x46\xcc\x90"
640+			"\x1d\x88\xb8\xa6\x1a\x58\xe\x2d\xeb\x2c\xc7\x3a"
641+			"\x52\x4e\xdb\xb3\x1e\x83\x11\xf5\x3c\xce\x6e\x94"
642+			"\xd3\x26\x6a\x9a\xd\xbd\xc7\x98\xb9\xb3\x3a\x51"
643+			"\x1e\x4\x84\x8a\x8f\x54\x9a\x51\x69\x9c\xce\x31"
644+			"\x8d\x5d\x8b\xee\x5f\x70\xc\xc9\xb8\x50\x54\xf8"
645+			"\xb2\x4a\x7a\xcd\xeb\x7a\x82\x81\xc6\x41\xc8\x50"
646+			"\x91\x8d\xc8\xed\xcd\x40\x8f\x55\xd1\xec\xc9\xac"
647+			"\x15\x18\xf9\x20\xa0\xed\x18\xa1\xe3\x56\xe3\x14"
648+			"\xe5\xe8\x66\x63\x20\xed\xe4\x62\x9d\xa3\xa4\x1d"
649+			"\x81\x89\x18\xf2\x36\xae\xc8\x8a\x2b\xbc\xc3\xb8"
650+			"\x80\xf\x97\x21\x36\x39\x8\x84\x23\x18\x9e\x9c"
651+			"\x72\x32\x75\x2d\x2e\xf9\x60\xb\xe8\xcc\xd9\x74"
652+			"\x4\x1b\x8e\x99\xc1\x94\xee\xd0\xac\x4e\xfc\x7e"
653+			"\xf1\x96\xb3\xe7\x14\xb8\xf2\xc\x25\x97\x82\x6b"
654+			"\xbd\x0\x65\xab\x5c\xe3\x16\xfb\x68\xef\xea\x9d"
655+			"\xff\x44\x1d\x2a\x44\xf5\xc8\x56\x77\xb7\xbf\x13"
656+			"\xc8\x54\xdb\x92\xfe\x16\x4c\xbe\x18\xe9\xb\x8d"
657+			"\xb\xd4\x43\x58\x43\xaa\xf4\x3\x80\x97\x62\xd5"
658+			"\xdf\x3c\x28\xaa\xee\x48\x4b\x55\x41\x1b\x31\x2"
659+			"\xbe\xa0\x1c\xbd\xb7\x22\x2a\xe5\x53\x72\x73\x20"
660+			"\x44\x4f\xe6\x1\x2b\x34\x33\x11\x7d\xfb\x10\xc1"
661+			"\x66\x7c\xa6\xf4\x48\x36\x5e\x2\xda\x41\x4b\x3e"
662+			"\xe7\x80\x17\x17\xce\xf1\x3e\x6a\x8e\x26\xf3\xb7"
663+			"\x2b\x85\xd\x31\x8d\xba\x6c\x22\xb4\x28\x55\x7e"
664+			"\x2a\x9e\x26\xf1\x3d\x21\xac\x65",
665+
666+		.ilen	= 285 + 20 + 15,
667+		.assoc	= "\x00\x01\x02\x03\x04\x05\x06\x07"
668+			"\x00\x03\x01\x01\x40",
669+		.alen	= 13,
670+		.result	= "285 bytes plaintext285 bytes plaintext285 bytes"
671+			" plaintext285 bytes plaintext285 bytes plaintext285"
672+			" bytes plaintext285 bytes plaintext285 bytes"
673+			" plaintext285 bytes plaintext285 bytes plaintext285"
674+			" bytes plaintext285 bytes plaintext285 bytes"
675+			" plaintext285 bytes plaintext285 bytes plaintext",
676+		.rlen	= 285,
677+	}
678+};
679+
680 /*
681  * RSA test vectors. Borrowed from openSSL.
682  */
683diff --git a/crypto/tls.c b/crypto/tls.c
684new file mode 100644
685index 000000000..e04f3b450
686--- /dev/null
687+++ b/crypto/tls.c
688@@ -0,0 +1,579 @@
689+/*
690+ * Copyright 2013 Freescale
691+ * Copyright 2017 NXP
692+ *
693+ * This program is free software; you can redistribute it and/or modify it
694+ * under the terms of the GNU General Public License as published by the Free
695+ * Software Foundation; either version 2 of the License, or (at your option)
696+ * any later version.
697+ *
698+ */
699+
700+#include <crypto/internal/aead.h>
701+#include <crypto/internal/hash.h>
702+#include <crypto/internal/skcipher.h>
703+#include <crypto/authenc.h>
704+#include <crypto/null.h>
705+#include <crypto/scatterwalk.h>
706+#include <linux/err.h>
707+#include <linux/init.h>
708+#include <linux/module.h>
709+#include <linux/rtnetlink.h>
710+
711+struct tls_instance_ctx {
712+	struct crypto_ahash_spawn auth;
713+	struct crypto_skcipher_spawn enc;
714+};
715+
716+struct crypto_tls_ctx {
717+	unsigned int reqoff;
718+	struct crypto_ahash *auth;
719+	struct crypto_skcipher *enc;
720+	struct crypto_sync_skcipher *null;
721+};
722+
723+struct tls_request_ctx {
724+	/*
725+	 * cryptlen holds the payload length in the case of encryption or
726+	 * payload_len + icv_len + padding_len in case of decryption
727+	 */
728+	unsigned int cryptlen;
729+	/* working space for partial results */
730+	struct scatterlist tmp[2];
731+	struct scatterlist cipher[2];
732+	struct scatterlist dst[2];
733+	char tail[];
734+};
735+
736+struct async_op {
737+	struct completion completion;
738+	int err;
739+};
740+
741+static void tls_async_op_done(struct crypto_async_request *req, int err)
742+{
743+	struct async_op *areq = req->data;
744+
745+	if (err == -EINPROGRESS)
746+		return;
747+
748+	areq->err = err;
749+	complete(&areq->completion);
750+}
751+
752+static int crypto_tls_setkey(struct crypto_aead *tls, const u8 *key,
753+			     unsigned int keylen)
754+{
755+	struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
756+	struct crypto_ahash *auth = ctx->auth;
757+	struct crypto_skcipher *enc = ctx->enc;
758+	struct crypto_authenc_keys keys;
759+	int err = -EINVAL;
760+
761+	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
762+		goto out;
763+
764+	crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
765+	crypto_ahash_set_flags(auth, crypto_aead_get_flags(tls) &
766+				    CRYPTO_TFM_REQ_MASK);
767+	err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
768+	if (err)
769+		goto out;
770+
771+	crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
772+	crypto_skcipher_set_flags(enc, crypto_aead_get_flags(tls) &
773+					 CRYPTO_TFM_REQ_MASK);
774+	err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
775+
776+out:
777+	memzero_explicit(&keys, sizeof(keys));
778+	return err;
779+}
780+
781+/**
782+ * crypto_tls_genicv - Calculate hmac digest for a TLS record
783+ * @hash:	(output) buffer to save the digest into
784+ * @src:	(input) scatterlist with the assoc and payload data
785+ * @srclen:	(input) size of the source buffer (assoclen + cryptlen)
786+ * @req:	(input) aead request
787+ **/
788+static int crypto_tls_genicv(u8 *hash, struct scatterlist *src,
789+			     unsigned int srclen, struct aead_request *req)
790+{
791+	struct crypto_aead *tls = crypto_aead_reqtfm(req);
792+	struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
793+	struct tls_request_ctx *treq_ctx = aead_request_ctx(req);
794+	struct async_op ahash_op;
795+	struct ahash_request *ahreq = (void *)(treq_ctx->tail + ctx->reqoff);
796+	unsigned int flags = CRYPTO_TFM_REQ_MAY_SLEEP;
797+	int err = -EBADMSG;
798+
799+	 /* Bail out if the request assoc len is 0 */
800+	if (!req->assoclen)
801+		return err;
802+
803+	init_completion(&ahash_op.completion);
804+
805+	/* the hash transform to be executed comes from the original request */
806+	ahash_request_set_tfm(ahreq, ctx->auth);
807+	/* prepare the hash request with input data and result pointer */
808+	ahash_request_set_crypt(ahreq, src, hash, srclen);
809+	/* set the notifier for when the async hash function returns */
810+	ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
811+				   tls_async_op_done, &ahash_op);
812+
813+	/* Calculate the digest on the given data. The result is put in hash */
814+	err = crypto_ahash_digest(ahreq);
815+	if (err == -EINPROGRESS) {
816+		err = wait_for_completion_interruptible(&ahash_op.completion);
817+		if (!err)
818+			err = ahash_op.err;
819+	}
820+
821+	return err;
822+}
823+
824+/**
825+ * crypto_tls_gen_padicv - Calculate and pad hmac digest for a TLS record
826+ * @hash:	(output) buffer to save the digest and padding into
827+ * @phashlen:	(output) the size of digest + padding
828+ * @req:	(input) aead request
829+ **/
830+static int crypto_tls_gen_padicv(u8 *hash, unsigned int *phashlen,
831+				 struct aead_request *req)
832+{
833+	struct crypto_aead *tls = crypto_aead_reqtfm(req);
834+	unsigned int hash_size = crypto_aead_authsize(tls);
835+	unsigned int block_size = crypto_aead_blocksize(tls);
836+	unsigned int srclen = req->cryptlen + hash_size;
837+	unsigned int icvlen = req->cryptlen + req->assoclen;
838+	unsigned int padlen;
839+	int err;
840+
841+	err = crypto_tls_genicv(hash, req->src, icvlen, req);
842+	if (err)
843+		goto out;
844+
845+	/* add padding after digest */
846+	padlen = block_size - (srclen % block_size);
847+	memset(hash + hash_size, padlen - 1, padlen);
848+
849+	*phashlen = hash_size + padlen;
850+out:
851+	return err;
852+}
853+
854+static int crypto_tls_copy_data(struct aead_request *req,
855+				struct scatterlist *src,
856+				struct scatterlist *dst,
857+				unsigned int len)
858+{
859+	struct crypto_aead *tls = crypto_aead_reqtfm(req);
860+	struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
861+	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
862+
863+	skcipher_request_set_sync_tfm(skreq, ctx->null);
864+	skcipher_request_set_callback(skreq, aead_request_flags(req),
865+				      NULL, NULL);
866+	skcipher_request_set_crypt(skreq, src, dst, len, NULL);
867+
868+	return crypto_skcipher_encrypt(skreq);
869+}
870+
871+static int crypto_tls_encrypt(struct aead_request *req)
872+{
873+	struct crypto_aead *tls = crypto_aead_reqtfm(req);
874+	struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
875+	struct tls_request_ctx *treq_ctx = aead_request_ctx(req);
876+	struct skcipher_request *skreq;
877+	struct scatterlist *cipher = treq_ctx->cipher;
878+	struct scatterlist *tmp = treq_ctx->tmp;
879+	struct scatterlist *sg, *src, *dst;
880+	unsigned int cryptlen, phashlen;
881+	u8 *hash = treq_ctx->tail;
882+	int err;
883+
884+	/*
885+	 * The hash result is saved at the beginning of the tls request ctx
886+	 * and is aligned as required by the hash transform. Enough space was
887+	 * allocated in crypto_tls_init_tfm to accommodate the difference. The
888+	 * requests themselves start later at treq_ctx->tail + ctx->reqoff so
889+	 * the result is not overwritten by the second (cipher) request.
890+	 */
891+	hash = (u8 *)ALIGN((unsigned long)hash +
892+			   crypto_ahash_alignmask(ctx->auth),
893+			   crypto_ahash_alignmask(ctx->auth) + 1);
894+
895+	/*
896+	 * STEP 1: create ICV together with necessary padding
897+	 */
898+	err = crypto_tls_gen_padicv(hash, &phashlen, req);
899+	if (err)
900+		return err;
901+
902+	/*
903+	 * STEP 2: Hash and padding are combined with the payload
904+	 * depending on the form it arrives. Scatter tables must have at least
905+	 * one page of data before chaining with another table and can't have
906+	 * an empty data page. The following code addresses these requirements.
907+	 *
908+	 * If the payload is empty, only the hash is encrypted, otherwise the
909+	 * payload scatterlist is merged with the hash. A special merging case
910+	 * is when the payload has only one page of data. In that case the
911+	 * payload page is moved to another scatterlist and prepared there for
912+	 * encryption.
913+	 */
914+	if (req->cryptlen) {
915+		src = scatterwalk_ffwd(tmp, req->src, req->assoclen);
916+
917+		sg_init_table(cipher, 2);
918+		sg_set_buf(cipher + 1, hash, phashlen);
919+
920+		if (sg_is_last(src)) {
921+			sg_set_page(cipher, sg_page(src), req->cryptlen,
922+				    src->offset);
923+			src = cipher;
924+		} else {
925+			unsigned int rem_len = req->cryptlen;
926+
927+			for (sg = src; rem_len > sg->length; sg = sg_next(sg))
928+				rem_len -= min(rem_len, sg->length);
929+
930+			sg_set_page(cipher, sg_page(sg), rem_len, sg->offset);
931+			sg_chain(sg, 1, cipher);
932+		}
933+	} else {
934+		sg_init_one(cipher, hash, phashlen);
935+		src = cipher;
936+	}
937+
938+	/**
939+	 * If src != dst copy the associated data from source to destination.
940+	 * In both cases fast-forward past the associated data in the dest.
941+	 */
942+	if (req->src != req->dst) {
943+		err = crypto_tls_copy_data(req, req->src, req->dst,
944+					   req->assoclen);
945+		if (err)
946+			return err;
947+	}
948+	dst = scatterwalk_ffwd(treq_ctx->dst, req->dst, req->assoclen);
949+
950+	/*
951+	 * STEP 3: encrypt the frame and return the result
952+	 */
953+	cryptlen = req->cryptlen + phashlen;
954+
955+	/*
956+	 * The hash and the cipher are applied at different times and their
957+	 * requests can use the same memory space without interference
958+	 */
959+	skreq = (void *)(treq_ctx->tail + ctx->reqoff);
960+	skcipher_request_set_tfm(skreq, ctx->enc);
961+	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
962+	skcipher_request_set_callback(skreq, aead_request_flags(req),
963+				      req->base.complete, req->base.data);
964+	/*
965+	 * Apply the cipher transform. The result will be in req->dst when the
966+	 * asynchronous call terminates
967+	 */
968+	err = crypto_skcipher_encrypt(skreq);
969+
970+	return err;
971+}
972+
973+static int crypto_tls_decrypt(struct aead_request *req)
974+{
975+	struct crypto_aead *tls = crypto_aead_reqtfm(req);
976+	struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
977+	struct tls_request_ctx *treq_ctx = aead_request_ctx(req);
978+	unsigned int cryptlen = req->cryptlen;
979+	unsigned int hash_size = crypto_aead_authsize(tls);
980+	unsigned int block_size = crypto_aead_blocksize(tls);
981+	struct skcipher_request *skreq = (void *)(treq_ctx->tail + ctx->reqoff);
982+	struct scatterlist *tmp = treq_ctx->tmp;
983+	struct scatterlist *src, *dst;
984+
985+	u8 padding[255]; /* padding can be 0-255 bytes */
986+	u8 pad_size;
987+	u16 *len_field;
988+	u8 *ihash, *hash = treq_ctx->tail;
989+
990+	int paderr = 0;
991+	int err = -EINVAL;
992+	int i;
993+	struct async_op ciph_op;
994+
995+	/*
996+	 * Rule out bad packets. The input packet length must be at least one
997+	 * byte more than the hash_size and a whole number of cipher blocks.
998+	 */
999+	if (cryptlen <= hash_size || cryptlen % block_size)
1000+		goto out;
1001+
1002+	/*
1003+	 * Step 1 - Decrypt the source. Fast-forward past the associated data
1004+	 * to the encrypted data. The result is overwritten in place so that
1005+	 * the decrypted data is adjacent to the associated data; the last
1006+	 * step (computing the hash) then finds its input ready at req->src.
1007+	 * NOTE(review): if the interruptible wait below is interrupted, skreq
1008+	 * may still be in flight referencing on-stack ciph_op - confirm.
1009+	 */
1010+	src = scatterwalk_ffwd(tmp, req->src, req->assoclen);
1011+	dst = src;
1012+
1013+	init_completion(&ciph_op.completion);
1014+	skcipher_request_set_tfm(skreq, ctx->enc);
1015+	skcipher_request_set_callback(skreq, aead_request_flags(req),
1016+				      tls_async_op_done, &ciph_op);
1017+	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
1018+	err = crypto_skcipher_decrypt(skreq);
1019+	if (err == -EINPROGRESS) {
1020+		err = wait_for_completion_interruptible(&ciph_op.completion);
1021+		if (!err)
1022+			err = ciph_op.err;
1023+	}
1024+	if (err)
1025+		goto out;
1026+
1027+	/*
1028+	 * Step 2 - Verify padding
1029+	 * Retrieve the last byte of the payload; this is the padding size.
1030+	 */
1031+	cryptlen -= 1;
1032+	scatterwalk_map_and_copy(&pad_size, dst, cryptlen, 1, 0);
1033+
1034+	/* RFC recommendation for invalid padding size. */
1035+	if (cryptlen < pad_size + hash_size) {
1036+		pad_size = 0;
1037+		paderr = -EBADMSG;
1038+	}
1039+	cryptlen -= pad_size;
1040+	scatterwalk_map_and_copy(padding, dst, cryptlen, pad_size, 0);
1041+
1042+	/* Each padding byte must equal pad_size. NOTE(review): not constant-time */
1043+	for (i = 0; i < pad_size; i++)
1044+		if (padding[i] != pad_size)
1045+			paderr = -EBADMSG;
1046+
1047+	/*
1048+	 * Step 3 - Verify hash
1049+	 * Align the digest result as required by the hash transform. Enough
1050+	 * space was allocated in crypto_tls_init_tfm
1051+	 */
1052+	hash = (u8 *)ALIGN((unsigned long)hash +
1053+			   crypto_ahash_alignmask(ctx->auth),
1054+			   crypto_ahash_alignmask(ctx->auth) + 1);
1055+	/*
1056+	 * The last two bytes of the associated data form the length field;
1057+	 * update it with the cleartext length before hashing. NOTE(review):
1058+	 * sg_virt() assumes assoclen fits in the first src segment - confirm.
1059+	 */
1060+	len_field = sg_virt(req->src) + req->assoclen - 2;
1061+	cryptlen -= hash_size;
1062+	*len_field = htons(cryptlen);
1063+
1064+	/* This is the hash from the decrypted packet. Save it for later */
1065+	ihash = hash + hash_size;
1066+	scatterwalk_map_and_copy(ihash, dst, cryptlen, hash_size, 0);
1067+
1068+	/* Recompute our ICV and compare. NOTE(review): prefer crypto_memneq() */
1069+	err = crypto_tls_genicv(hash, req->src, cryptlen + req->assoclen, req);
1070+	if (!err)
1071+		err = memcmp(hash, ihash, hash_size) ? -EBADMSG : 0;
1072+
1073+	if (req->src != req->dst) {
1074+		err = crypto_tls_copy_data(req, req->src, req->dst, cryptlen +
1075+					   req->assoclen);
1076+		if (err)
1077+			goto out;
1078+	}
1079+
1080+	/* return the first found error */
1081+	if (paderr)
1082+		err = paderr;
1083+
1084+out:
1085+	aead_request_complete(req, err); /* NOTE(review): also runs on sync path */
1086+	return err;
1087+}
1087+
1088+static int crypto_tls_init_tfm(struct crypto_aead *tfm)
1089+{
1090+	struct aead_instance *inst = aead_alg_instance(tfm);
1091+	struct tls_instance_ctx *ictx = aead_instance_ctx(inst);
1092+	struct crypto_tls_ctx *ctx = crypto_aead_ctx(tfm);
1093+	struct crypto_ahash *auth;
1094+	struct crypto_skcipher *enc;
1095+	struct crypto_sync_skcipher *null;
1096+	int err;
1097+
1098+	auth = crypto_spawn_ahash(&ictx->auth);
1099+	if (IS_ERR(auth))
1100+		return PTR_ERR(auth);
1101+
1102+	enc = crypto_spawn_skcipher(&ictx->enc);
1103+	err = PTR_ERR(enc);
1104+	if (IS_ERR(enc))
1105+		goto err_free_ahash;
1106+
1107+	null = crypto_get_default_null_skcipher(); /* put in crypto_tls_exit_tfm */
1108+	err = PTR_ERR(null);
1109+	if (IS_ERR(null))
1110+		goto err_free_skcipher;
1111+
1112+	ctx->auth = auth;
1113+	ctx->enc = enc;
1114+	ctx->null = null;
1115+
1116+	/*
1117+	 * Allow enough space for two digests. The two digests will be compared
1118+	 * during the decryption phase. One will come from the decrypted packet
1119+	 * and the other will be calculated. For encryption, one digest is
1120+	 * padded (up to a cipher blocksize) and chained with the payload.
1121+	 */
1122+	ctx->reqoff = ALIGN(crypto_ahash_digestsize(auth) +
1123+			    crypto_ahash_alignmask(auth),
1124+			    crypto_ahash_alignmask(auth) + 1) +
1125+			    max(crypto_ahash_digestsize(auth),
1126+				crypto_skcipher_blocksize(enc));
1127+
1128+	crypto_aead_set_reqsize(tfm,
1129+				sizeof(struct tls_request_ctx) +
1130+				ctx->reqoff +
1131+				max_t(unsigned int,
1132+				      crypto_ahash_reqsize(auth) +
1133+				      sizeof(struct ahash_request),
1134+				      crypto_skcipher_reqsize(enc) +
1135+				      sizeof(struct skcipher_request)));
1136+
1137+	return 0;
1138+
1139+err_free_skcipher:
1140+	crypto_free_skcipher(enc);
1141+err_free_ahash:
1142+	crypto_free_ahash(auth);
1143+	return err;
1144+}
1145+
1146+static void crypto_tls_exit_tfm(struct crypto_aead *tfm)
1147+{
1148+	struct crypto_tls_ctx *ctx = crypto_aead_ctx(tfm);
1149+
1150+	crypto_free_ahash(ctx->auth);
1151+	crypto_free_skcipher(ctx->enc);
1152+	crypto_put_default_null_skcipher(); /* drops ref taken in init_tfm */
1153+}
1154+
1155+static void crypto_tls_free(struct aead_instance *inst)
1156+{
1157+	struct tls_instance_ctx *ctx = aead_instance_ctx(inst);
1158+
1159+	crypto_drop_skcipher(&ctx->enc);
1160+	crypto_drop_ahash(&ctx->auth);
1161+	kfree(inst); /* ctx is embedded in inst (see crypto_tls_create) */
1162+}
1163+
1164+static int crypto_tls_create(struct crypto_template *tmpl, struct rtattr **tb)
1165+{
1166+	struct crypto_attr_type *algt;
1167+	struct aead_instance *inst;
1168+	struct hash_alg_common *auth;
1169+	struct crypto_alg *auth_base;
1170+	struct skcipher_alg *enc;
1171+	struct tls_instance_ctx *ctx;
1172+	u32 mask;
1173+	int err;
1174+
1175+	algt = crypto_get_attr_type(tb);
1176+	if (IS_ERR(algt))
1177+		return PTR_ERR(algt);
1178+	/* NOTE(review): likely redundant with crypto_check_attr_type() below */
1179+	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
1180+		return -EINVAL;
1181+
1182+	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
1183+	if (err)
1184+		return err;
1185+
1186+	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
1187+	if (!inst)
1188+		return -ENOMEM;
1189+	ctx = aead_instance_ctx(inst);
1190+
1191+	err = crypto_grab_ahash(&ctx->auth, aead_crypto_instance(inst),
1192+				crypto_attr_alg_name(tb[1]), 0, mask);
1193+	if (err)
1194+		goto err_free_inst;
1195+	auth = crypto_spawn_ahash_alg(&ctx->auth);
1196+	auth_base = &auth->base;
1197+
1198+	err = crypto_grab_skcipher(&ctx->enc, aead_crypto_instance(inst),
1199+				   crypto_attr_alg_name(tb[2]), 0, mask);
1200+	if (err)
1201+		goto err_free_inst;
1202+	enc = crypto_spawn_skcipher_alg(&ctx->enc);
1203+
1204+	err = -ENAMETOOLONG;
1205+	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
1206+		     "tls10(%s,%s)", auth_base->cra_name,
1207+		     enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
1208+		goto err_free_inst;
1209+
1210+	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
1211+		     "tls10(%s,%s)", auth_base->cra_driver_name,
1212+		     enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
1213+		goto err_free_inst;
1214+
1215+	inst->alg.base.cra_flags = (auth_base->cra_flags |
1216+					enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
1217+	inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
1218+					auth_base->cra_priority;
1219+	inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
1220+	inst->alg.base.cra_alignmask = auth_base->cra_alignmask |
1221+					enc->base.cra_alignmask;
1222+	inst->alg.base.cra_ctxsize = sizeof(struct crypto_tls_ctx);
1223+
1224+	inst->alg.ivsize = crypto_skcipher_alg_ivsize(enc);
1225+	inst->alg.chunksize = crypto_skcipher_alg_chunksize(enc);
1226+	inst->alg.maxauthsize = auth->digestsize;
1227+
1228+	inst->alg.init = crypto_tls_init_tfm;
1229+	inst->alg.exit = crypto_tls_exit_tfm;
1230+
1231+	inst->alg.setkey = crypto_tls_setkey;
1232+	inst->alg.encrypt = crypto_tls_encrypt;
1233+	inst->alg.decrypt = crypto_tls_decrypt;
1234+
1235+	inst->free = crypto_tls_free;
1236+
1237+	err = aead_register_instance(tmpl, inst);
1238+	if (err) {
1239+err_free_inst:	/* label inside if(): entered via goto on earlier errors */
1240+		crypto_tls_free(inst);
1241+	}
1242+
1243+	return err;
1244+}
1245+
1246+static struct crypto_template crypto_tls_tmpl = {
1247+	.name = "tls10",	/* e.g. tls10(hmac(sha1),cbc(aes)) */
1248+	.create = crypto_tls_create,
1249+	.module = THIS_MODULE,
1250+};
1251+
1252+static int __init crypto_tls_module_init(void)
1253+{
1254+	return crypto_register_template(&crypto_tls_tmpl);
1255+}
1256+
1257+static void __exit crypto_tls_module_exit(void)
1258+{
1259+	crypto_unregister_template(&crypto_tls_tmpl);
1260+}
1261+
1262+module_init(crypto_tls_module_init);
1263+module_exit(crypto_tls_module_exit);
1264+
1265+MODULE_LICENSE("GPL");
1266+MODULE_DESCRIPTION("TLS 1.0 record encryption");
1267+MODULE_ALIAS_CRYPTO("tls10"); /* allows autoload on "tls10" requests */
1268