/*
 * AMD Cryptographic Coprocessor (CCP) AES GCM crypto API support
 *
 * Copyright (C) 2016,2017 Advanced Micro Devices, Inc.
 *
 * Author: Gary R Hook <gary.hook@amd.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/module.h>
#include <linux/sched.h>
#include <linux/delay.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <crypto/internal/aead.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>

#include "ccp-crypto.h"

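/* Request completion callback: no post-processing is needed here, just
 * propagate the CCP status back to the crypto layer.
 */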
static int ccp_aes_gcm_complete(struct crypto_async_request *async_req, int ret)
{
	return ret;
}

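/* Program a new AES key: record the key size (128/192/256 bits), keep a
 * copy of the key material in the transform context and wrap it in a
 * one-entry scatterlist for later submission to the CCP.
 */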
static int ccp_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			      unsigned int key_len)
{
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->u.aes.type = CCP_AES_TYPE_128;
		break;
	case AES_KEYSIZE_192:
		ctx->u.aes.type = CCP_AES_TYPE_192;
		break;
	case AES_KEYSIZE_256:
		ctx->u.aes.type = CCP_AES_TYPE_256;
		break;
	default:
		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx->u.aes.mode = CCP_AES_MODE_GCM;
	ctx->u.aes.key_len = key_len;

	memcpy(ctx->u.aes.key, key, key_len);
	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);

	return 0;
}

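/* Only the tag lengths valid for GCM are accepted:
 * 16, 15, 14, 13, 12, 8 or 4 bytes.
 */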
static int ccp_aes_gcm_setauthsize(struct crypto_aead *tfm,
				   unsigned int authsize)
{
	switch (authsize) {
	case 16:
	case 15:
	case 14:
	case 13:
	case 12:
	case 8:
	case 4:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

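/* Common encrypt/decrypt path: validate the transform state, expand the
 * 12-byte GCM nonce into a full 16-byte counter block (counter = 1) and
 * package the request into a single ccp_cmd for the command queue.
 */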
static int ccp_aes_gcm_crypt(struct aead_request *req, bool encrypt)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);
	struct ccp_aes_req_ctx *rctx = aead_request_ctx(req);
	struct scatterlist *iv_sg = NULL;
	unsigned int iv_len = 0;
	int i;
	int ret = 0;

	if (!ctx->u.aes.key_len)
		return -EINVAL;

	if (ctx->u.aes.mode != CCP_AES_MODE_GCM)
		return -EINVAL;

	if (!req->iv)
		return -EINVAL;

	/*
	 * 5 parts:
	 *   plaintext/ciphertext input
	 *   AAD
	 *   key
	 *   IV
	 *   Destination+tag buffer
	 */

	/* Prepare the IV: 12 bytes + an integer (counter) */
	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	for (i = 0; i < 3; i++)
		rctx->iv[i + GCM_AES_IV_SIZE] = 0;
	rctx->iv[AES_BLOCK_SIZE - 1] = 1;

	/* Set up a scatterlist for the IV */
	iv_sg = &rctx->iv_sg;
	iv_len = AES_BLOCK_SIZE;
	sg_init_one(iv_sg, rctx->iv, iv_len);

	/* The AAD + plaintext are concatenated in the src buffer */
	memset(&rctx->cmd, 0, sizeof(rctx->cmd));
	INIT_LIST_HEAD(&rctx->cmd.entry);
	rctx->cmd.engine = CCP_ENGINE_AES;
	rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm);
	rctx->cmd.u.aes.type = ctx->u.aes.type;
	rctx->cmd.u.aes.mode = ctx->u.aes.mode;
	rctx->cmd.u.aes.action = encrypt;
	rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
	rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
	rctx->cmd.u.aes.iv = iv_sg;
	rctx->cmd.u.aes.iv_len = iv_len;
	rctx->cmd.u.aes.src = req->src;
	rctx->cmd.u.aes.src_len = req->cryptlen;
	rctx->cmd.u.aes.aad_len = req->assoclen;

	/* The cipher text + the tag are in the dst buffer */
	rctx->cmd.u.aes.dst = req->dst;

	ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);

	return ret;
}

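/* Thin wrappers selecting the CCP action for the common crypt path */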
static int ccp_aes_gcm_encrypt(struct aead_request *req)
{
	return ccp_aes_gcm_crypt(req, CCP_AES_ACTION_ENCRYPT);
}

static int ccp_aes_gcm_decrypt(struct aead_request *req)
{
	return ccp_aes_gcm_crypt(req, CCP_AES_ACTION_DECRYPT);
}

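/* Initialize the transform context: hook up the completion callback,
 * mark the key as not yet set and size the per-request context.
 */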
static int ccp_aes_gcm_cra_init(struct crypto_aead *tfm)
{
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->complete = ccp_aes_gcm_complete;
	ctx->u.aes.key_len = 0;

	crypto_aead_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));

	return 0;
}

static void ccp_aes_gcm_cra_exit(struct crypto_tfm *tfm)
{
}

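/* Template AEAD definition; registered instances are copied from this
 * and have their names and block size filled in per algorithm.
 */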
static struct aead_alg ccp_aes_gcm_defaults = {
	.setkey = ccp_aes_gcm_setkey,
	.setauthsize = ccp_aes_gcm_setauthsize,
	.encrypt = ccp_aes_gcm_encrypt,
	.decrypt = ccp_aes_gcm_decrypt,
	.init = ccp_aes_gcm_cra_init,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
	.base = {
		.cra_flags	= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize	= AES_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ccp_ctx),
		.cra_priority	= CCP_CRA_PRIORITY,
		.cra_type	= &crypto_ablkcipher_type,
		.cra_exit	= ccp_aes_gcm_cra_exit,
		.cra_module	= THIS_MODULE,
	},
};

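/* Describes one AEAD algorithm variant together with the minimum CCP
 * hardware version required to support it.
 */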
struct ccp_aes_aead_def {
	enum ccp_aes_mode mode;
	unsigned int version;
	const char *name;
	const char *driver_name;
	unsigned int blocksize;
	unsigned int ivsize;
	struct aead_alg *alg_defaults;
};

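/* AES-GCM (GHASH mode) is only registered on CCP version 5.0 or newer */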
static struct ccp_aes_aead_def aes_aead_algs[] = {
	{
		.mode		= CCP_AES_MODE_GHASH,
		.version	= CCP_VERSION(5, 0),
		.name		= "gcm(aes)",
		.driver_name	= "gcm-aes-ccp",
		.blocksize	= 1,
		.ivsize		= AES_BLOCK_SIZE,
		.alg_defaults	= &ccp_aes_gcm_defaults,
	},
};

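/* Allocate a ccp_crypto_aead wrapper, fill it in from the template and
 * the per-algorithm definition, register it with the crypto API and add
 * it to the driver's list of registered algorithms.
 */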
static int ccp_register_aes_aead(struct list_head *head,
				 const struct ccp_aes_aead_def *def)
{
	struct ccp_crypto_aead *ccp_aead;
	struct aead_alg *alg;
	int ret;

	ccp_aead = kzalloc(sizeof(*ccp_aead), GFP_KERNEL);
	if (!ccp_aead)
		return -ENOMEM;

	INIT_LIST_HEAD(&ccp_aead->entry);

	ccp_aead->mode = def->mode;

	/* Copy the defaults and override as necessary */
	alg = &ccp_aead->alg;
	*alg = *def->alg_defaults;
	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 def->driver_name);
	alg->base.cra_blocksize = def->blocksize;
	alg->base.cra_ablkcipher.ivsize = def->ivsize;

	ret = crypto_register_aead(alg);
	if (ret) {
		pr_err("%s aead algorithm registration error (%d)\n",
		       alg->base.cra_name, ret);
		kfree(ccp_aead);
		return ret;
	}

	list_add(&ccp_aead->entry, head);

	return 0;
}

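/* Register every AEAD algorithm supported by the detected CCP version;
 * entries that require a newer device are skipped.
 */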
int ccp_register_aes_aeads(struct list_head *head)
{
	int i, ret;
	unsigned int ccpversion = ccp_version();

	for (i = 0; i < ARRAY_SIZE(aes_aead_algs); i++) {
		if (aes_aead_algs[i].version > ccpversion)
			continue;
		ret = ccp_register_aes_aead(head, &aes_aead_algs[i]);
		if (ret)
			return ret;
	}

	return 0;
}