1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * Copyright (c) 2021 Aspeed Technology Inc.
4  */
5 
6 #include "aspeed-hace.h"
7 
8 #ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_CRYPTO_DEBUG
9 #define CIPHER_DBG(h, fmt, ...)	\
10 	dev_info((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
11 #else
12 #define CIPHER_DBG(h, fmt, ...)	\
13 	dev_dbg((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
14 #endif
15 
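/*
 * Run a request on the software fallback skcipher, reusing the original
 * request's callback, buffers, length and IV.
 */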
16 static int aspeed_crypto_do_fallback(struct skcipher_request *areq)
17 {
18 	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);
19 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
20 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
21 	int err;
22 
23 	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
24 	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
25 				      areq->base.complete, areq->base.data);
26 	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
27 				   areq->cryptlen, areq->iv);
28 
29 	if (rctx->enc_cmd & HACE_CMD_ENCRYPT)
30 		err = crypto_skcipher_encrypt(&rctx->fallback_req);
31 	else
32 		err = crypto_skcipher_decrypt(&rctx->fallback_req);
33 
34 	return err;
35 }
36 
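/*
 * Report whether a request must take the software fallback: zero-length
 * requests and lengths not aligned to the DES/AES block size.
 */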
37 static bool aspeed_crypto_need_fallback(struct skcipher_request *areq)
38 {
39 	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);
40 
41 	if (areq->cryptlen == 0)
42 		return true;
43 
44 	if ((rctx->enc_cmd & HACE_CMD_DES_SELECT) &&
45 	    !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE))
46 		return true;
47 
48 	if ((!(rctx->enc_cmd & HACE_CMD_DES_SELECT)) &&
49 	    !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE))
50 		return true;
51 
52 	return false;
53 }
54 
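/*
 * Queue a request on the crypto engine, or run it through the software
 * fallback on AST2500 when it cannot be handled in hardware.
 */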
55 static int aspeed_hace_crypto_handle_queue(struct aspeed_hace_dev *hace_dev,
56 					   struct skcipher_request *req)
57 {
58 	if (hace_dev->version == AST2500_VERSION &&
59 	    aspeed_crypto_need_fallback(req)) {
60 		CIPHER_DBG(hace_dev, "SW fallback\n");
61 		return aspeed_crypto_do_fallback(req);
62 	}
63 
64 	return crypto_transfer_skcipher_request_to_engine(
65 			hace_dev->crypt_engine_crypto, req);
66 }
67 
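/*
 * crypto_engine worker callback: record the active request, mark the
 * engine busy and invoke the ctx->start trigger.
 */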
68 static int aspeed_crypto_do_request(struct crypto_engine *engine, void *areq)
69 {
70 	struct skcipher_request *req = skcipher_request_cast(areq);
71 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
72 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
73 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
74 	struct aspeed_engine_crypto *crypto_engine;
75 	int rc;
76 
77 	crypto_engine = &hace_dev->crypto_engine;
78 	crypto_engine->req = req;
79 	crypto_engine->flags |= CRYPTO_FLAGS_BUSY;
80 
81 	rc = ctx->start(hace_dev);
82 
83 	if (rc != -EINPROGRESS)
84 		return -EIO;
85 
86 	return 0;
87 }
88 
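/*
 * Complete a request: copy the next IV back from the hardware context
 * when the mode uses one, clear the busy flag and notify the engine.
 */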
89 static int aspeed_sk_complete(struct aspeed_hace_dev *hace_dev, int err)
90 {
91 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
92 	struct aspeed_cipher_reqctx *rctx;
93 	struct skcipher_request *req;
94 
95 	CIPHER_DBG(hace_dev, "\n");
96 
97 	req = crypto_engine->req;
98 	rctx = skcipher_request_ctx(req);
99 
100 	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
101 		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
102 			memcpy(req->iv, crypto_engine->cipher_ctx +
103 			       DES_KEY_SIZE, DES_KEY_SIZE);
104 		else
105 			memcpy(req->iv, crypto_engine->cipher_ctx,
106 			       AES_BLOCK_SIZE);
107 	}
108 
109 	crypto_engine->flags &= ~CRYPTO_FLAGS_BUSY;
110 
111 	crypto_finalize_skcipher_request(hace_dev->crypt_engine_crypto, req,
112 					 err);
113 
114 	return err;
115 }
116 
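/* Scatter-gather completion: unmap the DMA mappings and finish the request. */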
117 static int aspeed_sk_transfer_sg(struct aspeed_hace_dev *hace_dev)
118 {
119 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
120 	struct device *dev = hace_dev->dev;
121 	struct aspeed_cipher_reqctx *rctx;
122 	struct skcipher_request *req;
123 
124 	CIPHER_DBG(hace_dev, "\n");
125 
126 	req = crypto_engine->req;
127 	rctx = skcipher_request_ctx(req);
128 
129 	if (req->src == req->dst) {
130 		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL);
131 	} else {
132 		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
133 		dma_unmap_sg(dev, req->dst, rctx->dst_nents, DMA_FROM_DEVICE);
134 	}
135 
136 	return aspeed_sk_complete(hace_dev, 0);
137 }
138 
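/*
 * Bounce-buffer completion: copy the result from the cipher buffer back
 * into the destination scatterlist.
 */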
139 static int aspeed_sk_transfer(struct aspeed_hace_dev *hace_dev)
140 {
141 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
142 	struct aspeed_cipher_reqctx *rctx;
143 	struct skcipher_request *req;
144 	struct scatterlist *out_sg;
145 	int nbytes = 0;
146 	int rc = 0;
147 
148 	req = crypto_engine->req;
149 	rctx = skcipher_request_ctx(req);
150 	out_sg = req->dst;
151 
152 	/* Copy output buffer to dst scatter-gather lists */
153 	nbytes = sg_copy_from_buffer(out_sg, rctx->dst_nents,
154 				     crypto_engine->cipher_addr, req->cryptlen);
155 	if (!nbytes) {
156 		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
157 			 "nbytes", nbytes, "cryptlen", req->cryptlen);
158 		rc = -EINVAL;
159 	}
160 
161 	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
162 		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
163 		   "nb_out_sg", rctx->dst_nents,
164 		   "cipher addr", crypto_engine->cipher_addr);
165 
166 	return aspeed_sk_complete(hace_dev, rc);
167 }
168 
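/*
 * Start a transfer in bounce-buffer mode: copy the source scatterlist into
 * the cipher buffer and program that single buffer as both source and
 * destination before triggering the engine.
 */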
169 static int aspeed_sk_start(struct aspeed_hace_dev *hace_dev)
170 {
171 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
172 	struct aspeed_cipher_reqctx *rctx;
173 	struct skcipher_request *req;
174 	struct scatterlist *in_sg;
175 	int nbytes;
176 
177 	req = crypto_engine->req;
178 	rctx = skcipher_request_ctx(req);
179 	in_sg = req->src;
180 
181 	nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents,
182 				   crypto_engine->cipher_addr, req->cryptlen);
183 
184 	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
185 		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
186 		   "nb_in_sg", rctx->src_nents,
187 		   "cipher addr", crypto_engine->cipher_addr);
188 
189 	if (!nbytes) {
190 		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
191 			 "nbytes", nbytes, "cryptlen", req->cryptlen);
192 		return -EINVAL;
193 	}
194 
195 	crypto_engine->resume = aspeed_sk_transfer;
196 
197 	/* Trigger engines */
198 	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
199 		       ASPEED_HACE_SRC);
200 	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
201 		       ASPEED_HACE_DEST);
202 	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
203 	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);
204 
205 	return -EINPROGRESS;
206 }
207 
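/*
 * Start a transfer in scatter-gather mode: map src/dst for DMA, build the
 * hardware SG descriptor lists (BIT(31) marks the final entry) and trigger
 * the engine.
 */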
208 static int aspeed_sk_start_sg(struct aspeed_hace_dev *hace_dev)
209 {
210 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
211 	struct aspeed_sg_list *src_list, *dst_list;
212 	dma_addr_t src_dma_addr, dst_dma_addr;
213 	struct aspeed_cipher_reqctx *rctx;
214 	struct skcipher_request *req;
215 	struct scatterlist *s;
216 	int src_sg_len;
217 	int dst_sg_len;
218 	int total, i;
219 	int rc;
220 
221 	CIPHER_DBG(hace_dev, "\n");
222 
223 	req = crypto_engine->req;
224 	rctx = skcipher_request_ctx(req);
225 
226 	rctx->enc_cmd |= HACE_CMD_DES_SG_CTRL | HACE_CMD_SRC_SG_CTRL |
227 			 HACE_CMD_AES_KEY_HW_EXP | HACE_CMD_MBUS_REQ_SYNC_EN;
228 
229 	/* BIDIRECTIONAL */
230 	if (req->dst == req->src) {
231 		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
232 					rctx->src_nents, DMA_BIDIRECTIONAL);
233 		dst_sg_len = src_sg_len;
234 		if (!src_sg_len) {
235 			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
236 			return -EINVAL;
237 		}
238 
239 	} else {
240 		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
241 					rctx->src_nents, DMA_TO_DEVICE);
242 		if (!src_sg_len) {
243 			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
244 			return -EINVAL;
245 		}
246 
247 		dst_sg_len = dma_map_sg(hace_dev->dev, req->dst,
248 					rctx->dst_nents, DMA_FROM_DEVICE);
249 		if (!dst_sg_len) {
250 			dev_warn(hace_dev->dev, "dma_map_sg() dst error\n");
251 			rc = -EINVAL;
252 			goto free_req_src;
253 		}
254 	}
255 
256 	src_list = (struct aspeed_sg_list *)crypto_engine->cipher_addr;
257 	src_dma_addr = crypto_engine->cipher_dma_addr;
258 	total = req->cryptlen;
259 
260 	for_each_sg(req->src, s, src_sg_len, i) {
261 		u32 phy_addr = sg_dma_address(s);
262 		u32 len = sg_dma_len(s);
263 
264 		if (total > len)
265 			total -= len;
266 		else {
267 			/* last sg list */
268 			len = total;
269 			len |= BIT(31);
270 			total = 0;
271 		}
272 
273 		src_list[i].phy_addr = cpu_to_le32(phy_addr);
274 		src_list[i].len = cpu_to_le32(len);
275 	}
276 
277 	if (total != 0) {
278 		rc = -EINVAL;
279 		goto free_req;
280 	}
281 
282 	if (req->dst == req->src) {
283 		dst_list = src_list;
284 		dst_dma_addr = src_dma_addr;
285 
286 	} else {
287 		dst_list = (struct aspeed_sg_list *)crypto_engine->dst_sg_addr;
288 		dst_dma_addr = crypto_engine->dst_sg_dma_addr;
289 		total = req->cryptlen;
290 
291 		for_each_sg(req->dst, s, dst_sg_len, i) {
292 			u32 phy_addr = sg_dma_address(s);
293 			u32 len = sg_dma_len(s);
294 
295 			if (total > len)
296 				total -= len;
297 			else {
298 				/* last sg list */
299 				len = total;
300 				len |= BIT(31);
301 				total = 0;
302 			}
303 
304 			dst_list[i].phy_addr = cpu_to_le32(phy_addr);
305 			dst_list[i].len = cpu_to_le32(len);
306 
307 		}
308 
309 		dst_list[dst_sg_len].phy_addr = 0;
310 		dst_list[dst_sg_len].len = 0;
311 	}
312 
313 	if (total != 0) {
314 		rc = -EINVAL;
315 		goto free_req;
316 	}
317 
318 	crypto_engine->resume = aspeed_sk_transfer_sg;
319 
320 	/* Memory barrier to ensure all data setup before engine starts */
321 	mb();
322 
323 	/* Trigger engines */
324 	ast_hace_write(hace_dev, src_dma_addr, ASPEED_HACE_SRC);
325 	ast_hace_write(hace_dev, dst_dma_addr, ASPEED_HACE_DEST);
326 	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
327 	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);
328 
329 	return -EINPROGRESS;
330 
331 free_req:
332 	if (req->dst == req->src) {
333 		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
334 			     DMA_BIDIRECTIONAL);
335 
336 	} else {
337 		dma_unmap_sg(hace_dev->dev, req->dst, rctx->dst_nents,
338 			     DMA_FROM_DEVICE);
339 		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
340 			     DMA_TO_DEVICE);
341 	}
342 
343 	return rc;
344 
345 free_req_src:
346 	dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
347 
348 	return rc;
349 }
350 
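/*
 * Program the common transfer state (interrupt enable, context address,
 * IV and key), then start in SG mode on AST2600 or bounce-buffer mode
 * otherwise.
 */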
351 static int aspeed_hace_skcipher_trigger(struct aspeed_hace_dev *hace_dev)
352 {
353 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
354 	struct aspeed_cipher_reqctx *rctx;
355 	struct crypto_skcipher *cipher;
356 	struct aspeed_cipher_ctx *ctx;
357 	struct skcipher_request *req;
358 
359 	CIPHER_DBG(hace_dev, "\n");
360 
361 	req = crypto_engine->req;
362 	rctx = skcipher_request_ctx(req);
363 	cipher = crypto_skcipher_reqtfm(req);
364 	ctx = crypto_skcipher_ctx(cipher);
365 
366 	/* enable interrupt */
367 	rctx->enc_cmd |= HACE_CMD_ISR_EN;
368 
369 	rctx->dst_nents = sg_nents(req->dst);
370 	rctx->src_nents = sg_nents(req->src);
371 
372 	ast_hace_write(hace_dev, crypto_engine->cipher_ctx_dma,
373 		       ASPEED_HACE_CONTEXT);
374 
375 	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
376 		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
377 			memcpy(crypto_engine->cipher_ctx + DES_BLOCK_SIZE,
378 			       req->iv, DES_BLOCK_SIZE);
379 		else
380 			memcpy(crypto_engine->cipher_ctx, req->iv,
381 			       AES_BLOCK_SIZE);
382 	}
383 
384 	if (hace_dev->version == AST2600_VERSION) {
385 		memcpy(crypto_engine->cipher_ctx + 16, ctx->key, ctx->key_len);
386 
387 		return aspeed_sk_start_sg(hace_dev);
388 	}
389 
390 	memcpy(crypto_engine->cipher_ctx + 16, ctx->key, AES_MAX_KEYLENGTH);
391 
392 	return aspeed_sk_start(hace_dev);
393 }
394 
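/*
 * Build the DES/3DES command word for the requested mode and queue the
 * request; CBC and ECB require block-aligned lengths.
 */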
395 static int aspeed_des_crypt(struct skcipher_request *req, u32 cmd)
396 {
397 	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
398 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
399 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
400 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
401 	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;
402 
403 	CIPHER_DBG(hace_dev, "\n");
404 
405 	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
406 		if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE))
407 			return -EINVAL;
408 	}
409 
410 	rctx->enc_cmd = cmd | HACE_CMD_DES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
411 			HACE_CMD_DES | HACE_CMD_CONTEXT_LOAD_ENABLE |
412 			HACE_CMD_CONTEXT_SAVE_ENABLE;
413 
414 	return aspeed_hace_crypto_handle_queue(hace_dev, req);
415 }
416 
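/*
 * Verify and store a DES or 3DES key and propagate it to the software
 * fallback tfm.
 */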
417 static int aspeed_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
418 			     unsigned int keylen)
419 {
420 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
421 	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
422 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
423 	int rc;
424 
425 	CIPHER_DBG(hace_dev, "keylen: %d bytes\n", keylen);
426 
427 	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
428 		dev_warn(hace_dev->dev, "invalid keylen: %d bytes\n", keylen);
429 		return -EINVAL;
430 	}
431 
432 	if (keylen == DES_KEY_SIZE) {
433 		rc = crypto_des_verify_key(tfm, key);
434 		if (rc)
435 			return rc;
436 
437 	} else if (keylen == DES3_EDE_KEY_SIZE) {
438 		rc = crypto_des3_ede_verify_key(tfm, key);
439 		if (rc)
440 			return rc;
441 	}
442 
443 	memcpy(ctx->key, key, keylen);
444 	ctx->key_len = keylen;
445 
446 	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
447 	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
448 				  CRYPTO_TFM_REQ_MASK);
449 
450 	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
451 }
452 
453 static int aspeed_tdes_ctr_decrypt(struct skcipher_request *req)
454 {
455 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
456 				HACE_CMD_TRIPLE_DES);
457 }
458 
459 static int aspeed_tdes_ctr_encrypt(struct skcipher_request *req)
460 {
461 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
462 				HACE_CMD_TRIPLE_DES);
463 }
464 
465 static int aspeed_tdes_ofb_decrypt(struct skcipher_request *req)
466 {
467 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB |
468 				HACE_CMD_TRIPLE_DES);
469 }
470 
471 static int aspeed_tdes_ofb_encrypt(struct skcipher_request *req)
472 {
473 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB |
474 				HACE_CMD_TRIPLE_DES);
475 }
476 
477 static int aspeed_tdes_cfb_decrypt(struct skcipher_request *req)
478 {
479 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB |
480 				HACE_CMD_TRIPLE_DES);
481 }
482 
483 static int aspeed_tdes_cfb_encrypt(struct skcipher_request *req)
484 {
485 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB |
486 				HACE_CMD_TRIPLE_DES);
487 }
488 
489 static int aspeed_tdes_cbc_decrypt(struct skcipher_request *req)
490 {
491 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
492 				HACE_CMD_TRIPLE_DES);
493 }
494 
495 static int aspeed_tdes_cbc_encrypt(struct skcipher_request *req)
496 {
497 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
498 				HACE_CMD_TRIPLE_DES);
499 }
500 
501 static int aspeed_tdes_ecb_decrypt(struct skcipher_request *req)
502 {
503 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
504 				HACE_CMD_TRIPLE_DES);
505 }
506 
507 static int aspeed_tdes_ecb_encrypt(struct skcipher_request *req)
508 {
509 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
510 				HACE_CMD_TRIPLE_DES);
511 }
512 
513 static int aspeed_des_ctr_decrypt(struct skcipher_request *req)
514 {
515 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
516 				HACE_CMD_SINGLE_DES);
517 }
518 
519 static int aspeed_des_ctr_encrypt(struct skcipher_request *req)
520 {
521 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
522 				HACE_CMD_SINGLE_DES);
523 }
524 
525 static int aspeed_des_ofb_decrypt(struct skcipher_request *req)
526 {
527 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB |
528 				HACE_CMD_SINGLE_DES);
529 }
530 
531 static int aspeed_des_ofb_encrypt(struct skcipher_request *req)
532 {
533 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB |
534 				HACE_CMD_SINGLE_DES);
535 }
536 
537 static int aspeed_des_cfb_decrypt(struct skcipher_request *req)
538 {
539 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB |
540 				HACE_CMD_SINGLE_DES);
541 }
542 
543 static int aspeed_des_cfb_encrypt(struct skcipher_request *req)
544 {
545 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB |
546 				HACE_CMD_SINGLE_DES);
547 }
548 
549 static int aspeed_des_cbc_decrypt(struct skcipher_request *req)
550 {
551 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
552 				HACE_CMD_SINGLE_DES);
553 }
554 
555 static int aspeed_des_cbc_encrypt(struct skcipher_request *req)
556 {
557 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
558 				HACE_CMD_SINGLE_DES);
559 }
560 
561 static int aspeed_des_ecb_decrypt(struct skcipher_request *req)
562 {
563 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
564 				HACE_CMD_SINGLE_DES);
565 }
566 
567 static int aspeed_des_ecb_encrypt(struct skcipher_request *req)
568 {
569 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
570 				HACE_CMD_SINGLE_DES);
571 }
572 
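/*
 * Build the AES command word (mode and key size) and queue the request;
 * CBC and ECB require block-aligned lengths.
 */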
573 static int aspeed_aes_crypt(struct skcipher_request *req, u32 cmd)
574 {
575 	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
576 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
577 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
578 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
579 	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;
580 
581 	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
582 		if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
583 			return -EINVAL;
584 	}
585 
586 	CIPHER_DBG(hace_dev, "%s\n",
587 		   (cmd & HACE_CMD_ENCRYPT) ? "encrypt" : "decrypt");
588 
589 	cmd |= HACE_CMD_AES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
590 	       HACE_CMD_CONTEXT_LOAD_ENABLE | HACE_CMD_CONTEXT_SAVE_ENABLE;
591 
592 	switch (ctx->key_len) {
593 	case AES_KEYSIZE_128:
594 		cmd |= HACE_CMD_AES128;
595 		break;
596 	case AES_KEYSIZE_192:
597 		cmd |= HACE_CMD_AES192;
598 		break;
599 	case AES_KEYSIZE_256:
600 		cmd |= HACE_CMD_AES256;
601 		break;
602 	default:
603 		return -EINVAL;
604 	}
605 
606 	rctx->enc_cmd = cmd;
607 
608 	return aspeed_hace_crypto_handle_queue(hace_dev, req);
609 }
610 
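/*
 * Store the AES key. The AST2500 engine expects the expanded key schedule,
 * so it is expanded in software there; otherwise the raw key is kept and
 * the hardware expands it (HACE_CMD_AES_KEY_HW_EXP).
 */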
611 static int aspeed_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
612 			     unsigned int keylen)
613 {
614 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
615 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
616 	struct crypto_aes_ctx gen_aes_key;
617 
618 	CIPHER_DBG(hace_dev, "keylen: %d bits\n", (keylen * 8));
619 
620 	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
621 	    keylen != AES_KEYSIZE_256)
622 		return -EINVAL;
623 
624 	if (ctx->hace_dev->version == AST2500_VERSION) {
625 		aes_expandkey(&gen_aes_key, key, keylen);
626 		memcpy(ctx->key, gen_aes_key.key_enc, AES_MAX_KEYLENGTH);
627 
628 	} else {
629 		memcpy(ctx->key, key, keylen);
630 	}
631 
632 	ctx->key_len = keylen;
633 
634 	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
635 	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
636 				  CRYPTO_TFM_REQ_MASK);
637 
638 	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
639 }
640 
641 static int aspeed_aes_ctr_decrypt(struct skcipher_request *req)
642 {
643 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR);
644 }
645 
646 static int aspeed_aes_ctr_encrypt(struct skcipher_request *req)
647 {
648 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR);
649 }
650 
651 static int aspeed_aes_ofb_decrypt(struct skcipher_request *req)
652 {
653 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB);
654 }
655 
656 static int aspeed_aes_ofb_encrypt(struct skcipher_request *req)
657 {
658 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB);
659 }
660 
661 static int aspeed_aes_cfb_decrypt(struct skcipher_request *req)
662 {
663 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB);
664 }
665 
666 static int aspeed_aes_cfb_encrypt(struct skcipher_request *req)
667 {
668 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB);
669 }
670 
671 static int aspeed_aes_cbc_decrypt(struct skcipher_request *req)
672 {
673 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC);
674 }
675 
676 static int aspeed_aes_cbc_encrypt(struct skcipher_request *req)
677 {
678 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC);
679 }
680 
681 static int aspeed_aes_ecb_decrypt(struct skcipher_request *req)
682 {
683 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB);
684 }
685 
686 static int aspeed_aes_ecb_encrypt(struct skcipher_request *req)
687 {
688 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB);
689 }
690 
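/*
 * Per-tfm init: bind the tfm to its HACE device, allocate the software
 * fallback cipher and size the request context to include the fallback's.
 */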
691 static int aspeed_crypto_cra_init(struct crypto_skcipher *tfm)
692 {
693 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
694 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
695 	const char *name = crypto_tfm_alg_name(&tfm->base);
696 	struct aspeed_hace_alg *crypto_alg;
697 
698 
699 	crypto_alg = container_of(alg, struct aspeed_hace_alg, alg.skcipher);
700 	ctx->hace_dev = crypto_alg->hace_dev;
701 	ctx->start = aspeed_hace_skcipher_trigger;
702 
703 	CIPHER_DBG(ctx->hace_dev, "%s\n", name);
704 
705 	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_ASYNC |
706 						  CRYPTO_ALG_NEED_FALLBACK);
707 	if (IS_ERR(ctx->fallback_tfm)) {
708 		dev_err(ctx->hace_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
709 			name, PTR_ERR(ctx->fallback_tfm));
710 		return PTR_ERR(ctx->fallback_tfm);
711 	}
712 
713 	crypto_skcipher_set_reqsize(tfm, sizeof(struct aspeed_cipher_reqctx) +
714 			 crypto_skcipher_reqsize(ctx->fallback_tfm));
715 
716 	ctx->enginectx.op.do_one_request = aspeed_crypto_do_request;
717 	ctx->enginectx.op.prepare_request = NULL;
718 	ctx->enginectx.op.unprepare_request = NULL;
719 
720 	return 0;
721 }
722 
723 static void aspeed_crypto_cra_exit(struct crypto_skcipher *tfm)
724 {
725 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
726 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
727 
728 	CIPHER_DBG(hace_dev, "%s\n", crypto_tfm_alg_name(&tfm->base));
729 	crypto_free_skcipher(ctx->fallback_tfm);
730 }
731 
732 static struct aspeed_hace_alg aspeed_crypto_algs[] = {
733 	{
734 		.alg.skcipher = {
735 			.min_keysize	= AES_MIN_KEY_SIZE,
736 			.max_keysize	= AES_MAX_KEY_SIZE,
737 			.setkey		= aspeed_aes_setkey,
738 			.encrypt	= aspeed_aes_ecb_encrypt,
739 			.decrypt	= aspeed_aes_ecb_decrypt,
740 			.init		= aspeed_crypto_cra_init,
741 			.exit		= aspeed_crypto_cra_exit,
742 			.base = {
743 				.cra_name		= "ecb(aes)",
744 				.cra_driver_name	= "aspeed-ecb-aes",
745 				.cra_priority		= 300,
746 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
747 							  CRYPTO_ALG_ASYNC |
748 							  CRYPTO_ALG_NEED_FALLBACK,
749 				.cra_blocksize		= AES_BLOCK_SIZE,
750 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
751 				.cra_alignmask		= 0x0f,
752 				.cra_module		= THIS_MODULE,
753 			}
754 		}
755 	},
756 	{
757 		.alg.skcipher = {
758 			.ivsize		= AES_BLOCK_SIZE,
759 			.min_keysize	= AES_MIN_KEY_SIZE,
760 			.max_keysize	= AES_MAX_KEY_SIZE,
761 			.setkey		= aspeed_aes_setkey,
762 			.encrypt	= aspeed_aes_cbc_encrypt,
763 			.decrypt	= aspeed_aes_cbc_decrypt,
764 			.init		= aspeed_crypto_cra_init,
765 			.exit		= aspeed_crypto_cra_exit,
766 			.base = {
767 				.cra_name		= "cbc(aes)",
768 				.cra_driver_name	= "aspeed-cbc-aes",
769 				.cra_priority		= 300,
770 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
771 							  CRYPTO_ALG_ASYNC |
772 							  CRYPTO_ALG_NEED_FALLBACK,
773 				.cra_blocksize		= AES_BLOCK_SIZE,
774 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
775 				.cra_alignmask		= 0x0f,
776 				.cra_module		= THIS_MODULE,
777 			}
778 		}
779 	},
780 	{
781 		.alg.skcipher = {
782 			.ivsize		= AES_BLOCK_SIZE,
783 			.min_keysize	= AES_MIN_KEY_SIZE,
784 			.max_keysize	= AES_MAX_KEY_SIZE,
785 			.setkey		= aspeed_aes_setkey,
786 			.encrypt	= aspeed_aes_cfb_encrypt,
787 			.decrypt	= aspeed_aes_cfb_decrypt,
788 			.init		= aspeed_crypto_cra_init,
789 			.exit		= aspeed_crypto_cra_exit,
790 			.base = {
791 				.cra_name		= "cfb(aes)",
792 				.cra_driver_name	= "aspeed-cfb-aes",
793 				.cra_priority		= 300,
794 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
795 							  CRYPTO_ALG_ASYNC |
796 							  CRYPTO_ALG_NEED_FALLBACK,
797 				.cra_blocksize		= 1,
798 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
799 				.cra_alignmask		= 0x0f,
800 				.cra_module		= THIS_MODULE,
801 			}
802 		}
803 	},
804 	{
805 		.alg.skcipher = {
806 			.ivsize		= AES_BLOCK_SIZE,
807 			.min_keysize	= AES_MIN_KEY_SIZE,
808 			.max_keysize	= AES_MAX_KEY_SIZE,
809 			.setkey		= aspeed_aes_setkey,
810 			.encrypt	= aspeed_aes_ofb_encrypt,
811 			.decrypt	= aspeed_aes_ofb_decrypt,
812 			.init		= aspeed_crypto_cra_init,
813 			.exit		= aspeed_crypto_cra_exit,
814 			.base = {
815 				.cra_name		= "ofb(aes)",
816 				.cra_driver_name	= "aspeed-ofb-aes",
817 				.cra_priority		= 300,
818 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
819 							  CRYPTO_ALG_ASYNC |
820 							  CRYPTO_ALG_NEED_FALLBACK,
821 				.cra_blocksize		= 1,
822 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
823 				.cra_alignmask		= 0x0f,
824 				.cra_module		= THIS_MODULE,
825 			}
826 		}
827 	},
828 	{
829 		.alg.skcipher = {
830 			.min_keysize	= DES_KEY_SIZE,
831 			.max_keysize	= DES_KEY_SIZE,
832 			.setkey		= aspeed_des_setkey,
833 			.encrypt	= aspeed_des_ecb_encrypt,
834 			.decrypt	= aspeed_des_ecb_decrypt,
835 			.init		= aspeed_crypto_cra_init,
836 			.exit		= aspeed_crypto_cra_exit,
837 			.base = {
838 				.cra_name		= "ecb(des)",
839 				.cra_driver_name	= "aspeed-ecb-des",
840 				.cra_priority		= 300,
841 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
842 							  CRYPTO_ALG_ASYNC |
843 							  CRYPTO_ALG_NEED_FALLBACK,
844 				.cra_blocksize		= DES_BLOCK_SIZE,
845 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
846 				.cra_alignmask		= 0x0f,
847 				.cra_module		= THIS_MODULE,
848 			}
849 		}
850 	},
851 	{
852 		.alg.skcipher = {
853 			.ivsize		= DES_BLOCK_SIZE,
854 			.min_keysize	= DES_KEY_SIZE,
855 			.max_keysize	= DES_KEY_SIZE,
856 			.setkey		= aspeed_des_setkey,
857 			.encrypt	= aspeed_des_cbc_encrypt,
858 			.decrypt	= aspeed_des_cbc_decrypt,
859 			.init		= aspeed_crypto_cra_init,
860 			.exit		= aspeed_crypto_cra_exit,
861 			.base = {
862 				.cra_name		= "cbc(des)",
863 				.cra_driver_name	= "aspeed-cbc-des",
864 				.cra_priority		= 300,
865 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
866 							  CRYPTO_ALG_ASYNC |
867 							  CRYPTO_ALG_NEED_FALLBACK,
868 				.cra_blocksize		= DES_BLOCK_SIZE,
869 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
870 				.cra_alignmask		= 0x0f,
871 				.cra_module		= THIS_MODULE,
872 			}
873 		}
874 	},
875 	{
876 		.alg.skcipher = {
877 			.ivsize		= DES_BLOCK_SIZE,
878 			.min_keysize	= DES_KEY_SIZE,
879 			.max_keysize	= DES_KEY_SIZE,
880 			.setkey		= aspeed_des_setkey,
881 			.encrypt	= aspeed_des_cfb_encrypt,
882 			.decrypt	= aspeed_des_cfb_decrypt,
883 			.init		= aspeed_crypto_cra_init,
884 			.exit		= aspeed_crypto_cra_exit,
885 			.base = {
886 				.cra_name		= "cfb(des)",
887 				.cra_driver_name	= "aspeed-cfb-des",
888 				.cra_priority		= 300,
889 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
890 							  CRYPTO_ALG_ASYNC |
891 							  CRYPTO_ALG_NEED_FALLBACK,
892 				.cra_blocksize		= DES_BLOCK_SIZE,
893 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
894 				.cra_alignmask		= 0x0f,
895 				.cra_module		= THIS_MODULE,
896 			}
897 		}
898 	},
899 	{
900 		.alg.skcipher = {
901 			.ivsize		= DES_BLOCK_SIZE,
902 			.min_keysize	= DES_KEY_SIZE,
903 			.max_keysize	= DES_KEY_SIZE,
904 			.setkey		= aspeed_des_setkey,
905 			.encrypt	= aspeed_des_ofb_encrypt,
906 			.decrypt	= aspeed_des_ofb_decrypt,
907 			.init		= aspeed_crypto_cra_init,
908 			.exit		= aspeed_crypto_cra_exit,
909 			.base = {
910 				.cra_name		= "ofb(des)",
911 				.cra_driver_name	= "aspeed-ofb-des",
912 				.cra_priority		= 300,
913 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
914 							  CRYPTO_ALG_ASYNC |
915 							  CRYPTO_ALG_NEED_FALLBACK,
916 				.cra_blocksize		= DES_BLOCK_SIZE,
917 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
918 				.cra_alignmask		= 0x0f,
919 				.cra_module		= THIS_MODULE,
920 			}
921 		}
922 	},
923 	{
924 		.alg.skcipher = {
925 			.min_keysize	= DES3_EDE_KEY_SIZE,
926 			.max_keysize	= DES3_EDE_KEY_SIZE,
927 			.setkey		= aspeed_des_setkey,
928 			.encrypt	= aspeed_tdes_ecb_encrypt,
929 			.decrypt	= aspeed_tdes_ecb_decrypt,
930 			.init		= aspeed_crypto_cra_init,
931 			.exit		= aspeed_crypto_cra_exit,
932 			.base = {
933 				.cra_name		= "ecb(des3_ede)",
934 				.cra_driver_name	= "aspeed-ecb-tdes",
935 				.cra_priority		= 300,
936 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
937 							  CRYPTO_ALG_ASYNC |
938 							  CRYPTO_ALG_NEED_FALLBACK,
939 				.cra_blocksize		= DES_BLOCK_SIZE,
940 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
941 				.cra_alignmask		= 0x0f,
942 				.cra_module		= THIS_MODULE,
943 			}
944 		}
945 	},
946 	{
947 		.alg.skcipher = {
948 			.ivsize		= DES_BLOCK_SIZE,
949 			.min_keysize	= DES3_EDE_KEY_SIZE,
950 			.max_keysize	= DES3_EDE_KEY_SIZE,
951 			.setkey		= aspeed_des_setkey,
952 			.encrypt	= aspeed_tdes_cbc_encrypt,
953 			.decrypt	= aspeed_tdes_cbc_decrypt,
954 			.init		= aspeed_crypto_cra_init,
955 			.exit		= aspeed_crypto_cra_exit,
956 			.base = {
957 				.cra_name		= "cbc(des3_ede)",
958 				.cra_driver_name	= "aspeed-cbc-tdes",
959 				.cra_priority		= 300,
960 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
961 							  CRYPTO_ALG_ASYNC |
962 							  CRYPTO_ALG_NEED_FALLBACK,
963 				.cra_blocksize		= DES_BLOCK_SIZE,
964 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
965 				.cra_alignmask		= 0x0f,
966 				.cra_module		= THIS_MODULE,
967 			}
968 		}
969 	},
970 	{
971 		.alg.skcipher = {
972 			.ivsize		= DES_BLOCK_SIZE,
973 			.min_keysize	= DES3_EDE_KEY_SIZE,
974 			.max_keysize	= DES3_EDE_KEY_SIZE,
975 			.setkey		= aspeed_des_setkey,
976 			.encrypt	= aspeed_tdes_cfb_encrypt,
977 			.decrypt	= aspeed_tdes_cfb_decrypt,
978 			.init		= aspeed_crypto_cra_init,
979 			.exit		= aspeed_crypto_cra_exit,
980 			.base = {
981 				.cra_name		= "cfb(des3_ede)",
982 				.cra_driver_name	= "aspeed-cfb-tdes",
983 				.cra_priority		= 300,
984 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
985 							  CRYPTO_ALG_ASYNC |
986 							  CRYPTO_ALG_NEED_FALLBACK,
987 				.cra_blocksize		= DES_BLOCK_SIZE,
988 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
989 				.cra_alignmask		= 0x0f,
990 				.cra_module		= THIS_MODULE,
991 			}
992 		}
993 	},
994 	{
995 		.alg.skcipher = {
996 			.ivsize		= DES_BLOCK_SIZE,
997 			.min_keysize	= DES3_EDE_KEY_SIZE,
998 			.max_keysize	= DES3_EDE_KEY_SIZE,
999 			.setkey		= aspeed_des_setkey,
1000 			.encrypt	= aspeed_tdes_ofb_encrypt,
1001 			.decrypt	= aspeed_tdes_ofb_decrypt,
1002 			.init		= aspeed_crypto_cra_init,
1003 			.exit		= aspeed_crypto_cra_exit,
1004 			.base = {
1005 				.cra_name		= "ofb(des3_ede)",
1006 				.cra_driver_name	= "aspeed-ofb-tdes",
1007 				.cra_priority		= 300,
1008 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
1009 							  CRYPTO_ALG_ASYNC |
1010 							  CRYPTO_ALG_NEED_FALLBACK,
1011 				.cra_blocksize		= DES_BLOCK_SIZE,
1012 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
1013 				.cra_alignmask		= 0x0f,
1014 				.cra_module		= THIS_MODULE,
1015 			}
1016 		}
1017 	},
1018 };
1019 
1020 static struct aspeed_hace_alg aspeed_crypto_algs_g6[] = {
1021 	{
1022 		.alg.skcipher = {
1023 			.ivsize		= AES_BLOCK_SIZE,
1024 			.min_keysize	= AES_MIN_KEY_SIZE,
1025 			.max_keysize	= AES_MAX_KEY_SIZE,
1026 			.setkey		= aspeed_aes_setkey,
1027 			.encrypt	= aspeed_aes_ctr_encrypt,
1028 			.decrypt	= aspeed_aes_ctr_decrypt,
1029 			.init		= aspeed_crypto_cra_init,
1030 			.exit		= aspeed_crypto_cra_exit,
1031 			.base = {
1032 				.cra_name		= "ctr(aes)",
1033 				.cra_driver_name	= "aspeed-ctr-aes",
1034 				.cra_priority		= 300,
1035 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
1036 							  CRYPTO_ALG_ASYNC,
1037 				.cra_blocksize		= 1,
1038 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
1039 				.cra_alignmask		= 0x0f,
1040 				.cra_module		= THIS_MODULE,
1041 			}
1042 		}
1043 	},
1044 	{
1045 		.alg.skcipher = {
1046 			.ivsize		= DES_BLOCK_SIZE,
1047 			.min_keysize	= DES_KEY_SIZE,
1048 			.max_keysize	= DES_KEY_SIZE,
1049 			.setkey		= aspeed_des_setkey,
1050 			.encrypt	= aspeed_des_ctr_encrypt,
1051 			.decrypt	= aspeed_des_ctr_decrypt,
1052 			.init		= aspeed_crypto_cra_init,
1053 			.exit		= aspeed_crypto_cra_exit,
1054 			.base = {
1055 				.cra_name		= "ctr(des)",
1056 				.cra_driver_name	= "aspeed-ctr-des",
1057 				.cra_priority		= 300,
1058 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
1059 							  CRYPTO_ALG_ASYNC,
1060 				.cra_blocksize		= 1,
1061 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
1062 				.cra_alignmask		= 0x0f,
1063 				.cra_module		= THIS_MODULE,
1064 			}
1065 		}
1066 	},
1067 	{
1068 		.alg.skcipher = {
1069 			.ivsize		= DES_BLOCK_SIZE,
1070 			.min_keysize	= DES3_EDE_KEY_SIZE,
1071 			.max_keysize	= DES3_EDE_KEY_SIZE,
1072 			.setkey		= aspeed_des_setkey,
1073 			.encrypt	= aspeed_tdes_ctr_encrypt,
1074 			.decrypt	= aspeed_tdes_ctr_decrypt,
1075 			.init		= aspeed_crypto_cra_init,
1076 			.exit		= aspeed_crypto_cra_exit,
1077 			.base = {
1078 				.cra_name		= "ctr(des3_ede)",
1079 				.cra_driver_name	= "aspeed-ctr-tdes",
1080 				.cra_priority		= 300,
1081 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
1082 							  CRYPTO_ALG_ASYNC,
1083 				.cra_blocksize		= 1,
1084 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
1085 				.cra_alignmask		= 0x0f,
1086 				.cra_module		= THIS_MODULE,
1087 			}
1088 		}
1089 	},
1090 
1091 };
1092 
1093 void aspeed_unregister_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
1094 {
1095 	int i;
1096 
1097 	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++)
1098 		crypto_unregister_skcipher(&aspeed_crypto_algs[i].alg.skcipher);
1099 
1100 	if (hace_dev->version != AST2600_VERSION)
1101 		return;
1102 
1103 	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++)
1104 		crypto_unregister_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
1105 }
1106 
1107 void aspeed_register_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
1108 {
1109 	int rc, i;
1110 
1111 	CIPHER_DBG(hace_dev, "\n");
1112 
1113 	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++) {
1114 		aspeed_crypto_algs[i].hace_dev = hace_dev;
1115 		rc = crypto_register_skcipher(&aspeed_crypto_algs[i].alg.skcipher);
1116 		if (rc) {
1117 			CIPHER_DBG(hace_dev, "Failed to register %s\n",
1118 				   aspeed_crypto_algs[i].alg.skcipher.base.cra_name);
1119 		}
1120 	}
1121 
1122 	if (hace_dev->version != AST2600_VERSION)
1123 		return;
1124 
1125 	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++) {
1126 		aspeed_crypto_algs_g6[i].hace_dev = hace_dev;
1127 		rc = crypto_register_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
1128 		if (rc) {
1129 			CIPHER_DBG(hace_dev, "Failed to register %s\n",
1130 				   aspeed_crypto_algs_g6[i].alg.skcipher.base.cra_name);
1131 		}
1132 	}
1133 }
1134