xref: /linux/drivers/crypto/aspeed/aspeed-hace-crypto.c (revision 4dd4d5e486ebdeb48dbc558237d4ba8aab8917d5)
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * Copyright (c) 2021 Aspeed Technology Inc.
4  */
5 
6 #include "aspeed-hace.h"
7 
8 #ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_CRYPTO_DEBUG
9 #define CIPHER_DBG(h, fmt, ...)	\
10 	dev_info((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
11 #else
12 #define CIPHER_DBG(h, fmt, ...)	\
13 	dev_dbg((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
14 #endif
15 
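/*
 * Hand the request off to the software fallback skcipher. The fallback
 * request embedded in the request context reuses the caller's callback,
 * flags and src/dst/IV, so completion semantics are unchanged.
 */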
16 static int aspeed_crypto_do_fallback(struct skcipher_request *areq)
17 {
18 	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);
19 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
20 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
21 	int err;
22 
23 	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
24 	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
25 				      areq->base.complete, areq->base.data);
26 	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
27 				   areq->cryptlen, areq->iv);
28 
29 	if (rctx->enc_cmd & HACE_CMD_ENCRYPT)
30 		err = crypto_skcipher_encrypt(&rctx->fallback_req);
31 	else
32 		err = crypto_skcipher_decrypt(&rctx->fallback_req);
33 
34 	return err;
35 }
36 
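/*
 * Decide whether a request must go to the software fallback: zero-length
 * requests, and requests whose length is not a multiple of the relevant
 * block size (DES or AES, depending on the command word), cannot be
 * serviced by the hardware path. The check is only applied on the
 * AST2500 (see aspeed_hace_crypto_handle_queue()).
 */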
37 static bool aspeed_crypto_need_fallback(struct skcipher_request *areq)
38 {
39 	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);
40 
41 	if (areq->cryptlen == 0)
42 		return true;
43 
44 	if ((rctx->enc_cmd & HACE_CMD_DES_SELECT) &&
45 	    !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE))
46 		return true;
47 
48 	if ((!(rctx->enc_cmd & HACE_CMD_DES_SELECT)) &&
49 	    !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE))
50 		return true;
51 
52 	return false;
53 }
54 
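/*
 * Queue the request to the crypto engine, or run it through the software
 * fallback for the AST2500 corner cases detected by
 * aspeed_crypto_need_fallback().
 */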
55 static int aspeed_hace_crypto_handle_queue(struct aspeed_hace_dev *hace_dev,
56 					   struct skcipher_request *req)
57 {
58 	if (hace_dev->version == AST2500_VERSION &&
59 	    aspeed_crypto_need_fallback(req)) {
60 		CIPHER_DBG(hace_dev, "SW fallback\n");
61 		return aspeed_crypto_do_fallback(req);
62 	}
63 
64 	return crypto_transfer_skcipher_request_to_engine(
65 			hace_dev->crypt_engine_crypto, req);
66 }
67 
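/*
 * crypto_engine "do_one_request" callback: remember the active request,
 * mark the engine busy and kick off the start routine installed in the
 * tfm context (aspeed_hace_skcipher_trigger). Any start error is
 * reported as -EIO.
 */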
68 static int aspeed_crypto_do_request(struct crypto_engine *engine, void *areq)
69 {
70 	struct skcipher_request *req = skcipher_request_cast(areq);
71 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
72 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
73 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
74 	struct aspeed_engine_crypto *crypto_engine;
75 	int rc;
76 
77 	crypto_engine = &hace_dev->crypto_engine;
78 	crypto_engine->req = req;
79 	crypto_engine->flags |= CRYPTO_FLAGS_BUSY;
80 
81 	rc = ctx->start(hace_dev);
82 
83 	if (rc != -EINPROGRESS)
84 		return -EIO;
85 
86 	return 0;
87 }
88 
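/*
 * Common completion path: copy the saved IV out of the hardware context
 * buffer back into the request (offset 8 for DES, offset 0 for AES),
 * clear the busy flag and finalize the request on the crypto engine.
 */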
89 static int aspeed_sk_complete(struct aspeed_hace_dev *hace_dev, int err)
90 {
91 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
92 	struct aspeed_cipher_reqctx *rctx;
93 	struct skcipher_request *req;
94 
95 	CIPHER_DBG(hace_dev, "\n");
96 
97 	req = crypto_engine->req;
98 	rctx = skcipher_request_ctx(req);
99 
100 	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
101 		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
102 			memcpy(req->iv, crypto_engine->cipher_ctx +
103 			       DES_BLOCK_SIZE, DES_BLOCK_SIZE);
104 		else
105 			memcpy(req->iv, crypto_engine->cipher_ctx,
106 			       AES_BLOCK_SIZE);
107 	}
108 
109 	crypto_engine->flags &= ~CRYPTO_FLAGS_BUSY;
110 
111 	crypto_finalize_skcipher_request(hace_dev->crypt_engine_crypto, req,
112 					 err);
113 
114 	return err;
115 }
116 
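/*
 * Completion for the scatter-gather (AST2600) path: undo the DMA
 * mappings set up in aspeed_sk_start_sg(), then complete the request.
 */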
117 static int aspeed_sk_transfer_sg(struct aspeed_hace_dev *hace_dev)
118 {
119 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
120 	struct device *dev = hace_dev->dev;
121 	struct aspeed_cipher_reqctx *rctx;
122 	struct skcipher_request *req;
123 
124 	CIPHER_DBG(hace_dev, "\n");
125 
126 	req = crypto_engine->req;
127 	rctx = skcipher_request_ctx(req);
128 
129 	if (req->src == req->dst) {
130 		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL);
131 	} else {
132 		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
133 		dma_unmap_sg(dev, req->dst, rctx->dst_nents, DMA_FROM_DEVICE);
134 	}
135 
136 	return aspeed_sk_complete(hace_dev, 0);
137 }
138 
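/*
 * Completion for the non-SG path: the engine worked in the driver's
 * contiguous cipher buffer, so copy the result back into the dst
 * scatterlist before completing the request.
 */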
139 static int aspeed_sk_transfer(struct aspeed_hace_dev *hace_dev)
140 {
141 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
142 	struct aspeed_cipher_reqctx *rctx;
143 	struct skcipher_request *req;
144 	struct scatterlist *out_sg;
145 	int nbytes = 0;
146 	int rc = 0;
147 
148 	req = crypto_engine->req;
149 	rctx = skcipher_request_ctx(req);
150 	out_sg = req->dst;
151 
152 	/* Copy output buffer to dst scatter-gather lists */
153 	nbytes = sg_copy_from_buffer(out_sg, rctx->dst_nents,
154 				     crypto_engine->cipher_addr, req->cryptlen);
155 	if (!nbytes) {
156 		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
157 			 "nbytes", nbytes, "cryptlen", req->cryptlen);
158 		rc = -EINVAL;
159 	}
160 
161 	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
162 		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
163 		   "nb_out_sg", rctx->dst_nents,
164 		   "cipher addr", crypto_engine->cipher_addr);
165 
166 	return aspeed_sk_complete(hace_dev, rc);
167 }
168 
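/*
 * Non-SG start path: gather the source scatterlist into the contiguous
 * DMA buffer, point both SRC and DEST at it (in-place operation), then
 * program the length and command registers to start the engine.
 */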
169 static int aspeed_sk_start(struct aspeed_hace_dev *hace_dev)
170 {
171 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
172 	struct aspeed_cipher_reqctx *rctx;
173 	struct skcipher_request *req;
174 	struct scatterlist *in_sg;
175 	int nbytes;
176 
177 	req = crypto_engine->req;
178 	rctx = skcipher_request_ctx(req);
179 	in_sg = req->src;
180 
181 	nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents,
182 				   crypto_engine->cipher_addr, req->cryptlen);
183 
184 	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
185 		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
186 		   "nb_in_sg", rctx->src_nents,
187 		   "cipher addr", crypto_engine->cipher_addr);
188 
189 	if (!nbytes) {
190 		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
191 			 "nbytes", nbytes, "cryptlen", req->cryptlen);
192 		return -EINVAL;
193 	}
194 
195 	crypto_engine->resume = aspeed_sk_transfer;
196 
197 	/* Trigger engines */
198 	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
199 		       ASPEED_HACE_SRC);
200 	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
201 		       ASPEED_HACE_DEST);
202 	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
203 	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);
204 
205 	return -EINPROGRESS;
206 }
207 
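/*
 * Scatter-gather start path (AST2600): DMA-map src/dst, then build the
 * hardware descriptor lists in the pre-allocated buffers. Each entry
 * carries a 32-bit address and length; BIT(31) of the length marks the
 * last entry, and the last length is trimmed so the entries sum to
 * exactly req->cryptlen (-EINVAL if the scatterlists are shorter).
 */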
208 static int aspeed_sk_start_sg(struct aspeed_hace_dev *hace_dev)
209 {
210 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
211 	struct aspeed_sg_list *src_list, *dst_list;
212 	dma_addr_t src_dma_addr, dst_dma_addr;
213 	struct aspeed_cipher_reqctx *rctx;
214 	struct skcipher_request *req;
215 	struct scatterlist *s;
216 	int src_sg_len;
217 	int dst_sg_len;
218 	int total, i;
219 	int rc;
220 
221 	CIPHER_DBG(hace_dev, "\n");
222 
223 	req = crypto_engine->req;
224 	rctx = skcipher_request_ctx(req);
225 
226 	rctx->enc_cmd |= HACE_CMD_DES_SG_CTRL | HACE_CMD_SRC_SG_CTRL |
227 			 HACE_CMD_AES_KEY_HW_EXP | HACE_CMD_MBUS_REQ_SYNC_EN;
228 
229 	/* BIDIRECTIONAL */
230 	if (req->dst == req->src) {
231 		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
232 					rctx->src_nents, DMA_BIDIRECTIONAL);
233 		dst_sg_len = src_sg_len;
234 		if (!src_sg_len) {
235 			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
236 			return -EINVAL;
237 		}
238 
239 	} else {
240 		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
241 					rctx->src_nents, DMA_TO_DEVICE);
242 		if (!src_sg_len) {
243 			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
244 			return -EINVAL;
245 		}
246 
247 		dst_sg_len = dma_map_sg(hace_dev->dev, req->dst,
248 					rctx->dst_nents, DMA_FROM_DEVICE);
249 		if (!dst_sg_len) {
250 			dev_warn(hace_dev->dev, "dma_map_sg() dst error\n");
251 			rc = -EINVAL;
252 			goto free_req_src;
253 		}
254 	}
255 
256 	src_list = (struct aspeed_sg_list *)crypto_engine->cipher_addr;
257 	src_dma_addr = crypto_engine->cipher_dma_addr;
258 	total = req->cryptlen;
259 
260 	for_each_sg(req->src, s, src_sg_len, i) {
261 		u32 phy_addr = sg_dma_address(s);
262 		u32 len = sg_dma_len(s);
263 
264 		if (total > len)
265 			total -= len;
266 		else {
267 			/* last sg list */
268 			len = total;
269 			len |= BIT(31);
270 			total = 0;
271 		}
272 
273 		src_list[i].phy_addr = cpu_to_le32(phy_addr);
274 		src_list[i].len = cpu_to_le32(len);
275 	}
276 
277 	if (total != 0) {
278 		rc = -EINVAL;
279 		goto free_req;
280 	}
281 
282 	if (req->dst == req->src) {
283 		dst_list = src_list;
284 		dst_dma_addr = src_dma_addr;
285 
286 	} else {
287 		dst_list = (struct aspeed_sg_list *)crypto_engine->dst_sg_addr;
288 		dst_dma_addr = crypto_engine->dst_sg_dma_addr;
289 		total = req->cryptlen;
290 
291 		for_each_sg(req->dst, s, dst_sg_len, i) {
292 			u32 phy_addr = sg_dma_address(s);
293 			u32 len = sg_dma_len(s);
294 
295 			if (total > len)
296 				total -= len;
297 			else {
298 				/* last sg list */
299 				len = total;
300 				len |= BIT(31);
301 				total = 0;
302 			}
303 
304 			dst_list[i].phy_addr = cpu_to_le32(phy_addr);
305 			dst_list[i].len = cpu_to_le32(len);
306 
307 		}
308 
309 		dst_list[dst_sg_len].phy_addr = 0;
310 		dst_list[dst_sg_len].len = 0;
311 	}
312 
313 	if (total != 0) {
314 		rc = -EINVAL;
315 		goto free_req;
316 	}
317 
318 	crypto_engine->resume = aspeed_sk_transfer_sg;
319 
320 	/* Memory barrier to ensure all data is set up before the engine starts */
321 	mb();
322 
323 	/* Trigger engines */
324 	ast_hace_write(hace_dev, src_dma_addr, ASPEED_HACE_SRC);
325 	ast_hace_write(hace_dev, dst_dma_addr, ASPEED_HACE_DEST);
326 	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
327 	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);
328 
329 	return -EINPROGRESS;
330 
331 free_req:
332 	if (req->dst == req->src) {
333 		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
334 			     DMA_BIDIRECTIONAL);
335 
336 	} else {
337 		dma_unmap_sg(hace_dev->dev, req->dst, rctx->dst_nents,
338 			     DMA_FROM_DEVICE);
339 		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
340 			     DMA_TO_DEVICE);
341 	}
342 
343 	return rc;
344 
345 free_req_src:
346 	dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
347 
348 	return rc;
349 }
350 
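/*
 * Common trigger: enable the completion interrupt, load the IV into the
 * context buffer (offset 0 for AES, offset 8 for DES) and the key at
 * offset 16, then dispatch to the SG start path on the AST2600 or the
 * buffer-copy path otherwise.
 */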
351 static int aspeed_hace_skcipher_trigger(struct aspeed_hace_dev *hace_dev)
352 {
353 	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
354 	struct aspeed_cipher_reqctx *rctx;
355 	struct crypto_skcipher *cipher;
356 	struct aspeed_cipher_ctx *ctx;
357 	struct skcipher_request *req;
358 
359 	CIPHER_DBG(hace_dev, "\n");
360 
361 	req = crypto_engine->req;
362 	rctx = skcipher_request_ctx(req);
363 	cipher = crypto_skcipher_reqtfm(req);
364 	ctx = crypto_skcipher_ctx(cipher);
365 
366 	/* enable interrupt */
367 	rctx->enc_cmd |= HACE_CMD_ISR_EN;
368 
369 	rctx->dst_nents = sg_nents(req->dst);
370 	rctx->src_nents = sg_nents(req->src);
371 
372 	ast_hace_write(hace_dev, crypto_engine->cipher_ctx_dma,
373 		       ASPEED_HACE_CONTEXT);
374 
375 	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
376 		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
377 			memcpy(crypto_engine->cipher_ctx + DES_BLOCK_SIZE,
378 			       req->iv, DES_BLOCK_SIZE);
379 		else
380 			memcpy(crypto_engine->cipher_ctx, req->iv,
381 			       AES_BLOCK_SIZE);
382 	}
383 
384 	if (hace_dev->version == AST2600_VERSION) {
385 		memcpy(crypto_engine->cipher_ctx + 16, ctx->key, ctx->key_len);
386 
387 		return aspeed_sk_start_sg(hace_dev);
388 	}
389 
390 	memcpy(crypto_engine->cipher_ctx + 16, ctx->key, AES_MAX_KEYLENGTH);
391 
392 	return aspeed_sk_start(hace_dev);
393 }
394 
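/*
 * Build the DES/3DES command word for the requested mode and queue the
 * request. ECB and CBC require a block-aligned length; the stream-like
 * modes (CFB/OFB/CTR) do not.
 */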
395 static int aspeed_des_crypt(struct skcipher_request *req, u32 cmd)
396 {
397 	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
398 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
399 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
400 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
401 	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;
402 
403 	CIPHER_DBG(hace_dev, "\n");
404 
405 	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
406 		if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE))
407 			return -EINVAL;
408 	}
409 
410 	rctx->enc_cmd = cmd | HACE_CMD_DES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
411 			HACE_CMD_DES | HACE_CMD_CONTEXT_LOAD_ENABLE |
412 			HACE_CMD_CONTEXT_SAVE_ENABLE;
413 
414 	return aspeed_hace_crypto_handle_queue(hace_dev, req);
415 }
416 
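/*
 * Validate a DES or 3DES key with the generic helpers, store it, and
 * propagate it to the software fallback transform so both paths stay
 * in sync.
 */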
417 static int aspeed_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
418 			     unsigned int keylen)
419 {
420 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
421 	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
422 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
423 	int rc;
424 
425 	CIPHER_DBG(hace_dev, "keylen: %d bytes\n", keylen);
426 
427 	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
428 		dev_warn(hace_dev->dev, "invalid keylen: %d bytes\n", keylen);
429 		return -EINVAL;
430 	}
431 
432 	if (keylen == DES_KEY_SIZE) {
433 		rc = crypto_des_verify_key(tfm, key);
434 		if (rc)
435 			return rc;
436 
437 	} else if (keylen == DES3_EDE_KEY_SIZE) {
438 		rc = crypto_des3_ede_verify_key(tfm, key);
439 		if (rc)
440 			return rc;
441 	}
442 
443 	memcpy(ctx->key, key, keylen);
444 	ctx->key_len = keylen;
445 
446 	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
447 	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
448 				  CRYPTO_TFM_REQ_MASK);
449 
450 	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
451 }
452 
453 static int aspeed_tdes_ctr_decrypt(struct skcipher_request *req)
454 {
455 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
456 				HACE_CMD_TRIPLE_DES);
457 }
458 
459 static int aspeed_tdes_ctr_encrypt(struct skcipher_request *req)
460 {
461 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
462 				HACE_CMD_TRIPLE_DES);
463 }
464 
465 static int aspeed_tdes_ofb_decrypt(struct skcipher_request *req)
466 {
467 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB |
468 				HACE_CMD_TRIPLE_DES);
469 }
470 
471 static int aspeed_tdes_ofb_encrypt(struct skcipher_request *req)
472 {
473 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB |
474 				HACE_CMD_TRIPLE_DES);
475 }
476 
477 static int aspeed_tdes_cfb_decrypt(struct skcipher_request *req)
478 {
479 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB |
480 				HACE_CMD_TRIPLE_DES);
481 }
482 
483 static int aspeed_tdes_cfb_encrypt(struct skcipher_request *req)
484 {
485 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB |
486 				HACE_CMD_TRIPLE_DES);
487 }
488 
489 static int aspeed_tdes_cbc_decrypt(struct skcipher_request *req)
490 {
491 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
492 				HACE_CMD_TRIPLE_DES);
493 }
494 
495 static int aspeed_tdes_cbc_encrypt(struct skcipher_request *req)
496 {
497 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
498 				HACE_CMD_TRIPLE_DES);
499 }
500 
501 static int aspeed_tdes_ecb_decrypt(struct skcipher_request *req)
502 {
503 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
504 				HACE_CMD_TRIPLE_DES);
505 }
506 
507 static int aspeed_tdes_ecb_encrypt(struct skcipher_request *req)
508 {
509 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
510 				HACE_CMD_TRIPLE_DES);
511 }
512 
513 static int aspeed_des_ctr_decrypt(struct skcipher_request *req)
514 {
515 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
516 				HACE_CMD_SINGLE_DES);
517 }
518 
519 static int aspeed_des_ctr_encrypt(struct skcipher_request *req)
520 {
521 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
522 				HACE_CMD_SINGLE_DES);
523 }
524 
525 static int aspeed_des_ofb_decrypt(struct skcipher_request *req)
526 {
527 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB |
528 				HACE_CMD_SINGLE_DES);
529 }
530 
531 static int aspeed_des_ofb_encrypt(struct skcipher_request *req)
532 {
533 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB |
534 				HACE_CMD_SINGLE_DES);
535 }
536 
537 static int aspeed_des_cfb_decrypt(struct skcipher_request *req)
538 {
539 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB |
540 				HACE_CMD_SINGLE_DES);
541 }
542 
543 static int aspeed_des_cfb_encrypt(struct skcipher_request *req)
544 {
545 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB |
546 				HACE_CMD_SINGLE_DES);
547 }
548 
549 static int aspeed_des_cbc_decrypt(struct skcipher_request *req)
550 {
551 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
552 				HACE_CMD_SINGLE_DES);
553 }
554 
555 static int aspeed_des_cbc_encrypt(struct skcipher_request *req)
556 {
557 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
558 				HACE_CMD_SINGLE_DES);
559 }
560 
561 static int aspeed_des_ecb_decrypt(struct skcipher_request *req)
562 {
563 	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
564 				HACE_CMD_SINGLE_DES);
565 }
566 
567 static int aspeed_des_ecb_encrypt(struct skcipher_request *req)
568 {
569 	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
570 				HACE_CMD_SINGLE_DES);
571 }
572 
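/*
 * Build the AES command word: select the key size recorded at setkey
 * time, enable context load/save and queue the request. As with DES,
 * ECB and CBC insist on block-aligned lengths.
 */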
573 static int aspeed_aes_crypt(struct skcipher_request *req, u32 cmd)
574 {
575 	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
576 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
577 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
578 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
579 	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;
580 
581 	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
582 		if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
583 			return -EINVAL;
584 	}
585 
586 	CIPHER_DBG(hace_dev, "%s\n",
587 		   (cmd & HACE_CMD_ENCRYPT) ? "encrypt" : "decrypt");
588 
589 	cmd |= HACE_CMD_AES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
590 	       HACE_CMD_CONTEXT_LOAD_ENABLE | HACE_CMD_CONTEXT_SAVE_ENABLE;
591 
592 	switch (ctx->key_len) {
593 	case AES_KEYSIZE_128:
594 		cmd |= HACE_CMD_AES128;
595 		break;
596 	case AES_KEYSIZE_192:
597 		cmd |= HACE_CMD_AES192;
598 		break;
599 	case AES_KEYSIZE_256:
600 		cmd |= HACE_CMD_AES256;
601 		break;
602 	default:
603 		return -EINVAL;
604 	}
605 
606 	rctx->enc_cmd = cmd;
607 
608 	return aspeed_hace_crypto_handle_queue(hace_dev, req);
609 }
610 
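/*
 * Store an AES key. The AST2500 is fed a software-expanded key schedule
 * (aes_expandkey()), while the AST2600 keeps the raw key and uses
 * hardware key expansion (HACE_CMD_AES_KEY_HW_EXP, set in the SG start
 * path). The key is also propagated to the fallback transform.
 */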
611 static int aspeed_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
612 			     unsigned int keylen)
613 {
614 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
615 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
616 	struct crypto_aes_ctx gen_aes_key;
617 
618 	CIPHER_DBG(hace_dev, "keylen: %d bits\n", (keylen * 8));
619 
620 	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
621 	    keylen != AES_KEYSIZE_256)
622 		return -EINVAL;
623 
624 	if (ctx->hace_dev->version == AST2500_VERSION) {
625 		aes_expandkey(&gen_aes_key, key, keylen);
626 		memcpy(ctx->key, gen_aes_key.key_enc, AES_MAX_KEYLENGTH);
627 
628 	} else {
629 		memcpy(ctx->key, key, keylen);
630 	}
631 
632 	ctx->key_len = keylen;
633 
634 	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
635 	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
636 				  CRYPTO_TFM_REQ_MASK);
637 
638 	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
639 }
640 
641 static int aspeed_aes_ctr_decrypt(struct skcipher_request *req)
642 {
643 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR);
644 }
645 
646 static int aspeed_aes_ctr_encrypt(struct skcipher_request *req)
647 {
648 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR);
649 }
650 
651 static int aspeed_aes_ofb_decrypt(struct skcipher_request *req)
652 {
653 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB);
654 }
655 
656 static int aspeed_aes_ofb_encrypt(struct skcipher_request *req)
657 {
658 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB);
659 }
660 
661 static int aspeed_aes_cfb_decrypt(struct skcipher_request *req)
662 {
663 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB);
664 }
665 
666 static int aspeed_aes_cfb_encrypt(struct skcipher_request *req)
667 {
668 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB);
669 }
670 
671 static int aspeed_aes_cbc_decrypt(struct skcipher_request *req)
672 {
673 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC);
674 }
675 
676 static int aspeed_aes_cbc_encrypt(struct skcipher_request *req)
677 {
678 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC);
679 }
680 
681 static int aspeed_aes_ecb_decrypt(struct skcipher_request *req)
682 {
683 	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB);
684 }
685 
686 static int aspeed_aes_ecb_encrypt(struct skcipher_request *req)
687 {
688 	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB);
689 }
690 
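/*
 * Per-tfm init: resolve the owning aspeed_hace_alg, allocate a software
 * fallback skcipher with the same algorithm name, size the request
 * context to hold the fallback request, and install the engine callback.
 */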
691 static int aspeed_crypto_cra_init(struct crypto_skcipher *tfm)
692 {
693 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
694 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
695 	const char *name = crypto_tfm_alg_name(&tfm->base);
696 	struct aspeed_hace_alg *crypto_alg;
697 
698 
699 	crypto_alg = container_of(alg, struct aspeed_hace_alg, alg.skcipher);
700 	ctx->hace_dev = crypto_alg->hace_dev;
701 	ctx->start = aspeed_hace_skcipher_trigger;
702 
703 	CIPHER_DBG(ctx->hace_dev, "%s\n", name);
704 
705 	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_ASYNC |
706 						  CRYPTO_ALG_NEED_FALLBACK);
707 	if (IS_ERR(ctx->fallback_tfm)) {
708 		dev_err(ctx->hace_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
709 			name, PTR_ERR(ctx->fallback_tfm));
710 		return PTR_ERR(ctx->fallback_tfm);
711 	}
712 
713 	crypto_skcipher_set_reqsize(tfm, sizeof(struct aspeed_cipher_reqctx) +
714 			 crypto_skcipher_reqsize(ctx->fallback_tfm));
715 
716 	ctx->enginectx.op.do_one_request = aspeed_crypto_do_request;
717 
718 	return 0;
719 }
720 
721 static void aspeed_crypto_cra_exit(struct crypto_skcipher *tfm)
722 {
723 	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
724 	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
725 
726 	CIPHER_DBG(hace_dev, "%s\n", crypto_tfm_alg_name(&tfm->base));
727 	crypto_free_skcipher(ctx->fallback_tfm);
728 }
729 
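/*
 * Algorithms registered on every supported generation. The CTR variants
 * in aspeed_crypto_algs_g6[] below are registered on the AST2600 only.
 */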
730 static struct aspeed_hace_alg aspeed_crypto_algs[] = {
731 	{
732 		.alg.skcipher = {
733 			.min_keysize	= AES_MIN_KEY_SIZE,
734 			.max_keysize	= AES_MAX_KEY_SIZE,
735 			.setkey		= aspeed_aes_setkey,
736 			.encrypt	= aspeed_aes_ecb_encrypt,
737 			.decrypt	= aspeed_aes_ecb_decrypt,
738 			.init		= aspeed_crypto_cra_init,
739 			.exit		= aspeed_crypto_cra_exit,
740 			.base = {
741 				.cra_name		= "ecb(aes)",
742 				.cra_driver_name	= "aspeed-ecb-aes",
743 				.cra_priority		= 300,
744 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
745 							  CRYPTO_ALG_ASYNC |
746 							  CRYPTO_ALG_NEED_FALLBACK,
747 				.cra_blocksize		= AES_BLOCK_SIZE,
748 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
749 				.cra_alignmask		= 0x0f,
750 				.cra_module		= THIS_MODULE,
751 			}
752 		}
753 	},
754 	{
755 		.alg.skcipher = {
756 			.ivsize		= AES_BLOCK_SIZE,
757 			.min_keysize	= AES_MIN_KEY_SIZE,
758 			.max_keysize	= AES_MAX_KEY_SIZE,
759 			.setkey		= aspeed_aes_setkey,
760 			.encrypt	= aspeed_aes_cbc_encrypt,
761 			.decrypt	= aspeed_aes_cbc_decrypt,
762 			.init		= aspeed_crypto_cra_init,
763 			.exit		= aspeed_crypto_cra_exit,
764 			.base = {
765 				.cra_name		= "cbc(aes)",
766 				.cra_driver_name	= "aspeed-cbc-aes",
767 				.cra_priority		= 300,
768 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
769 							  CRYPTO_ALG_ASYNC |
770 							  CRYPTO_ALG_NEED_FALLBACK,
771 				.cra_blocksize		= AES_BLOCK_SIZE,
772 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
773 				.cra_alignmask		= 0x0f,
774 				.cra_module		= THIS_MODULE,
775 			}
776 		}
777 	},
778 	{
779 		.alg.skcipher = {
780 			.ivsize		= AES_BLOCK_SIZE,
781 			.min_keysize	= AES_MIN_KEY_SIZE,
782 			.max_keysize	= AES_MAX_KEY_SIZE,
783 			.setkey		= aspeed_aes_setkey,
784 			.encrypt	= aspeed_aes_cfb_encrypt,
785 			.decrypt	= aspeed_aes_cfb_decrypt,
786 			.init		= aspeed_crypto_cra_init,
787 			.exit		= aspeed_crypto_cra_exit,
788 			.base = {
789 				.cra_name		= "cfb(aes)",
790 				.cra_driver_name	= "aspeed-cfb-aes",
791 				.cra_priority		= 300,
792 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
793 							  CRYPTO_ALG_ASYNC |
794 							  CRYPTO_ALG_NEED_FALLBACK,
795 				.cra_blocksize		= 1,
796 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
797 				.cra_alignmask		= 0x0f,
798 				.cra_module		= THIS_MODULE,
799 			}
800 		}
801 	},
802 	{
803 		.alg.skcipher = {
804 			.ivsize		= AES_BLOCK_SIZE,
805 			.min_keysize	= AES_MIN_KEY_SIZE,
806 			.max_keysize	= AES_MAX_KEY_SIZE,
807 			.setkey		= aspeed_aes_setkey,
808 			.encrypt	= aspeed_aes_ofb_encrypt,
809 			.decrypt	= aspeed_aes_ofb_decrypt,
810 			.init		= aspeed_crypto_cra_init,
811 			.exit		= aspeed_crypto_cra_exit,
812 			.base = {
813 				.cra_name		= "ofb(aes)",
814 				.cra_driver_name	= "aspeed-ofb-aes",
815 				.cra_priority		= 300,
816 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
817 							  CRYPTO_ALG_ASYNC |
818 							  CRYPTO_ALG_NEED_FALLBACK,
819 				.cra_blocksize		= 1,
820 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
821 				.cra_alignmask		= 0x0f,
822 				.cra_module		= THIS_MODULE,
823 			}
824 		}
825 	},
826 	{
827 		.alg.skcipher = {
828 			.min_keysize	= DES_KEY_SIZE,
829 			.max_keysize	= DES_KEY_SIZE,
830 			.setkey		= aspeed_des_setkey,
831 			.encrypt	= aspeed_des_ecb_encrypt,
832 			.decrypt	= aspeed_des_ecb_decrypt,
833 			.init		= aspeed_crypto_cra_init,
834 			.exit		= aspeed_crypto_cra_exit,
835 			.base = {
836 				.cra_name		= "ecb(des)",
837 				.cra_driver_name	= "aspeed-ecb-des",
838 				.cra_priority		= 300,
839 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
840 							  CRYPTO_ALG_ASYNC |
841 							  CRYPTO_ALG_NEED_FALLBACK,
842 				.cra_blocksize		= DES_BLOCK_SIZE,
843 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
844 				.cra_alignmask		= 0x0f,
845 				.cra_module		= THIS_MODULE,
846 			}
847 		}
848 	},
849 	{
850 		.alg.skcipher = {
851 			.ivsize		= DES_BLOCK_SIZE,
852 			.min_keysize	= DES_KEY_SIZE,
853 			.max_keysize	= DES_KEY_SIZE,
854 			.setkey		= aspeed_des_setkey,
855 			.encrypt	= aspeed_des_cbc_encrypt,
856 			.decrypt	= aspeed_des_cbc_decrypt,
857 			.init		= aspeed_crypto_cra_init,
858 			.exit		= aspeed_crypto_cra_exit,
859 			.base = {
860 				.cra_name		= "cbc(des)",
861 				.cra_driver_name	= "aspeed-cbc-des",
862 				.cra_priority		= 300,
863 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
864 							  CRYPTO_ALG_ASYNC |
865 							  CRYPTO_ALG_NEED_FALLBACK,
866 				.cra_blocksize		= DES_BLOCK_SIZE,
867 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
868 				.cra_alignmask		= 0x0f,
869 				.cra_module		= THIS_MODULE,
870 			}
871 		}
872 	},
873 	{
874 		.alg.skcipher = {
875 			.ivsize		= DES_BLOCK_SIZE,
876 			.min_keysize	= DES_KEY_SIZE,
877 			.max_keysize	= DES_KEY_SIZE,
878 			.setkey		= aspeed_des_setkey,
879 			.encrypt	= aspeed_des_cfb_encrypt,
880 			.decrypt	= aspeed_des_cfb_decrypt,
881 			.init		= aspeed_crypto_cra_init,
882 			.exit		= aspeed_crypto_cra_exit,
883 			.base = {
884 				.cra_name		= "cfb(des)",
885 				.cra_driver_name	= "aspeed-cfb-des",
886 				.cra_priority		= 300,
887 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
888 							  CRYPTO_ALG_ASYNC |
889 							  CRYPTO_ALG_NEED_FALLBACK,
890 				.cra_blocksize		= DES_BLOCK_SIZE,
891 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
892 				.cra_alignmask		= 0x0f,
893 				.cra_module		= THIS_MODULE,
894 			}
895 		}
896 	},
897 	{
898 		.alg.skcipher = {
899 			.ivsize		= DES_BLOCK_SIZE,
900 			.min_keysize	= DES_KEY_SIZE,
901 			.max_keysize	= DES_KEY_SIZE,
902 			.setkey		= aspeed_des_setkey,
903 			.encrypt	= aspeed_des_ofb_encrypt,
904 			.decrypt	= aspeed_des_ofb_decrypt,
905 			.init		= aspeed_crypto_cra_init,
906 			.exit		= aspeed_crypto_cra_exit,
907 			.base = {
908 				.cra_name		= "ofb(des)",
909 				.cra_driver_name	= "aspeed-ofb-des",
910 				.cra_priority		= 300,
911 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
912 							  CRYPTO_ALG_ASYNC |
913 							  CRYPTO_ALG_NEED_FALLBACK,
914 				.cra_blocksize		= DES_BLOCK_SIZE,
915 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
916 				.cra_alignmask		= 0x0f,
917 				.cra_module		= THIS_MODULE,
918 			}
919 		}
920 	},
921 	{
922 		.alg.skcipher = {
923 			.min_keysize	= DES3_EDE_KEY_SIZE,
924 			.max_keysize	= DES3_EDE_KEY_SIZE,
925 			.setkey		= aspeed_des_setkey,
926 			.encrypt	= aspeed_tdes_ecb_encrypt,
927 			.decrypt	= aspeed_tdes_ecb_decrypt,
928 			.init		= aspeed_crypto_cra_init,
929 			.exit		= aspeed_crypto_cra_exit,
930 			.base = {
931 				.cra_name		= "ecb(des3_ede)",
932 				.cra_driver_name	= "aspeed-ecb-tdes",
933 				.cra_priority		= 300,
934 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
935 							  CRYPTO_ALG_ASYNC |
936 							  CRYPTO_ALG_NEED_FALLBACK,
937 				.cra_blocksize		= DES_BLOCK_SIZE,
938 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
939 				.cra_alignmask		= 0x0f,
940 				.cra_module		= THIS_MODULE,
941 			}
942 		}
943 	},
944 	{
945 		.alg.skcipher = {
946 			.ivsize		= DES_BLOCK_SIZE,
947 			.min_keysize	= DES3_EDE_KEY_SIZE,
948 			.max_keysize	= DES3_EDE_KEY_SIZE,
949 			.setkey		= aspeed_des_setkey,
950 			.encrypt	= aspeed_tdes_cbc_encrypt,
951 			.decrypt	= aspeed_tdes_cbc_decrypt,
952 			.init		= aspeed_crypto_cra_init,
953 			.exit		= aspeed_crypto_cra_exit,
954 			.base = {
955 				.cra_name		= "cbc(des3_ede)",
956 				.cra_driver_name	= "aspeed-cbc-tdes",
957 				.cra_priority		= 300,
958 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
959 							  CRYPTO_ALG_ASYNC |
960 							  CRYPTO_ALG_NEED_FALLBACK,
961 				.cra_blocksize		= DES_BLOCK_SIZE,
962 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
963 				.cra_alignmask		= 0x0f,
964 				.cra_module		= THIS_MODULE,
965 			}
966 		}
967 	},
968 	{
969 		.alg.skcipher = {
970 			.ivsize		= DES_BLOCK_SIZE,
971 			.min_keysize	= DES3_EDE_KEY_SIZE,
972 			.max_keysize	= DES3_EDE_KEY_SIZE,
973 			.setkey		= aspeed_des_setkey,
974 			.encrypt	= aspeed_tdes_cfb_encrypt,
975 			.decrypt	= aspeed_tdes_cfb_decrypt,
976 			.init		= aspeed_crypto_cra_init,
977 			.exit		= aspeed_crypto_cra_exit,
978 			.base = {
979 				.cra_name		= "cfb(des3_ede)",
980 				.cra_driver_name	= "aspeed-cfb-tdes",
981 				.cra_priority		= 300,
982 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
983 							  CRYPTO_ALG_ASYNC |
984 							  CRYPTO_ALG_NEED_FALLBACK,
985 				.cra_blocksize		= DES_BLOCK_SIZE,
986 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
987 				.cra_alignmask		= 0x0f,
988 				.cra_module		= THIS_MODULE,
989 			}
990 		}
991 	},
992 	{
993 		.alg.skcipher = {
994 			.ivsize		= DES_BLOCK_SIZE,
995 			.min_keysize	= DES3_EDE_KEY_SIZE,
996 			.max_keysize	= DES3_EDE_KEY_SIZE,
997 			.setkey		= aspeed_des_setkey,
998 			.encrypt	= aspeed_tdes_ofb_encrypt,
999 			.decrypt	= aspeed_tdes_ofb_decrypt,
1000 			.init		= aspeed_crypto_cra_init,
1001 			.exit		= aspeed_crypto_cra_exit,
1002 			.base = {
1003 				.cra_name		= "ofb(des3_ede)",
1004 				.cra_driver_name	= "aspeed-ofb-tdes",
1005 				.cra_priority		= 300,
1006 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
1007 							  CRYPTO_ALG_ASYNC |
1008 							  CRYPTO_ALG_NEED_FALLBACK,
1009 				.cra_blocksize		= DES_BLOCK_SIZE,
1010 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
1011 				.cra_alignmask		= 0x0f,
1012 				.cra_module		= THIS_MODULE,
1013 			}
1014 		}
1015 	},
1016 };
1017 
1018 static struct aspeed_hace_alg aspeed_crypto_algs_g6[] = {
1019 	{
1020 		.alg.skcipher = {
1021 			.ivsize		= AES_BLOCK_SIZE,
1022 			.min_keysize	= AES_MIN_KEY_SIZE,
1023 			.max_keysize	= AES_MAX_KEY_SIZE,
1024 			.setkey		= aspeed_aes_setkey,
1025 			.encrypt	= aspeed_aes_ctr_encrypt,
1026 			.decrypt	= aspeed_aes_ctr_decrypt,
1027 			.init		= aspeed_crypto_cra_init,
1028 			.exit		= aspeed_crypto_cra_exit,
1029 			.base = {
1030 				.cra_name		= "ctr(aes)",
1031 				.cra_driver_name	= "aspeed-ctr-aes",
1032 				.cra_priority		= 300,
1033 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
1034 							  CRYPTO_ALG_ASYNC,
1035 				.cra_blocksize		= 1,
1036 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
1037 				.cra_alignmask		= 0x0f,
1038 				.cra_module		= THIS_MODULE,
1039 			}
1040 		}
1041 	},
1042 	{
1043 		.alg.skcipher = {
1044 			.ivsize		= DES_BLOCK_SIZE,
1045 			.min_keysize	= DES_KEY_SIZE,
1046 			.max_keysize	= DES_KEY_SIZE,
1047 			.setkey		= aspeed_des_setkey,
1048 			.encrypt	= aspeed_des_ctr_encrypt,
1049 			.decrypt	= aspeed_des_ctr_decrypt,
1050 			.init		= aspeed_crypto_cra_init,
1051 			.exit		= aspeed_crypto_cra_exit,
1052 			.base = {
1053 				.cra_name		= "ctr(des)",
1054 				.cra_driver_name	= "aspeed-ctr-des",
1055 				.cra_priority		= 300,
1056 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
1057 							  CRYPTO_ALG_ASYNC,
1058 				.cra_blocksize		= 1,
1059 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
1060 				.cra_alignmask		= 0x0f,
1061 				.cra_module		= THIS_MODULE,
1062 			}
1063 		}
1064 	},
1065 	{
1066 		.alg.skcipher = {
1067 			.ivsize		= DES_BLOCK_SIZE,
1068 			.min_keysize	= DES3_EDE_KEY_SIZE,
1069 			.max_keysize	= DES3_EDE_KEY_SIZE,
1070 			.setkey		= aspeed_des_setkey,
1071 			.encrypt	= aspeed_tdes_ctr_encrypt,
1072 			.decrypt	= aspeed_tdes_ctr_decrypt,
1073 			.init		= aspeed_crypto_cra_init,
1074 			.exit		= aspeed_crypto_cra_exit,
1075 			.base = {
1076 				.cra_name		= "ctr(des3_ede)",
1077 				.cra_driver_name	= "aspeed-ctr-tdes",
1078 				.cra_priority		= 300,
1079 				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
1080 							  CRYPTO_ALG_ASYNC,
1081 				.cra_blocksize		= 1,
1082 				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
1083 				.cra_alignmask		= 0x0f,
1084 				.cra_module		= THIS_MODULE,
1085 			}
1086 		}
1087 	},
1088 
1089 };
1090 
1091 void aspeed_unregister_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
1092 {
1093 	int i;
1094 
1095 	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++)
1096 		crypto_unregister_skcipher(&aspeed_crypto_algs[i].alg.skcipher);
1097 
1098 	if (hace_dev->version != AST2600_VERSION)
1099 		return;
1100 
1101 	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++)
1102 		crypto_unregister_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
1103 }
1104 
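/*
 * Register the skcipher algorithms: the common table unconditionally,
 * the g6 table only on the AST2600. Registration failures are logged
 * and skipped rather than propagated.
 *
 * For illustration only (not part of this driver): once registered, a
 * kernel user reaches these implementations through the generic
 * skcipher API, roughly:
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, len, iv);
 *	crypto_skcipher_encrypt(req);	/* may return -EINPROGRESS */
 */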
1105 void aspeed_register_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
1106 {
1107 	int rc, i;
1108 
1109 	CIPHER_DBG(hace_dev, "\n");
1110 
1111 	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++) {
1112 		aspeed_crypto_algs[i].hace_dev = hace_dev;
1113 		rc = crypto_register_skcipher(&aspeed_crypto_algs[i].alg.skcipher);
1114 		if (rc) {
1115 			CIPHER_DBG(hace_dev, "Failed to register %s\n",
1116 				   aspeed_crypto_algs[i].alg.skcipher.base.cra_name);
1117 		}
1118 	}
1119 
1120 	if (hace_dev->version != AST2600_VERSION)
1121 		return;
1122 
1123 	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++) {
1124 		aspeed_crypto_algs_g6[i].hace_dev = hace_dev;
1125 		rc = crypto_register_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
1126 		if (rc) {
1127 			CIPHER_DBG(hace_dev, "Failed to register %s\n",
1128 				   aspeed_crypto_algs_g6[i].alg.skcipher.base.cra_name);
1129 		}
1130 	}
1131 }
1132