xref: /linux/drivers/crypto/starfive/jh7110-aes.c (revision 4dd4d5e486ebdeb48dbc558237d4ba8aab8917d5)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * StarFive AES acceleration driver
4  *
5  * Copyright (c) 2022 StarFive Technology
6  */
7 
8 #include <linux/iopoll.h>
9 #include <crypto/gcm.h>
10 #include <crypto/scatterwalk.h>
11 #include <crypto/internal/aead.h>
12 #include <crypto/internal/skcipher.h>
13 #include "jh7110-cryp.h"
14 
15 #define STARFIVE_AES_REGS_OFFSET	0x100
16 #define STARFIVE_AES_AESDIO0R		(STARFIVE_AES_REGS_OFFSET + 0x0)
17 #define STARFIVE_AES_KEY0		(STARFIVE_AES_REGS_OFFSET + 0x4)
18 #define STARFIVE_AES_KEY1		(STARFIVE_AES_REGS_OFFSET + 0x8)
19 #define STARFIVE_AES_KEY2		(STARFIVE_AES_REGS_OFFSET + 0xC)
20 #define STARFIVE_AES_KEY3		(STARFIVE_AES_REGS_OFFSET + 0x10)
21 #define STARFIVE_AES_KEY4		(STARFIVE_AES_REGS_OFFSET + 0x14)
22 #define STARFIVE_AES_KEY5		(STARFIVE_AES_REGS_OFFSET + 0x18)
23 #define STARFIVE_AES_KEY6		(STARFIVE_AES_REGS_OFFSET + 0x1C)
24 #define STARFIVE_AES_KEY7		(STARFIVE_AES_REGS_OFFSET + 0x20)
25 #define STARFIVE_AES_CSR		(STARFIVE_AES_REGS_OFFSET + 0x24)
26 #define STARFIVE_AES_IV0		(STARFIVE_AES_REGS_OFFSET + 0x28)
27 #define STARFIVE_AES_IV1		(STARFIVE_AES_REGS_OFFSET + 0x2C)
28 #define STARFIVE_AES_IV2		(STARFIVE_AES_REGS_OFFSET + 0x30)
29 #define STARFIVE_AES_IV3		(STARFIVE_AES_REGS_OFFSET + 0x34)
30 #define STARFIVE_AES_NONCE0		(STARFIVE_AES_REGS_OFFSET + 0x3C)
31 #define STARFIVE_AES_NONCE1		(STARFIVE_AES_REGS_OFFSET + 0x40)
32 #define STARFIVE_AES_NONCE2		(STARFIVE_AES_REGS_OFFSET + 0x44)
33 #define STARFIVE_AES_NONCE3		(STARFIVE_AES_REGS_OFFSET + 0x48)
34 #define STARFIVE_AES_ALEN0		(STARFIVE_AES_REGS_OFFSET + 0x4C)
35 #define STARFIVE_AES_ALEN1		(STARFIVE_AES_REGS_OFFSET + 0x50)
36 #define STARFIVE_AES_MLEN0		(STARFIVE_AES_REGS_OFFSET + 0x54)
37 #define STARFIVE_AES_MLEN1		(STARFIVE_AES_REGS_OFFSET + 0x58)
38 #define STARFIVE_AES_IVLEN		(STARFIVE_AES_REGS_OFFSET + 0x5C)
39 
40 #define FLG_MODE_MASK			GENMASK(2, 0)
41 #define FLG_ENCRYPT			BIT(4)
42 
43 /* Misc */
44 #define CCM_B0_ADATA			0x40
45 #define AES_BLOCK_32			(AES_BLOCK_SIZE / sizeof(u32))
46 
/* Poll the AES CSR until the engine BUSY flag clears.
 * Returns 0 on success or -ETIMEDOUT after 100 ms (10 us poll interval).
 */
static inline int starfive_aes_wait_busy(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  !(status & STARFIVE_AES_BUSY), 10, 100000);
}
54 
/* Poll the AES CSR until the hardware signals the key schedule is loaded.
 * Returns 0 on success or -ETIMEDOUT after 100 ms (10 us poll interval).
 */
static inline int starfive_aes_wait_keydone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_KEY_DONE), 10, 100000);
}
62 
/* Poll the AES CSR until the GCM_DONE flag is set (GCM setup step finished).
 * Returns 0 on success or -ETIMEDOUT after 100 ms (10 us poll interval).
 */
static inline int starfive_aes_wait_gcmdone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_GCM_DONE), 10, 100000);
}
70 
71 static inline int is_gcm(struct starfive_cryp_dev *cryp)
72 {
73 	return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM;
74 }
75 
/* Non-zero when the request is an encryption (FLG_ENCRYPT set in flags). */
static inline int is_encrypt(struct starfive_cryp_dev *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}
80 
/* Kick off the AEAD state machine by setting the mode-specific START bit
 * in the CSR.  Only GCM and CCM are handled; other modes are a no-op.
 */
static void starfive_aes_aead_hw_start(struct starfive_cryp_ctx *ctx, u32 hw_mode)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int value;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_GCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		/* NOTE(review): a GCM_DONE timeout is silently ignored here —
		 * confirm whether the error should propagate to the caller. */
		starfive_aes_wait_gcmdone(cryp);
		break;
	case STARFIVE_AES_MODE_CCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_CCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		break;
	}
}
100 
101 static inline void starfive_aes_set_ivlen(struct starfive_cryp_ctx *ctx)
102 {
103 	struct starfive_cryp_dev *cryp = ctx->cryp;
104 
105 	if (is_gcm(cryp))
106 		writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
107 	else
108 		writel(AES_BLOCK_SIZE, cryp->base + STARFIVE_AES_IVLEN);
109 }
110 
/* Program the 64-bit associated-data length; ALEN0 takes the high word. */
static inline void starfive_aes_set_alen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);
	writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);
}
118 
/* Program the 64-bit payload (message) length; MLEN0 takes the high word. */
static inline void starfive_aes_set_mlen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN0);
	writel(lower_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN1);
}
126 
127 static inline int starfive_aes_ccm_check_iv(const u8 *iv)
128 {
129 	/* 2 <= L <= 8, so 1 <= L' <= 7. */
130 	if (iv[0] < 1 || iv[0] > 7)
131 		return -EINVAL;
132 
133 	return 0;
134 }
135 
/* Load the IV registers.  For GCM only three words are written (the
 * hardware derives the counter) and the function waits for GCM_DONE;
 * all other modes take the full four-word IV.
 * Returns 0 on success or -ETIMEDOUT if the GCM setup never completes.
 */
static int starfive_aes_write_iv(struct starfive_cryp_ctx *ctx, u32 *iv)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(iv[0], cryp->base + STARFIVE_AES_IV0);
	writel(iv[1], cryp->base + STARFIVE_AES_IV1);
	writel(iv[2], cryp->base + STARFIVE_AES_IV2);

	if (is_gcm(cryp)) {
		if (starfive_aes_wait_gcmdone(cryp))
			return -ETIMEDOUT;

		return 0;
	}

	writel(iv[3], cryp->base + STARFIVE_AES_IV3);

	return 0;
}
155 
156 static inline void starfive_aes_get_iv(struct starfive_cryp_dev *cryp, u32 *iv)
157 {
158 	iv[0] = readl(cryp->base + STARFIVE_AES_IV0);
159 	iv[1] = readl(cryp->base + STARFIVE_AES_IV1);
160 	iv[2] = readl(cryp->base + STARFIVE_AES_IV2);
161 	iv[3] = readl(cryp->base + STARFIVE_AES_IV3);
162 }
163 
164 static inline void starfive_aes_write_nonce(struct starfive_cryp_ctx *ctx, u32 *nonce)
165 {
166 	struct starfive_cryp_dev *cryp = ctx->cryp;
167 
168 	writel(nonce[0], cryp->base + STARFIVE_AES_NONCE0);
169 	writel(nonce[1], cryp->base + STARFIVE_AES_NONCE1);
170 	writel(nonce[2], cryp->base + STARFIVE_AES_NONCE2);
171 	writel(nonce[3], cryp->base + STARFIVE_AES_NONCE3);
172 }
173 
/* Load the AES key registers according to the configured key length and
 * wait for the hardware key schedule to complete.
 * Returns 0 on success or -ETIMEDOUT.
 */
static int starfive_aes_write_key(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 *key = (u32 *)ctx->key;

	/* All key sizes use the first four words ... */
	if (ctx->keylen >= AES_KEYSIZE_128) {
		writel(key[0], cryp->base + STARFIVE_AES_KEY0);
		writel(key[1], cryp->base + STARFIVE_AES_KEY1);
		writel(key[2], cryp->base + STARFIVE_AES_KEY2);
		writel(key[3], cryp->base + STARFIVE_AES_KEY3);
	}

	/* ... 192/256-bit keys add two more ... */
	if (ctx->keylen >= AES_KEYSIZE_192) {
		writel(key[4], cryp->base + STARFIVE_AES_KEY4);
		writel(key[5], cryp->base + STARFIVE_AES_KEY5);
	}

	/* ... and 256-bit keys fill the last pair. */
	if (ctx->keylen >= AES_KEYSIZE_256) {
		writel(key[6], cryp->base + STARFIVE_AES_KEY6);
		writel(key[7], cryp->base + STARFIVE_AES_KEY7);
	}

	if (starfive_aes_wait_keydone(cryp))
		return -ETIMEDOUT;

	return 0;
}
201 
/* Build the CCM B0 block from the request IV and write it to the nonce
 * registers.  Layout follows the CCM construction: flags byte, nonce,
 * then the message length in the trailing L bytes.
 * Always returns 0.
 */
static int starfive_aes_ccm_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	unsigned int textlen;

	memcpy(iv, cryp->req.areq->iv, AES_BLOCK_SIZE);
	/* Zero the length field: iv[0] = L' = L - 1, so clear the last
	 * L' + 1 bytes starting at AES_BLOCK_SIZE - 1 - iv[0]. */
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	/* Encode the tag size into the flags byte: ((t - 2) / 2) << 3. */
	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->assoclen)
		b0[0] |= CCM_B0_ADATA;

	textlen = cryp->total_in;

	/* NOTE(review): only two length bytes are filled, which limits the
	 * payload to 64 KiB - 1; confirm larger lengths are rejected upstream. */
	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	starfive_aes_write_nonce(ctx, (u32 *)b0);

	return 0;
}
228 
/* Program the AES engine for the current request: reset the core, build
 * the CSR word (key size, mode, direction, interrupt enable, stream
 * feedback width), load the key, then set up the per-mode IV / nonce /
 * length registers.  Returns 0 or a negative error code.
 */
static int starfive_aes_hw_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 hw_mode;

	/* reset */
	rctx->csr.aes.v = 0;
	rctx->csr.aes.aesrst = 1;
	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	/* csr setup */
	hw_mode = cryp->flags & FLG_MODE_MASK;

	rctx->csr.aes.v = 0;

	switch (ctx->keylen) {
	case AES_KEYSIZE_128:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_128;
		break;
	case AES_KEYSIZE_192:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_192;
		break;
	case AES_KEYSIZE_256:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_256;
		break;
	}

	/* cmode set means decryption */
	rctx->csr.aes.mode  = hw_mode;
	rctx->csr.aes.cmode = !is_encrypt(cryp);
	rctx->csr.aes.ie = 1;

	/* CFB/OFB use 128-bit stream feedback; other modes keep XFB_1. */
	if (hw_mode == STARFIVE_AES_MODE_CFB ||
	    hw_mode == STARFIVE_AES_MODE_OFB)
		rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_128;
	else
		rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_1;

	if (cryp->side_chan) {
		rctx->csr.aes.delay_aes = 1;
		rctx->csr.aes.vaes_start = 1;
	}

	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	cryp->err = starfive_aes_write_key(ctx);
	if (cryp->err)
		return cryp->err;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		starfive_aes_set_ivlen(ctx);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		/* NOTE(review): starfive_aes_write_iv() can return -ETIMEDOUT
		 * but the result is discarded here — confirm intent. */
		starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv);
		break;
	case STARFIVE_AES_MODE_CCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		starfive_aes_ccm_init(ctx);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		break;
	case STARFIVE_AES_MODE_OFB:
	case STARFIVE_AES_MODE_CFB:
	case STARFIVE_AES_MODE_CBC:
	case STARFIVE_AES_MODE_CTR:
		starfive_aes_write_iv(ctx, (void *)cryp->req.sreq->iv);
		break;
	default:
		break;
	}

	return cryp->err;
}
304 
305 static int starfive_aes_read_authtag(struct starfive_cryp_dev *cryp)
306 {
307 	int i, start_addr;
308 
309 	if (starfive_aes_wait_busy(cryp))
310 		return dev_err_probe(cryp->dev, -ETIMEDOUT,
311 				     "Timeout waiting for tag generation.");
312 
313 	start_addr = STARFIVE_AES_NONCE0;
314 
315 	if (is_gcm(cryp))
316 		for (i = 0; i < AES_BLOCK_32; i++, start_addr += 4)
317 			cryp->tag_out[i] = readl(cryp->base + start_addr);
318 	else
319 		for (i = 0; i < AES_BLOCK_32; i++)
320 			cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
321 
322 	if (is_encrypt(cryp)) {
323 		scatterwalk_copychunks(cryp->tag_out, &cryp->out_walk, cryp->authsize, 1);
324 	} else {
325 		scatterwalk_copychunks(cryp->tag_in, &cryp->in_walk, cryp->authsize, 0);
326 
327 		if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
328 			return dev_err_probe(cryp->dev, -EBADMSG, "Failed tag verification\n");
329 	}
330 
331 	return 0;
332 }
333 
/* Complete the current request: collect the AEAD tag if needed, save the
 * chained IV for CBC/CTR, reset the engine, and hand the request back to
 * the crypto engine with the final status.
 */
static void starfive_aes_finish_req(struct starfive_cryp_dev *cryp)
{
	union starfive_aes_csr csr;
	int err = cryp->err;

	/* authsize != 0 distinguishes AEAD from plain skcipher requests. */
	if (!err && cryp->authsize)
		err = starfive_aes_read_authtag(cryp);

	if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC ||
		     (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR))
		starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv);

	/* reset irq flags*/
	csr.v = 0;
	csr.aesrst = 1;
	writel(csr.v, cryp->base + STARFIVE_AES_CSR);

	if (cryp->authsize)
		crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq,
						 err);
}
357 
/* Tasklet run on AES-done interrupts: drain one output block, finish the
 * request when all output has been produced, otherwise feed the next
 * input block and re-enable the done interrupt.
 */
void starfive_aes_done_task(unsigned long param)
{
	struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)param;
	u32 block[AES_BLOCK_32];
	u32 stat;
	int i;

	/* Read the just-processed block out of the data FIFO. */
	for (i = 0; i < AES_BLOCK_32; i++)
		block[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);

	/* Copy only the bytes still owed to the destination scatterlist. */
	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, AES_BLOCK_SIZE,
							     cryp->total_out), 1);

	cryp->total_out -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_out);

	if (!cryp->total_out) {
		starfive_aes_finish_req(cryp);
		return;
	}

	/* Zero-pad a possibly short final input block before writing it. */
	memset(block, 0, AES_BLOCK_SIZE);
	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
							    cryp->total_in), 0);
	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);

	for (i = 0; i < AES_BLOCK_32; i++)
		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);

	/* Unmask the AES-done interrupt for the block just queued. */
	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	stat &= ~STARFIVE_IE_MASK_AES_DONE;
	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
}
390 
391 static int starfive_aes_gcm_write_adata(struct starfive_cryp_ctx *ctx)
392 {
393 	struct starfive_cryp_dev *cryp = ctx->cryp;
394 	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
395 	u32 *buffer;
396 	int total_len, loop;
397 
398 	total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(unsigned int);
399 	buffer = (u32 *)rctx->adata;
400 
401 	for (loop = 0; loop < total_len; loop += 4) {
402 		writel(*buffer, cryp->base + STARFIVE_AES_NONCE0);
403 		buffer++;
404 		writel(*buffer, cryp->base + STARFIVE_AES_NONCE1);
405 		buffer++;
406 		writel(*buffer, cryp->base + STARFIVE_AES_NONCE2);
407 		buffer++;
408 		writel(*buffer, cryp->base + STARFIVE_AES_NONCE3);
409 		buffer++;
410 	}
411 
412 	if (starfive_aes_wait_gcmdone(cryp))
413 		return dev_err_probe(cryp->dev, -ETIMEDOUT,
414 				     "Timeout processing gcm aad block");
415 
416 	return 0;
417 }
418 
419 static int starfive_aes_ccm_write_adata(struct starfive_cryp_ctx *ctx)
420 {
421 	struct starfive_cryp_dev *cryp = ctx->cryp;
422 	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
423 	u32 *buffer;
424 	u8 *ci;
425 	int total_len, loop;
426 
427 	total_len = cryp->assoclen;
428 
429 	ci = rctx->adata;
430 	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
431 	ci++;
432 	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
433 	ci++;
434 	total_len -= 2;
435 	buffer = (u32 *)ci;
436 
437 	for (loop = 0; loop < 3; loop++, buffer++)
438 		writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);
439 
440 	total_len -= 12;
441 
442 	while (total_len > 0) {
443 		for (loop = 0; loop < AES_BLOCK_32; loop++, buffer++)
444 			writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);
445 
446 		total_len -= AES_BLOCK_SIZE;
447 	}
448 
449 	if (starfive_aes_wait_busy(cryp))
450 		return dev_err_probe(cryp->dev, -ETIMEDOUT,
451 				     "Timeout processing ccm aad block");
452 
453 	return 0;
454 }
455 
/* Common request setup for skcipher (req) and AEAD (areq) paths: record
 * lengths, start the in/out scatterlist walks, buffer the associated
 * data, and program the hardware.  Exactly one of req/areq must be set.
 * Returns 0 or a negative error code.
 */
static int starfive_aes_prepare_req(struct skcipher_request *req,
				    struct aead_request *areq)
{
	struct starfive_cryp_ctx *ctx;
	struct starfive_cryp_request_ctx *rctx;
	struct starfive_cryp_dev *cryp;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;
	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);

	if (req) {
		cryp->req.sreq = req;
		cryp->total_in = req->cryptlen;
		cryp->total_out = req->cryptlen;
		cryp->assoclen = 0;
		cryp->authsize = 0;
	} else {
		cryp->req.areq = areq;
		cryp->assoclen = areq->assoclen;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->total_in = areq->cryptlen;
			cryp->total_out = areq->cryptlen;
		} else {
			/* Decrypt input includes the tag; payload excludes it. */
			cryp->total_in = areq->cryptlen - cryp->authsize;
			cryp->total_out = cryp->total_in;
		}
	}

	rctx->in_sg = req ? req->src : areq->src;
	scatterwalk_start(&cryp->in_walk, rctx->in_sg);

	rctx->out_sg = req ? req->dst : areq->dst;
	scatterwalk_start(&cryp->out_walk, rctx->out_sg);

	if (cryp->assoclen) {
		/* Linearize the AAD, zero-padded to a block multiple. */
		rctx->adata = kzalloc(ALIGN(cryp->assoclen, AES_BLOCK_SIZE), GFP_KERNEL);
		if (!rctx->adata)
			/* NOTE(review): dev_err_probe() in a request path —
			 * dev_err() + return -ENOMEM would be conventional. */
			return dev_err_probe(cryp->dev, -ENOMEM,
					     "Failed to alloc memory for adata");

		/* Consume the AAD from the input walk; skip it on output. */
		scatterwalk_copychunks(rctx->adata, &cryp->in_walk, cryp->assoclen, 0);
		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->assoclen, 2);
	}

	ctx->rctx = rctx;

	return starfive_aes_hw_init(ctx);
}
511 
/* Crypto-engine callback for skcipher requests: set up the hardware,
 * push the first input block, and unmask the done interrupt so the
 * tasklet can pump the remaining blocks.
 */
static int starfive_aes_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 block[AES_BLOCK_32];
	u32 stat;
	int err;
	int i;

	err = starfive_aes_prepare_req(req, NULL);
	if (err)
		return err;

	/*
	 * Write first plain/ciphertext block to start the module
	 * then let irq tasklet handle the rest of the data blocks.
	 */
	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
							    cryp->total_in), 0);
	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);

	for (i = 0; i < AES_BLOCK_32; i++)
		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);

	/* Unmask the AES-done interrupt. */
	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	stat &= ~STARFIVE_IE_MASK_AES_DONE;
	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);

	return 0;
}
545 
/* skcipher transform init: bind the transform to a crypto device, size
 * the request context, and install the engine callback.
 */
static int starfive_aes_init_tfm(struct crypto_skcipher *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	crypto_skcipher_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
				    sizeof(struct skcipher_request));

	ctx->enginectx.op.do_one_request = starfive_aes_do_one_req;

	return 0;
}
561 
/* Crypto-engine callback for AEAD requests: set up the hardware, feed
 * the associated data (GCM or CCM path), then push the first payload
 * block and let the tasklet pump the rest.  AAD-only requests finish
 * immediately.
 */
static int starfive_aes_aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req =
		container_of(areq, struct aead_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 block[AES_BLOCK_32];
	u32 stat;
	int err;
	int i;

	err = starfive_aes_prepare_req(NULL, req);
	if (err)
		return err;

	if (!cryp->assoclen)
		goto write_text;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM)
		cryp->err = starfive_aes_ccm_write_adata(ctx);
	else
		cryp->err = starfive_aes_gcm_write_adata(ctx);

	/* The linearized AAD buffer is no longer needed. */
	kfree(rctx->adata);

	if (cryp->err)
		return cryp->err;

write_text:
	if (!cryp->total_in)
		goto finish_req;

	/*
	 * Write first plain/ciphertext block to start the module
	 * then let irq tasklet handle the rest of the data blocks.
	 */
	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
							    cryp->total_in), 0);
	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);

	for (i = 0; i < AES_BLOCK_32; i++)
		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);

	/* Unmask the AES-done interrupt. */
	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	stat &= ~STARFIVE_IE_MASK_AES_DONE;
	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);

	return 0;

finish_req:
	starfive_aes_finish_req(cryp);
	return 0;
}
617 
618 static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm)
619 {
620 	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
621 	struct starfive_cryp_dev *cryp = ctx->cryp;
622 	struct crypto_tfm *aead = crypto_aead_tfm(tfm);
623 	struct crypto_alg *alg = aead->__crt_alg;
624 
625 	ctx->cryp = starfive_cryp_find_dev(ctx);
626 	if (!ctx->cryp)
627 		return -ENODEV;
628 
629 	if (alg->cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
630 		ctx->aead_fbk = crypto_alloc_aead(alg->cra_name, 0,
631 						  CRYPTO_ALG_NEED_FALLBACK);
632 		if (IS_ERR(ctx->aead_fbk))
633 			return dev_err_probe(cryp->dev, PTR_ERR(ctx->aead_fbk),
634 					     "%s() failed to allocate fallback for %s\n",
635 					     __func__, alg->cra_name);
636 	}
637 
638 	crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_ctx) +
639 				sizeof(struct aead_request));
640 
641 	ctx->enginectx.op.do_one_request = starfive_aes_aead_do_one_req;
642 
643 	return 0;
644 }
645 
/* AEAD transform teardown: release the fallback cipher, if any
 * (crypto_free_aead() tolerates NULL).
 */
static void starfive_aes_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->aead_fbk);
}
652 
/* Queue an skcipher request on the engine.  ECB/CBC require the input
 * to be a multiple of the block size.
 */
static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	/* Mask for checking block-size alignment of cryptlen. */
	unsigned int blocksize_align = crypto_skcipher_blocksize(tfm) - 1;

	cryp->flags = flags;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
	    (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
		if (req->cryptlen & blocksize_align)
			return -EINVAL;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}
669 
/* Queue an AEAD request on the engine, or divert decryption to the
 * software fallback when one exists.
 */
static int starfive_aes_aead_crypt(struct aead_request *req, unsigned long flags)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;

	cryp->flags = flags;

	/*
	 * HW engine could not perform CCM tag verification on
	 * non-blocksize aligned text, use fallback algo instead
	 */
	if (ctx->aead_fbk && !is_encrypt(cryp)) {
		/* The subrequest lives in the request context sized at init. */
		struct aead_request *subreq = aead_request_ctx(req);

		aead_request_set_tfm(subreq, ctx->aead_fbk);
		aead_request_set_callback(subreq, req->base.flags,
					  req->base.complete, req->base.data);
		aead_request_set_crypt(subreq, req->src,
				       req->dst, req->cryptlen, req->iv);
		aead_request_set_ad(subreq, req->assoclen);

		return crypto_aead_decrypt(subreq);
	}

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}
696 
697 static int starfive_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
698 			       unsigned int keylen)
699 {
700 	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
701 
702 	if (!key || !keylen)
703 		return -EINVAL;
704 
705 	if (keylen != AES_KEYSIZE_128 &&
706 	    keylen != AES_KEYSIZE_192 &&
707 	    keylen != AES_KEYSIZE_256)
708 		return -EINVAL;
709 
710 	memcpy(ctx->key, key, keylen);
711 	ctx->keylen = keylen;
712 
713 	return 0;
714 }
715 
716 static int starfive_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
717 				    unsigned int keylen)
718 {
719 	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
720 
721 	if (!key || !keylen)
722 		return -EINVAL;
723 
724 	if (keylen != AES_KEYSIZE_128 &&
725 	    keylen != AES_KEYSIZE_192 &&
726 	    keylen != AES_KEYSIZE_256)
727 		return -EINVAL;
728 
729 	memcpy(ctx->key, key, keylen);
730 	ctx->keylen = keylen;
731 
732 	if (ctx->aead_fbk)
733 		return crypto_aead_setkey(ctx->aead_fbk, key, keylen);
734 
735 	return 0;
736 }
737 
/* Accept any GCM tag length allowed by the generic helper. */
static int starfive_aes_gcm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
743 
744 static int starfive_aes_ccm_setauthsize(struct crypto_aead *tfm,
745 					unsigned int authsize)
746 {
747 	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
748 
749 	switch (authsize) {
750 	case 4:
751 	case 6:
752 	case 8:
753 	case 10:
754 	case 12:
755 	case 14:
756 	case 16:
757 		break;
758 	default:
759 		return -EINVAL;
760 	}
761 
762 	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
763 }
764 
/* Thin per-mode skcipher entry points: each encodes the hardware mode
 * (and, for encryption, FLG_ENCRYPT) into the flags passed to
 * starfive_aes_crypt().
 */
static int starfive_aes_ecb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB | FLG_ENCRYPT);
}

static int starfive_aes_ecb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB);
}

static int starfive_aes_cbc_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC | FLG_ENCRYPT);
}

static int starfive_aes_cbc_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC);
}

static int starfive_aes_cfb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CFB | FLG_ENCRYPT);
}

static int starfive_aes_cfb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CFB);
}

static int starfive_aes_ofb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_OFB | FLG_ENCRYPT);
}

static int starfive_aes_ofb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_OFB);
}

static int starfive_aes_ctr_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR | FLG_ENCRYPT);
}

static int starfive_aes_ctr_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR);
}
814 
/* Per-mode AEAD entry points.  The CCM variants validate the length
 * field in the IV before queuing the request.
 */
static int starfive_aes_gcm_encrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM | FLG_ENCRYPT);
}

static int starfive_aes_gcm_decrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM);
}

static int starfive_aes_ccm_encrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM | FLG_ENCRYPT);
}

static int starfive_aes_ccm_decrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM);
}
846 
/* skcipher algorithm table: ECB, CBC, CTR, CFB and OFB over AES.
 * Stream modes (CTR/CFB/OFB) advertise a 1-byte block size.
 */
static struct skcipher_alg skcipher_algs[] = {
{
	.init				= starfive_aes_init_tfm,
	.setkey				= starfive_aes_setkey,
	.encrypt			= starfive_aes_ecb_encrypt,
	.decrypt			= starfive_aes_ecb_decrypt,
	.min_keysize			= AES_MIN_KEY_SIZE,
	.max_keysize			= AES_MAX_KEY_SIZE,
	.base = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "starfive-ecb-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init				= starfive_aes_init_tfm,
	.setkey				= starfive_aes_setkey,
	.encrypt			= starfive_aes_cbc_encrypt,
	.decrypt			= starfive_aes_cbc_decrypt,
	.min_keysize			= AES_MIN_KEY_SIZE,
	.max_keysize			= AES_MAX_KEY_SIZE,
	.ivsize				= AES_BLOCK_SIZE,
	.base = {
		.cra_name		= "cbc(aes)",
		.cra_driver_name	= "starfive-cbc-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init				= starfive_aes_init_tfm,
	.setkey				= starfive_aes_setkey,
	.encrypt			= starfive_aes_ctr_encrypt,
	.decrypt			= starfive_aes_ctr_decrypt,
	.min_keysize			= AES_MIN_KEY_SIZE,
	.max_keysize			= AES_MAX_KEY_SIZE,
	.ivsize				= AES_BLOCK_SIZE,
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "starfive-ctr-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init				= starfive_aes_init_tfm,
	.setkey				= starfive_aes_setkey,
	.encrypt			= starfive_aes_cfb_encrypt,
	.decrypt			= starfive_aes_cfb_decrypt,
	.min_keysize			= AES_MIN_KEY_SIZE,
	.max_keysize			= AES_MAX_KEY_SIZE,
	.ivsize				= AES_BLOCK_SIZE,
	.base = {
		.cra_name		= "cfb(aes)",
		.cra_driver_name	= "starfive-cfb-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init				= starfive_aes_init_tfm,
	.setkey				= starfive_aes_setkey,
	.encrypt			= starfive_aes_ofb_encrypt,
	.decrypt			= starfive_aes_ofb_decrypt,
	.min_keysize			= AES_MIN_KEY_SIZE,
	.max_keysize			= AES_MAX_KEY_SIZE,
	.ivsize				= AES_BLOCK_SIZE,
	.base = {
		.cra_name		= "ofb(aes)",
		.cra_driver_name	= "starfive-ofb-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};
939 
/* AEAD algorithm table: GCM and CCM over AES.  CCM requests a software
 * fallback (used for decryption tag verification, see
 * starfive_aes_aead_crypt()).
 */
static struct aead_alg aead_algs[] = {
{
	.setkey                         = starfive_aes_aead_setkey,
	.setauthsize                    = starfive_aes_gcm_setauthsize,
	.encrypt                        = starfive_aes_gcm_encrypt,
	.decrypt                        = starfive_aes_gcm_decrypt,
	.init                           = starfive_aes_aead_init_tfm,
	.exit                           = starfive_aes_aead_exit_tfm,
	.ivsize                         = GCM_AES_IV_SIZE,
	.maxauthsize                    = AES_BLOCK_SIZE,
	.base = {
		.cra_name               = "gcm(aes)",
		.cra_driver_name        = "starfive-gcm-aes",
		.cra_priority           = 200,
		.cra_flags              = CRYPTO_ALG_ASYNC,
		.cra_blocksize          = 1,
		.cra_ctxsize            = sizeof(struct starfive_cryp_ctx),
		.cra_alignmask          = 0xf,
		.cra_module             = THIS_MODULE,
	},
}, {
	.setkey		                = starfive_aes_aead_setkey,
	.setauthsize	                = starfive_aes_ccm_setauthsize,
	.encrypt	                = starfive_aes_ccm_encrypt,
	.decrypt	                = starfive_aes_ccm_decrypt,
	.init		                = starfive_aes_aead_init_tfm,
	.exit		                = starfive_aes_aead_exit_tfm,
	.ivsize		                = AES_BLOCK_SIZE,
	.maxauthsize	                = AES_BLOCK_SIZE,
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "starfive-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};
982 
983 int starfive_aes_register_algs(void)
984 {
985 	int ret;
986 
987 	ret = crypto_register_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
988 	if (ret)
989 		return ret;
990 
991 	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
992 	if (ret)
993 		crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
994 
995 	return ret;
996 }
997 
/* Unregister the algorithm tables in reverse registration order. */
void starfive_aes_unregister_algs(void)
{
	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}
1003