// SPDX-License-Identifier: GPL-2.0
/*
 * StarFive AES acceleration driver
 *
 * Copyright (c) 2022 StarFive Technology
 */

#include <crypto/engine.h>
#include <crypto/gcm.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "jh7110-cryp.h"

#define STARFIVE_AES_REGS_OFFSET	0x100
#define STARFIVE_AES_AESDIO0R		(STARFIVE_AES_REGS_OFFSET + 0x0)
#define STARFIVE_AES_KEY0		(STARFIVE_AES_REGS_OFFSET + 0x4)
#define STARFIVE_AES_KEY1		(STARFIVE_AES_REGS_OFFSET + 0x8)
#define STARFIVE_AES_KEY2		(STARFIVE_AES_REGS_OFFSET + 0xC)
#define STARFIVE_AES_KEY3		(STARFIVE_AES_REGS_OFFSET + 0x10)
#define STARFIVE_AES_KEY4		(STARFIVE_AES_REGS_OFFSET + 0x14)
#define STARFIVE_AES_KEY5		(STARFIVE_AES_REGS_OFFSET + 0x18)
#define STARFIVE_AES_KEY6		(STARFIVE_AES_REGS_OFFSET + 0x1C)
#define STARFIVE_AES_KEY7		(STARFIVE_AES_REGS_OFFSET + 0x20)
#define STARFIVE_AES_CSR		(STARFIVE_AES_REGS_OFFSET + 0x24)
#define STARFIVE_AES_IV0		(STARFIVE_AES_REGS_OFFSET + 0x28)
#define STARFIVE_AES_IV1		(STARFIVE_AES_REGS_OFFSET + 0x2C)
#define STARFIVE_AES_IV2		(STARFIVE_AES_REGS_OFFSET + 0x30)
#define STARFIVE_AES_IV3		(STARFIVE_AES_REGS_OFFSET + 0x34)
#define STARFIVE_AES_NONCE0		(STARFIVE_AES_REGS_OFFSET + 0x3C)
#define STARFIVE_AES_NONCE1		(STARFIVE_AES_REGS_OFFSET + 0x40)
#define STARFIVE_AES_NONCE2		(STARFIVE_AES_REGS_OFFSET + 0x44)
#define STARFIVE_AES_NONCE3		(STARFIVE_AES_REGS_OFFSET + 0x48)
#define STARFIVE_AES_ALEN0		(STARFIVE_AES_REGS_OFFSET + 0x4C)
#define STARFIVE_AES_ALEN1		(STARFIVE_AES_REGS_OFFSET + 0x50)
#define STARFIVE_AES_MLEN0		(STARFIVE_AES_REGS_OFFSET + 0x54)
#define STARFIVE_AES_MLEN1		(STARFIVE_AES_REGS_OFFSET + 0x58)
#define STARFIVE_AES_IVLEN		(STARFIVE_AES_REGS_OFFSET + 0x5C)

#define FLG_MODE_MASK			GENMASK(2, 0)
#define FLG_ENCRYPT			BIT(4)

/* Misc */
#define CCM_B0_ADATA			0x40
#define AES_BLOCK_32			(AES_BLOCK_SIZE / sizeof(u32))

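/*
 * Helpers that busy-wait on status bits in the AES CSR. Each one polls at
 * a 10us interval and gives up after 100ms, returning -ETIMEDOUT on
 * expiry.
 */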
static inline int starfive_aes_wait_busy(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  !(status & STARFIVE_AES_BUSY), 10, 100000);
}

static inline int starfive_aes_wait_keydone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_KEY_DONE), 10, 100000);
}

static inline int starfive_aes_wait_gcmdone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_GCM_DONE), 10, 100000);
}

static inline bool is_gcm(struct starfive_cryp_dev *cryp)
{
	return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM;
}

static inline bool is_encrypt(struct starfive_cryp_dev *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static void starfive_aes_aead_hw_start(struct starfive_cryp_ctx *ctx, u32 hw_mode)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 value;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		value = readl(cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_GCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		starfive_aes_wait_gcmdone(cryp);
		break;
	case STARFIVE_AES_MODE_CCM:
		value = readl(cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_CCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		break;
	}
}

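/*
 * The associated-data and message lengths are 64-bit values split across
 * a register pair: the upper 32 bits go to the *LEN0 register and the
 * lower 32 bits to *LEN1.
 */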
static inline void starfive_aes_set_alen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);
	writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);
}

static inline void starfive_aes_set_mlen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN0);
	writel(lower_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN1);
}

static inline int starfive_aes_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int starfive_aes_write_iv(struct starfive_cryp_ctx *ctx, u32 *iv)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(iv[0], cryp->base + STARFIVE_AES_IV0);
	writel(iv[1], cryp->base + STARFIVE_AES_IV1);
	writel(iv[2], cryp->base + STARFIVE_AES_IV2);

	if (is_gcm(cryp)) {
		if (starfive_aes_wait_gcmdone(cryp))
			return -ETIMEDOUT;

		return 0;
	}

	writel(iv[3], cryp->base + STARFIVE_AES_IV3);

	return 0;
}

static inline void starfive_aes_get_iv(struct starfive_cryp_dev *cryp, u32 *iv)
{
	iv[0] = readl(cryp->base + STARFIVE_AES_IV0);
	iv[1] = readl(cryp->base + STARFIVE_AES_IV1);
	iv[2] = readl(cryp->base + STARFIVE_AES_IV2);
	iv[3] = readl(cryp->base + STARFIVE_AES_IV3);
}

static inline void starfive_aes_write_nonce(struct starfive_cryp_ctx *ctx, u32 *nonce)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(nonce[0], cryp->base + STARFIVE_AES_NONCE0);
	writel(nonce[1], cryp->base + STARFIVE_AES_NONCE1);
	writel(nonce[2], cryp->base + STARFIVE_AES_NONCE2);
	writel(nonce[3], cryp->base + STARFIVE_AES_NONCE3);
}

static int starfive_aes_write_key(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 *key = (u32 *)ctx->key;

	if (ctx->keylen >= AES_KEYSIZE_128) {
		writel(key[0], cryp->base + STARFIVE_AES_KEY0);
		writel(key[1], cryp->base + STARFIVE_AES_KEY1);
		writel(key[2], cryp->base + STARFIVE_AES_KEY2);
		writel(key[3], cryp->base + STARFIVE_AES_KEY3);
	}

	if (ctx->keylen >= AES_KEYSIZE_192) {
		writel(key[4], cryp->base + STARFIVE_AES_KEY4);
		writel(key[5], cryp->base + STARFIVE_AES_KEY5);
	}

	if (ctx->keylen >= AES_KEYSIZE_256) {
		writel(key[6], cryp->base + STARFIVE_AES_KEY6);
		writel(key[7], cryp->base + STARFIVE_AES_KEY7);
	}

	if (starfive_aes_wait_keydone(cryp))
		return -ETIMEDOUT;

	return 0;
}

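/*
 * Build the CCM B0 block from the request IV, following the RFC 3610
 * layout: the flags byte keeps L' = iv[0] in bits 2..0, gains the encoded
 * tag length M' = (authsize - 2) / 2 in bits 5..3, and the Adata bit
 * (0x40) when AAD is present; the trailing length field (zeroed from the
 * IV copy first) holds the text length. Note that only the low 16 bits of
 * the text length are programmed here.
 */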
static int starfive_aes_ccm_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	unsigned int textlen;

	memcpy(iv, cryp->req.areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->assoclen)
		b0[0] |= CCM_B0_ADATA;

	textlen = cryp->total_in;

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	starfive_aes_write_nonce(ctx, (u32 *)b0);

	return 0;
}

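/*
 * Per-request hardware setup: pulse the AES reset bit, program the CSR
 * with key size, mode and direction, load the key, then do the
 * mode-specific initialisation (lengths, IV/nonce, GCM/CCM start).
 */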
static int starfive_aes_hw_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 hw_mode;

	/* reset */
	rctx->csr.aes.v = 0;
	rctx->csr.aes.aesrst = 1;
	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	/* csr setup */
	hw_mode = cryp->flags & FLG_MODE_MASK;

	rctx->csr.aes.v = 0;

	switch (ctx->keylen) {
	case AES_KEYSIZE_128:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_128;
		break;
	case AES_KEYSIZE_192:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_192;
		break;
	case AES_KEYSIZE_256:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_256;
		break;
	}

	rctx->csr.aes.mode  = hw_mode;
	rctx->csr.aes.cmode = !is_encrypt(cryp);
	rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_1;

	if (cryp->side_chan) {
		rctx->csr.aes.delay_aes = 1;
		rctx->csr.aes.vaes_start = 1;
	}

	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	cryp->err = starfive_aes_write_key(ctx);
	if (cryp->err)
		return cryp->err;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv);
		break;
	case STARFIVE_AES_MODE_CCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		starfive_aes_ccm_init(ctx);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		break;
	case STARFIVE_AES_MODE_CBC:
	case STARFIVE_AES_MODE_CTR:
		starfive_aes_write_iv(ctx, (void *)cryp->req.sreq->iv);
		break;
	default:
		break;
	}

	return cryp->err;
}

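/*
 * After the final block, GCM exposes the authentication tag through the
 * NONCE registers while CCM returns it through the data FIFO. On
 * encryption the tag is appended to the destination scatterlist; on
 * decryption it is compared against the copy saved from the source using
 * the constant-time crypto_memneq().
 */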
static int starfive_aes_read_authtag(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	int i;

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout waiting for tag generation\n");

	if (is_gcm(cryp)) {
		cryp->tag_out[0] = readl(cryp->base + STARFIVE_AES_NONCE0);
		cryp->tag_out[1] = readl(cryp->base + STARFIVE_AES_NONCE1);
		cryp->tag_out[2] = readl(cryp->base + STARFIVE_AES_NONCE2);
		cryp->tag_out[3] = readl(cryp->base + STARFIVE_AES_NONCE3);
	} else {
		for (i = 0; i < AES_BLOCK_32; i++)
			cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
	}

	if (is_encrypt(cryp)) {
		scatterwalk_map_and_copy(cryp->tag_out, rctx->out_sg,
					 cryp->total_in, cryp->authsize, 1);
	} else {
		if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
			return -EBADMSG;
	}

	return 0;
}

static void starfive_aes_finish_req(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	int err = cryp->err;

	if (!err && cryp->authsize)
		err = starfive_aes_read_authtag(ctx);

	if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC ||
		     (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR))
		starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv);

	if (cryp->authsize)
		crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq,
						 err);
}

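/*
 * Feed the associated data to the GCM engine through the NONCE registers,
 * one 16-byte block per loop iteration. rctx->adata was allocated with an
 * extra zeroed AES_BLOCK_SIZE, so rounding assoclen up to a whole block
 * only reads zero padding past the end of the AAD.
 */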
static int starfive_aes_gcm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	int total_len, loop;

	total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(u32);
	buffer = (u32 *)rctx->adata;

	for (loop = 0; loop < total_len; loop += 4) {
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE0);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE1);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE2);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE3);
		buffer++;
	}

	if (starfive_aes_wait_gcmdone(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing gcm aad block\n");

	return 0;
}

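/*
 * Feed the associated data to the CCM engine through the data FIFO. Only
 * 14 bytes go into the first block, presumably because the hardware
 * itself prepends the 2-byte AAD length encoding that CCM places at the
 * start of the first AAD block; every following block carries a full 16
 * bytes.
 */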
static int starfive_aes_ccm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	u8 *ci;
	int total_len, loop;

	total_len = cryp->assoclen;

	ci = rctx->adata;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	total_len -= 2;
	buffer = (u32 *)ci;

	for (loop = 0; loop < 3; loop++, buffer++)
		writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

	total_len -= 12;

	while (total_len > 0) {
		for (loop = 0; loop < AES_BLOCK_32; loop++, buffer++)
			writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

		total_len -= AES_BLOCK_SIZE;
	}

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing ccm aad block\n");

	return 0;
}

static void starfive_aes_dma_done(void *param)
{
	struct starfive_cryp_dev *cryp = param;

	complete(&cryp->dma_done);
}

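/*
 * Static DMA slave setup: the memory side moves 16 bytes per beat while
 * the device side is the 32-bit crypto FIFO at STARFIVE_ALG_FIFO_OFFSET.
 */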
static void starfive_aes_dma_init(struct starfive_cryp_dev *cryp)
{
	cryp->cfg_in.direction = DMA_MEM_TO_DEV;
	cryp->cfg_in.src_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
	cryp->cfg_in.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cryp->cfg_in.src_maxburst = cryp->dma_maxburst;
	cryp->cfg_in.dst_maxburst = cryp->dma_maxburst;
	cryp->cfg_in.dst_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;

	dmaengine_slave_config(cryp->tx, &cryp->cfg_in);

	cryp->cfg_out.direction = DMA_DEV_TO_MEM;
	cryp->cfg_out.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cryp->cfg_out.dst_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
	cryp->cfg_out.src_maxburst = 4;
	cryp->cfg_out.dst_maxburst = 4;
	cryp->cfg_out.src_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;

	dmaengine_slave_config(cryp->rx, &cryp->cfg_out);

	init_completion(&cryp->dma_done);
}

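/*
 * Run one in/out DMA transfer over a single scatterlist entry. The length
 * is rounded up to a whole AES block for both the hardware length
 * registers and the temporarily patched sg_dma_len(), so a short tail
 * still moves as a full block; the original lengths are restored before
 * returning.
 */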
static int starfive_aes_dma_xfer(struct starfive_cryp_dev *cryp,
				 struct scatterlist *src,
				 struct scatterlist *dst,
				 int len)
{
	struct dma_async_tx_descriptor *in_desc, *out_desc;
	union starfive_alg_cr alg_cr;
	int ret = 0, in_save, out_save;

	alg_cr.v = 0;
	alg_cr.start = 1;
	alg_cr.aes_dma_en = 1;
	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);

	in_save = sg_dma_len(src);
	out_save = sg_dma_len(dst);

	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_IN_LEN_OFFSET);
	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_OUT_LEN_OFFSET);

	sg_dma_len(src) = ALIGN(len, AES_BLOCK_SIZE);
	sg_dma_len(dst) = ALIGN(len, AES_BLOCK_SIZE);

	out_desc = dmaengine_prep_slave_sg(cryp->rx, dst, 1, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!out_desc) {
		ret = -EINVAL;
		goto dma_err;
	}

	out_desc->callback = starfive_aes_dma_done;
	out_desc->callback_param = cryp;

	reinit_completion(&cryp->dma_done);
	dmaengine_submit(out_desc);
	dma_async_issue_pending(cryp->rx);

	in_desc = dmaengine_prep_slave_sg(cryp->tx, src, 1, DMA_MEM_TO_DEV,
					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!in_desc) {
		ret = -EINVAL;
		goto dma_err;
	}

	dmaengine_submit(in_desc);
	dma_async_issue_pending(cryp->tx);

	if (!wait_for_completion_timeout(&cryp->dma_done,
					 msecs_to_jiffies(1000)))
		ret = -ETIMEDOUT;

dma_err:
	sg_dma_len(src) = in_save;
	sg_dma_len(dst) = out_save;

	alg_cr.v = 0;
	alg_cr.clear = 1;
	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);

	return ret;
}

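/*
 * Map and transfer the whole request. For in-place operation src and dst
 * share entries and can be walked together; otherwise the two lists are
 * walked in lockstep, using scatterwalk_ffwd() to split whichever entry
 * is longer so that each DMA transfer covers the common length.
 */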
static int starfive_aes_map_sg(struct starfive_cryp_dev *cryp,
			       struct scatterlist *src,
			       struct scatterlist *dst)
{
	struct scatterlist *stsg, *dtsg;
	struct scatterlist _src[2], _dst[2];
	unsigned int remain = cryp->total_in;
	unsigned int len, src_nents, dst_nents;
	int ret;

	if (src == dst) {
		for (stsg = src, dtsg = dst; remain > 0;
		     stsg = sg_next(stsg), dtsg = sg_next(dtsg)) {
			src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
			if (src_nents == 0)
				return dev_err_probe(cryp->dev, -ENOMEM,
						     "dma_map_sg error\n");

			dst_nents = src_nents;
			len = min(sg_dma_len(stsg), remain);

			ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
			dma_unmap_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
			if (ret)
				return ret;

			remain -= len;
		}
	} else {
		for (stsg = src, dtsg = dst;;) {
			src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
			if (src_nents == 0)
				return dev_err_probe(cryp->dev, -ENOMEM,
						     "dma_map_sg src error\n");

			dst_nents = dma_map_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
			if (dst_nents == 0)
				return dev_err_probe(cryp->dev, -ENOMEM,
						     "dma_map_sg dst error\n");

			len = min(sg_dma_len(stsg), sg_dma_len(dtsg));
			len = min(len, remain);

			ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
			dma_unmap_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
			dma_unmap_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
			if (ret)
				return ret;

			remain -= len;
			if (remain == 0)
				break;

			if (sg_dma_len(stsg) - len) {
				stsg = scatterwalk_ffwd(_src, stsg, len);
				dtsg = sg_next(dtsg);
			} else if (sg_dma_len(dtsg) - len) {
				dtsg = scatterwalk_ffwd(_dst, dtsg, len);
				stsg = sg_next(stsg);
			} else {
				stsg = sg_next(stsg);
				dtsg = sg_next(dtsg);
			}
		}
	}

	return 0;
}

static int starfive_aes_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct starfive_cryp_request_ctx *rctx = skcipher_request_ctx(req);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	int ret;

	cryp->req.sreq = req;
	cryp->total_in = req->cryptlen;
	cryp->total_out = req->cryptlen;
	cryp->assoclen = 0;
	cryp->authsize = 0;

	rctx->in_sg = req->src;
	rctx->out_sg = req->dst;

	ctx->rctx = rctx;

	ret = starfive_aes_hw_init(ctx);
	if (ret)
		return ret;

	if (!cryp->total_in)
		goto finish_req;

	starfive_aes_dma_init(cryp);

	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
	if (ret)
		return ret;

finish_req:
	starfive_aes_finish_req(ctx);

	return 0;
}

static int starfive_aes_init_tfm(struct crypto_skcipher *tfm,
				 const char *alg_name)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	ctx->skcipher_fbk = crypto_alloc_skcipher(alg_name, 0,
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->skcipher_fbk))
		return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->skcipher_fbk),
				     "%s() failed to allocate fallback for %s\n",
				     __func__, alg_name);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
				    crypto_skcipher_reqsize(ctx->skcipher_fbk));

	return 0;
}

static void starfive_aes_exit_tfm(struct crypto_skcipher *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->skcipher_fbk);
}

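/*
 * Engine callback for AEAD requests: stash the lengths, copy the AAD into
 * a bounce buffer (and, on decryption, save the trailing tag for later
 * comparison), zero the slack between the end of the text and the end of
 * the input buffer so the padded final block is deterministic, then
 * program the hardware, write the AAD and DMA the text.
 */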
static int starfive_aes_aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req =
		container_of(areq, struct aead_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = aead_request_ctx(req);
	struct scatterlist _src[2], _dst[2];
	int ret;

	cryp->req.areq = req;
	cryp->assoclen = req->assoclen;
	cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));

	rctx->in_sg = scatterwalk_ffwd(_src, req->src, cryp->assoclen);
	if (req->src == req->dst)
		rctx->out_sg = rctx->in_sg;
	else
		rctx->out_sg = scatterwalk_ffwd(_dst, req->dst, cryp->assoclen);

	if (is_encrypt(cryp)) {
		cryp->total_in = req->cryptlen;
		cryp->total_out = req->cryptlen;
	} else {
		cryp->total_in = req->cryptlen - cryp->authsize;
		cryp->total_out = cryp->total_in;
		scatterwalk_map_and_copy(cryp->tag_in, req->src,
					 cryp->total_in + cryp->assoclen,
					 cryp->authsize, 0);
	}

	if (cryp->assoclen) {
		rctx->adata = kzalloc(cryp->assoclen + AES_BLOCK_SIZE, GFP_KERNEL);
		if (!rctx->adata)
			return dev_err_probe(cryp->dev, -ENOMEM,
					     "Failed to alloc memory for adata\n");

		if (sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, cryp->assoclen),
				      rctx->adata, cryp->assoclen) != cryp->assoclen) {
			kfree(rctx->adata);
			return -EINVAL;
		}
	}

	if (cryp->total_in)
		sg_zero_buffer(rctx->in_sg, sg_nents(rctx->in_sg),
			       sg_dma_len(rctx->in_sg) - cryp->total_in,
			       cryp->total_in);

	ctx->rctx = rctx;

	ret = starfive_aes_hw_init(ctx);
	if (ret) {
		if (cryp->assoclen)
			kfree(rctx->adata);
		return ret;
	}

	if (!cryp->assoclen)
		goto write_text;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM)
		ret = starfive_aes_ccm_write_adata(ctx);
	else
		ret = starfive_aes_gcm_write_adata(ctx);

	kfree(rctx->adata);

	if (ret)
		return ret;

write_text:
	if (!cryp->total_in)
		goto finish_req;

	starfive_aes_dma_init(cryp);

	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
	if (ret)
		return ret;

finish_req:
	starfive_aes_finish_req(ctx);
	return 0;
}

static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm,
				      const char *alg_name)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	ctx->aead_fbk = crypto_alloc_aead(alg_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->aead_fbk))
		return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->aead_fbk),
				     "%s() failed to allocate fallback for %s\n",
				     __func__, alg_name);

	crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
				crypto_aead_reqsize(ctx->aead_fbk));

	return 0;
}

static void starfive_aes_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->aead_fbk);
}

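/*
 * The DMA path needs 32-bit aligned buffers and, except for the final
 * entry, block-aligned scatterlist lengths; anything else is routed to
 * the software fallback.
 */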
static bool starfive_aes_check_unaligned(struct starfive_cryp_dev *cryp,
					 struct scatterlist *src,
					 struct scatterlist *dst)
{
	struct scatterlist *tsg;
	int i;

	for_each_sg(src, tsg, sg_nents(src), i)
		if (!IS_ALIGNED(tsg->offset, sizeof(u32)) ||
		    (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
		     !sg_is_last(tsg)))
			return true;

	if (src != dst)
		for_each_sg(dst, tsg, sg_nents(dst), i)
			if (!IS_ALIGNED(tsg->offset, sizeof(u32)) ||
			    (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
			     !sg_is_last(tsg)))
				return true;

	return false;
}

static int starfive_aes_do_fallback(struct skcipher_request *req, bool enc)
{
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	skcipher_request_set_tfm(subreq, ctx->skcipher_fbk);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete,
				      req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int blocksize_align = crypto_skcipher_blocksize(tfm) - 1;

	cryp->flags = flags;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
	    (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
		if (req->cryptlen & blocksize_align)
			return -EINVAL;

	if (starfive_aes_check_unaligned(cryp, req->src, req->dst))
		return starfive_aes_do_fallback(req, is_encrypt(cryp));

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int starfive_aes_aead_do_fallback(struct aead_request *req, bool enc)
{
	struct starfive_cryp_ctx *ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->aead_fbk);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return enc ? crypto_aead_encrypt(subreq) :
		     crypto_aead_decrypt(subreq);
}

static int starfive_aes_aead_crypt(struct aead_request *req, unsigned long flags)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct scatterlist *src, *dst, _src[2], _dst[2];

	cryp->flags = flags;

	/*
	 * The hardware does not support tag verification for
	 * non-block-aligned text in AES-CCM mode, so use the software
	 * fallback for CCM decryption instead.
	 */
	if (((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM) &&
	    !is_encrypt(cryp))
		return starfive_aes_aead_do_fallback(req, 0);

	src = scatterwalk_ffwd(_src, req->src, req->assoclen);

	if (req->src == req->dst)
		dst = src;
	else
		dst = scatterwalk_ffwd(_dst, req->dst, req->assoclen);

	if (starfive_aes_check_unaligned(cryp, src, dst))
		return starfive_aes_aead_do_fallback(req, is_encrypt(cryp));

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int starfive_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (!key || !keylen)
		return -EINVAL;

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return crypto_skcipher_setkey(ctx->skcipher_fbk, key, keylen);
}

static int starfive_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				    unsigned int keylen)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (!key || !keylen)
		return -EINVAL;

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return crypto_aead_setkey(ctx->aead_fbk, key, keylen);
}

static int starfive_aes_gcm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = crypto_gcm_check_authsize(authsize);
	if (ret)
		return ret;

	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
}

static int starfive_aes_ccm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
}

static int starfive_aes_ecb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB | FLG_ENCRYPT);
}

static int starfive_aes_ecb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB);
}

static int starfive_aes_cbc_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC | FLG_ENCRYPT);
}

static int starfive_aes_cbc_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC);
}

static int starfive_aes_ctr_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR | FLG_ENCRYPT);
}

static int starfive_aes_ctr_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR);
}

static int starfive_aes_gcm_encrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM | FLG_ENCRYPT);
}

static int starfive_aes_gcm_decrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM);
}

static int starfive_aes_ccm_encrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM | FLG_ENCRYPT);
}

static int starfive_aes_ccm_decrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM);
}

static int starfive_aes_ecb_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "ecb(aes-generic)");
}

static int starfive_aes_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "cbc(aes-generic)");
}

static int starfive_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "ctr(aes-generic)");
}

static int starfive_aes_ccm_init_tfm(struct crypto_aead *tfm)
{
	return starfive_aes_aead_init_tfm(tfm, "ccm_base(ctr(aes-generic),cbcmac(aes-generic))");
}

static int starfive_aes_gcm_init_tfm(struct crypto_aead *tfm)
{
	return starfive_aes_aead_init_tfm(tfm, "gcm_base(ctr(aes-generic),ghash-generic)");
}

static struct skcipher_engine_alg skcipher_algs[] = {
{
	.base.init			= starfive_aes_ecb_init_tfm,
	.base.exit			= starfive_aes_exit_tfm,
	.base.setkey			= starfive_aes_setkey,
	.base.encrypt			= starfive_aes_ecb_encrypt,
	.base.decrypt			= starfive_aes_ecb_decrypt,
	.base.min_keysize		= AES_MIN_KEY_SIZE,
	.base.max_keysize		= AES_MAX_KEY_SIZE,
	.base.base = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "starfive-ecb-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
}, {
	.base.init			= starfive_aes_cbc_init_tfm,
	.base.exit			= starfive_aes_exit_tfm,
	.base.setkey			= starfive_aes_setkey,
	.base.encrypt			= starfive_aes_cbc_encrypt,
	.base.decrypt			= starfive_aes_cbc_decrypt,
	.base.min_keysize		= AES_MIN_KEY_SIZE,
	.base.max_keysize		= AES_MAX_KEY_SIZE,
	.base.ivsize			= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "cbc(aes)",
		.cra_driver_name	= "starfive-cbc-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
}, {
	.base.init			= starfive_aes_ctr_init_tfm,
	.base.exit			= starfive_aes_exit_tfm,
	.base.setkey			= starfive_aes_setkey,
	.base.encrypt			= starfive_aes_ctr_encrypt,
	.base.decrypt			= starfive_aes_ctr_decrypt,
	.base.min_keysize		= AES_MIN_KEY_SIZE,
	.base.max_keysize		= AES_MAX_KEY_SIZE,
	.base.ivsize			= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "starfive-ctr-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
},
};

static struct aead_engine_alg aead_algs[] = {
{
	.base.setkey			= starfive_aes_aead_setkey,
	.base.setauthsize		= starfive_aes_gcm_setauthsize,
	.base.encrypt			= starfive_aes_gcm_encrypt,
	.base.decrypt			= starfive_aes_gcm_decrypt,
	.base.init			= starfive_aes_gcm_init_tfm,
	.base.exit			= starfive_aes_aead_exit_tfm,
	.base.ivsize			= GCM_AES_IV_SIZE,
	.base.maxauthsize		= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "starfive-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_aead_do_one_req,
	},
}, {
	.base.setkey			= starfive_aes_aead_setkey,
	.base.setauthsize		= starfive_aes_ccm_setauthsize,
	.base.encrypt			= starfive_aes_ccm_encrypt,
	.base.decrypt			= starfive_aes_ccm_decrypt,
	.base.init			= starfive_aes_ccm_init_tfm,
	.base.exit			= starfive_aes_aead_exit_tfm,
	.base.ivsize			= AES_BLOCK_SIZE,
	.base.maxauthsize		= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "starfive-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_aead_do_one_req,
	},
},
};

int starfive_aes_register_algs(void)
{
	int ret;

	ret = crypto_engine_register_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
	if (ret)
		return ret;

	ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));

	return ret;
}

void starfive_aes_unregister_algs(void)
{
	crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}
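
/*
 * Usage sketch (illustrative only, not part of the driver): once the
 * algorithms above are registered, kernel users reach them through the
 * generic crypto API by name, e.g. "cbc(aes)"; at priority 200 the
 * "starfive-*" implementations are preferred over aes-generic when the
 * hardware is present. The key, iv, buf and len variables below are
 * hypothetical, and error handling is omitted:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	int err;
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	sg_init_one(&sg, buf, len);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */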