--- jh7110-aes.c (41c177cf354126a22443b5c80cec9fdd313e67e1)
+++ jh7110-aes.c (7467147ef9bf42d1ea5b3314c7a05cd542b3518e)
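Summary of the change: the newer revision retires the interrupt-driven, one-block-at-a-time FIFO path (starfive_aes_prepare_req() and the starfive_aes_done_task() tasklet) in favour of dmaengine scatterlist transfers, moves request setup into the per-type do_one_request handlers, and gives every AES mode a software fallback transform for requests the hardware path cannot service (scatterlists with non-block-aligned intermediate entries, and CCM decryption).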
 // SPDX-License-Identifier: GPL-2.0
 /*
  * StarFive AES acceleration driver
  *
  * Copyright (c) 2022 StarFive Technology
  */
 
 #include <crypto/engine.h>

--- 64 unchanged lines hidden ---

 				(status & STARFIVE_AES_GCM_DONE), 10, 100000);
 }
 
 static inline int is_gcm(struct starfive_cryp_dev *cryp)
 {
 	return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM;
 }
 
-static inline int is_encrypt(struct starfive_cryp_dev *cryp)
+static inline bool is_encrypt(struct starfive_cryp_dev *cryp)
 {
 	return cryp->flags & FLG_ENCRYPT;
 }
 
 static void starfive_aes_aead_hw_start(struct starfive_cryp_ctx *ctx, u32 hw_mode)
 {
 	struct starfive_cryp_dev *cryp = ctx->cryp;
 	unsigned int value;

--- 8 unchanged lines hidden ---

 	case STARFIVE_AES_MODE_CCM:
 		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
 		value |= STARFIVE_AES_CCM_START;
 		writel(value, cryp->base + STARFIVE_AES_CSR);
 		break;
 	}
 }
 
-static inline void starfive_aes_set_ivlen(struct starfive_cryp_ctx *ctx)
-{
-	struct starfive_cryp_dev *cryp = ctx->cryp;
-
-	if (is_gcm(cryp))
-		writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
-	else
-		writel(AES_BLOCK_SIZE, cryp->base + STARFIVE_AES_IVLEN);
-}
-
 static inline void starfive_aes_set_alen(struct starfive_cryp_ctx *ctx)
 {
 	struct starfive_cryp_dev *cryp = ctx->cryp;
 
 	writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);
 	writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);
 }
 
--- 132 unchanged lines hidden ---

 		break;
 	case AES_KEYSIZE_256:
 		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_256;
 		break;
 	}
 
 	rctx->csr.aes.mode = hw_mode;
 	rctx->csr.aes.cmode = !is_encrypt(cryp);
-	rctx->csr.aes.ie = 1;
 	rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_1;
 
 	if (cryp->side_chan) {
 		rctx->csr.aes.delay_aes = 1;
 		rctx->csr.aes.vaes_start = 1;
 	}
 
 	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);
 
 	cryp->err = starfive_aes_write_key(ctx);
 	if (cryp->err)
 		return cryp->err;
 
 	switch (hw_mode) {
 	case STARFIVE_AES_MODE_GCM:
 		starfive_aes_set_alen(ctx);
 		starfive_aes_set_mlen(ctx);
-		starfive_aes_set_ivlen(ctx);
+		writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
 		starfive_aes_aead_hw_start(ctx, hw_mode);
 		starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv);
 		break;
 	case STARFIVE_AES_MODE_CCM:
 		starfive_aes_set_alen(ctx);
 		starfive_aes_set_mlen(ctx);
 		starfive_aes_ccm_init(ctx);
 		starfive_aes_aead_hw_start(ctx, hw_mode);

--- 4 unchanged lines hidden ---

 		break;
 	default:
 		break;
 	}
 
 	return cryp->err;
 }
 
-static int starfive_aes_read_authtag(struct starfive_cryp_dev *cryp)
+static int starfive_aes_read_authtag(struct starfive_cryp_ctx *ctx)
 {
-	int i, start_addr;
+	struct starfive_cryp_dev *cryp = ctx->cryp;
+	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
+	int i;
 
 	if (starfive_aes_wait_busy(cryp))
 		return dev_err_probe(cryp->dev, -ETIMEDOUT,
 				     "Timeout waiting for tag generation.");
 
-	start_addr = STARFIVE_AES_NONCE0;
-
-	if (is_gcm(cryp))
-		for (i = 0; i < AES_BLOCK_32; i++, start_addr += 4)
-			cryp->tag_out[i] = readl(cryp->base + start_addr);
-	else
+	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM) {
+		cryp->tag_out[0] = readl(cryp->base + STARFIVE_AES_NONCE0);
+		cryp->tag_out[1] = readl(cryp->base + STARFIVE_AES_NONCE1);
+		cryp->tag_out[2] = readl(cryp->base + STARFIVE_AES_NONCE2);
+		cryp->tag_out[3] = readl(cryp->base + STARFIVE_AES_NONCE3);
+	} else {
 		for (i = 0; i < AES_BLOCK_32; i++)
 			cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
+	}
 
 	if (is_encrypt(cryp)) {
-		scatterwalk_copychunks(cryp->tag_out, &cryp->out_walk, cryp->authsize, 1);
+		scatterwalk_map_and_copy(cryp->tag_out, rctx->out_sg,
+					 cryp->total_in, cryp->authsize, 1);
 	} else {
-		scatterwalk_copychunks(cryp->tag_in, &cryp->in_walk, cryp->authsize, 0);
-
 		if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
 			return dev_err_probe(cryp->dev, -EBADMSG, "Failed tag verification\n");
 	}
 
 	return 0;
 }
 
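With the incremental scatterwalk state (cryp->in_walk/out_walk) gone from the DMA path, the tag is now moved with scatterwalk_map_and_copy(), which addresses the scatterlist by absolute byte offset instead of advancing a walker. A minimal sketch of that call's convention; the helper name and parameters below are illustrative, not driver code:

/*
 * Illustrative sketch, not part of jh7110-aes.c.
 * scatterwalk_map_and_copy(buf, sg, start, nbytes, out) copies nbytes
 * between a linear buffer and a scatterlist at byte offset start:
 * out == 1 writes buf into the sg, out == 0 reads the sg into buf.
 */
#include <crypto/scatterwalk.h>

static void tag_copy_sketch(struct scatterlist *sg, void *tag,
			    unsigned int payload_len, unsigned int authsize)
{
	/* encrypt: append the generated tag right after the ciphertext */
	scatterwalk_map_and_copy(tag, sg, payload_len, authsize, 1);

	/* decrypt: read the expected tag back out for comparison */
	scatterwalk_map_and_copy(tag, sg, payload_len, authsize, 0);
}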
-static void starfive_aes_finish_req(struct starfive_cryp_dev *cryp)
+static void starfive_aes_finish_req(struct starfive_cryp_ctx *ctx)
 {
-	union starfive_aes_csr csr;
+	struct starfive_cryp_dev *cryp = ctx->cryp;
 	int err = cryp->err;
 
 	if (!err && cryp->authsize)
-		err = starfive_aes_read_authtag(cryp);
+		err = starfive_aes_read_authtag(ctx);
 
 	if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC ||
 		     (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR))
 		starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv);
 
-	/* reset irq flags*/
-	csr.v = 0;
-	csr.aesrst = 1;
-	writel(csr.v, cryp->base + STARFIVE_AES_CSR);
-
 	if (cryp->authsize)
 		crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err);
 	else
 		crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq,
 						 err);
 }
 
-void starfive_aes_done_task(unsigned long param)
-{
-	struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)param;
-	u32 block[AES_BLOCK_32];
-	u32 stat;
-	int i;
-
-	for (i = 0; i < AES_BLOCK_32; i++)
-		block[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
-
-	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, AES_BLOCK_SIZE,
-							     cryp->total_out), 1);
-
-	cryp->total_out -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_out);
-
-	if (!cryp->total_out) {
-		starfive_aes_finish_req(cryp);
-		return;
-	}
-
-	memset(block, 0, AES_BLOCK_SIZE);
-	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
-							    cryp->total_in), 0);
-	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);
-
-	for (i = 0; i < AES_BLOCK_32; i++)
-		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);
-
-	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
-	stat &= ~STARFIVE_IE_MASK_AES_DONE;
-	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
-}
-
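The tasklet above is the core of what gets removed: with DMA, completion is signalled once per transfer through the dmaengine callback (starfive_aes_dma_done() further down) instead of once per 16-byte block through the AES-done interrupt, so the block-feeding loop and the interrupt-mask handling disappear with it.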
 static int starfive_aes_gcm_write_adata(struct starfive_cryp_ctx *ctx)
 {
 	struct starfive_cryp_dev *cryp = ctx->cryp;
 	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
 	u32 *buffer;
 	int total_len, loop;
 
 	total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(unsigned int);

--- 49 unchanged lines hidden ---

 	if (starfive_aes_wait_busy(cryp))
 		return dev_err_probe(cryp->dev, -ETIMEDOUT,
 				     "Timeout processing ccm aad block");
 
 	return 0;
 }
 
-static int starfive_aes_prepare_req(struct skcipher_request *req,
-				    struct aead_request *areq)
-{
-	struct starfive_cryp_ctx *ctx;
-	struct starfive_cryp_request_ctx *rctx;
-	struct starfive_cryp_dev *cryp;
-
-	if (!req && !areq)
-		return -EINVAL;
-
-	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
-		    crypto_aead_ctx(crypto_aead_reqtfm(areq));
-
-	cryp = ctx->cryp;
-	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
-
-	if (req) {
-		cryp->req.sreq = req;
-		cryp->total_in = req->cryptlen;
-		cryp->total_out = req->cryptlen;
-		cryp->assoclen = 0;
-		cryp->authsize = 0;
-	} else {
-		cryp->req.areq = areq;
-		cryp->assoclen = areq->assoclen;
-		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
-		if (is_encrypt(cryp)) {
-			cryp->total_in = areq->cryptlen;
-			cryp->total_out = areq->cryptlen;
-		} else {
-			cryp->total_in = areq->cryptlen - cryp->authsize;
-			cryp->total_out = cryp->total_in;
-		}
-	}
-
-	rctx->in_sg = req ? req->src : areq->src;
-	scatterwalk_start(&cryp->in_walk, rctx->in_sg);
-
-	rctx->out_sg = req ? req->dst : areq->dst;
-	scatterwalk_start(&cryp->out_walk, rctx->out_sg);
-
-	if (cryp->assoclen) {
-		rctx->adata = kzalloc(cryp->assoclen + AES_BLOCK_SIZE, GFP_KERNEL);
-		if (!rctx->adata)
-			return dev_err_probe(cryp->dev, -ENOMEM,
-					     "Failed to alloc memory for adata");
-
-		scatterwalk_copychunks(rctx->adata, &cryp->in_walk, cryp->assoclen, 0);
-		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->assoclen, 2);
-	}
-
-	ctx->rctx = rctx;
-
-	return starfive_aes_hw_init(ctx);
-}
+static void starfive_aes_dma_done(void *param)
+{
+	struct starfive_cryp_dev *cryp = param;
+
+	complete(&cryp->dma_done);
+}
+
+static void starfive_aes_dma_init(struct starfive_cryp_dev *cryp)
+{
+	cryp->cfg_in.direction = DMA_MEM_TO_DEV;
+	cryp->cfg_in.src_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
+	cryp->cfg_in.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
+	cryp->cfg_in.src_maxburst = cryp->dma_maxburst;
+	cryp->cfg_in.dst_maxburst = cryp->dma_maxburst;
+	cryp->cfg_in.dst_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;
+
+	dmaengine_slave_config(cryp->tx, &cryp->cfg_in);
+
+	cryp->cfg_out.direction = DMA_DEV_TO_MEM;
+	cryp->cfg_out.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
+	cryp->cfg_out.dst_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
+	cryp->cfg_out.src_maxburst = 4;
+	cryp->cfg_out.dst_maxburst = 4;
+	cryp->cfg_out.src_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;
+
+	dmaengine_slave_config(cryp->rx, &cryp->cfg_out);
+
+	init_completion(&cryp->dma_done);
+}
+
+static int starfive_aes_dma_xfer(struct starfive_cryp_dev *cryp,
+				 struct scatterlist *src,
+				 struct scatterlist *dst,
+				 int len)
+{
+	struct dma_async_tx_descriptor *in_desc, *out_desc;
+	union starfive_alg_cr alg_cr;
+	int ret = 0, in_save, out_save;
+
+	alg_cr.v = 0;
+	alg_cr.start = 1;
+	alg_cr.aes_dma_en = 1;
+	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);
+
+	in_save = sg_dma_len(src);
+	out_save = sg_dma_len(dst);
+
+	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_IN_LEN_OFFSET);
+	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_OUT_LEN_OFFSET);
+
+	sg_dma_len(src) = ALIGN(len, AES_BLOCK_SIZE);
+	sg_dma_len(dst) = ALIGN(len, AES_BLOCK_SIZE);
+
+	out_desc = dmaengine_prep_slave_sg(cryp->rx, dst, 1, DMA_DEV_TO_MEM,
+					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	if (!out_desc) {
+		ret = -EINVAL;
+		goto dma_err;
+	}
+
+	out_desc->callback = starfive_aes_dma_done;
+	out_desc->callback_param = cryp;
+
+	reinit_completion(&cryp->dma_done);
+	dmaengine_submit(out_desc);
+	dma_async_issue_pending(cryp->rx);
+
+	in_desc = dmaengine_prep_slave_sg(cryp->tx, src, 1, DMA_MEM_TO_DEV,
+					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	if (!in_desc) {
+		ret = -EINVAL;
+		goto dma_err;
+	}
+
+	dmaengine_submit(in_desc);
+	dma_async_issue_pending(cryp->tx);
+
+	if (!wait_for_completion_timeout(&cryp->dma_done,
+					 msecs_to_jiffies(1000)))
+		ret = -ETIMEDOUT;
+
+dma_err:
+	sg_dma_len(src) = in_save;
+	sg_dma_len(dst) = out_save;
+
+	alg_cr.v = 0;
+	alg_cr.clear = 1;
+	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);
+
+	return ret;
+}
+
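starfive_aes_dma_xfer() follows the standard dmaengine slave pattern: prepare a descriptor over the scatterlist, attach a completion callback, submit, kick the channel, and block with a timeout. A stripped-down sketch of that pattern; chan, cfg and done are illustrative placeholders here, not driver fields:

/* Illustrative sketch, not part of jh7110-aes.c. */
#include <linux/completion.h>
#include <linux/dmaengine.h>

static void xfer_done(void *param)
{
	complete(param);	/* runs in dmaengine callback context */
}

static int slave_sg_xfer(struct dma_chan *chan, struct dma_slave_config *cfg,
			 struct scatterlist *sg, struct completion *done)
{
	struct dma_async_tx_descriptor *desc;

	dmaengine_slave_config(chan, cfg);

	desc = dmaengine_prep_slave_sg(chan, sg, 1, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EINVAL;

	desc->callback = xfer_done;
	desc->callback_param = done;

	dmaengine_submit(desc);		/* queue the descriptor */
	dma_async_issue_pending(chan);	/* start the channel */

	if (!wait_for_completion_timeout(done, msecs_to_jiffies(1000)))
		return -ETIMEDOUT;

	return 0;
}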
+static int starfive_aes_map_sg(struct starfive_cryp_dev *cryp,
+			       struct scatterlist *src,
+			       struct scatterlist *dst)
+{
+	struct scatterlist *stsg, *dtsg;
+	struct scatterlist _src[2], _dst[2];
+	unsigned int remain = cryp->total_in;
+	unsigned int len, src_nents, dst_nents;
+	int ret;
+
+	if (src == dst) {
+		for (stsg = src, dtsg = dst; remain > 0;
+		     stsg = sg_next(stsg), dtsg = sg_next(dtsg)) {
+			src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
+			if (src_nents == 0)
+				return dev_err_probe(cryp->dev, -ENOMEM,
+						     "dma_map_sg error\n");
+
+			dst_nents = src_nents;
+			len = min(sg_dma_len(stsg), remain);
+
+			ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
+			dma_unmap_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
+			if (ret)
+				return ret;
+
+			remain -= len;
+		}
+	} else {
+		for (stsg = src, dtsg = dst;;) {
+			src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
+			if (src_nents == 0)
+				return dev_err_probe(cryp->dev, -ENOMEM,
+						     "dma_map_sg src error\n");
+
+			dst_nents = dma_map_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
+			if (dst_nents == 0)
+				return dev_err_probe(cryp->dev, -ENOMEM,
+						     "dma_map_sg dst error\n");
+
+			len = min(sg_dma_len(stsg), sg_dma_len(dtsg));
+			len = min(len, remain);
+
+			ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
+			dma_unmap_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
+			dma_unmap_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
+			if (ret)
+				return ret;
+
+			remain -= len;
+			if (remain == 0)
+				break;
+
+			if (sg_dma_len(stsg) - len) {
+				stsg = scatterwalk_ffwd(_src, stsg, len);
+				dtsg = sg_next(dtsg);
+			} else if (sg_dma_len(dtsg) - len) {
+				dtsg = scatterwalk_ffwd(_dst, dtsg, len);
+				stsg = sg_next(stsg);
+			} else {
+				stsg = sg_next(stsg);
+				dtsg = sg_next(dtsg);
+			}
+		}
+	}
+
+	return 0;
+}
+
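Design note on the chunking above: src and dst may carve the same total length into differently-sized entries, so after each transfer the side with bytes left over in its current entry is fast-forwarded with scatterwalk_ffwd() while the other side advances to its next entry. For example (hypothetical lengths), src entries of 32+16 bytes against a single 48-byte dst entry yield two transfers, 32 then 16 bytes, with the dst scatterlist fast-forwarded by 32 bytes in between. Each transfer length is rounded up to AES_BLOCK_SIZE inside starfive_aes_dma_xfer(), which is why sg_dma_len is saved and restored there.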
 static int starfive_aes_do_one_req(struct crypto_engine *engine, void *areq)
 {
 	struct skcipher_request *req =
 		container_of(areq, struct skcipher_request, base);
 	struct starfive_cryp_ctx *ctx =
 		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
+	struct starfive_cryp_request_ctx *rctx = skcipher_request_ctx(req);
 	struct starfive_cryp_dev *cryp = ctx->cryp;
-	u32 block[AES_BLOCK_32];
-	u32 stat;
-	int err;
-	int i;
+	int ret;
 
-	err = starfive_aes_prepare_req(req, NULL);
-	if (err)
-		return err;
+	cryp->req.sreq = req;
+	cryp->total_in = req->cryptlen;
+	cryp->total_out = req->cryptlen;
+	cryp->assoclen = 0;
+	cryp->authsize = 0;
 
-	/*
-	 * Write first plain/ciphertext block to start the module
-	 * then let irq tasklet handle the rest of the data blocks.
-	 */
-	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
-							    cryp->total_in), 0);
-	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);
+	rctx->in_sg = req->src;
+	rctx->out_sg = req->dst;
 
-	for (i = 0; i < AES_BLOCK_32; i++)
-		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);
+	ctx->rctx = rctx;
 
-	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
-	stat &= ~STARFIVE_IE_MASK_AES_DONE;
-	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
+	ret = starfive_aes_hw_init(ctx);
+	if (ret)
+		return ret;
+
+	starfive_aes_dma_init(cryp);
+
+	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
+	if (ret)
+		return ret;
+
+	starfive_aes_finish_req(ctx);
 
 	return 0;
 }
 
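For context, this handler runs off the crypto engine queue; a caller reaches it through the generic kernel crypto API. A hedged usage sketch (illustrative, not part of this file) of the standard sequence that ends up in starfive_aes_do_one_req() when this driver's "cbc(aes)" implementation is selected:

/* Illustrative sketch, not part of jh7110-aes.c. */
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int cbc_aes_oneshot(const u8 *key, unsigned int keylen,
			   u8 *iv, void *buf, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_skcipher(tfm);
		return -ENOMEM;
	}

	ret = crypto_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out;

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* the async engine path returns -EINPROGRESS; wait for completion */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
out:
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	return ret;
}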
-static int starfive_aes_init_tfm(struct crypto_skcipher *tfm)
+static int starfive_aes_init_tfm(struct crypto_skcipher *tfm,
+				 const char *alg_name)
 {
 	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	ctx->cryp = starfive_cryp_find_dev(ctx);
 	if (!ctx->cryp)
 		return -ENODEV;
 
+	ctx->skcipher_fbk = crypto_alloc_skcipher(alg_name, 0,
+						  CRYPTO_ALG_NEED_FALLBACK);
+	if (IS_ERR(ctx->skcipher_fbk))
+		return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->skcipher_fbk),
+				     "%s() failed to allocate fallback for %s\n",
+				     __func__, alg_name);
+
 	crypto_skcipher_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
-				    sizeof(struct skcipher_request));
+				    crypto_skcipher_reqsize(ctx->skcipher_fbk));
 
 	return 0;
 }
 
+static void starfive_aes_exit_tfm(struct crypto_skcipher *tfm)
+{
+	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	crypto_free_skcipher(ctx->skcipher_fbk);
+}
+
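The reqsize bookkeeping here matters: the request context size now includes crypto_skcipher_reqsize() of the fallback, so the fallback sub-request used by starfive_aes_do_fallback() further down can live inside this request's own context (skcipher_request_ctx(req)) without a separate allocation.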
 static int starfive_aes_aead_do_one_req(struct crypto_engine *engine, void *areq)
 {
 	struct aead_request *req =
 		container_of(areq, struct aead_request, base);
 	struct starfive_cryp_ctx *ctx =
 		crypto_aead_ctx(crypto_aead_reqtfm(req));
 	struct starfive_cryp_dev *cryp = ctx->cryp;
-	struct starfive_cryp_request_ctx *rctx;
-	u32 block[AES_BLOCK_32];
-	u32 stat;
-	int err;
-	int i;
+	struct starfive_cryp_request_ctx *rctx = aead_request_ctx(req);
+	struct scatterlist _src[2], _dst[2];
+	int ret;
 
-	err = starfive_aes_prepare_req(NULL, req);
-	if (err)
-		return err;
+	cryp->req.areq = req;
+	cryp->assoclen = req->assoclen;
+	cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));
 
-	rctx = ctx->rctx;
+	rctx->in_sg = scatterwalk_ffwd(_src, req->src, cryp->assoclen);
+	if (req->src == req->dst)
+		rctx->out_sg = rctx->in_sg;
+	else
+		rctx->out_sg = scatterwalk_ffwd(_dst, req->dst, cryp->assoclen);
 
+	if (is_encrypt(cryp)) {
+		cryp->total_in = req->cryptlen;
+		cryp->total_out = req->cryptlen;
+	} else {
+		cryp->total_in = req->cryptlen - cryp->authsize;
+		cryp->total_out = cryp->total_in;
+		scatterwalk_map_and_copy(cryp->tag_in, req->src,
+					 cryp->total_in + cryp->assoclen,
+					 cryp->authsize, 0);
+	}
+
+	if (cryp->assoclen) {
+		rctx->adata = kzalloc(cryp->assoclen + AES_BLOCK_SIZE, GFP_KERNEL);
+		if (!rctx->adata)
+			return dev_err_probe(cryp->dev, -ENOMEM,
+					     "Failed to alloc memory for adata");
+
+		if (sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, cryp->assoclen),
+				      rctx->adata, cryp->assoclen) != cryp->assoclen)
+			return -EINVAL;
+	}
+
+	if (cryp->total_in)
+		sg_zero_buffer(rctx->in_sg, sg_nents(rctx->in_sg),
+			       sg_dma_len(rctx->in_sg) - cryp->total_in,
+			       cryp->total_in);
+
+	ctx->rctx = rctx;
+
+	ret = starfive_aes_hw_init(ctx);
+	if (ret)
+		return ret;
+
 	if (!cryp->assoclen)
 		goto write_text;
 
 	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM)
-		cryp->err = starfive_aes_ccm_write_adata(ctx);
+		ret = starfive_aes_ccm_write_adata(ctx);
 	else
-		cryp->err = starfive_aes_gcm_write_adata(ctx);
+		ret = starfive_aes_gcm_write_adata(ctx);
 
 	kfree(rctx->adata);
 
-	if (cryp->err)
-		return cryp->err;
+	if (ret)
+		return ret;
 
 write_text:
 	if (!cryp->total_in)
 		goto finish_req;
 
-	/*
-	 * Write first plain/ciphertext block to start the module
-	 * then let irq tasklet handle the rest of the data blocks.
-	 */
-	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
-							    cryp->total_in), 0);
-	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);
-
-	for (i = 0; i < AES_BLOCK_32; i++)
-		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);
-
-	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
-	stat &= ~STARFIVE_IE_MASK_AES_DONE;
-	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
-
-	return 0;
-
+	starfive_aes_dma_init(cryp);
+
+	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
+	if (ret)
+		return ret;
+
 finish_req:
-	starfive_aes_finish_req(cryp);
+	starfive_aes_finish_req(ctx);
 	return 0;
 }
 
-static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm)
+static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm,
+				      const char *alg_name)
 {
 	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
-	struct starfive_cryp_dev *cryp = ctx->cryp;
-	struct crypto_tfm *aead = crypto_aead_tfm(tfm);
-	struct crypto_alg *alg = aead->__crt_alg;
 
 	ctx->cryp = starfive_cryp_find_dev(ctx);
 	if (!ctx->cryp)
 		return -ENODEV;
 
-	if (alg->cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
-		ctx->aead_fbk = crypto_alloc_aead(alg->cra_name, 0,
-						  CRYPTO_ALG_NEED_FALLBACK);
-		if (IS_ERR(ctx->aead_fbk))
-			return dev_err_probe(cryp->dev, PTR_ERR(ctx->aead_fbk),
-					     "%s() failed to allocate fallback for %s\n",
-					     __func__, alg->cra_name);
-	}
+	ctx->aead_fbk = crypto_alloc_aead(alg_name, 0,
+					  CRYPTO_ALG_NEED_FALLBACK);
+	if (IS_ERR(ctx->aead_fbk))
+		return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->aead_fbk),
+				     "%s() failed to allocate fallback for %s\n",
+				     __func__, alg_name);
 
-	crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_ctx) +
-				sizeof(struct aead_request));
+	crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
+				crypto_aead_reqsize(ctx->aead_fbk));
 
 	return 0;
 }
 
 static void starfive_aes_aead_exit_tfm(struct crypto_aead *tfm)
 {
 	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
 
 	crypto_free_aead(ctx->aead_fbk);
 }
 
+static bool starfive_aes_check_unaligned(struct starfive_cryp_dev *cryp,
+					 struct scatterlist *src,
+					 struct scatterlist *dst)
+{
+	struct scatterlist *tsg;
+	int i;
+
+	for_each_sg(src, tsg, sg_nents(src), i)
+		if (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
+		    !sg_is_last(tsg))
+			return true;
+
+	if (src != dst)
+		for_each_sg(dst, tsg, sg_nents(dst), i)
+			if (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
+			    !sg_is_last(tsg))
+				return true;
+
+	return false;
+}
+
+static int starfive_aes_do_fallback(struct skcipher_request *req, bool enc)
+{
+	struct starfive_cryp_ctx *ctx =
+		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
+	struct skcipher_request *subreq = skcipher_request_ctx(req);
+
+	skcipher_request_set_tfm(subreq, ctx->skcipher_fbk);
+	skcipher_request_set_callback(subreq, req->base.flags,
+				      req->base.complete,
+				      req->base.data);
+	skcipher_request_set_crypt(subreq, req->src, req->dst,
+				   req->cryptlen, req->iv);
+
+	return enc ? crypto_skcipher_encrypt(subreq) :
+		     crypto_skcipher_decrypt(subreq);
+}
+
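Rationale, as implied by starfive_aes_dma_xfer() rounding every chunk up to AES_BLOCK_SIZE: a scatterlist entry that is not block-aligned can only be tolerated if it is the final one, since padding in the middle of the stream would corrupt the data. Requests with unaligned intermediate entries are therefore bounced to the software fallback.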
 static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct starfive_cryp_dev *cryp = ctx->cryp;
 	unsigned int blocksize_align = crypto_skcipher_blocksize(tfm) - 1;
 
 	cryp->flags = flags;
 
 	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
 	    (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
 		if (req->cryptlen & blocksize_align)
 			return -EINVAL;
 
+	if (starfive_aes_check_unaligned(cryp, req->src, req->dst))
+		return starfive_aes_do_fallback(req, is_encrypt(cryp));
+
 	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
 }
 
+static int starfive_aes_aead_do_fallback(struct aead_request *req, bool enc)
+{
+	struct starfive_cryp_ctx *ctx =
+		crypto_aead_ctx(crypto_aead_reqtfm(req));
+	struct aead_request *subreq = aead_request_ctx(req);
+
+	aead_request_set_tfm(subreq, ctx->aead_fbk);
+	aead_request_set_callback(subreq, req->base.flags,
+				  req->base.complete,
+				  req->base.data);
+	aead_request_set_crypt(subreq, req->src, req->dst,
+			       req->cryptlen, req->iv);
+	aead_request_set_ad(subreq, req->assoclen);
+
+	return enc ? crypto_aead_encrypt(subreq) :
+		     crypto_aead_decrypt(subreq);
+}
+
 static int starfive_aes_aead_crypt(struct aead_request *req, unsigned long flags)
 {
 	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
 	struct starfive_cryp_dev *cryp = ctx->cryp;
+	struct scatterlist *src, *dst, _src[2], _dst[2];
 
 	cryp->flags = flags;
 
-	/*
-	 * HW engine could not perform CCM tag verification on
-	 * non-blocksize aligned text, use fallback algo instead
+	/* aes-ccm does not support tag verification for non-aligned text,
+	 * use fallback for ccm decryption instead.
 	 */
-	if (ctx->aead_fbk && !is_encrypt(cryp)) {
-		struct aead_request *subreq = aead_request_ctx(req);
-
-		aead_request_set_tfm(subreq, ctx->aead_fbk);
-		aead_request_set_callback(subreq, req->base.flags,
-					  req->base.complete, req->base.data);
-		aead_request_set_crypt(subreq, req->src,
-				       req->dst, req->cryptlen, req->iv);
-		aead_request_set_ad(subreq, req->assoclen);
-
-		return crypto_aead_decrypt(subreq);
-	}
+	if (((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM) &&
+	    !is_encrypt(cryp))
+		return starfive_aes_aead_do_fallback(req, 0);
+
+	src = scatterwalk_ffwd(_src, req->src, req->assoclen);
+
+	if (req->src == req->dst)
+		dst = src;
+	else
+		dst = scatterwalk_ffwd(_dst, req->dst, req->assoclen);
+
+	if (starfive_aes_check_unaligned(cryp, src, dst))
+		return starfive_aes_aead_do_fallback(req, is_encrypt(cryp));
 
 	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
 }
 
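Note the narrowing here: previously every AEAD decryption was routed to the fallback whenever one existed, whereas now only CCM decryption is, evidently because GCM tag verification can stay on the hardware path; unaligned scatterlists are caught separately by starfive_aes_check_unaligned().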
 static int starfive_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			       unsigned int keylen)
 {
 	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	if (!key || !keylen)
 		return -EINVAL;
 
 	if (keylen != AES_KEYSIZE_128 &&
 	    keylen != AES_KEYSIZE_192 &&
 	    keylen != AES_KEYSIZE_256)
 		return -EINVAL;
 
 	memcpy(ctx->key, key, keylen);
 	ctx->keylen = keylen;
 
-	return 0;
+	return crypto_skcipher_setkey(ctx->skcipher_fbk, key, keylen);
 }
 
 static int starfive_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
 				    unsigned int keylen)
 {
 	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
 
 	if (!key || !keylen)
 		return -EINVAL;
 
 	if (keylen != AES_KEYSIZE_128 &&
 	    keylen != AES_KEYSIZE_192 &&
 	    keylen != AES_KEYSIZE_256)
 		return -EINVAL;
 
 	memcpy(ctx->key, key, keylen);
 	ctx->keylen = keylen;
 
-	if (ctx->aead_fbk)
-		return crypto_aead_setkey(ctx->aead_fbk, key, keylen);
-
-	return 0;
+	return crypto_aead_setkey(ctx->aead_fbk, key, keylen);
 }
 
 static int starfive_aes_gcm_setauthsize(struct crypto_aead *tfm,
 					unsigned int authsize)
 {
-	return crypto_gcm_check_authsize(authsize);
+	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
+	int ret;
+
+	ret = crypto_gcm_check_authsize(authsize);
+	if (ret)
+		return ret;
+
+	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
 }
 
 static int starfive_aes_ccm_setauthsize(struct crypto_aead *tfm,
 					unsigned int authsize)
 {
 	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
 
 	switch (authsize) {

--- 69 unchanged lines hidden ---

 	ret = starfive_aes_ccm_check_iv(req->iv);
 	if (ret)
 		return ret;
 
 	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM);
 }
 
+static int starfive_aes_ecb_init_tfm(struct crypto_skcipher *tfm)
+{
+	return starfive_aes_init_tfm(tfm, "ecb(aes-generic)");
+}
+
+static int starfive_aes_cbc_init_tfm(struct crypto_skcipher *tfm)
+{
+	return starfive_aes_init_tfm(tfm, "cbc(aes-generic)");
+}
+
+static int starfive_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
+{
+	return starfive_aes_init_tfm(tfm, "ctr(aes-generic)");
+}
+
+static int starfive_aes_ccm_init_tfm(struct crypto_aead *tfm)
+{
+	return starfive_aes_aead_init_tfm(tfm, "ccm_base(ctr(aes-generic),cbcmac(aes-generic))");
+}
+
+static int starfive_aes_gcm_init_tfm(struct crypto_aead *tfm)
+{
+	return starfive_aes_aead_init_tfm(tfm, "gcm_base(ctr(aes-generic),ghash-generic)");
+}
+
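These wrappers pin each fallback to an explicit generic implementation (e.g. "gcm_base(ctr(aes-generic),ghash-generic)") rather than the plain template name, which makes the fallback choice deterministic and keeps it from resolving to another hardware driver.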
 static struct skcipher_engine_alg skcipher_algs[] = {
 {
-	.base.init			= starfive_aes_init_tfm,
+	.base.init			= starfive_aes_ecb_init_tfm,
+	.base.exit			= starfive_aes_exit_tfm,
 	.base.setkey			= starfive_aes_setkey,
 	.base.encrypt			= starfive_aes_ecb_encrypt,
 	.base.decrypt			= starfive_aes_ecb_decrypt,
 	.base.min_keysize		= AES_MIN_KEY_SIZE,
 	.base.max_keysize		= AES_MAX_KEY_SIZE,
 	.base.base = {
 		.cra_name		= "ecb(aes)",
 		.cra_driver_name	= "starfive-ecb-aes",
 		.cra_priority		= 200,
-		.cra_flags		= CRYPTO_ALG_ASYNC,
+		.cra_flags		= CRYPTO_ALG_ASYNC |
+					  CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize		= AES_BLOCK_SIZE,
 		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
 		.cra_alignmask		= 0xf,
 		.cra_module		= THIS_MODULE,
 	},
 	.op = {
 		.do_one_request = starfive_aes_do_one_req,
 	},
 }, {
-	.base.init			= starfive_aes_init_tfm,
+	.base.init			= starfive_aes_cbc_init_tfm,
+	.base.exit			= starfive_aes_exit_tfm,
 	.base.setkey			= starfive_aes_setkey,
 	.base.encrypt			= starfive_aes_cbc_encrypt,
 	.base.decrypt			= starfive_aes_cbc_decrypt,
 	.base.min_keysize		= AES_MIN_KEY_SIZE,
 	.base.max_keysize		= AES_MAX_KEY_SIZE,
 	.base.ivsize			= AES_BLOCK_SIZE,
 	.base.base = {
 		.cra_name		= "cbc(aes)",
 		.cra_driver_name	= "starfive-cbc-aes",
 		.cra_priority		= 200,
-		.cra_flags		= CRYPTO_ALG_ASYNC,
+		.cra_flags		= CRYPTO_ALG_ASYNC |
+					  CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize		= AES_BLOCK_SIZE,
 		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
 		.cra_alignmask		= 0xf,
 		.cra_module		= THIS_MODULE,
 	},
 	.op = {
 		.do_one_request = starfive_aes_do_one_req,
 	},
 }, {
-	.base.init			= starfive_aes_init_tfm,
+	.base.init			= starfive_aes_ctr_init_tfm,
+	.base.exit			= starfive_aes_exit_tfm,
 	.base.setkey			= starfive_aes_setkey,
 	.base.encrypt			= starfive_aes_ctr_encrypt,
 	.base.decrypt			= starfive_aes_ctr_decrypt,
 	.base.min_keysize		= AES_MIN_KEY_SIZE,
 	.base.max_keysize		= AES_MAX_KEY_SIZE,
 	.base.ivsize			= AES_BLOCK_SIZE,
 	.base.base = {
 		.cra_name		= "ctr(aes)",
 		.cra_driver_name	= "starfive-ctr-aes",
 		.cra_priority		= 200,
-		.cra_flags		= CRYPTO_ALG_ASYNC,
+		.cra_flags		= CRYPTO_ALG_ASYNC |
+					  CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize		= 1,
 		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
 		.cra_alignmask		= 0xf,
 		.cra_module		= THIS_MODULE,
 	},
 	.op = {
 		.do_one_request = starfive_aes_do_one_req,
 	},
 },
 };
 
 static struct aead_engine_alg aead_algs[] = {
 {
 	.base.setkey			= starfive_aes_aead_setkey,
 	.base.setauthsize		= starfive_aes_gcm_setauthsize,
 	.base.encrypt			= starfive_aes_gcm_encrypt,
 	.base.decrypt			= starfive_aes_gcm_decrypt,
-	.base.init			= starfive_aes_aead_init_tfm,
+	.base.init			= starfive_aes_gcm_init_tfm,
 	.base.exit			= starfive_aes_aead_exit_tfm,
 	.base.ivsize			= GCM_AES_IV_SIZE,
 	.base.maxauthsize		= AES_BLOCK_SIZE,
 	.base.base = {
 		.cra_name		= "gcm(aes)",
 		.cra_driver_name	= "starfive-gcm-aes",
 		.cra_priority		= 200,
-		.cra_flags		= CRYPTO_ALG_ASYNC,
+		.cra_flags		= CRYPTO_ALG_ASYNC |
+					  CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize		= 1,
 		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
 		.cra_alignmask		= 0xf,
 		.cra_module		= THIS_MODULE,
 	},
 	.op = {
 		.do_one_request = starfive_aes_aead_do_one_req,
 	},
 }, {
 	.base.setkey			= starfive_aes_aead_setkey,
 	.base.setauthsize		= starfive_aes_ccm_setauthsize,
 	.base.encrypt			= starfive_aes_ccm_encrypt,
 	.base.decrypt			= starfive_aes_ccm_decrypt,
-	.base.init			= starfive_aes_aead_init_tfm,
+	.base.init			= starfive_aes_ccm_init_tfm,
 	.base.exit			= starfive_aes_aead_exit_tfm,
 	.base.ivsize			= AES_BLOCK_SIZE,
 	.base.maxauthsize		= AES_BLOCK_SIZE,
 	.base.base = {
 		.cra_name		= "ccm(aes)",
 		.cra_driver_name	= "starfive-ccm-aes",
 		.cra_priority		= 200,
 		.cra_flags		= CRYPTO_ALG_ASYNC |

--- 32 unchanged lines hidden ---