// SPDX-License-Identifier: GPL-2.0
/*
 * StarFive AES acceleration driver
 *
 * Copyright (c) 2022 StarFive Technology
 */

#include <crypto/engine.h>
#include <crypto/gcm.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include "jh7110-cryp.h"
#include <linux/err.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/string.h>

#define STARFIVE_AES_REGS_OFFSET	0x100
#define STARFIVE_AES_AESDIO0R		(STARFIVE_AES_REGS_OFFSET + 0x0)
#define STARFIVE_AES_KEY0		(STARFIVE_AES_REGS_OFFSET + 0x4)
#define STARFIVE_AES_KEY1		(STARFIVE_AES_REGS_OFFSET + 0x8)
#define STARFIVE_AES_KEY2		(STARFIVE_AES_REGS_OFFSET + 0xC)
#define STARFIVE_AES_KEY3		(STARFIVE_AES_REGS_OFFSET + 0x10)
#define STARFIVE_AES_KEY4		(STARFIVE_AES_REGS_OFFSET + 0x14)
#define STARFIVE_AES_KEY5		(STARFIVE_AES_REGS_OFFSET + 0x18)
#define STARFIVE_AES_KEY6		(STARFIVE_AES_REGS_OFFSET + 0x1C)
#define STARFIVE_AES_KEY7		(STARFIVE_AES_REGS_OFFSET + 0x20)
#define STARFIVE_AES_CSR		(STARFIVE_AES_REGS_OFFSET + 0x24)
#define STARFIVE_AES_IV0		(STARFIVE_AES_REGS_OFFSET + 0x28)
#define STARFIVE_AES_IV1		(STARFIVE_AES_REGS_OFFSET + 0x2C)
#define STARFIVE_AES_IV2		(STARFIVE_AES_REGS_OFFSET + 0x30)
#define STARFIVE_AES_IV3		(STARFIVE_AES_REGS_OFFSET + 0x34)
#define STARFIVE_AES_NONCE0		(STARFIVE_AES_REGS_OFFSET + 0x3C)
#define STARFIVE_AES_NONCE1		(STARFIVE_AES_REGS_OFFSET + 0x40)
#define STARFIVE_AES_NONCE2		(STARFIVE_AES_REGS_OFFSET + 0x44)
#define STARFIVE_AES_NONCE3		(STARFIVE_AES_REGS_OFFSET + 0x48)
#define STARFIVE_AES_ALEN0		(STARFIVE_AES_REGS_OFFSET + 0x4C)
#define STARFIVE_AES_ALEN1		(STARFIVE_AES_REGS_OFFSET + 0x50)
#define STARFIVE_AES_MLEN0		(STARFIVE_AES_REGS_OFFSET + 0x54)
#define STARFIVE_AES_MLEN1		(STARFIVE_AES_REGS_OFFSET + 0x58)
#define STARFIVE_AES_IVLEN		(STARFIVE_AES_REGS_OFFSET + 0x5C)

#define FLG_MODE_MASK			GENMASK(2, 0)
#define FLG_ENCRYPT			BIT(4)

/* Misc */
#define CCM_B0_ADATA			0x40
#define AES_BLOCK_32			(AES_BLOCK_SIZE / sizeof(u32))

static inline int starfive_aes_wait_busy(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  !(status & STARFIVE_AES_BUSY), 10, 100000);
}

static inline int starfive_aes_wait_keydone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_KEY_DONE), 10, 100000);
}

static inline int starfive_aes_wait_gcmdone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_GCM_DONE), 10, 100000);
}

static inline int is_gcm(struct starfive_cryp_dev *cryp)
{
	return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM;
}

static inline bool is_encrypt(struct starfive_cryp_dev *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}
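
/*
 * Kick off the hardware AEAD state machine. For GCM the engine signals
 * GCM_DONE in the CSR once the start sequence has completed, so it is
 * polled here before any further programming; CCM appears to run on
 * from CCM_START without a comparable handshake.
 */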
static void starfive_aes_aead_hw_start(struct starfive_cryp_ctx *ctx, u32 hw_mode)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int value;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_GCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		starfive_aes_wait_gcmdone(cryp);
		break;
	case STARFIVE_AES_MODE_CCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_CCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		break;
	}
}

static inline void starfive_aes_set_alen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);
	writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);
}

static inline void starfive_aes_set_mlen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN0);
	writel(lower_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN1);
}

static inline int starfive_aes_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int starfive_aes_write_iv(struct starfive_cryp_ctx *ctx, u32 *iv)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(iv[0], cryp->base + STARFIVE_AES_IV0);
	writel(iv[1], cryp->base + STARFIVE_AES_IV1);
	writel(iv[2], cryp->base + STARFIVE_AES_IV2);

	if (is_gcm(cryp)) {
		if (starfive_aes_wait_gcmdone(cryp))
			return -ETIMEDOUT;

		return 0;
	}

	writel(iv[3], cryp->base + STARFIVE_AES_IV3);

	return 0;
}

static inline void starfive_aes_get_iv(struct starfive_cryp_dev *cryp, u32 *iv)
{
	iv[0] = readl(cryp->base + STARFIVE_AES_IV0);
	iv[1] = readl(cryp->base + STARFIVE_AES_IV1);
	iv[2] = readl(cryp->base + STARFIVE_AES_IV2);
	iv[3] = readl(cryp->base + STARFIVE_AES_IV3);
}

static inline void starfive_aes_write_nonce(struct starfive_cryp_ctx *ctx, u32 *nonce)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(nonce[0], cryp->base + STARFIVE_AES_NONCE0);
	writel(nonce[1], cryp->base + STARFIVE_AES_NONCE1);
	writel(nonce[2], cryp->base + STARFIVE_AES_NONCE2);
	writel(nonce[3], cryp->base + STARFIVE_AES_NONCE3);
}

static int starfive_aes_write_key(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 *key = (u32 *)ctx->key;

	if (ctx->keylen >= AES_KEYSIZE_128) {
		writel(key[0], cryp->base + STARFIVE_AES_KEY0);
		writel(key[1], cryp->base + STARFIVE_AES_KEY1);
		writel(key[2], cryp->base + STARFIVE_AES_KEY2);
		writel(key[3], cryp->base + STARFIVE_AES_KEY3);
	}

	if (ctx->keylen >= AES_KEYSIZE_192) {
		writel(key[4], cryp->base + STARFIVE_AES_KEY4);
		writel(key[5], cryp->base + STARFIVE_AES_KEY5);
	}

	if (ctx->keylen >= AES_KEYSIZE_256) {
		writel(key[6], cryp->base + STARFIVE_AES_KEY6);
		writel(key[7], cryp->base + STARFIVE_AES_KEY7);
	}

	if (starfive_aes_wait_keydone(cryp))
		return -ETIMEDOUT;

	return 0;
}
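
/*
 * Build the CCM B0 block (RFC 3610) from the request IV and push it
 * through the NONCE registers:
 *
 *   byte 0        flags: CCM_B0_ADATA if AAD is present,
 *                 (authsize - 2) / 2 in bits 3-5, and L' = L - 1 in
 *                 bits 0-2 (taken over unchanged from iv[0])
 *   bytes 1..15-L nonce as supplied in the IV
 *   bytes 14..15  payload length; only the low 16 bits are programmed,
 *                 any higher-order length octets stay zero thanks to
 *                 the memset on the copied IV
 */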
static int starfive_aes_ccm_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	unsigned int textlen;

	memcpy(iv, cryp->req.areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->assoclen)
		b0[0] |= CCM_B0_ADATA;

	textlen = cryp->total_in;

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	starfive_aes_write_nonce(ctx, (u32 *)b0);

	return 0;
}

static int starfive_aes_hw_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 hw_mode;

	/* reset */
	rctx->csr.aes.v = 0;
	rctx->csr.aes.aesrst = 1;
	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	/* csr setup */
	hw_mode = cryp->flags & FLG_MODE_MASK;

	rctx->csr.aes.v = 0;

	switch (ctx->keylen) {
	case AES_KEYSIZE_128:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_128;
		break;
	case AES_KEYSIZE_192:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_192;
		break;
	case AES_KEYSIZE_256:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_256;
		break;
	}

	rctx->csr.aes.mode = hw_mode;
	rctx->csr.aes.cmode = !is_encrypt(cryp);
	rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_1;

	if (cryp->side_chan) {
		rctx->csr.aes.delay_aes = 1;
		rctx->csr.aes.vaes_start = 1;
	}

	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	cryp->err = starfive_aes_write_key(ctx);
	if (cryp->err)
		return cryp->err;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv);
		break;
	case STARFIVE_AES_MODE_CCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		starfive_aes_ccm_init(ctx);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		break;
	case STARFIVE_AES_MODE_CBC:
	case STARFIVE_AES_MODE_CTR:
		starfive_aes_write_iv(ctx, (void *)cryp->req.sreq->iv);
		break;
	default:
		break;
	}

	return cryp->err;
}

static int starfive_aes_read_authtag(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	int i;

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout waiting for tag generation.");

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM) {
		cryp->tag_out[0] = readl(cryp->base + STARFIVE_AES_NONCE0);
		cryp->tag_out[1] = readl(cryp->base + STARFIVE_AES_NONCE1);
		cryp->tag_out[2] = readl(cryp->base + STARFIVE_AES_NONCE2);
		cryp->tag_out[3] = readl(cryp->base + STARFIVE_AES_NONCE3);
	} else {
		for (i = 0; i < AES_BLOCK_32; i++)
			cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
	}

	if (is_encrypt(cryp)) {
		scatterwalk_map_and_copy(cryp->tag_out, rctx->out_sg,
					 cryp->total_in, cryp->authsize, 1);
	} else {
		if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
			return -EBADMSG;
	}

	return 0;
}

static void starfive_aes_finish_req(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	int err = cryp->err;

	if (!err && cryp->authsize)
		err = starfive_aes_read_authtag(ctx);

	if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC ||
		     (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR))
		starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv);

	if (cryp->authsize)
		crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq,
						 err);
}
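
/*
 * Associated data is fed from rctx->adata, a contiguous bounce buffer
 * that starfive_aes_aead_do_one_req() allocated with kzalloc(), so the
 * tail of the final 16-byte chunk is already zero-padded. GCM consumes
 * the AAD through the NONCE registers; CCM takes it through the data
 * FIFO, where the first 14 bytes are written separately (two bytes plus
 * three words), presumably because the hardware itself prepends the
 * two-octet length encoding that completes the first CCM AAD block.
 */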
static int starfive_aes_gcm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	int total_len, loop;

	total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(unsigned int);
	buffer = (u32 *)rctx->adata;

	for (loop = 0; loop < total_len; loop += 4) {
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE0);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE1);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE2);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE3);
		buffer++;
	}

	if (starfive_aes_wait_gcmdone(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing gcm aad block");

	return 0;
}

static int starfive_aes_ccm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	u8 *ci;
	int total_len, loop;

	total_len = cryp->assoclen;

	ci = rctx->adata;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	total_len -= 2;
	buffer = (u32 *)ci;

	for (loop = 0; loop < 3; loop++, buffer++)
		writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

	total_len -= 12;

	while (total_len > 0) {
		for (loop = 0; loop < AES_BLOCK_32; loop++, buffer++)
			writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

		total_len -= AES_BLOCK_SIZE;
	}

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing ccm aad block");

	return 0;
}

static void starfive_aes_dma_done(void *param)
{
	struct starfive_cryp_dev *cryp = param;

	complete(&cryp->dma_done);
}

static void starfive_aes_dma_init(struct starfive_cryp_dev *cryp)
{
	cryp->cfg_in.direction = DMA_MEM_TO_DEV;
	cryp->cfg_in.src_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
	cryp->cfg_in.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cryp->cfg_in.src_maxburst = cryp->dma_maxburst;
	cryp->cfg_in.dst_maxburst = cryp->dma_maxburst;
	cryp->cfg_in.dst_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;

	dmaengine_slave_config(cryp->tx, &cryp->cfg_in);

	cryp->cfg_out.direction = DMA_DEV_TO_MEM;
	cryp->cfg_out.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cryp->cfg_out.dst_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
	cryp->cfg_out.src_maxburst = 4;
	cryp->cfg_out.dst_maxburst = 4;
	cryp->cfg_out.src_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;

	dmaengine_slave_config(cryp->rx, &cryp->cfg_out);

	init_completion(&cryp->dma_done);
}
static int starfive_aes_dma_xfer(struct starfive_cryp_dev *cryp,
				 struct scatterlist *src,
				 struct scatterlist *dst,
				 int len)
{
	struct dma_async_tx_descriptor *in_desc, *out_desc;
	union starfive_alg_cr alg_cr;
	int ret = 0, in_save, out_save;

	alg_cr.v = 0;
	alg_cr.start = 1;
	alg_cr.aes_dma_en = 1;
	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);

	in_save = sg_dma_len(src);
	out_save = sg_dma_len(dst);

	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_IN_LEN_OFFSET);
	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_OUT_LEN_OFFSET);

	sg_dma_len(src) = ALIGN(len, AES_BLOCK_SIZE);
	sg_dma_len(dst) = ALIGN(len, AES_BLOCK_SIZE);

	out_desc = dmaengine_prep_slave_sg(cryp->rx, dst, 1, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!out_desc) {
		ret = -EINVAL;
		goto dma_err;
	}

	out_desc->callback = starfive_aes_dma_done;
	out_desc->callback_param = cryp;

	reinit_completion(&cryp->dma_done);
	dmaengine_submit(out_desc);
	dma_async_issue_pending(cryp->rx);

	in_desc = dmaengine_prep_slave_sg(cryp->tx, src, 1, DMA_MEM_TO_DEV,
					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!in_desc) {
		ret = -EINVAL;
		goto dma_err;
	}

	dmaengine_submit(in_desc);
	dma_async_issue_pending(cryp->tx);

	if (!wait_for_completion_timeout(&cryp->dma_done,
					 msecs_to_jiffies(1000)))
		ret = -ETIMEDOUT;

dma_err:
	sg_dma_len(src) = in_save;
	sg_dma_len(dst) = out_save;

	alg_cr.v = 0;
	alg_cr.clear = 1;
	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);

	return ret;
}
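
/*
 * Walk source and destination scatterlists one mapped entry at a time.
 * In-place requests advance both lists in lockstep; otherwise each
 * transfer is bounded by the shorter of the two current entries, and
 * scatterwalk_ffwd() re-anchors the longer entry at its unconsumed
 * offset before the next round.
 */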
static int starfive_aes_map_sg(struct starfive_cryp_dev *cryp,
			       struct scatterlist *src,
			       struct scatterlist *dst)
{
	struct scatterlist *stsg, *dtsg;
	struct scatterlist _src[2], _dst[2];
	unsigned int remain = cryp->total_in;
	unsigned int len, src_nents, dst_nents;
	int ret;

	if (src == dst) {
		for (stsg = src, dtsg = dst; remain > 0;
		     stsg = sg_next(stsg), dtsg = sg_next(dtsg)) {
			src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
			if (src_nents == 0)
				return -ENOMEM;

			dst_nents = src_nents;
			len = min(sg_dma_len(stsg), remain);

			ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
			dma_unmap_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
			if (ret)
				return ret;

			remain -= len;
		}
	} else {
		for (stsg = src, dtsg = dst;;) {
			src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
			if (src_nents == 0)
				return -ENOMEM;

			dst_nents = dma_map_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
			if (dst_nents == 0)
				return -ENOMEM;

			len = min(sg_dma_len(stsg), sg_dma_len(dtsg));
			len = min(len, remain);

			ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
			dma_unmap_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
			dma_unmap_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
			if (ret)
				return ret;

			remain -= len;
			if (remain == 0)
				break;

			if (sg_dma_len(stsg) - len) {
				stsg = scatterwalk_ffwd(_src, stsg, len);
				dtsg = sg_next(dtsg);
			} else if (sg_dma_len(dtsg) - len) {
				dtsg = scatterwalk_ffwd(_dst, dtsg, len);
				stsg = sg_next(stsg);
			} else {
				stsg = sg_next(stsg);
				dtsg = sg_next(dtsg);
			}
		}
	}

	return 0;
}

static int starfive_aes_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct starfive_cryp_request_ctx *rctx = skcipher_request_ctx(req);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	int ret;

	cryp->req.sreq = req;
	cryp->total_in = req->cryptlen;
	cryp->total_out = req->cryptlen;
	cryp->assoclen = 0;
	cryp->authsize = 0;

	rctx->in_sg = req->src;
	rctx->out_sg = req->dst;

	ctx->rctx = rctx;

	ret = starfive_aes_hw_init(ctx);
	if (ret)
		return ret;

	if (!cryp->total_in)
		goto finish_req;

	starfive_aes_dma_init(cryp);

	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
	if (ret)
		return ret;

finish_req:
	starfive_aes_finish_req(ctx);

	return 0;
}

static int starfive_aes_init_tfm(struct crypto_skcipher *tfm,
				 const char *alg_name)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	ctx->skcipher_fbk = crypto_alloc_skcipher(alg_name, 0,
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->skcipher_fbk))
		return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->skcipher_fbk),
				     "%s() failed to allocate fallback for %s\n",
				     __func__, alg_name);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
				    crypto_skcipher_reqsize(ctx->skcipher_fbk));

	return 0;
}

static void starfive_aes_exit_tfm(struct crypto_skcipher *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->skcipher_fbk);
}
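
/*
 * AEAD request handler run from the crypto engine. The AAD is staged
 * into a zero-padded bounce buffer before being written out, and for
 * decryption the trailing authentication tag is saved to cryp->tag_in
 * up front so that starfive_aes_read_authtag() can compare it once the
 * payload has been processed.
 */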
static int starfive_aes_aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req =
		container_of(areq, struct aead_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = aead_request_ctx(req);
	struct scatterlist _src[2], _dst[2];
	int ret;

	cryp->req.areq = req;
	cryp->assoclen = req->assoclen;
	cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));

	rctx->in_sg = scatterwalk_ffwd(_src, req->src, cryp->assoclen);
	if (req->src == req->dst)
		rctx->out_sg = rctx->in_sg;
	else
		rctx->out_sg = scatterwalk_ffwd(_dst, req->dst, cryp->assoclen);

	if (is_encrypt(cryp)) {
		cryp->total_in = req->cryptlen;
		cryp->total_out = req->cryptlen;
	} else {
		cryp->total_in = req->cryptlen - cryp->authsize;
		cryp->total_out = cryp->total_in;
		scatterwalk_map_and_copy(cryp->tag_in, req->src,
					 cryp->total_in + cryp->assoclen,
					 cryp->authsize, 0);
	}

	if (cryp->assoclen) {
		rctx->adata = kzalloc(cryp->assoclen + AES_BLOCK_SIZE, GFP_KERNEL);
		if (!rctx->adata)
			return -ENOMEM;

		if (sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, cryp->assoclen),
				      rctx->adata, cryp->assoclen) != cryp->assoclen) {
			kfree(rctx->adata);
			return -EINVAL;
		}
	}

	if (cryp->total_in)
		sg_zero_buffer(rctx->in_sg, sg_nents(rctx->in_sg),
			       sg_dma_len(rctx->in_sg) - cryp->total_in,
			       cryp->total_in);

	ctx->rctx = rctx;

	ret = starfive_aes_hw_init(ctx);
	if (ret)
		return ret;

	if (!cryp->assoclen)
		goto write_text;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM)
		ret = starfive_aes_ccm_write_adata(ctx);
	else
		ret = starfive_aes_gcm_write_adata(ctx);

	kfree(rctx->adata);

	if (ret)
		return ret;

write_text:
	if (!cryp->total_in)
		goto finish_req;

	starfive_aes_dma_init(cryp);

	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
	if (ret)
		return ret;

finish_req:
	starfive_aes_finish_req(ctx);
	return 0;
}

static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm,
				      const char *alg_name)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	ctx->aead_fbk = crypto_alloc_aead(alg_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->aead_fbk))
		return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->aead_fbk),
				     "%s() failed to allocate fallback for %s\n",
				     __func__, alg_name);

	crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
				crypto_aead_reqsize(ctx->aead_fbk));

	return 0;
}

static void starfive_aes_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->aead_fbk);
}
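
/*
 * The DMA path needs every scatterlist entry to start word-aligned and,
 * except for the last entry, to cover a whole number of AES blocks;
 * anything else is handed to the software fallback transform.
 */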
static bool starfive_aes_check_unaligned(struct starfive_cryp_dev *cryp,
					 struct scatterlist *src,
					 struct scatterlist *dst)
{
	struct scatterlist *tsg;
	int i;

	for_each_sg(src, tsg, sg_nents(src), i)
		if (!IS_ALIGNED(tsg->offset, sizeof(u32)) ||
		    (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
		     !sg_is_last(tsg)))
			return true;

	if (src != dst)
		for_each_sg(dst, tsg, sg_nents(dst), i)
			if (!IS_ALIGNED(tsg->offset, sizeof(u32)) ||
			    (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
			     !sg_is_last(tsg)))
				return true;

	return false;
}

static int starfive_aes_do_fallback(struct skcipher_request *req, bool enc)
{
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	skcipher_request_set_tfm(subreq, ctx->skcipher_fbk);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete,
				      req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int blocksize_align = crypto_skcipher_blocksize(tfm) - 1;

	cryp->flags = flags;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
	    (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
		if (req->cryptlen & blocksize_align)
			return -EINVAL;

	if (starfive_aes_check_unaligned(cryp, req->src, req->dst))
		return starfive_aes_do_fallback(req, is_encrypt(cryp));

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int starfive_aes_aead_do_fallback(struct aead_request *req, bool enc)
{
	struct starfive_cryp_ctx *ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->aead_fbk);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return enc ? crypto_aead_encrypt(subreq) :
		     crypto_aead_decrypt(subreq);
}

static int starfive_aes_aead_crypt(struct aead_request *req, unsigned long flags)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct scatterlist *src, *dst, _src[2], _dst[2];

	cryp->flags = flags;

	/*
	 * aes-ccm does not support tag verification for non-aligned text,
	 * use fallback for ccm decryption instead.
	 */
	if (((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM) &&
	    !is_encrypt(cryp))
		return starfive_aes_aead_do_fallback(req, 0);

	src = scatterwalk_ffwd(_src, req->src, req->assoclen);

	if (req->src == req->dst)
		dst = src;
	else
		dst = scatterwalk_ffwd(_dst, req->dst, req->assoclen);

	if (starfive_aes_check_unaligned(cryp, src, dst))
		return starfive_aes_aead_do_fallback(req, is_encrypt(cryp));

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int starfive_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (!key || !keylen)
		return -EINVAL;

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return crypto_skcipher_setkey(ctx->skcipher_fbk, key, keylen);
}

static int starfive_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				    unsigned int keylen)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (!key || !keylen)
		return -EINVAL;

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return crypto_aead_setkey(ctx->aead_fbk, key, keylen);
}

static int starfive_aes_gcm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = crypto_gcm_check_authsize(authsize);
	if (ret)
		return ret;

	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
}
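
/* RFC 3610: valid CCM tag lengths are the even sizes 4, 6, ..., 16. */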
static int starfive_aes_ccm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
}

static int starfive_aes_ecb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB | FLG_ENCRYPT);
}

static int starfive_aes_ecb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB);
}

static int starfive_aes_cbc_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC | FLG_ENCRYPT);
}

static int starfive_aes_cbc_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC);
}

static int starfive_aes_ctr_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR | FLG_ENCRYPT);
}

static int starfive_aes_ctr_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR);
}

static int starfive_aes_gcm_encrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM | FLG_ENCRYPT);
}

static int starfive_aes_gcm_decrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM);
}

static int starfive_aes_ccm_encrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM | FLG_ENCRYPT);
}

static int starfive_aes_ccm_decrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM);
}

static int starfive_aes_ecb_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "ecb(aes-generic)");
}

static int starfive_aes_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "cbc(aes-generic)");
}

static int starfive_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "ctr(aes-generic)");
}

static int starfive_aes_ccm_init_tfm(struct crypto_aead *tfm)
{
	return starfive_aes_aead_init_tfm(tfm, "ccm_base(ctr(aes-generic),cbcmac(aes-generic))");
}

static int starfive_aes_gcm_init_tfm(struct crypto_aead *tfm)
{
	return starfive_aes_aead_init_tfm(tfm, "gcm_base(ctr(aes-generic),ghash-generic)");
}
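
/*
 * Algorithm registrations. cra_alignmask of 0xf keeps buffers 16-byte
 * aligned, matching the DMA_SLAVE_BUSWIDTH_16_BYTES memory-side bus
 * width programmed in starfive_aes_dma_init(), and every transform
 * carries CRYPTO_ALG_NEED_FALLBACK so requests the hardware cannot
 * handle are serviced by the fallback tfms allocated at init time.
 */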
static struct skcipher_engine_alg skcipher_algs[] = {
	{
		.base.init = starfive_aes_ecb_init_tfm,
		.base.exit = starfive_aes_exit_tfm,
		.base.setkey = starfive_aes_setkey,
		.base.encrypt = starfive_aes_ecb_encrypt,
		.base.decrypt = starfive_aes_ecb_decrypt,
		.base.min_keysize = AES_MIN_KEY_SIZE,
		.base.max_keysize = AES_MAX_KEY_SIZE,
		.base.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "starfive-ecb-aes",
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
			.cra_alignmask = 0xf,
			.cra_module = THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_do_one_req,
		},
	}, {
		.base.init = starfive_aes_cbc_init_tfm,
		.base.exit = starfive_aes_exit_tfm,
		.base.setkey = starfive_aes_setkey,
		.base.encrypt = starfive_aes_cbc_encrypt,
		.base.decrypt = starfive_aes_cbc_decrypt,
		.base.min_keysize = AES_MIN_KEY_SIZE,
		.base.max_keysize = AES_MAX_KEY_SIZE,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "starfive-cbc-aes",
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
			.cra_alignmask = 0xf,
			.cra_module = THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_do_one_req,
		},
	}, {
		.base.init = starfive_aes_ctr_init_tfm,
		.base.exit = starfive_aes_exit_tfm,
		.base.setkey = starfive_aes_setkey,
		.base.encrypt = starfive_aes_ctr_encrypt,
		.base.decrypt = starfive_aes_ctr_decrypt,
		.base.min_keysize = AES_MIN_KEY_SIZE,
		.base.max_keysize = AES_MAX_KEY_SIZE,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.base = {
			.cra_name = "ctr(aes)",
			.cra_driver_name = "starfive-ctr-aes",
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
			.cra_alignmask = 0xf,
			.cra_module = THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_do_one_req,
		},
	},
};

static struct aead_engine_alg aead_algs[] = {
	{
		.base.setkey = starfive_aes_aead_setkey,
		.base.setauthsize = starfive_aes_gcm_setauthsize,
		.base.encrypt = starfive_aes_gcm_encrypt,
		.base.decrypt = starfive_aes_gcm_decrypt,
		.base.init = starfive_aes_gcm_init_tfm,
		.base.exit = starfive_aes_aead_exit_tfm,
		.base.ivsize = GCM_AES_IV_SIZE,
		.base.maxauthsize = AES_BLOCK_SIZE,
		.base.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "starfive-gcm-aes",
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
			.cra_alignmask = 0xf,
			.cra_module = THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_aead_do_one_req,
		},
	}, {
		.base.setkey = starfive_aes_aead_setkey,
		.base.setauthsize = starfive_aes_ccm_setauthsize,
		.base.encrypt = starfive_aes_ccm_encrypt,
		.base.decrypt = starfive_aes_ccm_decrypt,
		.base.init = starfive_aes_ccm_init_tfm,
		.base.exit = starfive_aes_aead_exit_tfm,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.maxauthsize = AES_BLOCK_SIZE,
		.base.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "starfive-ccm-aes",
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
			.cra_alignmask = 0xf,
			.cra_module = THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_aead_do_one_req,
		},
	},
};

int starfive_aes_register_algs(void)
{
	int ret;

	ret = crypto_engine_register_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
	if (ret)
		return ret;

	ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));

	return ret;
}

void starfive_aes_unregister_algs(void)
{
	crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}