// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>

#define DRIVER_NAME		"stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT		BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES			BIT(1)
#define FLG_DES			BIT(2)
#define FLG_TDES		BIT(3)
#define FLG_ECB			BIT(4)
#define FLG_CBC			BIT(5)
#define FLG_CTR			BIT(6)
#define FLG_GCM			BIT(7)
#define FLG_CCM			BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK		GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_CCM_PADDED_WA	BIT(16)

/* Registers */
#define CRYP_CR			0x00000000
#define CRYP_SR			0x00000004
#define CRYP_DIN		0x00000008
#define CRYP_DOUT		0x0000000C
#define CRYP_DMACR		0x00000010
#define CRYP_IMSCR		0x00000014
#define CRYP_RISR		0x00000018
#define CRYP_MISR		0x0000001C
#define CRYP_K0LR		0x00000020
#define CRYP_K0RR		0x00000024
#define CRYP_K1LR		0x00000028
#define CRYP_K1RR		0x0000002C
#define CRYP_K2LR		0x00000030
#define CRYP_K2RR		0x00000034
#define CRYP_K3LR		0x00000038
#define CRYP_K3RR		0x0000003C
#define CRYP_IV0LR		0x00000040
#define CRYP_IV0RR		0x00000044
#define CRYP_IV1LR		0x00000048
#define CRYP_IV1RR		0x0000004C
#define CRYP_CSGCMCCM0R		0x00000050
#define CRYP_CSGCM0R		0x00000070

/* Registers values */
#define CR_DEC_NOT_ENC		0x00000004
#define CR_TDES_ECB		0x00000000
#define CR_TDES_CBC		0x00000008
#define CR_DES_ECB		0x00000010
#define CR_DES_CBC		0x00000018
#define CR_AES_ECB		0x00000020
#define CR_AES_CBC		0x00000028
#define CR_AES_CTR		0x00000030
#define CR_AES_KP		0x00000038
#define CR_AES_GCM		0x00080000
#define CR_AES_CCM		0x00080008
#define CR_AES_UNKNOWN		0xFFFFFFFF
#define CR_ALGO_MASK		0x00080038
#define CR_DATA32		0x00000000
#define CR_DATA16		0x00000040
#define CR_DATA8		0x00000080
#define CR_DATA1		0x000000C0
#define CR_KEY128		0x00000000
#define CR_KEY192		0x00000100
#define CR_KEY256		0x00000200
#define CR_FFLUSH		0x00004000
#define CR_CRYPEN		0x00008000
#define CR_PH_INIT		0x00000000
#define CR_PH_HEADER		0x00010000
#define CR_PH_PAYLOAD		0x00020000
#define CR_PH_FINAL		0x00030000
#define CR_PH_MASK		0x00030000
#define CR_NBPBL_SHIFT		20

#define SR_BUSY			0x00000010
#define SR_OFNE			0x00000004

#define IMSCR_IN		BIT(0)
#define IMSCR_OUT		BIT(1)

#define MISR_IN			BIT(0)
#define MISR_OUT		BIT(1)

/* Misc */
#define AES_BLOCK_32		(AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT		2
#define _walked_in		(cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out		(cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY	50

struct stm32_cryp_caps {
	bool swap_final;
	bool padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp *cryp;
	int keylen;
	u32 key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head list;
	struct device *dev;
	void __iomem *regs;
	struct clk *clk;
	unsigned long flags;
	u32 irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx *ctx;

	struct crypto_engine *engine;

	struct ablkcipher_request *req;
	struct aead_request *areq;

	size_t authsize;
	size_t hw_blocksize;

	size_t total_in;
	size_t total_in_save;
	size_t total_out;
	size_t total_out_save;

	struct scatterlist *in_sg;
	struct scatterlist *out_sg;
	struct scatterlist *out_sg_save;

	struct scatterlist in_sgl;
	struct scatterlist out_sgl;
	bool sgs_copied;

	int in_sg_len;
	int out_sg_len;

	struct scatter_walk in_walk;
	struct scatter_walk out_walk;

	u32 last_ctr[4];
	u32 gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head dev_list;
	spinlock_t lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
				    size_t align)
{
	int len = 0;

	if (!total)
		return 0;

	if (!IS_ALIGNED(total, align))
		return -EINVAL;

	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return -EINVAL;

		if (!IS_ALIGNED(sg->length, align))
			return -EINVAL;

		len += sg->length;
		sg = sg_next(sg);
	}

	if (len != total)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
	int ret;

	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
				       cryp->hw_blocksize);
	if (ret)
		return ret;

	ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
				       cryp->hw_blocksize);

	return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
			unsigned int start, unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	if (!nbytes)
		return;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}

static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
	void *buf_in, *buf_out;
	int pages, total_in, total_out;

	if (!stm32_cryp_check_io_aligned(cryp)) {
		cryp->sgs_copied = 0;
		return 0;
	}

	total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
	pages = total_in ? get_order(total_in) : 1;
	buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

	total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
	pages = total_out ? get_order(total_out) : 1;
	buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

	if (!buf_in || !buf_out) {
		dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
		cryp->sgs_copied = 0;
		return -EFAULT;
	}

	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

	sg_init_one(&cryp->in_sgl, buf_in, total_in);
	cryp->in_sg = &cryp->in_sgl;
	cryp->in_sg_len = 1;

	sg_init_one(&cryp->out_sgl, buf_out, total_out);
	cryp->out_sg_save = cryp->out_sg;
	cryp->out_sg = &cryp->out_sgl;
	cryp->out_sg_len = 1;

	cryp->sgs_copied = 1;

	return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct ablkcipher_request *req = cryp->req;
	u32 *tmp = req->info;

	if (!tmp)
		return;

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
	}
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
	} else {
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 cpu_to_be32(c->ctx->key[i - 1]));
	}
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (gcm init)\n");

	return ret;
}

static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		if (!cryp->caps->padding_wa)
			*d = cpu_to_be32(*d);
		stm32_cryp_write(cryp, CRYP_DIN, *d++);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (ccm init)\n");

	return ret;
}

static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set key */
	stm32_cryp_hw_write_key(cryp);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}
	}

	cfg |= hw_mode;

	if (is_decrypt(cryp))
		cfg |= CR_DEC_NOT_ENC;

	/* Apply config and flush (valid when CRYPEN = 0) */
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		/* Phase 2 : header (authenticated data) */
		if (cryp->areq->assoclen) {
			cfg |= CR_PH_HEADER;
		} else if (stm32_cryp_get_input_text_len(cryp)) {
			cfg |= CR_PH_PAYLOAD;
			stm32_cryp_write(cryp, CRYP_CR, cfg);
		} else {
			cfg |= CR_PH_INIT;
		}

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
		break;

	default:
		break;
	}

	/* Enable now */
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	cryp->flags &= ~FLG_CCM_PADDED_WA;

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp))))
		stm32_cryp_get_iv(cryp);

	if (cryp->sgs_copied) {
		void *buf_in, *buf_out;
		int pages, len;

		buf_in = sg_virt(&cryp->in_sgl);
		buf_out = sg_virt(&cryp->out_sgl);

		sg_copy_buf(buf_out, cryp->out_sg_save, 0,
			    cryp->total_out_save, 1);

		len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_in, pages);

		len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_out, pages);
	}

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
						   err);

	memset(cryp->ctx->key, 0, cryp->ctx->keylen);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq);

static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
	ctx->enginectx.op.unprepare_request = NULL;
	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
				       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_ablkcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_ablkcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->total_in = req->nbytes;
		cryp->total_out = cryp->total_in;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  =   AssocData   ||    PlainText
		 *          <- assoclen ->   <- cryptlen ->
		 *          <-------- total_in ----------->
		 *
		 *  OUTPUT =   AssocData   ||   CipherText   ||    AuthTag
		 *          <- assoclen ->   <- cryptlen ->    <- authsize ->
		 *          <---------------- total_out ------------------->
		 *
		 * Decryption case:
		 *  INPUT  =   AssocData   ||   CipherText   ||    AuthTag
		 *          <- assoclen ->   <--------- cryptlen --------->
		 *                                            <- authsize ->
		 *          <---------------- total_in ------------------->
		 *
		 *  OUTPUT =   AssocData   ||    PlainText
		 *          <- assoclen ->   <- cryptlen - authsize ->
		 *          <---------- total_out ------------------>
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		cryp->total_in = areq->assoclen + areq->cryptlen;
		if (is_encrypt(cryp))
			/* Append auth tag to output */
			cryp->total_out = cryp->total_in + cryp->authsize;
		else
			/* No auth tag in output */
			cryp->total_out = cryp->total_in - cryp->authsize;
	}

	cryp->total_in_save = cryp->total_in;
	cryp->total_out_save = cryp->total_out;

	cryp->in_sg = req ? req->src : areq->src;
	cryp->out_sg = req ? req->dst : areq->dst;
	cryp->out_sg_save = cryp->out_sg;

	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
	if (cryp->in_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get in_sg_len\n");
		ret = cryp->in_sg_len;
		return ret;
	}

	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
	if (cryp->out_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get out_sg_len\n");
		ret = cryp->out_sg_len;
		return ret;
	}

	ret = stm32_cryp_copy_sgs(cryp);
	if (ret)
		return ret;

	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
		cryp->total_out -= cryp->areq->assoclen;
	}

	ret = stm32_cryp_hw_init(cryp);
	return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq)
{
	struct ablkcipher_request *req = container_of(areq,
						      struct ablkcipher_request,
						      base);

	return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct ablkcipher_request *req = container_of(areq,
						      struct ablkcipher_request,
						      base);
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	if (unlikely(!cryp->areq->assoclen &&
		     !stm32_cryp_get_input_text_len(cryp))) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
				unsigned int n)
{
	scatterwalk_advance(&cryp->out_walk, n);

	if (unlikely(cryp->out_sg->length == _walked_out)) {
		cryp->out_sg = sg_next(cryp->out_sg);
		if (cryp->out_sg) {
			scatterwalk_start(&cryp->out_walk, cryp->out_sg);
			return (sg_virt(cryp->out_sg) + _walked_out);
		}
	}

	return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
			       unsigned int n)
{
	scatterwalk_advance(&cryp->in_walk, n);

	if (unlikely(cryp->in_sg->length == _walked_in)) {
		cryp->in_sg = sg_next(cryp->in_sg);
		if (cryp->in_sg) {
			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
			return (sg_virt(cryp->in_sg) + _walked_in);
		}
	}

	return (u32 *)((u8 *)src + n);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit, *dst, d32;
	u8 *d8;
	unsigned int i, j;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, CRYP_CR);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - AES_BLOCK_SIZE;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
	} else {
		/* CCM: write CTR0 */
		u8 iv[AES_BLOCK_SIZE];
		u32 *iv32 = (u32 *)iv;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (!cryp->caps->padding_wa)
				*iv32 = cpu_to_be32(*iv32);
			stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		/* Get and write tag */
		dst = sg_virt(cryp->out_sg) + _walked_out;

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (cryp->total_out >= sizeof(u32)) {
				/* Read a full u32 */
				*dst = stm32_cryp_read(cryp, CRYP_DOUT);

				dst = stm32_cryp_next_out(cryp, dst,
							  sizeof(u32));
				cryp->total_out -= sizeof(u32);
			} else if (!cryp->total_out) {
				/* Empty fifo out (data from input padding) */
				stm32_cryp_read(cryp, CRYP_DOUT);
			} else {
				/* Read less than an u32 */
				d32 = stm32_cryp_read(cryp, CRYP_DOUT);
				d8 = (u8 *)&d32;

				for (j = 0; j < cryp->total_out; j++) {
					*((u8 *)dst) = *(d8++);
					dst = stm32_cryp_next_out(cryp, dst, 1);
				}
				cryp->total_out = 0;
			}
		}
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_map_and_copy(in_tag, cryp->in_sg,
					 cryp->total_in_save - cryp->authsize,
					 cryp->authsize, 0);

		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return ret;
}

static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
		cryp->last_ctr[3] = 0;
		cryp->last_ctr[2]++;
		if (!cryp->last_ctr[2]) {
			cryp->last_ctr[1]++;
			if (!cryp->last_ctr[1])
				cryp->last_ctr[0]++;
		}

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 d32, *dst;
	u8 *d8;
	size_t tag_size;

	/* Do not read the tag now (if any) */
	if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	dst = sg_virt(cryp->out_sg) + _walked_out;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
			/* Read a full u32 */
			*dst = stm32_cryp_read(cryp, CRYP_DOUT);

			dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
			cryp->total_out -= sizeof(u32);
		} else if (cryp->total_out == tag_size) {
			/* Empty fifo out (data from input padding) */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
		} else {
			/* Read less than an u32 */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
			d8 = (u8 *)&d32;

			for (j = 0; j < cryp->total_out - tag_size; j++) {
				*((u8 *)dst) = *(d8++);
				dst = stm32_cryp_next_out(cryp, dst, 1);
			}
			cryp->total_out = tag_size;
		}
	}

	return !(cryp->total_out - tag_size) || !cryp->total_in;
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 *src;
	u8 d8[4];
	size_t tag_size;

	/* Do not write the tag (if any) */
	if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
			/* Write a full u32 */
			stm32_cryp_write(cryp, CRYP_DIN, *src);

			src = stm32_cryp_next_in(cryp, src, sizeof(u32));
			cryp->total_in -= sizeof(u32);
		} else if (cryp->total_in == tag_size) {
			/* Write padding data */
			stm32_cryp_write(cryp, CRYP_DIN, 0);
		} else {
			/* Write less than an u32 */
			memset(d8, 0, sizeof(u32));
			for (j = 0; j < cryp->total_in - tag_size; j++) {
				d8[j] = *((u8 *)src);
				src = stm32_cryp_next_in(cryp, src, 1);
			}

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			cryp->total_in = tag_size;
		}
	}
}

static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, tmp[AES_BLOCK_32];
	size_t total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	stm32_cryp_irq_read_data(cryp);
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_in_save - total_in_ori,
				 total_in_ori, 0);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) write padded data */
	for (i = 0; i < AES_BLOCK_32; i++) {
		if (cryp->total_in)
			stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
		else
			stm32_cryp_write(cryp, CRYP_DIN, 0);

		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
	}

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, CRYP_DOUT);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg, payload_bytes;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
					   cryp->total_in;
	cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
	size_t last_total_out, total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */
	cryp->flags |= FLG_CCM_PADDED_WA;

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	last_total_out = cryp->total_out;
	stm32_cryp_irq_read_data(cryp);

	memset(tmp, 0, sizeof(tmp));
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_out_save - last_total_out,
				 last_total_out, 0);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(tmp); i++) {
		tmp[i] ^= cstmp1[i];
		tmp[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->total_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa)
			/* Special case 1 */
			return stm32_cryp_irq_write_gcm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa)
			/* Special case 2 */
			return stm32_cryp_irq_write_ccm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i, j;
	u32 cfg, *src;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < AES_BLOCK_32; i++) {
		stm32_cryp_write(cryp, CRYP_DIN, *src);

		src = stm32_cryp_next_in(cryp, src, sizeof(u32));
		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);

		/* Check if whole header written */
		if ((cryp->total_in_save - cryp->total_in) ==
				cryp->areq->assoclen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (gcm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}

		if (!cryp->total_in)
			break;
	}
}

static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i = 0, j, k;
	u32 alen, cfg, *src;
	u8 d8[4];

	src = sg_virt(cryp->in_sg) + _walked_in;
	alen = cryp->areq->assoclen;

	if (!_walked_in) {
		if (cryp->areq->assoclen <= 65280) {
			/* Write first u32 of B1 */
			d8[0] = (alen >> 8) & 0xFF;
			d8[1] = alen & 0xFF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		} else {
			/* Build the first two u32 of B1 */
			d8[0] = 0xFF;
			d8[1] = 0xFE;
			d8[2] = (alen & 0xFF000000) >> 24;
			d8[3] = (alen & 0x00FF0000) >> 16;

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			d8[0] = (alen & 0x0000FF00) >> 8;
			d8[1] = alen & 0x000000FF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		}
	}

	/* Write next u32 */
	for (; i < AES_BLOCK_32; i++) {
		/* Build an u32 */
		memset(d8, 0, sizeof(u32));
		for (k = 0; k < sizeof(u32); k++) {
			d8[k] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			cryp->total_in -= min_t(size_t, 1, cryp->total_in);
			if ((cryp->total_in_save - cryp->total_in) == alen)
				break;
		}

		stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);

		if ((cryp->total_in_save - cryp->total_in) == alen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (ccm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}
	}
}

static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
			/* All bytes processed, finish */
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, 0);
			return IRQ_HANDLED;
		}

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			cryp->gcm_ctr++;
		} else if (is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_ccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

static struct crypto_alg crypto_algs[] = {
{
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "stm32-ecb-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_ecb_encrypt,
		.decrypt	= stm32_cryp_aes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "stm32-cbc-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_cbc_encrypt,
		.decrypt	= stm32_cryp_aes_cbc_decrypt,
	}
},
{
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "stm32-ctr-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_ctr_encrypt,
		.decrypt	= stm32_cryp_aes_ctr_decrypt,
	}
},
{
	.cra_name		= "ecb(des)",
	.cra_driver_name	= "stm32-ecb-des",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= DES_BLOCK_SIZE,
		.max_keysize	= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_des_setkey,
		.encrypt	= stm32_cryp_des_ecb_encrypt,
		.decrypt	= stm32_cryp_des_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(des)",
	.cra_driver_name	= "stm32-cbc-des",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= DES_BLOCK_SIZE,
		.max_keysize	= DES_BLOCK_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_des_setkey,
		.encrypt	= stm32_cryp_des_cbc_encrypt,
		.decrypt	= stm32_cryp_des_cbc_decrypt,
	}
},
{
	.cra_name		= "ecb(des3_ede)",
	.cra_driver_name	= "stm32-ecb-des3",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= 3 * DES_BLOCK_SIZE,
		.max_keysize	= 3 * DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_tdes_setkey,
		.encrypt	= stm32_cryp_tdes_ecb_encrypt,
		.decrypt	= stm32_cryp_tdes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(des3_ede)",
	.cra_driver_name	= "stm32-cbc-des3",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= 3 * DES_BLOCK_SIZE,
		.max_keysize	= 3 * DES_BLOCK_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_tdes_setkey,
		.encrypt	= stm32_cryp_tdes_cbc_encrypt,
		.decrypt	= stm32_cryp_tdes_cbc_decrypt,
	}
},
};

static struct aead_alg aead_algs[] = {
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.encrypt	= stm32_cryp_aes_gcm_encrypt,
	.decrypt	= stm32_cryp_aes_gcm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= 12,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.encrypt	= stm32_cryp_aes_ccm_encrypt,
	.decrypt	= stm32_cryp_aes_ccm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};

static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_get_sync(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name		= DRIVER_NAME,
		.pm		= &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");