// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 * Ux500 support taken from snippets in the old Ux500 cryp driver
 */

#include <crypto/aes.h>
#include <crypto/engine.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/iopoll.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/string.h>

#define DRIVER_NAME		"stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT		BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES			BIT(1)
#define FLG_DES			BIT(2)
#define FLG_TDES		BIT(3)
#define FLG_ECB			BIT(4)
#define FLG_CBC			BIT(5)
#define FLG_CTR			BIT(6)
#define FLG_GCM			BIT(7)
#define FLG_CCM			BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK		GENMASK(15, 0)
/* Bit [31..16] status */

/* Registers */
#define CRYP_CR			0x00000000
#define CRYP_SR			0x00000004
#define CRYP_DIN		0x00000008
#define CRYP_DOUT		0x0000000C
#define CRYP_DMACR		0x00000010
#define CRYP_IMSCR		0x00000014
#define CRYP_RISR		0x00000018
#define CRYP_MISR		0x0000001C
#define CRYP_K0LR		0x00000020
#define CRYP_K0RR		0x00000024
#define CRYP_K1LR		0x00000028
#define CRYP_K1RR		0x0000002C
#define CRYP_K2LR		0x00000030
#define CRYP_K2RR		0x00000034
#define CRYP_K3LR		0x00000038
#define CRYP_K3RR		0x0000003C
#define CRYP_IV0LR		0x00000040
#define CRYP_IV0RR		0x00000044
#define CRYP_IV1LR		0x00000048
#define CRYP_IV1RR		0x0000004C
#define CRYP_CSGCMCCM0R		0x00000050
#define CRYP_CSGCM0R		0x00000070

#define UX500_CRYP_CR		0x00000000
#define UX500_CRYP_SR		0x00000004
#define UX500_CRYP_DIN		0x00000008
#define UX500_CRYP_DINSIZE	0x0000000C
#define UX500_CRYP_DOUT		0x00000010
#define UX500_CRYP_DOUSIZE	0x00000014
#define UX500_CRYP_DMACR	0x00000018
#define UX500_CRYP_IMSC		0x0000001C
#define UX500_CRYP_RIS		0x00000020
#define UX500_CRYP_MIS		0x00000024
#define UX500_CRYP_K1L		0x00000028
#define UX500_CRYP_K1R		0x0000002C
#define UX500_CRYP_K2L		0x00000030
#define UX500_CRYP_K2R		0x00000034
#define UX500_CRYP_K3L		0x00000038
#define UX500_CRYP_K3R		0x0000003C
#define UX500_CRYP_K4L		0x00000040
#define UX500_CRYP_K4R		0x00000044
#define UX500_CRYP_IV0L		0x00000048
#define UX500_CRYP_IV0R		0x0000004C
#define UX500_CRYP_IV1L		0x00000050
#define UX500_CRYP_IV1R		0x00000054

/* Register values */
#define CR_DEC_NOT_ENC		0x00000004
#define CR_TDES_ECB		0x00000000
#define CR_TDES_CBC		0x00000008
#define CR_DES_ECB		0x00000010
#define CR_DES_CBC		0x00000018
#define CR_AES_ECB		0x00000020
#define CR_AES_CBC		0x00000028
#define CR_AES_CTR		0x00000030
#define CR_AES_KP		0x00000038 /* Not on Ux500 */
#define CR_AES_XTS		0x00000038 /* Only on Ux500 */
#define CR_AES_GCM		0x00080000
#define CR_AES_CCM		0x00080008
#define CR_AES_UNKNOWN		0xFFFFFFFF
#define CR_ALGO_MASK		0x00080038
#define CR_DATA32		0x00000000
#define CR_DATA16		0x00000040
#define CR_DATA8		0x00000080
#define CR_DATA1		0x000000C0
#define CR_KEY128		0x00000000
#define CR_KEY192		0x00000100
#define CR_KEY256		0x00000200
#define CR_KEYRDEN		0x00000400 /* Only on Ux500 */
#define CR_KSE			0x00000800 /* Only on Ux500 */
#define CR_FFLUSH		0x00004000
#define CR_CRYPEN		0x00008000
#define CR_PH_INIT		0x00000000
#define CR_PH_HEADER		0x00010000
#define CR_PH_PAYLOAD		0x00020000
#define CR_PH_FINAL		0x00030000
#define CR_PH_MASK		0x00030000
#define CR_NBPBL_SHIFT		20

#define SR_BUSY			0x00000010
#define SR_OFNE			0x00000004

#define IMSCR_IN		BIT(0)
#define IMSCR_OUT		BIT(1)

#define MISR_IN			BIT(0)
#define MISR_OUT		BIT(1)

/* Misc */
#define AES_BLOCK_32		(AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT		2
#define CRYP_AUTOSUSPEND_DELAY	50

struct stm32_cryp_caps {
	bool	aeads_support;
	bool	linear_aes_key;
	bool	kp_mode;
	bool	iv_protection;
	bool	swap_final;
	bool	padding_wa;
	u32	cr;
	u32	sr;
	u32	din;
	u32	dout;
	u32	imsc;
	u32	mis;
	u32	k1l;
	u32	k1r;
	u32	k3r;
	u32	iv0l;
	u32	iv0r;
	u32	iv1l;
	u32	iv1r;
};

struct stm32_cryp_ctx {
	struct stm32_cryp	*cryp;
	int			keylen;
	__be32			key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long		flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head	list;
	struct device		*dev;
	void __iomem		*regs;
	struct clk		*clk;
	unsigned long		flags;
	u32			irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx	*ctx;

	struct crypto_engine	*engine;

	struct skcipher_request	*req;
	struct aead_request	*areq;

	size_t			authsize;
	size_t			hw_blocksize;

	size_t			payload_in;
	size_t			header_in;
	size_t			payload_out;

	struct scatterlist	*out_sg;

	struct scatter_walk	in_walk;
	struct scatter_walk	out_walk;

	__be32			last_ctr[4];
	u32			gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head	dev_list;
	spinlock_t		lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

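/*
 * Register polling helpers: wait for the relevant status/control bit using
 * the variant-specific register offsets from cryp->caps, with a 10 us poll
 * interval and a 100 ms timeout.
 */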
static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN,
		       cryp->regs + cryp->caps->cr);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
			status & SR_OFNE, 10, 100000);
}

static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN,
		       cryp->regs + cryp->caps->cr);
}

static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN,
		       cryp->regs + cryp->caps->cr);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++));
	stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++));
		stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	__be32 *tmp = (void *)req->iv;

	if (!tmp)
		return;

	if (cryp->caps->iv_protection)
		stm32_cryp_key_read_enable(cryp);

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
	}

	if (cryp->caps->iv_protection)
		stm32_cryp_key_read_disable(cryp);
}

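/*
 * Ux500 key handling: the hardware expects the AES key with the word order
 * reversed and the bits mirrored within each byte.  The two helpers below
 * perform that transformation; for example, mirroring the byte 0x2C
 * (binary 00101100) yields 0x34 (binary 00110100).
 */
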
/**
 * ux500_swap_bits_in_byte() - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 * Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
 * nibble 2 (n2) bits 4-7.
 *
 * Nibble 1 (n1):
 * (The "old" (moved) bit is replaced with a zero)
 * 1. Move bit 6 and 7, 4 positions to the left.
 * 2. Move bit 3 and 5, 2 positions to the left.
 * 3. Move bit 1-4, 1 position to the left.
 *
 * Nibble 2 (n2):
 * 1. Move bit 0 and 1, 4 positions to the right.
 * 2. Move bit 2 and 4, 2 positions to the right.
 * 3. Move bit 3-6, 1 position to the right.
 *
 * Combine the two nibbles to a complete and swapped byte.
 */
static inline u8 ux500_swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK 0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK 0x28 /* (After right shift 4) Bits 3 and 5,
			       right shift 2 */
#define R_SHIFT_1_MASK 0x1e /* (After right shift 2) Bits 1-4,
			       right shift 1 */
#define L_SHIFT_4_MASK 0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK 0x14 /* (After left shift 4) Bits 2 and 4,
			       left shift 2 */
#define L_SHIFT_1_MASK 0x78 /* (After left shift 2) Bits 3-6,
			       left shift 1 */

	u8 n1;
	u8 n2;

	/* Swap most significant nibble */
	/* Right shift 4, bits 6 and 7 */
	n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4));
	/* Right shift 2, bits 3 and 5 */
	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
	/* Right shift 1, bits 1-4 */
	n1 = (n1 & R_SHIFT_1_MASK) >> 1;

	/* Swap least significant nibble */
	/* Left shift 4, bits 0 and 1 */
	n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4));
	/* Left shift 2, bits 2 and 4 */
	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
	/* Left shift 1, bits 3-6 */
	n2 = (n2 & L_SHIFT_1_MASK) << 1;

	return n1 | n2;
}

/**
 * ux500_swizzle_key() - Shuffle around words and bits in the AES key
 * @in: key to swizzle
 * @out: swizzled key
 * @len: length of key, in bytes
 *
 * This "key swizzling procedure" is described in the examples in the
 * DB8500 design specification. There is no real description of why
 * the bits have been arranged like this in the hardware.
 */
static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len)
{
	int i = 0;
	int bpw = sizeof(u32);
	int j;
	int index = 0;

	j = len - bpw;
	while (j >= 0) {
		for (i = 0; i < bpw; i++) {
			index = len - j - bpw + i;
			out[j + i] = ux500_swap_bits_in_byte(in[index]);
		}
		j -= bpw;
	}
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0]));
		stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1]));
		return;
	}

	/*
	 * On the Ux500 the AES key is considered as a single bit sequence
	 * of 128, 192 or 256 bits length. It is written linearly into the
	 * registers from K1L and down, and needs to be processed to become
	 * a proper big-endian bit sequence.
	 */
	if (is_aes(c) && c->caps->linear_aes_key) {
		u32 tmpkey[8];

		ux500_swizzle_key((u8 *)c->ctx->key,
				  (u8 *)tmpkey, c->ctx->keylen);

		r_id = c->caps->k1l;
		for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4)
			stm32_cryp_write(c, r_id, tmpkey[i]);

		return;
	}

	r_id = c->caps->k3r;
	for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
		stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1]));
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	__be32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (gcm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	}

	return 0;
}

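/*
 * Called after each header block has been written: once the whole header is
 * in, wait for the peripheral to finish processing and switch it to the
 * payload phase; with no payload, leave the final (tag) phase to the caller.
 */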
static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
{
	u32 cfg;
	int err;

	/* Check if whole header written */
	if (!cryp->header_in) {
		/* Wait for completion */
		err = stm32_cryp_wait_busy(cryp);
		if (err) {
			dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
			stm32_cryp_write(cryp, cryp->caps->imsc, 0);
			stm32_cryp_finish_req(cryp, err);
			return;
		}

		if (stm32_cryp_get_input_text_len(cryp)) {
			/* Phase 3 : payload */
			cfg = stm32_cryp_read(cryp, cryp->caps->cr);
			cfg &= ~CR_CRYPEN;
			stm32_cryp_write(cryp, cryp->caps->cr, cfg);

			cfg &= ~CR_PH_MASK;
			cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
			stm32_cryp_write(cryp, cryp->caps->cr, cfg);
		} else {
			/*
			 * Phase 4 : tag.
			 * Nothing to read, nothing to write, caller has to
			 * end the request
			 */
		}
	}
}

static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
{
	size_t written;
	size_t len;
	u32 alen = cryp->areq->assoclen;
	u32 block[AES_BLOCK_32] = {0};
	u8 *b8 = (u8 *)block;

	if (alen <= 65280) {
		/* Write first u32 of B1 */
		b8[0] = (alen >> 8) & 0xFF;
		b8[1] = alen & 0xFF;
		len = 2;
	} else {
		/* Build the two first u32 of B1 */
		b8[0] = 0xFF;
		b8[1] = 0xFE;
		b8[2] = (alen & 0xFF000000) >> 24;
		b8[3] = (alen & 0x00FF0000) >> 16;
		b8[4] = (alen & 0x0000FF00) >> 8;
		b8[5] = alen & 0x000000FF;
		len = 6;
	}

	written = min_t(size_t, AES_BLOCK_SIZE - len, alen);

	scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
	u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
	__be32 *bd;
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;
	bd = (__be32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		u32 xd = d[i];

		if (!cryp->caps->padding_wa)
			xd = be32_to_cpu(bd[i]);
		stm32_cryp_write(cryp, cryp->caps->din, xd);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (ccm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER | CR_CRYPEN;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);

		/* Write first (special) block (may move to next phase [payload]) */
		stm32_cryp_write_ccm_first_header(cryp);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	}

	return 0;
}

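/*
 * Program the peripheral for the current request: key length, algorithm and
 * chaining mode, key and IV registers, plus the key preparation step for
 * AES ECB/CBC decryption and the GCM/CCM init phase where required.
 */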
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		/* Configure in key preparation mode */
		if (cryp->caps->kp_mode)
			stm32_cryp_write(cryp, cryp->caps->cr,
					 cfg | CR_AES_KP);
		else
			stm32_cryp_write(cryp,
					 cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE);

		/* Set key only after full configuration done */
		stm32_cryp_hw_write_key(cryp);

		/* Start prepare key */
		stm32_cryp_enable(cryp);
		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}

		cfg |= hw_mode | CR_DEC_NOT_ENC;

		/* Apply updated config (Decrypt + algo) and flush */
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	} else {
		cfg |= hw_mode;
		if (is_decrypt(cryp))
			cfg |= CR_DEC_NOT_ENC;

		/* Apply config and flush */
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);

		/* Set key only after configuration done */
		stm32_cryp_hw_write_key(cryp);
	}

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
		break;

	default:
		break;
	}

	/* Enable now */
	stm32_cryp_enable(cryp);

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
		stm32_cryp_get_iv(cryp);

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req, err);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	return 0;
}

static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_skcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

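/*
 * Common setup for both skcipher and AEAD requests: latch the mode flags,
 * compute header/payload byte counts, start the input/output scatterwalks
 * and initialise the hardware.
 */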
static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	struct scatterlist *in_sg;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->header_in = 0;
		cryp->payload_in = req->cryptlen;
		cryp->payload_out = req->cryptlen;
		cryp->authsize = 0;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  = AssocData   ||    PlainText
		 *          <- assoclen ->  <- cryptlen ->
		 *
		 *  OUTPUT = AssocData   ||   CipherText   ||     AuthTag
		 *          <- assoclen ->  <-- cryptlen -->  <- authsize ->
		 *
		 * Decryption case:
		 *  INPUT  = AssocData   ||    CipherText   ||     AuthTag
		 *          <- assoclen ->  <--------- cryptlen --------->
		 *
		 *  OUTPUT = AssocData   ||        PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->payload_in = areq->cryptlen;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = areq->cryptlen;
		} else {
			cryp->payload_in = areq->cryptlen - cryp->authsize;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = cryp->payload_in;
		}
	}

	in_sg = req ? req->src : areq->src;
	scatterwalk_start(&cryp->in_walk, in_sg);

	cryp->out_sg = req ? req->dst : areq->dst;
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
	}

	if (is_ctr(cryp))
		memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));

	ret = stm32_cryp_hw_init(cryp);
	return ret;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq,
						    struct skcipher_request,
						    base);
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_prepare_req(req, NULL) ?:
	       stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;
	int err;

	if (!cryp)
		return -ENODEV;

	err = stm32_cryp_prepare_req(NULL, req);
	if (err)
		return err;

	if (unlikely(!cryp->payload_in && !cryp->header_in)) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

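/*
 * Final (tag) phase: for GCM write the AAD and payload lengths in bits, for
 * CCM write CTR0, then read back the tag (encryption) or compare it with the
 * transmitted one (decryption).
 */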
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit;
	unsigned int i;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, cryp->caps->din, 0);
		stm32_cryp_write(cryp, cryp->caps->din, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - cryp->authsize;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, cryp->caps->din, 0);
		stm32_cryp_write(cryp, cryp->caps->din, size_bit);
	} else {
		/* CCM: write CTR0 */
		u32 iv32[AES_BLOCK_32];
		u8 *iv = (u8 *)iv32;
		__be32 *biv = (__be32 *)iv32;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			u32 xiv = iv32[i];

			if (!cryp->caps->padding_wa)
				xiv = be32_to_cpu(biv[i]);
			stm32_cryp_write(cryp, cryp->caps->din, xiv);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		u32 out_tag[AES_BLOCK_32];

		/* Get and write tag */
		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
		scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	return ret;
}

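/*
 * The peripheral increments only the 32 LSB of the CTR counter.  When the
 * last written low word is 0xFFFFFFFF, propagate the carry by software:
 * bump the saved 128-bit counter and reload it, then snapshot the IV
 * registers for the next block.
 */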
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
		/*
		 * In this case, we need to increment manually the ctr counter,
		 * as HW doesn't handle the U32 carry.
		 */
		crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));

		cr = stm32_cryp_read(cryp, cryp->caps->cr);
		stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);

		stm32_cryp_write(cryp, cryp->caps->cr, cr);
	}

	/* The IV registers are BE */
	cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
	cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
	cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
	cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
}

static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32];

	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};

	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
							    cryp->payload_in), 0);
	writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32));
	cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
}

static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm last data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to store
	 * block value
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) write padded data */
	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, cryp->caps->dout);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
	u32 block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to store
	 * block value
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(block); i++) {
		block[i] ^= cstmp1[i];
		block[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, cryp->caps->din, block[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

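/*
 * Push the next input block.  When the last block of a GCM encryption or
 * CCM decryption is shorter than the AES block size, either run the
 * datasheet workaround (padding_wa variants) or program the number of
 * padding bytes (NPBLB) before writing the padded block.
 */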
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->payload_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa) {
			/* Special case 1 */
			stm32_cryp_irq_write_gcm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa) {
			/* Special case 2 */
			stm32_cryp_irq_write_ccm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};
	size_t written;

	written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);

	scatterwalk_copychunks(block, &cryp->in_walk, written, 0);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;
	u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc);

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		stm32_cryp_irq_read_data(cryp);

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp) || is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcmccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			if (is_gcm(cryp))
				cryp->gcm_ctr++;
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	/* Mask useless interrupts */
	if (!cryp->payload_in && !cryp->header_in)
		it_mask &= ~IMSCR_IN;
	if (!cryp->payload_out)
		it_mask &= ~IMSCR_OUT;
	stm32_cryp_write(cryp, cryp->caps->imsc, it_mask);

	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
		stm32_cryp_finish_req(cryp, 0);

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis);

	return IRQ_WAKE_THREAD;
}

static struct skcipher_engine_alg crypto_algs[] = {
	{
		.base = {
			.base.cra_name = "ecb(aes)",
			.base.cra_driver_name = "stm32-ecb-aes",
			.base.cra_priority = 200,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = AES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.base.cra_alignmask = 0,
			.base.cra_module = THIS_MODULE,

			.init = stm32_cryp_init_tfm,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = stm32_cryp_aes_setkey,
			.encrypt = stm32_cryp_aes_ecb_encrypt,
			.decrypt = stm32_cryp_aes_ecb_decrypt,
		},
		.op = {
			.do_one_request = stm32_cryp_cipher_one_req,
		},
	},
	{
		.base = {
			.base.cra_name = "cbc(aes)",
			.base.cra_driver_name = "stm32-cbc-aes",
			.base.cra_priority = 200,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = AES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.base.cra_alignmask = 0,
			.base.cra_module = THIS_MODULE,

			.init = stm32_cryp_init_tfm,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = stm32_cryp_aes_setkey,
			.encrypt = stm32_cryp_aes_cbc_encrypt,
			.decrypt = stm32_cryp_aes_cbc_decrypt,
		},
		.op = {
			.do_one_request = stm32_cryp_cipher_one_req,
		},
	},
	{
		.base = {
			.base.cra_name = "ctr(aes)",
			.base.cra_driver_name = "stm32-ctr-aes",
			.base.cra_priority = 200,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = 1,
			.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.base.cra_alignmask = 0,
			.base.cra_module = THIS_MODULE,

			.init = stm32_cryp_init_tfm,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = stm32_cryp_aes_setkey,
			.encrypt = stm32_cryp_aes_ctr_encrypt,
			.decrypt = stm32_cryp_aes_ctr_decrypt,
		},
		.op = {
			.do_one_request = stm32_cryp_cipher_one_req,
		},
	},
	{
		.base = {
			.base.cra_name = "ecb(des)",
			.base.cra_driver_name = "stm32-ecb-des",
			.base.cra_priority = 200,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.base.cra_alignmask = 0,
			.base.cra_module = THIS_MODULE,

			.init = stm32_cryp_init_tfm,
			.min_keysize = DES_BLOCK_SIZE,
			.max_keysize = DES_BLOCK_SIZE,
			.setkey = stm32_cryp_des_setkey,
			.encrypt = stm32_cryp_des_ecb_encrypt,
			.decrypt = stm32_cryp_des_ecb_decrypt,
		},
		.op = {
			.do_one_request = stm32_cryp_cipher_one_req,
		},
	},
	{
		.base = {
			.base.cra_name = "cbc(des)",
			.base.cra_driver_name = "stm32-cbc-des",
			.base.cra_priority = 200,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.base.cra_alignmask = 0,
			.base.cra_module = THIS_MODULE,

			.init = stm32_cryp_init_tfm,
			.min_keysize = DES_BLOCK_SIZE,
			.max_keysize = DES_BLOCK_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = stm32_cryp_des_setkey,
			.encrypt = stm32_cryp_des_cbc_encrypt,
			.decrypt = stm32_cryp_des_cbc_decrypt,
		},
		.op = {
			.do_one_request = stm32_cryp_cipher_one_req,
		},
	},
	{
		.base = {
			.base.cra_name = "ecb(des3_ede)",
			.base.cra_driver_name = "stm32-ecb-des3",
			.base.cra_priority = 200,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.base.cra_alignmask = 0,
			.base.cra_module = THIS_MODULE,

			.init = stm32_cryp_init_tfm,
			.min_keysize = 3 * DES_BLOCK_SIZE,
			.max_keysize = 3 * DES_BLOCK_SIZE,
			.setkey = stm32_cryp_tdes_setkey,
			.encrypt = stm32_cryp_tdes_ecb_encrypt,
			.decrypt = stm32_cryp_tdes_ecb_decrypt,
		},
		.op = {
			.do_one_request = stm32_cryp_cipher_one_req,
		},
	},
	{
		.base = {
			.base.cra_name = "cbc(des3_ede)",
			.base.cra_driver_name = "stm32-cbc-des3",
			.base.cra_priority = 200,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.base.cra_alignmask = 0,
			.base.cra_module = THIS_MODULE,

			.init = stm32_cryp_init_tfm,
			.min_keysize = 3 * DES_BLOCK_SIZE,
			.max_keysize = 3 * DES_BLOCK_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = stm32_cryp_tdes_setkey,
			.encrypt = stm32_cryp_tdes_cbc_encrypt,
			.decrypt = stm32_cryp_tdes_cbc_decrypt,
		},
		.op = {
			.do_one_request = stm32_cryp_cipher_one_req,
		},
	},
};

static struct aead_engine_alg aead_algs[] = {
	{
		.base.setkey = stm32_cryp_aes_aead_setkey,
		.base.setauthsize = stm32_cryp_aes_gcm_setauthsize,
		.base.encrypt = stm32_cryp_aes_gcm_encrypt,
		.base.decrypt = stm32_cryp_aes_gcm_decrypt,
		.base.init = stm32_cryp_aes_aead_init,
		.base.ivsize = 12,
		.base.maxauthsize = AES_BLOCK_SIZE,

		.base.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "stm32-gcm-aes",
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_ASYNC,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.op = {
			.do_one_request = stm32_cryp_aead_one_req,
		},
	},
	{
		.base.setkey = stm32_cryp_aes_aead_setkey,
		.base.setauthsize = stm32_cryp_aes_ccm_setauthsize,
		.base.encrypt = stm32_cryp_aes_ccm_encrypt,
		.base.decrypt = stm32_cryp_aes_ccm_decrypt,
		.base.init = stm32_cryp_aes_aead_init,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.maxauthsize = AES_BLOCK_SIZE,

		.base.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "stm32-ccm-aes",
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_ASYNC,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.op = {
			.do_one_request = stm32_cryp_aead_one_req,
		},
	},
};

static const struct stm32_cryp_caps ux500_data = {
	.aeads_support = false,
	.linear_aes_key = true,
	.kp_mode = false,
	.iv_protection = true,
	.swap_final = true,
	.padding_wa = true,
	.cr = UX500_CRYP_CR,
	.sr = UX500_CRYP_SR,
	.din = UX500_CRYP_DIN,
	.dout = UX500_CRYP_DOUT,
	.imsc = UX500_CRYP_IMSC,
	.mis = UX500_CRYP_MIS,
	.k1l = UX500_CRYP_K1L,
	.k1r = UX500_CRYP_K1R,
	.k3r = UX500_CRYP_K3R,
	.iv0l = UX500_CRYP_IV0L,
	.iv0r = UX500_CRYP_IV0R,
	.iv1l = UX500_CRYP_IV1L,
	.iv1r = UX500_CRYP_IV1R,
};

static const struct stm32_cryp_caps f7_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = true,
	.padding_wa = true,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};

static const struct stm32_cryp_caps mp1_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = false,
	.padding_wa = false,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "stericsson,ux500-cryp", .data = &ux500_data},
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");

		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_rst;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_engine_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	if (cryp->caps->aeads_support) {
		ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
		if (ret)
			goto err_aead_algs;
	}

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);
err_rst:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static void stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	ret = pm_runtime_get_sync(cryp->dev);

	if (cryp->caps->aeads_support)
		crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	if (ret >= 0)
		clk_disable_unprepare(cryp->clk);
}

#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe = stm32_cryp_probe,
	.remove_new = stm32_cryp_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");