/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif
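
/*
 * Shared descriptor construction below chooses between inlining key
 * material in the descriptor and referencing it by DMA address. As a
 * worked example for the null-cipher case in aead_null_set_sh_desc():
 *
 *	rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
 *		    adata.keylen_pad;
 *
 * and the split key is inlined only if rem_bytes >= DESC_AEAD_NULL_*_LEN,
 * i.e. only if job descriptor I/O commands, shared descriptor commands
 * and the inlined key together fit the 64-word descriptor buffer.
 */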

#ifdef DEBUG
#include <linux/highmem.h>

static void dbg_dump_sg(const char *level, const char *prefix_str,
			int prefix_type, int rowsize, int groupsize,
			struct scatterlist *sg, size_t tlen, bool ascii)
{
	struct scatterlist *it;
	void *it_page;
	size_t len;
	void *buf;

	/* advance via sg_next(it), not sg_next(sg), to actually walk the list */
	for (it = sg; it != NULL && tlen > 0; it = sg_next(it)) {
		/*
		 * make sure the scatterlist's page
		 * has a valid virtual memory mapping
		 */
		it_page = kmap_atomic(sg_page(it));
		if (unlikely(!it_page)) {
			printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
			return;
		}

		buf = it_page + it->offset;
		len = min_t(size_t, tlen, it->length);
		print_hex_dump(level, prefix_str, prefix_type, rowsize,
			       groupsize, buf, len, ascii);
		tlen -= len;

		kunmap_atomic(it_page);
	}
}
#endif

static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
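
/*
 * For the authenc shared descriptors both the (split) authentication key
 * and the cipher key compete for descriptor space, so desc_inline_query()
 * arbitrates per key: bit 0 of inl_mask corresponds to data_len[0] (the
 * auth key), bit 1 to data_len[1] (the cipher key). A set bit means that
 * key still fits inline; a clear bit means it is referenced through
 * ctx->key_dma instead.
 */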

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
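
/*
 * For context, a minimal caller-side sketch of driving the gcm(aes)
 * implementation registered by this file through the generic AEAD API
 * (illustrative only, error handling omitted; not part of this driver):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req;
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 16);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	crypto_aead_encrypt(req);	(may return -EINPROGRESS)
 */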

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
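
/*
 * aead_setkey() below lays out the key material in ctx->key as
 *
 *	{ split authentication key, padded to adata.keylen_pad | enc key }
 *
 * gen_split_key() writes the padded split key first and sets
 * adata.keylen_pad; the raw encryption key is copied directly after it,
 * which is what the "ctx->key + ctx->adata.keylen_pad" arithmetic in
 * aead_set_sh_desc() relies on.
 */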

static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append encryption key after the auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, DMA_TO_DEVICE);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif
	ctx->cdata.keylen = keys.enckeylen;
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4543_set_sh_desc(aead);
}

static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

	memcpy(ctx->key, key, keylen);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
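
/*
 * AES-XTS consumes two same-size AES keys concatenated in the supplied
 * key material (data key and tweak key), hence the 2 * AES_*_KEY_SIZE
 * check below.
 */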

static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
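
/*
 * Both extended descriptors are carved out of a single kzalloc() in the
 * *_edesc_alloc() helpers further down:
 *
 *	+--------------------------+
 *	| struct *_edesc           |
 *	| hw_desc[]  (job desc)    |
 *	| sec4_sg[]  (link table)  |
 *	+--------------------------+
 *
 * which is why sec4_sg is derived there as
 * (void *)edesc + sizeof(struct *_edesc) + desc_bytes.
 */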

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
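
/*
 * The init_*_job() helpers below build the per-request job descriptor
 * sketched at the top of this file: a header pointing at the per-session
 * shared descriptor, followed by SEQ IN/OUT PTR commands naming the
 * packet's source, destination and lengths, plus any algorithm-specific
 * immediate loads (IV, salt, assoclen).
 */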

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes=%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
	dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (edesc->src_nents == 1 && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}

/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
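
/*
 * "all_contig" below means the DMA-mapped source is a single segment, so
 * the sequence-in pointer can reference it directly; otherwise the input
 * is described through the sec4 link table (LDST_SGF).
 */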

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}

static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
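
/*
 * All encrypt/decrypt entry points in this file return -EINPROGRESS once
 * the job is enqueued; completion is signalled from job ring interrupt
 * context through the *_done() callbacks above. A synchronous caller
 * would typically pair this with a completion, roughly as follows
 * (illustrative sketch only; my_complete_cb is a hypothetical
 * caller-supplied callback that calls complete() on the passed context):
 *
 *	struct completion done;
 *
 *	init_completion(&done);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  my_complete_cb, &done);
 *	if (crypto_aead_encrypt(req) == -EINPROGRESS)
 *		wait_for_completion(&done);
 */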

static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

#ifdef DEBUG
	dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    req->assoclen + req->cryptlen, 1);
#endif

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
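
/*
 * ablkcipher_edesc_alloc() below checks whether the DMA-mapped IV ends
 * exactly where the single-segment source begins; if so, IV and payload
 * form one contiguous DMA region (in_contig) and no input link table is
 * needed at all.
 */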

/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	if (mapped_src_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->src)) {
		in_contig = true;
		sec4_sg_ents = 0;
	} else {
		in_contig = false;
		sec4_sg_ents = 1 + mapped_src_nents;
	}
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (!in_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + 1, 0);
	}

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = in_contig;
	return edesc;
}

static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
jobdesc@"__stringify(__LINE__)": ", 1655 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1656 desc_bytes(edesc->hw_desc), 1); 1657 #endif 1658 1659 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req); 1660 if (!ret) { 1661 ret = -EINPROGRESS; 1662 } else { 1663 ablkcipher_unmap(jrdev, edesc, req); 1664 kfree(edesc); 1665 } 1666 1667 return ret; 1668 } 1669 1670 /* 1671 * allocate and map the ablkcipher extended descriptor 1672 * for ablkcipher givencrypt 1673 */ 1674 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc( 1675 struct skcipher_givcrypt_request *greq, 1676 int desc_bytes, 1677 bool *iv_contig_out) 1678 { 1679 struct ablkcipher_request *req = &greq->creq; 1680 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1681 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1682 struct device *jrdev = ctx->jrdev; 1683 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1684 GFP_KERNEL : GFP_ATOMIC; 1685 int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents; 1686 struct ablkcipher_edesc *edesc; 1687 dma_addr_t iv_dma = 0; 1688 bool out_contig; 1689 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1690 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1691 1692 src_nents = sg_nents_for_len(req->src, req->nbytes); 1693 if (unlikely(src_nents < 0)) { 1694 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1695 req->nbytes); 1696 return ERR_PTR(src_nents); 1697 } 1698 1699 if (likely(req->src == req->dst)) { 1700 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1701 DMA_BIDIRECTIONAL); 1702 if (unlikely(!mapped_src_nents)) { 1703 dev_err(jrdev, "unable to map source\n"); 1704 return ERR_PTR(-ENOMEM); 1705 } 1706 1707 dst_nents = src_nents; 1708 mapped_dst_nents = src_nents; 1709 } else { 1710 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1711 DMA_TO_DEVICE); 1712 if (unlikely(!mapped_src_nents)) { 1713 dev_err(jrdev, "unable to map source\n"); 1714 return ERR_PTR(-ENOMEM); 1715 } 1716 1717 dst_nents = sg_nents_for_len(req->dst, req->nbytes); 1718 if (unlikely(dst_nents < 0)) { 1719 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1720 req->nbytes); 1721 return ERR_PTR(dst_nents); 1722 } 1723 1724 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1725 DMA_FROM_DEVICE); 1726 if (unlikely(!mapped_dst_nents)) { 1727 dev_err(jrdev, "unable to map destination\n"); 1728 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1729 return ERR_PTR(-ENOMEM); 1730 } 1731 } 1732 1733 /* 1734 * Check if iv can be contiguous with source and destination. 1735 * If so, include it. If not, create scatterlist. 1736 */ 1737 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE); 1738 if (dma_mapping_error(jrdev, iv_dma)) { 1739 dev_err(jrdev, "unable to map IV\n"); 1740 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1741 0, 0, 0); 1742 return ERR_PTR(-ENOMEM); 1743 } 1744 1745 sec4_sg_ents = mapped_src_nents > 1 ? 

/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool out_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = sec4_sg_ents;
	if (mapped_dst_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->dst)) {
		out_contig = true;
	} else {
		out_contig = false;
		sec4_sg_ents += 1 + mapped_dst_nents;
	}

	/* allocate space for base edesc and hw desc commands, link tables */
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (mapped_src_nents > 1)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
				   0);

	if (!out_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
				   iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx + 1, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = out_contig;
	return edesc;
}

static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig = false;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
					   CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
};
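
/*
 * Templates with .type = CRYPTO_ALG_TYPE_GIVCIPHER supply their own IV
 * generation via .givencrypt (geniv "<built-in>"); the plain
 * CRYPTO_ALG_TYPE_ABLKCIPHER entries rely on a generic geniv template
 * such as "chainiv" or "eseqiv" instead.
 */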

static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
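
/*
 * AEAD algorithms, by contrast, do not go through the legacy template
 * above: they are described directly as struct aead_alg instances,
 * wrapped in caam_aead_alg together with the CAAM operation bits.
 */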
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = 12,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
"authenc(hmac(sha512)," 2643 "cbc(des3_ede))", 2644 .cra_driver_name = "authenc-hmac-sha512-" 2645 "cbc-des3_ede-caam", 2646 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2647 }, 2648 .setkey = aead_setkey, 2649 .setauthsize = aead_setauthsize, 2650 .encrypt = aead_encrypt, 2651 .decrypt = aead_decrypt, 2652 .ivsize = DES3_EDE_BLOCK_SIZE, 2653 .maxauthsize = SHA512_DIGEST_SIZE, 2654 }, 2655 .caam = { 2656 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2657 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2658 OP_ALG_AAI_HMAC_PRECOMP, 2659 }, 2660 }, 2661 { 2662 .aead = { 2663 .base = { 2664 .cra_name = "echainiv(authenc(hmac(sha512)," 2665 "cbc(des3_ede)))", 2666 .cra_driver_name = "echainiv-authenc-" 2667 "hmac-sha512-" 2668 "cbc-des3_ede-caam", 2669 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2670 }, 2671 .setkey = aead_setkey, 2672 .setauthsize = aead_setauthsize, 2673 .encrypt = aead_encrypt, 2674 .decrypt = aead_decrypt, 2675 .ivsize = DES3_EDE_BLOCK_SIZE, 2676 .maxauthsize = SHA512_DIGEST_SIZE, 2677 }, 2678 .caam = { 2679 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2680 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2681 OP_ALG_AAI_HMAC_PRECOMP, 2682 .geniv = true, 2683 }, 2684 }, 2685 { 2686 .aead = { 2687 .base = { 2688 .cra_name = "authenc(hmac(md5),cbc(des))", 2689 .cra_driver_name = "authenc-hmac-md5-" 2690 "cbc-des-caam", 2691 .cra_blocksize = DES_BLOCK_SIZE, 2692 }, 2693 .setkey = aead_setkey, 2694 .setauthsize = aead_setauthsize, 2695 .encrypt = aead_encrypt, 2696 .decrypt = aead_decrypt, 2697 .ivsize = DES_BLOCK_SIZE, 2698 .maxauthsize = MD5_DIGEST_SIZE, 2699 }, 2700 .caam = { 2701 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2702 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2703 OP_ALG_AAI_HMAC_PRECOMP, 2704 }, 2705 }, 2706 { 2707 .aead = { 2708 .base = { 2709 .cra_name = "echainiv(authenc(hmac(md5)," 2710 "cbc(des)))", 2711 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2712 "cbc-des-caam", 2713 .cra_blocksize = DES_BLOCK_SIZE, 2714 }, 2715 .setkey = aead_setkey, 2716 .setauthsize = aead_setauthsize, 2717 .encrypt = aead_encrypt, 2718 .decrypt = aead_decrypt, 2719 .ivsize = DES_BLOCK_SIZE, 2720 .maxauthsize = MD5_DIGEST_SIZE, 2721 }, 2722 .caam = { 2723 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2724 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2725 OP_ALG_AAI_HMAC_PRECOMP, 2726 .geniv = true, 2727 }, 2728 }, 2729 { 2730 .aead = { 2731 .base = { 2732 .cra_name = "authenc(hmac(sha1),cbc(des))", 2733 .cra_driver_name = "authenc-hmac-sha1-" 2734 "cbc-des-caam", 2735 .cra_blocksize = DES_BLOCK_SIZE, 2736 }, 2737 .setkey = aead_setkey, 2738 .setauthsize = aead_setauthsize, 2739 .encrypt = aead_encrypt, 2740 .decrypt = aead_decrypt, 2741 .ivsize = DES_BLOCK_SIZE, 2742 .maxauthsize = SHA1_DIGEST_SIZE, 2743 }, 2744 .caam = { 2745 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2746 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2747 OP_ALG_AAI_HMAC_PRECOMP, 2748 }, 2749 }, 2750 { 2751 .aead = { 2752 .base = { 2753 .cra_name = "echainiv(authenc(hmac(sha1)," 2754 "cbc(des)))", 2755 .cra_driver_name = "echainiv-authenc-" 2756 "hmac-sha1-cbc-des-caam", 2757 .cra_blocksize = DES_BLOCK_SIZE, 2758 }, 2759 .setkey = aead_setkey, 2760 .setauthsize = aead_setauthsize, 2761 .encrypt = aead_encrypt, 2762 .decrypt = aead_decrypt, 2763 .ivsize = DES_BLOCK_SIZE, 2764 .maxauthsize = SHA1_DIGEST_SIZE, 2765 }, 2766 .caam = { 2767 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2768 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2769 OP_ALG_AAI_HMAC_PRECOMP, 2770 .geniv = true, 2771 }, 2772 }, 
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
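	/*
	 * authenc over RFC3686 CTR(AES): the cipher key carries a
	 * trailing 4-byte nonce, and the seqiv() variants generate the
	 * IV in-line.
	 */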
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
};

struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};
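
/*
 * Common per-tfm setup: grab a job ring and map the key and all three
 * shared descriptors in caam_ctx with a single DMA mapping.  The
 * mapping covers the members from sh_desc_enc up to (but not
 * including) sh_desc_enc_dma, so the individual DMA addresses are
 * derived from the base address via offsetof().
 */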
"seqiv-authenc-hmac-sha384-" 3175 "rfc3686-ctr-aes-caam", 3176 .cra_blocksize = 1, 3177 }, 3178 .setkey = aead_setkey, 3179 .setauthsize = aead_setauthsize, 3180 .encrypt = aead_encrypt, 3181 .decrypt = aead_decrypt, 3182 .ivsize = CTR_RFC3686_IV_SIZE, 3183 .maxauthsize = SHA384_DIGEST_SIZE, 3184 }, 3185 .caam = { 3186 .class1_alg_type = OP_ALG_ALGSEL_AES | 3187 OP_ALG_AAI_CTR_MOD128, 3188 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3189 OP_ALG_AAI_HMAC_PRECOMP, 3190 .rfc3686 = true, 3191 .geniv = true, 3192 }, 3193 }, 3194 { 3195 .aead = { 3196 .base = { 3197 .cra_name = "authenc(hmac(sha512)," 3198 "rfc3686(ctr(aes)))", 3199 .cra_driver_name = "authenc-hmac-sha512-" 3200 "rfc3686-ctr-aes-caam", 3201 .cra_blocksize = 1, 3202 }, 3203 .setkey = aead_setkey, 3204 .setauthsize = aead_setauthsize, 3205 .encrypt = aead_encrypt, 3206 .decrypt = aead_decrypt, 3207 .ivsize = CTR_RFC3686_IV_SIZE, 3208 .maxauthsize = SHA512_DIGEST_SIZE, 3209 }, 3210 .caam = { 3211 .class1_alg_type = OP_ALG_ALGSEL_AES | 3212 OP_ALG_AAI_CTR_MOD128, 3213 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3214 OP_ALG_AAI_HMAC_PRECOMP, 3215 .rfc3686 = true, 3216 }, 3217 }, 3218 { 3219 .aead = { 3220 .base = { 3221 .cra_name = "seqiv(authenc(hmac(sha512)," 3222 "rfc3686(ctr(aes))))", 3223 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3224 "rfc3686-ctr-aes-caam", 3225 .cra_blocksize = 1, 3226 }, 3227 .setkey = aead_setkey, 3228 .setauthsize = aead_setauthsize, 3229 .encrypt = aead_encrypt, 3230 .decrypt = aead_decrypt, 3231 .ivsize = CTR_RFC3686_IV_SIZE, 3232 .maxauthsize = SHA512_DIGEST_SIZE, 3233 }, 3234 .caam = { 3235 .class1_alg_type = OP_ALG_ALGSEL_AES | 3236 OP_ALG_AAI_CTR_MOD128, 3237 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3238 OP_ALG_AAI_HMAC_PRECOMP, 3239 .rfc3686 = true, 3240 .geniv = true, 3241 }, 3242 }, 3243 }; 3244 3245 struct caam_crypto_alg { 3246 struct crypto_alg crypto_alg; 3247 struct list_head entry; 3248 struct caam_alg_entry caam; 3249 }; 3250 3251 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam) 3252 { 3253 dma_addr_t dma_addr; 3254 3255 ctx->jrdev = caam_jr_alloc(); 3256 if (IS_ERR(ctx->jrdev)) { 3257 pr_err("Job Ring Device allocation for transform failed\n"); 3258 return PTR_ERR(ctx->jrdev); 3259 } 3260 3261 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3262 offsetof(struct caam_ctx, 3263 sh_desc_enc_dma), 3264 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC); 3265 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3266 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3267 caam_jr_free(ctx->jrdev); 3268 return -ENOMEM; 3269 } 3270 3271 ctx->sh_desc_enc_dma = dma_addr; 3272 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3273 sh_desc_dec); 3274 ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx, 3275 sh_desc_givenc); 3276 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key); 3277 3278 /* copy descriptor header template value */ 3279 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; 3280 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; 3281 3282 return 0; 3283 } 3284 3285 static int caam_cra_init(struct crypto_tfm *tfm) 3286 { 3287 struct crypto_alg *alg = tfm->__crt_alg; 3288 struct caam_crypto_alg *caam_alg = 3289 container_of(alg, struct caam_crypto_alg, crypto_alg); 3290 struct caam_ctx *ctx = crypto_tfm_ctx(tfm); 3291 3292 return caam_init_common(ctx, &caam_alg->caam); 3293 } 3294 3295 static int caam_aead_init(struct crypto_aead *tfm) 3296 { 3297 struct aead_alg *alg = crypto_aead_alg(tfm); 3298 
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;

	return t_alg;
}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
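
/*
 * Module init: locate the CAAM controller node, read the CHA version
 * and instantiation registers, and register only those algorithms the
 * detected hardware can support (presence of DES/AES/MD blocks, and
 * the reduced capabilities of low-power (LP) CAAM variants).
 */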
"fsl,sec4.0"); 3416 if (!dev_node) 3417 return -ENODEV; 3418 } 3419 3420 pdev = of_find_device_by_node(dev_node); 3421 if (!pdev) { 3422 of_node_put(dev_node); 3423 return -ENODEV; 3424 } 3425 3426 ctrldev = &pdev->dev; 3427 priv = dev_get_drvdata(ctrldev); 3428 of_node_put(dev_node); 3429 3430 /* 3431 * If priv is NULL, it's probably because the caam driver wasn't 3432 * properly initialized (e.g. RNG4 init failed). Thus, bail out here. 3433 */ 3434 if (!priv) 3435 return -ENODEV; 3436 3437 3438 INIT_LIST_HEAD(&alg_list); 3439 3440 /* 3441 * Register crypto algorithms the device supports. 3442 * First, detect presence and attributes of DES, AES, and MD blocks. 3443 */ 3444 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3445 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3446 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT; 3447 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT; 3448 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3449 3450 /* If MD is present, limit digest size based on LP256 */ 3451 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256)) 3452 md_limit = SHA256_DIGEST_SIZE; 3453 3454 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3455 struct caam_crypto_alg *t_alg; 3456 struct caam_alg_template *alg = driver_algs + i; 3457 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK; 3458 3459 /* Skip DES algorithms if not supported by device */ 3460 if (!des_inst && 3461 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3462 (alg_sel == OP_ALG_ALGSEL_DES))) 3463 continue; 3464 3465 /* Skip AES algorithms if not supported by device */ 3466 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3467 continue; 3468 3469 /* 3470 * Check support for AES modes not available 3471 * on LP devices. 3472 */ 3473 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) 3474 if ((alg->class1_alg_type & OP_ALG_AAI_MASK) == 3475 OP_ALG_AAI_XTS) 3476 continue; 3477 3478 t_alg = caam_alg_alloc(alg); 3479 if (IS_ERR(t_alg)) { 3480 err = PTR_ERR(t_alg); 3481 pr_warn("%s alg allocation failed\n", alg->driver_name); 3482 continue; 3483 } 3484 3485 err = crypto_register_alg(&t_alg->crypto_alg); 3486 if (err) { 3487 pr_warn("%s alg registration failed\n", 3488 t_alg->crypto_alg.cra_driver_name); 3489 kfree(t_alg); 3490 continue; 3491 } 3492 3493 list_add_tail(&t_alg->entry, &alg_list); 3494 registered = true; 3495 } 3496 3497 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3498 struct caam_aead_alg *t_alg = driver_aeads + i; 3499 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3500 OP_ALG_ALGSEL_MASK; 3501 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3502 OP_ALG_ALGSEL_MASK; 3503 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3504 3505 /* Skip DES algorithms if not supported by device */ 3506 if (!des_inst && 3507 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3508 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3509 continue; 3510 3511 /* Skip AES algorithms if not supported by device */ 3512 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3513 continue; 3514 3515 /* 3516 * Check support for AES algorithms not available 3517 * on LP devices. 3518 */ 3519 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) 3520 if (alg_aai == OP_ALG_AAI_GCM) 3521 continue; 3522 3523 /* 3524 * Skip algorithms requiring message digests 3525 * if MD or MD size is not supported by device. 

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
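
/*
 * Usage note (illustrative sketch only, not part of this driver): once
 * the module is loaded, the algorithms above are reachable through the
 * regular kernel crypto API, e.g. from another kernel module:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_aead_setkey(tfm, key, keylen);
 *		crypto_aead_setauthsize(tfm, 16);
 *		...
 *		crypto_free_aead(tfm);
 *	}
 *
 * With CAAM hardware present, "gcm-aes-caam" wins the priority-based
 * selection (CAAM_CRA_PRIORITY) over software implementations.
 */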