1 /* 2 * caam - Freescale FSL CAAM support for crypto API 3 * 4 * Copyright 2008-2011 Freescale Semiconductor, Inc. 5 * Copyright 2016 NXP 6 * 7 * Based on talitos crypto API driver. 8 * 9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008): 10 * 11 * --------------- --------------- 12 * | JobDesc #1 |-------------------->| ShareDesc | 13 * | *(packet 1) | | (PDB) | 14 * --------------- |------------->| (hashKey) | 15 * . | | (cipherKey) | 16 * . | |-------->| (operation) | 17 * --------------- | | --------------- 18 * | JobDesc #2 |------| | 19 * | *(packet 2) | | 20 * --------------- | 21 * . | 22 * . | 23 * --------------- | 24 * | JobDesc #3 |------------ 25 * | *(packet 3) | 26 * --------------- 27 * 28 * The SharedDesc never changes for a connection unless rekeyed, but 29 * each packet will likely be in a different place. So all we need 30 * to know to process the packet is where the input is, where the 31 * output goes, and what context we want to process with. Context is 32 * in the SharedDesc, packet references in the JobDesc. 33 * 34 * So, a job desc looks like: 35 * 36 * --------------------- 37 * | Header | 38 * | ShareDesc Pointer | 39 * | SEQ_OUT_PTR | 40 * | (output buffer) | 41 * | (output length) | 42 * | SEQ_IN_PTR | 43 * | (input buffer) | 44 * | (input length) | 45 * --------------------- 46 */ 47 48 #include "compat.h" 49 50 #include "regs.h" 51 #include "intern.h" 52 #include "desc_constr.h" 53 #include "jr.h" 54 #include "error.h" 55 #include "sg_sw_sec4.h" 56 #include "key_gen.h" 57 #include "caamalg_desc.h" 58 59 /* 60 * crypto alg 61 */ 62 #define CAAM_CRA_PRIORITY 3000 63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */ 64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \ 65 CTR_RFC3686_NONCE_SIZE + \ 66 SHA512_DIGEST_SIZE * 2) 67 68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2) 69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 70 CAAM_CMD_SZ * 4) 71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 72 CAAM_CMD_SZ * 5) 73 74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN) 75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ) 76 77 #ifdef DEBUG 78 /* for print_hex_dumps with line references */ 79 #define debug(format, arg...) printk(format, arg) 80 #else 81 #define debug(format, arg...) 82 #endif 83 84 #ifdef DEBUG 85 #include <linux/highmem.h> 86 87 static void dbg_dump_sg(const char *level, const char *prefix_str, 88 int prefix_type, int rowsize, int groupsize, 89 struct scatterlist *sg, size_t tlen, bool ascii) 90 { 91 struct scatterlist *it; 92 void *it_page; 93 size_t len; 94 void *buf; 95 96 for (it = sg; it != NULL && tlen > 0 ; it = sg_next(sg)) { 97 /* 98 * make sure the scatterlist's page 99 * has a valid virtual memory mapping 100 */ 101 it_page = kmap_atomic(sg_page(it)); 102 if (unlikely(!it_page)) { 103 printk(KERN_ERR "dbg_dump_sg: kmap failed\n"); 104 return; 105 } 106 107 buf = it_page + it->offset; 108 len = min_t(size_t, tlen, it->length); 109 print_hex_dump(level, prefix_str, prefix_type, rowsize, 110 groupsize, buf, len, ascii); 111 tlen -= len; 112 113 kunmap_atomic(it_page); 114 } 115 } 116 #endif 117 118 static struct list_head alg_list; 119 120 struct caam_alg_entry { 121 int class1_alg_type; 122 int class2_alg_type; 123 bool rfc3686; 124 bool geniv; 125 }; 126 127 struct caam_aead_alg { 128 struct aead_alg aead; 129 struct caam_alg_entry caam; 130 bool registered; 131 }; 132 133 /* 134 * per-session context 135 */ 136 struct caam_ctx { 137 u32 sh_desc_enc[DESC_MAX_USED_LEN]; 138 u32 sh_desc_dec[DESC_MAX_USED_LEN]; 139 u32 sh_desc_givenc[DESC_MAX_USED_LEN]; 140 u8 key[CAAM_MAX_KEY_SIZE]; 141 dma_addr_t sh_desc_enc_dma; 142 dma_addr_t sh_desc_dec_dma; 143 dma_addr_t sh_desc_givenc_dma; 144 dma_addr_t key_dma; 145 struct device *jrdev; 146 struct alginfo adata; 147 struct alginfo cdata; 148 unsigned int authsize; 149 }; 150 151 static int aead_null_set_sh_desc(struct crypto_aead *aead) 152 { 153 struct caam_ctx *ctx = crypto_aead_ctx(aead); 154 struct device *jrdev = ctx->jrdev; 155 u32 *desc; 156 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - 157 ctx->adata.keylen_pad; 158 159 /* 160 * Job Descriptor and Shared Descriptors 161 * must all fit into the 64-word Descriptor h/w Buffer 162 */ 163 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) { 164 ctx->adata.key_inline = true; 165 ctx->adata.key_virt = ctx->key; 166 } else { 167 ctx->adata.key_inline = false; 168 ctx->adata.key_dma = ctx->key_dma; 169 } 170 171 /* aead_encrypt shared descriptor */ 172 desc = ctx->sh_desc_enc; 173 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize); 174 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 175 desc_bytes(desc), DMA_TO_DEVICE); 176 177 /* 178 * Job Descriptor and Shared Descriptors 179 * must all fit into the 64-word Descriptor h/w Buffer 180 */ 181 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) { 182 ctx->adata.key_inline = true; 183 ctx->adata.key_virt = ctx->key; 184 } else { 185 ctx->adata.key_inline = false; 186 ctx->adata.key_dma = ctx->key_dma; 187 } 188 189 /* aead_decrypt shared descriptor */ 190 desc = ctx->sh_desc_dec; 191 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize); 192 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 193 desc_bytes(desc), DMA_TO_DEVICE); 194 195 return 0; 196 } 197 198 static int aead_set_sh_desc(struct crypto_aead *aead) 199 { 200 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 201 struct caam_aead_alg, aead); 202 unsigned int ivsize = crypto_aead_ivsize(aead); 203 struct caam_ctx *ctx = crypto_aead_ctx(aead); 204 struct device *jrdev = ctx->jrdev; 205 u32 ctx1_iv_off = 0; 206 u32 *desc, *nonce = NULL; 207 u32 inl_mask; 208 unsigned int data_len[2]; 209 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 210 OP_ALG_AAI_CTR_MOD128); 211 const bool is_rfc3686 = alg->caam.rfc3686; 212 213 if (!ctx->authsize) 214 return 0; 215 216 /* NULL encryption / decryption */ 217 if (!ctx->cdata.keylen) 218 return aead_null_set_sh_desc(aead); 219 220 /* 221 * AES-CTR needs to load IV in CONTEXT1 reg 222 * at an offset of 128bits (16bytes) 223 * CONTEXT1[255:128] = IV 224 */ 225 if (ctr_mode) 226 ctx1_iv_off = 16; 227 228 /* 229 * RFC3686 specific: 230 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 231 */ 232 if (is_rfc3686) { 233 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 234 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad + 235 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE); 236 } 237 238 data_len[0] = ctx->adata.keylen_pad; 239 data_len[1] = ctx->cdata.keylen; 240 241 if (alg->caam.geniv) 242 goto skip_enc; 243 244 /* 245 * Job Descriptor and Shared Descriptors 246 * must all fit into the 64-word Descriptor h/w Buffer 247 */ 248 if (desc_inline_query(DESC_AEAD_ENC_LEN + 249 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 250 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 251 ARRAY_SIZE(data_len)) < 0) 252 return -EINVAL; 253 254 if (inl_mask & 1) 255 ctx->adata.key_virt = ctx->key; 256 else 257 ctx->adata.key_dma = ctx->key_dma; 258 259 if (inl_mask & 2) 260 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 261 else 262 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 263 264 ctx->adata.key_inline = !!(inl_mask & 1); 265 ctx->cdata.key_inline = !!(inl_mask & 2); 266 267 /* aead_encrypt shared descriptor */ 268 desc = ctx->sh_desc_enc; 269 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, 270 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off, 271 false); 272 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 273 desc_bytes(desc), DMA_TO_DEVICE); 274 275 skip_enc: 276 /* 277 * Job Descriptor and Shared Descriptors 278 * must all fit into the 64-word Descriptor h/w Buffer 279 */ 280 if (desc_inline_query(DESC_AEAD_DEC_LEN + 281 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 282 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 283 ARRAY_SIZE(data_len)) < 0) 284 return -EINVAL; 285 286 if (inl_mask & 1) 287 ctx->adata.key_virt = ctx->key; 288 else 289 ctx->adata.key_dma = ctx->key_dma; 290 291 if (inl_mask & 2) 292 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 293 else 294 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 295 296 ctx->adata.key_inline = !!(inl_mask & 1); 297 ctx->cdata.key_inline = !!(inl_mask & 2); 298 299 /* aead_decrypt shared descriptor */ 300 desc = ctx->sh_desc_dec; 301 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, 302 ctx->authsize, alg->caam.geniv, is_rfc3686, 303 nonce, ctx1_iv_off, false); 304 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 305 desc_bytes(desc), DMA_TO_DEVICE); 306 307 if (!alg->caam.geniv) 308 goto skip_givenc; 309 310 /* 311 * Job Descriptor and Shared Descriptors 312 * must all fit into the 64-word Descriptor h/w Buffer 313 */ 314 if (desc_inline_query(DESC_AEAD_GIVENC_LEN + 315 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 316 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 317 ARRAY_SIZE(data_len)) < 0) 318 return -EINVAL; 319 320 if (inl_mask & 1) 321 ctx->adata.key_virt = ctx->key; 322 else 323 ctx->adata.key_dma = ctx->key_dma; 324 325 if (inl_mask & 2) 326 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 327 else 328 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 329 330 ctx->adata.key_inline = !!(inl_mask & 1); 331 ctx->cdata.key_inline = !!(inl_mask & 2); 332 333 /* aead_givencrypt shared descriptor */ 334 desc = ctx->sh_desc_enc; 335 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, 336 ctx->authsize, is_rfc3686, nonce, 337 ctx1_iv_off, false); 338 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 339 desc_bytes(desc), DMA_TO_DEVICE); 340 341 skip_givenc: 342 return 0; 343 } 344 345 static int aead_setauthsize(struct crypto_aead *authenc, 346 unsigned int authsize) 347 { 348 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 349 350 ctx->authsize = authsize; 351 aead_set_sh_desc(authenc); 352 353 return 0; 354 } 355 356 static int gcm_set_sh_desc(struct crypto_aead *aead) 357 { 358 struct caam_ctx *ctx = crypto_aead_ctx(aead); 359 struct device *jrdev = ctx->jrdev; 360 u32 *desc; 361 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 362 ctx->cdata.keylen; 363 364 if (!ctx->cdata.keylen || !ctx->authsize) 365 return 0; 366 367 /* 368 * AES GCM encrypt shared descriptor 369 * Job Descriptor and Shared Descriptor 370 * must fit into the 64-word Descriptor h/w Buffer 371 */ 372 if (rem_bytes >= DESC_GCM_ENC_LEN) { 373 ctx->cdata.key_inline = true; 374 ctx->cdata.key_virt = ctx->key; 375 } else { 376 ctx->cdata.key_inline = false; 377 ctx->cdata.key_dma = ctx->key_dma; 378 } 379 380 desc = ctx->sh_desc_enc; 381 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize); 382 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 383 desc_bytes(desc), DMA_TO_DEVICE); 384 385 /* 386 * Job Descriptor and Shared Descriptors 387 * must all fit into the 64-word Descriptor h/w Buffer 388 */ 389 if (rem_bytes >= DESC_GCM_DEC_LEN) { 390 ctx->cdata.key_inline = true; 391 ctx->cdata.key_virt = ctx->key; 392 } else { 393 ctx->cdata.key_inline = false; 394 ctx->cdata.key_dma = ctx->key_dma; 395 } 396 397 desc = ctx->sh_desc_dec; 398 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize); 399 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 400 desc_bytes(desc), DMA_TO_DEVICE); 401 402 return 0; 403 } 404 405 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize) 406 { 407 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 408 409 ctx->authsize = authsize; 410 gcm_set_sh_desc(authenc); 411 412 return 0; 413 } 414 415 static int rfc4106_set_sh_desc(struct crypto_aead *aead) 416 { 417 struct caam_ctx *ctx = crypto_aead_ctx(aead); 418 struct device *jrdev = ctx->jrdev; 419 u32 *desc; 420 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 421 ctx->cdata.keylen; 422 423 if (!ctx->cdata.keylen || !ctx->authsize) 424 return 0; 425 426 /* 427 * RFC4106 encrypt shared descriptor 428 * Job Descriptor and Shared Descriptor 429 * must fit into the 64-word Descriptor h/w Buffer 430 */ 431 if (rem_bytes >= DESC_RFC4106_ENC_LEN) { 432 ctx->cdata.key_inline = true; 433 ctx->cdata.key_virt = ctx->key; 434 } else { 435 ctx->cdata.key_inline = false; 436 ctx->cdata.key_dma = ctx->key_dma; 437 } 438 439 desc = ctx->sh_desc_enc; 440 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize); 441 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 442 desc_bytes(desc), DMA_TO_DEVICE); 443 444 /* 445 * Job Descriptor and Shared Descriptors 446 * must all fit into the 64-word Descriptor h/w Buffer 447 */ 448 if (rem_bytes >= DESC_RFC4106_DEC_LEN) { 449 ctx->cdata.key_inline = true; 450 ctx->cdata.key_virt = ctx->key; 451 } else { 452 ctx->cdata.key_inline = false; 453 ctx->cdata.key_dma = ctx->key_dma; 454 } 455 456 desc = ctx->sh_desc_dec; 457 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize); 458 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 459 desc_bytes(desc), DMA_TO_DEVICE); 460 461 return 0; 462 } 463 464 static int rfc4106_setauthsize(struct crypto_aead *authenc, 465 unsigned int authsize) 466 { 467 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 468 469 ctx->authsize = authsize; 470 rfc4106_set_sh_desc(authenc); 471 472 return 0; 473 } 474 475 static int rfc4543_set_sh_desc(struct crypto_aead *aead) 476 { 477 struct caam_ctx *ctx = crypto_aead_ctx(aead); 478 struct device *jrdev = ctx->jrdev; 479 u32 *desc; 480 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 481 ctx->cdata.keylen; 482 483 if (!ctx->cdata.keylen || !ctx->authsize) 484 return 0; 485 486 /* 487 * RFC4543 encrypt shared descriptor 488 * Job Descriptor and Shared Descriptor 489 * must fit into the 64-word Descriptor h/w Buffer 490 */ 491 if (rem_bytes >= DESC_RFC4543_ENC_LEN) { 492 ctx->cdata.key_inline = true; 493 ctx->cdata.key_virt = ctx->key; 494 } else { 495 ctx->cdata.key_inline = false; 496 ctx->cdata.key_dma = ctx->key_dma; 497 } 498 499 desc = ctx->sh_desc_enc; 500 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize); 501 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 502 desc_bytes(desc), DMA_TO_DEVICE); 503 504 /* 505 * Job Descriptor and Shared Descriptors 506 * must all fit into the 64-word Descriptor h/w Buffer 507 */ 508 if (rem_bytes >= DESC_RFC4543_DEC_LEN) { 509 ctx->cdata.key_inline = true; 510 ctx->cdata.key_virt = ctx->key; 511 } else { 512 ctx->cdata.key_inline = false; 513 ctx->cdata.key_dma = ctx->key_dma; 514 } 515 516 desc = ctx->sh_desc_dec; 517 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize); 518 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 519 desc_bytes(desc), DMA_TO_DEVICE); 520 521 return 0; 522 } 523 524 static int rfc4543_setauthsize(struct crypto_aead *authenc, 525 unsigned int authsize) 526 { 527 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 528 529 ctx->authsize = authsize; 530 rfc4543_set_sh_desc(authenc); 531 532 return 0; 533 } 534 535 static int aead_setkey(struct crypto_aead *aead, 536 const u8 *key, unsigned int keylen) 537 { 538 struct caam_ctx *ctx = crypto_aead_ctx(aead); 539 struct device *jrdev = ctx->jrdev; 540 struct crypto_authenc_keys keys; 541 int ret = 0; 542 543 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) 544 goto badkey; 545 546 #ifdef DEBUG 547 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n", 548 keys.authkeylen + keys.enckeylen, keys.enckeylen, 549 keys.authkeylen); 550 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 551 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 552 #endif 553 554 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, 555 keys.authkeylen, CAAM_MAX_KEY_SIZE - 556 keys.enckeylen); 557 if (ret) { 558 goto badkey; 559 } 560 561 /* postpend encryption key to auth split key */ 562 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); 563 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + 564 keys.enckeylen, DMA_TO_DEVICE); 565 #ifdef DEBUG 566 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ", 567 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, 568 ctx->adata.keylen_pad + keys.enckeylen, 1); 569 #endif 570 ctx->cdata.keylen = keys.enckeylen; 571 return aead_set_sh_desc(aead); 572 badkey: 573 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 574 return -EINVAL; 575 } 576 577 static int gcm_setkey(struct crypto_aead *aead, 578 const u8 *key, unsigned int keylen) 579 { 580 struct caam_ctx *ctx = crypto_aead_ctx(aead); 581 struct device *jrdev = ctx->jrdev; 582 583 #ifdef DEBUG 584 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 585 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 586 #endif 587 588 memcpy(ctx->key, key, keylen); 589 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE); 590 ctx->cdata.keylen = keylen; 591 592 return gcm_set_sh_desc(aead); 593 } 594 595 static int rfc4106_setkey(struct crypto_aead *aead, 596 const u8 *key, unsigned int keylen) 597 { 598 struct caam_ctx *ctx = crypto_aead_ctx(aead); 599 struct device *jrdev = ctx->jrdev; 600 601 if (keylen < 4) 602 return -EINVAL; 603 604 #ifdef DEBUG 605 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 606 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 607 #endif 608 609 memcpy(ctx->key, key, keylen); 610 611 /* 612 * The last four bytes of the key material are used as the salt value 613 * in the nonce. Update the AES key length. 614 */ 615 ctx->cdata.keylen = keylen - 4; 616 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 617 DMA_TO_DEVICE); 618 return rfc4106_set_sh_desc(aead); 619 } 620 621 static int rfc4543_setkey(struct crypto_aead *aead, 622 const u8 *key, unsigned int keylen) 623 { 624 struct caam_ctx *ctx = crypto_aead_ctx(aead); 625 struct device *jrdev = ctx->jrdev; 626 627 if (keylen < 4) 628 return -EINVAL; 629 630 #ifdef DEBUG 631 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 632 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 633 #endif 634 635 memcpy(ctx->key, key, keylen); 636 637 /* 638 * The last four bytes of the key material are used as the salt value 639 * in the nonce. Update the AES key length. 640 */ 641 ctx->cdata.keylen = keylen - 4; 642 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 643 DMA_TO_DEVICE); 644 return rfc4543_set_sh_desc(aead); 645 } 646 647 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher, 648 const u8 *key, unsigned int keylen) 649 { 650 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 651 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher); 652 const char *alg_name = crypto_tfm_alg_name(tfm); 653 struct device *jrdev = ctx->jrdev; 654 unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 655 u32 *desc; 656 u32 ctx1_iv_off = 0; 657 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 658 OP_ALG_AAI_CTR_MOD128); 659 const bool is_rfc3686 = (ctr_mode && 660 (strstr(alg_name, "rfc3686") != NULL)); 661 662 memcpy(ctx->key, key, keylen); 663 #ifdef DEBUG 664 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 665 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 666 #endif 667 /* 668 * AES-CTR needs to load IV in CONTEXT1 reg 669 * at an offset of 128bits (16bytes) 670 * CONTEXT1[255:128] = IV 671 */ 672 if (ctr_mode) 673 ctx1_iv_off = 16; 674 675 /* 676 * RFC3686 specific: 677 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER} 678 * | *key = {KEY, NONCE} 679 */ 680 if (is_rfc3686) { 681 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 682 keylen -= CTR_RFC3686_NONCE_SIZE; 683 } 684 685 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE); 686 ctx->cdata.keylen = keylen; 687 ctx->cdata.key_virt = ctx->key; 688 ctx->cdata.key_inline = true; 689 690 /* ablkcipher_encrypt shared descriptor */ 691 desc = ctx->sh_desc_enc; 692 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, 693 ctx1_iv_off); 694 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 695 desc_bytes(desc), DMA_TO_DEVICE); 696 697 /* ablkcipher_decrypt shared descriptor */ 698 desc = ctx->sh_desc_dec; 699 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, 700 ctx1_iv_off); 701 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 702 desc_bytes(desc), DMA_TO_DEVICE); 703 704 /* ablkcipher_givencrypt shared descriptor */ 705 desc = ctx->sh_desc_givenc; 706 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686, 707 ctx1_iv_off); 708 dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma, 709 desc_bytes(desc), DMA_TO_DEVICE); 710 711 return 0; 712 } 713 714 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher, 715 const u8 *key, unsigned int keylen) 716 { 717 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 718 struct device *jrdev = ctx->jrdev; 719 u32 *desc; 720 721 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) { 722 crypto_ablkcipher_set_flags(ablkcipher, 723 CRYPTO_TFM_RES_BAD_KEY_LEN); 724 dev_err(jrdev, "key size mismatch\n"); 725 return -EINVAL; 726 } 727 728 memcpy(ctx->key, key, keylen); 729 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE); 730 ctx->cdata.keylen = keylen; 731 ctx->cdata.key_virt = ctx->key; 732 ctx->cdata.key_inline = true; 733 734 /* xts_ablkcipher_encrypt shared descriptor */ 735 desc = ctx->sh_desc_enc; 736 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata); 737 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 738 desc_bytes(desc), DMA_TO_DEVICE); 739 740 /* xts_ablkcipher_decrypt shared descriptor */ 741 desc = ctx->sh_desc_dec; 742 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata); 743 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 744 desc_bytes(desc), DMA_TO_DEVICE); 745 746 return 0; 747 } 748 749 /* 750 * aead_edesc - s/w-extended aead descriptor 751 * @src_nents: number of segments in input s/w scatterlist 752 * @dst_nents: number of segments in output s/w scatterlist 753 * @sec4_sg_bytes: length of dma mapped sec4_sg space 754 * @sec4_sg_dma: bus physical mapped address of h/w link table 755 * @sec4_sg: pointer to h/w link table 756 * @hw_desc: the h/w job descriptor followed by any referenced link tables 757 */ 758 struct aead_edesc { 759 int src_nents; 760 int dst_nents; 761 int sec4_sg_bytes; 762 dma_addr_t sec4_sg_dma; 763 struct sec4_sg_entry *sec4_sg; 764 u32 hw_desc[]; 765 }; 766 767 /* 768 * ablkcipher_edesc - s/w-extended ablkcipher descriptor 769 * @src_nents: number of segments in input s/w scatterlist 770 * @dst_nents: number of segments in output s/w scatterlist 771 * @iv_dma: dma address of iv for checking continuity and link table 772 * @sec4_sg_bytes: length of dma mapped sec4_sg space 773 * @sec4_sg_dma: bus physical mapped address of h/w link table 774 * @sec4_sg: pointer to h/w link table 775 * @hw_desc: the h/w job descriptor followed by any referenced link tables 776 */ 777 struct ablkcipher_edesc { 778 int src_nents; 779 int dst_nents; 780 dma_addr_t iv_dma; 781 int sec4_sg_bytes; 782 dma_addr_t sec4_sg_dma; 783 struct sec4_sg_entry *sec4_sg; 784 u32 hw_desc[0]; 785 }; 786 787 static void caam_unmap(struct device *dev, struct scatterlist *src, 788 struct scatterlist *dst, int src_nents, 789 int dst_nents, 790 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, 791 int sec4_sg_bytes) 792 { 793 if (dst != src) { 794 if (src_nents) 795 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); 796 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); 797 } else { 798 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); 799 } 800 801 if (iv_dma) 802 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE); 803 if (sec4_sg_bytes) 804 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes, 805 DMA_TO_DEVICE); 806 } 807 808 static void aead_unmap(struct device *dev, 809 struct aead_edesc *edesc, 810 struct aead_request *req) 811 { 812 caam_unmap(dev, req->src, req->dst, 813 edesc->src_nents, edesc->dst_nents, 0, 0, 814 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 815 } 816 817 static void ablkcipher_unmap(struct device *dev, 818 struct ablkcipher_edesc *edesc, 819 struct ablkcipher_request *req) 820 { 821 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 822 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 823 824 caam_unmap(dev, req->src, req->dst, 825 edesc->src_nents, edesc->dst_nents, 826 edesc->iv_dma, ivsize, 827 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 828 } 829 830 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 831 void *context) 832 { 833 struct aead_request *req = context; 834 struct aead_edesc *edesc; 835 836 #ifdef DEBUG 837 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 838 #endif 839 840 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 841 842 if (err) 843 caam_jr_strstatus(jrdev, err); 844 845 aead_unmap(jrdev, edesc, req); 846 847 kfree(edesc); 848 849 aead_request_complete(req, err); 850 } 851 852 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 853 void *context) 854 { 855 struct aead_request *req = context; 856 struct aead_edesc *edesc; 857 858 #ifdef DEBUG 859 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 860 #endif 861 862 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 863 864 if (err) 865 caam_jr_strstatus(jrdev, err); 866 867 aead_unmap(jrdev, edesc, req); 868 869 /* 870 * verify hw auth check passed else return -EBADMSG 871 */ 872 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK) 873 err = -EBADMSG; 874 875 kfree(edesc); 876 877 aead_request_complete(req, err); 878 } 879 880 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 881 void *context) 882 { 883 struct ablkcipher_request *req = context; 884 struct ablkcipher_edesc *edesc; 885 #ifdef DEBUG 886 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 887 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 888 889 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 890 #endif 891 892 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]); 893 894 if (err) 895 caam_jr_strstatus(jrdev, err); 896 897 #ifdef DEBUG 898 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 899 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 900 edesc->src_nents > 1 ? 100 : ivsize, 1); 901 dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ", 902 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 903 edesc->dst_nents > 1 ? 100 : req->nbytes, 1); 904 #endif 905 906 ablkcipher_unmap(jrdev, edesc, req); 907 kfree(edesc); 908 909 ablkcipher_request_complete(req, err); 910 } 911 912 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 913 void *context) 914 { 915 struct ablkcipher_request *req = context; 916 struct ablkcipher_edesc *edesc; 917 #ifdef DEBUG 918 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 919 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 920 921 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 922 #endif 923 924 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]); 925 if (err) 926 caam_jr_strstatus(jrdev, err); 927 928 #ifdef DEBUG 929 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 930 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 931 ivsize, 1); 932 dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ", 933 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 934 edesc->dst_nents > 1 ? 100 : req->nbytes, 1); 935 #endif 936 937 ablkcipher_unmap(jrdev, edesc, req); 938 kfree(edesc); 939 940 ablkcipher_request_complete(req, err); 941 } 942 943 /* 944 * Fill in aead job descriptor 945 */ 946 static void init_aead_job(struct aead_request *req, 947 struct aead_edesc *edesc, 948 bool all_contig, bool encrypt) 949 { 950 struct crypto_aead *aead = crypto_aead_reqtfm(req); 951 struct caam_ctx *ctx = crypto_aead_ctx(aead); 952 int authsize = ctx->authsize; 953 u32 *desc = edesc->hw_desc; 954 u32 out_options, in_options; 955 dma_addr_t dst_dma, src_dma; 956 int len, sec4_sg_index = 0; 957 dma_addr_t ptr; 958 u32 *sh_desc; 959 960 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 961 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 962 963 len = desc_len(sh_desc); 964 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 965 966 if (all_contig) { 967 src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0; 968 in_options = 0; 969 } else { 970 src_dma = edesc->sec4_sg_dma; 971 sec4_sg_index += edesc->src_nents; 972 in_options = LDST_SGF; 973 } 974 975 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, 976 in_options); 977 978 dst_dma = src_dma; 979 out_options = in_options; 980 981 if (unlikely(req->src != req->dst)) { 982 if (edesc->dst_nents == 1) { 983 dst_dma = sg_dma_address(req->dst); 984 } else { 985 dst_dma = edesc->sec4_sg_dma + 986 sec4_sg_index * 987 sizeof(struct sec4_sg_entry); 988 out_options = LDST_SGF; 989 } 990 } 991 992 if (encrypt) 993 append_seq_out_ptr(desc, dst_dma, 994 req->assoclen + req->cryptlen + authsize, 995 out_options); 996 else 997 append_seq_out_ptr(desc, dst_dma, 998 req->assoclen + req->cryptlen - authsize, 999 out_options); 1000 1001 /* REG3 = assoclen */ 1002 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1003 } 1004 1005 static void init_gcm_job(struct aead_request *req, 1006 struct aead_edesc *edesc, 1007 bool all_contig, bool encrypt) 1008 { 1009 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1010 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1011 unsigned int ivsize = crypto_aead_ivsize(aead); 1012 u32 *desc = edesc->hw_desc; 1013 bool generic_gcm = (ivsize == 12); 1014 unsigned int last; 1015 1016 init_aead_job(req, edesc, all_contig, encrypt); 1017 1018 /* BUG This should not be specific to generic GCM. */ 1019 last = 0; 1020 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) 1021 last = FIFOLD_TYPE_LAST1; 1022 1023 /* Read GCM IV */ 1024 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE | 1025 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last); 1026 /* Append Salt */ 1027 if (!generic_gcm) 1028 append_data(desc, ctx->key + ctx->cdata.keylen, 4); 1029 /* Append IV */ 1030 append_data(desc, req->iv, ivsize); 1031 /* End of blank commands */ 1032 } 1033 1034 static void init_authenc_job(struct aead_request *req, 1035 struct aead_edesc *edesc, 1036 bool all_contig, bool encrypt) 1037 { 1038 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1039 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 1040 struct caam_aead_alg, aead); 1041 unsigned int ivsize = crypto_aead_ivsize(aead); 1042 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1043 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 1044 OP_ALG_AAI_CTR_MOD128); 1045 const bool is_rfc3686 = alg->caam.rfc3686; 1046 u32 *desc = edesc->hw_desc; 1047 u32 ivoffset = 0; 1048 1049 /* 1050 * AES-CTR needs to load IV in CONTEXT1 reg 1051 * at an offset of 128bits (16bytes) 1052 * CONTEXT1[255:128] = IV 1053 */ 1054 if (ctr_mode) 1055 ivoffset = 16; 1056 1057 /* 1058 * RFC3686 specific: 1059 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 1060 */ 1061 if (is_rfc3686) 1062 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE; 1063 1064 init_aead_job(req, edesc, all_contig, encrypt); 1065 1066 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) 1067 append_load_as_imm(desc, req->iv, ivsize, 1068 LDST_CLASS_1_CCB | 1069 LDST_SRCDST_BYTE_CONTEXT | 1070 (ivoffset << LDST_OFFSET_SHIFT)); 1071 } 1072 1073 /* 1074 * Fill in ablkcipher job descriptor 1075 */ 1076 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr, 1077 struct ablkcipher_edesc *edesc, 1078 struct ablkcipher_request *req, 1079 bool iv_contig) 1080 { 1081 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1082 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1083 u32 *desc = edesc->hw_desc; 1084 u32 out_options = 0, in_options; 1085 dma_addr_t dst_dma, src_dma; 1086 int len, sec4_sg_index = 0; 1087 1088 #ifdef DEBUG 1089 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ", 1090 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 1091 ivsize, 1); 1092 pr_err("asked=%d, nbytes%d\n", 1093 (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes); 1094 dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ", 1095 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1096 edesc->src_nents > 1 ? 100 : req->nbytes, 1); 1097 #endif 1098 1099 len = desc_len(sh_desc); 1100 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1101 1102 if (iv_contig) { 1103 src_dma = edesc->iv_dma; 1104 in_options = 0; 1105 } else { 1106 src_dma = edesc->sec4_sg_dma; 1107 sec4_sg_index += edesc->src_nents + 1; 1108 in_options = LDST_SGF; 1109 } 1110 append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options); 1111 1112 if (likely(req->src == req->dst)) { 1113 if (edesc->src_nents == 1 && iv_contig) { 1114 dst_dma = sg_dma_address(req->src); 1115 } else { 1116 dst_dma = edesc->sec4_sg_dma + 1117 sizeof(struct sec4_sg_entry); 1118 out_options = LDST_SGF; 1119 } 1120 } else { 1121 if (edesc->dst_nents == 1) { 1122 dst_dma = sg_dma_address(req->dst); 1123 } else { 1124 dst_dma = edesc->sec4_sg_dma + 1125 sec4_sg_index * sizeof(struct sec4_sg_entry); 1126 out_options = LDST_SGF; 1127 } 1128 } 1129 append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options); 1130 } 1131 1132 /* 1133 * Fill in ablkcipher givencrypt job descriptor 1134 */ 1135 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr, 1136 struct ablkcipher_edesc *edesc, 1137 struct ablkcipher_request *req, 1138 bool iv_contig) 1139 { 1140 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1141 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1142 u32 *desc = edesc->hw_desc; 1143 u32 out_options, in_options; 1144 dma_addr_t dst_dma, src_dma; 1145 int len, sec4_sg_index = 0; 1146 1147 #ifdef DEBUG 1148 print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ", 1149 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 1150 ivsize, 1); 1151 dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ", 1152 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1153 edesc->src_nents > 1 ? 100 : req->nbytes, 1); 1154 #endif 1155 1156 len = desc_len(sh_desc); 1157 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1158 1159 if (edesc->src_nents == 1) { 1160 src_dma = sg_dma_address(req->src); 1161 in_options = 0; 1162 } else { 1163 src_dma = edesc->sec4_sg_dma; 1164 sec4_sg_index += edesc->src_nents; 1165 in_options = LDST_SGF; 1166 } 1167 append_seq_in_ptr(desc, src_dma, req->nbytes, in_options); 1168 1169 if (iv_contig) { 1170 dst_dma = edesc->iv_dma; 1171 out_options = 0; 1172 } else { 1173 dst_dma = edesc->sec4_sg_dma + 1174 sec4_sg_index * sizeof(struct sec4_sg_entry); 1175 out_options = LDST_SGF; 1176 } 1177 append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options); 1178 } 1179 1180 /* 1181 * allocate and map the aead extended descriptor 1182 */ 1183 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, 1184 int desc_bytes, bool *all_contig_ptr, 1185 bool encrypt) 1186 { 1187 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1188 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1189 struct device *jrdev = ctx->jrdev; 1190 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1191 CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 1192 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1193 struct aead_edesc *edesc; 1194 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; 1195 unsigned int authsize = ctx->authsize; 1196 1197 if (unlikely(req->dst != req->src)) { 1198 src_nents = sg_nents_for_len(req->src, req->assoclen + 1199 req->cryptlen); 1200 if (unlikely(src_nents < 0)) { 1201 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1202 req->assoclen + req->cryptlen); 1203 return ERR_PTR(src_nents); 1204 } 1205 1206 dst_nents = sg_nents_for_len(req->dst, req->assoclen + 1207 req->cryptlen + 1208 (encrypt ? authsize : 1209 (-authsize))); 1210 if (unlikely(dst_nents < 0)) { 1211 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1212 req->assoclen + req->cryptlen + 1213 (encrypt ? authsize : (-authsize))); 1214 return ERR_PTR(dst_nents); 1215 } 1216 } else { 1217 src_nents = sg_nents_for_len(req->src, req->assoclen + 1218 req->cryptlen + 1219 (encrypt ? authsize : 0)); 1220 if (unlikely(src_nents < 0)) { 1221 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1222 req->assoclen + req->cryptlen + 1223 (encrypt ? authsize : 0)); 1224 return ERR_PTR(src_nents); 1225 } 1226 } 1227 1228 if (likely(req->src == req->dst)) { 1229 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1230 DMA_BIDIRECTIONAL); 1231 if (unlikely(!mapped_src_nents)) { 1232 dev_err(jrdev, "unable to map source\n"); 1233 return ERR_PTR(-ENOMEM); 1234 } 1235 } else { 1236 /* Cover also the case of null (zero length) input data */ 1237 if (src_nents) { 1238 mapped_src_nents = dma_map_sg(jrdev, req->src, 1239 src_nents, DMA_TO_DEVICE); 1240 if (unlikely(!mapped_src_nents)) { 1241 dev_err(jrdev, "unable to map source\n"); 1242 return ERR_PTR(-ENOMEM); 1243 } 1244 } else { 1245 mapped_src_nents = 0; 1246 } 1247 1248 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1249 DMA_FROM_DEVICE); 1250 if (unlikely(!mapped_dst_nents)) { 1251 dev_err(jrdev, "unable to map destination\n"); 1252 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1253 return ERR_PTR(-ENOMEM); 1254 } 1255 } 1256 1257 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0; 1258 sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0; 1259 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry); 1260 1261 /* allocate space for base edesc and hw desc commands, link tables */ 1262 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1263 GFP_DMA | flags); 1264 if (!edesc) { 1265 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1266 0, 0, 0); 1267 return ERR_PTR(-ENOMEM); 1268 } 1269 1270 edesc->src_nents = src_nents; 1271 edesc->dst_nents = dst_nents; 1272 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + 1273 desc_bytes; 1274 *all_contig_ptr = !(mapped_src_nents > 1); 1275 1276 sec4_sg_index = 0; 1277 if (mapped_src_nents > 1) { 1278 sg_to_sec4_sg_last(req->src, mapped_src_nents, 1279 edesc->sec4_sg + sec4_sg_index, 0); 1280 sec4_sg_index += mapped_src_nents; 1281 } 1282 if (mapped_dst_nents > 1) { 1283 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1284 edesc->sec4_sg + sec4_sg_index, 0); 1285 } 1286 1287 if (!sec4_sg_bytes) 1288 return edesc; 1289 1290 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1291 sec4_sg_bytes, DMA_TO_DEVICE); 1292 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1293 dev_err(jrdev, "unable to map S/G table\n"); 1294 aead_unmap(jrdev, edesc, req); 1295 kfree(edesc); 1296 return ERR_PTR(-ENOMEM); 1297 } 1298 1299 edesc->sec4_sg_bytes = sec4_sg_bytes; 1300 1301 return edesc; 1302 } 1303 1304 static int gcm_encrypt(struct aead_request *req) 1305 { 1306 struct aead_edesc *edesc; 1307 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1308 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1309 struct device *jrdev = ctx->jrdev; 1310 bool all_contig; 1311 u32 *desc; 1312 int ret = 0; 1313 1314 /* allocate extended descriptor */ 1315 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true); 1316 if (IS_ERR(edesc)) 1317 return PTR_ERR(edesc); 1318 1319 /* Create and submit job descriptor */ 1320 init_gcm_job(req, edesc, all_contig, true); 1321 #ifdef DEBUG 1322 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1323 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1324 desc_bytes(edesc->hw_desc), 1); 1325 #endif 1326 1327 desc = edesc->hw_desc; 1328 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1329 if (!ret) { 1330 ret = -EINPROGRESS; 1331 } else { 1332 aead_unmap(jrdev, edesc, req); 1333 kfree(edesc); 1334 } 1335 1336 return ret; 1337 } 1338 1339 static int ipsec_gcm_encrypt(struct aead_request *req) 1340 { 1341 if (req->assoclen < 8) 1342 return -EINVAL; 1343 1344 return gcm_encrypt(req); 1345 } 1346 1347 static int aead_encrypt(struct aead_request *req) 1348 { 1349 struct aead_edesc *edesc; 1350 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1351 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1352 struct device *jrdev = ctx->jrdev; 1353 bool all_contig; 1354 u32 *desc; 1355 int ret = 0; 1356 1357 /* allocate extended descriptor */ 1358 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1359 &all_contig, true); 1360 if (IS_ERR(edesc)) 1361 return PTR_ERR(edesc); 1362 1363 /* Create and submit job descriptor */ 1364 init_authenc_job(req, edesc, all_contig, true); 1365 #ifdef DEBUG 1366 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1367 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1368 desc_bytes(edesc->hw_desc), 1); 1369 #endif 1370 1371 desc = edesc->hw_desc; 1372 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1373 if (!ret) { 1374 ret = -EINPROGRESS; 1375 } else { 1376 aead_unmap(jrdev, edesc, req); 1377 kfree(edesc); 1378 } 1379 1380 return ret; 1381 } 1382 1383 static int gcm_decrypt(struct aead_request *req) 1384 { 1385 struct aead_edesc *edesc; 1386 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1387 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1388 struct device *jrdev = ctx->jrdev; 1389 bool all_contig; 1390 u32 *desc; 1391 int ret = 0; 1392 1393 /* allocate extended descriptor */ 1394 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false); 1395 if (IS_ERR(edesc)) 1396 return PTR_ERR(edesc); 1397 1398 /* Create and submit job descriptor*/ 1399 init_gcm_job(req, edesc, all_contig, false); 1400 #ifdef DEBUG 1401 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1402 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1403 desc_bytes(edesc->hw_desc), 1); 1404 #endif 1405 1406 desc = edesc->hw_desc; 1407 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1408 if (!ret) { 1409 ret = -EINPROGRESS; 1410 } else { 1411 aead_unmap(jrdev, edesc, req); 1412 kfree(edesc); 1413 } 1414 1415 return ret; 1416 } 1417 1418 static int ipsec_gcm_decrypt(struct aead_request *req) 1419 { 1420 if (req->assoclen < 8) 1421 return -EINVAL; 1422 1423 return gcm_decrypt(req); 1424 } 1425 1426 static int aead_decrypt(struct aead_request *req) 1427 { 1428 struct aead_edesc *edesc; 1429 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1430 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1431 struct device *jrdev = ctx->jrdev; 1432 bool all_contig; 1433 u32 *desc; 1434 int ret = 0; 1435 1436 #ifdef DEBUG 1437 dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ", 1438 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1439 req->assoclen + req->cryptlen, 1); 1440 #endif 1441 1442 /* allocate extended descriptor */ 1443 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1444 &all_contig, false); 1445 if (IS_ERR(edesc)) 1446 return PTR_ERR(edesc); 1447 1448 /* Create and submit job descriptor*/ 1449 init_authenc_job(req, edesc, all_contig, false); 1450 #ifdef DEBUG 1451 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1452 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1453 desc_bytes(edesc->hw_desc), 1); 1454 #endif 1455 1456 desc = edesc->hw_desc; 1457 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1458 if (!ret) { 1459 ret = -EINPROGRESS; 1460 } else { 1461 aead_unmap(jrdev, edesc, req); 1462 kfree(edesc); 1463 } 1464 1465 return ret; 1466 } 1467 1468 /* 1469 * allocate and map the ablkcipher extended descriptor for ablkcipher 1470 */ 1471 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request 1472 *req, int desc_bytes, 1473 bool *iv_contig_out) 1474 { 1475 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1476 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1477 struct device *jrdev = ctx->jrdev; 1478 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1479 CRYPTO_TFM_REQ_MAY_SLEEP)) ? 1480 GFP_KERNEL : GFP_ATOMIC; 1481 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1482 struct ablkcipher_edesc *edesc; 1483 dma_addr_t iv_dma = 0; 1484 bool in_contig; 1485 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1486 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1487 1488 src_nents = sg_nents_for_len(req->src, req->nbytes); 1489 if (unlikely(src_nents < 0)) { 1490 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1491 req->nbytes); 1492 return ERR_PTR(src_nents); 1493 } 1494 1495 if (req->dst != req->src) { 1496 dst_nents = sg_nents_for_len(req->dst, req->nbytes); 1497 if (unlikely(dst_nents < 0)) { 1498 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1499 req->nbytes); 1500 return ERR_PTR(dst_nents); 1501 } 1502 } 1503 1504 if (likely(req->src == req->dst)) { 1505 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1506 DMA_BIDIRECTIONAL); 1507 if (unlikely(!mapped_src_nents)) { 1508 dev_err(jrdev, "unable to map source\n"); 1509 return ERR_PTR(-ENOMEM); 1510 } 1511 } else { 1512 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1513 DMA_TO_DEVICE); 1514 if (unlikely(!mapped_src_nents)) { 1515 dev_err(jrdev, "unable to map source\n"); 1516 return ERR_PTR(-ENOMEM); 1517 } 1518 1519 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1520 DMA_FROM_DEVICE); 1521 if (unlikely(!mapped_dst_nents)) { 1522 dev_err(jrdev, "unable to map destination\n"); 1523 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1524 return ERR_PTR(-ENOMEM); 1525 } 1526 } 1527 1528 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE); 1529 if (dma_mapping_error(jrdev, iv_dma)) { 1530 dev_err(jrdev, "unable to map IV\n"); 1531 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1532 0, 0, 0); 1533 return ERR_PTR(-ENOMEM); 1534 } 1535 1536 if (mapped_src_nents == 1 && 1537 iv_dma + ivsize == sg_dma_address(req->src)) { 1538 in_contig = true; 1539 sec4_sg_ents = 0; 1540 } else { 1541 in_contig = false; 1542 sec4_sg_ents = 1 + mapped_src_nents; 1543 } 1544 dst_sg_idx = sec4_sg_ents; 1545 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0; 1546 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1547 1548 /* allocate space for base edesc and hw desc commands, link tables */ 1549 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1550 GFP_DMA | flags); 1551 if (!edesc) { 1552 dev_err(jrdev, "could not allocate extended descriptor\n"); 1553 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1554 iv_dma, ivsize, 0, 0); 1555 return ERR_PTR(-ENOMEM); 1556 } 1557 1558 edesc->src_nents = src_nents; 1559 edesc->dst_nents = dst_nents; 1560 edesc->sec4_sg_bytes = sec4_sg_bytes; 1561 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) + 1562 desc_bytes; 1563 1564 if (!in_contig) { 1565 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); 1566 sg_to_sec4_sg_last(req->src, mapped_src_nents, 1567 edesc->sec4_sg + 1, 0); 1568 } 1569 1570 if (mapped_dst_nents > 1) { 1571 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1572 edesc->sec4_sg + dst_sg_idx, 0); 1573 } 1574 1575 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1576 sec4_sg_bytes, DMA_TO_DEVICE); 1577 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1578 dev_err(jrdev, "unable to map S/G table\n"); 1579 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1580 iv_dma, ivsize, 0, 0); 1581 kfree(edesc); 1582 return ERR_PTR(-ENOMEM); 1583 } 1584 1585 edesc->iv_dma = iv_dma; 1586 1587 #ifdef DEBUG 1588 print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ", 1589 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1590 sec4_sg_bytes, 1); 1591 #endif 1592 1593 *iv_contig_out = in_contig; 1594 return edesc; 1595 } 1596 1597 static int ablkcipher_encrypt(struct ablkcipher_request *req) 1598 { 1599 struct ablkcipher_edesc *edesc; 1600 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1601 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1602 struct device *jrdev = ctx->jrdev; 1603 bool iv_contig; 1604 u32 *desc; 1605 int ret = 0; 1606 1607 /* allocate extended descriptor */ 1608 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * 1609 CAAM_CMD_SZ, &iv_contig); 1610 if (IS_ERR(edesc)) 1611 return PTR_ERR(edesc); 1612 1613 /* Create and submit job descriptor*/ 1614 init_ablkcipher_job(ctx->sh_desc_enc, 1615 ctx->sh_desc_enc_dma, edesc, req, iv_contig); 1616 #ifdef DEBUG 1617 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ", 1618 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1619 desc_bytes(edesc->hw_desc), 1); 1620 #endif 1621 desc = edesc->hw_desc; 1622 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); 1623 1624 if (!ret) { 1625 ret = -EINPROGRESS; 1626 } else { 1627 ablkcipher_unmap(jrdev, edesc, req); 1628 kfree(edesc); 1629 } 1630 1631 return ret; 1632 } 1633 1634 static int ablkcipher_decrypt(struct ablkcipher_request *req) 1635 { 1636 struct ablkcipher_edesc *edesc; 1637 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1638 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1639 struct device *jrdev = ctx->jrdev; 1640 bool iv_contig; 1641 u32 *desc; 1642 int ret = 0; 1643 1644 /* allocate extended descriptor */ 1645 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * 1646 CAAM_CMD_SZ, &iv_contig); 1647 if (IS_ERR(edesc)) 1648 return PTR_ERR(edesc); 1649 1650 /* Create and submit job descriptor*/ 1651 init_ablkcipher_job(ctx->sh_desc_dec, 1652 ctx->sh_desc_dec_dma, edesc, req, iv_contig); 1653 desc = edesc->hw_desc; 1654 #ifdef DEBUG 1655 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ", 1656 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1657 desc_bytes(edesc->hw_desc), 1); 1658 #endif 1659 1660 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req); 1661 if (!ret) { 1662 ret = -EINPROGRESS; 1663 } else { 1664 ablkcipher_unmap(jrdev, edesc, req); 1665 kfree(edesc); 1666 } 1667 1668 return ret; 1669 } 1670 1671 /* 1672 * allocate and map the ablkcipher extended descriptor 1673 * for ablkcipher givencrypt 1674 */ 1675 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc( 1676 struct skcipher_givcrypt_request *greq, 1677 int desc_bytes, 1678 bool *iv_contig_out) 1679 { 1680 struct ablkcipher_request *req = &greq->creq; 1681 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1682 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1683 struct device *jrdev = ctx->jrdev; 1684 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1685 CRYPTO_TFM_REQ_MAY_SLEEP)) ? 1686 GFP_KERNEL : GFP_ATOMIC; 1687 int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents; 1688 struct ablkcipher_edesc *edesc; 1689 dma_addr_t iv_dma = 0; 1690 bool out_contig; 1691 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1692 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1693 1694 src_nents = sg_nents_for_len(req->src, req->nbytes); 1695 if (unlikely(src_nents < 0)) { 1696 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1697 req->nbytes); 1698 return ERR_PTR(src_nents); 1699 } 1700 1701 if (likely(req->src == req->dst)) { 1702 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1703 DMA_BIDIRECTIONAL); 1704 if (unlikely(!mapped_src_nents)) { 1705 dev_err(jrdev, "unable to map source\n"); 1706 return ERR_PTR(-ENOMEM); 1707 } 1708 1709 dst_nents = src_nents; 1710 mapped_dst_nents = src_nents; 1711 } else { 1712 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1713 DMA_TO_DEVICE); 1714 if (unlikely(!mapped_src_nents)) { 1715 dev_err(jrdev, "unable to map source\n"); 1716 return ERR_PTR(-ENOMEM); 1717 } 1718 1719 dst_nents = sg_nents_for_len(req->dst, req->nbytes); 1720 if (unlikely(dst_nents < 0)) { 1721 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1722 req->nbytes); 1723 return ERR_PTR(dst_nents); 1724 } 1725 1726 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1727 DMA_FROM_DEVICE); 1728 if (unlikely(!mapped_dst_nents)) { 1729 dev_err(jrdev, "unable to map destination\n"); 1730 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1731 return ERR_PTR(-ENOMEM); 1732 } 1733 } 1734 1735 /* 1736 * Check if iv can be contiguous with source and destination. 1737 * If so, include it. If not, create scatterlist. 1738 */ 1739 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE); 1740 if (dma_mapping_error(jrdev, iv_dma)) { 1741 dev_err(jrdev, "unable to map IV\n"); 1742 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1743 0, 0, 0); 1744 return ERR_PTR(-ENOMEM); 1745 } 1746 1747 sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0; 1748 dst_sg_idx = sec4_sg_ents; 1749 if (mapped_dst_nents == 1 && 1750 iv_dma + ivsize == sg_dma_address(req->dst)) { 1751 out_contig = true; 1752 } else { 1753 out_contig = false; 1754 sec4_sg_ents += 1 + mapped_dst_nents; 1755 } 1756 1757 /* allocate space for base edesc and hw desc commands, link tables */ 1758 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1759 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1760 GFP_DMA | flags); 1761 if (!edesc) { 1762 dev_err(jrdev, "could not allocate extended descriptor\n"); 1763 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1764 iv_dma, ivsize, 0, 0); 1765 return ERR_PTR(-ENOMEM); 1766 } 1767 1768 edesc->src_nents = src_nents; 1769 edesc->dst_nents = dst_nents; 1770 edesc->sec4_sg_bytes = sec4_sg_bytes; 1771 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) + 1772 desc_bytes; 1773 1774 if (mapped_src_nents > 1) 1775 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg, 1776 0); 1777 1778 if (!out_contig) { 1779 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx, 1780 iv_dma, ivsize, 0); 1781 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1782 edesc->sec4_sg + dst_sg_idx + 1, 0); 1783 } 1784 1785 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1786 sec4_sg_bytes, DMA_TO_DEVICE); 1787 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1788 dev_err(jrdev, "unable to map S/G table\n"); 1789 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1790 iv_dma, ivsize, 0, 0); 1791 kfree(edesc); 1792 return ERR_PTR(-ENOMEM); 1793 } 1794 edesc->iv_dma = iv_dma; 1795 1796 #ifdef DEBUG 1797 print_hex_dump(KERN_ERR, 1798 "ablkcipher sec4_sg@" __stringify(__LINE__) ": ", 1799 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1800 sec4_sg_bytes, 1); 1801 #endif 1802 1803 *iv_contig_out = out_contig; 1804 return edesc; 1805 } 1806 1807 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq) 1808 { 1809 struct ablkcipher_request *req = &creq->creq; 1810 struct ablkcipher_edesc *edesc; 1811 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1812 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1813 struct device *jrdev = ctx->jrdev; 1814 bool iv_contig = false; 1815 u32 *desc; 1816 int ret = 0; 1817 1818 /* allocate extended descriptor */ 1819 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN * 1820 CAAM_CMD_SZ, &iv_contig); 1821 if (IS_ERR(edesc)) 1822 return PTR_ERR(edesc); 1823 1824 /* Create and submit job descriptor*/ 1825 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma, 1826 edesc, req, iv_contig); 1827 #ifdef DEBUG 1828 print_hex_dump(KERN_ERR, 1829 "ablkcipher jobdesc@" __stringify(__LINE__) ": ", 1830 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1831 desc_bytes(edesc->hw_desc), 1); 1832 #endif 1833 desc = edesc->hw_desc; 1834 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); 1835 1836 if (!ret) { 1837 ret = -EINPROGRESS; 1838 } else { 1839 ablkcipher_unmap(jrdev, edesc, req); 1840 kfree(edesc); 1841 } 1842 1843 return ret; 1844 } 1845 1846 #define template_aead template_u.aead 1847 #define template_ablkcipher template_u.ablkcipher 1848 struct caam_alg_template { 1849 char name[CRYPTO_MAX_ALG_NAME]; 1850 char driver_name[CRYPTO_MAX_ALG_NAME]; 1851 unsigned int blocksize; 1852 u32 type; 1853 union { 1854 struct ablkcipher_alg ablkcipher; 1855 } template_u; 1856 u32 class1_alg_type; 1857 u32 class2_alg_type; 1858 }; 1859 1860 static struct caam_alg_template driver_algs[] = { 1861 /* ablkcipher descriptor */ 1862 { 1863 .name = "cbc(aes)", 1864 .driver_name = "cbc-aes-caam", 1865 .blocksize = AES_BLOCK_SIZE, 1866 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1867 .template_ablkcipher = { 1868 .setkey = ablkcipher_setkey, 1869 .encrypt = ablkcipher_encrypt, 1870 .decrypt = ablkcipher_decrypt, 1871 .givencrypt = ablkcipher_givencrypt, 1872 .geniv = "<built-in>", 1873 .min_keysize = AES_MIN_KEY_SIZE, 1874 .max_keysize = AES_MAX_KEY_SIZE, 1875 .ivsize = AES_BLOCK_SIZE, 1876 }, 1877 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1878 }, 1879 { 1880 .name = "cbc(des3_ede)", 1881 .driver_name = "cbc-3des-caam", 1882 .blocksize = DES3_EDE_BLOCK_SIZE, 1883 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1884 .template_ablkcipher = { 1885 .setkey = ablkcipher_setkey, 1886 .encrypt = ablkcipher_encrypt, 1887 .decrypt = ablkcipher_decrypt, 1888 .givencrypt = ablkcipher_givencrypt, 1889 .geniv = "<built-in>", 1890 .min_keysize = DES3_EDE_KEY_SIZE, 1891 .max_keysize = DES3_EDE_KEY_SIZE, 1892 .ivsize = DES3_EDE_BLOCK_SIZE, 1893 }, 1894 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 1895 }, 1896 { 1897 .name = "cbc(des)", 1898 .driver_name = "cbc-des-caam", 1899 .blocksize = DES_BLOCK_SIZE, 1900 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1901 .template_ablkcipher = { 1902 .setkey = ablkcipher_setkey, 1903 .encrypt = ablkcipher_encrypt, 1904 .decrypt = ablkcipher_decrypt, 1905 .givencrypt = ablkcipher_givencrypt, 1906 .geniv = "<built-in>", 1907 .min_keysize = DES_KEY_SIZE, 1908 .max_keysize = DES_KEY_SIZE, 1909 .ivsize = DES_BLOCK_SIZE, 1910 }, 1911 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 1912 }, 1913 { 1914 .name = "ctr(aes)", 1915 .driver_name = "ctr-aes-caam", 1916 .blocksize = 1, 1917 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1918 .template_ablkcipher = { 1919 .setkey = ablkcipher_setkey, 1920 .encrypt = ablkcipher_encrypt, 1921 .decrypt = ablkcipher_decrypt, 1922 .geniv = "chainiv", 1923 .min_keysize = AES_MIN_KEY_SIZE, 1924 .max_keysize = AES_MAX_KEY_SIZE, 1925 .ivsize = AES_BLOCK_SIZE, 1926 }, 1927 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128, 1928 }, 1929 { 1930 .name = "rfc3686(ctr(aes))", 1931 .driver_name = "rfc3686-ctr-aes-caam", 1932 .blocksize = 1, 1933 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1934 .template_ablkcipher = { 1935 .setkey = ablkcipher_setkey, 1936 .encrypt = ablkcipher_encrypt, 1937 .decrypt = ablkcipher_decrypt, 1938 .givencrypt = ablkcipher_givencrypt, 1939 .geniv = "<built-in>", 1940 .min_keysize = AES_MIN_KEY_SIZE + 1941 CTR_RFC3686_NONCE_SIZE, 1942 .max_keysize = AES_MAX_KEY_SIZE + 1943 CTR_RFC3686_NONCE_SIZE, 1944 .ivsize = CTR_RFC3686_IV_SIZE, 1945 }, 1946 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128, 1947 }, 1948 { 1949 .name = "xts(aes)", 1950 .driver_name = "xts-aes-caam", 1951 .blocksize = AES_BLOCK_SIZE, 1952 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1953 .template_ablkcipher = { 1954 .setkey = xts_ablkcipher_setkey, 1955 .encrypt = ablkcipher_encrypt, 1956 .decrypt = ablkcipher_decrypt, 1957 .geniv = "eseqiv", 1958 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1959 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1960 .ivsize = AES_BLOCK_SIZE, 1961 }, 1962 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1963 }, 1964 }; 1965 1966 static struct caam_aead_alg driver_aeads[] = { 1967 { 1968 .aead = { 1969 .base = { 1970 .cra_name = "rfc4106(gcm(aes))", 1971 .cra_driver_name = "rfc4106-gcm-aes-caam", 1972 .cra_blocksize = 1, 1973 }, 1974 .setkey = rfc4106_setkey, 1975 .setauthsize = rfc4106_setauthsize, 1976 .encrypt = ipsec_gcm_encrypt, 1977 .decrypt = ipsec_gcm_decrypt, 1978 .ivsize = 8, 1979 .maxauthsize = AES_BLOCK_SIZE, 1980 }, 1981 .caam = { 1982 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 1983 }, 1984 }, 1985 { 1986 .aead = { 1987 .base = { 1988 .cra_name = "rfc4543(gcm(aes))", 1989 .cra_driver_name = "rfc4543-gcm-aes-caam", 1990 .cra_blocksize = 1, 1991 }, 1992 .setkey = rfc4543_setkey, 1993 .setauthsize = rfc4543_setauthsize, 1994 .encrypt = ipsec_gcm_encrypt, 1995 .decrypt = ipsec_gcm_decrypt, 1996 .ivsize = 8, 1997 .maxauthsize = AES_BLOCK_SIZE, 1998 }, 1999 .caam = { 2000 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2001 }, 2002 }, 2003 /* Galois Counter Mode */ 2004 { 2005 .aead = { 2006 .base = { 2007 .cra_name = "gcm(aes)", 2008 .cra_driver_name = "gcm-aes-caam", 2009 .cra_blocksize = 1, 2010 }, 2011 .setkey = gcm_setkey, 2012 .setauthsize = gcm_setauthsize, 2013 .encrypt = gcm_encrypt, 2014 .decrypt = gcm_decrypt, 2015 .ivsize = 12, 2016 .maxauthsize = AES_BLOCK_SIZE, 2017 }, 2018 .caam = { 2019 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2020 }, 2021 }, 2022 /* single-pass ipsec_esp descriptor */ 2023 { 2024 .aead = { 2025 .base = { 2026 .cra_name = "authenc(hmac(md5)," 2027 "ecb(cipher_null))", 2028 .cra_driver_name = "authenc-hmac-md5-" 2029 "ecb-cipher_null-caam", 2030 .cra_blocksize = NULL_BLOCK_SIZE, 2031 }, 2032 .setkey = aead_setkey, 2033 .setauthsize = aead_setauthsize, 2034 .encrypt = aead_encrypt, 2035 .decrypt = aead_decrypt, 2036 .ivsize = NULL_IV_SIZE, 2037 .maxauthsize = MD5_DIGEST_SIZE, 2038 }, 2039 .caam = { 2040 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2041 OP_ALG_AAI_HMAC_PRECOMP, 2042 }, 2043 }, 2044 { 2045 .aead = { 2046 .base = { 2047 .cra_name = "authenc(hmac(sha1)," 2048 "ecb(cipher_null))", 2049 .cra_driver_name = "authenc-hmac-sha1-" 2050 "ecb-cipher_null-caam", 2051 .cra_blocksize = NULL_BLOCK_SIZE, 2052 }, 2053 .setkey = aead_setkey, 2054 .setauthsize = aead_setauthsize, 2055 .encrypt = aead_encrypt, 2056 .decrypt = aead_decrypt, 2057 .ivsize = NULL_IV_SIZE, 2058 .maxauthsize = SHA1_DIGEST_SIZE, 2059 }, 2060 .caam = { 2061 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2062 OP_ALG_AAI_HMAC_PRECOMP, 2063 }, 2064 }, 2065 { 2066 .aead = { 2067 .base = { 2068 .cra_name = "authenc(hmac(sha224)," 2069 "ecb(cipher_null))", 2070 .cra_driver_name = "authenc-hmac-sha224-" 2071 "ecb-cipher_null-caam", 2072 .cra_blocksize = NULL_BLOCK_SIZE, 2073 }, 2074 .setkey = aead_setkey, 2075 .setauthsize = aead_setauthsize, 2076 .encrypt = aead_encrypt, 2077 .decrypt = aead_decrypt, 2078 .ivsize = NULL_IV_SIZE, 2079 .maxauthsize = SHA224_DIGEST_SIZE, 2080 }, 2081 .caam = { 2082 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2083 OP_ALG_AAI_HMAC_PRECOMP, 2084 }, 2085 }, 2086 { 2087 .aead = { 2088 .base = { 2089 .cra_name = "authenc(hmac(sha256)," 2090 "ecb(cipher_null))", 2091 .cra_driver_name = "authenc-hmac-sha256-" 2092 "ecb-cipher_null-caam", 2093 .cra_blocksize = NULL_BLOCK_SIZE, 2094 }, 2095 .setkey = aead_setkey, 2096 .setauthsize = aead_setauthsize, 2097 .encrypt = aead_encrypt, 2098 .decrypt = aead_decrypt, 2099 .ivsize = NULL_IV_SIZE, 2100 .maxauthsize = SHA256_DIGEST_SIZE, 2101 }, 2102 .caam = { 2103 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2104 OP_ALG_AAI_HMAC_PRECOMP, 2105 }, 2106 }, 2107 { 2108 .aead = { 2109 .base = { 2110 .cra_name = "authenc(hmac(sha384)," 2111 "ecb(cipher_null))", 2112 .cra_driver_name = "authenc-hmac-sha384-" 2113 "ecb-cipher_null-caam", 2114 .cra_blocksize = NULL_BLOCK_SIZE, 2115 }, 2116 .setkey = aead_setkey, 2117 .setauthsize = aead_setauthsize, 2118 .encrypt = aead_encrypt, 2119 .decrypt = aead_decrypt, 2120 .ivsize = NULL_IV_SIZE, 2121 .maxauthsize = SHA384_DIGEST_SIZE, 2122 }, 2123 .caam = { 2124 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2125 OP_ALG_AAI_HMAC_PRECOMP, 2126 }, 2127 }, 2128 { 2129 .aead = { 2130 .base = { 2131 .cra_name = "authenc(hmac(sha512)," 2132 "ecb(cipher_null))", 2133 .cra_driver_name = "authenc-hmac-sha512-" 2134 "ecb-cipher_null-caam", 2135 .cra_blocksize = NULL_BLOCK_SIZE, 2136 }, 2137 .setkey = aead_setkey, 2138 .setauthsize = aead_setauthsize, 2139 .encrypt = aead_encrypt, 2140 .decrypt = aead_decrypt, 2141 .ivsize = NULL_IV_SIZE, 2142 .maxauthsize = SHA512_DIGEST_SIZE, 2143 }, 2144 .caam = { 2145 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2146 OP_ALG_AAI_HMAC_PRECOMP, 2147 }, 2148 }, 2149 { 2150 .aead = { 2151 .base = { 2152 .cra_name = "authenc(hmac(md5),cbc(aes))", 2153 .cra_driver_name = "authenc-hmac-md5-" 2154 "cbc-aes-caam", 2155 .cra_blocksize = AES_BLOCK_SIZE, 2156 }, 2157 .setkey = aead_setkey, 2158 .setauthsize = aead_setauthsize, 2159 .encrypt = aead_encrypt, 2160 .decrypt = aead_decrypt, 2161 .ivsize = AES_BLOCK_SIZE, 2162 .maxauthsize = MD5_DIGEST_SIZE, 2163 }, 2164 .caam = { 2165 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2166 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2167 OP_ALG_AAI_HMAC_PRECOMP, 2168 }, 2169 }, 2170 { 2171 .aead = { 2172 .base = { 2173 .cra_name = "echainiv(authenc(hmac(md5)," 2174 "cbc(aes)))", 2175 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2176 "cbc-aes-caam", 2177 .cra_blocksize = AES_BLOCK_SIZE, 2178 }, 2179 .setkey = aead_setkey, 2180 .setauthsize = aead_setauthsize, 2181 .encrypt = aead_encrypt, 2182 .decrypt = aead_decrypt, 2183 .ivsize = AES_BLOCK_SIZE, 2184 .maxauthsize = MD5_DIGEST_SIZE, 2185 }, 2186 .caam = { 2187 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2188 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2189 OP_ALG_AAI_HMAC_PRECOMP, 2190 .geniv = true, 2191 }, 2192 }, 2193 { 2194 .aead = { 2195 .base = { 2196 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2197 .cra_driver_name = "authenc-hmac-sha1-" 2198 "cbc-aes-caam", 2199 .cra_blocksize = AES_BLOCK_SIZE, 2200 }, 2201 .setkey = aead_setkey, 2202 .setauthsize = aead_setauthsize, 2203 .encrypt = aead_encrypt, 2204 .decrypt = aead_decrypt, 2205 .ivsize = AES_BLOCK_SIZE, 2206 .maxauthsize = SHA1_DIGEST_SIZE, 2207 }, 2208 .caam = { 2209 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2210 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2211 OP_ALG_AAI_HMAC_PRECOMP, 2212 }, 2213 }, 2214 { 2215 .aead = { 2216 .base = { 2217 .cra_name = "echainiv(authenc(hmac(sha1)," 2218 "cbc(aes)))", 2219 .cra_driver_name = "echainiv-authenc-" 2220 "hmac-sha1-cbc-aes-caam", 2221 .cra_blocksize = AES_BLOCK_SIZE, 2222 }, 2223 .setkey = aead_setkey, 2224 .setauthsize = aead_setauthsize, 2225 .encrypt = aead_encrypt, 2226 .decrypt = aead_decrypt, 2227 .ivsize = AES_BLOCK_SIZE, 2228 .maxauthsize = SHA1_DIGEST_SIZE, 2229 }, 2230 .caam = { 2231 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2232 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2233 OP_ALG_AAI_HMAC_PRECOMP, 2234 .geniv = true, 2235 }, 2236 }, 2237 { 2238 .aead = { 2239 .base = { 2240 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2241 .cra_driver_name = "authenc-hmac-sha224-" 2242 "cbc-aes-caam", 2243 .cra_blocksize = AES_BLOCK_SIZE, 2244 }, 2245 .setkey = aead_setkey, 2246 .setauthsize = aead_setauthsize, 2247 .encrypt = aead_encrypt, 2248 .decrypt = aead_decrypt, 2249 .ivsize = AES_BLOCK_SIZE, 2250 .maxauthsize = SHA224_DIGEST_SIZE, 2251 }, 2252 .caam = { 2253 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2254 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2255 OP_ALG_AAI_HMAC_PRECOMP, 2256 }, 2257 }, 2258 { 2259 .aead = { 2260 .base = { 2261 .cra_name = "echainiv(authenc(hmac(sha224)," 2262 "cbc(aes)))", 2263 .cra_driver_name = "echainiv-authenc-" 2264 "hmac-sha224-cbc-aes-caam", 2265 .cra_blocksize = AES_BLOCK_SIZE, 2266 }, 2267 .setkey = aead_setkey, 2268 .setauthsize = aead_setauthsize, 2269 .encrypt = aead_encrypt, 2270 .decrypt = aead_decrypt, 2271 .ivsize = AES_BLOCK_SIZE, 2272 .maxauthsize = SHA224_DIGEST_SIZE, 2273 }, 2274 .caam = { 2275 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2276 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2277 OP_ALG_AAI_HMAC_PRECOMP, 2278 .geniv = true, 2279 }, 2280 }, 2281 { 2282 .aead = { 2283 .base = { 2284 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2285 .cra_driver_name = "authenc-hmac-sha256-" 2286 "cbc-aes-caam", 2287 .cra_blocksize = AES_BLOCK_SIZE, 2288 }, 2289 .setkey = aead_setkey, 2290 .setauthsize = aead_setauthsize, 2291 .encrypt = aead_encrypt, 2292 .decrypt = aead_decrypt, 2293 .ivsize = AES_BLOCK_SIZE, 2294 .maxauthsize = SHA256_DIGEST_SIZE, 2295 }, 2296 .caam = { 2297 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2298 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2299 OP_ALG_AAI_HMAC_PRECOMP, 2300 }, 2301 }, 2302 { 2303 .aead = { 2304 .base = { 2305 .cra_name = "echainiv(authenc(hmac(sha256)," 2306 "cbc(aes)))", 2307 .cra_driver_name = "echainiv-authenc-" 2308 "hmac-sha256-cbc-aes-caam", 2309 .cra_blocksize = AES_BLOCK_SIZE, 2310 }, 2311 .setkey = aead_setkey, 2312 .setauthsize = aead_setauthsize, 2313 .encrypt = aead_encrypt, 2314 .decrypt = aead_decrypt, 2315 .ivsize = AES_BLOCK_SIZE, 2316 .maxauthsize = SHA256_DIGEST_SIZE, 2317 }, 2318 .caam = { 2319 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2320 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2321 OP_ALG_AAI_HMAC_PRECOMP, 2322 .geniv = true, 2323 }, 2324 }, 2325 { 2326 .aead = { 2327 .base = { 2328 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2329 .cra_driver_name = "authenc-hmac-sha384-" 2330 "cbc-aes-caam", 2331 .cra_blocksize = AES_BLOCK_SIZE, 2332 }, 2333 .setkey = aead_setkey, 2334 .setauthsize = aead_setauthsize, 2335 .encrypt = aead_encrypt, 2336 .decrypt = aead_decrypt, 2337 .ivsize = AES_BLOCK_SIZE, 2338 .maxauthsize = SHA384_DIGEST_SIZE, 2339 }, 2340 .caam = { 2341 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2342 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2343 OP_ALG_AAI_HMAC_PRECOMP, 2344 }, 2345 }, 2346 { 2347 .aead = { 2348 .base = { 2349 .cra_name = "echainiv(authenc(hmac(sha384)," 2350 "cbc(aes)))", 2351 .cra_driver_name = "echainiv-authenc-" 2352 "hmac-sha384-cbc-aes-caam", 2353 .cra_blocksize = AES_BLOCK_SIZE, 2354 }, 2355 .setkey = aead_setkey, 2356 .setauthsize = aead_setauthsize, 2357 .encrypt = aead_encrypt, 2358 .decrypt = aead_decrypt, 2359 .ivsize = AES_BLOCK_SIZE, 2360 .maxauthsize = SHA384_DIGEST_SIZE, 2361 }, 2362 .caam = { 2363 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2364 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2365 OP_ALG_AAI_HMAC_PRECOMP, 2366 .geniv = true, 2367 }, 2368 }, 2369 { 2370 .aead = { 2371 .base = { 2372 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2373 .cra_driver_name = "authenc-hmac-sha512-" 2374 "cbc-aes-caam", 2375 .cra_blocksize = AES_BLOCK_SIZE, 2376 }, 2377 .setkey = aead_setkey, 2378 .setauthsize = aead_setauthsize, 2379 .encrypt = aead_encrypt, 2380 .decrypt = aead_decrypt, 2381 .ivsize = AES_BLOCK_SIZE, 2382 .maxauthsize = SHA512_DIGEST_SIZE, 2383 }, 2384 .caam = { 2385 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2386 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2387 OP_ALG_AAI_HMAC_PRECOMP, 2388 }, 2389 }, 2390 { 2391 .aead = { 2392 .base = { 2393 .cra_name = "echainiv(authenc(hmac(sha512)," 2394 "cbc(aes)))", 2395 .cra_driver_name = "echainiv-authenc-" 2396 "hmac-sha512-cbc-aes-caam", 2397 .cra_blocksize = AES_BLOCK_SIZE, 2398 }, 2399 .setkey = aead_setkey, 2400 .setauthsize = aead_setauthsize, 2401 .encrypt = aead_encrypt, 2402 .decrypt = aead_decrypt, 2403 .ivsize = AES_BLOCK_SIZE, 2404 .maxauthsize = SHA512_DIGEST_SIZE, 2405 }, 2406 .caam = { 2407 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2408 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2409 OP_ALG_AAI_HMAC_PRECOMP, 2410 .geniv = true, 2411 }, 2412 }, 2413 { 2414 .aead = { 2415 .base = { 2416 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2417 .cra_driver_name = "authenc-hmac-md5-" 2418 "cbc-des3_ede-caam", 2419 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2420 }, 2421 .setkey = aead_setkey, 2422 .setauthsize = aead_setauthsize, 2423 .encrypt = aead_encrypt, 2424 .decrypt = aead_decrypt, 2425 .ivsize = DES3_EDE_BLOCK_SIZE, 2426 .maxauthsize = MD5_DIGEST_SIZE, 2427 }, 2428 .caam = { 2429 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2430 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2431 OP_ALG_AAI_HMAC_PRECOMP, 2432 } 2433 }, 2434 { 2435 .aead = { 2436 .base = { 2437 .cra_name = "echainiv(authenc(hmac(md5)," 2438 "cbc(des3_ede)))", 2439 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2440 "cbc-des3_ede-caam", 2441 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2442 }, 2443 .setkey = aead_setkey, 2444 .setauthsize = aead_setauthsize, 2445 .encrypt = aead_encrypt, 2446 .decrypt = aead_decrypt, 2447 .ivsize = DES3_EDE_BLOCK_SIZE, 2448 .maxauthsize = MD5_DIGEST_SIZE, 2449 }, 2450 .caam = { 2451 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2452 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2453 OP_ALG_AAI_HMAC_PRECOMP, 2454 .geniv = true, 2455 } 2456 }, 2457 { 2458 .aead = { 2459 .base = { 2460 .cra_name = "authenc(hmac(sha1)," 2461 "cbc(des3_ede))", 2462 .cra_driver_name = "authenc-hmac-sha1-" 2463 "cbc-des3_ede-caam", 2464 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2465 }, 2466 .setkey = aead_setkey, 2467 .setauthsize = aead_setauthsize, 2468 .encrypt = aead_encrypt, 2469 .decrypt = aead_decrypt, 2470 .ivsize = DES3_EDE_BLOCK_SIZE, 2471 .maxauthsize = SHA1_DIGEST_SIZE, 2472 }, 2473 .caam = { 2474 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2475 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2476 OP_ALG_AAI_HMAC_PRECOMP, 2477 }, 2478 }, 2479 { 2480 .aead = { 2481 .base = { 2482 .cra_name = "echainiv(authenc(hmac(sha1)," 2483 "cbc(des3_ede)))", 2484 .cra_driver_name = "echainiv-authenc-" 2485 "hmac-sha1-" 2486 "cbc-des3_ede-caam", 2487 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2488 }, 2489 .setkey = aead_setkey, 2490 .setauthsize = aead_setauthsize, 2491 .encrypt = aead_encrypt, 2492 .decrypt = aead_decrypt, 2493 .ivsize = DES3_EDE_BLOCK_SIZE, 2494 .maxauthsize = SHA1_DIGEST_SIZE, 2495 }, 2496 .caam = { 2497 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2498 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2499 OP_ALG_AAI_HMAC_PRECOMP, 2500 .geniv = true, 2501 }, 2502 }, 2503 { 2504 .aead = { 2505 .base = { 2506 .cra_name = "authenc(hmac(sha224)," 2507 "cbc(des3_ede))", 2508 .cra_driver_name = "authenc-hmac-sha224-" 2509 "cbc-des3_ede-caam", 2510 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2511 }, 2512 .setkey = aead_setkey, 2513 .setauthsize = aead_setauthsize, 2514 .encrypt = aead_encrypt, 2515 .decrypt = aead_decrypt, 2516 .ivsize = DES3_EDE_BLOCK_SIZE, 2517 .maxauthsize = SHA224_DIGEST_SIZE, 2518 }, 2519 .caam = { 2520 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2521 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2522 OP_ALG_AAI_HMAC_PRECOMP, 2523 }, 2524 }, 2525 { 2526 .aead = { 2527 .base = { 2528 .cra_name = "echainiv(authenc(hmac(sha224)," 2529 "cbc(des3_ede)))", 2530 .cra_driver_name = "echainiv-authenc-" 2531 "hmac-sha224-" 2532 "cbc-des3_ede-caam", 2533 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2534 }, 2535 .setkey = aead_setkey, 2536 .setauthsize = aead_setauthsize, 2537 .encrypt = aead_encrypt, 2538 .decrypt = aead_decrypt, 2539 .ivsize = DES3_EDE_BLOCK_SIZE, 2540 .maxauthsize = SHA224_DIGEST_SIZE, 2541 }, 2542 .caam = { 2543 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2544 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2545 OP_ALG_AAI_HMAC_PRECOMP, 2546 .geniv = true, 2547 }, 2548 }, 2549 { 2550 .aead = { 2551 .base = { 2552 .cra_name = "authenc(hmac(sha256)," 2553 "cbc(des3_ede))", 2554 .cra_driver_name = "authenc-hmac-sha256-" 2555 "cbc-des3_ede-caam", 2556 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2557 }, 2558 .setkey = aead_setkey, 2559 .setauthsize = aead_setauthsize, 2560 .encrypt = aead_encrypt, 2561 .decrypt = aead_decrypt, 2562 .ivsize = DES3_EDE_BLOCK_SIZE, 2563 .maxauthsize = SHA256_DIGEST_SIZE, 2564 }, 2565 .caam = { 2566 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2567 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2568 OP_ALG_AAI_HMAC_PRECOMP, 2569 }, 2570 }, 2571 { 2572 .aead = { 2573 .base = { 2574 .cra_name = "echainiv(authenc(hmac(sha256)," 2575 "cbc(des3_ede)))", 2576 .cra_driver_name = "echainiv-authenc-" 2577 "hmac-sha256-" 2578 "cbc-des3_ede-caam", 2579 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2580 }, 2581 .setkey = aead_setkey, 2582 .setauthsize = aead_setauthsize, 2583 .encrypt = aead_encrypt, 2584 .decrypt = aead_decrypt, 2585 .ivsize = DES3_EDE_BLOCK_SIZE, 2586 .maxauthsize = SHA256_DIGEST_SIZE, 2587 }, 2588 .caam = { 2589 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2590 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2591 OP_ALG_AAI_HMAC_PRECOMP, 2592 .geniv = true, 2593 }, 2594 }, 2595 { 2596 .aead = { 2597 .base = { 2598 .cra_name = "authenc(hmac(sha384)," 2599 "cbc(des3_ede))", 2600 .cra_driver_name = "authenc-hmac-sha384-" 2601 "cbc-des3_ede-caam", 2602 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2603 }, 2604 .setkey = aead_setkey, 2605 .setauthsize = aead_setauthsize, 2606 .encrypt = aead_encrypt, 2607 .decrypt = aead_decrypt, 2608 .ivsize = DES3_EDE_BLOCK_SIZE, 2609 .maxauthsize = SHA384_DIGEST_SIZE, 2610 }, 2611 .caam = { 2612 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2613 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2614 OP_ALG_AAI_HMAC_PRECOMP, 2615 }, 2616 }, 2617 { 2618 .aead = { 2619 .base = { 2620 .cra_name = "echainiv(authenc(hmac(sha384)," 2621 "cbc(des3_ede)))", 2622 .cra_driver_name = "echainiv-authenc-" 2623 "hmac-sha384-" 2624 "cbc-des3_ede-caam", 2625 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2626 }, 2627 .setkey = aead_setkey, 2628 .setauthsize = aead_setauthsize, 2629 .encrypt = aead_encrypt, 2630 .decrypt = aead_decrypt, 2631 .ivsize = DES3_EDE_BLOCK_SIZE, 2632 .maxauthsize = SHA384_DIGEST_SIZE, 2633 }, 2634 .caam = { 2635 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2636 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2637 OP_ALG_AAI_HMAC_PRECOMP, 2638 .geniv = true, 2639 }, 2640 }, 2641 { 2642 .aead = { 2643 .base = { 2644 .cra_name = "authenc(hmac(sha512)," 2645 "cbc(des3_ede))", 2646 .cra_driver_name = "authenc-hmac-sha512-" 2647 "cbc-des3_ede-caam", 2648 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2649 }, 2650 .setkey = aead_setkey, 2651 .setauthsize = aead_setauthsize, 2652 .encrypt = aead_encrypt, 2653 .decrypt = aead_decrypt, 2654 .ivsize = DES3_EDE_BLOCK_SIZE, 2655 .maxauthsize = SHA512_DIGEST_SIZE, 2656 }, 2657 .caam = { 2658 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2659 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2660 OP_ALG_AAI_HMAC_PRECOMP, 2661 }, 2662 }, 2663 { 2664 .aead = { 2665 .base = { 2666 .cra_name = "echainiv(authenc(hmac(sha512)," 2667 "cbc(des3_ede)))", 2668 .cra_driver_name = "echainiv-authenc-" 2669 "hmac-sha512-" 2670 "cbc-des3_ede-caam", 2671 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2672 }, 2673 .setkey = aead_setkey, 2674 .setauthsize = aead_setauthsize, 2675 .encrypt = aead_encrypt, 2676 .decrypt = aead_decrypt, 2677 .ivsize = DES3_EDE_BLOCK_SIZE, 2678 .maxauthsize = SHA512_DIGEST_SIZE, 2679 }, 2680 .caam = { 2681 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2682 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2683 OP_ALG_AAI_HMAC_PRECOMP, 2684 .geniv = true, 2685 }, 2686 }, 2687 { 2688 .aead = { 2689 .base = { 2690 .cra_name = "authenc(hmac(md5),cbc(des))", 2691 .cra_driver_name = "authenc-hmac-md5-" 2692 "cbc-des-caam", 2693 .cra_blocksize = DES_BLOCK_SIZE, 2694 }, 2695 .setkey = aead_setkey, 2696 .setauthsize = aead_setauthsize, 2697 .encrypt = aead_encrypt, 2698 .decrypt = aead_decrypt, 2699 .ivsize = DES_BLOCK_SIZE, 2700 .maxauthsize = MD5_DIGEST_SIZE, 2701 }, 2702 .caam = { 2703 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2704 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2705 OP_ALG_AAI_HMAC_PRECOMP, 2706 }, 2707 }, 2708 { 2709 .aead = { 2710 .base = { 2711 .cra_name = "echainiv(authenc(hmac(md5)," 2712 "cbc(des)))", 2713 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2714 "cbc-des-caam", 2715 .cra_blocksize = DES_BLOCK_SIZE, 2716 }, 2717 .setkey = aead_setkey, 2718 .setauthsize = aead_setauthsize, 2719 .encrypt = aead_encrypt, 2720 .decrypt = aead_decrypt, 2721 .ivsize = DES_BLOCK_SIZE, 2722 .maxauthsize = MD5_DIGEST_SIZE, 2723 }, 2724 .caam = { 2725 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2726 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2727 OP_ALG_AAI_HMAC_PRECOMP, 2728 .geniv = true, 2729 }, 2730 }, 2731 { 2732 .aead = { 2733 .base = { 2734 .cra_name = "authenc(hmac(sha1),cbc(des))", 2735 .cra_driver_name = "authenc-hmac-sha1-" 2736 "cbc-des-caam", 2737 .cra_blocksize = DES_BLOCK_SIZE, 2738 }, 2739 .setkey = aead_setkey, 2740 .setauthsize = aead_setauthsize, 2741 .encrypt = aead_encrypt, 2742 .decrypt = aead_decrypt, 2743 .ivsize = DES_BLOCK_SIZE, 2744 .maxauthsize = SHA1_DIGEST_SIZE, 2745 }, 2746 .caam = { 2747 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2748 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2749 OP_ALG_AAI_HMAC_PRECOMP, 2750 }, 2751 }, 2752 { 2753 .aead = { 2754 .base = { 2755 .cra_name = "echainiv(authenc(hmac(sha1)," 2756 "cbc(des)))", 2757 .cra_driver_name = "echainiv-authenc-" 2758 "hmac-sha1-cbc-des-caam", 2759 .cra_blocksize = DES_BLOCK_SIZE, 2760 }, 2761 .setkey = aead_setkey, 2762 .setauthsize = aead_setauthsize, 2763 .encrypt = aead_encrypt, 2764 .decrypt = aead_decrypt, 2765 .ivsize = DES_BLOCK_SIZE, 2766 .maxauthsize = SHA1_DIGEST_SIZE, 2767 }, 2768 .caam = { 2769 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2770 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2771 OP_ALG_AAI_HMAC_PRECOMP, 2772 .geniv = true, 2773 }, 2774 }, 2775 { 2776 .aead = { 2777 .base = { 2778 .cra_name = "authenc(hmac(sha224),cbc(des))", 2779 .cra_driver_name = "authenc-hmac-sha224-" 2780 "cbc-des-caam", 2781 .cra_blocksize = DES_BLOCK_SIZE, 2782 }, 2783 .setkey = aead_setkey, 2784 .setauthsize = aead_setauthsize, 2785 .encrypt = aead_encrypt, 2786 .decrypt = aead_decrypt, 2787 .ivsize = DES_BLOCK_SIZE, 2788 .maxauthsize = SHA224_DIGEST_SIZE, 2789 }, 2790 .caam = { 2791 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2792 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2793 OP_ALG_AAI_HMAC_PRECOMP, 2794 }, 2795 }, 2796 { 2797 .aead = { 2798 .base = { 2799 .cra_name = "echainiv(authenc(hmac(sha224)," 2800 "cbc(des)))", 2801 .cra_driver_name = "echainiv-authenc-" 2802 "hmac-sha224-cbc-des-caam", 2803 .cra_blocksize = DES_BLOCK_SIZE, 2804 }, 2805 .setkey = aead_setkey, 2806 .setauthsize = aead_setauthsize, 2807 .encrypt = aead_encrypt, 2808 .decrypt = aead_decrypt, 2809 .ivsize = DES_BLOCK_SIZE, 2810 .maxauthsize = SHA224_DIGEST_SIZE, 2811 }, 2812 .caam = { 2813 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2814 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2815 OP_ALG_AAI_HMAC_PRECOMP, 2816 .geniv = true, 2817 }, 2818 }, 2819 { 2820 .aead = { 2821 .base = { 2822 .cra_name = "authenc(hmac(sha256),cbc(des))", 2823 .cra_driver_name = "authenc-hmac-sha256-" 2824 "cbc-des-caam", 2825 .cra_blocksize = DES_BLOCK_SIZE, 2826 }, 2827 .setkey = aead_setkey, 2828 .setauthsize = aead_setauthsize, 2829 .encrypt = aead_encrypt, 2830 .decrypt = aead_decrypt, 2831 .ivsize = DES_BLOCK_SIZE, 2832 .maxauthsize = SHA256_DIGEST_SIZE, 2833 }, 2834 .caam = { 2835 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2836 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2837 OP_ALG_AAI_HMAC_PRECOMP, 2838 }, 2839 }, 2840 { 2841 .aead = { 2842 .base = { 2843 .cra_name = "echainiv(authenc(hmac(sha256)," 2844 "cbc(des)))", 2845 .cra_driver_name = "echainiv-authenc-" 2846 "hmac-sha256-cbc-des-caam", 2847 .cra_blocksize = DES_BLOCK_SIZE, 2848 }, 2849 .setkey = aead_setkey, 2850 .setauthsize = aead_setauthsize, 2851 .encrypt = aead_encrypt, 2852 .decrypt = aead_decrypt, 2853 .ivsize = DES_BLOCK_SIZE, 2854 .maxauthsize = SHA256_DIGEST_SIZE, 2855 }, 2856 .caam = { 2857 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2858 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2859 OP_ALG_AAI_HMAC_PRECOMP, 2860 .geniv = true, 2861 }, 2862 }, 2863 { 2864 .aead = { 2865 .base = { 2866 .cra_name = "authenc(hmac(sha384),cbc(des))", 2867 .cra_driver_name = "authenc-hmac-sha384-" 2868 "cbc-des-caam", 2869 .cra_blocksize = DES_BLOCK_SIZE, 2870 }, 2871 .setkey = aead_setkey, 2872 .setauthsize = aead_setauthsize, 2873 .encrypt = aead_encrypt, 2874 .decrypt = aead_decrypt, 2875 .ivsize = DES_BLOCK_SIZE, 2876 .maxauthsize = SHA384_DIGEST_SIZE, 2877 }, 2878 .caam = { 2879 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2880 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2881 OP_ALG_AAI_HMAC_PRECOMP, 2882 }, 2883 }, 2884 { 2885 .aead = { 2886 .base = { 2887 .cra_name = "echainiv(authenc(hmac(sha384)," 2888 "cbc(des)))", 2889 .cra_driver_name = "echainiv-authenc-" 2890 "hmac-sha384-cbc-des-caam", 2891 .cra_blocksize = DES_BLOCK_SIZE, 2892 }, 2893 .setkey = aead_setkey, 2894 .setauthsize = aead_setauthsize, 2895 .encrypt = aead_encrypt, 2896 .decrypt = aead_decrypt, 2897 .ivsize = DES_BLOCK_SIZE, 2898 .maxauthsize = SHA384_DIGEST_SIZE, 2899 }, 2900 .caam = { 2901 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2902 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2903 OP_ALG_AAI_HMAC_PRECOMP, 2904 .geniv = true, 2905 }, 2906 }, 2907 { 2908 .aead = { 2909 .base = { 2910 .cra_name = "authenc(hmac(sha512),cbc(des))", 2911 .cra_driver_name = "authenc-hmac-sha512-" 2912 "cbc-des-caam", 2913 .cra_blocksize = DES_BLOCK_SIZE, 2914 }, 2915 .setkey = aead_setkey, 2916 .setauthsize = aead_setauthsize, 2917 .encrypt = aead_encrypt, 2918 .decrypt = aead_decrypt, 2919 .ivsize = DES_BLOCK_SIZE, 2920 .maxauthsize = SHA512_DIGEST_SIZE, 2921 }, 2922 .caam = { 2923 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2924 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2925 OP_ALG_AAI_HMAC_PRECOMP, 2926 }, 2927 }, 2928 { 2929 .aead = { 2930 .base = { 2931 .cra_name = "echainiv(authenc(hmac(sha512)," 2932 "cbc(des)))", 2933 .cra_driver_name = "echainiv-authenc-" 2934 "hmac-sha512-cbc-des-caam", 2935 .cra_blocksize = DES_BLOCK_SIZE, 2936 }, 2937 .setkey = aead_setkey, 2938 .setauthsize = aead_setauthsize, 2939 .encrypt = aead_encrypt, 2940 .decrypt = aead_decrypt, 2941 .ivsize = DES_BLOCK_SIZE, 2942 .maxauthsize = SHA512_DIGEST_SIZE, 2943 }, 2944 .caam = { 2945 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2946 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2947 OP_ALG_AAI_HMAC_PRECOMP, 2948 .geniv = true, 2949 }, 2950 }, 2951 { 2952 .aead = { 2953 .base = { 2954 .cra_name = "authenc(hmac(md5)," 2955 "rfc3686(ctr(aes)))", 2956 .cra_driver_name = "authenc-hmac-md5-" 2957 "rfc3686-ctr-aes-caam", 2958 .cra_blocksize = 1, 2959 }, 2960 .setkey = aead_setkey, 2961 .setauthsize = aead_setauthsize, 2962 .encrypt = aead_encrypt, 2963 .decrypt = aead_decrypt, 2964 .ivsize = CTR_RFC3686_IV_SIZE, 2965 .maxauthsize = MD5_DIGEST_SIZE, 2966 }, 2967 .caam = { 2968 .class1_alg_type = OP_ALG_ALGSEL_AES | 2969 OP_ALG_AAI_CTR_MOD128, 2970 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2971 OP_ALG_AAI_HMAC_PRECOMP, 2972 .rfc3686 = true, 2973 }, 2974 }, 2975 { 2976 .aead = { 2977 .base = { 2978 .cra_name = "seqiv(authenc(" 2979 "hmac(md5),rfc3686(ctr(aes))))", 2980 .cra_driver_name = "seqiv-authenc-hmac-md5-" 2981 "rfc3686-ctr-aes-caam", 2982 .cra_blocksize = 1, 2983 }, 2984 .setkey = aead_setkey, 2985 .setauthsize = aead_setauthsize, 2986 .encrypt = aead_encrypt, 2987 .decrypt = aead_decrypt, 2988 .ivsize = CTR_RFC3686_IV_SIZE, 2989 .maxauthsize = MD5_DIGEST_SIZE, 2990 }, 2991 .caam = { 2992 .class1_alg_type = OP_ALG_ALGSEL_AES | 2993 OP_ALG_AAI_CTR_MOD128, 2994 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2995 OP_ALG_AAI_HMAC_PRECOMP, 2996 .rfc3686 = true, 2997 .geniv = true, 2998 }, 2999 }, 3000 { 3001 .aead = { 3002 .base = { 3003 .cra_name = "authenc(hmac(sha1)," 3004 "rfc3686(ctr(aes)))", 3005 .cra_driver_name = "authenc-hmac-sha1-" 3006 "rfc3686-ctr-aes-caam", 3007 .cra_blocksize = 1, 3008 }, 3009 .setkey = aead_setkey, 3010 .setauthsize = aead_setauthsize, 3011 .encrypt = aead_encrypt, 3012 .decrypt = aead_decrypt, 3013 .ivsize = CTR_RFC3686_IV_SIZE, 3014 .maxauthsize = SHA1_DIGEST_SIZE, 3015 }, 3016 .caam = { 3017 .class1_alg_type = OP_ALG_ALGSEL_AES | 3018 OP_ALG_AAI_CTR_MOD128, 3019 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3020 OP_ALG_AAI_HMAC_PRECOMP, 3021 .rfc3686 = true, 3022 }, 3023 }, 3024 { 3025 .aead = { 3026 .base = { 3027 .cra_name = "seqiv(authenc(" 3028 "hmac(sha1),rfc3686(ctr(aes))))", 3029 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3030 "rfc3686-ctr-aes-caam", 3031 .cra_blocksize = 1, 3032 }, 3033 .setkey = aead_setkey, 3034 .setauthsize = aead_setauthsize, 3035 .encrypt = aead_encrypt, 3036 .decrypt = aead_decrypt, 3037 .ivsize = CTR_RFC3686_IV_SIZE, 3038 .maxauthsize = SHA1_DIGEST_SIZE, 3039 }, 3040 .caam = { 3041 .class1_alg_type = OP_ALG_ALGSEL_AES | 3042 OP_ALG_AAI_CTR_MOD128, 3043 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3044 OP_ALG_AAI_HMAC_PRECOMP, 3045 .rfc3686 = true, 3046 .geniv = true, 3047 }, 3048 }, 3049 { 3050 .aead = { 3051 .base = { 3052 .cra_name = "authenc(hmac(sha224)," 3053 "rfc3686(ctr(aes)))", 3054 .cra_driver_name = "authenc-hmac-sha224-" 3055 "rfc3686-ctr-aes-caam", 3056 .cra_blocksize = 1, 3057 }, 3058 .setkey = aead_setkey, 3059 .setauthsize = aead_setauthsize, 3060 .encrypt = aead_encrypt, 3061 .decrypt = aead_decrypt, 3062 .ivsize = CTR_RFC3686_IV_SIZE, 3063 .maxauthsize = SHA224_DIGEST_SIZE, 3064 }, 3065 .caam = { 3066 .class1_alg_type = OP_ALG_ALGSEL_AES | 3067 OP_ALG_AAI_CTR_MOD128, 3068 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3069 OP_ALG_AAI_HMAC_PRECOMP, 3070 .rfc3686 = true, 3071 }, 3072 }, 3073 { 3074 .aead = { 3075 .base = { 3076 .cra_name = "seqiv(authenc(" 3077 "hmac(sha224),rfc3686(ctr(aes))))", 3078 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 3079 "rfc3686-ctr-aes-caam", 3080 .cra_blocksize = 1, 3081 }, 3082 .setkey = aead_setkey, 3083 .setauthsize = aead_setauthsize, 3084 .encrypt = aead_encrypt, 3085 .decrypt = aead_decrypt, 3086 .ivsize = CTR_RFC3686_IV_SIZE, 3087 .maxauthsize = SHA224_DIGEST_SIZE, 3088 }, 3089 .caam = { 3090 .class1_alg_type = OP_ALG_ALGSEL_AES | 3091 OP_ALG_AAI_CTR_MOD128, 3092 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3093 OP_ALG_AAI_HMAC_PRECOMP, 3094 .rfc3686 = true, 3095 .geniv = true, 3096 }, 3097 }, 3098 { 3099 .aead = { 3100 .base = { 3101 .cra_name = "authenc(hmac(sha256)," 3102 "rfc3686(ctr(aes)))", 3103 .cra_driver_name = "authenc-hmac-sha256-" 3104 "rfc3686-ctr-aes-caam", 3105 .cra_blocksize = 1, 3106 }, 3107 .setkey = aead_setkey, 3108 .setauthsize = aead_setauthsize, 3109 .encrypt = aead_encrypt, 3110 .decrypt = aead_decrypt, 3111 .ivsize = CTR_RFC3686_IV_SIZE, 3112 .maxauthsize = SHA256_DIGEST_SIZE, 3113 }, 3114 .caam = { 3115 .class1_alg_type = OP_ALG_ALGSEL_AES | 3116 OP_ALG_AAI_CTR_MOD128, 3117 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3118 OP_ALG_AAI_HMAC_PRECOMP, 3119 .rfc3686 = true, 3120 }, 3121 }, 3122 { 3123 .aead = { 3124 .base = { 3125 .cra_name = "seqiv(authenc(hmac(sha256)," 3126 "rfc3686(ctr(aes))))", 3127 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 3128 "rfc3686-ctr-aes-caam", 3129 .cra_blocksize = 1, 3130 }, 3131 .setkey = aead_setkey, 3132 .setauthsize = aead_setauthsize, 3133 .encrypt = aead_encrypt, 3134 .decrypt = aead_decrypt, 3135 .ivsize = CTR_RFC3686_IV_SIZE, 3136 .maxauthsize = SHA256_DIGEST_SIZE, 3137 }, 3138 .caam = { 3139 .class1_alg_type = OP_ALG_ALGSEL_AES | 3140 OP_ALG_AAI_CTR_MOD128, 3141 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3142 OP_ALG_AAI_HMAC_PRECOMP, 3143 .rfc3686 = true, 3144 .geniv = true, 3145 }, 3146 }, 3147 { 3148 .aead = { 3149 .base = { 3150 .cra_name = "authenc(hmac(sha384)," 3151 "rfc3686(ctr(aes)))", 3152 .cra_driver_name = "authenc-hmac-sha384-" 3153 "rfc3686-ctr-aes-caam", 3154 .cra_blocksize = 1, 3155 }, 3156 .setkey = aead_setkey, 3157 .setauthsize = aead_setauthsize, 3158 .encrypt = aead_encrypt, 3159 .decrypt = aead_decrypt, 3160 .ivsize = CTR_RFC3686_IV_SIZE, 3161 .maxauthsize = SHA384_DIGEST_SIZE, 3162 }, 3163 .caam = { 3164 .class1_alg_type = OP_ALG_ALGSEL_AES | 3165 OP_ALG_AAI_CTR_MOD128, 3166 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3167 OP_ALG_AAI_HMAC_PRECOMP, 3168 .rfc3686 = true, 3169 }, 3170 }, 3171 { 3172 .aead = { 3173 .base = { 3174 .cra_name = "seqiv(authenc(hmac(sha384)," 3175 "rfc3686(ctr(aes))))", 3176 .cra_driver_name = "seqiv-authenc-hmac-sha384-" 3177 "rfc3686-ctr-aes-caam", 3178 .cra_blocksize = 1, 3179 }, 3180 .setkey = aead_setkey, 3181 .setauthsize = aead_setauthsize, 3182 .encrypt = aead_encrypt, 3183 .decrypt = aead_decrypt, 3184 .ivsize = CTR_RFC3686_IV_SIZE, 3185 .maxauthsize = SHA384_DIGEST_SIZE, 3186 }, 3187 .caam = { 3188 .class1_alg_type = OP_ALG_ALGSEL_AES | 3189 OP_ALG_AAI_CTR_MOD128, 3190 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3191 OP_ALG_AAI_HMAC_PRECOMP, 3192 .rfc3686 = true, 3193 .geniv = true, 3194 }, 3195 }, 3196 { 3197 .aead = { 3198 .base = { 3199 .cra_name = "authenc(hmac(sha512)," 3200 "rfc3686(ctr(aes)))", 3201 .cra_driver_name = "authenc-hmac-sha512-" 3202 "rfc3686-ctr-aes-caam", 3203 .cra_blocksize = 1, 3204 }, 3205 .setkey = aead_setkey, 3206 .setauthsize = aead_setauthsize, 3207 .encrypt = aead_encrypt, 3208 .decrypt = aead_decrypt, 3209 .ivsize = CTR_RFC3686_IV_SIZE, 3210 .maxauthsize = SHA512_DIGEST_SIZE, 3211 }, 3212 .caam = { 3213 .class1_alg_type = OP_ALG_ALGSEL_AES | 3214 OP_ALG_AAI_CTR_MOD128, 3215 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3216 OP_ALG_AAI_HMAC_PRECOMP, 3217 .rfc3686 = true, 3218 }, 3219 }, 3220 { 3221 .aead = { 3222 .base = { 3223 .cra_name = "seqiv(authenc(hmac(sha512)," 3224 "rfc3686(ctr(aes))))", 3225 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3226 "rfc3686-ctr-aes-caam", 3227 .cra_blocksize = 1, 3228 }, 3229 .setkey = aead_setkey, 3230 .setauthsize = aead_setauthsize, 3231 .encrypt = aead_encrypt, 3232 .decrypt = aead_decrypt, 3233 .ivsize = CTR_RFC3686_IV_SIZE, 3234 .maxauthsize = SHA512_DIGEST_SIZE, 3235 }, 3236 .caam = { 3237 .class1_alg_type = OP_ALG_ALGSEL_AES | 3238 OP_ALG_AAI_CTR_MOD128, 3239 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3240 OP_ALG_AAI_HMAC_PRECOMP, 3241 .rfc3686 = true, 3242 .geniv = true, 3243 }, 3244 }, 3245 }; 3246 3247 struct caam_crypto_alg { 3248 struct crypto_alg crypto_alg; 3249 struct list_head entry; 3250 struct caam_alg_entry caam; 3251 }; 3252 3253 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam) 3254 { 3255 dma_addr_t dma_addr; 3256 3257 ctx->jrdev = caam_jr_alloc(); 3258 if (IS_ERR(ctx->jrdev)) { 3259 pr_err("Job Ring Device allocation for transform failed\n"); 3260 return PTR_ERR(ctx->jrdev); 3261 } 3262 3263 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3264 offsetof(struct caam_ctx, 3265 sh_desc_enc_dma), 3266 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC); 3267 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3268 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3269 caam_jr_free(ctx->jrdev); 3270 return -ENOMEM; 3271 } 3272 3273 ctx->sh_desc_enc_dma = dma_addr; 3274 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3275 sh_desc_dec); 3276 ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx, 3277 sh_desc_givenc); 3278 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key); 3279 3280 /* copy descriptor header template value */ 3281 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; 3282 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; 3283 3284 return 0; 3285 } 3286 3287 static int caam_cra_init(struct crypto_tfm *tfm) 3288 { 3289 struct crypto_alg *alg = tfm->__crt_alg; 3290 struct caam_crypto_alg *caam_alg = 3291 container_of(alg, struct caam_crypto_alg, crypto_alg); 3292 struct caam_ctx *ctx = crypto_tfm_ctx(tfm); 3293 3294 return caam_init_common(ctx, &caam_alg->caam); 3295 } 3296 3297 static int caam_aead_init(struct crypto_aead *tfm) 3298 { 3299 struct aead_alg *alg = crypto_aead_alg(tfm); 3300 struct caam_aead_alg *caam_alg = 3301 container_of(alg, struct caam_aead_alg, aead); 3302 struct caam_ctx *ctx = crypto_aead_ctx(tfm); 3303 3304 return caam_init_common(ctx, &caam_alg->caam); 3305 } 3306 3307 static void caam_exit_common(struct caam_ctx *ctx) 3308 { 3309 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, 3310 offsetof(struct caam_ctx, sh_desc_enc_dma), 3311 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC); 3312 caam_jr_free(ctx->jrdev); 3313 } 3314 3315 static void caam_cra_exit(struct crypto_tfm *tfm) 3316 { 3317 caam_exit_common(crypto_tfm_ctx(tfm)); 3318 } 3319 3320 static void caam_aead_exit(struct crypto_aead *tfm) 3321 { 3322 caam_exit_common(crypto_aead_ctx(tfm)); 3323 } 3324 3325 static void __exit caam_algapi_exit(void) 3326 { 3327 3328 struct caam_crypto_alg *t_alg, *n; 3329 int i; 3330 3331 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3332 struct caam_aead_alg *t_alg = driver_aeads + i; 3333 3334 if (t_alg->registered) 3335 crypto_unregister_aead(&t_alg->aead); 3336 } 3337 3338 if (!alg_list.next) 3339 return; 3340 3341 list_for_each_entry_safe(t_alg, n, &alg_list, entry) { 3342 crypto_unregister_alg(&t_alg->crypto_alg); 3343 list_del(&t_alg->entry); 3344 kfree(t_alg); 3345 } 3346 } 3347 3348 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template 3349 *template) 3350 { 3351 struct caam_crypto_alg *t_alg; 3352 struct crypto_alg *alg; 3353 3354 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL); 3355 if (!t_alg) { 3356 pr_err("failed to allocate t_alg\n"); 3357 return ERR_PTR(-ENOMEM); 3358 } 3359 3360 alg = &t_alg->crypto_alg; 3361 3362 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name); 3363 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", 3364 template->driver_name); 3365 alg->cra_module = THIS_MODULE; 3366 alg->cra_init = caam_cra_init; 3367 alg->cra_exit = caam_cra_exit; 3368 alg->cra_priority = CAAM_CRA_PRIORITY; 3369 alg->cra_blocksize = template->blocksize; 3370 alg->cra_alignmask = 0; 3371 alg->cra_ctxsize = sizeof(struct caam_ctx); 3372 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY | 3373 template->type; 3374 switch (template->type) { 3375 case CRYPTO_ALG_TYPE_GIVCIPHER: 3376 alg->cra_type = &crypto_givcipher_type; 3377 alg->cra_ablkcipher = template->template_ablkcipher; 3378 break; 3379 case CRYPTO_ALG_TYPE_ABLKCIPHER: 3380 alg->cra_type = &crypto_ablkcipher_type; 3381 alg->cra_ablkcipher = template->template_ablkcipher; 3382 break; 3383 } 3384 3385 t_alg->caam.class1_alg_type = template->class1_alg_type; 3386 t_alg->caam.class2_alg_type = template->class2_alg_type; 3387 3388 return t_alg; 3389 } 3390 3391 static void caam_aead_alg_init(struct caam_aead_alg *t_alg) 3392 { 3393 struct aead_alg *alg = &t_alg->aead; 3394 3395 alg->base.cra_module = THIS_MODULE; 3396 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3397 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3398 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3399 3400 alg->init = caam_aead_init; 3401 alg->exit = caam_aead_exit; 3402 } 3403 3404 static int __init caam_algapi_init(void) 3405 { 3406 struct device_node *dev_node; 3407 struct platform_device *pdev; 3408 struct device *ctrldev; 3409 struct caam_drv_private *priv; 3410 int i = 0, err = 0; 3411 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst; 3412 unsigned int md_limit = SHA512_DIGEST_SIZE; 3413 bool registered = false; 3414 3415 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0"); 3416 if (!dev_node) { 3417 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0"); 3418 if (!dev_node) 3419 return -ENODEV; 3420 } 3421 3422 pdev = of_find_device_by_node(dev_node); 3423 if (!pdev) { 3424 of_node_put(dev_node); 3425 return -ENODEV; 3426 } 3427 3428 ctrldev = &pdev->dev; 3429 priv = dev_get_drvdata(ctrldev); 3430 of_node_put(dev_node); 3431 3432 /* 3433 * If priv is NULL, it's probably because the caam driver wasn't 3434 * properly initialized (e.g. RNG4 init failed). Thus, bail out here. 3435 */ 3436 if (!priv) 3437 return -ENODEV; 3438 3439 3440 INIT_LIST_HEAD(&alg_list); 3441 3442 /* 3443 * Register crypto algorithms the device supports. 3444 * First, detect presence and attributes of DES, AES, and MD blocks. 3445 */ 3446 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3447 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3448 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT; 3449 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT; 3450 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3451 3452 /* If MD is present, limit digest size based on LP256 */ 3453 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256)) 3454 md_limit = SHA256_DIGEST_SIZE; 3455 3456 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3457 struct caam_crypto_alg *t_alg; 3458 struct caam_alg_template *alg = driver_algs + i; 3459 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK; 3460 3461 /* Skip DES algorithms if not supported by device */ 3462 if (!des_inst && 3463 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3464 (alg_sel == OP_ALG_ALGSEL_DES))) 3465 continue; 3466 3467 /* Skip AES algorithms if not supported by device */ 3468 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3469 continue; 3470 3471 /* 3472 * Check support for AES modes not available 3473 * on LP devices. 3474 */ 3475 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) 3476 if ((alg->class1_alg_type & OP_ALG_AAI_MASK) == 3477 OP_ALG_AAI_XTS) 3478 continue; 3479 3480 t_alg = caam_alg_alloc(alg); 3481 if (IS_ERR(t_alg)) { 3482 err = PTR_ERR(t_alg); 3483 pr_warn("%s alg allocation failed\n", alg->driver_name); 3484 continue; 3485 } 3486 3487 err = crypto_register_alg(&t_alg->crypto_alg); 3488 if (err) { 3489 pr_warn("%s alg registration failed\n", 3490 t_alg->crypto_alg.cra_driver_name); 3491 kfree(t_alg); 3492 continue; 3493 } 3494 3495 list_add_tail(&t_alg->entry, &alg_list); 3496 registered = true; 3497 } 3498 3499 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3500 struct caam_aead_alg *t_alg = driver_aeads + i; 3501 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3502 OP_ALG_ALGSEL_MASK; 3503 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3504 OP_ALG_ALGSEL_MASK; 3505 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3506 3507 /* Skip DES algorithms if not supported by device */ 3508 if (!des_inst && 3509 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3510 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3511 continue; 3512 3513 /* Skip AES algorithms if not supported by device */ 3514 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3515 continue; 3516 3517 /* 3518 * Check support for AES algorithms not available 3519 * on LP devices. 3520 */ 3521 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) 3522 if (alg_aai == OP_ALG_AAI_GCM) 3523 continue; 3524 3525 /* 3526 * Skip algorithms requiring message digests 3527 * if MD or MD size is not supported by device. 3528 */ 3529 if (c2_alg_sel && 3530 (!md_inst || (t_alg->aead.maxauthsize > md_limit))) 3531 continue; 3532 3533 caam_aead_alg_init(t_alg); 3534 3535 err = crypto_register_aead(&t_alg->aead); 3536 if (err) { 3537 pr_warn("%s alg registration failed\n", 3538 t_alg->aead.base.cra_driver_name); 3539 continue; 3540 } 3541 3542 t_alg->registered = true; 3543 registered = true; 3544 } 3545 3546 if (registered) 3547 pr_info("caam algorithms registered in /proc/crypto\n"); 3548 3549 return err; 3550 } 3551 3552 module_init(caam_algapi_init); 3553 module_exit(caam_algapi_exit); 3554 3555 MODULE_LICENSE("GPL"); 3556 MODULE_DESCRIPTION("FSL CAAM support for crypto API"); 3557 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC"); 3558