1 /* 2 * caam - Freescale FSL CAAM support for crypto API 3 * 4 * Copyright 2008-2011 Freescale Semiconductor, Inc. 5 * Copyright 2016-2018 NXP 6 * 7 * Based on talitos crypto API driver. 8 * 9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008): 10 * 11 * --------------- --------------- 12 * | JobDesc #1 |-------------------->| ShareDesc | 13 * | *(packet 1) | | (PDB) | 14 * --------------- |------------->| (hashKey) | 15 * . | | (cipherKey) | 16 * . | |-------->| (operation) | 17 * --------------- | | --------------- 18 * | JobDesc #2 |------| | 19 * | *(packet 2) | | 20 * --------------- | 21 * . | 22 * . | 23 * --------------- | 24 * | JobDesc #3 |------------ 25 * | *(packet 3) | 26 * --------------- 27 * 28 * The SharedDesc never changes for a connection unless rekeyed, but 29 * each packet will likely be in a different place. So all we need 30 * to know to process the packet is where the input is, where the 31 * output goes, and what context we want to process with. Context is 32 * in the SharedDesc, packet references in the JobDesc. 
33 * 34 * So, a job desc looks like: 35 * 36 * --------------------- 37 * | Header | 38 * | ShareDesc Pointer | 39 * | SEQ_OUT_PTR | 40 * | (output buffer) | 41 * | (output length) | 42 * | SEQ_IN_PTR | 43 * | (input buffer) | 44 * | (input length) | 45 * --------------------- 46 */ 47 48 #include "compat.h" 49 50 #include "regs.h" 51 #include "intern.h" 52 #include "desc_constr.h" 53 #include "jr.h" 54 #include "error.h" 55 #include "sg_sw_sec4.h" 56 #include "key_gen.h" 57 #include "caamalg_desc.h" 58 59 /* 60 * crypto alg 61 */ 62 #define CAAM_CRA_PRIORITY 3000 63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */ 64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \ 65 CTR_RFC3686_NONCE_SIZE + \ 66 SHA512_DIGEST_SIZE * 2) 67 68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2) 69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 70 CAAM_CMD_SZ * 4) 71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 72 CAAM_CMD_SZ * 5) 73 74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN) 75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ) 76 77 #ifdef DEBUG 78 /* for print_hex_dumps with line references */ 79 #define debug(format, arg...) printk(format, arg) 80 #else 81 #define debug(format, arg...) 
#endif

/*
 * caam_alg_entry - per-algorithm CAAM parameters shared by the AEAD and
 * skcipher template tables.
 * @class1_alg_type: CLASS1 (cipher) OP_ALG_* type for the CHA
 * @class2_alg_type: CLASS2 (auth) OP_ALG_* type for the CHA
 * @rfc3686: true for RFC3686 (CTR with nonce) variants
 * @geniv: true when the driver generates the IV (givencrypt-style AEAD)
 */
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

/* AEAD algorithm template plus its CAAM parameters and registration state */
struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;	/* set once crypto_register_aead() succeeded */
};

/* skcipher algorithm template plus its CAAM parameters and registration state */
struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;	/* set once crypto_register_skcipher() succeeded */
};

/*
 * per-session context
 * @sh_desc_enc: encrypt shared descriptor, built by the cnstr_shdsc_* helpers
 * @sh_desc_dec: decrypt shared descriptor
 * @key: key material; for authenc this is the padded split auth key followed
 *       by the encryption key
 * @sh_desc_enc_dma: bus address of @sh_desc_enc (synced after each rebuild)
 * @sh_desc_dec_dma: bus address of @sh_desc_dec
 * @key_dma: bus address of @key
 * @dir: DMA direction used for the dma_sync_single_for_device() calls
 * @jrdev: job ring device this session submits to
 * @adata: authentication (class 2) key/algorithm info
 * @cdata: cipher (class 1) key/algorithm info
 * @authsize: requested ICV (authentication tag) length in bytes
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};

/*
 * (Re)build the encrypt/decrypt shared descriptors for authentication-only
 * ("null encryption") AEAD and sync them to the device.  The auth key is
 * inlined into the descriptor when it fits, otherwise referenced by DMA
 * address.
 */
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	/* descriptor bytes left after job desc I/O commands and the key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * (Re)build the shared descriptors for authenc-class AEAD (encrypt, decrypt
 * and - for geniv algorithms - givencrypt).  Decides per descriptor whether
 * the auth and/or cipher keys can be inlined, via desc_inline_query().
 * Returns 0, or -EINVAL if a descriptor cannot fit even with both keys
 * referenced by pointer.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* nothing to build until setauthsize() has been called */
	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* nonce is stored at the tail of the cipher key in ctx->key */
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/* candidate inline payloads: [0] = auth split key, [1] = cipher key */
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	/* bit 0 of inl_mask: auth key may be inlined */
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	/* bit 1 of inl_mask: cipher key may be inlined */
	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor (reuses the enc slot) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}

/* Record the requested ICV size and rebuild the shared descriptors */
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

/*
 * (Re)build the GCM encrypt/decrypt shared descriptors and sync them to the
 * device.  No-op until both key and authsize are known.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/* Record the GCM tag size and rebuild the shared descriptors */
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

/*
 * (Re)build the RFC4106 (GCM for IPsec ESP) shared descriptors and sync them
 * to the device.  Same inline-vs-pointer key decision as gcm_set_sh_desc().
 */
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/* Record the RFC4106 tag size and rebuild the shared descriptors */
static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

/*
 * (Re)build the RFC4543 (GMAC for IPsec ESP) shared descriptors and sync
 * them to the device.
 */
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/* Record the RFC4543 tag size and rebuild the shared descriptors */
static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

/*
 * Set the authenc key: split the crypto API blob into auth + enc parts,
 * derive (or let DKP derive) the split auth key, lay out
 * {split auth key (padded), enc key} in ctx->key, sync it to the device and
 * rebuild the shared descriptors.
 */
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		/* raw auth key; DKP derives the split key on the device */
		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	/* pre-era-6: derive the split key in software via the job ring */
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	/* wipe key material from the stack copy */
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

/* Store the GCM key, sync it to the device and rebuild the descriptors */
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

/*
 * Store the RFC4106 key.  The trailing 4 bytes are the nonce salt, kept in
 * ctx->key after the AES key but excluded from cdata.keylen.
 */
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	/* must at least contain the 4-byte salt */
	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	/*
	 * NOTE(review): only cdata.keylen bytes are synced here although
	 * keylen bytes (incl. salt) were copied - confirm the salt is only
	 * consumed from the CPU copy (see init_gcm_job/append_data).
	 */
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}

/*
 * Store the RFC4543 key; same {AES key, 4-byte salt} layout as RFC4106.
 */
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}

/*
 * Set the skcipher key and build both shared descriptors.  The key is always
 * inlined (key_virt points at the caller's buffer, consumed during
 * descriptor construction).  For RFC3686 the trailing nonce is split off the
 * key.
 */
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * Set the XTS key (two AES keys back to back, so exactly 2x128 or 2x256
 * bits) and build the XTS shared descriptors.
 */
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	/* GNU zero-length array; aead_edesc uses the C99 flexible member */
	u32 hw_desc[0];
};

/*
 * Undo the DMA mappings made for one request: src/dst scatterlists
 * (bidirectional when they alias), the optional IV buffer and the optional
 * sec4 link table.
 */
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

/* Unmap an AEAD request's DMA resources (AEAD path maps no separate IV) */
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

/* Unmap an skcipher request's DMA resources, including its IV buffer */
static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

/*
 * Job-ring completion callback for AEAD encryption: translate the h/w
 * status, unmap, free the extended descriptor and complete the request.
 */
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* desc is hw_desc[0] inside the edesc we allocated */
	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

/*
 * Job-ring completion callback for AEAD decryption; additionally maps an
 * ICV-check failure onto -EBADMSG as the crypto API expects.
 */
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

/*
 * Job-ring completion callback for skcipher encryption: unmap, copy the
 * last ciphertext block back into req->iv, free and complete.
 */
static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen - ivsize,
				 ivsize, 0);

	kfree(edesc);

	skcipher_request_complete(req, err);
}

/*
 * Job-ring completion callback for skcipher decryption: unmap, free and
 * complete (no IV write-back on this path).
 */
static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	skcipher_request_complete(req, err);
}

/*
 * Fill in aead job descriptor
 *
 * Points the job descriptor at the proper (enc or dec) shared descriptor
 * and appends SEQ IN/OUT pointers: either direct sg addresses (all_contig /
 * single-entry dst) or offsets into the sec4 link table.  Output length is
 * input length +/- the ICV depending on direction.
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/* zero-length input has no mapped src */
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* in-place by default; overridden below when src != dst */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst entries follow the src entries in the table */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}

/*
 * Fill in the GCM-specific part of the job descriptor: assoclen in REG3,
 * the IV FIFO LOAD (with LAST1 for a zero-length job) and, for RFC4106,
 * the 4-byte salt ahead of the IV.
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

/*
 * Fill in the authenc-specific part of the job descriptor: assoclen into
 * REG3 or DPOVRD (era-dependent) and, when the descriptor does not generate
 * it, the IV loaded into CONTEXT1 at the mode-specific offset.
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in skcipher job descriptor
 *
 * Input always goes through the sec4 link table (IV entry + src entries);
 * output is in-place (table offset past the IV), a direct dst address, or
 * the dst half of the table.
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 out_options = 0;
	dma_addr_t dst_dma, ptr;
	int len;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	pr_err("asked=%d, cryptlen%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
#endif
	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	/* input = IV followed by the plaintext/ciphertext, via link table */
	append_seq_in_ptr(desc, edesc->sec4_sg_dma, req->cryptlen + ivsize,
			  LDST_SGF);

	if (likely(req->src == req->dst)) {
		/* in-place: reuse the src entries, skipping the IV entry */
		dst_dma = edesc->sec4_sg_dma + sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst entries follow IV entry + src entries */
			dst_dma = edesc->sec4_sg_dma + (edesc->src_nents + 1) *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		/* dst must hold the ICV on encrypt; it is consumed on decrypt */
		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
						(encrypt ? authsize :
							   (-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* link table entries are only needed for multi-segment lists */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ?
mapped_dst_nents : 0; 1211 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry); 1212 1213 /* allocate space for base edesc and hw desc commands, link tables */ 1214 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1215 GFP_DMA | flags); 1216 if (!edesc) { 1217 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1218 0, 0, 0); 1219 return ERR_PTR(-ENOMEM); 1220 } 1221 1222 edesc->src_nents = src_nents; 1223 edesc->dst_nents = dst_nents; 1224 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + 1225 desc_bytes; 1226 *all_contig_ptr = !(mapped_src_nents > 1); 1227 1228 sec4_sg_index = 0; 1229 if (mapped_src_nents > 1) { 1230 sg_to_sec4_sg_last(req->src, mapped_src_nents, 1231 edesc->sec4_sg + sec4_sg_index, 0); 1232 sec4_sg_index += mapped_src_nents; 1233 } 1234 if (mapped_dst_nents > 1) { 1235 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1236 edesc->sec4_sg + sec4_sg_index, 0); 1237 } 1238 1239 if (!sec4_sg_bytes) 1240 return edesc; 1241 1242 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1243 sec4_sg_bytes, DMA_TO_DEVICE); 1244 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1245 dev_err(jrdev, "unable to map S/G table\n"); 1246 aead_unmap(jrdev, edesc, req); 1247 kfree(edesc); 1248 return ERR_PTR(-ENOMEM); 1249 } 1250 1251 edesc->sec4_sg_bytes = sec4_sg_bytes; 1252 1253 return edesc; 1254 } 1255 1256 static int gcm_encrypt(struct aead_request *req) 1257 { 1258 struct aead_edesc *edesc; 1259 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1260 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1261 struct device *jrdev = ctx->jrdev; 1262 bool all_contig; 1263 u32 *desc; 1264 int ret = 0; 1265 1266 /* allocate extended descriptor */ 1267 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true); 1268 if (IS_ERR(edesc)) 1269 return PTR_ERR(edesc); 1270 1271 /* Create and submit job descriptor */ 1272 init_gcm_job(req, edesc, all_contig, true); 1273 #ifdef DEBUG 1274 print_hex_dump(KERN_ERR, "aead 
jobdesc@"__stringify(__LINE__)": ", 1275 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1276 desc_bytes(edesc->hw_desc), 1); 1277 #endif 1278 1279 desc = edesc->hw_desc; 1280 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1281 if (!ret) { 1282 ret = -EINPROGRESS; 1283 } else { 1284 aead_unmap(jrdev, edesc, req); 1285 kfree(edesc); 1286 } 1287 1288 return ret; 1289 } 1290 1291 static int ipsec_gcm_encrypt(struct aead_request *req) 1292 { 1293 if (req->assoclen < 8) 1294 return -EINVAL; 1295 1296 return gcm_encrypt(req); 1297 } 1298 1299 static int aead_encrypt(struct aead_request *req) 1300 { 1301 struct aead_edesc *edesc; 1302 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1303 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1304 struct device *jrdev = ctx->jrdev; 1305 bool all_contig; 1306 u32 *desc; 1307 int ret = 0; 1308 1309 /* allocate extended descriptor */ 1310 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1311 &all_contig, true); 1312 if (IS_ERR(edesc)) 1313 return PTR_ERR(edesc); 1314 1315 /* Create and submit job descriptor */ 1316 init_authenc_job(req, edesc, all_contig, true); 1317 #ifdef DEBUG 1318 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1319 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1320 desc_bytes(edesc->hw_desc), 1); 1321 #endif 1322 1323 desc = edesc->hw_desc; 1324 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1325 if (!ret) { 1326 ret = -EINPROGRESS; 1327 } else { 1328 aead_unmap(jrdev, edesc, req); 1329 kfree(edesc); 1330 } 1331 1332 return ret; 1333 } 1334 1335 static int gcm_decrypt(struct aead_request *req) 1336 { 1337 struct aead_edesc *edesc; 1338 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1339 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1340 struct device *jrdev = ctx->jrdev; 1341 bool all_contig; 1342 u32 *desc; 1343 int ret = 0; 1344 1345 /* allocate extended descriptor */ 1346 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false); 1347 if 
(IS_ERR(edesc)) 1348 return PTR_ERR(edesc); 1349 1350 /* Create and submit job descriptor*/ 1351 init_gcm_job(req, edesc, all_contig, false); 1352 #ifdef DEBUG 1353 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1354 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1355 desc_bytes(edesc->hw_desc), 1); 1356 #endif 1357 1358 desc = edesc->hw_desc; 1359 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1360 if (!ret) { 1361 ret = -EINPROGRESS; 1362 } else { 1363 aead_unmap(jrdev, edesc, req); 1364 kfree(edesc); 1365 } 1366 1367 return ret; 1368 } 1369 1370 static int ipsec_gcm_decrypt(struct aead_request *req) 1371 { 1372 if (req->assoclen < 8) 1373 return -EINVAL; 1374 1375 return gcm_decrypt(req); 1376 } 1377 1378 static int aead_decrypt(struct aead_request *req) 1379 { 1380 struct aead_edesc *edesc; 1381 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1382 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1383 struct device *jrdev = ctx->jrdev; 1384 bool all_contig; 1385 u32 *desc; 1386 int ret = 0; 1387 1388 caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ", 1389 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1390 req->assoclen + req->cryptlen, 1); 1391 1392 /* allocate extended descriptor */ 1393 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1394 &all_contig, false); 1395 if (IS_ERR(edesc)) 1396 return PTR_ERR(edesc); 1397 1398 /* Create and submit job descriptor*/ 1399 init_authenc_job(req, edesc, all_contig, false); 1400 #ifdef DEBUG 1401 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1402 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1403 desc_bytes(edesc->hw_desc), 1); 1404 #endif 1405 1406 desc = edesc->hw_desc; 1407 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1408 if (!ret) { 1409 ret = -EINPROGRESS; 1410 } else { 1411 aead_unmap(jrdev, edesc, req); 1412 kfree(edesc); 1413 } 1414 1415 return ret; 1416 } 1417 1418 /* 1419 * allocate and map the skcipher extended descriptor for skcipher 1420 
*/ 1421 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req, 1422 int desc_bytes) 1423 { 1424 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1425 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1426 struct device *jrdev = ctx->jrdev; 1427 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1428 GFP_KERNEL : GFP_ATOMIC; 1429 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1430 struct skcipher_edesc *edesc; 1431 dma_addr_t iv_dma; 1432 u8 *iv; 1433 int ivsize = crypto_skcipher_ivsize(skcipher); 1434 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1435 1436 src_nents = sg_nents_for_len(req->src, req->cryptlen); 1437 if (unlikely(src_nents < 0)) { 1438 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1439 req->cryptlen); 1440 return ERR_PTR(src_nents); 1441 } 1442 1443 if (req->dst != req->src) { 1444 dst_nents = sg_nents_for_len(req->dst, req->cryptlen); 1445 if (unlikely(dst_nents < 0)) { 1446 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1447 req->cryptlen); 1448 return ERR_PTR(dst_nents); 1449 } 1450 } 1451 1452 if (likely(req->src == req->dst)) { 1453 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1454 DMA_BIDIRECTIONAL); 1455 if (unlikely(!mapped_src_nents)) { 1456 dev_err(jrdev, "unable to map source\n"); 1457 return ERR_PTR(-ENOMEM); 1458 } 1459 } else { 1460 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1461 DMA_TO_DEVICE); 1462 if (unlikely(!mapped_src_nents)) { 1463 dev_err(jrdev, "unable to map source\n"); 1464 return ERR_PTR(-ENOMEM); 1465 } 1466 1467 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1468 DMA_FROM_DEVICE); 1469 if (unlikely(!mapped_dst_nents)) { 1470 dev_err(jrdev, "unable to map destination\n"); 1471 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1472 return ERR_PTR(-ENOMEM); 1473 } 1474 } 1475 1476 sec4_sg_ents = 1 + mapped_src_nents; 1477 dst_sg_idx = sec4_sg_ents; 1478 sec4_sg_ents += mapped_dst_nents > 
1 ? mapped_dst_nents : 0; 1479 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1480 1481 /* 1482 * allocate space for base edesc and hw desc commands, link tables, IV 1483 */ 1484 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize, 1485 GFP_DMA | flags); 1486 if (!edesc) { 1487 dev_err(jrdev, "could not allocate extended descriptor\n"); 1488 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1489 0, 0, 0); 1490 return ERR_PTR(-ENOMEM); 1491 } 1492 1493 edesc->src_nents = src_nents; 1494 edesc->dst_nents = dst_nents; 1495 edesc->sec4_sg_bytes = sec4_sg_bytes; 1496 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc + 1497 desc_bytes); 1498 1499 /* Make sure IV is located in a DMAable area */ 1500 iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes; 1501 memcpy(iv, req->iv, ivsize); 1502 1503 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE); 1504 if (dma_mapping_error(jrdev, iv_dma)) { 1505 dev_err(jrdev, "unable to map IV\n"); 1506 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1507 0, 0, 0); 1508 kfree(edesc); 1509 return ERR_PTR(-ENOMEM); 1510 } 1511 1512 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); 1513 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1, 0); 1514 1515 if (mapped_dst_nents > 1) { 1516 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1517 edesc->sec4_sg + dst_sg_idx, 0); 1518 } 1519 1520 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1521 sec4_sg_bytes, DMA_TO_DEVICE); 1522 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1523 dev_err(jrdev, "unable to map S/G table\n"); 1524 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1525 iv_dma, ivsize, 0, 0); 1526 kfree(edesc); 1527 return ERR_PTR(-ENOMEM); 1528 } 1529 1530 edesc->iv_dma = iv_dma; 1531 1532 #ifdef DEBUG 1533 print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ", 1534 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1535 sec4_sg_bytes, 1); 1536 
#endif 1537 1538 return edesc; 1539 } 1540 1541 static int skcipher_encrypt(struct skcipher_request *req) 1542 { 1543 struct skcipher_edesc *edesc; 1544 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1545 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1546 struct device *jrdev = ctx->jrdev; 1547 u32 *desc; 1548 int ret = 0; 1549 1550 /* allocate extended descriptor */ 1551 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); 1552 if (IS_ERR(edesc)) 1553 return PTR_ERR(edesc); 1554 1555 /* Create and submit job descriptor*/ 1556 init_skcipher_job(req, edesc, true); 1557 #ifdef DEBUG 1558 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ", 1559 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1560 desc_bytes(edesc->hw_desc), 1); 1561 #endif 1562 desc = edesc->hw_desc; 1563 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req); 1564 1565 if (!ret) { 1566 ret = -EINPROGRESS; 1567 } else { 1568 skcipher_unmap(jrdev, edesc, req); 1569 kfree(edesc); 1570 } 1571 1572 return ret; 1573 } 1574 1575 static int skcipher_decrypt(struct skcipher_request *req) 1576 { 1577 struct skcipher_edesc *edesc; 1578 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1579 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1580 int ivsize = crypto_skcipher_ivsize(skcipher); 1581 struct device *jrdev = ctx->jrdev; 1582 u32 *desc; 1583 int ret = 0; 1584 1585 /* allocate extended descriptor */ 1586 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); 1587 if (IS_ERR(edesc)) 1588 return PTR_ERR(edesc); 1589 1590 /* 1591 * The crypto API expects us to set the IV (req->iv) to the last 1592 * ciphertext block. 
1593 */ 1594 scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen - ivsize, 1595 ivsize, 0); 1596 1597 /* Create and submit job descriptor*/ 1598 init_skcipher_job(req, edesc, false); 1599 desc = edesc->hw_desc; 1600 #ifdef DEBUG 1601 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ", 1602 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1603 desc_bytes(edesc->hw_desc), 1); 1604 #endif 1605 1606 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req); 1607 if (!ret) { 1608 ret = -EINPROGRESS; 1609 } else { 1610 skcipher_unmap(jrdev, edesc, req); 1611 kfree(edesc); 1612 } 1613 1614 return ret; 1615 } 1616 1617 static struct caam_skcipher_alg driver_algs[] = { 1618 { 1619 .skcipher = { 1620 .base = { 1621 .cra_name = "cbc(aes)", 1622 .cra_driver_name = "cbc-aes-caam", 1623 .cra_blocksize = AES_BLOCK_SIZE, 1624 }, 1625 .setkey = skcipher_setkey, 1626 .encrypt = skcipher_encrypt, 1627 .decrypt = skcipher_decrypt, 1628 .min_keysize = AES_MIN_KEY_SIZE, 1629 .max_keysize = AES_MAX_KEY_SIZE, 1630 .ivsize = AES_BLOCK_SIZE, 1631 }, 1632 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1633 }, 1634 { 1635 .skcipher = { 1636 .base = { 1637 .cra_name = "cbc(des3_ede)", 1638 .cra_driver_name = "cbc-3des-caam", 1639 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 1640 }, 1641 .setkey = skcipher_setkey, 1642 .encrypt = skcipher_encrypt, 1643 .decrypt = skcipher_decrypt, 1644 .min_keysize = DES3_EDE_KEY_SIZE, 1645 .max_keysize = DES3_EDE_KEY_SIZE, 1646 .ivsize = DES3_EDE_BLOCK_SIZE, 1647 }, 1648 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 1649 }, 1650 { 1651 .skcipher = { 1652 .base = { 1653 .cra_name = "cbc(des)", 1654 .cra_driver_name = "cbc-des-caam", 1655 .cra_blocksize = DES_BLOCK_SIZE, 1656 }, 1657 .setkey = skcipher_setkey, 1658 .encrypt = skcipher_encrypt, 1659 .decrypt = skcipher_decrypt, 1660 .min_keysize = DES_KEY_SIZE, 1661 .max_keysize = DES_KEY_SIZE, 1662 .ivsize = DES_BLOCK_SIZE, 1663 }, 1664 .caam.class1_alg_type = 
OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 1665 }, 1666 { 1667 .skcipher = { 1668 .base = { 1669 .cra_name = "ctr(aes)", 1670 .cra_driver_name = "ctr-aes-caam", 1671 .cra_blocksize = 1, 1672 }, 1673 .setkey = skcipher_setkey, 1674 .encrypt = skcipher_encrypt, 1675 .decrypt = skcipher_decrypt, 1676 .min_keysize = AES_MIN_KEY_SIZE, 1677 .max_keysize = AES_MAX_KEY_SIZE, 1678 .ivsize = AES_BLOCK_SIZE, 1679 .chunksize = AES_BLOCK_SIZE, 1680 }, 1681 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | 1682 OP_ALG_AAI_CTR_MOD128, 1683 }, 1684 { 1685 .skcipher = { 1686 .base = { 1687 .cra_name = "rfc3686(ctr(aes))", 1688 .cra_driver_name = "rfc3686-ctr-aes-caam", 1689 .cra_blocksize = 1, 1690 }, 1691 .setkey = skcipher_setkey, 1692 .encrypt = skcipher_encrypt, 1693 .decrypt = skcipher_decrypt, 1694 .min_keysize = AES_MIN_KEY_SIZE + 1695 CTR_RFC3686_NONCE_SIZE, 1696 .max_keysize = AES_MAX_KEY_SIZE + 1697 CTR_RFC3686_NONCE_SIZE, 1698 .ivsize = CTR_RFC3686_IV_SIZE, 1699 .chunksize = AES_BLOCK_SIZE, 1700 }, 1701 .caam = { 1702 .class1_alg_type = OP_ALG_ALGSEL_AES | 1703 OP_ALG_AAI_CTR_MOD128, 1704 .rfc3686 = true, 1705 }, 1706 }, 1707 { 1708 .skcipher = { 1709 .base = { 1710 .cra_name = "xts(aes)", 1711 .cra_driver_name = "xts-aes-caam", 1712 .cra_blocksize = AES_BLOCK_SIZE, 1713 }, 1714 .setkey = xts_skcipher_setkey, 1715 .encrypt = skcipher_encrypt, 1716 .decrypt = skcipher_decrypt, 1717 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1718 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1719 .ivsize = AES_BLOCK_SIZE, 1720 }, 1721 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1722 }, 1723 }; 1724 1725 static struct caam_aead_alg driver_aeads[] = { 1726 { 1727 .aead = { 1728 .base = { 1729 .cra_name = "rfc4106(gcm(aes))", 1730 .cra_driver_name = "rfc4106-gcm-aes-caam", 1731 .cra_blocksize = 1, 1732 }, 1733 .setkey = rfc4106_setkey, 1734 .setauthsize = rfc4106_setauthsize, 1735 .encrypt = ipsec_gcm_encrypt, 1736 .decrypt = ipsec_gcm_decrypt, 1737 .ivsize = GCM_RFC4106_IV_SIZE, 1738 .maxauthsize = 
AES_BLOCK_SIZE, 1739 }, 1740 .caam = { 1741 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 1742 }, 1743 }, 1744 { 1745 .aead = { 1746 .base = { 1747 .cra_name = "rfc4543(gcm(aes))", 1748 .cra_driver_name = "rfc4543-gcm-aes-caam", 1749 .cra_blocksize = 1, 1750 }, 1751 .setkey = rfc4543_setkey, 1752 .setauthsize = rfc4543_setauthsize, 1753 .encrypt = ipsec_gcm_encrypt, 1754 .decrypt = ipsec_gcm_decrypt, 1755 .ivsize = GCM_RFC4543_IV_SIZE, 1756 .maxauthsize = AES_BLOCK_SIZE, 1757 }, 1758 .caam = { 1759 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 1760 }, 1761 }, 1762 /* Galois Counter Mode */ 1763 { 1764 .aead = { 1765 .base = { 1766 .cra_name = "gcm(aes)", 1767 .cra_driver_name = "gcm-aes-caam", 1768 .cra_blocksize = 1, 1769 }, 1770 .setkey = gcm_setkey, 1771 .setauthsize = gcm_setauthsize, 1772 .encrypt = gcm_encrypt, 1773 .decrypt = gcm_decrypt, 1774 .ivsize = GCM_AES_IV_SIZE, 1775 .maxauthsize = AES_BLOCK_SIZE, 1776 }, 1777 .caam = { 1778 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 1779 }, 1780 }, 1781 /* single-pass ipsec_esp descriptor */ 1782 { 1783 .aead = { 1784 .base = { 1785 .cra_name = "authenc(hmac(md5)," 1786 "ecb(cipher_null))", 1787 .cra_driver_name = "authenc-hmac-md5-" 1788 "ecb-cipher_null-caam", 1789 .cra_blocksize = NULL_BLOCK_SIZE, 1790 }, 1791 .setkey = aead_setkey, 1792 .setauthsize = aead_setauthsize, 1793 .encrypt = aead_encrypt, 1794 .decrypt = aead_decrypt, 1795 .ivsize = NULL_IV_SIZE, 1796 .maxauthsize = MD5_DIGEST_SIZE, 1797 }, 1798 .caam = { 1799 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 1800 OP_ALG_AAI_HMAC_PRECOMP, 1801 }, 1802 }, 1803 { 1804 .aead = { 1805 .base = { 1806 .cra_name = "authenc(hmac(sha1)," 1807 "ecb(cipher_null))", 1808 .cra_driver_name = "authenc-hmac-sha1-" 1809 "ecb-cipher_null-caam", 1810 .cra_blocksize = NULL_BLOCK_SIZE, 1811 }, 1812 .setkey = aead_setkey, 1813 .setauthsize = aead_setauthsize, 1814 .encrypt = aead_encrypt, 1815 .decrypt = aead_decrypt, 1816 .ivsize = NULL_IV_SIZE, 1817 
.maxauthsize = SHA1_DIGEST_SIZE, 1818 }, 1819 .caam = { 1820 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 1821 OP_ALG_AAI_HMAC_PRECOMP, 1822 }, 1823 }, 1824 { 1825 .aead = { 1826 .base = { 1827 .cra_name = "authenc(hmac(sha224)," 1828 "ecb(cipher_null))", 1829 .cra_driver_name = "authenc-hmac-sha224-" 1830 "ecb-cipher_null-caam", 1831 .cra_blocksize = NULL_BLOCK_SIZE, 1832 }, 1833 .setkey = aead_setkey, 1834 .setauthsize = aead_setauthsize, 1835 .encrypt = aead_encrypt, 1836 .decrypt = aead_decrypt, 1837 .ivsize = NULL_IV_SIZE, 1838 .maxauthsize = SHA224_DIGEST_SIZE, 1839 }, 1840 .caam = { 1841 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 1842 OP_ALG_AAI_HMAC_PRECOMP, 1843 }, 1844 }, 1845 { 1846 .aead = { 1847 .base = { 1848 .cra_name = "authenc(hmac(sha256)," 1849 "ecb(cipher_null))", 1850 .cra_driver_name = "authenc-hmac-sha256-" 1851 "ecb-cipher_null-caam", 1852 .cra_blocksize = NULL_BLOCK_SIZE, 1853 }, 1854 .setkey = aead_setkey, 1855 .setauthsize = aead_setauthsize, 1856 .encrypt = aead_encrypt, 1857 .decrypt = aead_decrypt, 1858 .ivsize = NULL_IV_SIZE, 1859 .maxauthsize = SHA256_DIGEST_SIZE, 1860 }, 1861 .caam = { 1862 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 1863 OP_ALG_AAI_HMAC_PRECOMP, 1864 }, 1865 }, 1866 { 1867 .aead = { 1868 .base = { 1869 .cra_name = "authenc(hmac(sha384)," 1870 "ecb(cipher_null))", 1871 .cra_driver_name = "authenc-hmac-sha384-" 1872 "ecb-cipher_null-caam", 1873 .cra_blocksize = NULL_BLOCK_SIZE, 1874 }, 1875 .setkey = aead_setkey, 1876 .setauthsize = aead_setauthsize, 1877 .encrypt = aead_encrypt, 1878 .decrypt = aead_decrypt, 1879 .ivsize = NULL_IV_SIZE, 1880 .maxauthsize = SHA384_DIGEST_SIZE, 1881 }, 1882 .caam = { 1883 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 1884 OP_ALG_AAI_HMAC_PRECOMP, 1885 }, 1886 }, 1887 { 1888 .aead = { 1889 .base = { 1890 .cra_name = "authenc(hmac(sha512)," 1891 "ecb(cipher_null))", 1892 .cra_driver_name = "authenc-hmac-sha512-" 1893 "ecb-cipher_null-caam", 1894 .cra_blocksize = NULL_BLOCK_SIZE, 1895 }, 1896 
.setkey = aead_setkey, 1897 .setauthsize = aead_setauthsize, 1898 .encrypt = aead_encrypt, 1899 .decrypt = aead_decrypt, 1900 .ivsize = NULL_IV_SIZE, 1901 .maxauthsize = SHA512_DIGEST_SIZE, 1902 }, 1903 .caam = { 1904 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 1905 OP_ALG_AAI_HMAC_PRECOMP, 1906 }, 1907 }, 1908 { 1909 .aead = { 1910 .base = { 1911 .cra_name = "authenc(hmac(md5),cbc(aes))", 1912 .cra_driver_name = "authenc-hmac-md5-" 1913 "cbc-aes-caam", 1914 .cra_blocksize = AES_BLOCK_SIZE, 1915 }, 1916 .setkey = aead_setkey, 1917 .setauthsize = aead_setauthsize, 1918 .encrypt = aead_encrypt, 1919 .decrypt = aead_decrypt, 1920 .ivsize = AES_BLOCK_SIZE, 1921 .maxauthsize = MD5_DIGEST_SIZE, 1922 }, 1923 .caam = { 1924 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1925 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 1926 OP_ALG_AAI_HMAC_PRECOMP, 1927 }, 1928 }, 1929 { 1930 .aead = { 1931 .base = { 1932 .cra_name = "echainiv(authenc(hmac(md5)," 1933 "cbc(aes)))", 1934 .cra_driver_name = "echainiv-authenc-hmac-md5-" 1935 "cbc-aes-caam", 1936 .cra_blocksize = AES_BLOCK_SIZE, 1937 }, 1938 .setkey = aead_setkey, 1939 .setauthsize = aead_setauthsize, 1940 .encrypt = aead_encrypt, 1941 .decrypt = aead_decrypt, 1942 .ivsize = AES_BLOCK_SIZE, 1943 .maxauthsize = MD5_DIGEST_SIZE, 1944 }, 1945 .caam = { 1946 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1947 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 1948 OP_ALG_AAI_HMAC_PRECOMP, 1949 .geniv = true, 1950 }, 1951 }, 1952 { 1953 .aead = { 1954 .base = { 1955 .cra_name = "authenc(hmac(sha1),cbc(aes))", 1956 .cra_driver_name = "authenc-hmac-sha1-" 1957 "cbc-aes-caam", 1958 .cra_blocksize = AES_BLOCK_SIZE, 1959 }, 1960 .setkey = aead_setkey, 1961 .setauthsize = aead_setauthsize, 1962 .encrypt = aead_encrypt, 1963 .decrypt = aead_decrypt, 1964 .ivsize = AES_BLOCK_SIZE, 1965 .maxauthsize = SHA1_DIGEST_SIZE, 1966 }, 1967 .caam = { 1968 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1969 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 
1970 OP_ALG_AAI_HMAC_PRECOMP, 1971 }, 1972 }, 1973 { 1974 .aead = { 1975 .base = { 1976 .cra_name = "echainiv(authenc(hmac(sha1)," 1977 "cbc(aes)))", 1978 .cra_driver_name = "echainiv-authenc-" 1979 "hmac-sha1-cbc-aes-caam", 1980 .cra_blocksize = AES_BLOCK_SIZE, 1981 }, 1982 .setkey = aead_setkey, 1983 .setauthsize = aead_setauthsize, 1984 .encrypt = aead_encrypt, 1985 .decrypt = aead_decrypt, 1986 .ivsize = AES_BLOCK_SIZE, 1987 .maxauthsize = SHA1_DIGEST_SIZE, 1988 }, 1989 .caam = { 1990 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1991 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 1992 OP_ALG_AAI_HMAC_PRECOMP, 1993 .geniv = true, 1994 }, 1995 }, 1996 { 1997 .aead = { 1998 .base = { 1999 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2000 .cra_driver_name = "authenc-hmac-sha224-" 2001 "cbc-aes-caam", 2002 .cra_blocksize = AES_BLOCK_SIZE, 2003 }, 2004 .setkey = aead_setkey, 2005 .setauthsize = aead_setauthsize, 2006 .encrypt = aead_encrypt, 2007 .decrypt = aead_decrypt, 2008 .ivsize = AES_BLOCK_SIZE, 2009 .maxauthsize = SHA224_DIGEST_SIZE, 2010 }, 2011 .caam = { 2012 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2013 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2014 OP_ALG_AAI_HMAC_PRECOMP, 2015 }, 2016 }, 2017 { 2018 .aead = { 2019 .base = { 2020 .cra_name = "echainiv(authenc(hmac(sha224)," 2021 "cbc(aes)))", 2022 .cra_driver_name = "echainiv-authenc-" 2023 "hmac-sha224-cbc-aes-caam", 2024 .cra_blocksize = AES_BLOCK_SIZE, 2025 }, 2026 .setkey = aead_setkey, 2027 .setauthsize = aead_setauthsize, 2028 .encrypt = aead_encrypt, 2029 .decrypt = aead_decrypt, 2030 .ivsize = AES_BLOCK_SIZE, 2031 .maxauthsize = SHA224_DIGEST_SIZE, 2032 }, 2033 .caam = { 2034 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2035 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2036 OP_ALG_AAI_HMAC_PRECOMP, 2037 .geniv = true, 2038 }, 2039 }, 2040 { 2041 .aead = { 2042 .base = { 2043 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2044 .cra_driver_name = "authenc-hmac-sha256-" 2045 
"cbc-aes-caam", 2046 .cra_blocksize = AES_BLOCK_SIZE, 2047 }, 2048 .setkey = aead_setkey, 2049 .setauthsize = aead_setauthsize, 2050 .encrypt = aead_encrypt, 2051 .decrypt = aead_decrypt, 2052 .ivsize = AES_BLOCK_SIZE, 2053 .maxauthsize = SHA256_DIGEST_SIZE, 2054 }, 2055 .caam = { 2056 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2057 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2058 OP_ALG_AAI_HMAC_PRECOMP, 2059 }, 2060 }, 2061 { 2062 .aead = { 2063 .base = { 2064 .cra_name = "echainiv(authenc(hmac(sha256)," 2065 "cbc(aes)))", 2066 .cra_driver_name = "echainiv-authenc-" 2067 "hmac-sha256-cbc-aes-caam", 2068 .cra_blocksize = AES_BLOCK_SIZE, 2069 }, 2070 .setkey = aead_setkey, 2071 .setauthsize = aead_setauthsize, 2072 .encrypt = aead_encrypt, 2073 .decrypt = aead_decrypt, 2074 .ivsize = AES_BLOCK_SIZE, 2075 .maxauthsize = SHA256_DIGEST_SIZE, 2076 }, 2077 .caam = { 2078 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2079 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2080 OP_ALG_AAI_HMAC_PRECOMP, 2081 .geniv = true, 2082 }, 2083 }, 2084 { 2085 .aead = { 2086 .base = { 2087 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2088 .cra_driver_name = "authenc-hmac-sha384-" 2089 "cbc-aes-caam", 2090 .cra_blocksize = AES_BLOCK_SIZE, 2091 }, 2092 .setkey = aead_setkey, 2093 .setauthsize = aead_setauthsize, 2094 .encrypt = aead_encrypt, 2095 .decrypt = aead_decrypt, 2096 .ivsize = AES_BLOCK_SIZE, 2097 .maxauthsize = SHA384_DIGEST_SIZE, 2098 }, 2099 .caam = { 2100 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2101 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2102 OP_ALG_AAI_HMAC_PRECOMP, 2103 }, 2104 }, 2105 { 2106 .aead = { 2107 .base = { 2108 .cra_name = "echainiv(authenc(hmac(sha384)," 2109 "cbc(aes)))", 2110 .cra_driver_name = "echainiv-authenc-" 2111 "hmac-sha384-cbc-aes-caam", 2112 .cra_blocksize = AES_BLOCK_SIZE, 2113 }, 2114 .setkey = aead_setkey, 2115 .setauthsize = aead_setauthsize, 2116 .encrypt = aead_encrypt, 2117 .decrypt = aead_decrypt, 2118 .ivsize 
= AES_BLOCK_SIZE, 2119 .maxauthsize = SHA384_DIGEST_SIZE, 2120 }, 2121 .caam = { 2122 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2123 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2124 OP_ALG_AAI_HMAC_PRECOMP, 2125 .geniv = true, 2126 }, 2127 }, 2128 { 2129 .aead = { 2130 .base = { 2131 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2132 .cra_driver_name = "authenc-hmac-sha512-" 2133 "cbc-aes-caam", 2134 .cra_blocksize = AES_BLOCK_SIZE, 2135 }, 2136 .setkey = aead_setkey, 2137 .setauthsize = aead_setauthsize, 2138 .encrypt = aead_encrypt, 2139 .decrypt = aead_decrypt, 2140 .ivsize = AES_BLOCK_SIZE, 2141 .maxauthsize = SHA512_DIGEST_SIZE, 2142 }, 2143 .caam = { 2144 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2145 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2146 OP_ALG_AAI_HMAC_PRECOMP, 2147 }, 2148 }, 2149 { 2150 .aead = { 2151 .base = { 2152 .cra_name = "echainiv(authenc(hmac(sha512)," 2153 "cbc(aes)))", 2154 .cra_driver_name = "echainiv-authenc-" 2155 "hmac-sha512-cbc-aes-caam", 2156 .cra_blocksize = AES_BLOCK_SIZE, 2157 }, 2158 .setkey = aead_setkey, 2159 .setauthsize = aead_setauthsize, 2160 .encrypt = aead_encrypt, 2161 .decrypt = aead_decrypt, 2162 .ivsize = AES_BLOCK_SIZE, 2163 .maxauthsize = SHA512_DIGEST_SIZE, 2164 }, 2165 .caam = { 2166 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2167 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2168 OP_ALG_AAI_HMAC_PRECOMP, 2169 .geniv = true, 2170 }, 2171 }, 2172 { 2173 .aead = { 2174 .base = { 2175 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2176 .cra_driver_name = "authenc-hmac-md5-" 2177 "cbc-des3_ede-caam", 2178 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2179 }, 2180 .setkey = aead_setkey, 2181 .setauthsize = aead_setauthsize, 2182 .encrypt = aead_encrypt, 2183 .decrypt = aead_decrypt, 2184 .ivsize = DES3_EDE_BLOCK_SIZE, 2185 .maxauthsize = MD5_DIGEST_SIZE, 2186 }, 2187 .caam = { 2188 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2189 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2190 
OP_ALG_AAI_HMAC_PRECOMP, 2191 } 2192 }, 2193 { 2194 .aead = { 2195 .base = { 2196 .cra_name = "echainiv(authenc(hmac(md5)," 2197 "cbc(des3_ede)))", 2198 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2199 "cbc-des3_ede-caam", 2200 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2201 }, 2202 .setkey = aead_setkey, 2203 .setauthsize = aead_setauthsize, 2204 .encrypt = aead_encrypt, 2205 .decrypt = aead_decrypt, 2206 .ivsize = DES3_EDE_BLOCK_SIZE, 2207 .maxauthsize = MD5_DIGEST_SIZE, 2208 }, 2209 .caam = { 2210 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2211 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2212 OP_ALG_AAI_HMAC_PRECOMP, 2213 .geniv = true, 2214 } 2215 }, 2216 { 2217 .aead = { 2218 .base = { 2219 .cra_name = "authenc(hmac(sha1)," 2220 "cbc(des3_ede))", 2221 .cra_driver_name = "authenc-hmac-sha1-" 2222 "cbc-des3_ede-caam", 2223 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2224 }, 2225 .setkey = aead_setkey, 2226 .setauthsize = aead_setauthsize, 2227 .encrypt = aead_encrypt, 2228 .decrypt = aead_decrypt, 2229 .ivsize = DES3_EDE_BLOCK_SIZE, 2230 .maxauthsize = SHA1_DIGEST_SIZE, 2231 }, 2232 .caam = { 2233 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2234 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2235 OP_ALG_AAI_HMAC_PRECOMP, 2236 }, 2237 }, 2238 { 2239 .aead = { 2240 .base = { 2241 .cra_name = "echainiv(authenc(hmac(sha1)," 2242 "cbc(des3_ede)))", 2243 .cra_driver_name = "echainiv-authenc-" 2244 "hmac-sha1-" 2245 "cbc-des3_ede-caam", 2246 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2247 }, 2248 .setkey = aead_setkey, 2249 .setauthsize = aead_setauthsize, 2250 .encrypt = aead_encrypt, 2251 .decrypt = aead_decrypt, 2252 .ivsize = DES3_EDE_BLOCK_SIZE, 2253 .maxauthsize = SHA1_DIGEST_SIZE, 2254 }, 2255 .caam = { 2256 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2257 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2258 OP_ALG_AAI_HMAC_PRECOMP, 2259 .geniv = true, 2260 }, 2261 }, 2262 { 2263 .aead = { 2264 .base = { 2265 .cra_name = "authenc(hmac(sha224)," 2266 
"cbc(des3_ede))", 2267 .cra_driver_name = "authenc-hmac-sha224-" 2268 "cbc-des3_ede-caam", 2269 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2270 }, 2271 .setkey = aead_setkey, 2272 .setauthsize = aead_setauthsize, 2273 .encrypt = aead_encrypt, 2274 .decrypt = aead_decrypt, 2275 .ivsize = DES3_EDE_BLOCK_SIZE, 2276 .maxauthsize = SHA224_DIGEST_SIZE, 2277 }, 2278 .caam = { 2279 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2280 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2281 OP_ALG_AAI_HMAC_PRECOMP, 2282 }, 2283 }, 2284 { 2285 .aead = { 2286 .base = { 2287 .cra_name = "echainiv(authenc(hmac(sha224)," 2288 "cbc(des3_ede)))", 2289 .cra_driver_name = "echainiv-authenc-" 2290 "hmac-sha224-" 2291 "cbc-des3_ede-caam", 2292 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2293 }, 2294 .setkey = aead_setkey, 2295 .setauthsize = aead_setauthsize, 2296 .encrypt = aead_encrypt, 2297 .decrypt = aead_decrypt, 2298 .ivsize = DES3_EDE_BLOCK_SIZE, 2299 .maxauthsize = SHA224_DIGEST_SIZE, 2300 }, 2301 .caam = { 2302 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2303 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2304 OP_ALG_AAI_HMAC_PRECOMP, 2305 .geniv = true, 2306 }, 2307 }, 2308 { 2309 .aead = { 2310 .base = { 2311 .cra_name = "authenc(hmac(sha256)," 2312 "cbc(des3_ede))", 2313 .cra_driver_name = "authenc-hmac-sha256-" 2314 "cbc-des3_ede-caam", 2315 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2316 }, 2317 .setkey = aead_setkey, 2318 .setauthsize = aead_setauthsize, 2319 .encrypt = aead_encrypt, 2320 .decrypt = aead_decrypt, 2321 .ivsize = DES3_EDE_BLOCK_SIZE, 2322 .maxauthsize = SHA256_DIGEST_SIZE, 2323 }, 2324 .caam = { 2325 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2326 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2327 OP_ALG_AAI_HMAC_PRECOMP, 2328 }, 2329 }, 2330 { 2331 .aead = { 2332 .base = { 2333 .cra_name = "echainiv(authenc(hmac(sha256)," 2334 "cbc(des3_ede)))", 2335 .cra_driver_name = "echainiv-authenc-" 2336 "hmac-sha256-" 2337 "cbc-des3_ede-caam", 2338 .cra_blocksize = 
DES3_EDE_BLOCK_SIZE, 2339 }, 2340 .setkey = aead_setkey, 2341 .setauthsize = aead_setauthsize, 2342 .encrypt = aead_encrypt, 2343 .decrypt = aead_decrypt, 2344 .ivsize = DES3_EDE_BLOCK_SIZE, 2345 .maxauthsize = SHA256_DIGEST_SIZE, 2346 }, 2347 .caam = { 2348 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2349 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2350 OP_ALG_AAI_HMAC_PRECOMP, 2351 .geniv = true, 2352 }, 2353 }, 2354 { 2355 .aead = { 2356 .base = { 2357 .cra_name = "authenc(hmac(sha384)," 2358 "cbc(des3_ede))", 2359 .cra_driver_name = "authenc-hmac-sha384-" 2360 "cbc-des3_ede-caam", 2361 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2362 }, 2363 .setkey = aead_setkey, 2364 .setauthsize = aead_setauthsize, 2365 .encrypt = aead_encrypt, 2366 .decrypt = aead_decrypt, 2367 .ivsize = DES3_EDE_BLOCK_SIZE, 2368 .maxauthsize = SHA384_DIGEST_SIZE, 2369 }, 2370 .caam = { 2371 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2372 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2373 OP_ALG_AAI_HMAC_PRECOMP, 2374 }, 2375 }, 2376 { 2377 .aead = { 2378 .base = { 2379 .cra_name = "echainiv(authenc(hmac(sha384)," 2380 "cbc(des3_ede)))", 2381 .cra_driver_name = "echainiv-authenc-" 2382 "hmac-sha384-" 2383 "cbc-des3_ede-caam", 2384 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2385 }, 2386 .setkey = aead_setkey, 2387 .setauthsize = aead_setauthsize, 2388 .encrypt = aead_encrypt, 2389 .decrypt = aead_decrypt, 2390 .ivsize = DES3_EDE_BLOCK_SIZE, 2391 .maxauthsize = SHA384_DIGEST_SIZE, 2392 }, 2393 .caam = { 2394 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2395 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2396 OP_ALG_AAI_HMAC_PRECOMP, 2397 .geniv = true, 2398 }, 2399 }, 2400 { 2401 .aead = { 2402 .base = { 2403 .cra_name = "authenc(hmac(sha512)," 2404 "cbc(des3_ede))", 2405 .cra_driver_name = "authenc-hmac-sha512-" 2406 "cbc-des3_ede-caam", 2407 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2408 }, 2409 .setkey = aead_setkey, 2410 .setauthsize = aead_setauthsize, 2411 .encrypt = 
aead_encrypt, 2412 .decrypt = aead_decrypt, 2413 .ivsize = DES3_EDE_BLOCK_SIZE, 2414 .maxauthsize = SHA512_DIGEST_SIZE, 2415 }, 2416 .caam = { 2417 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2418 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2419 OP_ALG_AAI_HMAC_PRECOMP, 2420 }, 2421 }, 2422 { 2423 .aead = { 2424 .base = { 2425 .cra_name = "echainiv(authenc(hmac(sha512)," 2426 "cbc(des3_ede)))", 2427 .cra_driver_name = "echainiv-authenc-" 2428 "hmac-sha512-" 2429 "cbc-des3_ede-caam", 2430 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2431 }, 2432 .setkey = aead_setkey, 2433 .setauthsize = aead_setauthsize, 2434 .encrypt = aead_encrypt, 2435 .decrypt = aead_decrypt, 2436 .ivsize = DES3_EDE_BLOCK_SIZE, 2437 .maxauthsize = SHA512_DIGEST_SIZE, 2438 }, 2439 .caam = { 2440 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2441 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2442 OP_ALG_AAI_HMAC_PRECOMP, 2443 .geniv = true, 2444 }, 2445 }, 2446 { 2447 .aead = { 2448 .base = { 2449 .cra_name = "authenc(hmac(md5),cbc(des))", 2450 .cra_driver_name = "authenc-hmac-md5-" 2451 "cbc-des-caam", 2452 .cra_blocksize = DES_BLOCK_SIZE, 2453 }, 2454 .setkey = aead_setkey, 2455 .setauthsize = aead_setauthsize, 2456 .encrypt = aead_encrypt, 2457 .decrypt = aead_decrypt, 2458 .ivsize = DES_BLOCK_SIZE, 2459 .maxauthsize = MD5_DIGEST_SIZE, 2460 }, 2461 .caam = { 2462 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2463 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2464 OP_ALG_AAI_HMAC_PRECOMP, 2465 }, 2466 }, 2467 { 2468 .aead = { 2469 .base = { 2470 .cra_name = "echainiv(authenc(hmac(md5)," 2471 "cbc(des)))", 2472 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2473 "cbc-des-caam", 2474 .cra_blocksize = DES_BLOCK_SIZE, 2475 }, 2476 .setkey = aead_setkey, 2477 .setauthsize = aead_setauthsize, 2478 .encrypt = aead_encrypt, 2479 .decrypt = aead_decrypt, 2480 .ivsize = DES_BLOCK_SIZE, 2481 .maxauthsize = MD5_DIGEST_SIZE, 2482 }, 2483 .caam = { 2484 .class1_alg_type = OP_ALG_ALGSEL_DES | 
OP_ALG_AAI_CBC, 2485 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2486 OP_ALG_AAI_HMAC_PRECOMP, 2487 .geniv = true, 2488 }, 2489 }, 2490 { 2491 .aead = { 2492 .base = { 2493 .cra_name = "authenc(hmac(sha1),cbc(des))", 2494 .cra_driver_name = "authenc-hmac-sha1-" 2495 "cbc-des-caam", 2496 .cra_blocksize = DES_BLOCK_SIZE, 2497 }, 2498 .setkey = aead_setkey, 2499 .setauthsize = aead_setauthsize, 2500 .encrypt = aead_encrypt, 2501 .decrypt = aead_decrypt, 2502 .ivsize = DES_BLOCK_SIZE, 2503 .maxauthsize = SHA1_DIGEST_SIZE, 2504 }, 2505 .caam = { 2506 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2507 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2508 OP_ALG_AAI_HMAC_PRECOMP, 2509 }, 2510 }, 2511 { 2512 .aead = { 2513 .base = { 2514 .cra_name = "echainiv(authenc(hmac(sha1)," 2515 "cbc(des)))", 2516 .cra_driver_name = "echainiv-authenc-" 2517 "hmac-sha1-cbc-des-caam", 2518 .cra_blocksize = DES_BLOCK_SIZE, 2519 }, 2520 .setkey = aead_setkey, 2521 .setauthsize = aead_setauthsize, 2522 .encrypt = aead_encrypt, 2523 .decrypt = aead_decrypt, 2524 .ivsize = DES_BLOCK_SIZE, 2525 .maxauthsize = SHA1_DIGEST_SIZE, 2526 }, 2527 .caam = { 2528 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2529 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2530 OP_ALG_AAI_HMAC_PRECOMP, 2531 .geniv = true, 2532 }, 2533 }, 2534 { 2535 .aead = { 2536 .base = { 2537 .cra_name = "authenc(hmac(sha224),cbc(des))", 2538 .cra_driver_name = "authenc-hmac-sha224-" 2539 "cbc-des-caam", 2540 .cra_blocksize = DES_BLOCK_SIZE, 2541 }, 2542 .setkey = aead_setkey, 2543 .setauthsize = aead_setauthsize, 2544 .encrypt = aead_encrypt, 2545 .decrypt = aead_decrypt, 2546 .ivsize = DES_BLOCK_SIZE, 2547 .maxauthsize = SHA224_DIGEST_SIZE, 2548 }, 2549 .caam = { 2550 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2551 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2552 OP_ALG_AAI_HMAC_PRECOMP, 2553 }, 2554 }, 2555 { 2556 .aead = { 2557 .base = { 2558 .cra_name = "echainiv(authenc(hmac(sha224)," 2559 "cbc(des)))", 2560 
.cra_driver_name = "echainiv-authenc-" 2561 "hmac-sha224-cbc-des-caam", 2562 .cra_blocksize = DES_BLOCK_SIZE, 2563 }, 2564 .setkey = aead_setkey, 2565 .setauthsize = aead_setauthsize, 2566 .encrypt = aead_encrypt, 2567 .decrypt = aead_decrypt, 2568 .ivsize = DES_BLOCK_SIZE, 2569 .maxauthsize = SHA224_DIGEST_SIZE, 2570 }, 2571 .caam = { 2572 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2573 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2574 OP_ALG_AAI_HMAC_PRECOMP, 2575 .geniv = true, 2576 }, 2577 }, 2578 { 2579 .aead = { 2580 .base = { 2581 .cra_name = "authenc(hmac(sha256),cbc(des))", 2582 .cra_driver_name = "authenc-hmac-sha256-" 2583 "cbc-des-caam", 2584 .cra_blocksize = DES_BLOCK_SIZE, 2585 }, 2586 .setkey = aead_setkey, 2587 .setauthsize = aead_setauthsize, 2588 .encrypt = aead_encrypt, 2589 .decrypt = aead_decrypt, 2590 .ivsize = DES_BLOCK_SIZE, 2591 .maxauthsize = SHA256_DIGEST_SIZE, 2592 }, 2593 .caam = { 2594 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2595 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2596 OP_ALG_AAI_HMAC_PRECOMP, 2597 }, 2598 }, 2599 { 2600 .aead = { 2601 .base = { 2602 .cra_name = "echainiv(authenc(hmac(sha256)," 2603 "cbc(des)))", 2604 .cra_driver_name = "echainiv-authenc-" 2605 "hmac-sha256-cbc-des-caam", 2606 .cra_blocksize = DES_BLOCK_SIZE, 2607 }, 2608 .setkey = aead_setkey, 2609 .setauthsize = aead_setauthsize, 2610 .encrypt = aead_encrypt, 2611 .decrypt = aead_decrypt, 2612 .ivsize = DES_BLOCK_SIZE, 2613 .maxauthsize = SHA256_DIGEST_SIZE, 2614 }, 2615 .caam = { 2616 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2617 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2618 OP_ALG_AAI_HMAC_PRECOMP, 2619 .geniv = true, 2620 }, 2621 }, 2622 { 2623 .aead = { 2624 .base = { 2625 .cra_name = "authenc(hmac(sha384),cbc(des))", 2626 .cra_driver_name = "authenc-hmac-sha384-" 2627 "cbc-des-caam", 2628 .cra_blocksize = DES_BLOCK_SIZE, 2629 }, 2630 .setkey = aead_setkey, 2631 .setauthsize = aead_setauthsize, 2632 .encrypt = 
aead_encrypt, 2633 .decrypt = aead_decrypt, 2634 .ivsize = DES_BLOCK_SIZE, 2635 .maxauthsize = SHA384_DIGEST_SIZE, 2636 }, 2637 .caam = { 2638 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2639 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2640 OP_ALG_AAI_HMAC_PRECOMP, 2641 }, 2642 }, 2643 { 2644 .aead = { 2645 .base = { 2646 .cra_name = "echainiv(authenc(hmac(sha384)," 2647 "cbc(des)))", 2648 .cra_driver_name = "echainiv-authenc-" 2649 "hmac-sha384-cbc-des-caam", 2650 .cra_blocksize = DES_BLOCK_SIZE, 2651 }, 2652 .setkey = aead_setkey, 2653 .setauthsize = aead_setauthsize, 2654 .encrypt = aead_encrypt, 2655 .decrypt = aead_decrypt, 2656 .ivsize = DES_BLOCK_SIZE, 2657 .maxauthsize = SHA384_DIGEST_SIZE, 2658 }, 2659 .caam = { 2660 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2661 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2662 OP_ALG_AAI_HMAC_PRECOMP, 2663 .geniv = true, 2664 }, 2665 }, 2666 { 2667 .aead = { 2668 .base = { 2669 .cra_name = "authenc(hmac(sha512),cbc(des))", 2670 .cra_driver_name = "authenc-hmac-sha512-" 2671 "cbc-des-caam", 2672 .cra_blocksize = DES_BLOCK_SIZE, 2673 }, 2674 .setkey = aead_setkey, 2675 .setauthsize = aead_setauthsize, 2676 .encrypt = aead_encrypt, 2677 .decrypt = aead_decrypt, 2678 .ivsize = DES_BLOCK_SIZE, 2679 .maxauthsize = SHA512_DIGEST_SIZE, 2680 }, 2681 .caam = { 2682 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2683 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2684 OP_ALG_AAI_HMAC_PRECOMP, 2685 }, 2686 }, 2687 { 2688 .aead = { 2689 .base = { 2690 .cra_name = "echainiv(authenc(hmac(sha512)," 2691 "cbc(des)))", 2692 .cra_driver_name = "echainiv-authenc-" 2693 "hmac-sha512-cbc-des-caam", 2694 .cra_blocksize = DES_BLOCK_SIZE, 2695 }, 2696 .setkey = aead_setkey, 2697 .setauthsize = aead_setauthsize, 2698 .encrypt = aead_encrypt, 2699 .decrypt = aead_decrypt, 2700 .ivsize = DES_BLOCK_SIZE, 2701 .maxauthsize = SHA512_DIGEST_SIZE, 2702 }, 2703 .caam = { 2704 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 
2705 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2706 OP_ALG_AAI_HMAC_PRECOMP, 2707 .geniv = true, 2708 }, 2709 }, 2710 { 2711 .aead = { 2712 .base = { 2713 .cra_name = "authenc(hmac(md5)," 2714 "rfc3686(ctr(aes)))", 2715 .cra_driver_name = "authenc-hmac-md5-" 2716 "rfc3686-ctr-aes-caam", 2717 .cra_blocksize = 1, 2718 }, 2719 .setkey = aead_setkey, 2720 .setauthsize = aead_setauthsize, 2721 .encrypt = aead_encrypt, 2722 .decrypt = aead_decrypt, 2723 .ivsize = CTR_RFC3686_IV_SIZE, 2724 .maxauthsize = MD5_DIGEST_SIZE, 2725 }, 2726 .caam = { 2727 .class1_alg_type = OP_ALG_ALGSEL_AES | 2728 OP_ALG_AAI_CTR_MOD128, 2729 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2730 OP_ALG_AAI_HMAC_PRECOMP, 2731 .rfc3686 = true, 2732 }, 2733 }, 2734 { 2735 .aead = { 2736 .base = { 2737 .cra_name = "seqiv(authenc(" 2738 "hmac(md5),rfc3686(ctr(aes))))", 2739 .cra_driver_name = "seqiv-authenc-hmac-md5-" 2740 "rfc3686-ctr-aes-caam", 2741 .cra_blocksize = 1, 2742 }, 2743 .setkey = aead_setkey, 2744 .setauthsize = aead_setauthsize, 2745 .encrypt = aead_encrypt, 2746 .decrypt = aead_decrypt, 2747 .ivsize = CTR_RFC3686_IV_SIZE, 2748 .maxauthsize = MD5_DIGEST_SIZE, 2749 }, 2750 .caam = { 2751 .class1_alg_type = OP_ALG_ALGSEL_AES | 2752 OP_ALG_AAI_CTR_MOD128, 2753 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2754 OP_ALG_AAI_HMAC_PRECOMP, 2755 .rfc3686 = true, 2756 .geniv = true, 2757 }, 2758 }, 2759 { 2760 .aead = { 2761 .base = { 2762 .cra_name = "authenc(hmac(sha1)," 2763 "rfc3686(ctr(aes)))", 2764 .cra_driver_name = "authenc-hmac-sha1-" 2765 "rfc3686-ctr-aes-caam", 2766 .cra_blocksize = 1, 2767 }, 2768 .setkey = aead_setkey, 2769 .setauthsize = aead_setauthsize, 2770 .encrypt = aead_encrypt, 2771 .decrypt = aead_decrypt, 2772 .ivsize = CTR_RFC3686_IV_SIZE, 2773 .maxauthsize = SHA1_DIGEST_SIZE, 2774 }, 2775 .caam = { 2776 .class1_alg_type = OP_ALG_ALGSEL_AES | 2777 OP_ALG_AAI_CTR_MOD128, 2778 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2779 OP_ALG_AAI_HMAC_PRECOMP, 2780 .rfc3686 = true, 2781 }, 2782 }, 2783 { 
2784 .aead = { 2785 .base = { 2786 .cra_name = "seqiv(authenc(" 2787 "hmac(sha1),rfc3686(ctr(aes))))", 2788 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 2789 "rfc3686-ctr-aes-caam", 2790 .cra_blocksize = 1, 2791 }, 2792 .setkey = aead_setkey, 2793 .setauthsize = aead_setauthsize, 2794 .encrypt = aead_encrypt, 2795 .decrypt = aead_decrypt, 2796 .ivsize = CTR_RFC3686_IV_SIZE, 2797 .maxauthsize = SHA1_DIGEST_SIZE, 2798 }, 2799 .caam = { 2800 .class1_alg_type = OP_ALG_ALGSEL_AES | 2801 OP_ALG_AAI_CTR_MOD128, 2802 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2803 OP_ALG_AAI_HMAC_PRECOMP, 2804 .rfc3686 = true, 2805 .geniv = true, 2806 }, 2807 }, 2808 { 2809 .aead = { 2810 .base = { 2811 .cra_name = "authenc(hmac(sha224)," 2812 "rfc3686(ctr(aes)))", 2813 .cra_driver_name = "authenc-hmac-sha224-" 2814 "rfc3686-ctr-aes-caam", 2815 .cra_blocksize = 1, 2816 }, 2817 .setkey = aead_setkey, 2818 .setauthsize = aead_setauthsize, 2819 .encrypt = aead_encrypt, 2820 .decrypt = aead_decrypt, 2821 .ivsize = CTR_RFC3686_IV_SIZE, 2822 .maxauthsize = SHA224_DIGEST_SIZE, 2823 }, 2824 .caam = { 2825 .class1_alg_type = OP_ALG_ALGSEL_AES | 2826 OP_ALG_AAI_CTR_MOD128, 2827 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2828 OP_ALG_AAI_HMAC_PRECOMP, 2829 .rfc3686 = true, 2830 }, 2831 }, 2832 { 2833 .aead = { 2834 .base = { 2835 .cra_name = "seqiv(authenc(" 2836 "hmac(sha224),rfc3686(ctr(aes))))", 2837 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 2838 "rfc3686-ctr-aes-caam", 2839 .cra_blocksize = 1, 2840 }, 2841 .setkey = aead_setkey, 2842 .setauthsize = aead_setauthsize, 2843 .encrypt = aead_encrypt, 2844 .decrypt = aead_decrypt, 2845 .ivsize = CTR_RFC3686_IV_SIZE, 2846 .maxauthsize = SHA224_DIGEST_SIZE, 2847 }, 2848 .caam = { 2849 .class1_alg_type = OP_ALG_ALGSEL_AES | 2850 OP_ALG_AAI_CTR_MOD128, 2851 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2852 OP_ALG_AAI_HMAC_PRECOMP, 2853 .rfc3686 = true, 2854 .geniv = true, 2855 }, 2856 }, 2857 { 2858 .aead = { 2859 .base = { 2860 .cra_name = 
"authenc(hmac(sha256)," 2861 "rfc3686(ctr(aes)))", 2862 .cra_driver_name = "authenc-hmac-sha256-" 2863 "rfc3686-ctr-aes-caam", 2864 .cra_blocksize = 1, 2865 }, 2866 .setkey = aead_setkey, 2867 .setauthsize = aead_setauthsize, 2868 .encrypt = aead_encrypt, 2869 .decrypt = aead_decrypt, 2870 .ivsize = CTR_RFC3686_IV_SIZE, 2871 .maxauthsize = SHA256_DIGEST_SIZE, 2872 }, 2873 .caam = { 2874 .class1_alg_type = OP_ALG_ALGSEL_AES | 2875 OP_ALG_AAI_CTR_MOD128, 2876 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2877 OP_ALG_AAI_HMAC_PRECOMP, 2878 .rfc3686 = true, 2879 }, 2880 }, 2881 { 2882 .aead = { 2883 .base = { 2884 .cra_name = "seqiv(authenc(hmac(sha256)," 2885 "rfc3686(ctr(aes))))", 2886 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 2887 "rfc3686-ctr-aes-caam", 2888 .cra_blocksize = 1, 2889 }, 2890 .setkey = aead_setkey, 2891 .setauthsize = aead_setauthsize, 2892 .encrypt = aead_encrypt, 2893 .decrypt = aead_decrypt, 2894 .ivsize = CTR_RFC3686_IV_SIZE, 2895 .maxauthsize = SHA256_DIGEST_SIZE, 2896 }, 2897 .caam = { 2898 .class1_alg_type = OP_ALG_ALGSEL_AES | 2899 OP_ALG_AAI_CTR_MOD128, 2900 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2901 OP_ALG_AAI_HMAC_PRECOMP, 2902 .rfc3686 = true, 2903 .geniv = true, 2904 }, 2905 }, 2906 { 2907 .aead = { 2908 .base = { 2909 .cra_name = "authenc(hmac(sha384)," 2910 "rfc3686(ctr(aes)))", 2911 .cra_driver_name = "authenc-hmac-sha384-" 2912 "rfc3686-ctr-aes-caam", 2913 .cra_blocksize = 1, 2914 }, 2915 .setkey = aead_setkey, 2916 .setauthsize = aead_setauthsize, 2917 .encrypt = aead_encrypt, 2918 .decrypt = aead_decrypt, 2919 .ivsize = CTR_RFC3686_IV_SIZE, 2920 .maxauthsize = SHA384_DIGEST_SIZE, 2921 }, 2922 .caam = { 2923 .class1_alg_type = OP_ALG_ALGSEL_AES | 2924 OP_ALG_AAI_CTR_MOD128, 2925 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2926 OP_ALG_AAI_HMAC_PRECOMP, 2927 .rfc3686 = true, 2928 }, 2929 }, 2930 { 2931 .aead = { 2932 .base = { 2933 .cra_name = "seqiv(authenc(hmac(sha384)," 2934 "rfc3686(ctr(aes))))", 2935 .cra_driver_name = 
"seqiv-authenc-hmac-sha384-" 2936 "rfc3686-ctr-aes-caam", 2937 .cra_blocksize = 1, 2938 }, 2939 .setkey = aead_setkey, 2940 .setauthsize = aead_setauthsize, 2941 .encrypt = aead_encrypt, 2942 .decrypt = aead_decrypt, 2943 .ivsize = CTR_RFC3686_IV_SIZE, 2944 .maxauthsize = SHA384_DIGEST_SIZE, 2945 }, 2946 .caam = { 2947 .class1_alg_type = OP_ALG_ALGSEL_AES | 2948 OP_ALG_AAI_CTR_MOD128, 2949 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2950 OP_ALG_AAI_HMAC_PRECOMP, 2951 .rfc3686 = true, 2952 .geniv = true, 2953 }, 2954 }, 2955 { 2956 .aead = { 2957 .base = { 2958 .cra_name = "authenc(hmac(sha512)," 2959 "rfc3686(ctr(aes)))", 2960 .cra_driver_name = "authenc-hmac-sha512-" 2961 "rfc3686-ctr-aes-caam", 2962 .cra_blocksize = 1, 2963 }, 2964 .setkey = aead_setkey, 2965 .setauthsize = aead_setauthsize, 2966 .encrypt = aead_encrypt, 2967 .decrypt = aead_decrypt, 2968 .ivsize = CTR_RFC3686_IV_SIZE, 2969 .maxauthsize = SHA512_DIGEST_SIZE, 2970 }, 2971 .caam = { 2972 .class1_alg_type = OP_ALG_ALGSEL_AES | 2973 OP_ALG_AAI_CTR_MOD128, 2974 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2975 OP_ALG_AAI_HMAC_PRECOMP, 2976 .rfc3686 = true, 2977 }, 2978 }, 2979 { 2980 .aead = { 2981 .base = { 2982 .cra_name = "seqiv(authenc(hmac(sha512)," 2983 "rfc3686(ctr(aes))))", 2984 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 2985 "rfc3686-ctr-aes-caam", 2986 .cra_blocksize = 1, 2987 }, 2988 .setkey = aead_setkey, 2989 .setauthsize = aead_setauthsize, 2990 .encrypt = aead_encrypt, 2991 .decrypt = aead_decrypt, 2992 .ivsize = CTR_RFC3686_IV_SIZE, 2993 .maxauthsize = SHA512_DIGEST_SIZE, 2994 }, 2995 .caam = { 2996 .class1_alg_type = OP_ALG_ALGSEL_AES | 2997 OP_ALG_AAI_CTR_MOD128, 2998 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2999 OP_ALG_AAI_HMAC_PRECOMP, 3000 .rfc3686 = true, 3001 .geniv = true, 3002 }, 3003 }, 3004 }; 3005 3006 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam, 3007 bool uses_dkp) 3008 { 3009 dma_addr_t dma_addr; 3010 struct caam_drv_private *priv; 3011 
	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	/*
	 * On Era >= 6 parts with DKP descriptors the mapping must be
	 * bidirectional; otherwise the device only reads the region.
	 * (NOTE(review): presumably because DKP writes the derived split key
	 * back into the context — confirm against the descriptor code.)
	 */
	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	/*
	 * Map the shared descriptors and key as one contiguous region:
	 * everything in struct caam_ctx up to the sh_desc_enc_dma member.
	 */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* Derive the per-member bus addresses from the single mapping. */
	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}

/* crypto_skcipher .init hook: common init; skciphers never use DKP. */
static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);

	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
				false);
}

/*
 * crypto_aead .init hook: only transforms using the generic aead_setkey
 * (authenc-style) path are flagged as DKP users.
 */
static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam,
				alg->setkey == aead_setkey);
}

/*
 * caam_exit_common() - undo caam_init_common(): unmap the context region
 * and release the job ring.
 */
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);

	caam_jr_free(ctx->jrdev);
}

/* crypto_skcipher .exit hook. */
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}

/* crypto_aead .exit hook. */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

/*
 * Module unload: unregister only the templates whose registration
 * succeeded at init time (tracked via ->registered).
 */
static void __exit caam_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}

/* Fill in the driver-wide fields of a skcipher template before registering. */
static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

/* Fill in the driver-wide fields of an AEAD template before registering. */
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

/*
 * Module init: locate the CAAM controller via the device tree, read which
 * CHA blocks (DES/AES/MD) are instantiated and at what level, then register
 * only the algorithms this hardware can actually run.
 */
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node =
		of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		/* fall back to the alternate compatible string */
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	/*
	 * NOTE(review): of_find_device_by_node() takes a reference on pdev
	 * that is never dropped on any path in this function — looks like a
	 * device refcount leak; confirm against upstream caam fixes.
	 */

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
			     OP_ALG_AAI_XTS)
				continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			/* best-effort: log and keep registering the rest */
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			/* best-effort: log and keep registering the rest */
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	/* err holds the status of the last registration attempt */
	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");