1 // SPDX-License-Identifier: GPL-2.0+ 2 /* 3 * caam - Freescale FSL CAAM support for crypto API 4 * 5 * Copyright 2008-2011 Freescale Semiconductor, Inc. 6 * Copyright 2016-2019 NXP 7 * 8 * Based on talitos crypto API driver. 9 * 10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008): 11 * 12 * --------------- --------------- 13 * | JobDesc #1 |-------------------->| ShareDesc | 14 * | *(packet 1) | | (PDB) | 15 * --------------- |------------->| (hashKey) | 16 * . | | (cipherKey) | 17 * . | |-------->| (operation) | 18 * --------------- | | --------------- 19 * | JobDesc #2 |------| | 20 * | *(packet 2) | | 21 * --------------- | 22 * . | 23 * . | 24 * --------------- | 25 * | JobDesc #3 |------------ 26 * | *(packet 3) | 27 * --------------- 28 * 29 * The SharedDesc never changes for a connection unless rekeyed, but 30 * each packet will likely be in a different place. So all we need 31 * to know to process the packet is where the input is, where the 32 * output goes, and what context we want to process with. Context is 33 * in the SharedDesc, packet references in the JobDesc. 34 * 35 * So, a job desc looks like: 36 * 37 * --------------------- 38 * | Header | 39 * | ShareDesc Pointer | 40 * | SEQ_OUT_PTR | 41 * | (output buffer) | 42 * | (output length) | 43 * | SEQ_IN_PTR | 44 * | (input buffer) | 45 * | (input length) | 46 * --------------------- 47 */ 48 49 #include "compat.h" 50 51 #include "regs.h" 52 #include "intern.h" 53 #include "desc_constr.h" 54 #include "jr.h" 55 #include "error.h" 56 #include "sg_sw_sec4.h" 57 #include "key_gen.h" 58 #include "caamalg_desc.h" 59 #include <crypto/engine.h> 60 #include <crypto/xts.h> 61 #include <asm/unaligned.h> 62 #include <linux/dma-mapping.h> 63 #include <linux/device.h> 64 #include <linux/err.h> 65 #include <linux/kernel.h> 66 #include <linux/slab.h> 67 #include <linux/string.h> 68 69 /* 70 * crypto alg 71 */ 72 #define CAAM_CRA_PRIORITY 3000 73 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */ 74 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \ 75 CTR_RFC3686_NONCE_SIZE + \ 76 SHA512_DIGEST_SIZE * 2) 77 78 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2) 79 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 80 CAAM_CMD_SZ * 4) 81 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 82 CAAM_CMD_SZ * 5) 83 84 #define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6) 85 86 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN) 87 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ) 88 89 struct caam_alg_entry { 90 int class1_alg_type; 91 int class2_alg_type; 92 bool rfc3686; 93 bool geniv; 94 bool nodkp; 95 }; 96 97 struct caam_aead_alg { 98 struct aead_alg aead; 99 struct caam_alg_entry caam; 100 bool registered; 101 }; 102 103 struct caam_skcipher_alg { 104 struct skcipher_alg skcipher; 105 struct caam_alg_entry caam; 106 bool registered; 107 }; 108 109 /* 110 * per-session context 111 */ 112 struct caam_ctx { 113 struct crypto_engine_ctx enginectx; 114 u32 sh_desc_enc[DESC_MAX_USED_LEN]; 115 u32 sh_desc_dec[DESC_MAX_USED_LEN]; 116 u8 key[CAAM_MAX_KEY_SIZE]; 117 dma_addr_t sh_desc_enc_dma; 118 dma_addr_t sh_desc_dec_dma; 119 dma_addr_t key_dma; 120 enum dma_data_direction dir; 121 struct device *jrdev; 122 struct alginfo adata; 123 struct alginfo cdata; 124 unsigned int authsize; 125 bool xts_key_fallback; 126 struct crypto_skcipher *fallback; 127 }; 128 129 struct caam_skcipher_req_ctx { 130 struct skcipher_edesc *edesc; 131 struct skcipher_request fallback_req; 132 }; 133 134 struct caam_aead_req_ctx { 135 struct aead_edesc *edesc; 136 }; 137 138 static int aead_null_set_sh_desc(struct crypto_aead *aead) 139 { 140 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 141 struct device *jrdev = ctx->jrdev; 142 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 143 u32 *desc; 144 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - 145 ctx->adata.keylen_pad; 146 147 /* 148 * Job Descriptor and Shared Descriptors 149 * must all fit into the 64-word Descriptor h/w Buffer 150 */ 151 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) { 152 ctx->adata.key_inline = true; 153 ctx->adata.key_virt = ctx->key; 154 } else { 155 ctx->adata.key_inline = false; 156 ctx->adata.key_dma = ctx->key_dma; 157 } 158 159 /* aead_encrypt shared descriptor */ 160 desc = ctx->sh_desc_enc; 161 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize, 162 ctrlpriv->era); 163 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 164 desc_bytes(desc), ctx->dir); 165 166 /* 167 * Job Descriptor and Shared Descriptors 168 * must all fit into the 64-word Descriptor h/w Buffer 169 */ 170 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) { 171 ctx->adata.key_inline = true; 172 ctx->adata.key_virt = ctx->key; 173 } else { 174 ctx->adata.key_inline = false; 175 ctx->adata.key_dma = ctx->key_dma; 176 } 177 178 /* aead_decrypt shared descriptor */ 179 desc = ctx->sh_desc_dec; 180 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize, 181 ctrlpriv->era); 182 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 183 desc_bytes(desc), ctx->dir); 184 185 return 0; 186 } 187 188 static int aead_set_sh_desc(struct crypto_aead *aead) 189 { 190 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 191 struct caam_aead_alg, aead); 192 unsigned int ivsize = crypto_aead_ivsize(aead); 193 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 194 struct device *jrdev = ctx->jrdev; 195 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 196 u32 ctx1_iv_off = 0; 197 u32 *desc, *nonce = NULL; 198 u32 inl_mask; 199 unsigned int data_len[2]; 200 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 201 OP_ALG_AAI_CTR_MOD128); 202 const bool is_rfc3686 = alg->caam.rfc3686; 203 204 if (!ctx->authsize) 205 return 0; 206 207 /* NULL encryption / decryption */ 208 if (!ctx->cdata.keylen) 209 return aead_null_set_sh_desc(aead); 210 211 /* 212 * AES-CTR needs to load IV in CONTEXT1 reg 213 * at an offset of 128bits (16bytes) 214 * CONTEXT1[255:128] = IV 215 */ 216 if (ctr_mode) 217 ctx1_iv_off = 16; 218 219 /* 220 * RFC3686 specific: 221 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 222 */ 223 if (is_rfc3686) { 224 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 225 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad + 226 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE); 227 } 228 229 /* 230 * In case |user key| > |derived key|, using DKP<imm,imm> 231 * would result in invalid opcodes (last bytes of user key) in 232 * the resulting descriptor. Use DKP<ptr,imm> instead => both 233 * virtual and dma key addresses are needed. 234 */ 235 ctx->adata.key_virt = ctx->key; 236 ctx->adata.key_dma = ctx->key_dma; 237 238 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 239 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 240 241 data_len[0] = ctx->adata.keylen_pad; 242 data_len[1] = ctx->cdata.keylen; 243 244 if (alg->caam.geniv) 245 goto skip_enc; 246 247 /* 248 * Job Descriptor and Shared Descriptors 249 * must all fit into the 64-word Descriptor h/w Buffer 250 */ 251 if (desc_inline_query(DESC_AEAD_ENC_LEN + 252 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 253 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 254 ARRAY_SIZE(data_len)) < 0) 255 return -EINVAL; 256 257 ctx->adata.key_inline = !!(inl_mask & 1); 258 ctx->cdata.key_inline = !!(inl_mask & 2); 259 260 /* aead_encrypt shared descriptor */ 261 desc = ctx->sh_desc_enc; 262 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, 263 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off, 264 false, ctrlpriv->era); 265 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 266 desc_bytes(desc), ctx->dir); 267 268 skip_enc: 269 /* 270 * Job Descriptor and Shared Descriptors 271 * must all fit into the 64-word Descriptor h/w Buffer 272 */ 273 if (desc_inline_query(DESC_AEAD_DEC_LEN + 274 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 275 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 276 ARRAY_SIZE(data_len)) < 0) 277 return -EINVAL; 278 279 ctx->adata.key_inline = !!(inl_mask & 1); 280 ctx->cdata.key_inline = !!(inl_mask & 2); 281 282 /* aead_decrypt shared descriptor */ 283 desc = ctx->sh_desc_dec; 284 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, 285 ctx->authsize, alg->caam.geniv, is_rfc3686, 286 nonce, ctx1_iv_off, false, ctrlpriv->era); 287 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 288 desc_bytes(desc), ctx->dir); 289 290 if (!alg->caam.geniv) 291 goto skip_givenc; 292 293 /* 294 * Job Descriptor and Shared Descriptors 295 * must all fit into the 64-word Descriptor h/w Buffer 296 */ 297 if (desc_inline_query(DESC_AEAD_GIVENC_LEN + 298 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 299 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 300 ARRAY_SIZE(data_len)) < 0) 301 return -EINVAL; 302 303 ctx->adata.key_inline = !!(inl_mask & 1); 304 ctx->cdata.key_inline = !!(inl_mask & 2); 305 306 /* aead_givencrypt shared descriptor */ 307 desc = ctx->sh_desc_enc; 308 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, 309 ctx->authsize, is_rfc3686, nonce, 310 ctx1_iv_off, false, ctrlpriv->era); 311 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 312 desc_bytes(desc), ctx->dir); 313 314 skip_givenc: 315 return 0; 316 } 317 318 static int aead_setauthsize(struct crypto_aead *authenc, 319 unsigned int authsize) 320 { 321 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc); 322 323 ctx->authsize = authsize; 324 aead_set_sh_desc(authenc); 325 326 return 0; 327 } 328 329 static int gcm_set_sh_desc(struct crypto_aead *aead) 330 { 331 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 332 struct device *jrdev = ctx->jrdev; 333 unsigned int ivsize = crypto_aead_ivsize(aead); 334 u32 *desc; 335 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 336 ctx->cdata.keylen; 337 338 if (!ctx->cdata.keylen || !ctx->authsize) 339 return 0; 340 341 /* 342 * AES GCM encrypt shared descriptor 343 * Job Descriptor and Shared Descriptor 344 * must fit into the 64-word Descriptor h/w Buffer 345 */ 346 if (rem_bytes >= DESC_GCM_ENC_LEN) { 347 ctx->cdata.key_inline = true; 348 ctx->cdata.key_virt = ctx->key; 349 } else { 350 ctx->cdata.key_inline = false; 351 ctx->cdata.key_dma = ctx->key_dma; 352 } 353 354 desc = ctx->sh_desc_enc; 355 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 356 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 357 desc_bytes(desc), ctx->dir); 358 359 /* 360 * Job Descriptor and Shared Descriptors 361 * must all fit into the 64-word Descriptor h/w Buffer 362 */ 363 if (rem_bytes >= DESC_GCM_DEC_LEN) { 364 ctx->cdata.key_inline = true; 365 ctx->cdata.key_virt = ctx->key; 366 } else { 367 ctx->cdata.key_inline = false; 368 ctx->cdata.key_dma = ctx->key_dma; 369 } 370 371 desc = ctx->sh_desc_dec; 372 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 373 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 374 desc_bytes(desc), ctx->dir); 375 376 return 0; 377 } 378 379 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize) 380 { 381 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc); 382 int err; 383 384 err = crypto_gcm_check_authsize(authsize); 385 if (err) 386 return err; 387 388 ctx->authsize = authsize; 389 gcm_set_sh_desc(authenc); 390 391 return 0; 392 } 393 394 static int rfc4106_set_sh_desc(struct crypto_aead *aead) 395 { 396 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 397 struct device *jrdev = ctx->jrdev; 398 unsigned int ivsize = crypto_aead_ivsize(aead); 399 u32 *desc; 400 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 401 ctx->cdata.keylen; 402 403 if (!ctx->cdata.keylen || !ctx->authsize) 404 return 0; 405 406 /* 407 * RFC4106 encrypt shared descriptor 408 * Job Descriptor and Shared Descriptor 409 * must fit into the 64-word Descriptor h/w Buffer 410 */ 411 if (rem_bytes >= DESC_RFC4106_ENC_LEN) { 412 ctx->cdata.key_inline = true; 413 ctx->cdata.key_virt = ctx->key; 414 } else { 415 ctx->cdata.key_inline = false; 416 ctx->cdata.key_dma = ctx->key_dma; 417 } 418 419 desc = ctx->sh_desc_enc; 420 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 421 false); 422 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 423 desc_bytes(desc), ctx->dir); 424 425 /* 426 * Job Descriptor and Shared Descriptors 427 * must all fit into the 64-word Descriptor h/w Buffer 428 */ 429 if (rem_bytes >= DESC_RFC4106_DEC_LEN) { 430 ctx->cdata.key_inline = true; 431 ctx->cdata.key_virt = ctx->key; 432 } else { 433 ctx->cdata.key_inline = false; 434 ctx->cdata.key_dma = ctx->key_dma; 435 } 436 437 desc = ctx->sh_desc_dec; 438 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 439 false); 440 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 441 desc_bytes(desc), ctx->dir); 442 443 return 0; 444 } 445 446 static int rfc4106_setauthsize(struct crypto_aead *authenc, 447 unsigned int authsize) 448 { 449 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc); 450 int err; 451 452 err = crypto_rfc4106_check_authsize(authsize); 453 if (err) 454 return err; 455 456 ctx->authsize = authsize; 457 rfc4106_set_sh_desc(authenc); 458 459 return 0; 460 } 461 462 static int rfc4543_set_sh_desc(struct crypto_aead *aead) 463 { 464 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 465 struct device *jrdev = ctx->jrdev; 466 unsigned int ivsize = crypto_aead_ivsize(aead); 467 u32 *desc; 468 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 469 ctx->cdata.keylen; 470 471 if (!ctx->cdata.keylen || !ctx->authsize) 472 return 0; 473 474 /* 475 * RFC4543 encrypt shared descriptor 476 * Job Descriptor and Shared Descriptor 477 * must fit into the 64-word Descriptor h/w Buffer 478 */ 479 if (rem_bytes >= DESC_RFC4543_ENC_LEN) { 480 ctx->cdata.key_inline = true; 481 ctx->cdata.key_virt = ctx->key; 482 } else { 483 ctx->cdata.key_inline = false; 484 ctx->cdata.key_dma = ctx->key_dma; 485 } 486 487 desc = ctx->sh_desc_enc; 488 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 489 false); 490 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 491 desc_bytes(desc), ctx->dir); 492 493 /* 494 * Job Descriptor and Shared Descriptors 495 * must all fit into the 64-word Descriptor h/w Buffer 496 */ 497 if (rem_bytes >= DESC_RFC4543_DEC_LEN) { 498 ctx->cdata.key_inline = true; 499 ctx->cdata.key_virt = ctx->key; 500 } else { 501 ctx->cdata.key_inline = false; 502 ctx->cdata.key_dma = ctx->key_dma; 503 } 504 505 desc = ctx->sh_desc_dec; 506 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 507 false); 508 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 509 desc_bytes(desc), ctx->dir); 510 511 return 0; 512 } 513 514 static int rfc4543_setauthsize(struct crypto_aead *authenc, 515 unsigned int authsize) 516 { 517 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc); 518 519 if (authsize != 16) 520 return -EINVAL; 521 522 ctx->authsize = authsize; 523 rfc4543_set_sh_desc(authenc); 524 525 return 0; 526 } 527 528 static int chachapoly_set_sh_desc(struct crypto_aead *aead) 529 { 530 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 531 struct device *jrdev = ctx->jrdev; 532 unsigned int ivsize = crypto_aead_ivsize(aead); 533 u32 *desc; 534 535 if (!ctx->cdata.keylen || !ctx->authsize) 536 return 0; 537 538 desc = ctx->sh_desc_enc; 539 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 540 ctx->authsize, true, false); 541 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 542 desc_bytes(desc), ctx->dir); 543 544 desc = ctx->sh_desc_dec; 545 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 546 ctx->authsize, false, false); 547 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 548 desc_bytes(desc), ctx->dir); 549 550 return 0; 551 } 552 553 static int chachapoly_setauthsize(struct crypto_aead *aead, 554 unsigned int authsize) 555 { 556 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 557 558 if (authsize != POLY1305_DIGEST_SIZE) 559 return -EINVAL; 560 561 ctx->authsize = authsize; 562 return chachapoly_set_sh_desc(aead); 563 } 564 565 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key, 566 unsigned int keylen) 567 { 568 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 569 unsigned int ivsize = crypto_aead_ivsize(aead); 570 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize; 571 572 if (keylen != CHACHA_KEY_SIZE + saltlen) 573 return -EINVAL; 574 575 ctx->cdata.key_virt = key; 576 ctx->cdata.keylen = keylen - saltlen; 577 578 return chachapoly_set_sh_desc(aead); 579 } 580 581 static int aead_setkey(struct crypto_aead *aead, 582 const u8 *key, unsigned int keylen) 583 { 584 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 585 struct device *jrdev = ctx->jrdev; 586 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 587 struct crypto_authenc_keys keys; 588 int ret = 0; 589 590 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) 591 goto badkey; 592 593 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n", 594 keys.authkeylen + keys.enckeylen, keys.enckeylen, 595 keys.authkeylen); 596 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 597 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 598 599 /* 600 * If DKP is supported, use it in the shared descriptor to generate 601 * the split key. 602 */ 603 if (ctrlpriv->era >= 6) { 604 ctx->adata.keylen = keys.authkeylen; 605 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype & 606 OP_ALG_ALGSEL_MASK); 607 608 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE) 609 goto badkey; 610 611 memcpy(ctx->key, keys.authkey, keys.authkeylen); 612 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, 613 keys.enckeylen); 614 dma_sync_single_for_device(jrdev, ctx->key_dma, 615 ctx->adata.keylen_pad + 616 keys.enckeylen, ctx->dir); 617 goto skip_split_key; 618 } 619 620 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, 621 keys.authkeylen, CAAM_MAX_KEY_SIZE - 622 keys.enckeylen); 623 if (ret) { 624 goto badkey; 625 } 626 627 /* postpend encryption key to auth split key */ 628 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); 629 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + 630 keys.enckeylen, ctx->dir); 631 632 print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ", 633 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, 634 ctx->adata.keylen_pad + keys.enckeylen, 1); 635 636 skip_split_key: 637 ctx->cdata.keylen = keys.enckeylen; 638 memzero_explicit(&keys, sizeof(keys)); 639 return aead_set_sh_desc(aead); 640 badkey: 641 memzero_explicit(&keys, sizeof(keys)); 642 return -EINVAL; 643 } 644 645 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key, 646 unsigned int keylen) 647 { 648 struct crypto_authenc_keys keys; 649 int err; 650 651 err = crypto_authenc_extractkeys(&keys, key, keylen); 652 if (unlikely(err)) 653 return err; 654 655 err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?: 656 aead_setkey(aead, key, keylen); 657 658 memzero_explicit(&keys, sizeof(keys)); 659 return err; 660 } 661 662 static int gcm_setkey(struct crypto_aead *aead, 663 const u8 *key, unsigned int keylen) 664 { 665 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 666 struct device *jrdev = ctx->jrdev; 667 int err; 668 669 err = aes_check_keylen(keylen); 670 if (err) 671 return err; 672 673 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 674 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 675 676 memcpy(ctx->key, key, keylen); 677 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); 678 ctx->cdata.keylen = keylen; 679 680 return gcm_set_sh_desc(aead); 681 } 682 683 static int rfc4106_setkey(struct crypto_aead *aead, 684 const u8 *key, unsigned int keylen) 685 { 686 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 687 struct device *jrdev = ctx->jrdev; 688 int err; 689 690 err = aes_check_keylen(keylen - 4); 691 if (err) 692 return err; 693 694 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 695 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 696 697 memcpy(ctx->key, key, keylen); 698 699 /* 700 * The last four bytes of the key material are used as the salt value 701 * in the nonce. Update the AES key length. 702 */ 703 ctx->cdata.keylen = keylen - 4; 704 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 705 ctx->dir); 706 return rfc4106_set_sh_desc(aead); 707 } 708 709 static int rfc4543_setkey(struct crypto_aead *aead, 710 const u8 *key, unsigned int keylen) 711 { 712 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 713 struct device *jrdev = ctx->jrdev; 714 int err; 715 716 err = aes_check_keylen(keylen - 4); 717 if (err) 718 return err; 719 720 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 721 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 722 723 memcpy(ctx->key, key, keylen); 724 725 /* 726 * The last four bytes of the key material are used as the salt value 727 * in the nonce. Update the AES key length. 728 */ 729 ctx->cdata.keylen = keylen - 4; 730 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 731 ctx->dir); 732 return rfc4543_set_sh_desc(aead); 733 } 734 735 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 736 unsigned int keylen, const u32 ctx1_iv_off) 737 { 738 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher); 739 struct caam_skcipher_alg *alg = 740 container_of(crypto_skcipher_alg(skcipher), typeof(*alg), 741 skcipher); 742 struct device *jrdev = ctx->jrdev; 743 unsigned int ivsize = crypto_skcipher_ivsize(skcipher); 744 u32 *desc; 745 const bool is_rfc3686 = alg->caam.rfc3686; 746 747 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 748 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 749 750 ctx->cdata.keylen = keylen; 751 ctx->cdata.key_virt = key; 752 ctx->cdata.key_inline = true; 753 754 /* skcipher_encrypt shared descriptor */ 755 desc = ctx->sh_desc_enc; 756 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, 757 ctx1_iv_off); 758 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 759 desc_bytes(desc), ctx->dir); 760 761 /* skcipher_decrypt shared descriptor */ 762 desc = ctx->sh_desc_dec; 763 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, 764 ctx1_iv_off); 765 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 766 desc_bytes(desc), ctx->dir); 767 768 return 0; 769 } 770 771 static int aes_skcipher_setkey(struct crypto_skcipher *skcipher, 772 const u8 *key, unsigned int keylen) 773 { 774 int err; 775 776 err = aes_check_keylen(keylen); 777 if (err) 778 return err; 779 780 return skcipher_setkey(skcipher, key, keylen, 0); 781 } 782 783 static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher, 784 const u8 *key, unsigned int keylen) 785 { 786 u32 ctx1_iv_off; 787 int err; 788 789 /* 790 * RFC3686 specific: 791 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER} 792 * | *key = {KEY, NONCE} 793 */ 794 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 795 keylen -= CTR_RFC3686_NONCE_SIZE; 796 797 err = aes_check_keylen(keylen); 798 if (err) 799 return err; 800 801 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off); 802 } 803 804 static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher, 805 const u8 *key, unsigned int keylen) 806 { 807 u32 ctx1_iv_off; 808 int err; 809 810 /* 811 * AES-CTR needs to load IV in CONTEXT1 reg 812 * at an offset of 128bits (16bytes) 813 * CONTEXT1[255:128] = IV 814 */ 815 ctx1_iv_off = 16; 816 817 err = aes_check_keylen(keylen); 818 if (err) 819 return err; 820 821 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off); 822 } 823 824 static int des_skcipher_setkey(struct crypto_skcipher *skcipher, 825 const u8 *key, unsigned int keylen) 826 { 827 return verify_skcipher_des_key(skcipher, key) ?: 828 skcipher_setkey(skcipher, key, keylen, 0); 829 } 830 831 static int des3_skcipher_setkey(struct crypto_skcipher *skcipher, 832 const u8 *key, unsigned int keylen) 833 { 834 return verify_skcipher_des3_key(skcipher, key) ?: 835 skcipher_setkey(skcipher, key, keylen, 0); 836 } 837 838 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 839 unsigned int keylen) 840 { 841 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher); 842 struct device *jrdev = ctx->jrdev; 843 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 844 u32 *desc; 845 int err; 846 847 err = xts_verify_key(skcipher, key, keylen); 848 if (err) { 849 dev_dbg(jrdev, "key size mismatch\n"); 850 return err; 851 } 852 853 if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256) 854 ctx->xts_key_fallback = true; 855 856 if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) { 857 err = crypto_skcipher_setkey(ctx->fallback, key, keylen); 858 if (err) 859 return err; 860 } 861 862 ctx->cdata.keylen = keylen; 863 ctx->cdata.key_virt = key; 864 ctx->cdata.key_inline = true; 865 866 /* xts_skcipher_encrypt shared descriptor */ 867 desc = ctx->sh_desc_enc; 868 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata); 869 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 870 desc_bytes(desc), ctx->dir); 871 872 /* xts_skcipher_decrypt shared descriptor */ 873 desc = ctx->sh_desc_dec; 874 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata); 875 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 876 desc_bytes(desc), ctx->dir); 877 878 return 0; 879 } 880 881 /* 882 * aead_edesc - s/w-extended aead descriptor 883 * @src_nents: number of segments in input s/w scatterlist 884 * @dst_nents: number of segments in output s/w scatterlist 885 * @mapped_src_nents: number of segments in input h/w link table 886 * @mapped_dst_nents: number of segments in output h/w link table 887 * @sec4_sg_bytes: length of dma mapped sec4_sg space 888 * @bklog: stored to determine if the request needs backlog 889 * @sec4_sg_dma: bus physical mapped address of h/w link table 890 * @sec4_sg: pointer to h/w link table 891 * @hw_desc: the h/w job descriptor followed by any referenced link tables 892 */ 893 struct aead_edesc { 894 int src_nents; 895 int dst_nents; 896 int mapped_src_nents; 897 int mapped_dst_nents; 898 int sec4_sg_bytes; 899 bool bklog; 900 dma_addr_t sec4_sg_dma; 901 struct sec4_sg_entry *sec4_sg; 902 u32 hw_desc[]; 903 }; 904 905 /* 906 * skcipher_edesc - s/w-extended skcipher descriptor 907 * @src_nents: number of segments in input s/w scatterlist 908 * @dst_nents: number of segments in output s/w scatterlist 909 * @mapped_src_nents: number of segments in input h/w link table 910 * @mapped_dst_nents: number of segments in output h/w link table 911 * @iv_dma: dma address of iv for checking continuity and link table 912 * @sec4_sg_bytes: length of dma mapped sec4_sg space 913 * @bklog: stored to determine if the request needs backlog 914 * @sec4_sg_dma: bus physical mapped address of h/w link table 915 * @sec4_sg: pointer to h/w link table 916 * @hw_desc: the h/w job descriptor followed by any referenced link tables 917 * and IV 918 */ 919 struct skcipher_edesc { 920 int src_nents; 921 int dst_nents; 922 int mapped_src_nents; 923 int mapped_dst_nents; 924 dma_addr_t iv_dma; 925 int sec4_sg_bytes; 926 bool bklog; 927 dma_addr_t sec4_sg_dma; 928 struct sec4_sg_entry *sec4_sg; 929 u32 hw_desc[]; 930 }; 931 932 static void caam_unmap(struct device *dev, struct scatterlist *src, 933 struct scatterlist *dst, int src_nents, 934 int dst_nents, 935 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, 936 int sec4_sg_bytes) 937 { 938 if (dst != src) { 939 if (src_nents) 940 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); 941 if (dst_nents) 942 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); 943 } else { 944 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); 945 } 946 947 if (iv_dma) 948 dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL); 949 if (sec4_sg_bytes) 950 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes, 951 DMA_TO_DEVICE); 952 } 953 954 static void aead_unmap(struct device *dev, 955 struct aead_edesc *edesc, 956 struct aead_request *req) 957 { 958 caam_unmap(dev, req->src, req->dst, 959 edesc->src_nents, edesc->dst_nents, 0, 0, 960 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 961 } 962 963 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, 964 struct skcipher_request *req) 965 { 966 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 967 int ivsize = crypto_skcipher_ivsize(skcipher); 968 969 caam_unmap(dev, req->src, req->dst, 970 edesc->src_nents, edesc->dst_nents, 971 edesc->iv_dma, ivsize, 972 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 973 } 974 975 static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err, 976 void *context) 977 { 978 struct aead_request *req = context; 979 struct caam_aead_req_ctx *rctx = aead_request_ctx(req); 980 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); 981 struct aead_edesc *edesc; 982 int ecode = 0; 983 bool has_bklog; 984 985 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 986 987 edesc = rctx->edesc; 988 has_bklog = edesc->bklog; 989 990 if (err) 991 ecode = caam_jr_strstatus(jrdev, err); 992 993 aead_unmap(jrdev, edesc, req); 994 995 kfree(edesc); 996 997 /* 998 * If no backlog flag, the completion of the request is done 999 * by CAAM, not crypto engine. 1000 */ 1001 if (!has_bklog) 1002 aead_request_complete(req, ecode); 1003 else 1004 crypto_finalize_aead_request(jrp->engine, req, ecode); 1005 } 1006 1007 static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc) 1008 { 1009 1010 return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes, 1011 dma_get_cache_alignment()); 1012 } 1013 1014 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err, 1015 void *context) 1016 { 1017 struct skcipher_request *req = context; 1018 struct skcipher_edesc *edesc; 1019 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req); 1020 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1021 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); 1022 int ivsize = crypto_skcipher_ivsize(skcipher); 1023 int ecode = 0; 1024 bool has_bklog; 1025 1026 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 1027 1028 edesc = rctx->edesc; 1029 has_bklog = edesc->bklog; 1030 if (err) 1031 ecode = caam_jr_strstatus(jrdev, err); 1032 1033 skcipher_unmap(jrdev, edesc, req); 1034 1035 /* 1036 * The crypto API expects us to set the IV (req->iv) to the last 1037 * ciphertext block (CBC mode) or last counter (CTR mode). 1038 * This is used e.g. by the CTS mode. 1039 */ 1040 if (ivsize && !ecode) { 1041 memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize); 1042 1043 print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ", 1044 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, 1045 ivsize, 1); 1046 } 1047 1048 caam_dump_sg("dst @" __stringify(__LINE__)": ", 1049 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 1050 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); 1051 1052 kfree(edesc); 1053 1054 /* 1055 * If no backlog flag, the completion of the request is done 1056 * by CAAM, not crypto engine. 1057 */ 1058 if (!has_bklog) 1059 skcipher_request_complete(req, ecode); 1060 else 1061 crypto_finalize_skcipher_request(jrp->engine, req, ecode); 1062 } 1063 1064 /* 1065 * Fill in aead job descriptor 1066 */ 1067 static void init_aead_job(struct aead_request *req, 1068 struct aead_edesc *edesc, 1069 bool all_contig, bool encrypt) 1070 { 1071 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1072 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 1073 int authsize = ctx->authsize; 1074 u32 *desc = edesc->hw_desc; 1075 u32 out_options, in_options; 1076 dma_addr_t dst_dma, src_dma; 1077 int len, sec4_sg_index = 0; 1078 dma_addr_t ptr; 1079 u32 *sh_desc; 1080 1081 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1082 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1083 1084 len = desc_len(sh_desc); 1085 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1086 1087 if (all_contig) { 1088 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : 1089 0; 1090 in_options = 0; 1091 } else { 1092 src_dma = edesc->sec4_sg_dma; 1093 sec4_sg_index += edesc->mapped_src_nents; 1094 in_options = LDST_SGF; 1095 } 1096 1097 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, 1098 in_options); 1099 1100 dst_dma = src_dma; 1101 out_options = in_options; 1102 1103 if (unlikely(req->src != req->dst)) { 1104 if (!edesc->mapped_dst_nents) { 1105 dst_dma = 0; 1106 out_options = 0; 1107 } else if (edesc->mapped_dst_nents == 1) { 1108 dst_dma = sg_dma_address(req->dst); 1109 out_options = 0; 1110 } else { 1111 dst_dma = edesc->sec4_sg_dma + 1112 sec4_sg_index * 1113 sizeof(struct sec4_sg_entry); 1114 out_options = LDST_SGF; 1115 } 1116 } 1117 1118 if (encrypt) 1119 append_seq_out_ptr(desc, dst_dma, 1120 req->assoclen + req->cryptlen + authsize, 1121 out_options); 1122 else 1123 append_seq_out_ptr(desc, dst_dma, 1124 req->assoclen + req->cryptlen - authsize, 1125 out_options); 1126 } 1127 1128 static void init_gcm_job(struct aead_request *req, 1129 struct aead_edesc *edesc, 1130 bool all_contig, bool encrypt) 1131 { 1132 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1133 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 1134 unsigned int ivsize = crypto_aead_ivsize(aead); 1135 u32 *desc = edesc->hw_desc; 1136 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE); 1137 unsigned int last; 1138 1139 init_aead_job(req, edesc, all_contig, encrypt); 1140 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1141 1142 /* BUG This should not be specific to generic GCM. */ 1143 last = 0; 1144 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) 1145 last = FIFOLD_TYPE_LAST1; 1146 1147 /* Read GCM IV */ 1148 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE | 1149 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last); 1150 /* Append Salt */ 1151 if (!generic_gcm) 1152 append_data(desc, ctx->key + ctx->cdata.keylen, 4); 1153 /* Append IV */ 1154 append_data(desc, req->iv, ivsize); 1155 /* End of blank commands */ 1156 } 1157 1158 static void init_chachapoly_job(struct aead_request *req, 1159 struct aead_edesc *edesc, bool all_contig, 1160 bool encrypt) 1161 { 1162 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1163 unsigned int ivsize = crypto_aead_ivsize(aead); 1164 unsigned int assoclen = req->assoclen; 1165 u32 *desc = edesc->hw_desc; 1166 u32 ctx_iv_off = 4; 1167 1168 init_aead_job(req, edesc, all_contig, encrypt); 1169 1170 if (ivsize != CHACHAPOLY_IV_SIZE) { 1171 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */ 1172 ctx_iv_off += 4; 1173 1174 /* 1175 * The associated data comes already with the IV but we need 1176 * to skip it when we authenticate or encrypt... 1177 */ 1178 assoclen -= ivsize; 1179 } 1180 1181 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen); 1182 1183 /* 1184 * For IPsec load the IV further in the same register. 1185 * For RFC7539 simply load the 12 bytes nonce in a single operation 1186 */ 1187 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB | 1188 LDST_SRCDST_BYTE_CONTEXT | 1189 ctx_iv_off << LDST_OFFSET_SHIFT); 1190 } 1191 1192 static void init_authenc_job(struct aead_request *req, 1193 struct aead_edesc *edesc, 1194 bool all_contig, bool encrypt) 1195 { 1196 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1197 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 1198 struct caam_aead_alg, aead); 1199 unsigned int ivsize = crypto_aead_ivsize(aead); 1200 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 1201 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); 1202 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 1203 OP_ALG_AAI_CTR_MOD128); 1204 const bool is_rfc3686 = alg->caam.rfc3686; 1205 u32 *desc = edesc->hw_desc; 1206 u32 ivoffset = 0; 1207 1208 /* 1209 * AES-CTR needs to load IV in CONTEXT1 reg 1210 * at an offset of 128bits (16bytes) 1211 * CONTEXT1[255:128] = IV 1212 */ 1213 if (ctr_mode) 1214 ivoffset = 16; 1215 1216 /* 1217 * RFC3686 specific: 1218 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 1219 */ 1220 if (is_rfc3686) 1221 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE; 1222 1223 init_aead_job(req, edesc, all_contig, encrypt); 1224 1225 /* 1226 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports 1227 * having DPOVRD as destination. 1228 */ 1229 if (ctrlpriv->era < 3) 1230 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1231 else 1232 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen); 1233 1234 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) 1235 append_load_as_imm(desc, req->iv, ivsize, 1236 LDST_CLASS_1_CCB | 1237 LDST_SRCDST_BYTE_CONTEXT | 1238 (ivoffset << LDST_OFFSET_SHIFT)); 1239 } 1240 1241 /* 1242 * Fill in skcipher job descriptor 1243 */ 1244 static void init_skcipher_job(struct skcipher_request *req, 1245 struct skcipher_edesc *edesc, 1246 const bool encrypt) 1247 { 1248 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1249 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher); 1250 struct device *jrdev = ctx->jrdev; 1251 int ivsize = crypto_skcipher_ivsize(skcipher); 1252 u32 *desc = edesc->hw_desc; 1253 u32 *sh_desc; 1254 u32 in_options = 0, out_options = 0; 1255 dma_addr_t src_dma, dst_dma, ptr; 1256 int len, sec4_sg_index = 0; 1257 1258 print_hex_dump_debug("presciv@"__stringify(__LINE__)": ", 1259 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); 1260 dev_dbg(jrdev, "asked=%d, cryptlen%d\n", 1261 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); 1262 1263 caam_dump_sg("src @" __stringify(__LINE__)": ", 1264 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1265 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); 1266 1267 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1268 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1269 1270 len = desc_len(sh_desc); 1271 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1272 1273 if (ivsize || edesc->mapped_src_nents > 1) { 1274 src_dma = edesc->sec4_sg_dma; 1275 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; 1276 in_options = LDST_SGF; 1277 } else { 1278 src_dma = sg_dma_address(req->src); 1279 } 1280 1281 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options); 1282 1283 if (likely(req->src == req->dst)) { 1284 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry); 1285 out_options = in_options; 1286 } else if (!ivsize && edesc->mapped_dst_nents == 1) { 1287 dst_dma = sg_dma_address(req->dst); 1288 } else { 1289 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * 1290 sizeof(struct sec4_sg_entry); 1291 out_options = LDST_SGF; 1292 } 1293 1294 append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options); 1295 } 1296 1297 /* 1298 * allocate and map the aead extended descriptor 1299 */ 1300 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, 1301 int desc_bytes, bool *all_contig_ptr, 1302 bool encrypt) 1303 { 1304 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1305 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 1306 struct device *jrdev = ctx->jrdev; 1307 struct caam_aead_req_ctx *rctx = aead_request_ctx(req); 1308 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1309 GFP_KERNEL : GFP_ATOMIC; 1310 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1311 int src_len, dst_len = 0; 1312 struct aead_edesc *edesc; 1313 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; 1314 unsigned int authsize = ctx->authsize; 1315 1316 if (unlikely(req->dst != req->src)) { 1317 src_len = req->assoclen + req->cryptlen; 1318 dst_len = src_len + (encrypt ? authsize : (-authsize)); 1319 1320 src_nents = sg_nents_for_len(req->src, src_len); 1321 if (unlikely(src_nents < 0)) { 1322 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1323 src_len); 1324 return ERR_PTR(src_nents); 1325 } 1326 1327 dst_nents = sg_nents_for_len(req->dst, dst_len); 1328 if (unlikely(dst_nents < 0)) { 1329 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1330 dst_len); 1331 return ERR_PTR(dst_nents); 1332 } 1333 } else { 1334 src_len = req->assoclen + req->cryptlen + 1335 (encrypt ? authsize : 0); 1336 1337 src_nents = sg_nents_for_len(req->src, src_len); 1338 if (unlikely(src_nents < 0)) { 1339 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1340 src_len); 1341 return ERR_PTR(src_nents); 1342 } 1343 } 1344 1345 if (likely(req->src == req->dst)) { 1346 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1347 DMA_BIDIRECTIONAL); 1348 if (unlikely(!mapped_src_nents)) { 1349 dev_err(jrdev, "unable to map source\n"); 1350 return ERR_PTR(-ENOMEM); 1351 } 1352 } else { 1353 /* Cover also the case of null (zero length) input data */ 1354 if (src_nents) { 1355 mapped_src_nents = dma_map_sg(jrdev, req->src, 1356 src_nents, DMA_TO_DEVICE); 1357 if (unlikely(!mapped_src_nents)) { 1358 dev_err(jrdev, "unable to map source\n"); 1359 return ERR_PTR(-ENOMEM); 1360 } 1361 } else { 1362 mapped_src_nents = 0; 1363 } 1364 1365 /* Cover also the case of null (zero length) output data */ 1366 if (dst_nents) { 1367 mapped_dst_nents = dma_map_sg(jrdev, req->dst, 1368 dst_nents, 1369 DMA_FROM_DEVICE); 1370 if (unlikely(!mapped_dst_nents)) { 1371 dev_err(jrdev, "unable to map destination\n"); 1372 dma_unmap_sg(jrdev, req->src, src_nents, 1373 DMA_TO_DEVICE); 1374 return ERR_PTR(-ENOMEM); 1375 } 1376 } else { 1377 mapped_dst_nents = 0; 1378 } 1379 } 1380 1381 /* 1382 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond 1383 * the end of the table by allocating more S/G entries. 1384 */ 1385 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0; 1386 if (mapped_dst_nents > 1) 1387 sec4_sg_len += pad_sg_nents(mapped_dst_nents); 1388 else 1389 sec4_sg_len = pad_sg_nents(sec4_sg_len); 1390 1391 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry); 1392 1393 /* allocate space for base edesc and hw desc commands, link tables */ 1394 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags); 1395 if (!edesc) { 1396 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1397 0, 0, 0); 1398 return ERR_PTR(-ENOMEM); 1399 } 1400 1401 edesc->src_nents = src_nents; 1402 edesc->dst_nents = dst_nents; 1403 edesc->mapped_src_nents = mapped_src_nents; 1404 edesc->mapped_dst_nents = mapped_dst_nents; 1405 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + 1406 desc_bytes; 1407 1408 rctx->edesc = edesc; 1409 1410 *all_contig_ptr = !(mapped_src_nents > 1); 1411 1412 sec4_sg_index = 0; 1413 if (mapped_src_nents > 1) { 1414 sg_to_sec4_sg_last(req->src, src_len, 1415 edesc->sec4_sg + sec4_sg_index, 0); 1416 sec4_sg_index += mapped_src_nents; 1417 } 1418 if (mapped_dst_nents > 1) { 1419 sg_to_sec4_sg_last(req->dst, dst_len, 1420 edesc->sec4_sg + sec4_sg_index, 0); 1421 } 1422 1423 if (!sec4_sg_bytes) 1424 return edesc; 1425 1426 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1427 sec4_sg_bytes, DMA_TO_DEVICE); 1428 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1429 dev_err(jrdev, "unable to map S/G table\n"); 1430 aead_unmap(jrdev, edesc, req); 1431 kfree(edesc); 1432 return ERR_PTR(-ENOMEM); 1433 } 1434 1435 edesc->sec4_sg_bytes = sec4_sg_bytes; 1436 1437 return edesc; 1438 } 1439 1440 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req) 1441 { 1442 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); 1443 struct caam_aead_req_ctx *rctx = aead_request_ctx(req); 1444 struct aead_edesc *edesc = rctx->edesc; 1445 u32 *desc = edesc->hw_desc; 1446 int ret; 1447 1448 /* 1449 * Only the backlog request are sent to crypto-engine since the others 1450 * can be handled by CAAM, if free, especially since JR has up to 1024 1451 * entries (more than the 10 entries from crypto-engine). 1452 */ 1453 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG) 1454 ret = crypto_transfer_aead_request_to_engine(jrpriv->engine, 1455 req); 1456 else 1457 ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req); 1458 1459 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) { 1460 aead_unmap(jrdev, edesc, req); 1461 kfree(rctx->edesc); 1462 } 1463 1464 return ret; 1465 } 1466 1467 static inline int chachapoly_crypt(struct aead_request *req, bool encrypt) 1468 { 1469 struct aead_edesc *edesc; 1470 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1471 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 1472 struct device *jrdev = ctx->jrdev; 1473 bool all_contig; 1474 u32 *desc; 1475 1476 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, 1477 encrypt); 1478 if (IS_ERR(edesc)) 1479 return PTR_ERR(edesc); 1480 1481 desc = edesc->hw_desc; 1482 1483 init_chachapoly_job(req, edesc, all_contig, encrypt); 1484 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ", 1485 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1486 1); 1487 1488 return aead_enqueue_req(jrdev, req); 1489 } 1490 1491 static int chachapoly_encrypt(struct aead_request *req) 1492 { 1493 return chachapoly_crypt(req, true); 1494 } 1495 1496 static int chachapoly_decrypt(struct aead_request *req) 1497 { 1498 return chachapoly_crypt(req, false); 1499 } 1500 1501 static inline int aead_crypt(struct aead_request *req, bool encrypt) 1502 { 1503 struct aead_edesc *edesc; 1504 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1505 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 1506 struct device *jrdev = ctx->jrdev; 1507 bool all_contig; 1508 1509 /* allocate extended descriptor */ 1510 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1511 &all_contig, encrypt); 1512 if (IS_ERR(edesc)) 1513 return PTR_ERR(edesc); 1514 1515 /* Create and submit job descriptor */ 1516 init_authenc_job(req, edesc, all_contig, encrypt); 1517 1518 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ", 1519 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1520 desc_bytes(edesc->hw_desc), 1); 1521 1522 return aead_enqueue_req(jrdev, req); 1523 } 1524 1525 static int aead_encrypt(struct aead_request *req) 1526 { 1527 return aead_crypt(req, true); 1528 } 1529 1530 static int aead_decrypt(struct aead_request *req) 1531 { 1532 return aead_crypt(req, false); 1533 } 1534 1535 static int aead_do_one_req(struct crypto_engine *engine, void *areq) 1536 { 1537 struct aead_request *req = aead_request_cast(areq); 1538 struct caam_ctx *ctx = crypto_aead_ctx_dma(crypto_aead_reqtfm(req)); 1539 struct caam_aead_req_ctx *rctx = aead_request_ctx(req); 1540 u32 *desc = rctx->edesc->hw_desc; 1541 int ret; 1542 1543 rctx->edesc->bklog = true; 1544 1545 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req); 1546 1547 if (ret == -ENOSPC && engine->retry_support) 1548 return ret; 1549 1550 if (ret != -EINPROGRESS) { 1551 aead_unmap(ctx->jrdev, rctx->edesc, req); 1552 kfree(rctx->edesc); 1553 } else { 1554 ret = 0; 1555 } 1556 1557 return ret; 1558 } 1559 1560 static inline int gcm_crypt(struct aead_request *req, bool encrypt) 1561 { 1562 struct aead_edesc *edesc; 1563 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1564 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 1565 struct device *jrdev = ctx->jrdev; 1566 bool all_contig; 1567 1568 /* allocate extended descriptor */ 1569 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, 1570 encrypt); 1571 if (IS_ERR(edesc)) 1572 return PTR_ERR(edesc); 1573 1574 /* Create and submit job descriptor */ 1575 init_gcm_job(req, edesc, all_contig, encrypt); 1576 1577 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ", 1578 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1579 desc_bytes(edesc->hw_desc), 1); 1580 1581 return aead_enqueue_req(jrdev, req); 1582 } 1583 1584 static int gcm_encrypt(struct aead_request *req) 1585 { 1586 return gcm_crypt(req, true); 1587 } 1588 1589 static int gcm_decrypt(struct aead_request *req) 1590 { 1591 return gcm_crypt(req, false); 1592 } 1593 1594 static int ipsec_gcm_encrypt(struct aead_request *req) 1595 { 1596 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req); 1597 } 1598 1599 static int ipsec_gcm_decrypt(struct aead_request *req) 1600 { 1601 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req); 1602 } 1603 1604 /* 1605 * allocate and map the skcipher extended descriptor for skcipher 1606 */ 1607 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req, 1608 int desc_bytes) 1609 { 1610 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1611 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher); 1612 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req); 1613 struct device *jrdev = ctx->jrdev; 1614 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1615 GFP_KERNEL : GFP_ATOMIC; 1616 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1617 struct skcipher_edesc *edesc; 1618 dma_addr_t iv_dma = 0; 1619 u8 *iv; 1620 int ivsize = crypto_skcipher_ivsize(skcipher); 1621 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1622 unsigned int aligned_size; 1623 1624 src_nents = sg_nents_for_len(req->src, req->cryptlen); 1625 if (unlikely(src_nents < 0)) { 1626 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1627 req->cryptlen); 1628 return ERR_PTR(src_nents); 1629 } 1630 1631 if (req->dst != req->src) { 1632 dst_nents = sg_nents_for_len(req->dst, req->cryptlen); 1633 if (unlikely(dst_nents < 0)) { 1634 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1635 req->cryptlen); 1636 return ERR_PTR(dst_nents); 1637 } 1638 } 1639 1640 if (likely(req->src == req->dst)) { 1641 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1642 DMA_BIDIRECTIONAL); 1643 if (unlikely(!mapped_src_nents)) { 1644 dev_err(jrdev, "unable to map source\n"); 1645 return ERR_PTR(-ENOMEM); 1646 } 1647 } else { 1648 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1649 DMA_TO_DEVICE); 1650 if (unlikely(!mapped_src_nents)) { 1651 dev_err(jrdev, "unable to map source\n"); 1652 return ERR_PTR(-ENOMEM); 1653 } 1654 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1655 DMA_FROM_DEVICE); 1656 if (unlikely(!mapped_dst_nents)) { 1657 dev_err(jrdev, "unable to map destination\n"); 1658 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1659 return ERR_PTR(-ENOMEM); 1660 } 1661 } 1662 1663 if (!ivsize && mapped_src_nents == 1) 1664 sec4_sg_ents = 0; // no need for an input hw s/g table 1665 else 1666 sec4_sg_ents = mapped_src_nents + !!ivsize; 1667 dst_sg_idx = sec4_sg_ents; 1668 1669 /* 1670 * Input, output HW S/G tables: [IV, src][dst, IV] 1671 * IV entries point to the same buffer 1672 * If src == dst, S/G entries are reused (S/G tables overlap) 1673 * 1674 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond 1675 * the end of the table by allocating more S/G entries. Logic: 1676 * if (output S/G) 1677 * pad output S/G, if needed 1678 * else if (input S/G) ... 1679 * pad input S/G, if needed 1680 */ 1681 if (ivsize || mapped_dst_nents > 1) { 1682 if (req->src == req->dst) 1683 sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents); 1684 else 1685 sec4_sg_ents += pad_sg_nents(mapped_dst_nents + 1686 !!ivsize); 1687 } else { 1688 sec4_sg_ents = pad_sg_nents(sec4_sg_ents); 1689 } 1690 1691 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1692 1693 /* 1694 * allocate space for base edesc and hw desc commands, link tables, IV 1695 */ 1696 aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes; 1697 aligned_size = ALIGN(aligned_size, dma_get_cache_alignment()); 1698 aligned_size += ~(ARCH_KMALLOC_MINALIGN - 1) & 1699 (dma_get_cache_alignment() - 1); 1700 aligned_size += ALIGN(ivsize, dma_get_cache_alignment()); 1701 edesc = kzalloc(aligned_size, flags); 1702 if (!edesc) { 1703 dev_err(jrdev, "could not allocate extended descriptor\n"); 1704 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1705 0, 0, 0); 1706 return ERR_PTR(-ENOMEM); 1707 } 1708 1709 edesc->src_nents = src_nents; 1710 edesc->dst_nents = dst_nents; 1711 edesc->mapped_src_nents = mapped_src_nents; 1712 edesc->mapped_dst_nents = mapped_dst_nents; 1713 edesc->sec4_sg_bytes = sec4_sg_bytes; 1714 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc + 1715 desc_bytes); 1716 rctx->edesc = edesc; 1717 1718 /* Make sure IV is located in a DMAable area */ 1719 if (ivsize) { 1720 iv = skcipher_edesc_iv(edesc); 1721 memcpy(iv, req->iv, ivsize); 1722 1723 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL); 1724 if (dma_mapping_error(jrdev, iv_dma)) { 1725 dev_err(jrdev, "unable to map IV\n"); 1726 caam_unmap(jrdev, req->src, req->dst, src_nents, 1727 dst_nents, 0, 0, 0, 0); 1728 kfree(edesc); 1729 return ERR_PTR(-ENOMEM); 1730 } 1731 1732 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); 1733 } 1734 if (dst_sg_idx) 1735 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg + 1736 !!ivsize, 0); 1737 1738 if (req->src != req->dst && (ivsize || mapped_dst_nents > 1)) 1739 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg + 1740 dst_sg_idx, 0); 1741 1742 if (ivsize) 1743 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx + 1744 mapped_dst_nents, iv_dma, ivsize, 0); 1745 1746 if (ivsize || mapped_dst_nents > 1) 1747 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx + 1748 mapped_dst_nents - 1 + !!ivsize); 1749 1750 if (sec4_sg_bytes) { 1751 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1752 sec4_sg_bytes, 1753 DMA_TO_DEVICE); 1754 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1755 dev_err(jrdev, "unable to map S/G table\n"); 1756 caam_unmap(jrdev, req->src, req->dst, src_nents, 1757 dst_nents, iv_dma, ivsize, 0, 0); 1758 kfree(edesc); 1759 return ERR_PTR(-ENOMEM); 1760 } 1761 } 1762 1763 edesc->iv_dma = iv_dma; 1764 1765 print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ", 1766 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1767 sec4_sg_bytes, 1); 1768 1769 return edesc; 1770 } 1771 1772 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq) 1773 { 1774 struct skcipher_request *req = skcipher_request_cast(areq); 1775 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(crypto_skcipher_reqtfm(req)); 1776 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req); 1777 u32 *desc = rctx->edesc->hw_desc; 1778 int ret; 1779 1780 rctx->edesc->bklog = true; 1781 1782 ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req); 1783 1784 if (ret == -ENOSPC && engine->retry_support) 1785 return ret; 1786 1787 if (ret != -EINPROGRESS) { 1788 skcipher_unmap(ctx->jrdev, rctx->edesc, req); 1789 kfree(rctx->edesc); 1790 } else { 1791 ret = 0; 1792 } 1793 1794 return ret; 1795 } 1796 1797 static inline bool xts_skcipher_ivsize(struct skcipher_request *req) 1798 { 1799 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1800 unsigned int ivsize = crypto_skcipher_ivsize(skcipher); 1801 1802 return !!get_unaligned((u64 *)(req->iv + (ivsize / 2))); 1803 } 1804 1805 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt) 1806 { 1807 struct skcipher_edesc *edesc; 1808 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1809 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher); 1810 struct device *jrdev = ctx->jrdev; 1811 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); 1812 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 1813 u32 *desc; 1814 int ret = 0; 1815 1816 /* 1817 * XTS is expected to return an error even for input length = 0 1818 * Note that the case input length < block size will be caught during 1819 * HW offloading and return an error. 1820 */ 1821 if (!req->cryptlen && !ctx->fallback) 1822 return 0; 1823 1824 if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) || 1825 ctx->xts_key_fallback)) { 1826 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req); 1827 1828 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback); 1829 skcipher_request_set_callback(&rctx->fallback_req, 1830 req->base.flags, 1831 req->base.complete, 1832 req->base.data); 1833 skcipher_request_set_crypt(&rctx->fallback_req, req->src, 1834 req->dst, req->cryptlen, req->iv); 1835 1836 return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) : 1837 crypto_skcipher_decrypt(&rctx->fallback_req); 1838 } 1839 1840 /* allocate extended descriptor */ 1841 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); 1842 if (IS_ERR(edesc)) 1843 return PTR_ERR(edesc); 1844 1845 /* Create and submit job descriptor*/ 1846 init_skcipher_job(req, edesc, encrypt); 1847 1848 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ", 1849 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1850 desc_bytes(edesc->hw_desc), 1); 1851 1852 desc = edesc->hw_desc; 1853 /* 1854 * Only the backlog request are sent to crypto-engine since the others 1855 * can be handled by CAAM, if free, especially since JR has up to 1024 1856 * entries (more than the 10 entries from crypto-engine). 1857 */ 1858 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG) 1859 ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine, 1860 req); 1861 else 1862 ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req); 1863 1864 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) { 1865 skcipher_unmap(jrdev, edesc, req); 1866 kfree(edesc); 1867 } 1868 1869 return ret; 1870 } 1871 1872 static int skcipher_encrypt(struct skcipher_request *req) 1873 { 1874 return skcipher_crypt(req, true); 1875 } 1876 1877 static int skcipher_decrypt(struct skcipher_request *req) 1878 { 1879 return skcipher_crypt(req, false); 1880 } 1881 1882 static struct caam_skcipher_alg driver_algs[] = { 1883 { 1884 .skcipher = { 1885 .base = { 1886 .cra_name = "cbc(aes)", 1887 .cra_driver_name = "cbc-aes-caam", 1888 .cra_blocksize = AES_BLOCK_SIZE, 1889 }, 1890 .setkey = aes_skcipher_setkey, 1891 .encrypt = skcipher_encrypt, 1892 .decrypt = skcipher_decrypt, 1893 .min_keysize = AES_MIN_KEY_SIZE, 1894 .max_keysize = AES_MAX_KEY_SIZE, 1895 .ivsize = AES_BLOCK_SIZE, 1896 }, 1897 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1898 }, 1899 { 1900 .skcipher = { 1901 .base = { 1902 .cra_name = "cbc(des3_ede)", 1903 .cra_driver_name = "cbc-3des-caam", 1904 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 1905 }, 1906 .setkey = des3_skcipher_setkey, 1907 .encrypt = skcipher_encrypt, 1908 .decrypt = skcipher_decrypt, 1909 .min_keysize = DES3_EDE_KEY_SIZE, 1910 .max_keysize = DES3_EDE_KEY_SIZE, 1911 .ivsize = DES3_EDE_BLOCK_SIZE, 1912 }, 1913 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 1914 }, 1915 { 1916 .skcipher = { 1917 .base = { 1918 .cra_name = "cbc(des)", 1919 .cra_driver_name = "cbc-des-caam", 1920 .cra_blocksize = DES_BLOCK_SIZE, 1921 }, 1922 .setkey = des_skcipher_setkey, 1923 .encrypt = skcipher_encrypt, 1924 .decrypt = skcipher_decrypt, 1925 .min_keysize = DES_KEY_SIZE, 1926 .max_keysize = DES_KEY_SIZE, 1927 .ivsize = DES_BLOCK_SIZE, 1928 }, 1929 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 1930 }, 1931 { 1932 .skcipher = { 1933 .base = { 1934 .cra_name = "ctr(aes)", 1935 .cra_driver_name = "ctr-aes-caam", 1936 .cra_blocksize = 1, 1937 }, 1938 .setkey = ctr_skcipher_setkey, 1939 .encrypt = skcipher_encrypt, 1940 .decrypt = skcipher_decrypt, 1941 .min_keysize = AES_MIN_KEY_SIZE, 1942 .max_keysize = AES_MAX_KEY_SIZE, 1943 .ivsize = AES_BLOCK_SIZE, 1944 .chunksize = AES_BLOCK_SIZE, 1945 }, 1946 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | 1947 OP_ALG_AAI_CTR_MOD128, 1948 }, 1949 { 1950 .skcipher = { 1951 .base = { 1952 .cra_name = "rfc3686(ctr(aes))", 1953 .cra_driver_name = "rfc3686-ctr-aes-caam", 1954 .cra_blocksize = 1, 1955 }, 1956 .setkey = rfc3686_skcipher_setkey, 1957 .encrypt = skcipher_encrypt, 1958 .decrypt = skcipher_decrypt, 1959 .min_keysize = AES_MIN_KEY_SIZE + 1960 CTR_RFC3686_NONCE_SIZE, 1961 .max_keysize = AES_MAX_KEY_SIZE + 1962 CTR_RFC3686_NONCE_SIZE, 1963 .ivsize = CTR_RFC3686_IV_SIZE, 1964 .chunksize = AES_BLOCK_SIZE, 1965 }, 1966 .caam = { 1967 .class1_alg_type = OP_ALG_ALGSEL_AES | 1968 OP_ALG_AAI_CTR_MOD128, 1969 .rfc3686 = true, 1970 }, 1971 }, 1972 { 1973 .skcipher = { 1974 .base = { 1975 .cra_name = "xts(aes)", 1976 .cra_driver_name = "xts-aes-caam", 1977 .cra_flags = CRYPTO_ALG_NEED_FALLBACK, 1978 .cra_blocksize = AES_BLOCK_SIZE, 1979 }, 1980 .setkey = xts_skcipher_setkey, 1981 .encrypt = skcipher_encrypt, 1982 .decrypt = skcipher_decrypt, 1983 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1984 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1985 .ivsize = AES_BLOCK_SIZE, 1986 }, 1987 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1988 }, 1989 { 1990 .skcipher = { 1991 .base = { 1992 .cra_name = "ecb(des)", 1993 .cra_driver_name = "ecb-des-caam", 1994 .cra_blocksize = DES_BLOCK_SIZE, 1995 }, 1996 .setkey = des_skcipher_setkey, 1997 .encrypt = skcipher_encrypt, 1998 .decrypt = skcipher_decrypt, 1999 .min_keysize = DES_KEY_SIZE, 2000 .max_keysize = DES_KEY_SIZE, 2001 }, 2002 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB, 2003 }, 2004 { 2005 .skcipher = { 2006 .base = { 2007 .cra_name = "ecb(aes)", 2008 .cra_driver_name = "ecb-aes-caam", 2009 .cra_blocksize = AES_BLOCK_SIZE, 2010 }, 2011 .setkey = aes_skcipher_setkey, 2012 .encrypt = skcipher_encrypt, 2013 .decrypt = skcipher_decrypt, 2014 .min_keysize = AES_MIN_KEY_SIZE, 2015 .max_keysize = AES_MAX_KEY_SIZE, 2016 }, 2017 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB, 2018 }, 2019 { 2020 .skcipher = { 2021 .base = { 2022 .cra_name = "ecb(des3_ede)", 2023 .cra_driver_name = "ecb-des3-caam", 2024 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2025 }, 2026 .setkey = des3_skcipher_setkey, 2027 .encrypt = skcipher_encrypt, 2028 .decrypt = skcipher_decrypt, 2029 .min_keysize = DES3_EDE_KEY_SIZE, 2030 .max_keysize = DES3_EDE_KEY_SIZE, 2031 }, 2032 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB, 2033 }, 2034 }; 2035 2036 static struct caam_aead_alg driver_aeads[] = { 2037 { 2038 .aead = { 2039 .base = { 2040 .cra_name = "rfc4106(gcm(aes))", 2041 .cra_driver_name = "rfc4106-gcm-aes-caam", 2042 .cra_blocksize = 1, 2043 }, 2044 .setkey = rfc4106_setkey, 2045 .setauthsize = rfc4106_setauthsize, 2046 .encrypt = ipsec_gcm_encrypt, 2047 .decrypt = ipsec_gcm_decrypt, 2048 .ivsize = GCM_RFC4106_IV_SIZE, 2049 .maxauthsize = AES_BLOCK_SIZE, 2050 }, 2051 .caam = { 2052 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2053 .nodkp = true, 2054 }, 2055 }, 2056 { 2057 .aead = { 2058 .base = { 2059 .cra_name = "rfc4543(gcm(aes))", 2060 .cra_driver_name = "rfc4543-gcm-aes-caam", 2061 .cra_blocksize = 1, 2062 }, 2063 .setkey = rfc4543_setkey, 2064 .setauthsize = rfc4543_setauthsize, 2065 .encrypt = ipsec_gcm_encrypt, 2066 .decrypt = ipsec_gcm_decrypt, 2067 .ivsize = GCM_RFC4543_IV_SIZE, 2068 .maxauthsize = AES_BLOCK_SIZE, 2069 }, 2070 .caam = { 2071 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2072 .nodkp = true, 2073 }, 2074 }, 2075 /* Galois Counter Mode */ 2076 { 2077 .aead = { 2078 .base = { 2079 .cra_name = "gcm(aes)", 2080 .cra_driver_name = "gcm-aes-caam", 2081 .cra_blocksize = 1, 2082 }, 2083 .setkey = gcm_setkey, 2084 .setauthsize = gcm_setauthsize, 2085 .encrypt = gcm_encrypt, 2086 .decrypt = gcm_decrypt, 2087 .ivsize = GCM_AES_IV_SIZE, 2088 .maxauthsize = AES_BLOCK_SIZE, 2089 }, 2090 .caam = { 2091 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2092 .nodkp = true, 2093 }, 2094 }, 2095 /* single-pass ipsec_esp descriptor */ 2096 { 2097 .aead = { 2098 .base = { 2099 .cra_name = "authenc(hmac(md5)," 2100 "ecb(cipher_null))", 2101 .cra_driver_name = "authenc-hmac-md5-" 2102 "ecb-cipher_null-caam", 2103 .cra_blocksize = NULL_BLOCK_SIZE, 2104 }, 2105 .setkey = aead_setkey, 2106 .setauthsize = aead_setauthsize, 2107 .encrypt = aead_encrypt, 2108 .decrypt = aead_decrypt, 2109 .ivsize = NULL_IV_SIZE, 2110 .maxauthsize = MD5_DIGEST_SIZE, 2111 }, 2112 .caam = { 2113 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2114 OP_ALG_AAI_HMAC_PRECOMP, 2115 }, 2116 }, 2117 { 2118 .aead = { 2119 .base = { 2120 .cra_name = "authenc(hmac(sha1)," 2121 "ecb(cipher_null))", 2122 .cra_driver_name = "authenc-hmac-sha1-" 2123 "ecb-cipher_null-caam", 2124 .cra_blocksize = NULL_BLOCK_SIZE, 2125 }, 2126 .setkey = aead_setkey, 2127 .setauthsize = aead_setauthsize, 2128 .encrypt = aead_encrypt, 2129 .decrypt = aead_decrypt, 2130 .ivsize = NULL_IV_SIZE, 2131 .maxauthsize = SHA1_DIGEST_SIZE, 2132 }, 2133 .caam = { 2134 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2135 OP_ALG_AAI_HMAC_PRECOMP, 2136 }, 2137 }, 2138 { 2139 .aead = { 2140 .base = { 2141 .cra_name = "authenc(hmac(sha224)," 2142 "ecb(cipher_null))", 2143 .cra_driver_name = "authenc-hmac-sha224-" 2144 "ecb-cipher_null-caam", 2145 .cra_blocksize = NULL_BLOCK_SIZE, 2146 }, 2147 .setkey = aead_setkey, 2148 .setauthsize = aead_setauthsize, 2149 .encrypt = aead_encrypt, 2150 .decrypt = aead_decrypt, 2151 .ivsize = NULL_IV_SIZE, 2152 .maxauthsize = SHA224_DIGEST_SIZE, 2153 }, 2154 .caam = { 2155 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2156 OP_ALG_AAI_HMAC_PRECOMP, 2157 }, 2158 }, 2159 { 2160 .aead = { 2161 .base = { 2162 .cra_name = "authenc(hmac(sha256)," 2163 "ecb(cipher_null))", 2164 .cra_driver_name = "authenc-hmac-sha256-" 2165 "ecb-cipher_null-caam", 2166 .cra_blocksize = NULL_BLOCK_SIZE, 2167 }, 2168 .setkey = aead_setkey, 2169 .setauthsize = aead_setauthsize, 2170 .encrypt = aead_encrypt, 2171 .decrypt = aead_decrypt, 2172 .ivsize = NULL_IV_SIZE, 2173 .maxauthsize = SHA256_DIGEST_SIZE, 2174 }, 2175 .caam = { 2176 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2177 OP_ALG_AAI_HMAC_PRECOMP, 2178 }, 2179 }, 2180 { 2181 .aead = { 2182 .base = { 2183 .cra_name = "authenc(hmac(sha384)," 2184 "ecb(cipher_null))", 2185 .cra_driver_name = "authenc-hmac-sha384-" 2186 "ecb-cipher_null-caam", 2187 .cra_blocksize = NULL_BLOCK_SIZE, 2188 }, 2189 .setkey = aead_setkey, 2190 .setauthsize = aead_setauthsize, 2191 .encrypt = aead_encrypt, 2192 .decrypt = aead_decrypt, 2193 .ivsize = NULL_IV_SIZE, 2194 .maxauthsize = SHA384_DIGEST_SIZE, 2195 }, 2196 .caam = { 2197 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2198 OP_ALG_AAI_HMAC_PRECOMP, 2199 }, 2200 }, 2201 { 2202 .aead = { 2203 .base = { 2204 .cra_name = "authenc(hmac(sha512)," 2205 "ecb(cipher_null))", 2206 .cra_driver_name = "authenc-hmac-sha512-" 2207 "ecb-cipher_null-caam", 2208 .cra_blocksize = NULL_BLOCK_SIZE, 2209 }, 2210 .setkey = aead_setkey, 2211 .setauthsize = aead_setauthsize, 2212 .encrypt = aead_encrypt, 2213 .decrypt = aead_decrypt, 2214 .ivsize = NULL_IV_SIZE, 2215 .maxauthsize = SHA512_DIGEST_SIZE, 2216 }, 2217 .caam = { 2218 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2219 OP_ALG_AAI_HMAC_PRECOMP, 2220 }, 2221 }, 2222 { 2223 .aead = { 2224 .base = { 2225 .cra_name = "authenc(hmac(md5),cbc(aes))", 2226 .cra_driver_name = "authenc-hmac-md5-" 2227 "cbc-aes-caam", 2228 .cra_blocksize = AES_BLOCK_SIZE, 2229 }, 2230 .setkey = aead_setkey, 2231 .setauthsize = aead_setauthsize, 2232 .encrypt = aead_encrypt, 2233 .decrypt = aead_decrypt, 2234 .ivsize = AES_BLOCK_SIZE, 2235 .maxauthsize = MD5_DIGEST_SIZE, 2236 }, 2237 .caam = { 2238 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2239 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2240 OP_ALG_AAI_HMAC_PRECOMP, 2241 }, 2242 }, 2243 { 2244 .aead = { 2245 .base = { 2246 .cra_name = "echainiv(authenc(hmac(md5)," 2247 "cbc(aes)))", 2248 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2249 "cbc-aes-caam", 2250 .cra_blocksize = AES_BLOCK_SIZE, 2251 }, 2252 .setkey = aead_setkey, 2253 .setauthsize = aead_setauthsize, 2254 .encrypt = aead_encrypt, 2255 .decrypt = aead_decrypt, 2256 .ivsize = AES_BLOCK_SIZE, 2257 .maxauthsize = MD5_DIGEST_SIZE, 2258 }, 2259 .caam = { 2260 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2261 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2262 OP_ALG_AAI_HMAC_PRECOMP, 2263 .geniv = true, 2264 }, 2265 }, 2266 { 2267 .aead = { 2268 .base = { 2269 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2270 .cra_driver_name = "authenc-hmac-sha1-" 2271 "cbc-aes-caam", 2272 .cra_blocksize = AES_BLOCK_SIZE, 2273 }, 2274 .setkey = aead_setkey, 2275 .setauthsize = aead_setauthsize, 2276 .encrypt = aead_encrypt, 2277 .decrypt = aead_decrypt, 2278 .ivsize = AES_BLOCK_SIZE, 2279 .maxauthsize = SHA1_DIGEST_SIZE, 2280 }, 2281 .caam = { 2282 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2283 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2284 OP_ALG_AAI_HMAC_PRECOMP, 2285 }, 2286 }, 2287 { 2288 .aead = { 2289 .base = { 2290 .cra_name = "echainiv(authenc(hmac(sha1)," 2291 "cbc(aes)))", 2292 .cra_driver_name = "echainiv-authenc-" 2293 "hmac-sha1-cbc-aes-caam", 2294 .cra_blocksize = AES_BLOCK_SIZE, 2295 }, 2296 .setkey = aead_setkey, 2297 .setauthsize = aead_setauthsize, 2298 .encrypt = aead_encrypt, 2299 .decrypt = aead_decrypt, 2300 .ivsize = AES_BLOCK_SIZE, 2301 .maxauthsize = SHA1_DIGEST_SIZE, 2302 }, 2303 .caam = { 2304 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2305 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2306 OP_ALG_AAI_HMAC_PRECOMP, 2307 .geniv = true, 2308 }, 2309 }, 2310 { 2311 .aead = { 2312 .base = { 2313 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2314 .cra_driver_name = "authenc-hmac-sha224-" 2315 "cbc-aes-caam", 2316 .cra_blocksize = AES_BLOCK_SIZE, 2317 }, 2318 .setkey = aead_setkey, 2319 .setauthsize = aead_setauthsize, 2320 .encrypt = aead_encrypt, 2321 .decrypt = aead_decrypt, 2322 .ivsize = AES_BLOCK_SIZE, 2323 .maxauthsize = SHA224_DIGEST_SIZE, 2324 }, 2325 .caam = { 2326 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2327 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2328 OP_ALG_AAI_HMAC_PRECOMP, 2329 }, 2330 }, 2331 { 2332 .aead = { 2333 .base = { 2334 .cra_name = "echainiv(authenc(hmac(sha224)," 2335 "cbc(aes)))", 2336 .cra_driver_name = "echainiv-authenc-" 2337 "hmac-sha224-cbc-aes-caam", 2338 .cra_blocksize = AES_BLOCK_SIZE, 2339 }, 2340 .setkey = aead_setkey, 2341 .setauthsize = aead_setauthsize, 2342 .encrypt = aead_encrypt, 2343 .decrypt = aead_decrypt, 2344 .ivsize = AES_BLOCK_SIZE, 2345 .maxauthsize = SHA224_DIGEST_SIZE, 2346 }, 2347 .caam = { 2348 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2349 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2350 OP_ALG_AAI_HMAC_PRECOMP, 2351 .geniv = true, 2352 }, 2353 }, 2354 { 2355 .aead = { 2356 .base = { 2357 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2358 .cra_driver_name = "authenc-hmac-sha256-" 2359 "cbc-aes-caam", 2360 .cra_blocksize = AES_BLOCK_SIZE, 2361 }, 2362 .setkey = aead_setkey, 2363 .setauthsize = aead_setauthsize, 2364 .encrypt = aead_encrypt, 2365 .decrypt = aead_decrypt, 2366 .ivsize = AES_BLOCK_SIZE, 2367 .maxauthsize = SHA256_DIGEST_SIZE, 2368 }, 2369 .caam = { 2370 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2371 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2372 OP_ALG_AAI_HMAC_PRECOMP, 2373 }, 2374 }, 2375 { 2376 .aead = { 2377 .base = { 2378 .cra_name = "echainiv(authenc(hmac(sha256)," 2379 "cbc(aes)))", 2380 .cra_driver_name = "echainiv-authenc-" 2381 "hmac-sha256-cbc-aes-caam", 2382 .cra_blocksize = AES_BLOCK_SIZE, 2383 }, 2384 .setkey = aead_setkey, 2385 .setauthsize = aead_setauthsize, 2386 .encrypt = aead_encrypt, 2387 .decrypt = aead_decrypt, 2388 .ivsize = AES_BLOCK_SIZE, 2389 .maxauthsize = SHA256_DIGEST_SIZE, 2390 }, 2391 .caam = { 2392 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2393 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2394 OP_ALG_AAI_HMAC_PRECOMP, 2395 .geniv = true, 2396 }, 2397 }, 2398 { 2399 .aead = { 2400 .base = { 2401 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2402 .cra_driver_name = "authenc-hmac-sha384-" 2403 "cbc-aes-caam", 2404 .cra_blocksize = AES_BLOCK_SIZE, 2405 }, 2406 .setkey = aead_setkey, 2407 .setauthsize = aead_setauthsize, 2408 .encrypt = aead_encrypt, 2409 .decrypt = aead_decrypt, 2410 .ivsize = AES_BLOCK_SIZE, 2411 .maxauthsize = SHA384_DIGEST_SIZE, 2412 }, 2413 .caam = { 2414 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2415 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2416 OP_ALG_AAI_HMAC_PRECOMP, 2417 }, 2418 }, 2419 { 2420 .aead = { 2421 .base = { 2422 .cra_name = "echainiv(authenc(hmac(sha384)," 2423 "cbc(aes)))", 2424 .cra_driver_name = "echainiv-authenc-" 2425 "hmac-sha384-cbc-aes-caam", 2426 .cra_blocksize = AES_BLOCK_SIZE, 2427 }, 2428 .setkey = aead_setkey, 2429 .setauthsize = aead_setauthsize, 2430 .encrypt = aead_encrypt, 2431 .decrypt = aead_decrypt, 2432 .ivsize = AES_BLOCK_SIZE, 2433 .maxauthsize = SHA384_DIGEST_SIZE, 2434 }, 2435 .caam = { 2436 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2437 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2438 OP_ALG_AAI_HMAC_PRECOMP, 2439 .geniv = true, 2440 }, 2441 }, 2442 { 2443 .aead = { 2444 .base = { 2445 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2446 .cra_driver_name = "authenc-hmac-sha512-" 2447 "cbc-aes-caam", 2448 .cra_blocksize = AES_BLOCK_SIZE, 2449 }, 2450 .setkey = aead_setkey, 2451 .setauthsize = aead_setauthsize, 2452 .encrypt = aead_encrypt, 2453 .decrypt = aead_decrypt, 2454 .ivsize = AES_BLOCK_SIZE, 2455 .maxauthsize = SHA512_DIGEST_SIZE, 2456 }, 2457 .caam = { 2458 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2459 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2460 OP_ALG_AAI_HMAC_PRECOMP, 2461 }, 2462 }, 2463 { 2464 .aead = { 2465 .base = { 2466 .cra_name = "echainiv(authenc(hmac(sha512)," 2467 "cbc(aes)))", 2468 .cra_driver_name = "echainiv-authenc-" 2469 "hmac-sha512-cbc-aes-caam", 2470 .cra_blocksize = AES_BLOCK_SIZE, 2471 }, 2472 .setkey = aead_setkey, 2473 .setauthsize = aead_setauthsize, 2474 .encrypt = aead_encrypt, 2475 .decrypt = aead_decrypt, 2476 .ivsize = AES_BLOCK_SIZE, 2477 .maxauthsize = SHA512_DIGEST_SIZE, 2478 }, 2479 .caam = { 2480 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2481 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2482 OP_ALG_AAI_HMAC_PRECOMP, 2483 .geniv = true, 2484 }, 2485 }, 2486 { 2487 .aead = { 2488 .base = { 2489 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2490 .cra_driver_name = "authenc-hmac-md5-" 2491 "cbc-des3_ede-caam", 2492 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2493 }, 2494 .setkey = des3_aead_setkey, 2495 .setauthsize = aead_setauthsize, 2496 .encrypt = aead_encrypt, 2497 .decrypt = aead_decrypt, 2498 .ivsize = DES3_EDE_BLOCK_SIZE, 2499 .maxauthsize = MD5_DIGEST_SIZE, 2500 }, 2501 .caam = { 2502 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2503 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2504 OP_ALG_AAI_HMAC_PRECOMP, 2505 } 2506 }, 2507 { 2508 .aead = { 2509 .base = { 2510 .cra_name = "echainiv(authenc(hmac(md5)," 2511 "cbc(des3_ede)))", 2512 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2513 "cbc-des3_ede-caam", 2514 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2515 }, 2516 .setkey = des3_aead_setkey, 2517 .setauthsize = aead_setauthsize, 2518 .encrypt = aead_encrypt, 2519 .decrypt = aead_decrypt, 2520 .ivsize = DES3_EDE_BLOCK_SIZE, 2521 .maxauthsize = MD5_DIGEST_SIZE, 2522 }, 2523 .caam = { 2524 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2525 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2526 OP_ALG_AAI_HMAC_PRECOMP, 2527 .geniv = true, 2528 } 2529 }, 2530 { 2531 .aead = { 2532 .base = { 2533 .cra_name = "authenc(hmac(sha1)," 2534 "cbc(des3_ede))", 2535 .cra_driver_name = "authenc-hmac-sha1-" 2536 "cbc-des3_ede-caam", 2537 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2538 }, 2539 .setkey = des3_aead_setkey, 2540 .setauthsize = aead_setauthsize, 2541 .encrypt = aead_encrypt, 2542 .decrypt = aead_decrypt, 2543 .ivsize = DES3_EDE_BLOCK_SIZE, 2544 .maxauthsize = SHA1_DIGEST_SIZE, 2545 }, 2546 .caam = { 2547 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2548 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2549 OP_ALG_AAI_HMAC_PRECOMP, 2550 }, 2551 }, 2552 { 2553 .aead = { 2554 .base = { 2555 .cra_name = "echainiv(authenc(hmac(sha1)," 2556 "cbc(des3_ede)))", 2557 .cra_driver_name = "echainiv-authenc-" 2558 "hmac-sha1-" 2559 "cbc-des3_ede-caam", 2560 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2561 }, 2562 .setkey = des3_aead_setkey, 2563 .setauthsize = aead_setauthsize, 2564 .encrypt = aead_encrypt, 2565 .decrypt = aead_decrypt, 2566 .ivsize = DES3_EDE_BLOCK_SIZE, 2567 .maxauthsize = SHA1_DIGEST_SIZE, 2568 }, 2569 .caam = { 2570 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2571 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2572 OP_ALG_AAI_HMAC_PRECOMP, 2573 .geniv = true, 2574 }, 2575 }, 2576 { 2577 .aead = { 2578 .base = { 2579 .cra_name = "authenc(hmac(sha224)," 2580 "cbc(des3_ede))", 2581 .cra_driver_name = "authenc-hmac-sha224-" 2582 "cbc-des3_ede-caam", 2583 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2584 }, 2585 .setkey = des3_aead_setkey, 2586 .setauthsize = aead_setauthsize, 2587 .encrypt = aead_encrypt, 2588 .decrypt = aead_decrypt, 2589 .ivsize = DES3_EDE_BLOCK_SIZE, 2590 .maxauthsize = SHA224_DIGEST_SIZE, 2591 }, 2592 .caam = { 2593 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2594 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2595 OP_ALG_AAI_HMAC_PRECOMP, 2596 }, 2597 }, 2598 { 2599 .aead = { 2600 .base = { 2601 .cra_name = "echainiv(authenc(hmac(sha224)," 2602 "cbc(des3_ede)))", 2603 .cra_driver_name = "echainiv-authenc-" 2604 "hmac-sha224-" 2605 "cbc-des3_ede-caam", 2606 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2607 }, 2608 .setkey = des3_aead_setkey, 2609 .setauthsize = aead_setauthsize, 2610 .encrypt = aead_encrypt, 2611 .decrypt = aead_decrypt, 2612 .ivsize = DES3_EDE_BLOCK_SIZE, 2613 .maxauthsize = SHA224_DIGEST_SIZE, 2614 }, 2615 .caam = { 2616 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2617 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2618 OP_ALG_AAI_HMAC_PRECOMP, 2619 .geniv = true, 2620 }, 2621 }, 2622 { 2623 .aead = { 2624 .base = { 2625 .cra_name = "authenc(hmac(sha256)," 2626 "cbc(des3_ede))", 2627 .cra_driver_name = "authenc-hmac-sha256-" 2628 "cbc-des3_ede-caam", 2629 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2630 }, 2631 .setkey = des3_aead_setkey, 2632 .setauthsize = aead_setauthsize, 2633 .encrypt = aead_encrypt, 2634 .decrypt = aead_decrypt, 2635 .ivsize = DES3_EDE_BLOCK_SIZE, 2636 .maxauthsize = SHA256_DIGEST_SIZE, 2637 }, 2638 .caam = { 2639 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2640 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2641 OP_ALG_AAI_HMAC_PRECOMP, 2642 }, 2643 }, 2644 { 2645 .aead = { 2646 .base = { 2647 .cra_name = "echainiv(authenc(hmac(sha256)," 2648 "cbc(des3_ede)))", 2649 .cra_driver_name = "echainiv-authenc-" 2650 "hmac-sha256-" 2651 "cbc-des3_ede-caam", 2652 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2653 }, 2654 .setkey = des3_aead_setkey, 2655 .setauthsize = aead_setauthsize, 2656 .encrypt = aead_encrypt, 2657 .decrypt = aead_decrypt, 2658 .ivsize = DES3_EDE_BLOCK_SIZE, 2659 .maxauthsize = SHA256_DIGEST_SIZE, 2660 }, 2661 .caam = { 2662 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2663 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2664 OP_ALG_AAI_HMAC_PRECOMP, 2665 .geniv = true, 2666 }, 2667 }, 2668 { 2669 .aead = { 2670 .base = { 2671 .cra_name = "authenc(hmac(sha384)," 2672 "cbc(des3_ede))", 2673 .cra_driver_name = "authenc-hmac-sha384-" 2674 "cbc-des3_ede-caam", 2675 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2676 }, 2677 .setkey = des3_aead_setkey, 2678 .setauthsize = aead_setauthsize, 2679 .encrypt = aead_encrypt, 2680 .decrypt = aead_decrypt, 2681 .ivsize = DES3_EDE_BLOCK_SIZE, 2682 .maxauthsize = SHA384_DIGEST_SIZE, 2683 }, 2684 .caam = { 2685 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2686 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2687 OP_ALG_AAI_HMAC_PRECOMP, 2688 }, 2689 }, 2690 { 2691 .aead = { 2692 .base = { 2693 .cra_name = "echainiv(authenc(hmac(sha384)," 2694 "cbc(des3_ede)))", 2695 .cra_driver_name = "echainiv-authenc-" 2696 "hmac-sha384-" 2697 "cbc-des3_ede-caam", 2698 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2699 }, 2700 .setkey = des3_aead_setkey, 2701 .setauthsize = aead_setauthsize, 2702 .encrypt = aead_encrypt, 2703 .decrypt = aead_decrypt, 2704 .ivsize = DES3_EDE_BLOCK_SIZE, 2705 .maxauthsize = SHA384_DIGEST_SIZE, 2706 }, 2707 .caam = { 2708 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2709 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2710 OP_ALG_AAI_HMAC_PRECOMP, 2711 .geniv = true, 2712 }, 2713 }, 2714 { 2715 .aead = { 2716 .base = { 2717 .cra_name = "authenc(hmac(sha512)," 2718 "cbc(des3_ede))", 2719 .cra_driver_name = "authenc-hmac-sha512-" 2720 "cbc-des3_ede-caam", 2721 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2722 }, 2723 .setkey = des3_aead_setkey, 2724 .setauthsize = aead_setauthsize, 2725 .encrypt = aead_encrypt, 2726 .decrypt = aead_decrypt, 2727 .ivsize = DES3_EDE_BLOCK_SIZE, 2728 .maxauthsize = SHA512_DIGEST_SIZE, 2729 }, 2730 .caam = { 2731 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2732 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2733 OP_ALG_AAI_HMAC_PRECOMP, 2734 }, 2735 }, 2736 { 2737 .aead = { 2738 .base = { 2739 .cra_name = "echainiv(authenc(hmac(sha512)," 2740 "cbc(des3_ede)))", 2741 .cra_driver_name = "echainiv-authenc-" 2742 "hmac-sha512-" 2743 "cbc-des3_ede-caam", 2744 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2745 }, 2746 .setkey = des3_aead_setkey, 2747 .setauthsize = aead_setauthsize, 2748 .encrypt = aead_encrypt, 2749 .decrypt = aead_decrypt, 2750 .ivsize = DES3_EDE_BLOCK_SIZE, 2751 .maxauthsize = SHA512_DIGEST_SIZE, 2752 }, 2753 .caam = { 2754 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2755 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2756 OP_ALG_AAI_HMAC_PRECOMP, 2757 .geniv = true, 2758 }, 2759 }, 2760 { 2761 .aead = { 2762 .base = { 2763 .cra_name = "authenc(hmac(md5),cbc(des))", 2764 .cra_driver_name = "authenc-hmac-md5-" 2765 "cbc-des-caam", 2766 .cra_blocksize = DES_BLOCK_SIZE, 2767 }, 2768 .setkey = aead_setkey, 2769 .setauthsize = aead_setauthsize, 2770 .encrypt = aead_encrypt, 2771 .decrypt = aead_decrypt, 2772 .ivsize = DES_BLOCK_SIZE, 2773 .maxauthsize = MD5_DIGEST_SIZE, 2774 }, 2775 .caam = { 2776 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2777 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2778 OP_ALG_AAI_HMAC_PRECOMP, 2779 }, 2780 }, 2781 { 2782 .aead = { 2783 .base = { 2784 .cra_name = "echainiv(authenc(hmac(md5)," 2785 "cbc(des)))", 2786 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2787 "cbc-des-caam", 2788 .cra_blocksize = DES_BLOCK_SIZE, 2789 }, 2790 .setkey = aead_setkey, 2791 .setauthsize = aead_setauthsize, 2792 .encrypt = aead_encrypt, 2793 .decrypt = aead_decrypt, 2794 .ivsize = DES_BLOCK_SIZE, 2795 .maxauthsize = MD5_DIGEST_SIZE, 2796 }, 2797 .caam = { 2798 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2799 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2800 OP_ALG_AAI_HMAC_PRECOMP, 2801 .geniv = true, 2802 }, 2803 }, 2804 { 2805 .aead = { 2806 .base = { 2807 .cra_name = "authenc(hmac(sha1),cbc(des))", 2808 .cra_driver_name = "authenc-hmac-sha1-" 2809 "cbc-des-caam", 2810 .cra_blocksize = DES_BLOCK_SIZE, 2811 }, 2812 .setkey = aead_setkey, 2813 .setauthsize = aead_setauthsize, 2814 .encrypt = aead_encrypt, 2815 .decrypt = aead_decrypt, 2816 .ivsize = DES_BLOCK_SIZE, 2817 .maxauthsize = SHA1_DIGEST_SIZE, 2818 }, 2819 .caam = { 2820 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2821 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2822 OP_ALG_AAI_HMAC_PRECOMP, 2823 }, 2824 }, 2825 { 2826 .aead = { 2827 .base = { 2828 .cra_name = "echainiv(authenc(hmac(sha1)," 2829 "cbc(des)))", 2830 .cra_driver_name = "echainiv-authenc-" 2831 "hmac-sha1-cbc-des-caam", 2832 .cra_blocksize = DES_BLOCK_SIZE, 2833 }, 2834 .setkey = aead_setkey, 2835 .setauthsize = aead_setauthsize, 2836 .encrypt = aead_encrypt, 2837 .decrypt = aead_decrypt, 2838 .ivsize = DES_BLOCK_SIZE, 2839 .maxauthsize = SHA1_DIGEST_SIZE, 2840 }, 2841 .caam = { 2842 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2843 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2844 OP_ALG_AAI_HMAC_PRECOMP, 2845 .geniv = true, 2846 }, 2847 }, 2848 { 2849 .aead = { 2850 .base = { 2851 .cra_name = "authenc(hmac(sha224),cbc(des))", 2852 .cra_driver_name = "authenc-hmac-sha224-" 2853 "cbc-des-caam", 2854 .cra_blocksize = DES_BLOCK_SIZE, 2855 }, 2856 .setkey = aead_setkey, 2857 .setauthsize = aead_setauthsize, 2858 .encrypt = aead_encrypt, 2859 .decrypt = aead_decrypt, 2860 .ivsize = DES_BLOCK_SIZE, 2861 .maxauthsize = SHA224_DIGEST_SIZE, 2862 }, 2863 .caam = { 2864 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2865 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2866 OP_ALG_AAI_HMAC_PRECOMP, 2867 }, 2868 }, 2869 { 2870 .aead = { 2871 .base = { 2872 .cra_name = "echainiv(authenc(hmac(sha224)," 2873 "cbc(des)))", 2874 .cra_driver_name = "echainiv-authenc-" 2875 "hmac-sha224-cbc-des-caam", 2876 .cra_blocksize = DES_BLOCK_SIZE, 2877 }, 2878 .setkey = aead_setkey, 2879 .setauthsize = aead_setauthsize, 2880 .encrypt = aead_encrypt, 2881 .decrypt = aead_decrypt, 2882 .ivsize = DES_BLOCK_SIZE, 2883 .maxauthsize = SHA224_DIGEST_SIZE, 2884 }, 2885 .caam = { 2886 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2887 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2888 OP_ALG_AAI_HMAC_PRECOMP, 2889 .geniv = true, 2890 }, 2891 }, 2892 { 2893 .aead = { 2894 .base = { 2895 .cra_name = "authenc(hmac(sha256),cbc(des))", 2896 .cra_driver_name = "authenc-hmac-sha256-" 2897 "cbc-des-caam", 2898 .cra_blocksize = DES_BLOCK_SIZE, 2899 }, 2900 .setkey = aead_setkey, 2901 .setauthsize = aead_setauthsize, 2902 .encrypt = aead_encrypt, 2903 .decrypt = aead_decrypt, 2904 .ivsize = DES_BLOCK_SIZE, 2905 .maxauthsize = SHA256_DIGEST_SIZE, 2906 }, 2907 .caam = { 2908 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2909 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2910 OP_ALG_AAI_HMAC_PRECOMP, 2911 }, 2912 }, 2913 { 2914 .aead = { 2915 .base = { 2916 .cra_name = "echainiv(authenc(hmac(sha256)," 2917 "cbc(des)))", 2918 .cra_driver_name = "echainiv-authenc-" 2919 "hmac-sha256-cbc-des-caam", 2920 .cra_blocksize = DES_BLOCK_SIZE, 2921 }, 2922 .setkey = aead_setkey, 2923 .setauthsize = aead_setauthsize, 2924 .encrypt = aead_encrypt, 2925 .decrypt = aead_decrypt, 2926 .ivsize = DES_BLOCK_SIZE, 2927 .maxauthsize = SHA256_DIGEST_SIZE, 2928 }, 2929 .caam = { 2930 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2931 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2932 OP_ALG_AAI_HMAC_PRECOMP, 2933 .geniv = true, 2934 }, 2935 }, 2936 { 2937 .aead = { 2938 .base = { 2939 .cra_name = "authenc(hmac(sha384),cbc(des))", 2940 .cra_driver_name = "authenc-hmac-sha384-" 2941 "cbc-des-caam", 2942 .cra_blocksize = DES_BLOCK_SIZE, 2943 }, 2944 .setkey = aead_setkey, 2945 .setauthsize = aead_setauthsize, 2946 .encrypt = aead_encrypt, 2947 .decrypt = aead_decrypt, 2948 .ivsize = DES_BLOCK_SIZE, 2949 .maxauthsize = SHA384_DIGEST_SIZE, 2950 }, 2951 .caam = { 2952 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2953 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2954 OP_ALG_AAI_HMAC_PRECOMP, 2955 }, 2956 }, 2957 { 2958 .aead = { 2959 .base = { 2960 .cra_name = "echainiv(authenc(hmac(sha384)," 2961 "cbc(des)))", 2962 .cra_driver_name = "echainiv-authenc-" 2963 "hmac-sha384-cbc-des-caam", 2964 .cra_blocksize = DES_BLOCK_SIZE, 2965 }, 2966 .setkey = aead_setkey, 2967 .setauthsize = aead_setauthsize, 2968 .encrypt = aead_encrypt, 2969 .decrypt = aead_decrypt, 2970 .ivsize = DES_BLOCK_SIZE, 2971 .maxauthsize = SHA384_DIGEST_SIZE, 2972 }, 2973 .caam = { 2974 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2975 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2976 OP_ALG_AAI_HMAC_PRECOMP, 2977 .geniv = true, 2978 }, 2979 }, 2980 { 2981 .aead = { 2982 .base = { 2983 .cra_name = "authenc(hmac(sha512),cbc(des))", 2984 .cra_driver_name = "authenc-hmac-sha512-" 2985 "cbc-des-caam", 2986 .cra_blocksize = DES_BLOCK_SIZE, 2987 }, 2988 .setkey = aead_setkey, 2989 .setauthsize = aead_setauthsize, 2990 .encrypt = aead_encrypt, 2991 .decrypt = aead_decrypt, 2992 .ivsize = DES_BLOCK_SIZE, 2993 .maxauthsize = SHA512_DIGEST_SIZE, 2994 }, 2995 .caam = { 2996 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2997 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2998 OP_ALG_AAI_HMAC_PRECOMP, 2999 }, 3000 }, 3001 { 3002 .aead = { 3003 .base = { 3004 .cra_name = "echainiv(authenc(hmac(sha512)," 3005 "cbc(des)))", 3006 .cra_driver_name = "echainiv-authenc-" 3007 "hmac-sha512-cbc-des-caam", 3008 .cra_blocksize = DES_BLOCK_SIZE, 3009 }, 3010 .setkey = aead_setkey, 3011 .setauthsize = aead_setauthsize, 3012 .encrypt = aead_encrypt, 3013 .decrypt = aead_decrypt, 3014 .ivsize = DES_BLOCK_SIZE, 3015 .maxauthsize = SHA512_DIGEST_SIZE, 3016 }, 3017 .caam = { 3018 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 3019 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3020 OP_ALG_AAI_HMAC_PRECOMP, 3021 .geniv = true, 3022 }, 3023 }, 3024 { 3025 .aead = { 3026 .base = { 3027 .cra_name = "authenc(hmac(md5)," 3028 "rfc3686(ctr(aes)))", 3029 .cra_driver_name = "authenc-hmac-md5-" 3030 "rfc3686-ctr-aes-caam", 3031 .cra_blocksize = 1, 3032 }, 3033 .setkey = aead_setkey, 3034 .setauthsize = aead_setauthsize, 3035 .encrypt = aead_encrypt, 3036 .decrypt = aead_decrypt, 3037 .ivsize = CTR_RFC3686_IV_SIZE, 3038 .maxauthsize = MD5_DIGEST_SIZE, 3039 }, 3040 .caam = { 3041 .class1_alg_type = OP_ALG_ALGSEL_AES | 3042 OP_ALG_AAI_CTR_MOD128, 3043 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3044 OP_ALG_AAI_HMAC_PRECOMP, 3045 .rfc3686 = true, 3046 }, 3047 }, 3048 { 3049 .aead = { 3050 .base = { 3051 .cra_name = "seqiv(authenc(" 3052 "hmac(md5),rfc3686(ctr(aes))))", 3053 .cra_driver_name = "seqiv-authenc-hmac-md5-" 3054 "rfc3686-ctr-aes-caam", 3055 .cra_blocksize = 1, 3056 }, 3057 .setkey = aead_setkey, 3058 .setauthsize = aead_setauthsize, 3059 .encrypt = aead_encrypt, 3060 .decrypt = aead_decrypt, 3061 .ivsize = CTR_RFC3686_IV_SIZE, 3062 .maxauthsize = MD5_DIGEST_SIZE, 3063 }, 3064 .caam = { 3065 .class1_alg_type = OP_ALG_ALGSEL_AES | 3066 OP_ALG_AAI_CTR_MOD128, 3067 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3068 OP_ALG_AAI_HMAC_PRECOMP, 3069 .rfc3686 = true, 3070 .geniv = true, 3071 }, 3072 }, 3073 { 3074 .aead = { 3075 .base = { 3076 .cra_name = "authenc(hmac(sha1)," 3077 "rfc3686(ctr(aes)))", 3078 .cra_driver_name = "authenc-hmac-sha1-" 3079 "rfc3686-ctr-aes-caam", 3080 .cra_blocksize = 1, 3081 }, 3082 .setkey = aead_setkey, 3083 .setauthsize = aead_setauthsize, 3084 .encrypt = aead_encrypt, 3085 .decrypt = aead_decrypt, 3086 .ivsize = CTR_RFC3686_IV_SIZE, 3087 .maxauthsize = SHA1_DIGEST_SIZE, 3088 }, 3089 .caam = { 3090 .class1_alg_type = OP_ALG_ALGSEL_AES | 3091 OP_ALG_AAI_CTR_MOD128, 3092 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3093 OP_ALG_AAI_HMAC_PRECOMP, 3094 .rfc3686 = true, 3095 }, 3096 }, 3097 { 3098 .aead = { 3099 .base = { 3100 .cra_name = "seqiv(authenc(" 3101 "hmac(sha1),rfc3686(ctr(aes))))", 3102 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3103 "rfc3686-ctr-aes-caam", 3104 .cra_blocksize = 1, 3105 }, 3106 .setkey = aead_setkey, 3107 .setauthsize = aead_setauthsize, 3108 .encrypt = aead_encrypt, 3109 .decrypt = aead_decrypt, 3110 .ivsize = CTR_RFC3686_IV_SIZE, 3111 .maxauthsize = SHA1_DIGEST_SIZE, 3112 }, 3113 .caam = { 3114 .class1_alg_type = OP_ALG_ALGSEL_AES | 3115 OP_ALG_AAI_CTR_MOD128, 3116 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3117 OP_ALG_AAI_HMAC_PRECOMP, 3118 .rfc3686 = true, 3119 .geniv = true, 3120 }, 3121 }, 3122 { 3123 .aead = { 3124 .base = { 3125 .cra_name = "authenc(hmac(sha224)," 3126 "rfc3686(ctr(aes)))", 3127 .cra_driver_name = "authenc-hmac-sha224-" 3128 "rfc3686-ctr-aes-caam", 3129 .cra_blocksize = 1, 3130 }, 3131 .setkey = aead_setkey, 3132 .setauthsize = aead_setauthsize, 3133 .encrypt = aead_encrypt, 3134 .decrypt = aead_decrypt, 3135 .ivsize = CTR_RFC3686_IV_SIZE, 3136 .maxauthsize = SHA224_DIGEST_SIZE, 3137 }, 3138 .caam = { 3139 .class1_alg_type = OP_ALG_ALGSEL_AES | 3140 OP_ALG_AAI_CTR_MOD128, 3141 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3142 OP_ALG_AAI_HMAC_PRECOMP, 3143 .rfc3686 = true, 3144 }, 3145 }, 3146 { 3147 .aead = { 3148 .base = { 3149 .cra_name = "seqiv(authenc(" 3150 "hmac(sha224),rfc3686(ctr(aes))))", 3151 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 3152 "rfc3686-ctr-aes-caam", 3153 .cra_blocksize = 1, 3154 }, 3155 .setkey = aead_setkey, 3156 .setauthsize = aead_setauthsize, 3157 .encrypt = aead_encrypt, 3158 .decrypt = aead_decrypt, 3159 .ivsize = CTR_RFC3686_IV_SIZE, 3160 .maxauthsize = SHA224_DIGEST_SIZE, 3161 }, 3162 .caam = { 3163 .class1_alg_type = OP_ALG_ALGSEL_AES | 3164 OP_ALG_AAI_CTR_MOD128, 3165 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3166 OP_ALG_AAI_HMAC_PRECOMP, 3167 .rfc3686 = true, 3168 .geniv = true, 3169 }, 3170 }, 3171 { 3172 .aead = { 3173 .base = { 3174 .cra_name = "authenc(hmac(sha256)," 3175 "rfc3686(ctr(aes)))", 3176 .cra_driver_name = "authenc-hmac-sha256-" 3177 "rfc3686-ctr-aes-caam", 3178 .cra_blocksize = 1, 3179 }, 3180 .setkey = aead_setkey, 3181 .setauthsize = aead_setauthsize, 3182 .encrypt = aead_encrypt, 3183 .decrypt = aead_decrypt, 3184 .ivsize = CTR_RFC3686_IV_SIZE, 3185 .maxauthsize = SHA256_DIGEST_SIZE, 3186 }, 3187 .caam = { 3188 .class1_alg_type = OP_ALG_ALGSEL_AES | 3189 OP_ALG_AAI_CTR_MOD128, 3190 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3191 OP_ALG_AAI_HMAC_PRECOMP, 3192 .rfc3686 = true, 3193 }, 3194 }, 3195 { 3196 .aead = { 3197 .base = { 3198 .cra_name = "seqiv(authenc(hmac(sha256)," 3199 "rfc3686(ctr(aes))))", 3200 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 3201 "rfc3686-ctr-aes-caam", 3202 .cra_blocksize = 1, 3203 }, 3204 .setkey = aead_setkey, 3205 .setauthsize = aead_setauthsize, 3206 .encrypt = aead_encrypt, 3207 .decrypt = aead_decrypt, 3208 .ivsize = CTR_RFC3686_IV_SIZE, 3209 .maxauthsize = SHA256_DIGEST_SIZE, 3210 }, 3211 .caam = { 3212 .class1_alg_type = OP_ALG_ALGSEL_AES | 3213 OP_ALG_AAI_CTR_MOD128, 3214 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3215 OP_ALG_AAI_HMAC_PRECOMP, 3216 .rfc3686 = true, 3217 .geniv = true, 3218 }, 3219 }, 3220 { 3221 .aead = { 3222 .base = { 3223 .cra_name = "authenc(hmac(sha384)," 3224 "rfc3686(ctr(aes)))", 3225 .cra_driver_name = "authenc-hmac-sha384-" 3226 "rfc3686-ctr-aes-caam", 3227 .cra_blocksize = 1, 3228 }, 3229 .setkey = aead_setkey, 3230 .setauthsize = aead_setauthsize, 3231 .encrypt = aead_encrypt, 3232 .decrypt = aead_decrypt, 3233 .ivsize = CTR_RFC3686_IV_SIZE, 3234 .maxauthsize = SHA384_DIGEST_SIZE, 3235 }, 3236 .caam = { 3237 .class1_alg_type = OP_ALG_ALGSEL_AES | 3238 OP_ALG_AAI_CTR_MOD128, 3239 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3240 OP_ALG_AAI_HMAC_PRECOMP, 3241 .rfc3686 = true, 3242 }, 3243 }, 3244 { 3245 .aead = { 3246 .base = { 3247 .cra_name = "seqiv(authenc(hmac(sha384)," 3248 "rfc3686(ctr(aes))))", 3249 .cra_driver_name = "seqiv-authenc-hmac-sha384-" 3250 "rfc3686-ctr-aes-caam", 3251 .cra_blocksize = 1, 3252 }, 3253 .setkey = aead_setkey, 3254 .setauthsize = aead_setauthsize, 3255 .encrypt = aead_encrypt, 3256 .decrypt = aead_decrypt, 3257 .ivsize = CTR_RFC3686_IV_SIZE, 3258 .maxauthsize = SHA384_DIGEST_SIZE, 3259 }, 3260 .caam = { 3261 .class1_alg_type = OP_ALG_ALGSEL_AES | 3262 OP_ALG_AAI_CTR_MOD128, 3263 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3264 OP_ALG_AAI_HMAC_PRECOMP, 3265 .rfc3686 = true, 3266 .geniv = true, 3267 }, 3268 }, 3269 { 3270 .aead = { 3271 .base = { 3272 .cra_name = "authenc(hmac(sha512)," 3273 "rfc3686(ctr(aes)))", 3274 .cra_driver_name = "authenc-hmac-sha512-" 3275 "rfc3686-ctr-aes-caam", 3276 .cra_blocksize = 1, 3277 }, 3278 .setkey = aead_setkey, 3279 .setauthsize = aead_setauthsize, 3280 .encrypt = aead_encrypt, 3281 .decrypt = aead_decrypt, 3282 .ivsize = CTR_RFC3686_IV_SIZE, 3283 .maxauthsize = SHA512_DIGEST_SIZE, 3284 }, 3285 .caam = { 3286 .class1_alg_type = OP_ALG_ALGSEL_AES | 3287 OP_ALG_AAI_CTR_MOD128, 3288 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3289 OP_ALG_AAI_HMAC_PRECOMP, 3290 .rfc3686 = true, 3291 }, 3292 }, 3293 { 3294 .aead = { 3295 .base = { 3296 .cra_name = "seqiv(authenc(hmac(sha512)," 3297 "rfc3686(ctr(aes))))", 3298 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3299 "rfc3686-ctr-aes-caam", 3300 .cra_blocksize = 1, 3301 }, 3302 .setkey = aead_setkey, 3303 .setauthsize = aead_setauthsize, 3304 .encrypt = aead_encrypt, 3305 .decrypt = aead_decrypt, 3306 .ivsize = CTR_RFC3686_IV_SIZE, 3307 .maxauthsize = SHA512_DIGEST_SIZE, 3308 }, 3309 .caam = { 3310 .class1_alg_type = OP_ALG_ALGSEL_AES | 3311 OP_ALG_AAI_CTR_MOD128, 3312 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3313 OP_ALG_AAI_HMAC_PRECOMP, 3314 .rfc3686 = true, 3315 .geniv = true, 3316 }, 3317 }, 3318 { 3319 .aead = { 3320 .base = { 3321 .cra_name = "rfc7539(chacha20,poly1305)", 3322 .cra_driver_name = "rfc7539-chacha20-poly1305-" 3323 "caam", 3324 .cra_blocksize = 1, 3325 }, 3326 .setkey = chachapoly_setkey, 3327 .setauthsize = chachapoly_setauthsize, 3328 .encrypt = chachapoly_encrypt, 3329 .decrypt = chachapoly_decrypt, 3330 .ivsize = CHACHAPOLY_IV_SIZE, 3331 .maxauthsize = POLY1305_DIGEST_SIZE, 3332 }, 3333 .caam = { 3334 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | 3335 OP_ALG_AAI_AEAD, 3336 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 | 3337 OP_ALG_AAI_AEAD, 3338 .nodkp = true, 3339 }, 3340 }, 3341 { 3342 .aead = { 3343 .base = { 3344 .cra_name = "rfc7539esp(chacha20,poly1305)", 3345 .cra_driver_name = "rfc7539esp-chacha20-" 3346 "poly1305-caam", 3347 .cra_blocksize = 1, 3348 }, 3349 .setkey = chachapoly_setkey, 3350 .setauthsize = chachapoly_setauthsize, 3351 .encrypt = chachapoly_encrypt, 3352 .decrypt = chachapoly_decrypt, 3353 .ivsize = 8, 3354 .maxauthsize = POLY1305_DIGEST_SIZE, 3355 }, 3356 .caam = { 3357 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | 3358 OP_ALG_AAI_AEAD, 3359 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 | 3360 OP_ALG_AAI_AEAD, 3361 .nodkp = true, 3362 }, 3363 }, 3364 }; 3365 3366 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam, 3367 bool uses_dkp) 3368 { 3369 dma_addr_t dma_addr; 3370 struct caam_drv_private *priv; 3371 const size_t sh_desc_enc_offset = offsetof(struct caam_ctx, 3372 sh_desc_enc); 3373 3374 ctx->jrdev = caam_jr_alloc(); 3375 if (IS_ERR(ctx->jrdev)) { 3376 pr_err("Job Ring Device allocation for transform failed\n"); 3377 return PTR_ERR(ctx->jrdev); 3378 } 3379 3380 priv = dev_get_drvdata(ctx->jrdev->parent); 3381 if (priv->era >= 6 && uses_dkp) 3382 ctx->dir = DMA_BIDIRECTIONAL; 3383 else 3384 ctx->dir = DMA_TO_DEVICE; 3385 3386 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3387 offsetof(struct caam_ctx, 3388 sh_desc_enc_dma) - 3389 sh_desc_enc_offset, 3390 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3391 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3392 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3393 caam_jr_free(ctx->jrdev); 3394 return -ENOMEM; 3395 } 3396 3397 ctx->sh_desc_enc_dma = dma_addr; 3398 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3399 sh_desc_dec) - 3400 sh_desc_enc_offset; 3401 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) - 3402 sh_desc_enc_offset; 3403 3404 /* copy descriptor header template value */ 3405 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; 3406 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; 3407 3408 return 0; 3409 } 3410 3411 static int caam_cra_init(struct crypto_skcipher *tfm) 3412 { 3413 struct skcipher_alg *alg = crypto_skcipher_alg(tfm); 3414 struct caam_skcipher_alg *caam_alg = 3415 container_of(alg, typeof(*caam_alg), skcipher); 3416 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm); 3417 u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3418 int ret = 0; 3419 3420 ctx->enginectx.op.do_one_request = skcipher_do_one_req; 3421 3422 if (alg_aai == OP_ALG_AAI_XTS) { 3423 const char *tfm_name = crypto_tfm_alg_name(&tfm->base); 3424 struct crypto_skcipher *fallback; 3425 3426 fallback = crypto_alloc_skcipher(tfm_name, 0, 3427 CRYPTO_ALG_NEED_FALLBACK); 3428 if (IS_ERR(fallback)) { 3429 pr_err("Failed to allocate %s fallback: %ld\n", 3430 tfm_name, PTR_ERR(fallback)); 3431 return PTR_ERR(fallback); 3432 } 3433 3434 ctx->fallback = fallback; 3435 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) + 3436 crypto_skcipher_reqsize(fallback)); 3437 } else { 3438 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx)); 3439 } 3440 3441 ret = caam_init_common(ctx, &caam_alg->caam, false); 3442 if (ret && ctx->fallback) 3443 crypto_free_skcipher(ctx->fallback); 3444 3445 return ret; 3446 } 3447 3448 static int caam_aead_init(struct crypto_aead *tfm) 3449 { 3450 struct aead_alg *alg = crypto_aead_alg(tfm); 3451 struct caam_aead_alg *caam_alg = 3452 container_of(alg, struct caam_aead_alg, aead); 3453 struct caam_ctx *ctx = crypto_aead_ctx_dma(tfm); 3454 3455 crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx)); 3456 3457 ctx->enginectx.op.do_one_request = aead_do_one_req; 3458 3459 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp); 3460 } 3461 3462 static void caam_exit_common(struct caam_ctx *ctx) 3463 { 3464 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, 3465 offsetof(struct caam_ctx, sh_desc_enc_dma) - 3466 offsetof(struct caam_ctx, sh_desc_enc), 3467 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3468 caam_jr_free(ctx->jrdev); 3469 } 3470 3471 static void caam_cra_exit(struct crypto_skcipher *tfm) 3472 { 3473 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm); 3474 3475 if (ctx->fallback) 3476 crypto_free_skcipher(ctx->fallback); 3477 caam_exit_common(ctx); 3478 } 3479 3480 static void caam_aead_exit(struct crypto_aead *tfm) 3481 { 3482 caam_exit_common(crypto_aead_ctx_dma(tfm)); 3483 } 3484 3485 void caam_algapi_exit(void) 3486 { 3487 int i; 3488 3489 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3490 struct caam_aead_alg *t_alg = driver_aeads + i; 3491 3492 if (t_alg->registered) 3493 crypto_unregister_aead(&t_alg->aead); 3494 } 3495 3496 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3497 struct caam_skcipher_alg *t_alg = driver_algs + i; 3498 3499 if (t_alg->registered) 3500 crypto_unregister_skcipher(&t_alg->skcipher); 3501 } 3502 } 3503 3504 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg) 3505 { 3506 struct skcipher_alg *alg = &t_alg->skcipher; 3507 3508 alg->base.cra_module = THIS_MODULE; 3509 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3510 alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding(); 3511 alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY | 3512 CRYPTO_ALG_KERN_DRIVER_ONLY); 3513 3514 alg->init = caam_cra_init; 3515 alg->exit = caam_cra_exit; 3516 } 3517 3518 static void caam_aead_alg_init(struct caam_aead_alg *t_alg) 3519 { 3520 struct aead_alg *alg = &t_alg->aead; 3521 3522 alg->base.cra_module = THIS_MODULE; 3523 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3524 alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding(); 3525 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY | 3526 CRYPTO_ALG_KERN_DRIVER_ONLY; 3527 3528 alg->init = caam_aead_init; 3529 alg->exit = caam_aead_exit; 3530 } 3531 3532 int caam_algapi_init(struct device *ctrldev) 3533 { 3534 struct caam_drv_private *priv = dev_get_drvdata(ctrldev); 3535 int i = 0, err = 0; 3536 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst; 3537 unsigned int md_limit = SHA512_DIGEST_SIZE; 3538 bool registered = false, gcm_support; 3539 3540 /* 3541 * Register crypto algorithms the device supports. 3542 * First, detect presence and attributes of DES, AES, and MD blocks. 3543 */ 3544 if (priv->era < 10) { 3545 u32 cha_vid, cha_inst, aes_rn; 3546 3547 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3548 aes_vid = cha_vid & CHA_ID_LS_AES_MASK; 3549 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3550 3551 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3552 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> 3553 CHA_ID_LS_DES_SHIFT; 3554 aes_inst = cha_inst & CHA_ID_LS_AES_MASK; 3555 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3556 ccha_inst = 0; 3557 ptha_inst = 0; 3558 3559 aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) & 3560 CHA_ID_LS_AES_MASK; 3561 gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8); 3562 } else { 3563 u32 aesa, mdha; 3564 3565 aesa = rd_reg32(&priv->ctrl->vreg.aesa); 3566 mdha = rd_reg32(&priv->ctrl->vreg.mdha); 3567 3568 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3569 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3570 3571 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK; 3572 aes_inst = aesa & CHA_VER_NUM_MASK; 3573 md_inst = mdha & CHA_VER_NUM_MASK; 3574 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK; 3575 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK; 3576 3577 gcm_support = aesa & CHA_VER_MISC_AES_GCM; 3578 } 3579 3580 /* If MD is present, limit digest size based on LP256 */ 3581 if (md_inst && md_vid == CHA_VER_VID_MD_LP256) 3582 md_limit = SHA256_DIGEST_SIZE; 3583 3584 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3585 struct caam_skcipher_alg *t_alg = driver_algs + i; 3586 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK; 3587 3588 /* Skip DES algorithms if not supported by device */ 3589 if (!des_inst && 3590 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3591 (alg_sel == OP_ALG_ALGSEL_DES))) 3592 continue; 3593 3594 /* Skip AES algorithms if not supported by device */ 3595 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3596 continue; 3597 3598 /* 3599 * Check support for AES modes not available 3600 * on LP devices. 3601 */ 3602 if (aes_vid == CHA_VER_VID_AES_LP && 3603 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) == 3604 OP_ALG_AAI_XTS) 3605 continue; 3606 3607 caam_skcipher_alg_init(t_alg); 3608 3609 err = crypto_register_skcipher(&t_alg->skcipher); 3610 if (err) { 3611 pr_warn("%s alg registration failed\n", 3612 t_alg->skcipher.base.cra_driver_name); 3613 continue; 3614 } 3615 3616 t_alg->registered = true; 3617 registered = true; 3618 } 3619 3620 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3621 struct caam_aead_alg *t_alg = driver_aeads + i; 3622 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3623 OP_ALG_ALGSEL_MASK; 3624 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3625 OP_ALG_ALGSEL_MASK; 3626 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3627 3628 /* Skip DES algorithms if not supported by device */ 3629 if (!des_inst && 3630 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3631 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3632 continue; 3633 3634 /* Skip AES algorithms if not supported by device */ 3635 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3636 continue; 3637 3638 /* Skip CHACHA20 algorithms if not supported by device */ 3639 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst) 3640 continue; 3641 3642 /* Skip POLY1305 algorithms if not supported by device */ 3643 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst) 3644 continue; 3645 3646 /* Skip GCM algorithms if not supported by device */ 3647 if (c1_alg_sel == OP_ALG_ALGSEL_AES && 3648 alg_aai == OP_ALG_AAI_GCM && !gcm_support) 3649 continue; 3650 3651 /* 3652 * Skip algorithms requiring message digests 3653 * if MD or MD size is not supported by device. 3654 */ 3655 if (is_mdha(c2_alg_sel) && 3656 (!md_inst || t_alg->aead.maxauthsize > md_limit)) 3657 continue; 3658 3659 caam_aead_alg_init(t_alg); 3660 3661 err = crypto_register_aead(&t_alg->aead); 3662 if (err) { 3663 pr_warn("%s alg registration failed\n", 3664 t_alg->aead.base.cra_driver_name); 3665 continue; 3666 } 3667 3668 t_alg->registered = true; 3669 registered = true; 3670 } 3671 3672 if (registered) 3673 pr_info("caam algorithms registered in /proc/crypto\n"); 3674 3675 return err; 3676 } 3677