// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	bool aead;

	__le32 key[8];
	unsigned int key_len;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	bool needs_inv;
};

static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
	struct safexcel_token *token;
	unsigned offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			offset = DES_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, DES_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			offset = DES3_EDE_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, DES3_EDE_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;

		case SAFEXCEL_AES:
			offset = AES_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		}
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = length;
	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
			EIP197_TOKEN_STAT_LAST_HASH;
	token[0].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_OUTPUT;
}

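/*
 * Build the token sequence for an AEAD request: hash the associated data,
 * process (and hash) the payload, then either insert the computed digest
 * (encryption) or retrieve and verify it (decryption).
 */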
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;
	unsigned offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		offset = AES_BLOCK_SIZE / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);

		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = assoclen;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[1].packet_length = cryptlen;
	token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
	token[1].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	if (direction == SAFEXCEL_ENCRYPT) {
		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[3].packet_length = digestsize |
					 EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}

static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = crypto_aes_expand_key(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

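/*
 * Set both the AES encryption key and the HMAC authentication key. The
 * authentication key is turned into precomputed ipad/opad digests via the
 * safexcel hash algorithms, and an invalidation is scheduled whenever a
 * cached context exists and either key changed.
 */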
static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->key))
		goto badkey;

	/* Encryption key */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
				    CRYPTO_TFM_RES_MASK);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size;

	if (ctx->aead) {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

		/* The decryption control type is a combination of the
		 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
		 * types.
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
	}

	cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
	cdesc->control_data.control1 |= ctx->mode;

	if (ctx->aead)
		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
						ctx->hash_alg;

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len);
			return -EINVAL;
		}
	}

	ctrl_size = ctx->key_len / sizeof(u32);
	if (ctx->aead)
		/* Take into account the ipad+opad digests */
		ctrl_size += ctx->state_sz / sizeof(u32) * 2;
	cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

	return 0;
}

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	do {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	} while (!rdesc->last_seg);

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, cryptlen),
			     DMA_FROM_DEVICE);
	}

	*should_complete = true;

	return ndesc;
}

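/*
 * Map the source and destination scatterlists for DMA, copy the key (and the
 * ipad/opad digests for AEAD) into the context record, then build one command
 * descriptor per source segment and one result descriptor per destination
 * segment. Everything is rolled back if a ring runs out of space.
 */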
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen = cryptlen + assoclen;
	int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
	int i, ret = 0;

	if (src == dst) {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_BIDIRECTIONAL);
		nr_dst = nr_src;
		if (!nr_src)
			return -EINVAL;
	} else {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_TO_DEVICE);
		if (!nr_src)
			return -EINVAL;

		nr_dst = dma_map_sg(priv->dev, dst,
				    sg_nents_for_len(dst, totlen),
				    DMA_FROM_DEVICE);
		if (!nr_dst) {
			dma_unmap_sg(priv->dev, src,
				     sg_nents_for_len(src, totlen),
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (ctx->aead) {
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
		       ctx->opad, ctx->state_sz);
	}

	/* command descriptors */
	for_each_sg(src, sg, nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1) {
			safexcel_context_control(ctx, base, sreq, cdesc);
			if (ctx->aead)
				safexcel_aead_token(ctx, iv, cdesc,
						    sreq->direction, cryptlen,
						    assoclen, digestsize);
			else
				safexcel_skcipher_token(ctx, iv, cdesc,
							cryptlen);
		}

		queued -= len;
		if (!queued)
			break;
	}

	/* result descriptors */
	for_each_sg(dst, sg, nr_dst, i) {
		bool first = !i, last = (i == nr_dst - 1);
		u32 len = sg_dma_len(sg);

		rdesc = safexcel_add_rdesc(priv, ring, first, last,
					   sg_dma_address(sg), len);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first)
			first_rdesc = rdesc;
		n_rdesc++;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv,
					    &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, totlen),
			     DMA_FROM_DEVICE);
	}

	return ret;
}

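/*
 * Handle the result of a context invalidation request: either free the
 * context record when the transform is being torn down, or re-queue the
 * original request on a (possibly different) ring.
 */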
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	do {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	} while (!rdesc->last_seg);

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

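/*
 * Ring worker send entry points: issue either a cache invalidation command or
 * the actual crypto request, depending on the needs_inv flag set at queue
 * time.
 */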
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, req->iv,
					commands, results);
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	return ret;
}

static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

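/*
 * Common request queueing path: allocate a context record on first use,
 * schedule an invalidation if a cached context needs it, then enqueue the
 * request on the context's ring and kick the ring workqueue.
 */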
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir, u32 mode,
			      enum safexcel_cipher_alg alg)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->alg = alg;
	ctx->mode = mode;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
				  SAFEXCEL_AES);
}

static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
				  SAFEXCEL_AES);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_ecb_aes_encrypt,
		.decrypt = safexcel_ecb_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
				  SAFEXCEL_AES);
}

static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
				  SAFEXCEL_AES);
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_cbc_aes_encrypt,
		.decrypt = safexcel_cbc_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
				  SAFEXCEL_DES);
}

static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
				  SAFEXCEL_DES);
}

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];
	int ret;

	if (len != DES_KEY_SIZE) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ret = des_ekey(tmp, key);
	if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_cbc_des_encrypt,
		.decrypt = safexcel_cbc_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
				  SAFEXCEL_DES);
}

static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
				  SAFEXCEL_DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_ecb_des_encrypt,
		.decrypt = safexcel_ecb_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
				  SAFEXCEL_3DES);
}

static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
				  SAFEXCEL_3DES);
}

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				    const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int err;

	err = des3_verify_key(ctfm, key);
	if (unlikely(err))
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma) {
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}

	memcpy(ctx->key, key, len);

	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_cbc_des3_ede_encrypt,
		.decrypt = safexcel_cbc_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
				  SAFEXCEL_3DES);
}

static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
				  SAFEXCEL_3DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_ecb_des3_ede_encrypt,
		.decrypt = safexcel_ecb_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
				  CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
				  CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}

.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes", 1184 .cra_priority = 300, 1185 .cra_flags = CRYPTO_ALG_ASYNC | 1186 CRYPTO_ALG_KERN_DRIVER_ONLY, 1187 .cra_blocksize = AES_BLOCK_SIZE, 1188 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx), 1189 .cra_alignmask = 0, 1190 .cra_init = safexcel_aead_sha1_cra_init, 1191 .cra_exit = safexcel_aead_cra_exit, 1192 .cra_module = THIS_MODULE, 1193 }, 1194 }, 1195 }; 1196 1197 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm) 1198 { 1199 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm); 1200 1201 safexcel_aead_cra_init(tfm); 1202 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256; 1203 ctx->state_sz = SHA256_DIGEST_SIZE; 1204 return 0; 1205 } 1206 1207 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = { 1208 .type = SAFEXCEL_ALG_TYPE_AEAD, 1209 .engines = EIP97IES | EIP197B | EIP197D, 1210 .alg.aead = { 1211 .setkey = safexcel_aead_aes_setkey, 1212 .encrypt = safexcel_aead_encrypt, 1213 .decrypt = safexcel_aead_decrypt, 1214 .ivsize = AES_BLOCK_SIZE, 1215 .maxauthsize = SHA256_DIGEST_SIZE, 1216 .base = { 1217 .cra_name = "authenc(hmac(sha256),cbc(aes))", 1218 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes", 1219 .cra_priority = 300, 1220 .cra_flags = CRYPTO_ALG_ASYNC | 1221 CRYPTO_ALG_KERN_DRIVER_ONLY, 1222 .cra_blocksize = AES_BLOCK_SIZE, 1223 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx), 1224 .cra_alignmask = 0, 1225 .cra_init = safexcel_aead_sha256_cra_init, 1226 .cra_exit = safexcel_aead_cra_exit, 1227 .cra_module = THIS_MODULE, 1228 }, 1229 }, 1230 }; 1231 1232 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm) 1233 { 1234 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm); 1235 1236 safexcel_aead_cra_init(tfm); 1237 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224; 1238 ctx->state_sz = SHA256_DIGEST_SIZE; 1239 return 0; 1240 } 1241 1242 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = { 1243 .type = SAFEXCEL_ALG_TYPE_AEAD, 1244 .engines = EIP97IES | EIP197B | EIP197D, 1245 .alg.aead = { 1246 .setkey = safexcel_aead_aes_setkey, 1247 .encrypt = safexcel_aead_encrypt, 1248 .decrypt = safexcel_aead_decrypt, 1249 .ivsize = AES_BLOCK_SIZE, 1250 .maxauthsize = SHA224_DIGEST_SIZE, 1251 .base = { 1252 .cra_name = "authenc(hmac(sha224),cbc(aes))", 1253 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes", 1254 .cra_priority = 300, 1255 .cra_flags = CRYPTO_ALG_ASYNC | 1256 CRYPTO_ALG_KERN_DRIVER_ONLY, 1257 .cra_blocksize = AES_BLOCK_SIZE, 1258 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx), 1259 .cra_alignmask = 0, 1260 .cra_init = safexcel_aead_sha224_cra_init, 1261 .cra_exit = safexcel_aead_cra_exit, 1262 .cra_module = THIS_MODULE, 1263 }, 1264 }, 1265 }; 1266 1267 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm) 1268 { 1269 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm); 1270 1271 safexcel_aead_cra_init(tfm); 1272 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512; 1273 ctx->state_sz = SHA512_DIGEST_SIZE; 1274 return 0; 1275 } 1276 1277 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = { 1278 .type = SAFEXCEL_ALG_TYPE_AEAD, 1279 .engines = EIP97IES | EIP197B | EIP197D, 1280 .alg.aead = { 1281 .setkey = safexcel_aead_aes_setkey, 1282 .encrypt = safexcel_aead_encrypt, 1283 .decrypt = safexcel_aead_decrypt, 1284 .ivsize = AES_BLOCK_SIZE, 1285 .maxauthsize = SHA512_DIGEST_SIZE, 1286 .base = { 1287 .cra_name = "authenc(hmac(sha512),cbc(aes))", 1288 .cra_driver_name = 
"safexcel-authenc-hmac-sha512-cbc-aes", 1289 .cra_priority = 300, 1290 .cra_flags = CRYPTO_ALG_ASYNC | 1291 CRYPTO_ALG_KERN_DRIVER_ONLY, 1292 .cra_blocksize = AES_BLOCK_SIZE, 1293 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx), 1294 .cra_alignmask = 0, 1295 .cra_init = safexcel_aead_sha512_cra_init, 1296 .cra_exit = safexcel_aead_cra_exit, 1297 .cra_module = THIS_MODULE, 1298 }, 1299 }, 1300 }; 1301 1302 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm) 1303 { 1304 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm); 1305 1306 safexcel_aead_cra_init(tfm); 1307 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384; 1308 ctx->state_sz = SHA512_DIGEST_SIZE; 1309 return 0; 1310 } 1311 1312 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = { 1313 .type = SAFEXCEL_ALG_TYPE_AEAD, 1314 .engines = EIP97IES | EIP197B | EIP197D, 1315 .alg.aead = { 1316 .setkey = safexcel_aead_aes_setkey, 1317 .encrypt = safexcel_aead_encrypt, 1318 .decrypt = safexcel_aead_decrypt, 1319 .ivsize = AES_BLOCK_SIZE, 1320 .maxauthsize = SHA384_DIGEST_SIZE, 1321 .base = { 1322 .cra_name = "authenc(hmac(sha384),cbc(aes))", 1323 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes", 1324 .cra_priority = 300, 1325 .cra_flags = CRYPTO_ALG_ASYNC | 1326 CRYPTO_ALG_KERN_DRIVER_ONLY, 1327 .cra_blocksize = AES_BLOCK_SIZE, 1328 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx), 1329 .cra_alignmask = 0, 1330 .cra_init = safexcel_aead_sha384_cra_init, 1331 .cra_exit = safexcel_aead_cra_exit, 1332 .cra_module = THIS_MODULE, 1333 }, 1334 }, 1335 }; 1336