// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <crypto/internal/cipher.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
	struct safexcel_context base;

	u32 alg;
	u8 key_sz;
	bool cbcmac;
	bool do_fallback;
	bool fb_init_done;
	bool fb_do_setkey;

	struct crypto_aes_ctx *aes;
	struct crypto_ahash *fback;
	struct crypto_shash *shpre;
	struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
	bool last_req;
	bool finish;
	bool hmac;
	bool needs_inv;
	bool hmac_zlen;
	bool len_is_le;
	bool not_first;
	bool xcbcmac;

	int nents;
	dma_addr_t result_dma;

	u32 digest;

	u8 state_sz;	/* expected state size, only set once */
	u8 block_sz;	/* block size, only set once */
	u8 digest_sz;	/* output digest size, only set once */
	__le32 state[SHA3_512_BLOCK_SIZE /
		     sizeof(__le32)] __aligned(sizeof(__le32));

	u64 len;
	u64 processed;

	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
	dma_addr_t cache_dma;
	unsigned int cache_sz;

	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
	return req->len - req->processed;
}

static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
				u32 input_length, u32 result_length,
				bool cbcmac)
{
	struct safexcel_token *token =
		(struct safexcel_token *)cdesc->control_data.token;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = input_length;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

	input_length &= 15;
	if (unlikely(cbcmac && input_length)) {
		token[0].stat = 0;
		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[1].packet_length = 16 - input_length;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
		eip197_noop_token(&token[1]);
	}

	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
			EIP197_TOKEN_STAT_LAST_PACKET;
	token[2].packet_length = result_length;
	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

	eip197_noop_token(&token[3]);
}
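/*
 * Illustration of the token program built above (this only restates the
 * code, it is not taken from vendor documentation): for a CBC-MAC request
 * whose input is not a multiple of the 16-byte AES block - say 20 bytes -
 * token[0] hashes the 20 input bytes, token[1] inserts 16 - (20 & 15) = 12
 * bytes of zero padding, and token[2] writes the digest back. In all other
 * cases token[1] is a no-op and token[0] already carries the LAST_HASH flag.
 */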
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
				     struct safexcel_ahash_req *req,
				     struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	u64 count = 0;

	cdesc->control_data.control0 = ctx->alg;
	cdesc->control_data.control1 = 0;

	/*
	 * Copy the input digest if needed, and setup the context
	 * fields. Do this now as we need it to setup the first command
	 * descriptor.
	 */
	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
		if (req->xcbcmac)
			memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
		else
			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

		if (!req->finish && req->xcbcmac)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		return;
	} else if (!req->processed) {
		/* First - and possibly only - block of basic hash only */
		if (req->finish)
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		else
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		return;
	}

	/* Hash continuation or HMAC, setup (inner) digest from state */
	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

	if (req->finish) {
		/* Compute digest count for hash/HMAC finish operations */
		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    req->hmac_zlen || (req->processed != req->block_sz)) {
			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

			/* This is a hardware limitation, as the
			 * counter must fit into a u32. This represents
			 * a fairly big amount of input data, so we
			 * shouldn't see this.
			 */
			if (unlikely(count & 0xffffffff00000000ULL)) {
				dev_warn(priv->dev,
					 "Input data is too big\n");
				return;
			}
		}

		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    /* Special case: zero length HMAC */
		    req->hmac_zlen ||
		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
		    (req->processed != req->block_sz)) {
			/* Basic hash continue operation, need digest + cnt */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
			/* For zero-len HMAC, don't finalize, already padded! */
			if (req->hmac_zlen)
				cdesc->control_data.control0 |=
					CONTEXT_CONTROL_NO_FINISH_HASH;
			cdesc->control_data.control1 |=
				CONTEXT_CONTROL_DIGEST_CNT;
			ctx->base.ctxr->data[req->state_sz >> 2] =
				cpu_to_le32(count);
			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

			/* Clear zero-length HMAC flag for next operation! */
			req->hmac_zlen = false;
		} else { /* HMAC */
			/* Need outer digest for HMAC finalization */
			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
			       &ctx->base.opad, req->state_sz);

			/* Single pass HMAC - no digest count */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_HMAC;
		}
	} else { /* Hash continuation, do not finish yet */
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
			CONTEXT_CONTROL_TYPE_HASH_OUT |
			CONTEXT_CONTROL_NO_FINISH_HASH;
	}
}
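/*
 * Worked example for the digest count above (derived from the code, with
 * EIP197_COUNTER_BLOCK_SIZE assumed to be the usual 64-byte hash block
 * unit): a continuation that has already processed 128 bytes programs a
 * count of 2, and the dev_warn() limit is only reached once the count no
 * longer fits in 32 bits, i.e. after roughly 2^32 blocks of prior input.
 */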
static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_req *sreq = ahash_request_ctx_dma(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	u64 cache_len;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: result: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (sreq->nents) {
		dma_unmap_sg(priv->dev, areq->src,
			     sg_nents_for_len(areq->src, areq->nbytes),
			     DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}

	if (sreq->finish) {
		if (sreq->hmac &&
		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
			/* Faking HMAC using hash - need to do outer hash */
			memcpy(sreq->cache, sreq->state,
			       crypto_ahash_digestsize(ahash));

			memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);

			sreq->len = sreq->block_sz +
				    crypto_ahash_digestsize(ahash);
			sreq->processed = sreq->block_sz;
			sreq->hmac = 0;

			if (priv->flags & EIP197_TRC_CACHE)
				ctx->base.needs_inv = true;
			areq->nbytes = 0;
			safexcel_ahash_enqueue(areq);

			*should_complete = false; /* Not done yet */
			return 1;
		}

		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
			/* Undo final XOR with 0xffffffff ...*/
			*(__le32 *)areq->result = ~sreq->state[0];
		} else {
			memcpy(areq->result, sreq->state,
			       crypto_ahash_digestsize(ahash));
		}
	}

	cache_len = safexcel_queued_len(sreq);
	if (cache_len)
		memcpy(sreq->cache, sreq->cache_next, cache_len);

	*should_complete = true;

	return 1;
}

static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
				   int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	struct safexcel_token *dmmy;
	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
	u64 queued, len;

	queued = safexcel_queued_len(req);
	if (queued <= HASH_CACHE_SIZE)
		cache_len = queued;
	else
		cache_len = queued - areq->nbytes;

	if (!req->finish && !req->last_req) {
		/* If this is not the last request and the queued data does not
		 * fit into full cache blocks, cache it for the next send call.
		 */
		extra = queued & (HASH_CACHE_SIZE - 1);

		/* If this is not the last request and the queued data
		 * is a multiple of a block, cache the last one for now.
		 */
		if (!extra)
			extra = HASH_CACHE_SIZE;

		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache_next, extra,
				   areq->nbytes - extra);

		queued -= extra;

		if (!queued) {
			*commands = 0;
			*results = 0;
			return 0;
		}

		extra = 0;
	}

	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
			/*
			 * Cache contains less than 1 full block, complete.
			 */
			extra = AES_BLOCK_SIZE - cache_len;
			if (queued > cache_len) {
				/* More data follows: borrow bytes */
				u64 tmp = queued - cache_len;

				skip = min_t(u64, tmp, extra);
				sg_pcopy_to_buffer(areq->src,
						   sg_nents(areq->src),
						   req->cache + cache_len,
						   skip, 0);
			}
			extra -= skip;
			memset(req->cache + cache_len + skip, 0, extra);
			if (!ctx->cbcmac && extra) {
				// 10- padding for XCBCMAC & CMAC
				req->cache[cache_len + skip] = 0x80;
				// HW will use K2 instead of K3 - compensate!
				for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
					u32 *cache = (void *)req->cache;
					u32 *ipad = ctx->base.ipad.word;
					u32 x;

					x = ipad[i] ^ ipad[i + 4];
					cache[i] ^= swab32(x);
				}
			}
			cache_len = AES_BLOCK_SIZE;
			queued = queued + extra;
		}

		/* XCBC continue: XOR previous result into 1st word */
		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
	}

	len = queued;
	/* Add a command descriptor for the cached data, if any */
	if (cache_len) {
		req->cache_dma = dma_map_single(priv->dev, req->cache,
						cache_len, DMA_TO_DEVICE);
		if (dma_mapping_error(priv->dev, req->cache_dma))
			return -EINVAL;

		req->cache_sz = cache_len;
		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
						 (cache_len == len),
						 req->cache_dma, cache_len,
						 len, ctx->base.ctxr_dma,
						 &dmmy);
		if (IS_ERR(first_cdesc)) {
			ret = PTR_ERR(first_cdesc);
			goto unmap_cache;
		}
		n_cdesc++;

		queued -= cache_len;
		if (!queued)
			goto send_command;
	}

	/* Now handle the current ahash request buffer(s) */
	req->nents = dma_map_sg(priv->dev, areq->src,
				sg_nents_for_len(areq->src,
						 areq->nbytes),
				DMA_TO_DEVICE);
	if (!req->nents) {
		ret = -ENOMEM;
		goto cdesc_rollback;
	}

	for_each_sg(areq->src, sg, req->nents, i) {
		int sglen = sg_dma_len(sg);

		if (unlikely(sglen <= skip)) {
			skip -= sglen;
			continue;
		}

		/* Do not overflow the request */
		if ((queued + skip) <= sglen)
			sglen = queued;
		else
			sglen -= skip;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - sglen),
					   sg_dma_address(sg) + skip, sglen,
					   len, ctx->base.ctxr_dma, &dmmy);
		if (IS_ERR(cdesc)) {
			ret = PTR_ERR(cdesc);
			goto unmap_sg;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;
		n_cdesc++;

		queued -= sglen;
		if (!queued)
			break;
		skip = 0;
	}

send_command:
	/* Setup the context options */
	safexcel_context_control(ctx, req, first_cdesc);

	/* Add the token */
	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
					 DMA_FROM_DEVICE);
	if (dma_mapping_error(priv->dev, req->result_dma)) {
		ret = -EINVAL;
		goto unmap_sg;
	}

	/* Add a result descriptor */
	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
				   req->digest_sz);
	if (IS_ERR(rdesc)) {
		ret = PTR_ERR(rdesc);
		goto unmap_result;
	}

	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

	req->processed += len - extra;

	*commands = n_cdesc;
	*results = 1;
	return 0;

unmap_result:
	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
			 DMA_FROM_DEVICE);
unmap_sg:
	if (req->nents) {
		dma_unmap_sg(priv->dev, areq->src,
			     sg_nents_for_len(areq->src, areq->nbytes),
			     DMA_TO_DEVICE);
		req->nents = 0;
	}
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
	if (req->cache_dma) {
		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
				 DMA_TO_DEVICE);
		req->cache_dma = 0;
		req->cache_sz = 0;
	}

	return ret;
}
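/*
 * Descriptor accounting sketch for the function above (an illustration of
 * what the code already does, not additional driver logic): a send with,
 * for instance, cached bytes in req->cache plus a source scatterlist of two
 * segments produces three command descriptors - one for the cache and one
 * per segment - and always exactly one result descriptor for the digest,
 * which is what *commands and *results report back to the ring handler.
 */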
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	int enq_ret;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: invalidate: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;
		return 1;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
				  struct crypto_async_request *async,
				  bool *should_complete, int *ret)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	int err;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

	if (req->needs_inv) {
		req->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
				   int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	int ret;

	ret = safexcel_invalidate_cache(async, ctx->base.priv,
					ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
			       int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	int ret;

	if (req->needs_inv)
		ret = safexcel_ahash_send_inv(async, ring, commands, results);
	else
		ret = safexcel_ahash_send_req(async, ring, commands, results);

	return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
	struct safexcel_ahash_req *rctx = ahash_request_ctx_dma(req);
	DECLARE_CRYPTO_WAIT(result);
	int ring = ctx->base.ring;
	int err;

	memset(req, 0, EIP197_AHASH_REQ_SIZE);

	/* create invalidation request */
	init_completion(&result.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &result);

	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
	ctx = crypto_tfm_ctx(req->base.tfm);
	ctx->base.exit_inv = true;
	rctx->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	err = crypto_wait_req(-EINPROGRESS, &result);

	if (err) {
		dev_warn(priv->dev, "hash: completion error (%d)\n", err);
		return err;
	}

	return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, aka. when there is at least 1 block size in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	u64 cache_len;

	/* cache_len: everything accepted by the driver but not sent yet,
	 * tot sz handled by update() - last req sz - tot sz handled by send()
	 */
	cache_len = safexcel_queued_len(req);

	/*
	 * In case there aren't enough bytes to proceed (less than a
	 * block size), cache the data until we have enough.
	 */
	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache + cache_len,
				   areq->nbytes, 0);
		return 0;
	}

	/* We couldn't cache all the data */
	return -E2BIG;
}
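/*
 * Example of the threshold above (restating the code): an update() small
 * enough that cache_len + nbytes stays within HASH_CACHE_SIZE is simply
 * copied into req->cache and returns 0; once the sum would exceed the
 * cache, -E2BIG is returned and safexcel_ahash_update() queues the request
 * to the engine instead of caching it.
 */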
static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	req->needs_inv = false;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
		    /* invalidate for *any* non-XCBC continuation */
		    ((req->not_first && !req->xcbcmac) ||
		     /* invalidate if (i)digest changed */
		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
		     /* invalidate for HMAC finish with odigest changed */
		     (req->finish && req->hmac &&
		      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
			     &ctx->base.opad, req->state_sz))))
			/*
			 * We're still setting needs_inv here, even though it is
			 * cleared right away, because the needs_inv flag can be
			 * set in other functions and we want to keep the same
			 * logic.
			 */
			ctx->base.needs_inv = true;

		if (ctx->base.needs_inv) {
			ctx->base.needs_inv = false;
			req->needs_inv = true;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(areq->base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
	req->not_first = true;

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	int ret;

	/* If the request is 0 length, do nothing */
	if (!areq->nbytes)
		return 0;

	/* Add request to the cache if it fits */
	ret = safexcel_ahash_cache(areq);

	/* Update total request length */
	req->len += areq->nbytes;

	/* If not all data could fit into the cache, go process the excess.
	 * Also go process immediately for an HMAC IV precompute, which
	 * will never be finished at all, but needs to be processed anyway.
	 */
	if ((ret && !req->finish) || req->last_req)
		return safexcel_ahash_enqueue(areq);

	return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

	req->finish = true;

	if (unlikely(!req->len && !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *hash* request:
		 * The HW cannot do 0 length hash, so we provide the correct
		 * result directly here.
		 */
		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
			memcpy(areq->result, md5_zero_message_hash,
			       MD5_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
			memcpy(areq->result, sha1_zero_message_hash,
			       SHA1_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
			memcpy(areq->result, sha224_zero_message_hash,
			       SHA224_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
			memcpy(areq->result, sha256_zero_message_hash,
			       SHA256_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
			memcpy(areq->result, sha384_zero_message_hash,
			       SHA384_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
			memcpy(areq->result, sha512_zero_message_hash,
			       SHA512_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
			memcpy(areq->result,
			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
		}

		return 0;
	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
			    req->len == sizeof(u32) && !areq->nbytes)) {
		/* Zero length CRC32 */
		memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
		return 0;
	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length CBC MAC */
		memset(areq->result, 0, AES_BLOCK_SIZE);
		return 0;
	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length (X)CBC/CMAC */
		int i;

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			u32 *result = (void *)areq->result;

			/* K3 */
			result[i] = swab32(ctx->base.ipad.word[i + 4]);
		}
		areq->result[0] ^= 0x80; // 10- padding
		aes_encrypt(ctx->aes, areq->result, areq->result);
		return 0;
	} else if (unlikely(req->hmac &&
			    (req->len == req->block_sz) &&
			    !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *HMAC* request:
		 * For HMAC, we need to finalize the inner digest
		 * and then perform the outer hash.
		 */

		/* generate pad block in the cache */
		/* start with a hash block of all zeroes */
		memset(req->cache, 0, req->block_sz);
		/* set the first byte to 0x80 to 'append a 1 bit' */
		req->cache[0] = 0x80;
		/* add the length in bits in the last 2 bytes */
		if (req->len_is_le) {
			/* Little endian length word (e.g. MD5) */
			req->cache[req->block_sz-8] = (req->block_sz << 3) &
						      255;
			req->cache[req->block_sz-7] = (req->block_sz >> 5);
		} else {
			/* Big endian length word (e.g. any SHA) */
			req->cache[req->block_sz-2] = (req->block_sz >> 5);
			req->cache[req->block_sz-1] = (req->block_sz << 3) &
						      255;
		}

		req->len += req->block_sz; /* plus 1 hash block */

		/* Set special zero-length HMAC flag */
		req->hmac_zlen = true;

		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	} else if (req->hmac) {
		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	}

	return safexcel_ahash_enqueue(areq);
}

static int safexcel_ahash_finup(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	req->finish = true;

	safexcel_ahash_update(areq);
	return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	struct safexcel_ahash_export_state *export = out;

	export->len = req->len;
	export->processed = req->processed;

	export->digest = req->digest;

	memcpy(export->state, req->state, req->state_sz);
	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

	return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	const struct safexcel_ahash_export_state *export = in;
	int ret;

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req->len = export->len;
	req->processed = export->processed;

	req->digest = export->digest;

	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
	memcpy(req->state, export->state, req->state_sz);

	return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(__crypto_ahash_alg(tfm->__crt_alg),
			     struct safexcel_alg_template, alg.ahash);

	ctx->base.priv = tmpl->priv;
	ctx->base.send = safexcel_ahash_send;
	ctx->base.handle_result = safexcel_handle_result;
	ctx->fb_do_setkey = false;

	crypto_ahash_set_reqsize_dma(__crypto_ahash_cast(tfm),
				     sizeof(struct safexcel_ahash_req));
	return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_ahash_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha1_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "safexcel-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA1_BLOCK_SIZE;
	req->processed = SHA1_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

static int safexcel_hmac_init_pad(struct ahash_request *areq,
				  unsigned int blocksize, const u8 *key,
				  unsigned int keylen, u8 *ipad, u8 *opad)
{
	DECLARE_CRYPTO_WAIT(result);
	struct scatterlist sg;
	int ret, i;
	u8 *keydup;

	if (keylen <= blocksize) {
		memcpy(ipad, key, keylen);
	} else {
		keydup = kmemdup(key, keylen, GFP_KERNEL);
		if (!keydup)
			return -ENOMEM;

		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &result);
		sg_init_one(&sg, keydup, keylen);
		ahash_request_set_crypt(areq, &sg, ipad, keylen);

		ret = crypto_ahash_digest(areq);
		ret = crypto_wait_req(ret, &result);

		/* Avoid leaking */
		kfree_sensitive(keydup);

		if (ret)
			return ret;

		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
	}

	memset(ipad + keylen, 0, blocksize - keylen);
	memcpy(opad, ipad, blocksize);

	for (i = 0; i < blocksize; i++) {
		ipad[i] ^= HMAC_IPAD_VALUE;
		opad[i] ^= HMAC_OPAD_VALUE;
	}

	return 0;
}
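/*
 * RFC 2104 recap of what the helper above prepares (HMAC_IPAD_VALUE and
 * HMAC_OPAD_VALUE are the standard 0x36/0x5c constants): a key longer than
 * the block size is first hashed, the (possibly hashed) key is zero-padded
 * to one block, and ipad/opad are that padded key XORed with 0x36 and 0x5c
 * respectively. safexcel_hmac_init_iv() below then runs one block of each
 * through the hash to obtain the precomputed inner and outer digests the
 * engine starts from.
 */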
static int safexcel_hmac_init_iv(struct ahash_request *areq,
				 unsigned int blocksize, u8 *pad, void *state)
{
	struct safexcel_ahash_req *req;
	DECLARE_CRYPTO_WAIT(result);
	struct scatterlist sg;
	int ret;

	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &result);
	sg_init_one(&sg, pad, blocksize);
	ahash_request_set_crypt(areq, &sg, pad, blocksize);

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req = ahash_request_ctx_dma(areq);
	req->hmac = true;
	req->last_req = true;

	ret = crypto_ahash_update(areq);
	ret = crypto_wait_req(ret, &result);

	return ret ?: crypto_ahash_export(areq, state);
}

static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
				  unsigned int keylen,
				  void *istate, void *ostate)
{
	struct ahash_request *areq;
	struct crypto_ahash *tfm;
	unsigned int blocksize;
	u8 *ipad, *opad;
	int ret;

	tfm = crypto_alloc_ahash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	areq = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!areq) {
		ret = -ENOMEM;
		goto free_ahash;
	}

	crypto_ahash_clear_flags(tfm, ~0);
	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	ipad = kcalloc(2, blocksize, GFP_KERNEL);
	if (!ipad) {
		ret = -ENOMEM;
		goto free_request;
	}

	opad = ipad + blocksize;

	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
	kfree(ipad);
free_request:
	ahash_request_free(areq);
free_ahash:
	crypto_free_ahash(tfm);

	return ret;
}

int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
			 unsigned int keylen, const char *alg,
			 unsigned int state_sz)
{
	struct safexcel_crypto_priv *priv = base->priv;
	struct safexcel_ahash_export_state istate, ostate;
	int ret;

	ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
	    (memcmp(&base->ipad, istate.state, state_sz) ||
	     memcmp(&base->opad, ostate.state, state_sz)))
		base->needs_inv = true;

	memcpy(&base->ipad, &istate.state, state_sz);
	memcpy(&base->opad, &ostate.state, state_sz);

	return 0;
}

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen, const char *alg,
				    unsigned int state_sz)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
					SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_hmac_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha1_digest,
		.setkey = safexcel_hmac_sha1_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "safexcel-hmac-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha256_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "safexcel-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha224_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "safexcel-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha224_digest,
		.setkey = safexcel_hmac_sha224_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "safexcel-hmac-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha256_digest,
		.setkey = safexcel_hmac_sha256_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "safexcel-hmac-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha512_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "safexcel-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;

	return 0;
}

static int safexcel_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = MD5_HMAC_BLOCK_SIZE;
	req->processed = MD5_HMAC_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;
	req->len_is_le = true; /* MD5 is little endian! ... */
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}

static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret = safexcel_ahash_cra_init(tfm);

	/* Default 'key' is all zeroes */
	memset(&ctx->base.ipad, 0, sizeof(u32));
	return ret;
}

static int safexcel_crc32_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded key */
	req->state[0] = cpu_to_le32(~ctx->base.ipad.word[0]);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = sizeof(u32);
	req->processed = sizeof(u32);

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = sizeof(u32);
	req->digest_sz = sizeof(u32);
	req->block_sz = sizeof(u32);

	return 0;
}

static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));

	if (keylen != sizeof(u32))
		return -EINVAL;

	memcpy(&ctx->base.ipad, key, sizeof(u32));
	return 0;
}
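/*
 * Keying convention used above (as implemented here): the 4-byte "key" is
 * the initial CRC value. init() loads ~key into state[0] and
 * safexcel_handle_req_result() undoes the final XOR with ~state[0], so the
 * default all-zeroes key yields the standard CRC32 starting value of
 * 0xffffffff.
 */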
safexcel_crc32_digest(struct ahash_request *areq) 1933 { 1934 return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq); 1935 } 1936 1937 struct safexcel_alg_template safexcel_alg_crc32 = { 1938 .type = SAFEXCEL_ALG_TYPE_AHASH, 1939 .algo_mask = 0, 1940 .alg.ahash = { 1941 .init = safexcel_crc32_init, 1942 .update = safexcel_ahash_update, 1943 .final = safexcel_ahash_final, 1944 .finup = safexcel_ahash_finup, 1945 .digest = safexcel_crc32_digest, 1946 .setkey = safexcel_crc32_setkey, 1947 .export = safexcel_ahash_export, 1948 .import = safexcel_ahash_import, 1949 .halg = { 1950 .digestsize = sizeof(u32), 1951 .statesize = sizeof(struct safexcel_ahash_export_state), 1952 .base = { 1953 .cra_name = "crc32", 1954 .cra_driver_name = "safexcel-crc32", 1955 .cra_priority = SAFEXCEL_CRA_PRIORITY, 1956 .cra_flags = CRYPTO_ALG_OPTIONAL_KEY | 1957 CRYPTO_ALG_ASYNC | 1958 CRYPTO_ALG_ALLOCATES_MEMORY | 1959 CRYPTO_ALG_KERN_DRIVER_ONLY, 1960 .cra_blocksize = 1, 1961 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 1962 .cra_init = safexcel_crc32_cra_init, 1963 .cra_exit = safexcel_ahash_cra_exit, 1964 .cra_module = THIS_MODULE, 1965 }, 1966 }, 1967 }, 1968 }; 1969 1970 static int safexcel_cbcmac_init(struct ahash_request *areq) 1971 { 1972 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); 1973 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 1974 1975 memset(req, 0, sizeof(*req)); 1976 1977 /* Start from loaded keys */ 1978 memcpy(req->state, &ctx->base.ipad, ctx->key_sz); 1979 /* Set processed to non-zero to enable invalidation detection */ 1980 req->len = AES_BLOCK_SIZE; 1981 req->processed = AES_BLOCK_SIZE; 1982 1983 req->digest = CONTEXT_CONTROL_DIGEST_XCM; 1984 req->state_sz = ctx->key_sz; 1985 req->digest_sz = AES_BLOCK_SIZE; 1986 req->block_sz = AES_BLOCK_SIZE; 1987 req->xcbcmac = true; 1988 1989 return 0; 1990 } 1991 1992 static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key, 1993 unsigned int len) 1994 { 1995 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); 1996 struct crypto_aes_ctx aes; 1997 int ret, i; 1998 1999 ret = aes_expandkey(&aes, key, len); 2000 if (ret) 2001 return ret; 2002 2003 memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE); 2004 for (i = 0; i < len / sizeof(u32); i++) 2005 ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]); 2006 2007 if (len == AES_KEYSIZE_192) { 2008 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192; 2009 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE; 2010 } else if (len == AES_KEYSIZE_256) { 2011 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256; 2012 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE; 2013 } else { 2014 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128; 2015 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE; 2016 } 2017 ctx->cbcmac = true; 2018 2019 memzero_explicit(&aes, sizeof(aes)); 2020 return 0; 2021 } 2022 2023 static int safexcel_cbcmac_digest(struct ahash_request *areq) 2024 { 2025 return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq); 2026 } 2027 2028 struct safexcel_alg_template safexcel_alg_cbcmac = { 2029 .type = SAFEXCEL_ALG_TYPE_AHASH, 2030 .algo_mask = 0, 2031 .alg.ahash = { 2032 .init = safexcel_cbcmac_init, 2033 .update = safexcel_ahash_update, 2034 .final = safexcel_ahash_final, 2035 .finup = safexcel_ahash_finup, 2036 .digest = safexcel_cbcmac_digest, 2037 .setkey = safexcel_cbcmac_setkey, 2038 .export = safexcel_ahash_export, 2039 .import = safexcel_ahash_import, 2040 .halg = { 2041 .digestsize = AES_BLOCK_SIZE, 2042 
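			/* CBC-MAC emits one full AES block as the MAC value */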
.statesize = sizeof(struct safexcel_ahash_export_state), 2043 .base = { 2044 .cra_name = "cbcmac(aes)", 2045 .cra_driver_name = "safexcel-cbcmac-aes", 2046 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2047 .cra_flags = CRYPTO_ALG_ASYNC | 2048 CRYPTO_ALG_ALLOCATES_MEMORY | 2049 CRYPTO_ALG_KERN_DRIVER_ONLY, 2050 .cra_blocksize = AES_BLOCK_SIZE, 2051 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2052 .cra_init = safexcel_ahash_cra_init, 2053 .cra_exit = safexcel_ahash_cra_exit, 2054 .cra_module = THIS_MODULE, 2055 }, 2056 }, 2057 }, 2058 }; 2059 2060 static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key, 2061 unsigned int len) 2062 { 2063 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); 2064 u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)]; 2065 int ret, i; 2066 2067 ret = aes_expandkey(ctx->aes, key, len); 2068 if (ret) 2069 return ret; 2070 2071 /* precompute the XCBC key material */ 2072 aes_encrypt(ctx->aes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE, 2073 "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1"); 2074 aes_encrypt(ctx->aes, (u8 *)key_tmp, 2075 "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2"); 2076 aes_encrypt(ctx->aes, (u8 *)key_tmp + AES_BLOCK_SIZE, 2077 "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3"); 2078 for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++) 2079 ctx->base.ipad.word[i] = swab32(key_tmp[i]); 2080 2081 ret = aes_expandkey(ctx->aes, 2082 (u8 *)key_tmp + 2 * AES_BLOCK_SIZE, 2083 AES_MIN_KEY_SIZE); 2084 if (ret) 2085 return ret; 2086 2087 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128; 2088 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE; 2089 ctx->cbcmac = false; 2090 2091 return 0; 2092 } 2093 2094 static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm) 2095 { 2096 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); 2097 2098 safexcel_ahash_cra_init(tfm); 2099 ctx->aes = kmalloc(sizeof(*ctx->aes), GFP_KERNEL); 2100 return ctx->aes == NULL ? 
-ENOMEM : 0; 2101 } 2102 2103 static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm) 2104 { 2105 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); 2106 2107 kfree(ctx->aes); 2108 safexcel_ahash_cra_exit(tfm); 2109 } 2110 2111 struct safexcel_alg_template safexcel_alg_xcbcmac = { 2112 .type = SAFEXCEL_ALG_TYPE_AHASH, 2113 .algo_mask = 0, 2114 .alg.ahash = { 2115 .init = safexcel_cbcmac_init, 2116 .update = safexcel_ahash_update, 2117 .final = safexcel_ahash_final, 2118 .finup = safexcel_ahash_finup, 2119 .digest = safexcel_cbcmac_digest, 2120 .setkey = safexcel_xcbcmac_setkey, 2121 .export = safexcel_ahash_export, 2122 .import = safexcel_ahash_import, 2123 .halg = { 2124 .digestsize = AES_BLOCK_SIZE, 2125 .statesize = sizeof(struct safexcel_ahash_export_state), 2126 .base = { 2127 .cra_name = "xcbc(aes)", 2128 .cra_driver_name = "safexcel-xcbc-aes", 2129 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2130 .cra_flags = CRYPTO_ALG_ASYNC | 2131 CRYPTO_ALG_ALLOCATES_MEMORY | 2132 CRYPTO_ALG_KERN_DRIVER_ONLY, 2133 .cra_blocksize = AES_BLOCK_SIZE, 2134 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2135 .cra_init = safexcel_xcbcmac_cra_init, 2136 .cra_exit = safexcel_xcbcmac_cra_exit, 2137 .cra_module = THIS_MODULE, 2138 }, 2139 }, 2140 }, 2141 }; 2142 2143 static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key, 2144 unsigned int len) 2145 { 2146 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); 2147 __be64 consts[4]; 2148 u64 _const[2]; 2149 u8 msb_mask, gfmask; 2150 int ret, i; 2151 2152 /* precompute the CMAC key material */ 2153 ret = aes_expandkey(ctx->aes, key, len); 2154 if (ret) 2155 return ret; 2156 2157 for (i = 0; i < len / sizeof(u32); i++) 2158 ctx->base.ipad.word[i + 8] = swab32(ctx->aes->key_enc[i]); 2159 2160 /* code below borrowed from crypto/cmac.c */ 2161 /* encrypt the zero block */ 2162 memset(consts, 0, AES_BLOCK_SIZE); 2163 aes_encrypt(ctx->aes, (u8 *)consts, (u8 *)consts); 2164 2165 gfmask = 0x87; 2166 _const[0] = be64_to_cpu(consts[1]); 2167 _const[1] = be64_to_cpu(consts[0]); 2168 2169 /* gf(2^128) multiply zero-ciphertext with u and u^2 */ 2170 for (i = 0; i < 4; i += 2) { 2171 msb_mask = ((s64)_const[1] >> 63) & gfmask; 2172 _const[1] = (_const[1] << 1) | (_const[0] >> 63); 2173 _const[0] = (_const[0] << 1) ^ msb_mask; 2174 2175 consts[i + 0] = cpu_to_be64(_const[1]); 2176 consts[i + 1] = cpu_to_be64(_const[0]); 2177 } 2178 /* end of code borrowed from crypto/cmac.c */ 2179 2180 for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++) 2181 ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]); 2182 2183 if (len == AES_KEYSIZE_192) { 2184 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192; 2185 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE; 2186 } else if (len == AES_KEYSIZE_256) { 2187 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256; 2188 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE; 2189 } else { 2190 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128; 2191 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE; 2192 } 2193 ctx->cbcmac = false; 2194 2195 return 0; 2196 } 2197 2198 struct safexcel_alg_template safexcel_alg_cmac = { 2199 .type = SAFEXCEL_ALG_TYPE_AHASH, 2200 .algo_mask = 0, 2201 .alg.ahash = { 2202 .init = safexcel_cbcmac_init, 2203 .update = safexcel_ahash_update, 2204 .final = safexcel_ahash_final, 2205 .finup = safexcel_ahash_finup, 2206 .digest = safexcel_cbcmac_digest, 2207 .setkey = safexcel_cmac_setkey, 2208 .export = safexcel_ahash_export, 2209 .import = safexcel_ahash_import, 2210 .halg = { 2211 
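			/* CMAC likewise returns a single AES-block-sized digest */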
.digestsize = AES_BLOCK_SIZE, 2212 .statesize = sizeof(struct safexcel_ahash_export_state), 2213 .base = { 2214 .cra_name = "cmac(aes)", 2215 .cra_driver_name = "safexcel-cmac-aes", 2216 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2217 .cra_flags = CRYPTO_ALG_ASYNC | 2218 CRYPTO_ALG_ALLOCATES_MEMORY | 2219 CRYPTO_ALG_KERN_DRIVER_ONLY, 2220 .cra_blocksize = AES_BLOCK_SIZE, 2221 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2222 .cra_init = safexcel_xcbcmac_cra_init, 2223 .cra_exit = safexcel_xcbcmac_cra_exit, 2224 .cra_module = THIS_MODULE, 2225 }, 2226 }, 2227 }, 2228 }; 2229 2230 static int safexcel_sm3_init(struct ahash_request *areq) 2231 { 2232 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); 2233 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2234 2235 memset(req, 0, sizeof(*req)); 2236 2237 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3; 2238 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; 2239 req->state_sz = SM3_DIGEST_SIZE; 2240 req->digest_sz = SM3_DIGEST_SIZE; 2241 req->block_sz = SM3_BLOCK_SIZE; 2242 2243 return 0; 2244 } 2245 2246 static int safexcel_sm3_digest(struct ahash_request *areq) 2247 { 2248 int ret = safexcel_sm3_init(areq); 2249 2250 if (ret) 2251 return ret; 2252 2253 return safexcel_ahash_finup(areq); 2254 } 2255 2256 struct safexcel_alg_template safexcel_alg_sm3 = { 2257 .type = SAFEXCEL_ALG_TYPE_AHASH, 2258 .algo_mask = SAFEXCEL_ALG_SM3, 2259 .alg.ahash = { 2260 .init = safexcel_sm3_init, 2261 .update = safexcel_ahash_update, 2262 .final = safexcel_ahash_final, 2263 .finup = safexcel_ahash_finup, 2264 .digest = safexcel_sm3_digest, 2265 .export = safexcel_ahash_export, 2266 .import = safexcel_ahash_import, 2267 .halg = { 2268 .digestsize = SM3_DIGEST_SIZE, 2269 .statesize = sizeof(struct safexcel_ahash_export_state), 2270 .base = { 2271 .cra_name = "sm3", 2272 .cra_driver_name = "safexcel-sm3", 2273 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2274 .cra_flags = CRYPTO_ALG_ASYNC | 2275 CRYPTO_ALG_ALLOCATES_MEMORY | 2276 CRYPTO_ALG_KERN_DRIVER_ONLY, 2277 .cra_blocksize = SM3_BLOCK_SIZE, 2278 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2279 .cra_init = safexcel_ahash_cra_init, 2280 .cra_exit = safexcel_ahash_cra_exit, 2281 .cra_module = THIS_MODULE, 2282 }, 2283 }, 2284 }, 2285 }; 2286 2287 static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key, 2288 unsigned int keylen) 2289 { 2290 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3", 2291 SM3_DIGEST_SIZE); 2292 } 2293 2294 static int safexcel_hmac_sm3_init(struct ahash_request *areq) 2295 { 2296 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); 2297 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2298 2299 memset(req, 0, sizeof(*req)); 2300 2301 /* Start from ipad precompute */ 2302 memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE); 2303 /* Already processed the key^ipad part now! 
*/ 2304 req->len = SM3_BLOCK_SIZE; 2305 req->processed = SM3_BLOCK_SIZE; 2306 2307 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3; 2308 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; 2309 req->state_sz = SM3_DIGEST_SIZE; 2310 req->digest_sz = SM3_DIGEST_SIZE; 2311 req->block_sz = SM3_BLOCK_SIZE; 2312 req->hmac = true; 2313 2314 return 0; 2315 } 2316 2317 static int safexcel_hmac_sm3_digest(struct ahash_request *areq) 2318 { 2319 int ret = safexcel_hmac_sm3_init(areq); 2320 2321 if (ret) 2322 return ret; 2323 2324 return safexcel_ahash_finup(areq); 2325 } 2326 2327 struct safexcel_alg_template safexcel_alg_hmac_sm3 = { 2328 .type = SAFEXCEL_ALG_TYPE_AHASH, 2329 .algo_mask = SAFEXCEL_ALG_SM3, 2330 .alg.ahash = { 2331 .init = safexcel_hmac_sm3_init, 2332 .update = safexcel_ahash_update, 2333 .final = safexcel_ahash_final, 2334 .finup = safexcel_ahash_finup, 2335 .digest = safexcel_hmac_sm3_digest, 2336 .setkey = safexcel_hmac_sm3_setkey, 2337 .export = safexcel_ahash_export, 2338 .import = safexcel_ahash_import, 2339 .halg = { 2340 .digestsize = SM3_DIGEST_SIZE, 2341 .statesize = sizeof(struct safexcel_ahash_export_state), 2342 .base = { 2343 .cra_name = "hmac(sm3)", 2344 .cra_driver_name = "safexcel-hmac-sm3", 2345 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2346 .cra_flags = CRYPTO_ALG_ASYNC | 2347 CRYPTO_ALG_ALLOCATES_MEMORY | 2348 CRYPTO_ALG_KERN_DRIVER_ONLY, 2349 .cra_blocksize = SM3_BLOCK_SIZE, 2350 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2351 .cra_init = safexcel_ahash_cra_init, 2352 .cra_exit = safexcel_ahash_cra_exit, 2353 .cra_module = THIS_MODULE, 2354 }, 2355 }, 2356 }, 2357 }; 2358 2359 static int safexcel_sha3_224_init(struct ahash_request *areq) 2360 { 2361 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); 2362 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2363 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2364 2365 memset(req, 0, sizeof(*req)); 2366 2367 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224; 2368 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; 2369 req->state_sz = SHA3_224_DIGEST_SIZE; 2370 req->digest_sz = SHA3_224_DIGEST_SIZE; 2371 req->block_sz = SHA3_224_BLOCK_SIZE; 2372 ctx->do_fallback = false; 2373 ctx->fb_init_done = false; 2374 return 0; 2375 } 2376 2377 static int safexcel_sha3_fbcheck(struct ahash_request *req) 2378 { 2379 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 2380 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2381 struct ahash_request *subreq = ahash_request_ctx_dma(req); 2382 int ret = 0; 2383 2384 if (ctx->do_fallback) { 2385 ahash_request_set_tfm(subreq, ctx->fback); 2386 ahash_request_set_callback(subreq, req->base.flags, 2387 req->base.complete, req->base.data); 2388 ahash_request_set_crypt(subreq, req->src, req->result, 2389 req->nbytes); 2390 if (!ctx->fb_init_done) { 2391 if (ctx->fb_do_setkey) { 2392 /* Set fallback cipher HMAC key */ 2393 u8 key[SHA3_224_BLOCK_SIZE]; 2394 2395 memcpy(key, &ctx->base.ipad, 2396 crypto_ahash_blocksize(ctx->fback) / 2); 2397 memcpy(key + 2398 crypto_ahash_blocksize(ctx->fback) / 2, 2399 &ctx->base.opad, 2400 crypto_ahash_blocksize(ctx->fback) / 2); 2401 ret = crypto_ahash_setkey(ctx->fback, key, 2402 crypto_ahash_blocksize(ctx->fback)); 2403 memzero_explicit(key, 2404 crypto_ahash_blocksize(ctx->fback)); 2405 ctx->fb_do_setkey = false; 2406 } 2407 ret = ret ?: crypto_ahash_init(subreq); 2408 ctx->fb_init_done = true; 2409 } 2410 } 2411 return ret; 2412 } 2413 2414 static int safexcel_sha3_update(struct ahash_request *req) 2415 { 2416 struct crypto_ahash *tfm = 
crypto_ahash_reqtfm(req); 2417 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2418 struct ahash_request *subreq = ahash_request_ctx_dma(req); 2419 2420 ctx->do_fallback = true; 2421 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq); 2422 } 2423 2424 static int safexcel_sha3_final(struct ahash_request *req) 2425 { 2426 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 2427 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2428 struct ahash_request *subreq = ahash_request_ctx_dma(req); 2429 2430 ctx->do_fallback = true; 2431 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq); 2432 } 2433 2434 static int safexcel_sha3_finup(struct ahash_request *req) 2435 { 2436 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 2437 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2438 struct ahash_request *subreq = ahash_request_ctx_dma(req); 2439 2440 ctx->do_fallback |= !req->nbytes; 2441 if (ctx->do_fallback) 2442 /* Update or ex/import happened or len 0, cannot use the HW */ 2443 return safexcel_sha3_fbcheck(req) ?: 2444 crypto_ahash_finup(subreq); 2445 else 2446 return safexcel_ahash_finup(req); 2447 } 2448 2449 static int safexcel_sha3_digest_fallback(struct ahash_request *req) 2450 { 2451 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 2452 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2453 struct ahash_request *subreq = ahash_request_ctx_dma(req); 2454 2455 ctx->do_fallback = true; 2456 ctx->fb_init_done = false; 2457 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq); 2458 } 2459 2460 static int safexcel_sha3_224_digest(struct ahash_request *req) 2461 { 2462 if (req->nbytes) 2463 return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req); 2464 2465 /* HW cannot do zero length hash, use fallback instead */ 2466 return safexcel_sha3_digest_fallback(req); 2467 } 2468 2469 static int safexcel_sha3_export(struct ahash_request *req, void *out) 2470 { 2471 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 2472 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2473 struct ahash_request *subreq = ahash_request_ctx_dma(req); 2474 2475 ctx->do_fallback = true; 2476 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out); 2477 } 2478 2479 static int safexcel_sha3_import(struct ahash_request *req, const void *in) 2480 { 2481 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 2482 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2483 struct ahash_request *subreq = ahash_request_ctx_dma(req); 2484 2485 ctx->do_fallback = true; 2486 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in); 2487 // return safexcel_ahash_import(req, in); 2488 } 2489 2490 static int safexcel_sha3_cra_init(struct crypto_tfm *tfm) 2491 { 2492 struct crypto_ahash *ahash = __crypto_ahash_cast(tfm); 2493 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); 2494 2495 safexcel_ahash_cra_init(tfm); 2496 2497 /* Allocate fallback implementation */ 2498 ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0, 2499 CRYPTO_ALG_ASYNC | 2500 CRYPTO_ALG_NEED_FALLBACK); 2501 if (IS_ERR(ctx->fback)) 2502 return PTR_ERR(ctx->fback); 2503 2504 /* Update statesize from fallback algorithm! 
*/ 2505 crypto_hash_alg_common(ahash)->statesize = 2506 crypto_ahash_statesize(ctx->fback); 2507 crypto_ahash_set_reqsize_dma( 2508 ahash, max(sizeof(struct safexcel_ahash_req), 2509 sizeof(struct ahash_request) + 2510 crypto_ahash_reqsize(ctx->fback))); 2511 return 0; 2512 } 2513 2514 static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm) 2515 { 2516 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); 2517 2518 crypto_free_ahash(ctx->fback); 2519 safexcel_ahash_cra_exit(tfm); 2520 } 2521 2522 struct safexcel_alg_template safexcel_alg_sha3_224 = { 2523 .type = SAFEXCEL_ALG_TYPE_AHASH, 2524 .algo_mask = SAFEXCEL_ALG_SHA3, 2525 .alg.ahash = { 2526 .init = safexcel_sha3_224_init, 2527 .update = safexcel_sha3_update, 2528 .final = safexcel_sha3_final, 2529 .finup = safexcel_sha3_finup, 2530 .digest = safexcel_sha3_224_digest, 2531 .export = safexcel_sha3_export, 2532 .import = safexcel_sha3_import, 2533 .halg = { 2534 .digestsize = SHA3_224_DIGEST_SIZE, 2535 .statesize = sizeof(struct safexcel_ahash_export_state), 2536 .base = { 2537 .cra_name = "sha3-224", 2538 .cra_driver_name = "safexcel-sha3-224", 2539 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2540 .cra_flags = CRYPTO_ALG_ASYNC | 2541 CRYPTO_ALG_KERN_DRIVER_ONLY | 2542 CRYPTO_ALG_NEED_FALLBACK, 2543 .cra_blocksize = SHA3_224_BLOCK_SIZE, 2544 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2545 .cra_init = safexcel_sha3_cra_init, 2546 .cra_exit = safexcel_sha3_cra_exit, 2547 .cra_module = THIS_MODULE, 2548 }, 2549 }, 2550 }, 2551 }; 2552 2553 static int safexcel_sha3_256_init(struct ahash_request *areq) 2554 { 2555 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); 2556 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2557 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2558 2559 memset(req, 0, sizeof(*req)); 2560 2561 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256; 2562 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; 2563 req->state_sz = SHA3_256_DIGEST_SIZE; 2564 req->digest_sz = SHA3_256_DIGEST_SIZE; 2565 req->block_sz = SHA3_256_BLOCK_SIZE; 2566 ctx->do_fallback = false; 2567 ctx->fb_init_done = false; 2568 return 0; 2569 } 2570 2571 static int safexcel_sha3_256_digest(struct ahash_request *req) 2572 { 2573 if (req->nbytes) 2574 return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req); 2575 2576 /* HW cannot do zero length hash, use fallback instead */ 2577 return safexcel_sha3_digest_fallback(req); 2578 } 2579 2580 struct safexcel_alg_template safexcel_alg_sha3_256 = { 2581 .type = SAFEXCEL_ALG_TYPE_AHASH, 2582 .algo_mask = SAFEXCEL_ALG_SHA3, 2583 .alg.ahash = { 2584 .init = safexcel_sha3_256_init, 2585 .update = safexcel_sha3_update, 2586 .final = safexcel_sha3_final, 2587 .finup = safexcel_sha3_finup, 2588 .digest = safexcel_sha3_256_digest, 2589 .export = safexcel_sha3_export, 2590 .import = safexcel_sha3_import, 2591 .halg = { 2592 .digestsize = SHA3_256_DIGEST_SIZE, 2593 .statesize = sizeof(struct safexcel_ahash_export_state), 2594 .base = { 2595 .cra_name = "sha3-256", 2596 .cra_driver_name = "safexcel-sha3-256", 2597 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2598 .cra_flags = CRYPTO_ALG_ASYNC | 2599 CRYPTO_ALG_KERN_DRIVER_ONLY | 2600 CRYPTO_ALG_NEED_FALLBACK, 2601 .cra_blocksize = SHA3_256_BLOCK_SIZE, 2602 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2603 .cra_init = safexcel_sha3_cra_init, 2604 .cra_exit = safexcel_sha3_cra_exit, 2605 .cra_module = THIS_MODULE, 2606 }, 2607 }, 2608 }, 2609 }; 2610 2611 static int safexcel_sha3_384_init(struct ahash_request *areq) 2612 { 2613 struct 
crypto_ahash *tfm = crypto_ahash_reqtfm(areq); 2614 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2615 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2616 2617 memset(req, 0, sizeof(*req)); 2618 2619 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384; 2620 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; 2621 req->state_sz = SHA3_384_DIGEST_SIZE; 2622 req->digest_sz = SHA3_384_DIGEST_SIZE; 2623 req->block_sz = SHA3_384_BLOCK_SIZE; 2624 ctx->do_fallback = false; 2625 ctx->fb_init_done = false; 2626 return 0; 2627 } 2628 2629 static int safexcel_sha3_384_digest(struct ahash_request *req) 2630 { 2631 if (req->nbytes) 2632 return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req); 2633 2634 /* HW cannot do zero length hash, use fallback instead */ 2635 return safexcel_sha3_digest_fallback(req); 2636 } 2637 2638 struct safexcel_alg_template safexcel_alg_sha3_384 = { 2639 .type = SAFEXCEL_ALG_TYPE_AHASH, 2640 .algo_mask = SAFEXCEL_ALG_SHA3, 2641 .alg.ahash = { 2642 .init = safexcel_sha3_384_init, 2643 .update = safexcel_sha3_update, 2644 .final = safexcel_sha3_final, 2645 .finup = safexcel_sha3_finup, 2646 .digest = safexcel_sha3_384_digest, 2647 .export = safexcel_sha3_export, 2648 .import = safexcel_sha3_import, 2649 .halg = { 2650 .digestsize = SHA3_384_DIGEST_SIZE, 2651 .statesize = sizeof(struct safexcel_ahash_export_state), 2652 .base = { 2653 .cra_name = "sha3-384", 2654 .cra_driver_name = "safexcel-sha3-384", 2655 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2656 .cra_flags = CRYPTO_ALG_ASYNC | 2657 CRYPTO_ALG_KERN_DRIVER_ONLY | 2658 CRYPTO_ALG_NEED_FALLBACK, 2659 .cra_blocksize = SHA3_384_BLOCK_SIZE, 2660 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2661 .cra_init = safexcel_sha3_cra_init, 2662 .cra_exit = safexcel_sha3_cra_exit, 2663 .cra_module = THIS_MODULE, 2664 }, 2665 }, 2666 }, 2667 }; 2668 2669 static int safexcel_sha3_512_init(struct ahash_request *areq) 2670 { 2671 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); 2672 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2673 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2674 2675 memset(req, 0, sizeof(*req)); 2676 2677 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512; 2678 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; 2679 req->state_sz = SHA3_512_DIGEST_SIZE; 2680 req->digest_sz = SHA3_512_DIGEST_SIZE; 2681 req->block_sz = SHA3_512_BLOCK_SIZE; 2682 ctx->do_fallback = false; 2683 ctx->fb_init_done = false; 2684 return 0; 2685 } 2686 2687 static int safexcel_sha3_512_digest(struct ahash_request *req) 2688 { 2689 if (req->nbytes) 2690 return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req); 2691 2692 /* HW cannot do zero length hash, use fallback instead */ 2693 return safexcel_sha3_digest_fallback(req); 2694 } 2695 2696 struct safexcel_alg_template safexcel_alg_sha3_512 = { 2697 .type = SAFEXCEL_ALG_TYPE_AHASH, 2698 .algo_mask = SAFEXCEL_ALG_SHA3, 2699 .alg.ahash = { 2700 .init = safexcel_sha3_512_init, 2701 .update = safexcel_sha3_update, 2702 .final = safexcel_sha3_final, 2703 .finup = safexcel_sha3_finup, 2704 .digest = safexcel_sha3_512_digest, 2705 .export = safexcel_sha3_export, 2706 .import = safexcel_sha3_import, 2707 .halg = { 2708 .digestsize = SHA3_512_DIGEST_SIZE, 2709 .statesize = sizeof(struct safexcel_ahash_export_state), 2710 .base = { 2711 .cra_name = "sha3-512", 2712 .cra_driver_name = "safexcel-sha3-512", 2713 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2714 .cra_flags = CRYPTO_ALG_ASYNC | 2715 CRYPTO_ALG_KERN_DRIVER_ONLY | 2716 CRYPTO_ALG_NEED_FALLBACK, 2717 
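				/* SHA3-512 rate: (1600 - 2 * 512) / 8 = 72 byte blocks */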
.cra_blocksize = SHA3_512_BLOCK_SIZE, 2718 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2719 .cra_init = safexcel_sha3_cra_init, 2720 .cra_exit = safexcel_sha3_cra_exit, 2721 .cra_module = THIS_MODULE, 2722 }, 2723 }, 2724 }, 2725 }; 2726 2727 static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg) 2728 { 2729 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); 2730 int ret; 2731 2732 ret = safexcel_sha3_cra_init(tfm); 2733 if (ret) 2734 return ret; 2735 2736 /* Allocate precalc basic digest implementation */ 2737 ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK); 2738 if (IS_ERR(ctx->shpre)) 2739 return PTR_ERR(ctx->shpre); 2740 2741 ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) + 2742 crypto_shash_descsize(ctx->shpre), GFP_KERNEL); 2743 if (!ctx->shdesc) { 2744 crypto_free_shash(ctx->shpre); 2745 return -ENOMEM; 2746 } 2747 ctx->shdesc->tfm = ctx->shpre; 2748 return 0; 2749 } 2750 2751 static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm) 2752 { 2753 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); 2754 2755 crypto_free_ahash(ctx->fback); 2756 crypto_free_shash(ctx->shpre); 2757 kfree(ctx->shdesc); 2758 safexcel_ahash_cra_exit(tfm); 2759 } 2760 2761 static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key, 2762 unsigned int keylen) 2763 { 2764 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2765 int ret = 0; 2766 2767 if (keylen > crypto_ahash_blocksize(tfm)) { 2768 /* 2769 * If the key is larger than the blocksize, then hash it 2770 * first using our fallback cipher 2771 */ 2772 ret = crypto_shash_digest(ctx->shdesc, key, keylen, 2773 ctx->base.ipad.byte); 2774 keylen = crypto_shash_digestsize(ctx->shpre); 2775 2776 /* 2777 * If the digest is larger than half the blocksize, we need to 2778 * move the rest to opad due to the way our HMAC infra works. 2779 */ 2780 if (keylen > crypto_ahash_blocksize(tfm) / 2) 2781 /* Buffers overlap, need to use memmove iso memcpy! */ 2782 memmove(&ctx->base.opad, 2783 ctx->base.ipad.byte + 2784 crypto_ahash_blocksize(tfm) / 2, 2785 keylen - crypto_ahash_blocksize(tfm) / 2); 2786 } else { 2787 /* 2788 * Copy the key to our ipad & opad buffers 2789 * Note that ipad and opad each contain one half of the key, 2790 * to match the existing HMAC driver infrastructure. 2791 */ 2792 if (keylen <= crypto_ahash_blocksize(tfm) / 2) { 2793 memcpy(&ctx->base.ipad, key, keylen); 2794 } else { 2795 memcpy(&ctx->base.ipad, key, 2796 crypto_ahash_blocksize(tfm) / 2); 2797 memcpy(&ctx->base.opad, 2798 key + crypto_ahash_blocksize(tfm) / 2, 2799 keylen - crypto_ahash_blocksize(tfm) / 2); 2800 } 2801 } 2802 2803 /* Pad key with zeroes */ 2804 if (keylen <= crypto_ahash_blocksize(tfm) / 2) { 2805 memset(ctx->base.ipad.byte + keylen, 0, 2806 crypto_ahash_blocksize(tfm) / 2 - keylen); 2807 memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2); 2808 } else { 2809 memset(ctx->base.opad.byte + keylen - 2810 crypto_ahash_blocksize(tfm) / 2, 0, 2811 crypto_ahash_blocksize(tfm) - keylen); 2812 } 2813 2814 /* If doing fallback, still need to set the new key! 
*/ 2815 ctx->fb_do_setkey = true; 2816 return ret; 2817 } 2818 2819 static int safexcel_hmac_sha3_224_init(struct ahash_request *areq) 2820 { 2821 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); 2822 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2823 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2824 2825 memset(req, 0, sizeof(*req)); 2826 2827 /* Copy (half of) the key */ 2828 memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2); 2829 /* Start of HMAC should have len == processed == blocksize */ 2830 req->len = SHA3_224_BLOCK_SIZE; 2831 req->processed = SHA3_224_BLOCK_SIZE; 2832 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224; 2833 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; 2834 req->state_sz = SHA3_224_BLOCK_SIZE / 2; 2835 req->digest_sz = SHA3_224_DIGEST_SIZE; 2836 req->block_sz = SHA3_224_BLOCK_SIZE; 2837 req->hmac = true; 2838 ctx->do_fallback = false; 2839 ctx->fb_init_done = false; 2840 return 0; 2841 } 2842 2843 static int safexcel_hmac_sha3_224_digest(struct ahash_request *req) 2844 { 2845 if (req->nbytes) 2846 return safexcel_hmac_sha3_224_init(req) ?: 2847 safexcel_ahash_finup(req); 2848 2849 /* HW cannot do zero length HMAC, use fallback instead */ 2850 return safexcel_sha3_digest_fallback(req); 2851 } 2852 2853 static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm) 2854 { 2855 return safexcel_hmac_sha3_cra_init(tfm, "sha3-224"); 2856 } 2857 2858 struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = { 2859 .type = SAFEXCEL_ALG_TYPE_AHASH, 2860 .algo_mask = SAFEXCEL_ALG_SHA3, 2861 .alg.ahash = { 2862 .init = safexcel_hmac_sha3_224_init, 2863 .update = safexcel_sha3_update, 2864 .final = safexcel_sha3_final, 2865 .finup = safexcel_sha3_finup, 2866 .digest = safexcel_hmac_sha3_224_digest, 2867 .setkey = safexcel_hmac_sha3_setkey, 2868 .export = safexcel_sha3_export, 2869 .import = safexcel_sha3_import, 2870 .halg = { 2871 .digestsize = SHA3_224_DIGEST_SIZE, 2872 .statesize = sizeof(struct safexcel_ahash_export_state), 2873 .base = { 2874 .cra_name = "hmac(sha3-224)", 2875 .cra_driver_name = "safexcel-hmac-sha3-224", 2876 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2877 .cra_flags = CRYPTO_ALG_ASYNC | 2878 CRYPTO_ALG_KERN_DRIVER_ONLY | 2879 CRYPTO_ALG_NEED_FALLBACK, 2880 .cra_blocksize = SHA3_224_BLOCK_SIZE, 2881 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2882 .cra_init = safexcel_hmac_sha3_224_cra_init, 2883 .cra_exit = safexcel_hmac_sha3_cra_exit, 2884 .cra_module = THIS_MODULE, 2885 }, 2886 }, 2887 }, 2888 }; 2889 2890 static int safexcel_hmac_sha3_256_init(struct ahash_request *areq) 2891 { 2892 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); 2893 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2894 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2895 2896 memset(req, 0, sizeof(*req)); 2897 2898 /* Copy (half of) the key */ 2899 memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2); 2900 /* Start of HMAC should have len == processed == blocksize */ 2901 req->len = SHA3_256_BLOCK_SIZE; 2902 req->processed = SHA3_256_BLOCK_SIZE; 2903 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256; 2904 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; 2905 req->state_sz = SHA3_256_BLOCK_SIZE / 2; 2906 req->digest_sz = SHA3_256_DIGEST_SIZE; 2907 req->block_sz = SHA3_256_BLOCK_SIZE; 2908 req->hmac = true; 2909 ctx->do_fallback = false; 2910 ctx->fb_init_done = false; 2911 return 0; 2912 } 2913 2914 static int safexcel_hmac_sha3_256_digest(struct ahash_request *req) 2915 { 2916 if (req->nbytes) 2917 return 
safexcel_hmac_sha3_256_init(req) ?: 2918 safexcel_ahash_finup(req); 2919 2920 /* HW cannot do zero length HMAC, use fallback instead */ 2921 return safexcel_sha3_digest_fallback(req); 2922 } 2923 2924 static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm) 2925 { 2926 return safexcel_hmac_sha3_cra_init(tfm, "sha3-256"); 2927 } 2928 2929 struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = { 2930 .type = SAFEXCEL_ALG_TYPE_AHASH, 2931 .algo_mask = SAFEXCEL_ALG_SHA3, 2932 .alg.ahash = { 2933 .init = safexcel_hmac_sha3_256_init, 2934 .update = safexcel_sha3_update, 2935 .final = safexcel_sha3_final, 2936 .finup = safexcel_sha3_finup, 2937 .digest = safexcel_hmac_sha3_256_digest, 2938 .setkey = safexcel_hmac_sha3_setkey, 2939 .export = safexcel_sha3_export, 2940 .import = safexcel_sha3_import, 2941 .halg = { 2942 .digestsize = SHA3_256_DIGEST_SIZE, 2943 .statesize = sizeof(struct safexcel_ahash_export_state), 2944 .base = { 2945 .cra_name = "hmac(sha3-256)", 2946 .cra_driver_name = "safexcel-hmac-sha3-256", 2947 .cra_priority = SAFEXCEL_CRA_PRIORITY, 2948 .cra_flags = CRYPTO_ALG_ASYNC | 2949 CRYPTO_ALG_KERN_DRIVER_ONLY | 2950 CRYPTO_ALG_NEED_FALLBACK, 2951 .cra_blocksize = SHA3_256_BLOCK_SIZE, 2952 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 2953 .cra_init = safexcel_hmac_sha3_256_cra_init, 2954 .cra_exit = safexcel_hmac_sha3_cra_exit, 2955 .cra_module = THIS_MODULE, 2956 }, 2957 }, 2958 }, 2959 }; 2960 2961 static int safexcel_hmac_sha3_384_init(struct ahash_request *areq) 2962 { 2963 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); 2964 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 2965 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 2966 2967 memset(req, 0, sizeof(*req)); 2968 2969 /* Copy (half of) the key */ 2970 memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2); 2971 /* Start of HMAC should have len == processed == blocksize */ 2972 req->len = SHA3_384_BLOCK_SIZE; 2973 req->processed = SHA3_384_BLOCK_SIZE; 2974 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384; 2975 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; 2976 req->state_sz = SHA3_384_BLOCK_SIZE / 2; 2977 req->digest_sz = SHA3_384_DIGEST_SIZE; 2978 req->block_sz = SHA3_384_BLOCK_SIZE; 2979 req->hmac = true; 2980 ctx->do_fallback = false; 2981 ctx->fb_init_done = false; 2982 return 0; 2983 } 2984 2985 static int safexcel_hmac_sha3_384_digest(struct ahash_request *req) 2986 { 2987 if (req->nbytes) 2988 return safexcel_hmac_sha3_384_init(req) ?: 2989 safexcel_ahash_finup(req); 2990 2991 /* HW cannot do zero length HMAC, use fallback instead */ 2992 return safexcel_sha3_digest_fallback(req); 2993 } 2994 2995 static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm) 2996 { 2997 return safexcel_hmac_sha3_cra_init(tfm, "sha3-384"); 2998 } 2999 3000 struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = { 3001 .type = SAFEXCEL_ALG_TYPE_AHASH, 3002 .algo_mask = SAFEXCEL_ALG_SHA3, 3003 .alg.ahash = { 3004 .init = safexcel_hmac_sha3_384_init, 3005 .update = safexcel_sha3_update, 3006 .final = safexcel_sha3_final, 3007 .finup = safexcel_sha3_finup, 3008 .digest = safexcel_hmac_sha3_384_digest, 3009 .setkey = safexcel_hmac_sha3_setkey, 3010 .export = safexcel_sha3_export, 3011 .import = safexcel_sha3_import, 3012 .halg = { 3013 .digestsize = SHA3_384_DIGEST_SIZE, 3014 .statesize = sizeof(struct safexcel_ahash_export_state), 3015 .base = { 3016 .cra_name = "hmac(sha3-384)", 3017 .cra_driver_name = "safexcel-hmac-sha3-384", 3018 .cra_priority = SAFEXCEL_CRA_PRIORITY, 3019 
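				/* Zero-length and update()-style requests go through the sha3 fallback */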
.cra_flags = CRYPTO_ALG_ASYNC | 3020 CRYPTO_ALG_KERN_DRIVER_ONLY | 3021 CRYPTO_ALG_NEED_FALLBACK, 3022 .cra_blocksize = SHA3_384_BLOCK_SIZE, 3023 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 3024 .cra_init = safexcel_hmac_sha3_384_cra_init, 3025 .cra_exit = safexcel_hmac_sha3_cra_exit, 3026 .cra_module = THIS_MODULE, 3027 }, 3028 }, 3029 }, 3030 }; 3031 3032 static int safexcel_hmac_sha3_512_init(struct ahash_request *areq) 3033 { 3034 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); 3035 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); 3036 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); 3037 3038 memset(req, 0, sizeof(*req)); 3039 3040 /* Copy (half of) the key */ 3041 memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2); 3042 /* Start of HMAC should have len == processed == blocksize */ 3043 req->len = SHA3_512_BLOCK_SIZE; 3044 req->processed = SHA3_512_BLOCK_SIZE; 3045 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512; 3046 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; 3047 req->state_sz = SHA3_512_BLOCK_SIZE / 2; 3048 req->digest_sz = SHA3_512_DIGEST_SIZE; 3049 req->block_sz = SHA3_512_BLOCK_SIZE; 3050 req->hmac = true; 3051 ctx->do_fallback = false; 3052 ctx->fb_init_done = false; 3053 return 0; 3054 } 3055 3056 static int safexcel_hmac_sha3_512_digest(struct ahash_request *req) 3057 { 3058 if (req->nbytes) 3059 return safexcel_hmac_sha3_512_init(req) ?: 3060 safexcel_ahash_finup(req); 3061 3062 /* HW cannot do zero length HMAC, use fallback instead */ 3063 return safexcel_sha3_digest_fallback(req); 3064 } 3065 3066 static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm) 3067 { 3068 return safexcel_hmac_sha3_cra_init(tfm, "sha3-512"); 3069 } 3070 struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = { 3071 .type = SAFEXCEL_ALG_TYPE_AHASH, 3072 .algo_mask = SAFEXCEL_ALG_SHA3, 3073 .alg.ahash = { 3074 .init = safexcel_hmac_sha3_512_init, 3075 .update = safexcel_sha3_update, 3076 .final = safexcel_sha3_final, 3077 .finup = safexcel_sha3_finup, 3078 .digest = safexcel_hmac_sha3_512_digest, 3079 .setkey = safexcel_hmac_sha3_setkey, 3080 .export = safexcel_sha3_export, 3081 .import = safexcel_sha3_import, 3082 .halg = { 3083 .digestsize = SHA3_512_DIGEST_SIZE, 3084 .statesize = sizeof(struct safexcel_ahash_export_state), 3085 .base = { 3086 .cra_name = "hmac(sha3-512)", 3087 .cra_driver_name = "safexcel-hmac-sha3-512", 3088 .cra_priority = SAFEXCEL_CRA_PRIORITY, 3089 .cra_flags = CRYPTO_ALG_ASYNC | 3090 CRYPTO_ALG_KERN_DRIVER_ONLY | 3091 CRYPTO_ALG_NEED_FALLBACK, 3092 .cra_blocksize = SHA3_512_BLOCK_SIZE, 3093 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), 3094 .cra_init = safexcel_hmac_sha3_512_cra_init, 3095 .cra_exit = safexcel_hmac_sha3_cra_exit, 3096 .cra_module = THIS_MODULE, 3097 }, 3098 }, 3099 }, 3100 }; 3101
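/*
 * Example (sketch only, not part of the driver): how a kernel user might
 * exercise one of the ahash algorithms registered above, e.g. "hmac(sha3-256)"
 * served by safexcel-hmac-sha3-256, through the generic crypto API. Buffer
 * names and sizes below are purely illustrative.
 *
 *	struct crypto_ahash *tfm;
 *	struct ahash_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	u8 key[32], data[64], mac[SHA3_256_DIGEST_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_ahash("hmac(sha3-256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_ahash_setkey(tfm, key, sizeof(key));
 *	if (err)
 *		goto out_free_tfm;
 *
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out_free_tfm;
 *	}
 *
 *	sg_init_one(&sg, data, sizeof(data));
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, mac, sizeof(data));
 *
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 * out_free_tfm:
 *	crypto_free_ahash(tfm);
 *	return err;
 */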