// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
 * Crypto driver to handle HASH algorithms using NVIDIA Security Engine.
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "tegra-se.h"

struct tegra_sha_ctx {
	struct tegra_se *se;
	unsigned int alg;
	bool fallback;
	u32 key_id;
	struct crypto_ahash *fallback_tfm;
};

struct tegra_sha_reqctx {
	struct scatterlist *src_sg;
	struct tegra_se_datbuf datbuf;
	struct tegra_se_datbuf residue;
	struct tegra_se_datbuf digest;
	unsigned int alg;
	unsigned int config;
	unsigned int total_len;
	unsigned int blk_size;
	unsigned int task;
	u32 key_id;
	u32 result[HASH_RESULT_REG_COUNT];
	struct ahash_request fallback_req;
};

static int tegra_sha_get_config(u32 alg)
{
	int cfg = 0;

	switch (alg) {
	case SE_ALG_SHA1:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA1;
		break;

	case SE_ALG_HMAC_SHA224:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA224;
		break;

	case SE_ALG_HMAC_SHA256:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA256;
		break;

	case SE_ALG_HMAC_SHA384:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA384;
		break;

	case SE_ALG_HMAC_SHA512:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA512;
		break;

	case SE_ALG_SHA3_224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_224;
		break;
	case SE_ALG_SHA3_256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_256;
		break;
	case SE_ALG_SHA3_384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_384;
		break;
	case SE_ALG_SHA3_512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_512;
		break;
	default:
		return -EINVAL;
	}

	return cfg;
}

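/*
 * Software fallback helpers: when a request cannot be handled by the
 * Security Engine (e.g. an HMAC key that cannot be loaded into a hardware
 * keyslot), each ahash operation is forwarded to the fallback transform
 * allocated in tegra_sha_init_fallback().
 */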
static int tegra_sha_fallback_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}

static int tegra_sha_fallback_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

static int tegra_sha_fallback_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);
}

static int tegra_sha_fallback_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int tegra_sha_fallback_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int tegra_sha_fallback_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int tegra_sha_fallback_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(&rctx->fallback_req, out);
}

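/*
 * Build the host1x command stream for one SHA operation: program the total
 * message length and the bytes left, the SHA configuration, the input and
 * result buffer addresses, an optional hardware key slot, then start the
 * operation and request a syncpoint increment on completion.
 * Returns the number of 32-bit words written into the command buffer.
 */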
static int tegra_sha_prep_cmd(struct tegra_se *se, u32 *cpuvaddr,
			      struct tegra_sha_reqctx *rctx)
{
	u64 msg_len, msg_left;
	int i = 0;

	msg_len = rctx->total_len * 8;
	msg_left = rctx->datbuf.size * 8;

	/*
	 * If IN_ADDR_HI_0.SZ > SHA_MSG_LEFT_[0-3] to the HASH engine,
	 * HW treats it as the last buffer and processes the data.
	 * Therefore, add an extra byte to msg_left if it is not the
	 * last buffer.
	 */
	if (rctx->task & SHA_UPDATE) {
		msg_left += 8;
		msg_len += 8;
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(8);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_MSG_LENGTH);
	cpuvaddr[i++] = lower_32_bits(msg_len);
	cpuvaddr[i++] = upper_32_bits(msg_len);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = lower_32_bits(msg_left);
	cpuvaddr[i++] = upper_32_bits(msg_left);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(6);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_CFG);
	cpuvaddr[i++] = rctx->config;

	if (rctx->task & SHA_FIRST) {
		cpuvaddr[i++] = SE_SHA_TASK_HASH_INIT;
		rctx->task &= ~SHA_FIRST;
	} else {
		cpuvaddr[i++] = 0;
	}

	cpuvaddr[i++] = rctx->datbuf.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
			      SE_ADDR_HI_SZ(rctx->datbuf.size));
	cpuvaddr[i++] = rctx->digest.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
			      SE_ADDR_HI_SZ(rctx->digest.size));
	if (rctx->key_id) {
		cpuvaddr[i++] = host1x_opcode_setpayload(1);
		cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_CRYPTO_CFG);
		cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(1);
	cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_OPERATION);
	cpuvaddr[i++] = SE_SHA_OP_WRSTALL |
			SE_SHA_OP_START |
			SE_SHA_OP_LASTBUF;
	cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
	cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
			host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

	dev_dbg(se->dev, "msg len %llu msg left %llu cfg %#x",
		msg_len, msg_left, rctx->config);

	return i;
}

/* Save the intermediate digest from the SE result registers into the request context */
static void tegra_sha_copy_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx)
{
	int i;

	for (i = 0; i < HASH_RESULT_REG_COUNT; i++)
		rctx->result[i] = readl(se->base + se->hw->regs->result + (i * 4));
}

/* Restore a previously saved intermediate digest back into the SE result registers */
static void tegra_sha_paste_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx)
{
	int i;

	for (i = 0; i < HASH_RESULT_REG_COUNT; i++)
		writel(rctx->result[i],
		       se->base + se->hw->regs->result + (i * 4));
}

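/*
 * Process one update() chunk on the hardware. Input data is staged through a
 * DMA-coherent bounce buffer; the trailing partial block (or, when the length
 * is block-aligned, the last full block) is kept back in the residue buffer so
 * that final() always has data left to close the hash with.
 */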
static int tegra_sha_do_update(struct ahash_request *req)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	unsigned int nblks, nresidue, size, ret;
	u32 *cpuvaddr = ctx->se->cmdbuf->addr;

	nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size;
	nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size;

	/*
	 * If nbytes is a multiple of block size and there is no residue,
	 * then reserve the last block as residue, to be processed during
	 * final().
	 */
	if (!nresidue && nblks) {
		nresidue = rctx->blk_size;
		nblks--;
	}

	rctx->src_sg = req->src;
	rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;
	rctx->total_len += rctx->datbuf.size;

	/*
	 * If nbytes is less than a block size, copy it to the residue
	 * buffer and return. The bytes will be processed in final().
	 */
	if (nblks < 1) {
		scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size,
					 rctx->src_sg, 0, req->nbytes, 0);

		rctx->residue.size += req->nbytes;
		return 0;
	}

	/* Copy the previous residue first */
	if (rctx->residue.size)
		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);

	scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size,
				 rctx->src_sg, 0, req->nbytes - nresidue, 0);

	scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg,
				 req->nbytes - nresidue, nresidue, 0);

	/* Update residue value with the residue after current block */
	rctx->residue.size = nresidue;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_HASH_REG;

	/*
	 * If this is not the first 'update' call, paste the previously copied
	 * intermediate results into the registers so that they get picked up.
	 * This is to support the import/export functionality.
	 */
	if (!(rctx->task & SHA_FIRST))
		tegra_sha_paste_hash_result(ctx->se, rctx);

	size = tegra_sha_prep_cmd(ctx->se, cpuvaddr, rctx);

	ret = tegra_se_host1x_submit(ctx->se, size);

	/*
	 * If this is not the final update, copy the intermediate results
	 * from the registers so that they can be used in the next 'update'
	 * call. This is to support the import/export functionality.
	 */
	if (!(rctx->task & SHA_FINAL))
		tegra_sha_copy_hash_result(ctx->se, rctx);

	return ret;
}

static int tegra_sha_do_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int size, ret = 0;

	memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);
	rctx->datbuf.size = rctx->residue.size;
	rctx->total_len += rctx->residue.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(se, cpuvaddr, rctx);

	ret = tegra_se_host1x_submit(se, size);
	if (ret)
		goto out;

	/* Copy result */
	memcpy(req->result, rctx->digest.buf, rctx->digest.size);

out:
	dma_free_coherent(se->dev, SE_SHA_BUFLEN,
			  rctx->datbuf.buf, rctx->datbuf.addr);
	dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm),
			  rctx->residue.buf, rctx->residue.addr);
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
	return ret;
}

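/*
 * crypto_engine callback: run the pending SHA_UPDATE and/or SHA_FINAL work
 * for a request and then complete it on the engine.
 */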
static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	int ret = 0;

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		rctx->task &= ~SHA_UPDATE;
	}

	if (rctx->task & SHA_FINAL) {
		ret = tegra_sha_do_final(req);
		rctx->task &= ~SHA_FINAL;
	}

	crypto_finalize_hash_request(se->engine, req, ret);

	return 0;
}

static void tegra_sha_init_fallback(struct crypto_ahash *tfm, struct tegra_sha_ctx *ctx,
				    const char *algname)
{
	unsigned int statesize;

	ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC |
					       CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback_tfm)) {
		dev_warn(ctx->se->dev,
			 "failed to allocate fallback for %s\n", algname);
		ctx->fallback_tfm = NULL;
		return;
	}

	statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	if (statesize > sizeof(struct tegra_sha_reqctx))
		crypto_ahash_set_statesize(tfm, statesize);

	/* Update reqsize if fallback is added */
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct tegra_sha_reqctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));
}

static int tegra_sha_cra_init(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ahash *ahash_tfm = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct tegra_se_alg *se_alg;
	const char *algname;
	int ret;

	algname = crypto_tfm_alg_name(tfm);
	se_alg = container_of(alg, struct tegra_se_alg, alg.ahash.base);

	crypto_ahash_set_reqsize(ahash_tfm, sizeof(struct tegra_sha_reqctx));

	ctx->se = se_alg->se_dev;
	ctx->fallback = false;
	ctx->key_id = 0;

	ret = se_algname_to_algid(algname);
	if (ret < 0) {
		dev_err(ctx->se->dev, "invalid algorithm\n");
		return ret;
	}

	if (se_alg->alg_base)
		tegra_sha_init_fallback(ahash_tfm, ctx, algname);

	ctx->alg = ret;

	return 0;
}

static void tegra_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg);
}

static int tegra_sha_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;

	if (ctx->fallback)
		return tegra_sha_fallback_init(req);

	rctx->total_len = 0;
	rctx->datbuf.size = 0;
	rctx->residue.size = 0;
	rctx->key_id = ctx->key_id;
	rctx->task = SHA_FIRST;
	rctx->alg = ctx->alg;
	rctx->blk_size = crypto_ahash_blocksize(tfm);
	rctx->digest.size = crypto_ahash_digestsize(tfm);

	rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	rctx->datbuf.buf = dma_alloc_coherent(se->dev, SE_SHA_BUFLEN,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		goto datbuf_fail;

	return 0;

datbuf_fail:
	dma_free_coherent(se->dev, rctx->blk_size, rctx->residue.buf,
			  rctx->residue.addr);
resbuf_fail:
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
digbuf_fail:
	return -ENOMEM;
}

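/*
 * HMAC key handling: keys with an AES-compatible length (128/192/256 bit)
 * are loaded into a hardware keyslot; any other length switches the
 * transform to the software fallback.
 */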
static int tegra_hmac_fallback_setkey(struct tegra_sha_ctx *ctx, const u8 *key,
				      unsigned int keylen)
{
	if (!ctx->fallback_tfm) {
		dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen);
		return -EINVAL;
	}

	ctx->fallback = true;
	return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen);
}

static int tegra_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (aes_check_keylen(keylen))
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ctx->fallback = false;

	return tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id);
}

static int tegra_sha_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_update(req);

	rctx->task |= SHA_UPDATE;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_final(req);

	rctx->task |= SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_finup(req);

	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret;

	if (ctx->fallback)
		return tegra_sha_fallback_digest(req);

	ret = tegra_sha_init(req);
	if (ret)
		return ret;

	rctx->task |= SHA_UPDATE | SHA_FINAL;
	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_export(req, out);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int tegra_sha_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_import(req, in);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}

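/*
 * Hash algorithms registered with the crypto engine: plain SHA-1/SHA-2/SHA-3
 * digests and HMAC-SHA-2 variants. The HMAC entries name an alg_base so that
 * a software fallback can be allocated for key lengths the hardware path
 * does not accept.
 */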
static struct tegra_se_alg tegra_hash_algs[] = {
	{
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "tegra-se-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "tegra-se-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "tegra-se-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "tegra-se-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "tegra-se-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
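		/* SHA-3 digests, handled directly by the SE hash engine */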
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "tegra-se-sha3-224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "tegra-se-sha3-256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "tegra-se-sha3-384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "tegra-se-sha3-512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
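		/*
		 * HMAC-SHA2 entries: tegra_hmac_setkey() uses a hardware
		 * keyslot for AES-sized keys and the software fallback
		 * otherwise.
		 */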
		.alg_base = "sha224",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "tegra-se-hmac-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha256",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "tegra-se-hmac-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha384",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "tegra-se-hmac-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha512",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "tegra-se-hmac-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}
};

/* Build the key access control (KAC) manifest word for an HMAC key of the given length */
static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
	int manifest;

	manifest = SE_KAC_USER_NS;

	switch (alg) {
	case SE_ALG_HMAC_SHA224:
	case SE_ALG_HMAC_SHA256:
	case SE_ALG_HMAC_SHA384:
	case SE_ALG_HMAC_SHA512:
		manifest |= SE_KAC_HMAC;
		break;
	default:
		return -EINVAL;
	}

	switch (keylen) {
	case AES_KEYSIZE_128:
		manifest |= SE_KAC_SIZE_128;
		break;
	case AES_KEYSIZE_192:
		manifest |= SE_KAC_SIZE_192;
		break;
	case AES_KEYSIZE_256:
	default:
		manifest |= SE_KAC_SIZE_256;
		break;
	}

	return manifest;
}

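/*
 * Register all hash algorithms in tegra_hash_algs[] with the crypto engine;
 * on failure, unregister the ones already registered.
 */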
int tegra_init_hash(struct tegra_se *se)
{
	struct ahash_engine_alg *alg;
	int i, ret;

	se->manifest = tegra_hash_kac_manifest;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
		tegra_hash_algs[i].se_dev = se;
		alg = &tegra_hash_algs[i].alg.ahash;

		ret = crypto_engine_register_ahash(alg);
		if (ret) {
			dev_err(se->dev, "failed to register %s\n",
				alg->base.halg.base.cra_name);
			goto sha_err;
		}
	}

	return 0;

sha_err:
	while (i--)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);

	return ret;
}

void tegra_deinit_hash(struct tegra_se *se)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);
}