// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
 * Crypto driver to handle HASH algorithms using NVIDIA Security Engine.
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "tegra-se.h"

struct tegra_sha_ctx {
	struct tegra_se *se;
	unsigned int alg;
	bool fallback;
	u32 key_id;
	struct crypto_ahash *fallback_tfm;
};

struct tegra_sha_reqctx {
	struct scatterlist *src_sg;
	struct tegra_se_datbuf datbuf;
	struct tegra_se_datbuf residue;
	struct tegra_se_datbuf digest;
	struct tegra_se_datbuf intr_res;
	unsigned int alg;
	unsigned int config;
	unsigned int total_len;
	unsigned int blk_size;
	unsigned int task;
	u32 key_id;
	u32 result[HASH_RESULT_REG_COUNT];
	struct ahash_request fallback_req;
};

static int tegra_sha_get_config(u32 alg)
{
	int cfg = 0;

	switch (alg) {
	case SE_ALG_SHA1:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA1;
		break;

	case SE_ALG_HMAC_SHA224:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA224;
		break;

	case SE_ALG_HMAC_SHA256:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA256;
		break;

	case SE_ALG_HMAC_SHA384:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA384;
		break;

	case SE_ALG_HMAC_SHA512:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA512;
		break;

	case SE_ALG_SHA3_224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_224;
		break;
	case SE_ALG_SHA3_256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_256;
		break;
	case SE_ALG_SHA3_384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_384;
		break;
	case SE_ALG_SHA3_512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_512;
		break;
	default:
		return -EINVAL;
	}

	return cfg;
}
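
/*
 * Illustrative note (not part of the hardware programming sequence): because
 * each HMAC case above falls through into its plain-SHA counterpart, the
 * returned config word carries all three fields, e.g.
 *
 *	tegra_sha_get_config(SE_ALG_HMAC_SHA256) ==
 *		SE_SHA_ENC_ALG_HMAC | SE_SHA_ENC_ALG_SHA | SE_SHA_ENC_MODE_SHA256
 *
 * whereas tegra_sha_get_config(SE_ALG_SHA256) sets only the last two.
 */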

static int tegra_sha_fallback_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_init(&rctx->fallback_req);
}

static int tegra_sha_fallback_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, NULL, req->nbytes);

	return crypto_ahash_update(&rctx->fallback_req);
}

static int tegra_sha_fallback_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, NULL, req->result, 0);

	return crypto_ahash_final(&rctx->fallback_req);
}

static int tegra_sha_fallback_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, req->result,
				req->nbytes);

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int tegra_sha_fallback_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, req->result,
				req->nbytes);

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int tegra_sha_fallback_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int tegra_sha_fallback_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_export(&rctx->fallback_req, out);
}

static int tegra_se_insert_hash_result(struct tegra_sha_ctx *ctx, u32 *cpuvaddr,
				       struct tegra_sha_reqctx *rctx)
{
	__be32 *res_be = (__be32 *)rctx->intr_res.buf;
	u32 *res = (u32 *)rctx->intr_res.buf;
	int i = 0, j;

	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(HASH_RESULT_REG_COUNT);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_HASH_RESULT);

	for (j = 0; j < HASH_RESULT_REG_COUNT; j++) {
		int idx = j;

		/*
		 * The initial, intermediate and final hash values of SHA-384
		 * and SHA-512 in the SHA_HASH_RESULT registers follow the
		 * byte layout below.
		 *
		 * +---------------+------------+
		 * | HASH_RESULT_0 | B4...B7    |
		 * +---------------+------------+
		 * | HASH_RESULT_1 | B0...B3    |
		 * +---------------+------------+
		 * | HASH_RESULT_2 | B12...B15  |
		 * +---------------+------------+
		 * | HASH_RESULT_3 | B8...B11   |
		 * +---------------+------------+
		 * |          ......            |
		 * +---------------+------------+
		 * | HASH_RESULT_14| B60...B63  |
		 * +---------------+------------+
		 * | HASH_RESULT_15| B56...B59  |
		 * +---------------+------------+
		 */
		if (ctx->alg == SE_ALG_SHA384 || ctx->alg == SE_ALG_SHA512)
			idx = (j % 2) ? j - 1 : j + 1;

		/*
		 * For SHA-1, SHA-224, SHA-256, SHA-384 and SHA-512, the
		 * initial, intermediate and final hash values stored in the
		 * SHA_HASH_RESULT registers are NOT in little-endian byte
		 * order.
		 */
		if (ctx->alg <= SE_ALG_SHA512)
			cpuvaddr[i++] = be32_to_cpu(res_be[idx]);
		else
			cpuvaddr[i++] = res[idx];
	}

	return i;
}
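
/*
 * Worked example (illustrative): for SE_ALG_SHA512 the swap above pairs
 * adjacent registers, i.e. j -> idx maps 0->1, 1->0, 2->3, 3->2, ...,
 * 14->15, 15->14, so the two 32-bit halves of each 64-bit hash word are
 * exchanged before the byte-swapped values are written back into
 * SE_SHA_HASH_RESULT.
 */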

static int tegra_sha_prep_cmd(struct tegra_sha_ctx *ctx, u32 *cpuvaddr,
			      struct tegra_sha_reqctx *rctx)
{
	struct tegra_se *se = ctx->se;
	u64 msg_len, msg_left;
	int i = 0;

	msg_len = rctx->total_len * 8;
	msg_left = rctx->datbuf.size * 8;

	/*
	 * If IN_ADDR_HI_0.SZ is greater than SHA_MSG_LEFT_[0-3] programmed to
	 * the HASH engine, the HW treats it as the last buffer and processes
	 * the data. Therefore, add an extra byte to msg_left if this is not
	 * the last buffer.
	 */
	if (rctx->task & SHA_UPDATE) {
		msg_left += 8;
		msg_len += 8;
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(8);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_MSG_LENGTH);
	cpuvaddr[i++] = lower_32_bits(msg_len);
	cpuvaddr[i++] = upper_32_bits(msg_len);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = lower_32_bits(msg_left);
	cpuvaddr[i++] = upper_32_bits(msg_left);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(2);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_CFG);
	cpuvaddr[i++] = rctx->config;

	if (rctx->task & SHA_FIRST) {
		cpuvaddr[i++] = SE_SHA_TASK_HASH_INIT;
		rctx->task &= ~SHA_FIRST;
	} else {
		/*
		 * If it isn't the first task, program the HASH_RESULT register
		 * with the intermediate result from the previous task.
		 */
		i += tegra_se_insert_hash_result(ctx, cpuvaddr + i, rctx);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(4);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_IN_ADDR);
	cpuvaddr[i++] = rctx->datbuf.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
			      SE_ADDR_HI_SZ(rctx->datbuf.size));

	if (rctx->task & SHA_UPDATE) {
		cpuvaddr[i++] = rctx->intr_res.addr;
		cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->intr_res.addr)) |
				      SE_ADDR_HI_SZ(rctx->intr_res.size));
	} else {
		cpuvaddr[i++] = rctx->digest.addr;
		cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
				      SE_ADDR_HI_SZ(rctx->digest.size));
	}

	if (rctx->key_id) {
		cpuvaddr[i++] = host1x_opcode_setpayload(1);
		cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_CRYPTO_CFG);
		cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(1);
	cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_OPERATION);
	cpuvaddr[i++] = SE_SHA_OP_WRSTALL | SE_SHA_OP_START |
			SE_SHA_OP_LASTBUF;
	cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
	cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
			host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

	dev_dbg(se->dev, "msg len %llu msg left %llu sz %zd cfg %#x",
		msg_len, msg_left, rctx->datbuf.size, rctx->config);

	return i;
}
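
/*
 * Illustrative numbers only: for an update task with rctx->total_len = 384
 * (which already includes the current buffer) and rctx->datbuf.size = 128,
 * the sequence above programs SE_SHA_MSG_LENGTH with 384 * 8 + 8 and
 * SE_SHA_MSG_LEFT with 128 * 8 + 8 (values in bits); the extra byte keeps
 * the engine from treating this buffer as the final one.
 */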

static int tegra_sha_do_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;

	if (ctx->fallback)
		return tegra_sha_fallback_init(req);

	rctx->total_len = 0;
	rctx->datbuf.size = 0;
	rctx->residue.size = 0;
	rctx->key_id = ctx->key_id;
	rctx->task |= SHA_FIRST;
	rctx->alg = ctx->alg;
	rctx->blk_size = crypto_ahash_blocksize(tfm);
	rctx->digest.size = crypto_ahash_digestsize(tfm);

	rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	rctx->intr_res.size = HASH_RESULT_REG_COUNT * 4;
	rctx->intr_res.buf = dma_alloc_coherent(se->dev, rctx->intr_res.size,
						&rctx->intr_res.addr, GFP_KERNEL);
	if (!rctx->intr_res.buf)
		goto intr_res_fail;

	return 0;

intr_res_fail:
	dma_free_coherent(se->dev, rctx->residue.size, rctx->residue.buf,
			  rctx->residue.addr);
resbuf_fail:
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
digbuf_fail:
	return -ENOMEM;
}

static int tegra_sha_do_update(struct ahash_request *req)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct tegra_se *se = ctx->se;
	unsigned int nblks, nresidue, size;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int ret;

	nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size;
	nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size;

	/*
	 * If nbytes is a multiple of block size and there is no residue,
	 * then reserve the last block as residue, to be processed during
	 * final().
	 */
	if (!nresidue && nblks) {
		nresidue = rctx->blk_size;
		nblks--;
	}

	rctx->src_sg = req->src;
	rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;

	/*
	 * If nbytes is less than a block size, copy it into the residue
	 * buffer and return. The bytes will be processed in final().
	 */
	if (nblks < 1) {
		scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size,
					 rctx->src_sg, 0, req->nbytes, 0);
		rctx->residue.size += req->nbytes;

		return 0;
	}

	rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		return -ENOMEM;

	/* Copy the previous residue first */
	if (rctx->residue.size)
		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);

	scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size,
				 rctx->src_sg, 0, req->nbytes - nresidue, 0);

	scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg,
				 req->nbytes - nresidue, nresidue, 0);

	/* Update residue value with the residue after current block */
	rctx->residue.size = nresidue;
	rctx->total_len += rctx->datbuf.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(ctx, cpuvaddr, rctx);
	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);

	dma_free_coherent(se->dev, rctx->datbuf.size,
			  rctx->datbuf.buf, rctx->datbuf.addr);

	return ret;
}
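
/*
 * Worked example (numbers are illustrative): with a 64-byte block size, a
 * pending residue of 10 bytes and an update of 118 bytes, nblks = 2 and
 * nresidue = 0, so the last full block is held back: nresidue becomes 64,
 * nblks becomes 1, and rctx->datbuf.size = (118 + 10) - 64 = 64 bytes are
 * sent to the engine while the remaining 64 bytes wait in the residue
 * buffer for the next update() or for final().
 */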

static int tegra_sha_do_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int size, ret = 0;

	if (rctx->residue.size) {
		rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size,
						      &rctx->datbuf.addr, GFP_KERNEL);
		if (!rctx->datbuf.buf) {
			ret = -ENOMEM;
			goto out_free;
		}

		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);
	}

	rctx->datbuf.size = rctx->residue.size;
	rctx->total_len += rctx->residue.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(ctx, cpuvaddr, rctx);
	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);
	if (ret)
		goto out;

	/* Copy result */
	memcpy(req->result, rctx->digest.buf, rctx->digest.size);

out:
	if (rctx->residue.size)
		dma_free_coherent(se->dev, rctx->datbuf.size,
				  rctx->datbuf.buf, rctx->datbuf.addr);
out_free:
	dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm),
			  rctx->residue.buf, rctx->residue.addr);
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);

	dma_free_coherent(se->dev, rctx->intr_res.size, rctx->intr_res.buf,
			  rctx->intr_res.addr);

	return ret;
}

static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	int ret = 0;

	if (rctx->task & SHA_INIT) {
		ret = tegra_sha_do_init(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_INIT;
	}

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_UPDATE;
	}

	if (rctx->task & SHA_FINAL) {
		ret = tegra_sha_do_final(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_FINAL;
	}

out:
	crypto_finalize_hash_request(se->engine, req, ret);

	return 0;
}

static void tegra_sha_init_fallback(struct crypto_ahash *tfm, struct tegra_sha_ctx *ctx,
				    const char *algname)
{
	unsigned int statesize;

	ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC |
					       CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback_tfm)) {
		dev_warn(ctx->se->dev,
			 "failed to allocate fallback for %s\n", algname);
		ctx->fallback_tfm = NULL;
		return;
	}

	statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	if (statesize > sizeof(struct tegra_sha_reqctx))
		crypto_ahash_set_statesize(tfm, statesize);

	/* Update reqsize if fallback is added */
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct tegra_sha_reqctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));
}

static int tegra_sha_cra_init(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ahash *ahash_tfm = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct tegra_se_alg *se_alg;
	const char *algname;
	int ret;

	algname = crypto_tfm_alg_name(tfm);
	se_alg = container_of(alg, struct tegra_se_alg, alg.ahash.base);

	crypto_ahash_set_reqsize(ahash_tfm, sizeof(struct tegra_sha_reqctx));

	ctx->se = se_alg->se_dev;
	ctx->fallback = false;
	ctx->key_id = 0;

	ret = se_algname_to_algid(algname);
	if (ret < 0) {
		dev_err(ctx->se->dev, "invalid algorithm\n");
		return ret;
	}

	if (se_alg->alg_base)
		tegra_sha_init_fallback(ahash_tfm, ctx, algname);

	ctx->alg = ret;

	return 0;
}

static void tegra_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg);
}

static int tegra_hmac_fallback_setkey(struct tegra_sha_ctx *ctx, const u8 *key,
				      unsigned int keylen)
{
	if (!ctx->fallback_tfm) {
		dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen);
		return -EINVAL;
	}

	ctx->fallback = true;
	return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen);
}

static int tegra_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret;

	if (aes_check_keylen(keylen))
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id);
	if (ret)
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ctx->fallback = false;

	return 0;
}
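
/*
 * Illustrative note: aes_check_keylen() accepts only 16-, 24- and 32-byte
 * keys, so only those HMAC key sizes are submitted to a hardware keyslot via
 * tegra_key_submit(); any other key length (for example a 20-byte key), or a
 * failed keyslot submission, is redirected to the software fallback by
 * tegra_hmac_fallback_setkey().
 */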

static int tegra_sha_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	rctx->task = SHA_INIT;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_update(req);

	rctx->task |= SHA_UPDATE;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_final(req);

	rctx->task |= SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_finup(req);

	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_digest(req);

	rctx->task |= SHA_INIT | SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_export(req, out);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int tegra_sha_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_import(req, in);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}
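
/*
 * Note (descriptive, not functional): the non-fallback export/import above
 * simply copy the whole struct tegra_sha_reqctx, which is why every entry in
 * the algorithm table below sets .halg.statesize to
 * sizeof(struct tegra_sha_reqctx), and why tegra_sha_init_fallback() bumps
 * the state size when a software fallback needs more room.
 */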

static struct tegra_se_alg tegra_hash_algs[] = {
	{
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "tegra-se-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "tegra-se-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "tegra-se-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "tegra-se-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "tegra-se-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "tegra-se-sha3-224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "tegra-se-sha3-256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "tegra-se-sha3-384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "tegra-se-sha3-512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha224",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "tegra-se-hmac-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha256",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "tegra-se-hmac-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha384",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "tegra-se-hmac-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha512",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "tegra-se-hmac-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}
};

static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
	int manifest;

	manifest = SE_KAC_USER_NS;

	switch (alg) {
	case SE_ALG_HMAC_SHA224:
	case SE_ALG_HMAC_SHA256:
	case SE_ALG_HMAC_SHA384:
	case SE_ALG_HMAC_SHA512:
		manifest |= SE_KAC_HMAC;
		break;
	default:
		return -EINVAL;
	}

	switch (keylen) {
	case AES_KEYSIZE_128:
		manifest |= SE_KAC_SIZE_128;
		break;
	case AES_KEYSIZE_192:
		manifest |= SE_KAC_SIZE_192;
		break;
	case AES_KEYSIZE_256:
	default:
		manifest |= SE_KAC_SIZE_256;
		break;
	}

	return manifest;
}
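
/*
 * Illustrative note: for a 32-byte hmac(sha256) key the manifest built above
 * is SE_KAC_USER_NS | SE_KAC_HMAC | SE_KAC_SIZE_256; any algorithm other than
 * the four HMAC variants makes the helper return -EINVAL instead.
 */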

int tegra_init_hash(struct tegra_se *se)
{
	struct ahash_engine_alg *alg;
	int i, ret;

	se->manifest = tegra_hash_kac_manifest;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
		tegra_hash_algs[i].se_dev = se;
		alg = &tegra_hash_algs[i].alg.ahash;

		ret = crypto_engine_register_ahash(alg);
		if (ret) {
			dev_err(se->dev, "failed to register %s\n",
				alg->base.halg.base.cra_name);
			goto sha_err;
		}
	}

	return 0;

sha_err:
	while (i--)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);

	return ret;
}

void tegra_deinit_hash(struct tegra_se *se)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);
}
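
/*
 * Usage sketch (assumption, not part of this file): tegra_init_hash() and
 * tegra_deinit_hash() are expected to be called by the core SE driver once
 * the struct tegra_se instance (engine, cmdbuf, syncpt) has been set up,
 * e.g. from its probe/remove path:
 *
 *	ret = tegra_init_hash(se);
 *	if (ret)
 *		return ret;
 *	...
 *	tegra_deinit_hash(se);
 */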