// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
 * Crypto driver to handle HASH algorithms using NVIDIA Security Engine.
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "tegra-se.h"

struct tegra_sha_ctx {
	struct tegra_se *se;
	unsigned int alg;
	bool fallback;
	u32 key_id;
	struct crypto_ahash *fallback_tfm;
};

struct tegra_sha_reqctx {
	struct scatterlist *src_sg;
	struct tegra_se_datbuf datbuf;
	struct tegra_se_datbuf residue;
	struct tegra_se_datbuf digest;
	struct tegra_se_datbuf intr_res;
	unsigned int alg;
	unsigned int config;
	unsigned int total_len;
	unsigned int blk_size;
	unsigned int task;
	u32 key_id;
	u32 result[HASH_RESULT_REG_COUNT];
	struct ahash_request fallback_req;
};

static int tegra_sha_get_config(u32 alg)
{
	int cfg = 0;

	switch (alg) {
	case SE_ALG_SHA1:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA1;
		break;

	case SE_ALG_HMAC_SHA224:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA224;
		break;

	case SE_ALG_HMAC_SHA256:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA256;
		break;

	case SE_ALG_HMAC_SHA384:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA384;
		break;

	case SE_ALG_HMAC_SHA512:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA512;
		break;

	case SE_ALG_SHA3_224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_224;
		break;
	case SE_ALG_SHA3_256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_256;
		break;
	case SE_ALG_SHA3_384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_384;
		break;
	case SE_ALG_SHA3_512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_512;
		break;
	default:
		return -EINVAL;
	}

	return cfg;
}
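
/*
 * Software fallback plumbing: when ctx->fallback is set (e.g. an HMAC key
 * that cannot be programmed into an SE keyslot), the helpers below simply
 * forward the request to the allocated fallback ahash transform.
 */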
static int tegra_sha_fallback_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_init(&rctx->fallback_req);
}

static int tegra_sha_fallback_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, NULL, req->nbytes);

	return crypto_ahash_update(&rctx->fallback_req);
}

static int tegra_sha_fallback_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, NULL, req->result, 0);

	return crypto_ahash_final(&rctx->fallback_req);
}

static int tegra_sha_fallback_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, req->result,
				req->nbytes);

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int tegra_sha_fallback_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, req->result,
				req->nbytes);

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int tegra_sha_fallback_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int tegra_sha_fallback_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_export(&rctx->fallback_req, out);
}
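
/*
 * Emit host1x opcodes that reload the SE_SHA_HASH_RESULT registers with the
 * intermediate digest saved in rctx->intr_res by the previous task, applying
 * the word/byte-order fixups described below. Returns the number of command
 * words written to cpuvaddr.
 */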
static int tegra_se_insert_hash_result(struct tegra_sha_ctx *ctx, u32 *cpuvaddr,
				       struct tegra_sha_reqctx *rctx)
{
	__be32 *res_be = (__be32 *)rctx->intr_res.buf;
	u32 *res = (u32 *)rctx->intr_res.buf;
	int i = 0, j;

	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(HASH_RESULT_REG_COUNT);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_HASH_RESULT);

	for (j = 0; j < HASH_RESULT_REG_COUNT; j++) {
		int idx = j;

		/*
		 * The initial, intermediate and final hash values of SHA-384
		 * and SHA-512 in the SHA_HASH_RESULT registers follow the
		 * byte layout below.
		 *
		 * +---------------+------------+
		 * | HASH_RESULT_0 | B4...B7    |
		 * +---------------+------------+
		 * | HASH_RESULT_1 | B0...B3    |
		 * +---------------+------------+
		 * | HASH_RESULT_2 | B12...B15  |
		 * +---------------+------------+
		 * | HASH_RESULT_3 | B8...B11   |
		 * +---------------+------------+
		 * |     ......                 |
		 * +---------------+------------+
		 * | HASH_RESULT_14| B60...B63  |
		 * +---------------+------------+
		 * | HASH_RESULT_15| B56...B59  |
		 * +---------------+------------+
		 */
		if (ctx->alg == SE_ALG_SHA384 || ctx->alg == SE_ALG_SHA512)
			idx = (j % 2) ? j - 1 : j + 1;

		/*
		 * For SHA-1, SHA-224, SHA-256, SHA-384 and SHA-512, the
		 * initial, intermediate and final hash values stored in the
		 * SHA_HASH_RESULT registers are NOT in little-endian byte
		 * order.
		 */
		if (ctx->alg <= SE_ALG_SHA512)
			cpuvaddr[i++] = be32_to_cpu(res_be[idx]);
		else
			cpuvaddr[i++] = res[idx];
	}

	return i;
}
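
/*
 * Build the host1x command stream for one SE SHA operation: message
 * length/left counters, SE_SHA_CFG, input and output buffer addresses,
 * an optional key index for HMAC, the operation start and the syncpoint
 * increment. Returns the number of command words written.
 */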
static int tegra_sha_prep_cmd(struct tegra_sha_ctx *ctx, u32 *cpuvaddr,
			      struct tegra_sha_reqctx *rctx)
{
	struct tegra_se *se = ctx->se;
	u64 msg_len, msg_left;
	int i = 0;

	msg_len = rctx->total_len * 8;
	msg_left = rctx->datbuf.size * 8;

	/*
	 * If IN_ADDR_HI_0.SZ > SHA_MSG_LEFT_[0-3] to the HASH engine,
	 * HW treats it as the last buffer and processes the data.
	 * Therefore, add an extra byte to msg_left if it is not the
	 * last buffer.
	 */
	if (rctx->task & SHA_UPDATE) {
		msg_left += 8;
		msg_len += 8;
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(8);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_MSG_LENGTH);
	cpuvaddr[i++] = lower_32_bits(msg_len);
	cpuvaddr[i++] = upper_32_bits(msg_len);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = lower_32_bits(msg_left);
	cpuvaddr[i++] = upper_32_bits(msg_left);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(2);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_CFG);
	cpuvaddr[i++] = rctx->config;

	if (rctx->task & SHA_FIRST) {
		cpuvaddr[i++] = SE_SHA_TASK_HASH_INIT;
		rctx->task &= ~SHA_FIRST;
	} else {
		/*
		 * If it isn't the first task, program the HASH_RESULT register
		 * with the intermediate result from the previous task.
		 */
		i += tegra_se_insert_hash_result(ctx, cpuvaddr + i, rctx);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(4);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_IN_ADDR);
	cpuvaddr[i++] = rctx->datbuf.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
			      SE_ADDR_HI_SZ(rctx->datbuf.size));

	if (rctx->task & SHA_UPDATE) {
		cpuvaddr[i++] = rctx->intr_res.addr;
		cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->intr_res.addr)) |
				      SE_ADDR_HI_SZ(rctx->intr_res.size));
	} else {
		cpuvaddr[i++] = rctx->digest.addr;
		cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
				      SE_ADDR_HI_SZ(rctx->digest.size));
	}

	if (rctx->key_id) {
		cpuvaddr[i++] = host1x_opcode_setpayload(1);
		cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_CRYPTO_CFG);
		cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(1);
	cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_OPERATION);
	cpuvaddr[i++] = SE_SHA_OP_WRSTALL | SE_SHA_OP_START |
			SE_SHA_OP_LASTBUF;
	cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
	cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
			host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

	dev_dbg(se->dev, "msg len %llu msg left %llu sz %zd cfg %#x",
		msg_len, msg_left, rctx->datbuf.size, rctx->config);

	return i;
}

static int tegra_sha_do_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;

	if (ctx->fallback)
		return tegra_sha_fallback_init(req);

	rctx->total_len = 0;
	rctx->datbuf.size = 0;
	rctx->residue.size = 0;
	rctx->key_id = ctx->key_id;
	rctx->task |= SHA_FIRST;
	rctx->alg = ctx->alg;
	rctx->blk_size = crypto_ahash_blocksize(tfm);
	rctx->digest.size = crypto_ahash_digestsize(tfm);

	rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	rctx->intr_res.size = HASH_RESULT_REG_COUNT * 4;
	rctx->intr_res.buf = dma_alloc_coherent(se->dev, rctx->intr_res.size,
						&rctx->intr_res.addr, GFP_KERNEL);
	if (!rctx->intr_res.buf)
		goto intr_res_fail;

	return 0;

intr_res_fail:
	/* The residue buffer was allocated with blk_size, so free it with the same size */
	dma_free_coherent(se->dev, rctx->blk_size, rctx->residue.buf,
			  rctx->residue.addr);
resbuf_fail:
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
digbuf_fail:
	return -ENOMEM;
}
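
/*
 * Buffer the request so the engine only ever sees whole blocks; at most one
 * block is kept back as residue so that final() always has data to process.
 * Data is staged in a coherent DMA buffer and submitted as a single SE task.
 */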
static int tegra_sha_do_update(struct ahash_request *req)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct tegra_se *se = ctx->se;
	unsigned int nblks, nresidue, size;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int ret;

	nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size;
	nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size;

	/*
	 * If nbytes is a multiple of the block size and there is no residue,
	 * reserve the last block as residue for final() to process.
	 */
	if (!nresidue && nblks) {
		nresidue = rctx->blk_size;
		nblks--;
	}

	rctx->src_sg = req->src;
	rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;

	/*
	 * If nbytes is less than a block size, copy it into the residue
	 * buffer and return. The bytes will be processed in final().
	 */
	if (nblks < 1) {
		scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size,
					 rctx->src_sg, 0, req->nbytes, 0);
		rctx->residue.size += req->nbytes;

		return 0;
	}

	rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		return -ENOMEM;

	/* Copy the previous residue first */
	if (rctx->residue.size)
		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);

	scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size,
				 rctx->src_sg, 0, req->nbytes - nresidue, 0);

	scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg,
				 req->nbytes - nresidue, nresidue, 0);

	/* Update residue value with the residue after current block */
	rctx->residue.size = nresidue;
	rctx->total_len += rctx->datbuf.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(ctx, cpuvaddr, rctx);
	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);

	dma_free_coherent(se->dev, rctx->datbuf.size,
			  rctx->datbuf.buf, rctx->datbuf.addr);

	return ret;
}

static int tegra_sha_do_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int size, ret = 0;

	if (rctx->residue.size) {
		rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size,
						      &rctx->datbuf.addr, GFP_KERNEL);
		if (!rctx->datbuf.buf) {
			ret = -ENOMEM;
			goto out_free;
		}

		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);
	}

	rctx->datbuf.size = rctx->residue.size;
	rctx->total_len += rctx->residue.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(ctx, cpuvaddr, rctx);
	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);
	if (ret)
		goto out;

	/* Copy result */
	memcpy(req->result, rctx->digest.buf, rctx->digest.size);

out:
	if (rctx->residue.size)
		dma_free_coherent(se->dev, rctx->datbuf.size,
				  rctx->datbuf.buf, rctx->datbuf.addr);
out_free:
	dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm),
			  rctx->residue.buf, rctx->residue.addr);
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);

	dma_free_coherent(se->dev, rctx->intr_res.size, rctx->intr_res.buf,
			  rctx->intr_res.addr);

	return ret;
}
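
/*
 * Crypto engine callback: execute whichever of the INIT, UPDATE and FINAL
 * steps are requested in rctx->task, then complete the request.
 */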
static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	int ret = 0;

	if (rctx->task & SHA_INIT) {
		ret = tegra_sha_do_init(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_INIT;
	}

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_UPDATE;
	}

	if (rctx->task & SHA_FINAL) {
		ret = tegra_sha_do_final(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_FINAL;
	}

out:
	crypto_finalize_hash_request(se->engine, req, ret);

	return 0;
}

static void tegra_sha_init_fallback(struct crypto_ahash *tfm, struct tegra_sha_ctx *ctx,
				    const char *algname)
{
	unsigned int statesize;

	ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC |
					       CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback_tfm)) {
		dev_warn(ctx->se->dev,
			 "failed to allocate fallback for %s\n", algname);
		ctx->fallback_tfm = NULL;
		return;
	}

	statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	if (statesize > sizeof(struct tegra_sha_reqctx))
		crypto_ahash_set_statesize(tfm, statesize);

	/* Update reqsize if fallback is added */
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct tegra_sha_reqctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));
}

static int tegra_sha_cra_init(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ahash *ahash_tfm = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct tegra_se_alg *se_alg;
	const char *algname;
	int ret;

	algname = crypto_tfm_alg_name(tfm);
	se_alg = container_of(alg, struct tegra_se_alg, alg.ahash.base);

	crypto_ahash_set_reqsize(ahash_tfm, sizeof(struct tegra_sha_reqctx));

	ctx->se = se_alg->se_dev;
	ctx->fallback = false;
	ctx->key_id = 0;

	ret = se_algname_to_algid(algname);
	if (ret < 0) {
		dev_err(ctx->se->dev, "invalid algorithm\n");
		return ret;
	}

	if (se_alg->alg_base)
		tegra_sha_init_fallback(ahash_tfm, ctx, algname);

	ctx->alg = ret;

	return 0;
}

static void tegra_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg);
}

static int tegra_hmac_fallback_setkey(struct tegra_sha_ctx *ctx, const u8 *key,
				      unsigned int keylen)
{
	if (!ctx->fallback_tfm) {
		dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen);
		return -EINVAL;
	}

	ctx->fallback = true;
	return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen);
}
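
/*
 * HMAC keys of AES-compatible length are programmed into an SE keyslot;
 * any other length (or a failed keyslot load) switches the transform to
 * the software fallback.
 */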
static int tegra_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret;

	if (aes_check_keylen(keylen))
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id);
	if (ret)
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ctx->fallback = false;

	return 0;
}

static int tegra_sha_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	rctx->task = SHA_INIT;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_update(req);

	rctx->task |= SHA_UPDATE;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_final(req);

	rctx->task |= SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_finup(req);

	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_digest(req);

	rctx->task |= SHA_INIT | SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_export(req, out);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int tegra_sha_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_import(req, in);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}
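
/* Hash algorithms backed by the SE; the HMAC entries name a software fallback via .alg_base */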
static struct tegra_se_alg tegra_hash_algs[] = {
	{
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "tegra-se-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "tegra-se-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "tegra-se-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "tegra-se-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "tegra-se-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "tegra-se-sha3-224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "tegra-se-sha3-256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "tegra-se-sha3-384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "tegra-se-sha3-512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha224",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "tegra-se-hmac-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha256",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "tegra-se-hmac-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha384",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "tegra-se-hmac-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha512",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "tegra-se-hmac-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}
};
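
/*
 * Build the keyslot manifest word (user, purpose, key size) used when an
 * HMAC key is programmed into the SE key table.
 */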
static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
	int manifest;

	manifest = SE_KAC_USER_NS;

	switch (alg) {
	case SE_ALG_HMAC_SHA224:
	case SE_ALG_HMAC_SHA256:
	case SE_ALG_HMAC_SHA384:
	case SE_ALG_HMAC_SHA512:
		manifest |= SE_KAC_HMAC;
		break;
	default:
		return -EINVAL;
	}

	switch (keylen) {
	case AES_KEYSIZE_128:
		manifest |= SE_KAC_SIZE_128;
		break;
	case AES_KEYSIZE_192:
		manifest |= SE_KAC_SIZE_192;
		break;
	case AES_KEYSIZE_256:
	default:
		manifest |= SE_KAC_SIZE_256;
		break;
	}

	return manifest;
}
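
/* Register all hash algorithms with the crypto engine, unwinding on failure */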
int tegra_init_hash(struct tegra_se *se)
{
	struct ahash_engine_alg *alg;
	int i, ret;

	se->manifest = tegra_hash_kac_manifest;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
		tegra_hash_algs[i].se_dev = se;
		alg = &tegra_hash_algs[i].alg.ahash;

		ret = crypto_engine_register_ahash(alg);
		if (ret) {
			dev_err(se->dev, "failed to register %s\n",
				alg->base.halg.base.cra_name);
			goto sha_err;
		}
	}

	return 0;

sha_err:
	while (i--)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);

	return ret;
}

void tegra_deinit_hash(struct tegra_se *se)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);
}