// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
 * Crypto driver to handle HASH algorithms using NVIDIA Security Engine.
 */

#include <linux/bottom_half.h>
#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "tegra-se.h"

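/*
 * Per-transform context: the owning SE device, the SE algorithm ID, the
 * hardware keyslot ID used for HMAC keys, and an optional software fallback
 * transform used when a key cannot be handled by the engine.
 */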
struct tegra_sha_ctx {
	struct tegra_se *se;
	unsigned int alg;
	bool fallback;
	u32 key_id;
	struct crypto_ahash *fallback_tfm;
};

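/*
 * Per-request context. @residue buffers the sub-block tail left over by the
 * previous update() call, @intr_res holds the engine's intermediate hash
 * result between tasks, and @task tracks which of the SHA_INIT, SHA_UPDATE
 * and SHA_FINAL phases are still pending.
 */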
struct tegra_sha_reqctx {
	struct scatterlist *src_sg;
	struct tegra_se_datbuf datbuf;
	struct tegra_se_datbuf residue;
	struct tegra_se_datbuf digest;
	struct tegra_se_datbuf intr_res;
	unsigned int alg;
	unsigned int config;
	unsigned int total_len;
	unsigned int blk_size;
	unsigned int task;
	u32 key_id;
	u32 result[HASH_RESULT_REG_COUNT];
	struct ahash_request fallback_req;
};

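/*
 * Translate an SE algorithm ID into the SE_SHA_CFG algorithm and mode bits;
 * returns -EINVAL for algorithms the SHA engine cannot handle. The HMAC
 * cases deliberately fall through to pick up the mode bits of the
 * corresponding plain SHA-2 digest.
 */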
static int tegra_sha_get_config(u32 alg)
{
	int cfg = 0;

	switch (alg) {
	case SE_ALG_SHA1:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA1;
		break;

	case SE_ALG_HMAC_SHA224:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA224;
		break;

	case SE_ALG_HMAC_SHA256:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA256;
		break;

	case SE_ALG_HMAC_SHA384:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA384;
		break;

	case SE_ALG_HMAC_SHA512:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA512;
		break;

	case SE_ALG_SHA3_224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_224;
		break;
	case SE_ALG_SHA3_256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_256;
		break;
	case SE_ALG_SHA3_384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_384;
		break;
	case SE_ALG_SHA3_512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_512;
		break;
	default:
		return -EINVAL;
	}

	return cfg;
}

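/*
 * Software fallback plumbing. When a request cannot be handled by the
 * engine (see tegra_hmac_setkey() below), it is mirrored onto the fallback
 * ahash transform with the caller's flags, completion callback and
 * completion data preserved.
 */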
static int tegra_sha_fallback_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_init(&rctx->fallback_req);
}

static int tegra_sha_fallback_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, NULL, req->nbytes);

	return crypto_ahash_update(&rctx->fallback_req);
}

static int tegra_sha_fallback_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, NULL, req->result, 0);

	return crypto_ahash_final(&rctx->fallback_req);
}

static int tegra_sha_fallback_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, req->result,
				req->nbytes);

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int tegra_sha_fallback_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, req->result,
				req->nbytes);

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int tegra_sha_fallback_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int tegra_sha_fallback_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_export(&rctx->fallback_req, out);
}

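/*
 * Emit host1x opcodes that reload the SE_SHA_HASH_RESULT registers with the
 * intermediate digest produced by the previous task, so a multi-task hash
 * resumes where it left off. Returns the number of words written.
 */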
static int tegra_se_insert_hash_result(struct tegra_sha_ctx *ctx, u32 *cpuvaddr,
				       struct tegra_sha_reqctx *rctx)
{
	__be32 *res_be = (__be32 *)rctx->intr_res.buf;
	u32 *res = (u32 *)rctx->intr_res.buf;
	int i = 0, j;

	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(HASH_RESULT_REG_COUNT);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_HASH_RESULT);

	for (j = 0; j < HASH_RESULT_REG_COUNT; j++) {
		int idx = j;

		/*
		 * The initial, intermediate and final hash values of SHA-384
		 * and SHA-512 in the SHA_HASH_RESULT registers follow the
		 * byte layout below, i.e. the two 32-bit words of each
		 * 64-bit quantity are swapped.
		 *
		 * +---------------+------------+
		 * | HASH_RESULT_0 | B4...B7    |
		 * +---------------+------------+
		 * | HASH_RESULT_1 | B0...B3    |
		 * +---------------+------------+
		 * | HASH_RESULT_2 | B12...B15  |
		 * +---------------+------------+
		 * | HASH_RESULT_3 | B8...B11   |
		 * +---------------+------------+
		 * |     ......                 |
		 * +---------------+------------+
		 * | HASH_RESULT_14| B60...B63  |
		 * +---------------+------------+
		 * | HASH_RESULT_15| B56...B59  |
		 * +---------------+------------+
		 */
		if (ctx->alg == SE_ALG_SHA384 || ctx->alg == SE_ALG_SHA512)
			idx = (j % 2) ? j - 1 : j + 1;

		/*
		 * For SHA-1, SHA-224, SHA-256, SHA-384 and SHA-512, the
		 * initial, intermediate and final hash values are stored in
		 * the SHA_HASH_RESULT registers in big-endian (i.e. NOT
		 * little-endian) byte order, so convert them before
		 * programming the registers.
		 */
		if (ctx->alg <= SE_ALG_SHA512)
			cpuvaddr[i++] = be32_to_cpu(res_be[idx]);
		else
			cpuvaddr[i++] = res[idx];
	}

	return i;
}

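/*
 * Build the host1x command stream for one SHA task: program the message
 * length and bytes-left counters, the engine configuration, the input
 * address and the output address (final digest or intermediate result),
 * plus an optional keyslot for HMAC, then kick the operation and increment
 * the completion syncpoint.
 */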
static int tegra_sha_prep_cmd(struct tegra_sha_ctx *ctx, u32 *cpuvaddr,
			      struct tegra_sha_reqctx *rctx)
{
	struct tegra_se *se = ctx->se;
	u64 msg_len, msg_left;
	int i = 0;

	msg_len = rctx->total_len * 8;
	msg_left = rctx->datbuf.size * 8;

	/*
	 * If IN_ADDR_HI_0.SZ > SHA_MSG_LEFT_[0-3], the HASH engine treats
	 * this as the last buffer and processes the data. Therefore, add an
	 * extra byte (8 bits) to msg_left if this is not the last buffer.
	 */
	if (rctx->task & SHA_UPDATE) {
		msg_left += 8;
		msg_len += 8;
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(8);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_MSG_LENGTH);
	cpuvaddr[i++] = lower_32_bits(msg_len);
	cpuvaddr[i++] = upper_32_bits(msg_len);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = lower_32_bits(msg_left);
	cpuvaddr[i++] = upper_32_bits(msg_left);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(2);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_CFG);
	cpuvaddr[i++] = rctx->config;

	if (rctx->task & SHA_FIRST) {
		cpuvaddr[i++] = SE_SHA_TASK_HASH_INIT;
		rctx->task &= ~SHA_FIRST;
	} else {
		/*
		 * If it isn't the first task, program the HASH_RESULT
		 * registers with the intermediate result from the previous
		 * task.
		 */
		i += tegra_se_insert_hash_result(ctx, cpuvaddr + i, rctx);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(4);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_IN_ADDR);
	cpuvaddr[i++] = rctx->datbuf.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
			      SE_ADDR_HI_SZ(rctx->datbuf.size));

	if (rctx->task & SHA_UPDATE) {
		cpuvaddr[i++] = rctx->intr_res.addr;
		cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->intr_res.addr)) |
				      SE_ADDR_HI_SZ(rctx->intr_res.size));
	} else {
		cpuvaddr[i++] = rctx->digest.addr;
		cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
				      SE_ADDR_HI_SZ(rctx->digest.size));
	}

	if (rctx->key_id) {
		cpuvaddr[i++] = host1x_opcode_setpayload(1);
		cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_CRYPTO_CFG);
		cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(1);
	cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_OPERATION);
	cpuvaddr[i++] = SE_SHA_OP_WRSTALL | SE_SHA_OP_START |
			SE_SHA_OP_LASTBUF;
	cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
	cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
			host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

	dev_dbg(se->dev, "msg len %llu msg left %llu sz %zd cfg %#x",
		msg_len, msg_left, rctx->datbuf.size, rctx->config);

	return i;
}

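/*
 * Allocate the per-request DMA buffers (digest, residue, intermediate
 * result) and reset the running state. The buffers live until
 * tegra_sha_do_final() releases them.
 */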
static int tegra_sha_do_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;

	if (ctx->fallback)
		return tegra_sha_fallback_init(req);

	rctx->total_len = 0;
	rctx->datbuf.size = 0;
	rctx->residue.size = 0;
	rctx->key_id = ctx->key_id;
	rctx->task |= SHA_FIRST;
	rctx->alg = ctx->alg;
	rctx->blk_size = crypto_ahash_blocksize(tfm);
	rctx->digest.size = crypto_ahash_digestsize(tfm);

	rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	rctx->intr_res.size = HASH_RESULT_REG_COUNT * 4;
	rctx->intr_res.buf = dma_alloc_coherent(se->dev, rctx->intr_res.size,
						&rctx->intr_res.addr, GFP_KERNEL);
	if (!rctx->intr_res.buf)
		goto intr_res_fail;

	return 0;

intr_res_fail:
	/* The residue buffer was allocated with blk_size, so free it with
	 * the same size (residue.size is still zero at this point).
	 */
	dma_free_coherent(se->dev, rctx->blk_size, rctx->residue.buf,
			  rctx->residue.addr);
resbuf_fail:
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
digbuf_fail:
	return -ENOMEM;
}

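/*
 * Hash as many whole blocks as possible and carry the tail over as residue.
 * A full trailing block is always held back so that final() has data to
 * close the hash with. For example, with a 64-byte block size, a 100-byte
 * update on top of 10 bytes of prior residue hashes 64 bytes and keeps the
 * remaining 46 as the new residue; a residue-free 64-byte update hashes
 * nothing and becomes residue entirely.
 */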
static int tegra_sha_do_update(struct ahash_request *req)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct tegra_se *se = ctx->se;
	unsigned int nblks, nresidue, size;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int ret;

	nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size;
	nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size;

	/*
	 * If nbytes is a multiple of the block size and there is no residue,
	 * then reserve the last block as residue, to be processed during
	 * final().
	 */
	if (!nresidue && nblks) {
		nresidue = rctx->blk_size;
		nblks--;
	}

	rctx->src_sg = req->src;
	rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;

	/*
	 * If less than a full block is available, copy it to the residue
	 * and return. The bytes will be processed in final().
	 */
	if (nblks < 1) {
		scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size,
					 rctx->src_sg, 0, req->nbytes, 0);
		rctx->residue.size += req->nbytes;

		return 0;
	}

	rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		return -ENOMEM;

	/* Copy the previous residue first */
	if (rctx->residue.size)
		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);

	scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size,
				 rctx->src_sg, 0, req->nbytes - nresidue, 0);

	scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg,
				 req->nbytes - nresidue, nresidue, 0);

	/* Record the residue left over after the current blocks */
	rctx->residue.size = nresidue;
	rctx->total_len += rctx->datbuf.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(ctx, cpuvaddr, rctx);
	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);

	dma_free_coherent(se->dev, rctx->datbuf.size,
			  rctx->datbuf.buf, rctx->datbuf.addr);

	return ret;
}

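/*
 * Process the remaining residue as the engine's last buffer, copy the final
 * digest back to the caller, and release every per-request DMA buffer
 * allocated in tegra_sha_do_init().
 */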
static int tegra_sha_do_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int size, ret = 0;

	if (rctx->residue.size) {
		rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size,
						      &rctx->datbuf.addr, GFP_KERNEL);
		if (!rctx->datbuf.buf) {
			ret = -ENOMEM;
			goto out_free;
		}

		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);
	}

	rctx->datbuf.size = rctx->residue.size;
	rctx->total_len += rctx->residue.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(ctx, cpuvaddr, rctx);
	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);
	if (ret)
		goto out;

	/* Copy result */
	memcpy(req->result, rctx->digest.buf, rctx->digest.size);

out:
	if (rctx->residue.size)
		dma_free_coherent(se->dev, rctx->datbuf.size,
				  rctx->datbuf.buf, rctx->datbuf.addr);
out_free:
	dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm),
			  rctx->residue.buf, rctx->residue.addr);
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);

	dma_free_coherent(se->dev, rctx->intr_res.size, rctx->intr_res.buf,
			  rctx->intr_res.addr);

	return ret;
}

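/*
 * Crypto-engine worker callback: run whichever of the INIT, UPDATE and
 * FINAL phases are pending in rctx->task, then hand the result back to the
 * crypto engine with bottom halves disabled.
 */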
static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	int ret = 0;

	if (rctx->task & SHA_INIT) {
		ret = tegra_sha_do_init(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_INIT;
	}

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_UPDATE;
	}

	if (rctx->task & SHA_FINAL) {
		ret = tegra_sha_do_final(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_FINAL;
	}

out:
	local_bh_disable();
	crypto_finalize_hash_request(se->engine, req, ret);
	local_bh_enable();

	return 0;
}

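/*
 * Allocate the software fallback transform and grow the state and request
 * sizes so that both the fallback's exported state and its request context
 * fit inside ours.
 */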
static void tegra_sha_init_fallback(struct crypto_ahash *tfm, struct tegra_sha_ctx *ctx,
				    const char *algname)
{
	unsigned int statesize;

	ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC |
					       CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback_tfm)) {
		dev_warn(ctx->se->dev,
			 "failed to allocate fallback for %s\n", algname);
		ctx->fallback_tfm = NULL;
		return;
	}

	statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	if (statesize > sizeof(struct tegra_sha_reqctx))
		crypto_ahash_set_statesize(tfm, statesize);

	/* Update reqsize if fallback is added */
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct tegra_sha_reqctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));
}

static int tegra_sha_cra_init(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ahash *ahash_tfm = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct tegra_se_alg *se_alg;
	const char *algname;
	int ret;

	algname = crypto_tfm_alg_name(tfm);
	se_alg = container_of(alg, struct tegra_se_alg, alg.ahash.base);

	crypto_ahash_set_reqsize(ahash_tfm, sizeof(struct tegra_sha_reqctx));

	ctx->se = se_alg->se_dev;
	ctx->fallback = false;
	ctx->key_id = 0;

	ret = se_algname_to_algid(algname);
	if (ret < 0) {
		dev_err(ctx->se->dev, "invalid algorithm\n");
		return ret;
	}

	if (se_alg->alg_base)
		tegra_sha_init_fallback(ahash_tfm, ctx, algname);

	ctx->alg = ret;

	return 0;
}

static void tegra_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg);
}

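/*
 * HMAC keys of AES size (128/192/256 bits) are loaded into an SE keyslot;
 * any other length is routed to the software fallback, as is a key the
 * hardware refuses to accept.
 */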
static int tegra_hmac_fallback_setkey(struct tegra_sha_ctx *ctx, const u8 *key,
				      unsigned int keylen)
{
	if (!ctx->fallback_tfm) {
		dev_dbg(ctx->se->dev, "invalid key length (%u)\n", keylen);
		return -EINVAL;
	}

	ctx->fallback = true;
	return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen);
}

static int tegra_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret;

	if (aes_check_keylen(keylen))
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id);
	if (ret)
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ctx->fallback = false;

	return 0;
}

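/*
 * ahash entry points. Each merely records the pending phase in rctx->task
 * (or bounces straight to the fallback) and defers the actual work to
 * tegra_sha_do_one_req() on the crypto engine.
 */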
static int tegra_sha_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	rctx->task = SHA_INIT;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_update(req);

	rctx->task |= SHA_UPDATE;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_final(req);

	rctx->task |= SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_finup(req);

	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_digest(req);

	rctx->task |= SHA_INIT | SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_export(req, out);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int tegra_sha_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_import(req, in);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}

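/*
 * Algorithm registrations: plain SHA-1/SHA-2/SHA-3 digests plus the
 * HMAC-SHA2 variants, which name an .alg_base so that a software fallback
 * can be instantiated for keys the hardware cannot hold.
 */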
static struct tegra_se_alg tegra_hash_algs[] = {
	{
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "tegra-se-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "tegra-se-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "tegra-se-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "tegra-se-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "tegra-se-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "tegra-se-sha3-224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "tegra-se-sha3-256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "tegra-se-sha3-384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "tegra-se-sha3-512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha224",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "tegra-se-hmac-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha256",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "tegra-se-hmac-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha384",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "tegra-se-hmac-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha512",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "tegra-se-hmac-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}
};

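/*
 * Build the key access control (KAC) manifest word for an HMAC keyslot:
 * the owner namespace, the HMAC purpose, and a key-size field derived
 * from keylen.
 */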
static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
	int manifest;

	manifest = SE_KAC_USER_NS;

	switch (alg) {
	case SE_ALG_HMAC_SHA224:
	case SE_ALG_HMAC_SHA256:
	case SE_ALG_HMAC_SHA384:
	case SE_ALG_HMAC_SHA512:
		manifest |= SE_KAC_HMAC;
		break;
	default:
		return -EINVAL;
	}

	switch (keylen) {
	case AES_KEYSIZE_128:
		manifest |= SE_KAC_SIZE_128;
		break;
	case AES_KEYSIZE_192:
		manifest |= SE_KAC_SIZE_192;
		break;
	case AES_KEYSIZE_256:
	default:
		manifest |= SE_KAC_SIZE_256;
		break;
	}

	return manifest;
}

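/*
 * Install the keyslot manifest helper and register every algorithm in
 * tegra_hash_algs with the crypto engine, unwinding the ones already
 * registered on failure.
 */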
int tegra_init_hash(struct tegra_se *se)
{
	struct ahash_engine_alg *alg;
	int i, ret;

	se->manifest = tegra_hash_kac_manifest;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
		tegra_hash_algs[i].se_dev = se;
		alg = &tegra_hash_algs[i].alg.ahash;

		ret = crypto_engine_register_ahash(alg);
		if (ret) {
			dev_err(se->dev, "failed to register %s\n",
				alg->base.halg.base.cra_name);
			goto sha_err;
		}
	}

	return 0;

sha_err:
	while (i--)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);

	return ret;
}

void tegra_deinit_hash(struct tegra_se *se)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);
}