xref: /linux/drivers/crypto/inside-secure/safexcel_hash.c (revision a578dd095dfe8b56c167201d9aea43e47d27f807)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7 
8 #include <crypto/aes.h>
9 #include <crypto/hmac.h>
10 #include <crypto/md5.h>
11 #include <crypto/sha1.h>
12 #include <crypto/sha2.h>
13 #include <crypto/sha3.h>
14 #include <crypto/skcipher.h>
15 #include <crypto/sm3.h>
16 #include <crypto/internal/cipher.h>
17 #include <linux/device.h>
18 #include <linux/dma-mapping.h>
19 #include <linux/dmapool.h>
20 
21 #include "safexcel.h"
22 
23 struct safexcel_ahash_ctx {
24 	struct safexcel_context base;
25 
26 	u32 alg;
27 	u8  key_sz;
28 	bool cbcmac;
29 	bool do_fallback;
30 	bool fb_init_done;
31 	bool fb_do_setkey;
32 
33 	struct crypto_aes_ctx *aes;
34 	struct crypto_ahash *fback;
35 	struct crypto_shash *shpre;
36 	struct shash_desc *shdesc;
37 };
38 
39 struct safexcel_ahash_req {
40 	bool last_req;
41 	bool finish;
42 	bool hmac;
43 	bool needs_inv;
44 	bool hmac_zlen;
45 	bool len_is_le;
46 	bool not_first;
47 	bool xcbcmac;
48 
49 	int nents;
50 	dma_addr_t result_dma;
51 
52 	u32 digest;
53 
54 	u8 state_sz;    /* expected state size, only set once */
55 	u8 block_sz;    /* block size, only set once */
56 	u8 digest_sz;   /* output digest size, only set once */
57 	__le32 state[SHA3_512_BLOCK_SIZE /
58 		     sizeof(__le32)] __aligned(sizeof(__le32));
59 
60 	u64 len;
61 	u64 processed;
62 
63 	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
64 	dma_addr_t cache_dma;
65 	unsigned int cache_sz;
66 
67 	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
68 };
69 
70 static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
71 {
72 	return req->len - req->processed;
73 }
74 
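/* safexcel_hash_token: build the EIP197 token for a hash operation.
 * token[0] runs input_length bytes through the hash engine; for CBC-MAC
 * with a partial final block, token[1] inserts padding bytes to complete
 * the 16-byte block; token[2] appends result_length bytes of digest to
 * the output packet.
 */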
75 static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
76 				u32 input_length, u32 result_length,
77 				bool cbcmac)
78 {
79 	struct safexcel_token *token =
80 		(struct safexcel_token *)cdesc->control_data.token;
81 
82 	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
83 	token[0].packet_length = input_length;
84 	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
85 
86 	input_length &= 15;
87 	if (unlikely(cbcmac && input_length)) {
88 		token[0].stat =  0;
89 		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
90 		token[1].packet_length = 16 - input_length;
91 		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
92 		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
93 	} else {
94 		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
95 		eip197_noop_token(&token[1]);
96 	}
97 
98 	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
99 	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
100 			EIP197_TOKEN_STAT_LAST_PACKET;
101 	token[2].packet_length = result_length;
102 	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
103 				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
104 
105 	eip197_noop_token(&token[3]);
106 }
107 
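/* safexcel_context_control: fill in the control words of the first command
 * descriptor. Handles the XCM (XCBC/CMAC/CBC-MAC/CRC) case, the first block
 * of a plain hash (restart), hash/HMAC continuations that need a digest
 * count, and the single-pass HMAC finish using the outer digest from the
 * context record.
 */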
108 static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
109 				     struct safexcel_ahash_req *req,
110 				     struct safexcel_command_desc *cdesc)
111 {
112 	struct safexcel_crypto_priv *priv = ctx->base.priv;
113 	u64 count = 0;
114 
115 	cdesc->control_data.control0 = ctx->alg;
116 	cdesc->control_data.control1 = 0;
117 
118 	/*
119 	 * Copy the input digest if needed, and setup the context
120 	 * fields. Do this now as we need it to setup the first command
121 	 * descriptor.
122 	 */
123 	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
124 		if (req->xcbcmac)
125 			memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
126 		else
127 			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
128 
129 		if (!req->finish && req->xcbcmac)
130 			cdesc->control_data.control0 |=
131 				CONTEXT_CONTROL_DIGEST_XCM |
132 				CONTEXT_CONTROL_TYPE_HASH_OUT  |
133 				CONTEXT_CONTROL_NO_FINISH_HASH |
134 				CONTEXT_CONTROL_SIZE(req->state_sz /
135 						     sizeof(u32));
136 		else
137 			cdesc->control_data.control0 |=
138 				CONTEXT_CONTROL_DIGEST_XCM |
139 				CONTEXT_CONTROL_TYPE_HASH_OUT  |
140 				CONTEXT_CONTROL_SIZE(req->state_sz /
141 						     sizeof(u32));
142 		return;
143 	} else if (!req->processed) {
144 		/* First - and possibly only - block of basic hash only */
145 		if (req->finish)
146 			cdesc->control_data.control0 |= req->digest |
147 				CONTEXT_CONTROL_TYPE_HASH_OUT |
148 				CONTEXT_CONTROL_RESTART_HASH  |
149 				/* ensure it's not 0! */
150 				CONTEXT_CONTROL_SIZE(1);
151 		else
152 			cdesc->control_data.control0 |= req->digest |
153 				CONTEXT_CONTROL_TYPE_HASH_OUT  |
154 				CONTEXT_CONTROL_RESTART_HASH   |
155 				CONTEXT_CONTROL_NO_FINISH_HASH |
156 				/* ensure it's not 0! */
157 				CONTEXT_CONTROL_SIZE(1);
158 		return;
159 	}
160 
161 	/* Hash continuation or HMAC, setup (inner) digest from state */
162 	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
163 
164 	if (req->finish) {
165 		/* Compute digest count for hash/HMAC finish operations */
166 		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
167 		    req->hmac_zlen || (req->processed != req->block_sz)) {
168 			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;
169 
170 			/* This is a hardware limitation, as the
171 			 * counter must fit into an u32. This represents
172 			 * a fairly big amount of input data, so we
173 			 * shouldn't see this.
174 			 */
175 			if (unlikely(count & 0xffffffff00000000ULL)) {
176 				dev_warn(priv->dev,
177 					 "Input data is too big\n");
178 				return;
179 			}
180 		}
181 
182 		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
183 		    /* Special case: zero length HMAC */
184 		    req->hmac_zlen ||
185 		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
186 		    (req->processed != req->block_sz)) {
187 			/* Basic hash continue operation, need digest + cnt */
188 			cdesc->control_data.control0 |=
189 				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
190 				CONTEXT_CONTROL_TYPE_HASH_OUT |
191 				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
192 			/* For zero-len HMAC, don't finalize, already padded! */
193 			if (req->hmac_zlen)
194 				cdesc->control_data.control0 |=
195 					CONTEXT_CONTROL_NO_FINISH_HASH;
196 			cdesc->control_data.control1 |=
197 				CONTEXT_CONTROL_DIGEST_CNT;
198 			ctx->base.ctxr->data[req->state_sz >> 2] =
199 				cpu_to_le32(count);
200 			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
201 
202 			/* Clear zero-length HMAC flag for next operation! */
203 			req->hmac_zlen = false;
204 		} else { /* HMAC */
205 			/* Need outer digest for HMAC finalization */
206 			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
207 			       &ctx->base.opad, req->state_sz);
208 
209 			/* Single pass HMAC - no digest count */
210 			cdesc->control_data.control0 |=
211 				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
212 				CONTEXT_CONTROL_TYPE_HASH_OUT |
213 				CONTEXT_CONTROL_DIGEST_HMAC;
214 		}
215 	} else { /* Hash continuation, do not finish yet */
216 		cdesc->control_data.control0 |=
217 			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
218 			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
219 			CONTEXT_CONTROL_TYPE_HASH_OUT |
220 			CONTEXT_CONTROL_NO_FINISH_HASH;
221 	}
222 }
223 
224 static int safexcel_ahash_enqueue(struct ahash_request *areq);
225 
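/* Result (ring completion) handler for regular hash requests: check the
 * result descriptor for errors, unmap the source/cache/result DMA buffers,
 * and copy the digest to areq->result. When HMAC is being faked with two
 * hash passes, the inner digest is fed back through safexcel_ahash_enqueue()
 * for the outer hash before the request completes.
 */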
226 static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
227 				      int ring,
228 				      struct crypto_async_request *async,
229 				      bool *should_complete, int *ret)
230 {
231 	struct safexcel_result_desc *rdesc;
232 	struct ahash_request *areq = ahash_request_cast(async);
233 	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
234 	struct safexcel_ahash_req *sreq = ahash_request_ctx_dma(areq);
235 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
236 	u64 cache_len;
237 
238 	*ret = 0;
239 
240 	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
241 	if (IS_ERR(rdesc)) {
242 		dev_err(priv->dev,
243 			"hash: result: could not retrieve the result descriptor\n");
244 		*ret = PTR_ERR(rdesc);
245 	} else {
246 		*ret = safexcel_rdesc_check_errors(priv, rdesc);
247 	}
248 
249 	safexcel_complete(priv, ring);
250 
251 	if (sreq->nents) {
252 		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
253 		sreq->nents = 0;
254 	}
255 
256 	if (sreq->result_dma) {
257 		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
258 				 DMA_FROM_DEVICE);
259 		sreq->result_dma = 0;
260 	}
261 
262 	if (sreq->cache_dma) {
263 		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
264 				 DMA_TO_DEVICE);
265 		sreq->cache_dma = 0;
266 		sreq->cache_sz = 0;
267 	}
268 
269 	if (sreq->finish) {
270 		if (sreq->hmac &&
271 		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
272 			/* Faking HMAC using hash - need to do outer hash */
273 			memcpy(sreq->cache, sreq->state,
274 			       crypto_ahash_digestsize(ahash));
275 
276 			memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);
277 
278 			sreq->len = sreq->block_sz +
279 				    crypto_ahash_digestsize(ahash);
280 			sreq->processed = sreq->block_sz;
281 			sreq->hmac = 0;
282 
283 			if (priv->flags & EIP197_TRC_CACHE)
284 				ctx->base.needs_inv = true;
285 			areq->nbytes = 0;
286 			safexcel_ahash_enqueue(areq);
287 
288 			*should_complete = false; /* Not done yet */
289 			return 1;
290 		}
291 
292 		memcpy(areq->result, sreq->state,
293 		       crypto_ahash_digestsize(ahash));
294 	}
295 
296 	cache_len = safexcel_queued_len(sreq);
297 	if (cache_len)
298 		memcpy(sreq->cache, sreq->cache_next, cache_len);
299 
300 	*should_complete = true;
301 
302 	return 1;
303 }
304 
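/* Build the descriptors for one hash request: optionally keep trailing data
 * in the cache for a later call, handle XCBC/CBC-MAC block padding, add
 * command descriptors for the cached data and the source scatterlist, then
 * append the hash token and a single result descriptor for the digest.
 */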
305 static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
306 				   int *commands, int *results)
307 {
308 	struct ahash_request *areq = ahash_request_cast(async);
309 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
310 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
311 	struct safexcel_crypto_priv *priv = ctx->base.priv;
312 	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
313 	struct safexcel_result_desc *rdesc;
314 	struct scatterlist *sg;
315 	struct safexcel_token *dmmy;
316 	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
317 	u64 queued, len;
318 
319 	queued = safexcel_queued_len(req);
320 	if (queued <= HASH_CACHE_SIZE)
321 		cache_len = queued;
322 	else
323 		cache_len = queued - areq->nbytes;
324 
325 	if (!req->finish && !req->last_req) {
326 		/* If this is not the last request and the queued data does not
327 		 * fit into full cache blocks, cache it for the next send call.
328 		 */
329 		extra = queued & (HASH_CACHE_SIZE - 1);
330 
331 		/* If this is not the last request and the queued data
332 		 * is a multiple of a block, cache the last one for now.
333 		 */
334 		if (!extra)
335 			extra = HASH_CACHE_SIZE;
336 
337 		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
338 				   req->cache_next, extra,
339 				   areq->nbytes - extra);
340 
341 		queued -= extra;
342 
343 		if (!queued) {
344 			*commands = 0;
345 			*results = 0;
346 			return 0;
347 		}
348 
349 		extra = 0;
350 	}
351 
352 	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
353 		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
354 			/*
355 			 * Cache contains less than 1 full block, complete.
356 			 */
357 			extra = AES_BLOCK_SIZE - cache_len;
358 			if (queued > cache_len) {
359 				/* More data follows: borrow bytes */
360 				u64 tmp = queued - cache_len;
361 
362 				skip = min_t(u64, tmp, extra);
363 				sg_pcopy_to_buffer(areq->src,
364 					sg_nents(areq->src),
365 					req->cache + cache_len,
366 					skip, 0);
367 			}
368 			extra -= skip;
369 			memset(req->cache + cache_len + skip, 0, extra);
370 			if (!ctx->cbcmac && extra) {
371 				// 10- padding for XCBCMAC & CMAC
372 				req->cache[cache_len + skip] = 0x80;
373 				// HW will use K2 instead of K3 - compensate!
374 				for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
375 					u32 *cache = (void *)req->cache;
376 					u32 *ipad = ctx->base.ipad.word;
377 					u32 x;
378 
379 					x = ipad[i] ^ ipad[i + 4];
380 					cache[i] ^= swab32(x);
381 				}
382 			}
383 			cache_len = AES_BLOCK_SIZE;
384 			queued = queued + extra;
385 		}
386 
387 		/* XCBC continue: XOR previous result into 1st word */
388 		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
389 	}
390 
391 	len = queued;
392 	/* Add a command descriptor for the cached data, if any */
393 	if (cache_len) {
394 		req->cache_dma = dma_map_single(priv->dev, req->cache,
395 						cache_len, DMA_TO_DEVICE);
396 		if (dma_mapping_error(priv->dev, req->cache_dma))
397 			return -EINVAL;
398 
399 		req->cache_sz = cache_len;
400 		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
401 						 (cache_len == len),
402 						 req->cache_dma, cache_len,
403 						 len, ctx->base.ctxr_dma,
404 						 &dmmy);
405 		if (IS_ERR(first_cdesc)) {
406 			ret = PTR_ERR(first_cdesc);
407 			goto unmap_cache;
408 		}
409 		n_cdesc++;
410 
411 		queued -= cache_len;
412 		if (!queued)
413 			goto send_command;
414 	}
415 
416 	/* Now handle the current ahash request buffer(s) */
417 	req->nents = dma_map_sg(priv->dev, areq->src,
418 				sg_nents_for_len(areq->src,
419 						 areq->nbytes),
420 				DMA_TO_DEVICE);
421 	if (!req->nents) {
422 		ret = -ENOMEM;
423 		goto cdesc_rollback;
424 	}
425 
426 	for_each_sg(areq->src, sg, req->nents, i) {
427 		int sglen = sg_dma_len(sg);
428 
429 		if (unlikely(sglen <= skip)) {
430 			skip -= sglen;
431 			continue;
432 		}
433 
434 		/* Do not overflow the request */
435 		if ((queued + skip) <= sglen)
436 			sglen = queued;
437 		else
438 			sglen -= skip;
439 
440 		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
441 					   !(queued - sglen),
442 					   sg_dma_address(sg) + skip, sglen,
443 					   len, ctx->base.ctxr_dma, &dmmy);
444 		if (IS_ERR(cdesc)) {
445 			ret = PTR_ERR(cdesc);
446 			goto unmap_sg;
447 		}
448 
449 		if (!n_cdesc)
450 			first_cdesc = cdesc;
451 		n_cdesc++;
452 
453 		queued -= sglen;
454 		if (!queued)
455 			break;
456 		skip = 0;
457 	}
458 
459 send_command:
460 	/* Setup the context options */
461 	safexcel_context_control(ctx, req, first_cdesc);
462 
463 	/* Add the token */
464 	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
465 
466 	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
467 					 DMA_FROM_DEVICE);
468 	if (dma_mapping_error(priv->dev, req->result_dma)) {
469 		ret = -EINVAL;
470 		goto unmap_sg;
471 	}
472 
473 	/* Add a result descriptor */
474 	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
475 				   req->digest_sz);
476 	if (IS_ERR(rdesc)) {
477 		ret = PTR_ERR(rdesc);
478 		goto unmap_result;
479 	}
480 
481 	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
482 
483 	req->processed += len - extra;
484 
485 	*commands = n_cdesc;
486 	*results = 1;
487 	return 0;
488 
489 unmap_result:
490 	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
491 			 DMA_FROM_DEVICE);
492 unmap_sg:
493 	if (req->nents) {
494 		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
495 		req->nents = 0;
496 	}
497 cdesc_rollback:
498 	for (i = 0; i < n_cdesc; i++)
499 		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
500 unmap_cache:
501 	if (req->cache_dma) {
502 		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
503 				 DMA_TO_DEVICE);
504 		req->cache_dma = 0;
505 		req->cache_sz = 0;
506 	}
507 
508 	return ret;
509 }
510 
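/* Result handler for context invalidation requests: on tfm exit the context
 * record is freed, otherwise the original request is moved to a freshly
 * selected ring and re-enqueued.
 */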
511 static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
512 				      int ring,
513 				      struct crypto_async_request *async,
514 				      bool *should_complete, int *ret)
515 {
516 	struct safexcel_result_desc *rdesc;
517 	struct ahash_request *areq = ahash_request_cast(async);
518 	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
519 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
520 	int enq_ret;
521 
522 	*ret = 0;
523 
524 	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
525 	if (IS_ERR(rdesc)) {
526 		dev_err(priv->dev,
527 			"hash: invalidate: could not retrieve the result descriptor\n");
528 		*ret = PTR_ERR(rdesc);
529 	} else {
530 		*ret = safexcel_rdesc_check_errors(priv, rdesc);
531 	}
532 
533 	safexcel_complete(priv, ring);
534 
535 	if (ctx->base.exit_inv) {
536 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
537 			      ctx->base.ctxr_dma);
538 
539 		*should_complete = true;
540 		return 1;
541 	}
542 
543 	ring = safexcel_select_ring(priv);
544 	ctx->base.ring = ring;
545 
546 	spin_lock_bh(&priv->ring[ring].queue_lock);
547 	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
548 	spin_unlock_bh(&priv->ring[ring].queue_lock);
549 
550 	if (enq_ret != -EINPROGRESS)
551 		*ret = enq_ret;
552 
553 	queue_work(priv->ring[ring].workqueue,
554 		   &priv->ring[ring].work_data.work);
555 
556 	*should_complete = false;
557 
558 	return 1;
559 }
560 
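/* Dispatch a completed descriptor to either the invalidation or the regular
 * request result handler, depending on the needs_inv flag.
 */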
561 static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
562 				  struct crypto_async_request *async,
563 				  bool *should_complete, int *ret)
564 {
565 	struct ahash_request *areq = ahash_request_cast(async);
566 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
567 	int err;
568 
569 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);
570 
571 	if (req->needs_inv) {
572 		req->needs_inv = false;
573 		err = safexcel_handle_inv_result(priv, ring, async,
574 						 should_complete, ret);
575 	} else {
576 		err = safexcel_handle_req_result(priv, ring, async,
577 						 should_complete, ret);
578 	}
579 
580 	return err;
581 }
582 
583 static int safexcel_ahash_send_inv(struct crypto_async_request *async,
584 				   int ring, int *commands, int *results)
585 {
586 	struct ahash_request *areq = ahash_request_cast(async);
587 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
588 	int ret;
589 
590 	ret = safexcel_invalidate_cache(async, ctx->base.priv,
591 					ctx->base.ctxr_dma, ring);
592 	if (unlikely(ret))
593 		return ret;
594 
595 	*commands = 1;
596 	*results = 1;
597 
598 	return 0;
599 }
600 
601 static int safexcel_ahash_send(struct crypto_async_request *async,
602 			       int ring, int *commands, int *results)
603 {
604 	struct ahash_request *areq = ahash_request_cast(async);
605 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
606 	int ret;
607 
608 	if (req->needs_inv)
609 		ret = safexcel_ahash_send_inv(async, ring, commands, results);
610 	else
611 		ret = safexcel_ahash_send_req(async, ring, commands, results);
612 
613 	return ret;
614 }
615 
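/* Queue a dummy request flagged with needs_inv and synchronously wait for the
 * engine to invalidate the cached context record before it is freed.
 */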
616 static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
617 {
618 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
619 	struct safexcel_crypto_priv *priv = ctx->base.priv;
620 	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
621 	struct safexcel_ahash_req *rctx = ahash_request_ctx_dma(req);
622 	DECLARE_CRYPTO_WAIT(result);
623 	int ring = ctx->base.ring;
624 	int err;
625 
626 	memset(req, 0, EIP197_AHASH_REQ_SIZE);
627 
628 	/* create invalidation request */
629 	init_completion(&result.completion);
630 	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
631 				   crypto_req_done, &result);
632 
633 	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
634 	ctx = crypto_tfm_ctx(req->base.tfm);
635 	ctx->base.exit_inv = true;
636 	rctx->needs_inv = true;
637 
638 	spin_lock_bh(&priv->ring[ring].queue_lock);
639 	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
640 	spin_unlock_bh(&priv->ring[ring].queue_lock);
641 
642 	queue_work(priv->ring[ring].workqueue,
643 		   &priv->ring[ring].work_data.work);
644 
645 	err = crypto_wait_req(-EINPROGRESS, &result);
646 
647 	if (err) {
648 		dev_warn(priv->dev, "hash: completion error (%d)\n", err);
649 		return err;
650 	}
651 
652 	return 0;
653 }
654 
655 /* safexcel_ahash_cache: cache data until at least one request can be sent to
656  * the engine, i.e. when there is at least one full block of data in the pipe.
657  */
658 static int safexcel_ahash_cache(struct ahash_request *areq)
659 {
660 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
661 	u64 cache_len;
662 
663 	/* cache_len: everything accepted by the driver but not sent yet,
664 	 * i.e. total size handled by update() - current request size - total size handled by send()
665 	 */
666 	cache_len = safexcel_queued_len(req);
667 
668 	/*
669 	 * In case there aren't enough bytes to proceed (less than a
670 	 * block size), cache the data until we have enough.
671 	 */
672 	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
673 		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
674 				   req->cache + cache_len,
675 				   areq->nbytes, 0);
676 		return 0;
677 	}
678 
679 	/* We couldn't cache all the data */
680 	return -E2BIG;
681 }
682 
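/* Hand a request over to the engine: decide whether the existing context
 * record must be invalidated first (digest state or HMAC opad changed, or a
 * non-XCBC continuation while the record may still be cached), allocate the
 * record on first use, then queue the request on the context's ring.
 */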
683 static int safexcel_ahash_enqueue(struct ahash_request *areq)
684 {
685 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
686 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
687 	struct safexcel_crypto_priv *priv = ctx->base.priv;
688 	int ret, ring;
689 
690 	req->needs_inv = false;
691 
692 	if (ctx->base.ctxr) {
693 		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
694 		     /* invalidate for *any* non-XCBC continuation */
695 		   ((req->not_first && !req->xcbcmac) ||
696 		     /* invalidate if (i)digest changed */
697 		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
698 		     /* invalidate for HMAC finish with odigest changed */
699 		     (req->finish && req->hmac &&
700 		      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
701 			     &ctx->base.opad, req->state_sz))))
702 			/*
703 			 * We're still setting needs_inv here, even though it is
704 			 * cleared right away, because the needs_inv flag can be
705 			 * set in other functions and we want to keep the same
706 			 * logic.
707 			 */
708 			ctx->base.needs_inv = true;
709 
710 		if (ctx->base.needs_inv) {
711 			ctx->base.needs_inv = false;
712 			req->needs_inv = true;
713 		}
714 	} else {
715 		ctx->base.ring = safexcel_select_ring(priv);
716 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
717 						 EIP197_GFP_FLAGS(areq->base),
718 						 &ctx->base.ctxr_dma);
719 		if (!ctx->base.ctxr)
720 			return -ENOMEM;
721 	}
722 	req->not_first = true;
723 
724 	ring = ctx->base.ring;
725 
726 	spin_lock_bh(&priv->ring[ring].queue_lock);
727 	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
728 	spin_unlock_bh(&priv->ring[ring].queue_lock);
729 
730 	queue_work(priv->ring[ring].workqueue,
731 		   &priv->ring[ring].work_data.work);
732 
733 	return ret;
734 }
735 
736 static int safexcel_ahash_update(struct ahash_request *areq)
737 {
738 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
739 	int ret;
740 
741 	/* If the request is 0 length, do nothing */
742 	if (!areq->nbytes)
743 		return 0;
744 
745 	/* Add request to the cache if it fits */
746 	ret = safexcel_ahash_cache(areq);
747 
748 	/* Update total request length */
749 	req->len += areq->nbytes;
750 
751 	/* If not all data could fit into the cache, go process the excess.
752 	 * Also go process immediately for an HMAC IV precompute, which
753 	 * will never be finished at all, but needs to be processed anyway.
754 	 */
755 	if ((ret && !req->finish) || req->last_req)
756 		return safexcel_ahash_enqueue(areq);
757 
758 	return 0;
759 }
760 
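/* Finish a hash: zero-length hash, CRC32, CBC-MAC and XCBC/CMAC requests are
 * answered directly with precomputed or locally computed results, a
 * zero-length HMAC is handled by building the padding block in the cache,
 * and everything else is queued to the engine with the finish flag set.
 */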
761 static int safexcel_ahash_final(struct ahash_request *areq)
762 {
763 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
764 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
765 
766 	req->finish = true;
767 
768 	if (unlikely(!req->len && !areq->nbytes)) {
769 		/*
770 		 * If we have an overall 0 length *hash* request:
771 		 * The HW cannot do 0 length hash, so we provide the correct
772 		 * result directly here.
773 		 */
774 		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
775 			memcpy(areq->result, md5_zero_message_hash,
776 			       MD5_DIGEST_SIZE);
777 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
778 			memcpy(areq->result, sha1_zero_message_hash,
779 			       SHA1_DIGEST_SIZE);
780 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
781 			memcpy(areq->result, sha224_zero_message_hash,
782 			       SHA224_DIGEST_SIZE);
783 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
784 			memcpy(areq->result, sha256_zero_message_hash,
785 			       SHA256_DIGEST_SIZE);
786 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
787 			memcpy(areq->result, sha384_zero_message_hash,
788 			       SHA384_DIGEST_SIZE);
789 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
790 			memcpy(areq->result, sha512_zero_message_hash,
791 			       SHA512_DIGEST_SIZE);
792 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
793 			memcpy(areq->result,
794 			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
795 		}
796 
797 		return 0;
798 	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
799 			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
800 			    req->len == sizeof(u32) && !areq->nbytes)) {
801 		/* Zero length CRC32 */
802 		memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
803 		return 0;
804 	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
805 			    !areq->nbytes)) {
806 		/* Zero length CBC MAC */
807 		memset(areq->result, 0, AES_BLOCK_SIZE);
808 		return 0;
809 	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
810 			    !areq->nbytes)) {
811 		/* Zero length (X)CBC/CMAC */
812 		int i;
813 
814 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
815 			u32 *result = (void *)areq->result;
816 
817 			/* K3 */
818 			result[i] = swab32(ctx->base.ipad.word[i + 4]);
819 		}
820 		areq->result[0] ^= 0x80;			// 10- padding
821 		aes_encrypt(ctx->aes, areq->result, areq->result);
822 		return 0;
823 	} else if (unlikely(req->hmac &&
824 			    (req->len == req->block_sz) &&
825 			    !areq->nbytes)) {
826 		/*
827 		 * If we have an overall 0 length *HMAC* request:
828 		 * For HMAC, we need to finalize the inner digest
829 		 * and then perform the outer hash.
830 		 */
831 
832 		/* generate pad block in the cache */
833 		/* start with a hash block of all zeroes */
834 		memset(req->cache, 0, req->block_sz);
835 		/* set the first byte to 0x80 to 'append a 1 bit' */
836 		req->cache[0] = 0x80;
837 		/* add the length in bits in the last 2 bytes */
838 		if (req->len_is_le) {
839 			/* Little endian length word (e.g. MD5) */
840 			req->cache[req->block_sz-8] = (req->block_sz << 3) &
841 						      255;
842 			req->cache[req->block_sz-7] = (req->block_sz >> 5);
843 		} else {
844 			/* Big endian length word (e.g. any SHA) */
845 			req->cache[req->block_sz-2] = (req->block_sz >> 5);
846 			req->cache[req->block_sz-1] = (req->block_sz << 3) &
847 						      255;
848 		}
849 
850 		req->len += req->block_sz; /* plus 1 hash block */
851 
852 		/* Set special zero-length HMAC flag */
853 		req->hmac_zlen = true;
854 
855 		/* Finalize HMAC */
856 		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
857 	} else if (req->hmac) {
858 		/* Finalize HMAC */
859 		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
860 	}
861 
862 	return safexcel_ahash_enqueue(areq);
863 }
864 
865 static int safexcel_ahash_finup(struct ahash_request *areq)
866 {
867 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
868 
869 	req->finish = true;
870 
871 	safexcel_ahash_update(areq);
872 	return safexcel_ahash_final(areq);
873 }
874 
875 static int safexcel_ahash_export(struct ahash_request *areq, void *out)
876 {
877 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
878 	struct safexcel_ahash_export_state *export = out;
879 
880 	export->len = req->len;
881 	export->processed = req->processed;
882 
883 	export->digest = req->digest;
884 
885 	memcpy(export->state, req->state, req->state_sz);
886 	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
887 
888 	return 0;
889 }
890 
891 static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
892 {
893 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
894 	const struct safexcel_ahash_export_state *export = in;
895 	int ret;
896 
897 	ret = crypto_ahash_init(areq);
898 	if (ret)
899 		return ret;
900 
901 	req->len = export->len;
902 	req->processed = export->processed;
903 
904 	req->digest = export->digest;
905 
906 	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
907 	memcpy(req->state, export->state, req->state_sz);
908 
909 	return 0;
910 }
911 
912 static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
913 {
914 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
915 	struct safexcel_alg_template *tmpl =
916 		container_of(__crypto_ahash_alg(tfm->__crt_alg),
917 			     struct safexcel_alg_template, alg.ahash);
918 
919 	ctx->base.priv = tmpl->priv;
920 	ctx->base.send = safexcel_ahash_send;
921 	ctx->base.handle_result = safexcel_handle_result;
922 	ctx->fb_do_setkey = false;
923 
924 	crypto_ahash_set_reqsize_dma(__crypto_ahash_cast(tfm),
925 				     sizeof(struct safexcel_ahash_req));
926 	return 0;
927 }
928 
929 static int safexcel_sha1_init(struct ahash_request *areq)
930 {
931 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
932 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
933 
934 	memset(req, 0, sizeof(*req));
935 
936 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
937 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
938 	req->state_sz = SHA1_DIGEST_SIZE;
939 	req->digest_sz = SHA1_DIGEST_SIZE;
940 	req->block_sz = SHA1_BLOCK_SIZE;
941 
942 	return 0;
943 }
944 
945 static int safexcel_sha1_digest(struct ahash_request *areq)
946 {
947 	int ret = safexcel_sha1_init(areq);
948 
949 	if (ret)
950 		return ret;
951 
952 	return safexcel_ahash_finup(areq);
953 }
954 
955 static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
956 {
957 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
958 	struct safexcel_crypto_priv *priv = ctx->base.priv;
959 	int ret;
960 
961 	/* context not allocated, skip invalidation */
962 	if (!ctx->base.ctxr)
963 		return;
964 
965 	if (priv->flags & EIP197_TRC_CACHE) {
966 		ret = safexcel_ahash_exit_inv(tfm);
967 		if (ret)
968 			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
969 	} else {
970 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
971 			      ctx->base.ctxr_dma);
972 	}
973 }
974 
975 struct safexcel_alg_template safexcel_alg_sha1 = {
976 	.type = SAFEXCEL_ALG_TYPE_AHASH,
977 	.algo_mask = SAFEXCEL_ALG_SHA1,
978 	.alg.ahash = {
979 		.init = safexcel_sha1_init,
980 		.update = safexcel_ahash_update,
981 		.final = safexcel_ahash_final,
982 		.finup = safexcel_ahash_finup,
983 		.digest = safexcel_sha1_digest,
984 		.export = safexcel_ahash_export,
985 		.import = safexcel_ahash_import,
986 		.halg = {
987 			.digestsize = SHA1_DIGEST_SIZE,
988 			.statesize = sizeof(struct safexcel_ahash_export_state),
989 			.base = {
990 				.cra_name = "sha1",
991 				.cra_driver_name = "safexcel-sha1",
992 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
993 				.cra_flags = CRYPTO_ALG_ASYNC |
994 					     CRYPTO_ALG_ALLOCATES_MEMORY |
995 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
996 				.cra_blocksize = SHA1_BLOCK_SIZE,
997 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
998 				.cra_init = safexcel_ahash_cra_init,
999 				.cra_exit = safexcel_ahash_cra_exit,
1000 				.cra_module = THIS_MODULE,
1001 			},
1002 		},
1003 	},
1004 };
1005 
1006 static int safexcel_hmac_sha1_init(struct ahash_request *areq)
1007 {
1008 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1009 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1010 
1011 	memset(req, 0, sizeof(*req));
1012 
1013 	/* Start from ipad precompute */
1014 	memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
1015 	/* Already processed the key^ipad part now! */
1016 	req->len	= SHA1_BLOCK_SIZE;
1017 	req->processed	= SHA1_BLOCK_SIZE;
1018 
1019 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1020 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1021 	req->state_sz = SHA1_DIGEST_SIZE;
1022 	req->digest_sz = SHA1_DIGEST_SIZE;
1023 	req->block_sz = SHA1_BLOCK_SIZE;
1024 	req->hmac = true;
1025 
1026 	return 0;
1027 }
1028 
1029 static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
1030 {
1031 	int ret = safexcel_hmac_sha1_init(areq);
1032 
1033 	if (ret)
1034 		return ret;
1035 
1036 	return safexcel_ahash_finup(areq);
1037 }
1038 
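/* Derive the HMAC inner and outer pad blocks: keys longer than the block size
 * are first digested via the supplied ahash request, then the (possibly
 * shortened) key is zero padded and XORed with the ipad/opad constants.
 */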
1039 static int safexcel_hmac_init_pad(struct ahash_request *areq,
1040 				  unsigned int blocksize, const u8 *key,
1041 				  unsigned int keylen, u8 *ipad, u8 *opad)
1042 {
1043 	DECLARE_CRYPTO_WAIT(result);
1044 	struct scatterlist sg;
1045 	int ret, i;
1046 	u8 *keydup;
1047 
1048 	if (keylen <= blocksize) {
1049 		memcpy(ipad, key, keylen);
1050 	} else {
1051 		keydup = kmemdup(key, keylen, GFP_KERNEL);
1052 		if (!keydup)
1053 			return -ENOMEM;
1054 
1055 		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1056 					   crypto_req_done, &result);
1057 		sg_init_one(&sg, keydup, keylen);
1058 		ahash_request_set_crypt(areq, &sg, ipad, keylen);
1059 
1060 		ret = crypto_ahash_digest(areq);
1061 		ret = crypto_wait_req(ret, &result);
1062 
1063 		/* Avoid leaking */
1064 		kfree_sensitive(keydup);
1065 
1066 		if (ret)
1067 			return ret;
1068 
1069 		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
1070 	}
1071 
1072 	memset(ipad + keylen, 0, blocksize - keylen);
1073 	memcpy(opad, ipad, blocksize);
1074 
1075 	for (i = 0; i < blocksize; i++) {
1076 		ipad[i] ^= HMAC_IPAD_VALUE;
1077 		opad[i] ^= HMAC_OPAD_VALUE;
1078 	}
1079 
1080 	return 0;
1081 }
1082 
1083 static int safexcel_hmac_init_iv(struct ahash_request *areq,
1084 				 unsigned int blocksize, u8 *pad, void *state)
1085 {
1086 	struct safexcel_ahash_req *req;
1087 	DECLARE_CRYPTO_WAIT(result);
1088 	struct scatterlist sg;
1089 	int ret;
1090 
1091 	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1092 				   crypto_req_done, &result);
1093 	sg_init_one(&sg, pad, blocksize);
1094 	ahash_request_set_crypt(areq, &sg, pad, blocksize);
1095 
1096 	ret = crypto_ahash_init(areq);
1097 	if (ret)
1098 		return ret;
1099 
1100 	req = ahash_request_ctx_dma(areq);
1101 	req->hmac = true;
1102 	req->last_req = true;
1103 
1104 	ret = crypto_ahash_update(areq);
1105 	ret = crypto_wait_req(ret, &result);
1106 
1107 	return ret ?: crypto_ahash_export(areq, state);
1108 }
1109 
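/* Run the ipad and opad blocks through one block of the selected hash and
 * export the partial states; these become the precomputed inner and outer
 * digests the hardware uses for HMAC.
 */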
1110 static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
1111 				  unsigned int keylen,
1112 				  void *istate, void *ostate)
1113 {
1114 	struct ahash_request *areq;
1115 	struct crypto_ahash *tfm;
1116 	unsigned int blocksize;
1117 	u8 *ipad, *opad;
1118 	int ret;
1119 
1120 	tfm = crypto_alloc_ahash(alg, 0, 0);
1121 	if (IS_ERR(tfm))
1122 		return PTR_ERR(tfm);
1123 
1124 	areq = ahash_request_alloc(tfm, GFP_KERNEL);
1125 	if (!areq) {
1126 		ret = -ENOMEM;
1127 		goto free_ahash;
1128 	}
1129 
1130 	crypto_ahash_clear_flags(tfm, ~0);
1131 	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1132 
1133 	ipad = kcalloc(2, blocksize, GFP_KERNEL);
1134 	if (!ipad) {
1135 		ret = -ENOMEM;
1136 		goto free_request;
1137 	}
1138 
1139 	opad = ipad + blocksize;
1140 
1141 	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
1142 	if (ret)
1143 		goto free_ipad;
1144 
1145 	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
1146 	if (ret)
1147 		goto free_ipad;
1148 
1149 	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);
1150 
1151 free_ipad:
1152 	kfree(ipad);
1153 free_request:
1154 	ahash_request_free(areq);
1155 free_ahash:
1156 	crypto_free_ahash(tfm);
1157 
1158 	return ret;
1159 }
1160 
1161 int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
1162 			 unsigned int keylen, const char *alg,
1163 			 unsigned int state_sz)
1164 {
1165 	struct safexcel_crypto_priv *priv = base->priv;
1166 	struct safexcel_ahash_export_state istate, ostate;
1167 	int ret;
1168 
1169 	ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1170 	if (ret)
1171 		return ret;
1172 
1173 	if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
1174 	    (memcmp(&base->ipad, istate.state, state_sz) ||
1175 	     memcmp(&base->opad, ostate.state, state_sz)))
1176 		base->needs_inv = true;
1177 
1178 	memcpy(&base->ipad, &istate.state, state_sz);
1179 	memcpy(&base->opad, &ostate.state, state_sz);
1180 
1181 	return 0;
1182 }
1183 
1184 static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1185 				    unsigned int keylen, const char *alg,
1186 				    unsigned int state_sz)
1187 {
1188 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
1189 
1190 	return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
1191 }
1192 
1193 static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1194 				     unsigned int keylen)
1195 {
1196 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1197 					SHA1_DIGEST_SIZE);
1198 }
1199 
1200 struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1201 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1202 	.algo_mask = SAFEXCEL_ALG_SHA1,
1203 	.alg.ahash = {
1204 		.init = safexcel_hmac_sha1_init,
1205 		.update = safexcel_ahash_update,
1206 		.final = safexcel_ahash_final,
1207 		.finup = safexcel_ahash_finup,
1208 		.digest = safexcel_hmac_sha1_digest,
1209 		.setkey = safexcel_hmac_sha1_setkey,
1210 		.export = safexcel_ahash_export,
1211 		.import = safexcel_ahash_import,
1212 		.halg = {
1213 			.digestsize = SHA1_DIGEST_SIZE,
1214 			.statesize = sizeof(struct safexcel_ahash_export_state),
1215 			.base = {
1216 				.cra_name = "hmac(sha1)",
1217 				.cra_driver_name = "safexcel-hmac-sha1",
1218 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1219 				.cra_flags = CRYPTO_ALG_ASYNC |
1220 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1221 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1222 				.cra_blocksize = SHA1_BLOCK_SIZE,
1223 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1224 				.cra_init = safexcel_ahash_cra_init,
1225 				.cra_exit = safexcel_ahash_cra_exit,
1226 				.cra_module = THIS_MODULE,
1227 			},
1228 		},
1229 	},
1230 };
1231 
1232 static int safexcel_sha256_init(struct ahash_request *areq)
1233 {
1234 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1235 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1236 
1237 	memset(req, 0, sizeof(*req));
1238 
1239 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1240 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1241 	req->state_sz = SHA256_DIGEST_SIZE;
1242 	req->digest_sz = SHA256_DIGEST_SIZE;
1243 	req->block_sz = SHA256_BLOCK_SIZE;
1244 
1245 	return 0;
1246 }
1247 
1248 static int safexcel_sha256_digest(struct ahash_request *areq)
1249 {
1250 	int ret = safexcel_sha256_init(areq);
1251 
1252 	if (ret)
1253 		return ret;
1254 
1255 	return safexcel_ahash_finup(areq);
1256 }
1257 
1258 struct safexcel_alg_template safexcel_alg_sha256 = {
1259 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1260 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1261 	.alg.ahash = {
1262 		.init = safexcel_sha256_init,
1263 		.update = safexcel_ahash_update,
1264 		.final = safexcel_ahash_final,
1265 		.finup = safexcel_ahash_finup,
1266 		.digest = safexcel_sha256_digest,
1267 		.export = safexcel_ahash_export,
1268 		.import = safexcel_ahash_import,
1269 		.halg = {
1270 			.digestsize = SHA256_DIGEST_SIZE,
1271 			.statesize = sizeof(struct safexcel_ahash_export_state),
1272 			.base = {
1273 				.cra_name = "sha256",
1274 				.cra_driver_name = "safexcel-sha256",
1275 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1276 				.cra_flags = CRYPTO_ALG_ASYNC |
1277 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1278 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1279 				.cra_blocksize = SHA256_BLOCK_SIZE,
1280 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1281 				.cra_init = safexcel_ahash_cra_init,
1282 				.cra_exit = safexcel_ahash_cra_exit,
1283 				.cra_module = THIS_MODULE,
1284 			},
1285 		},
1286 	},
1287 };
1288 
1289 static int safexcel_sha224_init(struct ahash_request *areq)
1290 {
1291 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1292 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1293 
1294 	memset(req, 0, sizeof(*req));
1295 
1296 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1297 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1298 	req->state_sz = SHA256_DIGEST_SIZE;
1299 	req->digest_sz = SHA256_DIGEST_SIZE;
1300 	req->block_sz = SHA256_BLOCK_SIZE;
1301 
1302 	return 0;
1303 }
1304 
1305 static int safexcel_sha224_digest(struct ahash_request *areq)
1306 {
1307 	int ret = safexcel_sha224_init(areq);
1308 
1309 	if (ret)
1310 		return ret;
1311 
1312 	return safexcel_ahash_finup(areq);
1313 }
1314 
1315 struct safexcel_alg_template safexcel_alg_sha224 = {
1316 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1317 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1318 	.alg.ahash = {
1319 		.init = safexcel_sha224_init,
1320 		.update = safexcel_ahash_update,
1321 		.final = safexcel_ahash_final,
1322 		.finup = safexcel_ahash_finup,
1323 		.digest = safexcel_sha224_digest,
1324 		.export = safexcel_ahash_export,
1325 		.import = safexcel_ahash_import,
1326 		.halg = {
1327 			.digestsize = SHA224_DIGEST_SIZE,
1328 			.statesize = sizeof(struct safexcel_ahash_export_state),
1329 			.base = {
1330 				.cra_name = "sha224",
1331 				.cra_driver_name = "safexcel-sha224",
1332 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1333 				.cra_flags = CRYPTO_ALG_ASYNC |
1334 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1335 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1336 				.cra_blocksize = SHA224_BLOCK_SIZE,
1337 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1338 				.cra_init = safexcel_ahash_cra_init,
1339 				.cra_exit = safexcel_ahash_cra_exit,
1340 				.cra_module = THIS_MODULE,
1341 			},
1342 		},
1343 	},
1344 };
1345 
1346 static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1347 				       unsigned int keylen)
1348 {
1349 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1350 					SHA256_DIGEST_SIZE);
1351 }
1352 
1353 static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1354 {
1355 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1356 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1357 
1358 	memset(req, 0, sizeof(*req));
1359 
1360 	/* Start from ipad precompute */
1361 	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1362 	/* Already processed the key^ipad part now! */
1363 	req->len	= SHA256_BLOCK_SIZE;
1364 	req->processed	= SHA256_BLOCK_SIZE;
1365 
1366 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1367 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1368 	req->state_sz = SHA256_DIGEST_SIZE;
1369 	req->digest_sz = SHA256_DIGEST_SIZE;
1370 	req->block_sz = SHA256_BLOCK_SIZE;
1371 	req->hmac = true;
1372 
1373 	return 0;
1374 }
1375 
1376 static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1377 {
1378 	int ret = safexcel_hmac_sha224_init(areq);
1379 
1380 	if (ret)
1381 		return ret;
1382 
1383 	return safexcel_ahash_finup(areq);
1384 }
1385 
1386 struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1387 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1388 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1389 	.alg.ahash = {
1390 		.init = safexcel_hmac_sha224_init,
1391 		.update = safexcel_ahash_update,
1392 		.final = safexcel_ahash_final,
1393 		.finup = safexcel_ahash_finup,
1394 		.digest = safexcel_hmac_sha224_digest,
1395 		.setkey = safexcel_hmac_sha224_setkey,
1396 		.export = safexcel_ahash_export,
1397 		.import = safexcel_ahash_import,
1398 		.halg = {
1399 			.digestsize = SHA224_DIGEST_SIZE,
1400 			.statesize = sizeof(struct safexcel_ahash_export_state),
1401 			.base = {
1402 				.cra_name = "hmac(sha224)",
1403 				.cra_driver_name = "safexcel-hmac-sha224",
1404 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1405 				.cra_flags = CRYPTO_ALG_ASYNC |
1406 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1407 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1408 				.cra_blocksize = SHA224_BLOCK_SIZE,
1409 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1410 				.cra_init = safexcel_ahash_cra_init,
1411 				.cra_exit = safexcel_ahash_cra_exit,
1412 				.cra_module = THIS_MODULE,
1413 			},
1414 		},
1415 	},
1416 };
1417 
1418 static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1419 				     unsigned int keylen)
1420 {
1421 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1422 					SHA256_DIGEST_SIZE);
1423 }
1424 
1425 static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1426 {
1427 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1428 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1429 
1430 	memset(req, 0, sizeof(*req));
1431 
1432 	/* Start from ipad precompute */
1433 	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1434 	/* Already processed the key^ipad part now! */
1435 	req->len	= SHA256_BLOCK_SIZE;
1436 	req->processed	= SHA256_BLOCK_SIZE;
1437 
1438 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1439 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1440 	req->state_sz = SHA256_DIGEST_SIZE;
1441 	req->digest_sz = SHA256_DIGEST_SIZE;
1442 	req->block_sz = SHA256_BLOCK_SIZE;
1443 	req->hmac = true;
1444 
1445 	return 0;
1446 }
1447 
1448 static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1449 {
1450 	int ret = safexcel_hmac_sha256_init(areq);
1451 
1452 	if (ret)
1453 		return ret;
1454 
1455 	return safexcel_ahash_finup(areq);
1456 }
1457 
1458 struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1459 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1460 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1461 	.alg.ahash = {
1462 		.init = safexcel_hmac_sha256_init,
1463 		.update = safexcel_ahash_update,
1464 		.final = safexcel_ahash_final,
1465 		.finup = safexcel_ahash_finup,
1466 		.digest = safexcel_hmac_sha256_digest,
1467 		.setkey = safexcel_hmac_sha256_setkey,
1468 		.export = safexcel_ahash_export,
1469 		.import = safexcel_ahash_import,
1470 		.halg = {
1471 			.digestsize = SHA256_DIGEST_SIZE,
1472 			.statesize = sizeof(struct safexcel_ahash_export_state),
1473 			.base = {
1474 				.cra_name = "hmac(sha256)",
1475 				.cra_driver_name = "safexcel-hmac-sha256",
1476 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1477 				.cra_flags = CRYPTO_ALG_ASYNC |
1478 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1479 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1480 				.cra_blocksize = SHA256_BLOCK_SIZE,
1481 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1482 				.cra_init = safexcel_ahash_cra_init,
1483 				.cra_exit = safexcel_ahash_cra_exit,
1484 				.cra_module = THIS_MODULE,
1485 			},
1486 		},
1487 	},
1488 };
1489 
1490 static int safexcel_sha512_init(struct ahash_request *areq)
1491 {
1492 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1493 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1494 
1495 	memset(req, 0, sizeof(*req));
1496 
1497 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1498 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1499 	req->state_sz = SHA512_DIGEST_SIZE;
1500 	req->digest_sz = SHA512_DIGEST_SIZE;
1501 	req->block_sz = SHA512_BLOCK_SIZE;
1502 
1503 	return 0;
1504 }
1505 
1506 static int safexcel_sha512_digest(struct ahash_request *areq)
1507 {
1508 	int ret = safexcel_sha512_init(areq);
1509 
1510 	if (ret)
1511 		return ret;
1512 
1513 	return safexcel_ahash_finup(areq);
1514 }
1515 
1516 struct safexcel_alg_template safexcel_alg_sha512 = {
1517 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1518 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1519 	.alg.ahash = {
1520 		.init = safexcel_sha512_init,
1521 		.update = safexcel_ahash_update,
1522 		.final = safexcel_ahash_final,
1523 		.finup = safexcel_ahash_finup,
1524 		.digest = safexcel_sha512_digest,
1525 		.export = safexcel_ahash_export,
1526 		.import = safexcel_ahash_import,
1527 		.halg = {
1528 			.digestsize = SHA512_DIGEST_SIZE,
1529 			.statesize = sizeof(struct safexcel_ahash_export_state),
1530 			.base = {
1531 				.cra_name = "sha512",
1532 				.cra_driver_name = "safexcel-sha512",
1533 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1534 				.cra_flags = CRYPTO_ALG_ASYNC |
1535 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1536 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1537 				.cra_blocksize = SHA512_BLOCK_SIZE,
1538 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1539 				.cra_init = safexcel_ahash_cra_init,
1540 				.cra_exit = safexcel_ahash_cra_exit,
1541 				.cra_module = THIS_MODULE,
1542 			},
1543 		},
1544 	},
1545 };
1546 
1547 static int safexcel_sha384_init(struct ahash_request *areq)
1548 {
1549 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1550 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1551 
1552 	memset(req, 0, sizeof(*req));
1553 
1554 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1555 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1556 	req->state_sz = SHA512_DIGEST_SIZE;
1557 	req->digest_sz = SHA512_DIGEST_SIZE;
1558 	req->block_sz = SHA512_BLOCK_SIZE;
1559 
1560 	return 0;
1561 }
1562 
1563 static int safexcel_sha384_digest(struct ahash_request *areq)
1564 {
1565 	int ret = safexcel_sha384_init(areq);
1566 
1567 	if (ret)
1568 		return ret;
1569 
1570 	return safexcel_ahash_finup(areq);
1571 }
1572 
1573 struct safexcel_alg_template safexcel_alg_sha384 = {
1574 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1575 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1576 	.alg.ahash = {
1577 		.init = safexcel_sha384_init,
1578 		.update = safexcel_ahash_update,
1579 		.final = safexcel_ahash_final,
1580 		.finup = safexcel_ahash_finup,
1581 		.digest = safexcel_sha384_digest,
1582 		.export = safexcel_ahash_export,
1583 		.import = safexcel_ahash_import,
1584 		.halg = {
1585 			.digestsize = SHA384_DIGEST_SIZE,
1586 			.statesize = sizeof(struct safexcel_ahash_export_state),
1587 			.base = {
1588 				.cra_name = "sha384",
1589 				.cra_driver_name = "safexcel-sha384",
1590 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1591 				.cra_flags = CRYPTO_ALG_ASYNC |
1592 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1593 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1594 				.cra_blocksize = SHA384_BLOCK_SIZE,
1595 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1596 				.cra_init = safexcel_ahash_cra_init,
1597 				.cra_exit = safexcel_ahash_cra_exit,
1598 				.cra_module = THIS_MODULE,
1599 			},
1600 		},
1601 	},
1602 };
1603 
1604 static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1605 				       unsigned int keylen)
1606 {
1607 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1608 					SHA512_DIGEST_SIZE);
1609 }
1610 
1611 static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1612 {
1613 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1614 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1615 
1616 	memset(req, 0, sizeof(*req));
1617 
1618 	/* Start from ipad precompute */
1619 	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1620 	/* Already processed the key^ipad part now! */
1621 	req->len	= SHA512_BLOCK_SIZE;
1622 	req->processed	= SHA512_BLOCK_SIZE;
1623 
1624 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1625 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1626 	req->state_sz = SHA512_DIGEST_SIZE;
1627 	req->digest_sz = SHA512_DIGEST_SIZE;
1628 	req->block_sz = SHA512_BLOCK_SIZE;
1629 	req->hmac = true;
1630 
1631 	return 0;
1632 }
1633 
1634 static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1635 {
1636 	int ret = safexcel_hmac_sha512_init(areq);
1637 
1638 	if (ret)
1639 		return ret;
1640 
1641 	return safexcel_ahash_finup(areq);
1642 }
1643 
1644 struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1645 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1646 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1647 	.alg.ahash = {
1648 		.init = safexcel_hmac_sha512_init,
1649 		.update = safexcel_ahash_update,
1650 		.final = safexcel_ahash_final,
1651 		.finup = safexcel_ahash_finup,
1652 		.digest = safexcel_hmac_sha512_digest,
1653 		.setkey = safexcel_hmac_sha512_setkey,
1654 		.export = safexcel_ahash_export,
1655 		.import = safexcel_ahash_import,
1656 		.halg = {
1657 			.digestsize = SHA512_DIGEST_SIZE,
1658 			.statesize = sizeof(struct safexcel_ahash_export_state),
1659 			.base = {
1660 				.cra_name = "hmac(sha512)",
1661 				.cra_driver_name = "safexcel-hmac-sha512",
1662 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1663 				.cra_flags = CRYPTO_ALG_ASYNC |
1664 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1665 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1666 				.cra_blocksize = SHA512_BLOCK_SIZE,
1667 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1668 				.cra_init = safexcel_ahash_cra_init,
1669 				.cra_exit = safexcel_ahash_cra_exit,
1670 				.cra_module = THIS_MODULE,
1671 			},
1672 		},
1673 	},
1674 };
1675 
1676 static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1677 				       unsigned int keylen)
1678 {
1679 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1680 					SHA512_DIGEST_SIZE);
1681 }
1682 
1683 static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1684 {
1685 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1686 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1687 
1688 	memset(req, 0, sizeof(*req));
1689 
1690 	/* Start from ipad precompute */
1691 	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1692 	/* Already processed the key^ipad part now! */
1693 	req->len	= SHA512_BLOCK_SIZE;
1694 	req->processed	= SHA512_BLOCK_SIZE;
1695 
1696 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1697 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
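	/* SHA-384 runs on the SHA-512 engine, so the saved state is still 64 bytes */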
1698 	req->state_sz = SHA512_DIGEST_SIZE;
1699 	req->digest_sz = SHA512_DIGEST_SIZE;
1700 	req->block_sz = SHA512_BLOCK_SIZE;
1701 	req->hmac = true;
1702 
1703 	return 0;
1704 }
1705 
1706 static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1707 {
1708 	int ret = safexcel_hmac_sha384_init(areq);
1709 
1710 	if (ret)
1711 		return ret;
1712 
1713 	return safexcel_ahash_finup(areq);
1714 }
1715 
1716 struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1717 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1718 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1719 	.alg.ahash = {
1720 		.init = safexcel_hmac_sha384_init,
1721 		.update = safexcel_ahash_update,
1722 		.final = safexcel_ahash_final,
1723 		.finup = safexcel_ahash_finup,
1724 		.digest = safexcel_hmac_sha384_digest,
1725 		.setkey = safexcel_hmac_sha384_setkey,
1726 		.export = safexcel_ahash_export,
1727 		.import = safexcel_ahash_import,
1728 		.halg = {
1729 			.digestsize = SHA384_DIGEST_SIZE,
1730 			.statesize = sizeof(struct safexcel_ahash_export_state),
1731 			.base = {
1732 				.cra_name = "hmac(sha384)",
1733 				.cra_driver_name = "safexcel-hmac-sha384",
1734 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1735 				.cra_flags = CRYPTO_ALG_ASYNC |
1736 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1737 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1738 				.cra_blocksize = SHA384_BLOCK_SIZE,
1739 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1740 				.cra_init = safexcel_ahash_cra_init,
1741 				.cra_exit = safexcel_ahash_cra_exit,
1742 				.cra_module = THIS_MODULE,
1743 			},
1744 		},
1745 	},
1746 };
1747 
1748 static int safexcel_md5_init(struct ahash_request *areq)
1749 {
1750 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1751 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1752 
1753 	memset(req, 0, sizeof(*req));
1754 
1755 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1756 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1757 	req->state_sz = MD5_DIGEST_SIZE;
1758 	req->digest_sz = MD5_DIGEST_SIZE;
1759 	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1760 
1761 	return 0;
1762 }
1763 
1764 static int safexcel_md5_digest(struct ahash_request *areq)
1765 {
1766 	int ret = safexcel_md5_init(areq);
1767 
1768 	if (ret)
1769 		return ret;
1770 
1771 	return safexcel_ahash_finup(areq);
1772 }
1773 
1774 struct safexcel_alg_template safexcel_alg_md5 = {
1775 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1776 	.algo_mask = SAFEXCEL_ALG_MD5,
1777 	.alg.ahash = {
1778 		.init = safexcel_md5_init,
1779 		.update = safexcel_ahash_update,
1780 		.final = safexcel_ahash_final,
1781 		.finup = safexcel_ahash_finup,
1782 		.digest = safexcel_md5_digest,
1783 		.export = safexcel_ahash_export,
1784 		.import = safexcel_ahash_import,
1785 		.halg = {
1786 			.digestsize = MD5_DIGEST_SIZE,
1787 			.statesize = sizeof(struct safexcel_ahash_export_state),
1788 			.base = {
1789 				.cra_name = "md5",
1790 				.cra_driver_name = "safexcel-md5",
1791 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1792 				.cra_flags = CRYPTO_ALG_ASYNC |
1793 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1794 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1795 				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1796 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1797 				.cra_init = safexcel_ahash_cra_init,
1798 				.cra_exit = safexcel_ahash_cra_exit,
1799 				.cra_module = THIS_MODULE,
1800 			},
1801 		},
1802 	},
1803 };
1804 
1805 static int safexcel_hmac_md5_init(struct ahash_request *areq)
1806 {
1807 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1808 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1809 
1810 	memset(req, 0, sizeof(*req));
1811 
1812 	/* Start from ipad precompute */
1813 	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
1814 	/* Already processed the key^ipad part now! */
1815 	req->len	= MD5_HMAC_BLOCK_SIZE;
1816 	req->processed	= MD5_HMAC_BLOCK_SIZE;
1817 
1818 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1819 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1820 	req->state_sz = MD5_DIGEST_SIZE;
1821 	req->digest_sz = MD5_DIGEST_SIZE;
1822 	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1823 	req->len_is_le = true; /* MD5 is little endian! ... */
1824 	req->hmac = true;
1825 
1826 	return 0;
1827 }
1828 
1829 static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1830 				     unsigned int keylen)
1831 {
1832 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1833 					MD5_DIGEST_SIZE);
1834 }
1835 
1836 static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1837 {
1838 	int ret = safexcel_hmac_md5_init(areq);
1839 
1840 	if (ret)
1841 		return ret;
1842 
1843 	return safexcel_ahash_finup(areq);
1844 }
1845 
1846 struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1847 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1848 	.algo_mask = SAFEXCEL_ALG_MD5,
1849 	.alg.ahash = {
1850 		.init = safexcel_hmac_md5_init,
1851 		.update = safexcel_ahash_update,
1852 		.final = safexcel_ahash_final,
1853 		.finup = safexcel_ahash_finup,
1854 		.digest = safexcel_hmac_md5_digest,
1855 		.setkey = safexcel_hmac_md5_setkey,
1856 		.export = safexcel_ahash_export,
1857 		.import = safexcel_ahash_import,
1858 		.halg = {
1859 			.digestsize = MD5_DIGEST_SIZE,
1860 			.statesize = sizeof(struct safexcel_ahash_export_state),
1861 			.base = {
1862 				.cra_name = "hmac(md5)",
1863 				.cra_driver_name = "safexcel-hmac-md5",
1864 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1865 				.cra_flags = CRYPTO_ALG_ASYNC |
1866 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1867 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1868 				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1869 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1870 				.cra_init = safexcel_ahash_cra_init,
1871 				.cra_exit = safexcel_ahash_cra_exit,
1872 				.cra_module = THIS_MODULE,
1873 			},
1874 		},
1875 	},
1876 };
1877 
1878 static int safexcel_cbcmac_init(struct ahash_request *areq)
1879 {
1880 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1881 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1882 
1883 	memset(req, 0, sizeof(*req));
1884 
1885 	/* Start from loaded keys */
1886 	memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
1887 	/* Set processed to non-zero to enable invalidation detection */
1888 	req->len	= AES_BLOCK_SIZE;
1889 	req->processed	= AES_BLOCK_SIZE;
1890 
1891 	req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
1892 	req->state_sz = ctx->key_sz;
1893 	req->digest_sz = AES_BLOCK_SIZE;
1894 	req->block_sz = AES_BLOCK_SIZE;
1895 	req->xcbcmac  = true;
1896 
1897 	return 0;
1898 }
1899 
1900 static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
1901 				 unsigned int len)
1902 {
1903 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1904 	struct crypto_aes_ctx aes;
1905 	int ret, i;
1906 
1907 	ret = aes_expandkey(&aes, key, len);
1908 	if (ret)
1909 		return ret;
1910 
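	/*
	 * Context layout: two AES blocks of subkey material (zeroes for plain
	 * CBC-MAC, filled in by the XCBC/CMAC setkey routines below) followed
	 * by the AES key, stored big-endian starting at 32-bit word 8.
	 */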
1911 	memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
1912 	for (i = 0; i < len / sizeof(u32); i++)
1913 		ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
1914 
1915 	if (len == AES_KEYSIZE_192) {
1916 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
1917 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
1918 	} else if (len == AES_KEYSIZE_256) {
1919 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
1920 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
1921 	} else {
1922 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
1923 		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
1924 	}
1925 	ctx->cbcmac  = true;
1926 
1927 	memzero_explicit(&aes, sizeof(aes));
1928 	return 0;
1929 }
1930 
1931 static int safexcel_cbcmac_digest(struct ahash_request *areq)
1932 {
1933 	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
1934 }
1935 
1936 struct safexcel_alg_template safexcel_alg_cbcmac = {
1937 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1938 	.algo_mask = 0,
1939 	.alg.ahash = {
1940 		.init = safexcel_cbcmac_init,
1941 		.update = safexcel_ahash_update,
1942 		.final = safexcel_ahash_final,
1943 		.finup = safexcel_ahash_finup,
1944 		.digest = safexcel_cbcmac_digest,
1945 		.setkey = safexcel_cbcmac_setkey,
1946 		.export = safexcel_ahash_export,
1947 		.import = safexcel_ahash_import,
1948 		.halg = {
1949 			.digestsize = AES_BLOCK_SIZE,
1950 			.statesize = sizeof(struct safexcel_ahash_export_state),
1951 			.base = {
1952 				.cra_name = "cbcmac(aes)",
1953 				.cra_driver_name = "safexcel-cbcmac-aes",
1954 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1955 				.cra_flags = CRYPTO_ALG_ASYNC |
1956 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1957 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1958 				.cra_blocksize = AES_BLOCK_SIZE,
1959 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1960 				.cra_init = safexcel_ahash_cra_init,
1961 				.cra_exit = safexcel_ahash_cra_exit,
1962 				.cra_module = THIS_MODULE,
1963 			},
1964 		},
1965 	},
1966 };
1967 
1968 static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
1969 				 unsigned int len)
1970 {
1971 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1972 	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
1973 	int ret, i;
1974 
1975 	ret = aes_expandkey(ctx->aes, key, len);
1976 	if (ret)
1977 		return ret;
1978 
1979 	/* precompute the XCBC key material */
1980 	aes_encrypt(ctx->aes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
1981 		    "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
1982 	aes_encrypt(ctx->aes, (u8 *)key_tmp,
1983 		    "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
1984 	aes_encrypt(ctx->aes, (u8 *)key_tmp + AES_BLOCK_SIZE,
1985 		    "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
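	/*
	 * key_tmp now holds K2 | K3 | K1, the RFC 3566 subkeys derived by
	 * encrypting the constants 0x02.., 0x03.. and 0x01.. with the user
	 * key; swab32() below converts them to the word order the engine
	 * context expects.
	 */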
1986 	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
1987 		ctx->base.ipad.word[i] = swab32(key_tmp[i]);
1988 
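	/* Replace the key schedule with K1, the cipher key XCBC actually MACs with */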
1989 	ret = aes_expandkey(ctx->aes,
1990 			    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
1991 			    AES_MIN_KEY_SIZE);
1992 	if (ret)
1993 		return ret;
1994 
1995 	ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
1996 	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
1997 	ctx->cbcmac = false;
1998 
1999 	return 0;
2000 }
2001 
2002 static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2003 {
2004 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2005 
2006 	safexcel_ahash_cra_init(tfm);
2007 	ctx->aes = kmalloc(sizeof(*ctx->aes), GFP_KERNEL);
2008 	return ctx->aes == NULL ? -ENOMEM : 0;
2009 }
2010 
2011 static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2012 {
2013 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2014 
2015 	kfree(ctx->aes);
2016 	safexcel_ahash_cra_exit(tfm);
2017 }
2018 
2019 struct safexcel_alg_template safexcel_alg_xcbcmac = {
2020 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2021 	.algo_mask = 0,
2022 	.alg.ahash = {
2023 		.init = safexcel_cbcmac_init,
2024 		.update = safexcel_ahash_update,
2025 		.final = safexcel_ahash_final,
2026 		.finup = safexcel_ahash_finup,
2027 		.digest = safexcel_cbcmac_digest,
2028 		.setkey = safexcel_xcbcmac_setkey,
2029 		.export = safexcel_ahash_export,
2030 		.import = safexcel_ahash_import,
2031 		.halg = {
2032 			.digestsize = AES_BLOCK_SIZE,
2033 			.statesize = sizeof(struct safexcel_ahash_export_state),
2034 			.base = {
2035 				.cra_name = "xcbc(aes)",
2036 				.cra_driver_name = "safexcel-xcbc-aes",
2037 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2038 				.cra_flags = CRYPTO_ALG_ASYNC |
2039 					     CRYPTO_ALG_ALLOCATES_MEMORY |
2040 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2041 				.cra_blocksize = AES_BLOCK_SIZE,
2042 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2043 				.cra_init = safexcel_xcbcmac_cra_init,
2044 				.cra_exit = safexcel_xcbcmac_cra_exit,
2045 				.cra_module = THIS_MODULE,
2046 			},
2047 		},
2048 	},
2049 };
2050 
2051 static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2052 				unsigned int len)
2053 {
2054 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2055 	__be64 consts[4];
2056 	u64 _const[2];
2057 	u8 msb_mask, gfmask;
2058 	int ret, i;
2059 
2060 	/* precompute the CMAC key material */
2061 	ret = aes_expandkey(ctx->aes, key, len);
2062 	if (ret)
2063 		return ret;
2064 
2065 	for (i = 0; i < len / sizeof(u32); i++)
2066 		ctx->base.ipad.word[i + 8] = swab32(ctx->aes->key_enc[i]);
2067 
2068 	/* code below borrowed from crypto/cmac.c */
2069 	/* encrypt the zero block */
2070 	memset(consts, 0, AES_BLOCK_SIZE);
2071 	aes_encrypt(ctx->aes, (u8 *)consts, (u8 *)consts);
2072 
2073 	gfmask = 0x87;
2074 	_const[0] = be64_to_cpu(consts[1]);
2075 	_const[1] = be64_to_cpu(consts[0]);
2076 
2077 	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
2078 	for (i = 0; i < 4; i += 2) {
2079 		msb_mask = ((s64)_const[1] >> 63) & gfmask;
2080 		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
2081 		_const[0] = (_const[0] << 1) ^ msb_mask;
2082 
2083 		consts[i + 0] = cpu_to_be64(_const[1]);
2084 		consts[i + 1] = cpu_to_be64(_const[0]);
2085 	}
2086 	/* end of code borrowed from crypto/cmac.c */
2087 
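	/*
	 * consts[] now holds the two CMAC subkeys K1 and K2; they are loaded
	 * into the context ahead of the AES key (which sits at word 8).
	 */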
2088 	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2089 		ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2090 
2091 	if (len == AES_KEYSIZE_192) {
2092 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2093 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2094 	} else if (len == AES_KEYSIZE_256) {
2095 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2096 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2097 	} else {
2098 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2099 		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2100 	}
2101 	ctx->cbcmac = false;
2102 
2103 	return 0;
2104 }
2105 
2106 struct safexcel_alg_template safexcel_alg_cmac = {
2107 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2108 	.algo_mask = 0,
2109 	.alg.ahash = {
2110 		.init = safexcel_cbcmac_init,
2111 		.update = safexcel_ahash_update,
2112 		.final = safexcel_ahash_final,
2113 		.finup = safexcel_ahash_finup,
2114 		.digest = safexcel_cbcmac_digest,
2115 		.setkey = safexcel_cmac_setkey,
2116 		.export = safexcel_ahash_export,
2117 		.import = safexcel_ahash_import,
2118 		.halg = {
2119 			.digestsize = AES_BLOCK_SIZE,
2120 			.statesize = sizeof(struct safexcel_ahash_export_state),
2121 			.base = {
2122 				.cra_name = "cmac(aes)",
2123 				.cra_driver_name = "safexcel-cmac-aes",
2124 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2125 				.cra_flags = CRYPTO_ALG_ASYNC |
2126 					     CRYPTO_ALG_ALLOCATES_MEMORY |
2127 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2128 				.cra_blocksize = AES_BLOCK_SIZE,
2129 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2130 				.cra_init = safexcel_xcbcmac_cra_init,
2131 				.cra_exit = safexcel_xcbcmac_cra_exit,
2132 				.cra_module = THIS_MODULE,
2133 			},
2134 		},
2135 	},
2136 };
2137 
2138 static int safexcel_sm3_init(struct ahash_request *areq)
2139 {
2140 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2141 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2142 
2143 	memset(req, 0, sizeof(*req));
2144 
2145 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2146 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2147 	req->state_sz = SM3_DIGEST_SIZE;
2148 	req->digest_sz = SM3_DIGEST_SIZE;
2149 	req->block_sz = SM3_BLOCK_SIZE;
2150 
2151 	return 0;
2152 }
2153 
2154 static int safexcel_sm3_digest(struct ahash_request *areq)
2155 {
2156 	int ret = safexcel_sm3_init(areq);
2157 
2158 	if (ret)
2159 		return ret;
2160 
2161 	return safexcel_ahash_finup(areq);
2162 }
2163 
2164 struct safexcel_alg_template safexcel_alg_sm3 = {
2165 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2166 	.algo_mask = SAFEXCEL_ALG_SM3,
2167 	.alg.ahash = {
2168 		.init = safexcel_sm3_init,
2169 		.update = safexcel_ahash_update,
2170 		.final = safexcel_ahash_final,
2171 		.finup = safexcel_ahash_finup,
2172 		.digest = safexcel_sm3_digest,
2173 		.export = safexcel_ahash_export,
2174 		.import = safexcel_ahash_import,
2175 		.halg = {
2176 			.digestsize = SM3_DIGEST_SIZE,
2177 			.statesize = sizeof(struct safexcel_ahash_export_state),
2178 			.base = {
2179 				.cra_name = "sm3",
2180 				.cra_driver_name = "safexcel-sm3",
2181 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2182 				.cra_flags = CRYPTO_ALG_ASYNC |
2183 					     CRYPTO_ALG_ALLOCATES_MEMORY |
2184 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2185 				.cra_blocksize = SM3_BLOCK_SIZE,
2186 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2187 				.cra_init = safexcel_ahash_cra_init,
2188 				.cra_exit = safexcel_ahash_cra_exit,
2189 				.cra_module = THIS_MODULE,
2190 			},
2191 		},
2192 	},
2193 };
2194 
2195 static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2196 				    unsigned int keylen)
2197 {
2198 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2199 					SM3_DIGEST_SIZE);
2200 }
2201 
2202 static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2203 {
2204 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2205 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2206 
2207 	memset(req, 0, sizeof(*req));
2208 
2209 	/* Start from ipad precompute */
2210 	memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
2211 	/* Already processed the key^ipad part now! */
2212 	req->len	= SM3_BLOCK_SIZE;
2213 	req->processed	= SM3_BLOCK_SIZE;
2214 
2215 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2216 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2217 	req->state_sz = SM3_DIGEST_SIZE;
2218 	req->digest_sz = SM3_DIGEST_SIZE;
2219 	req->block_sz = SM3_BLOCK_SIZE;
2220 	req->hmac = true;
2221 
2222 	return 0;
2223 }
2224 
2225 static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2226 {
2227 	int ret = safexcel_hmac_sm3_init(areq);
2228 
2229 	if (ret)
2230 		return ret;
2231 
2232 	return safexcel_ahash_finup(areq);
2233 }
2234 
2235 struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2236 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2237 	.algo_mask = SAFEXCEL_ALG_SM3,
2238 	.alg.ahash = {
2239 		.init = safexcel_hmac_sm3_init,
2240 		.update = safexcel_ahash_update,
2241 		.final = safexcel_ahash_final,
2242 		.finup = safexcel_ahash_finup,
2243 		.digest = safexcel_hmac_sm3_digest,
2244 		.setkey = safexcel_hmac_sm3_setkey,
2245 		.export = safexcel_ahash_export,
2246 		.import = safexcel_ahash_import,
2247 		.halg = {
2248 			.digestsize = SM3_DIGEST_SIZE,
2249 			.statesize = sizeof(struct safexcel_ahash_export_state),
2250 			.base = {
2251 				.cra_name = "hmac(sm3)",
2252 				.cra_driver_name = "safexcel-hmac-sm3",
2253 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2254 				.cra_flags = CRYPTO_ALG_ASYNC |
2255 					     CRYPTO_ALG_ALLOCATES_MEMORY |
2256 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2257 				.cra_blocksize = SM3_BLOCK_SIZE,
2258 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2259 				.cra_init = safexcel_ahash_cra_init,
2260 				.cra_exit = safexcel_ahash_cra_exit,
2261 				.cra_module = THIS_MODULE,
2262 			},
2263 		},
2264 	},
2265 };
2266 
2267 static int safexcel_sha3_224_init(struct ahash_request *areq)
2268 {
2269 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2270 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2271 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2272 
2273 	memset(req, 0, sizeof(*req));
2274 
2275 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2276 	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2277 	req->state_sz = SHA3_224_DIGEST_SIZE;
2278 	req->digest_sz = SHA3_224_DIGEST_SIZE;
2279 	req->block_sz = SHA3_224_BLOCK_SIZE;
2280 	ctx->do_fallback = false;
2281 	ctx->fb_init_done = false;
2282 	return 0;
2283 }
2284 
2285 static int safexcel_sha3_fbcheck(struct ahash_request *req)
2286 {
2287 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2288 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2289 	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2290 	int ret = 0;
2291 
2292 	if (ctx->do_fallback) {
2293 		ahash_request_set_tfm(subreq, ctx->fback);
2294 		ahash_request_set_callback(subreq, req->base.flags,
2295 					   req->base.complete, req->base.data);
2296 		ahash_request_set_crypt(subreq, req->src, req->result,
2297 					req->nbytes);
2298 		if (!ctx->fb_init_done) {
2299 			if (ctx->fb_do_setkey) {
2300 				/* Set fallback cipher HMAC key */
2301 				u8 key[SHA3_224_BLOCK_SIZE];
2302 
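				/*
				 * The HMAC key is stored half in ipad and
				 * half in opad; reassemble the full
				 * blocksize key for the fallback tfm.
				 */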
2303 				memcpy(key, &ctx->base.ipad,
2304 				       crypto_ahash_blocksize(ctx->fback) / 2);
2305 				memcpy(key +
2306 				       crypto_ahash_blocksize(ctx->fback) / 2,
2307 				       &ctx->base.opad,
2308 				       crypto_ahash_blocksize(ctx->fback) / 2);
2309 				ret = crypto_ahash_setkey(ctx->fback, key,
2310 					crypto_ahash_blocksize(ctx->fback));
2311 				memzero_explicit(key,
2312 					crypto_ahash_blocksize(ctx->fback));
2313 				ctx->fb_do_setkey = false;
2314 			}
2315 			ret = ret ?: crypto_ahash_init(subreq);
2316 			ctx->fb_init_done = true;
2317 		}
2318 	}
2319 	return ret;
2320 }
2321 
2322 static int safexcel_sha3_update(struct ahash_request *req)
2323 {
2324 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2325 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2326 	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2327 
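	/*
	 * Any update forces the software fallback; only one-shot digests are
	 * handled by the engine (see safexcel_sha3_finup).
	 */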
2328 	ctx->do_fallback = true;
2329 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2330 }
2331 
2332 static int safexcel_sha3_final(struct ahash_request *req)
2333 {
2334 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2335 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2336 	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2337 
2338 	ctx->do_fallback = true;
2339 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2340 }
2341 
2342 static int safexcel_sha3_finup(struct ahash_request *req)
2343 {
2344 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2345 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2346 	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2347 
2348 	ctx->do_fallback |= !req->nbytes;
2349 	if (ctx->do_fallback)
2350 		/* Update or ex/import happened or len 0, cannot use the HW */
2351 		return safexcel_sha3_fbcheck(req) ?:
2352 		       crypto_ahash_finup(subreq);
2353 	else
2354 		return safexcel_ahash_finup(req);
2355 }
2356 
2357 static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2358 {
2359 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2360 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2361 	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2362 
2363 	ctx->do_fallback = true;
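	/*
	 * Force safexcel_sha3_fbcheck() to (re)init the fallback and apply
	 * any pending key.
	 */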
2364 	ctx->fb_init_done = false;
2365 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2366 }
2367 
2368 static int safexcel_sha3_224_digest(struct ahash_request *req)
2369 {
2370 	if (req->nbytes)
2371 		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2372 
2373 	/* HW cannot do zero length hash, use fallback instead */
2374 	return safexcel_sha3_digest_fallback(req);
2375 }
2376 
2377 static int safexcel_sha3_export(struct ahash_request *req, void *out)
2378 {
2379 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2380 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2381 	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2382 
2383 	ctx->do_fallback = true;
2384 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2385 }
2386 
2387 static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2388 {
2389 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2390 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2391 	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2392 
2393 	ctx->do_fallback = true;
2394 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2395 	// return safexcel_ahash_import(req, in);
2396 }
2397 
2398 static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2399 {
2400 	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2401 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2402 
2403 	safexcel_ahash_cra_init(tfm);
2404 
2405 	/* Allocate fallback implementation */
2406 	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2407 					CRYPTO_ALG_ASYNC |
2408 					CRYPTO_ALG_NEED_FALLBACK);
2409 	if (IS_ERR(ctx->fback))
2410 		return PTR_ERR(ctx->fback);
2411 
2412 	/* Update statesize from fallback algorithm! */
2413 	crypto_hash_alg_common(ahash)->statesize =
2414 		crypto_ahash_statesize(ctx->fback);
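	/*
	 * The request context must be large enough for either our own request
	 * state or a complete fallback ahash_request.
	 */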
2415 	crypto_ahash_set_reqsize_dma(
2416 		ahash, max(sizeof(struct safexcel_ahash_req),
2417 			   sizeof(struct ahash_request) +
2418 			   crypto_ahash_reqsize(ctx->fback)));
2419 	return 0;
2420 }
2421 
2422 static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2423 {
2424 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2425 
2426 	crypto_free_ahash(ctx->fback);
2427 	safexcel_ahash_cra_exit(tfm);
2428 }
2429 
2430 struct safexcel_alg_template safexcel_alg_sha3_224 = {
2431 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2432 	.algo_mask = SAFEXCEL_ALG_SHA3,
2433 	.alg.ahash = {
2434 		.init = safexcel_sha3_224_init,
2435 		.update = safexcel_sha3_update,
2436 		.final = safexcel_sha3_final,
2437 		.finup = safexcel_sha3_finup,
2438 		.digest = safexcel_sha3_224_digest,
2439 		.export = safexcel_sha3_export,
2440 		.import = safexcel_sha3_import,
2441 		.halg = {
2442 			.digestsize = SHA3_224_DIGEST_SIZE,
2443 			.statesize = sizeof(struct safexcel_ahash_export_state),
2444 			.base = {
2445 				.cra_name = "sha3-224",
2446 				.cra_driver_name = "safexcel-sha3-224",
2447 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2448 				.cra_flags = CRYPTO_ALG_ASYNC |
2449 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2450 					     CRYPTO_ALG_NEED_FALLBACK,
2451 				.cra_blocksize = SHA3_224_BLOCK_SIZE,
2452 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2453 				.cra_init = safexcel_sha3_cra_init,
2454 				.cra_exit = safexcel_sha3_cra_exit,
2455 				.cra_module = THIS_MODULE,
2456 			},
2457 		},
2458 	},
2459 };
2460 
2461 static int safexcel_sha3_256_init(struct ahash_request *areq)
2462 {
2463 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2464 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2465 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2466 
2467 	memset(req, 0, sizeof(*req));
2468 
2469 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2470 	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2471 	req->state_sz = SHA3_256_DIGEST_SIZE;
2472 	req->digest_sz = SHA3_256_DIGEST_SIZE;
2473 	req->block_sz = SHA3_256_BLOCK_SIZE;
2474 	ctx->do_fallback = false;
2475 	ctx->fb_init_done = false;
2476 	return 0;
2477 }
2478 
2479 static int safexcel_sha3_256_digest(struct ahash_request *req)
2480 {
2481 	if (req->nbytes)
2482 		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2483 
2484 	/* HW cannot do zero length hash, use fallback instead */
2485 	return safexcel_sha3_digest_fallback(req);
2486 }
2487 
2488 struct safexcel_alg_template safexcel_alg_sha3_256 = {
2489 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2490 	.algo_mask = SAFEXCEL_ALG_SHA3,
2491 	.alg.ahash = {
2492 		.init = safexcel_sha3_256_init,
2493 		.update = safexcel_sha3_update,
2494 		.final = safexcel_sha3_final,
2495 		.finup = safexcel_sha3_finup,
2496 		.digest = safexcel_sha3_256_digest,
2497 		.export = safexcel_sha3_export,
2498 		.import = safexcel_sha3_import,
2499 		.halg = {
2500 			.digestsize = SHA3_256_DIGEST_SIZE,
2501 			.statesize = sizeof(struct safexcel_ahash_export_state),
2502 			.base = {
2503 				.cra_name = "sha3-256",
2504 				.cra_driver_name = "safexcel-sha3-256",
2505 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2506 				.cra_flags = CRYPTO_ALG_ASYNC |
2507 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2508 					     CRYPTO_ALG_NEED_FALLBACK,
2509 				.cra_blocksize = SHA3_256_BLOCK_SIZE,
2510 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2511 				.cra_init = safexcel_sha3_cra_init,
2512 				.cra_exit = safexcel_sha3_cra_exit,
2513 				.cra_module = THIS_MODULE,
2514 			},
2515 		},
2516 	},
2517 };
2518 
2519 static int safexcel_sha3_384_init(struct ahash_request *areq)
2520 {
2521 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2522 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2523 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2524 
2525 	memset(req, 0, sizeof(*req));
2526 
2527 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2528 	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2529 	req->state_sz = SHA3_384_DIGEST_SIZE;
2530 	req->digest_sz = SHA3_384_DIGEST_SIZE;
2531 	req->block_sz = SHA3_384_BLOCK_SIZE;
2532 	ctx->do_fallback = false;
2533 	ctx->fb_init_done = false;
2534 	return 0;
2535 }
2536 
2537 static int safexcel_sha3_384_digest(struct ahash_request *req)
2538 {
2539 	if (req->nbytes)
2540 		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2541 
2542 	/* HW cannot do zero length hash, use fallback instead */
2543 	return safexcel_sha3_digest_fallback(req);
2544 }
2545 
2546 struct safexcel_alg_template safexcel_alg_sha3_384 = {
2547 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2548 	.algo_mask = SAFEXCEL_ALG_SHA3,
2549 	.alg.ahash = {
2550 		.init = safexcel_sha3_384_init,
2551 		.update = safexcel_sha3_update,
2552 		.final = safexcel_sha3_final,
2553 		.finup = safexcel_sha3_finup,
2554 		.digest = safexcel_sha3_384_digest,
2555 		.export = safexcel_sha3_export,
2556 		.import = safexcel_sha3_import,
2557 		.halg = {
2558 			.digestsize = SHA3_384_DIGEST_SIZE,
2559 			.statesize = sizeof(struct safexcel_ahash_export_state),
2560 			.base = {
2561 				.cra_name = "sha3-384",
2562 				.cra_driver_name = "safexcel-sha3-384",
2563 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2564 				.cra_flags = CRYPTO_ALG_ASYNC |
2565 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2566 					     CRYPTO_ALG_NEED_FALLBACK,
2567 				.cra_blocksize = SHA3_384_BLOCK_SIZE,
2568 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2569 				.cra_init = safexcel_sha3_cra_init,
2570 				.cra_exit = safexcel_sha3_cra_exit,
2571 				.cra_module = THIS_MODULE,
2572 			},
2573 		},
2574 	},
2575 };
2576 
2577 static int safexcel_sha3_512_init(struct ahash_request *areq)
2578 {
2579 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2580 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2581 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2582 
2583 	memset(req, 0, sizeof(*req));
2584 
2585 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2586 	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2587 	req->state_sz = SHA3_512_DIGEST_SIZE;
2588 	req->digest_sz = SHA3_512_DIGEST_SIZE;
2589 	req->block_sz = SHA3_512_BLOCK_SIZE;
2590 	ctx->do_fallback = false;
2591 	ctx->fb_init_done = false;
2592 	return 0;
2593 }
2594 
2595 static int safexcel_sha3_512_digest(struct ahash_request *req)
2596 {
2597 	if (req->nbytes)
2598 		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2599 
2600 	/* HW cannot do zero length hash, use fallback instead */
2601 	return safexcel_sha3_digest_fallback(req);
2602 }
2603 
2604 struct safexcel_alg_template safexcel_alg_sha3_512 = {
2605 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2606 	.algo_mask = SAFEXCEL_ALG_SHA3,
2607 	.alg.ahash = {
2608 		.init = safexcel_sha3_512_init,
2609 		.update = safexcel_sha3_update,
2610 		.final = safexcel_sha3_final,
2611 		.finup = safexcel_sha3_finup,
2612 		.digest = safexcel_sha3_512_digest,
2613 		.export = safexcel_sha3_export,
2614 		.import = safexcel_sha3_import,
2615 		.halg = {
2616 			.digestsize = SHA3_512_DIGEST_SIZE,
2617 			.statesize = sizeof(struct safexcel_ahash_export_state),
2618 			.base = {
2619 				.cra_name = "sha3-512",
2620 				.cra_driver_name = "safexcel-sha3-512",
2621 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2622 				.cra_flags = CRYPTO_ALG_ASYNC |
2623 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2624 					     CRYPTO_ALG_NEED_FALLBACK,
2625 				.cra_blocksize = SHA3_512_BLOCK_SIZE,
2626 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2627 				.cra_init = safexcel_sha3_cra_init,
2628 				.cra_exit = safexcel_sha3_cra_exit,
2629 				.cra_module = THIS_MODULE,
2630 			},
2631 		},
2632 	},
2633 };
2634 
2635 static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2636 {
2637 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2638 	int ret;
2639 
2640 	ret = safexcel_sha3_cra_init(tfm);
2641 	if (ret)
2642 		return ret;
2643 
2644 	/* Allocate precalc basic digest implementation */
2645 	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2646 	if (IS_ERR(ctx->shpre))
2647 		return PTR_ERR(ctx->shpre);
2648 
2649 	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2650 			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2651 	if (!ctx->shdesc) {
2652 		crypto_free_shash(ctx->shpre);
2653 		return -ENOMEM;
2654 	}
2655 	ctx->shdesc->tfm = ctx->shpre;
2656 	return 0;
2657 }
2658 
2659 static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2660 {
2661 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2662 
2663 	crypto_free_ahash(ctx->fback);
2664 	crypto_free_shash(ctx->shpre);
2665 	kfree(ctx->shdesc);
2666 	safexcel_ahash_cra_exit(tfm);
2667 }
2668 
2669 static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2670 				     unsigned int keylen)
2671 {
2672 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2673 	int ret = 0;
2674 
2675 	if (keylen > crypto_ahash_blocksize(tfm)) {
2676 		/*
2677 		 * If the key is larger than the blocksize, then hash it
2678 		 * first using our fallback cipher
2679 		 */
2680 		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2681 					  ctx->base.ipad.byte);
2682 		keylen = crypto_shash_digestsize(ctx->shpre);
2683 
2684 		/*
2685 		 * If the digest is larger than half the blocksize, we need to
2686 		 * move the rest to opad due to the way our HMAC infra works.
2687 		 */
2688 		if (keylen > crypto_ahash_blocksize(tfm) / 2)
2689 			/* Buffers overlap, need to use memmove instead of memcpy! */
2690 			memmove(&ctx->base.opad,
2691 				ctx->base.ipad.byte +
2692 					crypto_ahash_blocksize(tfm) / 2,
2693 				keylen - crypto_ahash_blocksize(tfm) / 2);
2694 	} else {
2695 		/*
2696 		 * Copy the key to our ipad & opad buffers
2697 		 * Note that ipad and opad each contain one half of the key,
2698 		 * to match the existing HMAC driver infrastructure.
2699 		 */
2700 		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2701 			memcpy(&ctx->base.ipad, key, keylen);
2702 		} else {
2703 			memcpy(&ctx->base.ipad, key,
2704 			       crypto_ahash_blocksize(tfm) / 2);
2705 			memcpy(&ctx->base.opad,
2706 			       key + crypto_ahash_blocksize(tfm) / 2,
2707 			       keylen - crypto_ahash_blocksize(tfm) / 2);
2708 		}
2709 	}
2710 
2711 	/* Pad key with zeroes */
2712 	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2713 		memset(ctx->base.ipad.byte + keylen, 0,
2714 		       crypto_ahash_blocksize(tfm) / 2 - keylen);
2715 		memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
2716 	} else {
2717 		memset(ctx->base.opad.byte + keylen -
2718 		       crypto_ahash_blocksize(tfm) / 2, 0,
2719 		       crypto_ahash_blocksize(tfm) - keylen);
2720 	}
2721 
2722 	/* If doing fallback, still need to set the new key! */
2723 	ctx->fb_do_setkey = true;
2724 	return ret;
2725 }
2726 
2727 static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2728 {
2729 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2730 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2731 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2732 
2733 	memset(req, 0, sizeof(*req));
2734 
2735 	/* Copy (half of) the key */
2736 	memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
2737 	/* Start of HMAC should have len == processed == blocksize */
2738 	req->len	= SHA3_224_BLOCK_SIZE;
2739 	req->processed	= SHA3_224_BLOCK_SIZE;
2740 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2741 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2742 	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2743 	req->digest_sz = SHA3_224_DIGEST_SIZE;
2744 	req->block_sz = SHA3_224_BLOCK_SIZE;
2745 	req->hmac = true;
2746 	ctx->do_fallback = false;
2747 	ctx->fb_init_done = false;
2748 	return 0;
2749 }
2750 
2751 static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2752 {
2753 	if (req->nbytes)
2754 		return safexcel_hmac_sha3_224_init(req) ?:
2755 		       safexcel_ahash_finup(req);
2756 
2757 	/* HW cannot do zero length HMAC, use fallback instead */
2758 	return safexcel_sha3_digest_fallback(req);
2759 }
2760 
2761 static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2762 {
2763 	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2764 }
2765 
2766 struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2767 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2768 	.algo_mask = SAFEXCEL_ALG_SHA3,
2769 	.alg.ahash = {
2770 		.init = safexcel_hmac_sha3_224_init,
2771 		.update = safexcel_sha3_update,
2772 		.final = safexcel_sha3_final,
2773 		.finup = safexcel_sha3_finup,
2774 		.digest = safexcel_hmac_sha3_224_digest,
2775 		.setkey = safexcel_hmac_sha3_setkey,
2776 		.export = safexcel_sha3_export,
2777 		.import = safexcel_sha3_import,
2778 		.halg = {
2779 			.digestsize = SHA3_224_DIGEST_SIZE,
2780 			.statesize = sizeof(struct safexcel_ahash_export_state),
2781 			.base = {
2782 				.cra_name = "hmac(sha3-224)",
2783 				.cra_driver_name = "safexcel-hmac-sha3-224",
2784 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2785 				.cra_flags = CRYPTO_ALG_ASYNC |
2786 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2787 					     CRYPTO_ALG_NEED_FALLBACK,
2788 				.cra_blocksize = SHA3_224_BLOCK_SIZE,
2789 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2790 				.cra_init = safexcel_hmac_sha3_224_cra_init,
2791 				.cra_exit = safexcel_hmac_sha3_cra_exit,
2792 				.cra_module = THIS_MODULE,
2793 			},
2794 		},
2795 	},
2796 };
2797 
2798 static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2799 {
2800 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2801 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2802 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2803 
2804 	memset(req, 0, sizeof(*req));
2805 
2806 	/* Copy (half of) the key */
2807 	memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
2808 	/* Start of HMAC should have len == processed == blocksize */
2809 	req->len	= SHA3_256_BLOCK_SIZE;
2810 	req->processed	= SHA3_256_BLOCK_SIZE;
2811 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2812 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2813 	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2814 	req->digest_sz = SHA3_256_DIGEST_SIZE;
2815 	req->block_sz = SHA3_256_BLOCK_SIZE;
2816 	req->hmac = true;
2817 	ctx->do_fallback = false;
2818 	ctx->fb_init_done = false;
2819 	return 0;
2820 }
2821 
2822 static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2823 {
2824 	if (req->nbytes)
2825 		return safexcel_hmac_sha3_256_init(req) ?:
2826 		       safexcel_ahash_finup(req);
2827 
2828 	/* HW cannot do zero length HMAC, use fallback instead */
2829 	return safexcel_sha3_digest_fallback(req);
2830 }
2831 
2832 static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2833 {
2834 	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2835 }
2836 
2837 struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2838 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2839 	.algo_mask = SAFEXCEL_ALG_SHA3,
2840 	.alg.ahash = {
2841 		.init = safexcel_hmac_sha3_256_init,
2842 		.update = safexcel_sha3_update,
2843 		.final = safexcel_sha3_final,
2844 		.finup = safexcel_sha3_finup,
2845 		.digest = safexcel_hmac_sha3_256_digest,
2846 		.setkey = safexcel_hmac_sha3_setkey,
2847 		.export = safexcel_sha3_export,
2848 		.import = safexcel_sha3_import,
2849 		.halg = {
2850 			.digestsize = SHA3_256_DIGEST_SIZE,
2851 			.statesize = sizeof(struct safexcel_ahash_export_state),
2852 			.base = {
2853 				.cra_name = "hmac(sha3-256)",
2854 				.cra_driver_name = "safexcel-hmac-sha3-256",
2855 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2856 				.cra_flags = CRYPTO_ALG_ASYNC |
2857 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2858 					     CRYPTO_ALG_NEED_FALLBACK,
2859 				.cra_blocksize = SHA3_256_BLOCK_SIZE,
2860 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2861 				.cra_init = safexcel_hmac_sha3_256_cra_init,
2862 				.cra_exit = safexcel_hmac_sha3_cra_exit,
2863 				.cra_module = THIS_MODULE,
2864 			},
2865 		},
2866 	},
2867 };
2868 
2869 static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
2870 {
2871 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2872 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2873 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2874 
2875 	memset(req, 0, sizeof(*req));
2876 
2877 	/* Copy (half of) the key */
2878 	memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
2879 	/* Start of HMAC should have len == processed == blocksize */
2880 	req->len	= SHA3_384_BLOCK_SIZE;
2881 	req->processed	= SHA3_384_BLOCK_SIZE;
2882 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2883 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2884 	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
2885 	req->digest_sz = SHA3_384_DIGEST_SIZE;
2886 	req->block_sz = SHA3_384_BLOCK_SIZE;
2887 	req->hmac = true;
2888 	ctx->do_fallback = false;
2889 	ctx->fb_init_done = false;
2890 	return 0;
2891 }
2892 
2893 static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
2894 {
2895 	if (req->nbytes)
2896 		return safexcel_hmac_sha3_384_init(req) ?:
2897 		       safexcel_ahash_finup(req);
2898 
2899 	/* HW cannot do zero length HMAC, use fallback instead */
2900 	return safexcel_sha3_digest_fallback(req);
2901 }
2902 
2903 static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
2904 {
2905 	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
2906 }
2907 
2908 struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
2909 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2910 	.algo_mask = SAFEXCEL_ALG_SHA3,
2911 	.alg.ahash = {
2912 		.init = safexcel_hmac_sha3_384_init,
2913 		.update = safexcel_sha3_update,
2914 		.final = safexcel_sha3_final,
2915 		.finup = safexcel_sha3_finup,
2916 		.digest = safexcel_hmac_sha3_384_digest,
2917 		.setkey = safexcel_hmac_sha3_setkey,
2918 		.export = safexcel_sha3_export,
2919 		.import = safexcel_sha3_import,
2920 		.halg = {
2921 			.digestsize = SHA3_384_DIGEST_SIZE,
2922 			.statesize = sizeof(struct safexcel_ahash_export_state),
2923 			.base = {
2924 				.cra_name = "hmac(sha3-384)",
2925 				.cra_driver_name = "safexcel-hmac-sha3-384",
2926 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2927 				.cra_flags = CRYPTO_ALG_ASYNC |
2928 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2929 					     CRYPTO_ALG_NEED_FALLBACK,
2930 				.cra_blocksize = SHA3_384_BLOCK_SIZE,
2931 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2932 				.cra_init = safexcel_hmac_sha3_384_cra_init,
2933 				.cra_exit = safexcel_hmac_sha3_cra_exit,
2934 				.cra_module = THIS_MODULE,
2935 			},
2936 		},
2937 	},
2938 };
2939 
2940 static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
2941 {
2942 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2943 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2944 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2945 
2946 	memset(req, 0, sizeof(*req));
2947 
2948 	/* Copy (half of) the key */
2949 	memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
2950 	/* Start of HMAC should have len == processed == blocksize */
2951 	req->len	= SHA3_512_BLOCK_SIZE;
2952 	req->processed	= SHA3_512_BLOCK_SIZE;
2953 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2954 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2955 	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
2956 	req->digest_sz = SHA3_512_DIGEST_SIZE;
2957 	req->block_sz = SHA3_512_BLOCK_SIZE;
2958 	req->hmac = true;
2959 	ctx->do_fallback = false;
2960 	ctx->fb_init_done = false;
2961 	return 0;
2962 }
2963 
2964 static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
2965 {
2966 	if (req->nbytes)
2967 		return safexcel_hmac_sha3_512_init(req) ?:
2968 		       safexcel_ahash_finup(req);
2969 
2970 	/* HW cannot do zero length HMAC, use fallback instead */
2971 	return safexcel_sha3_digest_fallback(req);
2972 }
2973 
2974 static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
2975 {
2976 	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
2977 }

2978 struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
2979 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2980 	.algo_mask = SAFEXCEL_ALG_SHA3,
2981 	.alg.ahash = {
2982 		.init = safexcel_hmac_sha3_512_init,
2983 		.update = safexcel_sha3_update,
2984 		.final = safexcel_sha3_final,
2985 		.finup = safexcel_sha3_finup,
2986 		.digest = safexcel_hmac_sha3_512_digest,
2987 		.setkey = safexcel_hmac_sha3_setkey,
2988 		.export = safexcel_sha3_export,
2989 		.import = safexcel_sha3_import,
2990 		.halg = {
2991 			.digestsize = SHA3_512_DIGEST_SIZE,
2992 			.statesize = sizeof(struct safexcel_ahash_export_state),
2993 			.base = {
2994 				.cra_name = "hmac(sha3-512)",
2995 				.cra_driver_name = "safexcel-hmac-sha3-512",
2996 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2997 				.cra_flags = CRYPTO_ALG_ASYNC |
2998 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2999 					     CRYPTO_ALG_NEED_FALLBACK,
3000 				.cra_blocksize = SHA3_512_BLOCK_SIZE,
3001 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3002 				.cra_init = safexcel_hmac_sha3_512_cra_init,
3003 				.cra_exit = safexcel_hmac_sha3_cra_exit,
3004 				.cra_module = THIS_MODULE,
3005 			},
3006 		},
3007 	},
3008 };
3009