xref: /linux/crypto/testmgr.c (revision f79e4d5f92a129a1159c973735007d4ddc8541f3)
1 /*
2  * Algorithm testing framework and tests.
3  *
4  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5  * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6  * Copyright (c) 2007 Nokia Siemens Networks
7  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8  *
9  * Updated RFC4106 AES-GCM testing.
10  *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11  *             Adrian Hoban <adrian.hoban@intel.com>
12  *             Gabriele Paoloni <gabriele.paoloni@intel.com>
13  *             Tadeusz Struk (tadeusz.struk@intel.com)
14  *    Copyright (c) 2010, Intel Corporation.
15  *
16  * This program is free software; you can redistribute it and/or modify it
17  * under the terms of the GNU General Public License as published by the Free
18  * Software Foundation; either version 2 of the License, or (at your option)
19  * any later version.
20  *
21  */
22 
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35 #include <crypto/kpp.h>
36 #include <crypto/acompress.h>
37 
38 #include "internal.h"
39 
/* "notests" module parameter: when set, skip all crypto self-tests. */
static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");
43 
44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
45 
46 /* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	/* Self-tests are compiled out; unconditionally report success. */
	return 0;
}
51 
52 #else
53 
54 #include "testmgr.h"
55 
/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 * IDX[k] >> PAGE_SHIFT selects the page, offset_in_page(IDX[k]) the
 * offset within it.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1511
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher() to select the direction.
 */
#define ENCRYPT 1
#define DECRYPT 0
78 
/*
 * Test vector containers, one per algorithm type.  Each pairs a test
 * vector array with its element count; AEAD keeps separate vector sets
 * per direction, and compression keeps separate compress/decompress sets.
 */
struct aead_test_suite {
	struct {
		const struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};
122 
/*
 * Descriptor tying an algorithm name to the routine and test vectors
 * used to verify it.
 */
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	/* Vector set for this algorithm; which member is valid depends on ->test. */
	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};
140 
/* The cross-page offsets IDX1..IDX8 gathered for indexed access. */
static const unsigned int IDX[8] = {
	IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
143 
144 static void hexdump(unsigned char *buf, unsigned int len)
145 {
146 	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
147 			16, 1,
148 			buf, len, false);
149 }
150 
151 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
152 {
153 	int i;
154 
155 	for (i = 0; i < XBUFSIZE; i++) {
156 		buf[i] = (void *)__get_free_page(GFP_KERNEL);
157 		if (!buf[i])
158 			goto err_free_buf;
159 	}
160 
161 	return 0;
162 
163 err_free_buf:
164 	while (i-- > 0)
165 		free_page((unsigned long)buf[i]);
166 
167 	return -ENOMEM;
168 }
169 
170 static void testmgr_free_buf(char *buf[XBUFSIZE])
171 {
172 	int i;
173 
174 	for (i = 0; i < XBUFSIZE; i++)
175 		free_page((unsigned long)buf[i]);
176 }
177 
/*
 * Check that the first @size bytes of @result still hold the fill byte
 * @c.  Returns 0 if untouched, -EINVAL on the first differing byte.
 */
static int ahash_guard_result(char *result, char c, int size)
{
	const char *p = result;
	const char *end = result + size;

	while (p < end) {
		if (*p != c)
			return -EINVAL;
		p++;
	}

	return 0;
}
189 
/*
 * Continue a partially-hashed request by round-tripping its state through
 * export()/import() on a freshly allocated request, then feeding it the
 * k-th plaintext chunk (starting at offset @temp) via update().
 *
 * On success, *preq is replaced with the new request (the old one has
 * been freed) and 0 is returned.  On failure a negative errno is
 * returned; the caller must not touch the old request again.
 *
 * Guard bytes are placed just past the exported state to catch an
 * export() writing beyond crypto_ahash_statesize() bytes, and
 * req->result is pre-filled so an export()/import() that illegitimately
 * touches it is detected.
 */
static int ahash_partial_update(struct ahash_request **preq,
	struct crypto_ahash *tfm, const struct hash_testvec *template,
	void *hash_buff, int k, int temp, struct scatterlist *sg,
	const char *algo, char *result, struct crypto_wait *wait)
{
	char *state;
	struct ahash_request *req;
	int statesize, ret = -EINVAL;
	static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 };
	int digestsize = crypto_ahash_digestsize(tfm);

	req = *preq;
	statesize = crypto_ahash_statesize(
			crypto_ahash_reqtfm(req));
	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
	if (!state) {
		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
		goto out_nostate;
	}
	/* Guard pattern after the state buffer catches export() overruns. */
	memcpy(state + statesize, guard, sizeof(guard));
	/* Pre-fill result so we can tell if export() writes to it. */
	memset(result, 1, digestsize);
	ret = crypto_ahash_export(req, state);
	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
	if (ret) {
		pr_err("alg: hash: Failed to export() for %s\n", algo);
		goto out;
	}
	ret = ahash_guard_result(result, 1, digestsize);
	if (ret) {
		pr_err("alg: hash: Failed, export used req->result for %s\n",
		       algo);
		goto out;
	}
	/* Resume on a brand-new request to prove the state is portable. */
	ahash_request_free(req);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req,
		CRYPTO_TFM_REQ_MAY_BACKLOG,
		crypto_req_done, wait);

	/* Stage the k-th chunk of plaintext for the resumed update(). */
	memcpy(hash_buff, template->plaintext + temp,
		template->tap[k]);
	sg_init_one(&sg[0], hash_buff, template->tap[k]);
	ahash_request_set_crypt(req, sg, result, template->tap[k]);
	ret = crypto_ahash_import(req, state);
	if (ret) {
		pr_err("alg: hash: Failed to import() for %s\n", algo);
		goto out;
	}
	ret = ahash_guard_result(result, 1, digestsize);
	if (ret) {
		pr_err("alg: hash: Failed, import used req->result for %s\n",
		       algo);
		goto out;
	}
	ret = crypto_wait_req(crypto_ahash_update(req), wait);
	if (ret)
		goto out;
	/* Success: hand the replacement request back to the caller. */
	*preq = req;
	ret = 0;
	goto out_noreq;
out:
	ahash_request_free(req);
out_noreq:
	kfree(state);
out_nostate:
	return ret;
}
261 
/*
 * Run the hash vectors in @template against @tfm, placing the data at
 * @align_offset within the buffer pages.  Three passes are made:
 * one-shot tests on contiguous buffers (digest() or init/update/final
 * depending on @use_digest), scatterlist "chunking" tests, and
 * export()/import() partial-update tests.  The last two passes run only
 * when @align_offset == 0.  Returns 0 on success or a negative errno on
 * the first failure.
 */
static int __test_hash(struct crypto_ahash *tfm,
		       const struct hash_testvec *template, unsigned int tcount,
		       bool use_digest, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	size_t digest_size = crypto_ahash_digestsize(tfm);
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char *result;
	char *key;
	struct ahash_request *req;
	struct crypto_wait wait;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	result = kmalloc(digest_size, GFP_KERNEL);
	if (!result)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_nobuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	crypto_init_wait(&wait);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	/* Pass 1: one-shot tests on contiguous (non-chunked) vectors. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
			goto out;

		j++;
		memset(result, 0, digest_size);

		hash_buff = xbuf[0];
		hash_buff += align_offset;

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		} else {
			/*
			 * init/update/final path; result is pre-filled with
			 * 1s so init() or update() touching req->result is
			 * caught by ahash_guard_result().
			 */
			memset(result, 1, digest_size);
			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_update(req), &wait);
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_final(req), &wait);
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* Pass 2: chunked vectors spread over a scatterlist. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;
		memset(result, 0, digest_size);

		temp = 0;
		sg_init_table(sg, template[i].np);
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].plaintext + temp,
					  template[i].tap[k]),
				   template[i].tap[k]);
			temp += template[i].tap[k];
		}

		if (template[i].ksize) {
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			crypto_ahash_clear_flags(tfm, ~0);
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);

			if (ret) {
				printk(KERN_ERR "alg: hash: setkey "
				       "failed on chunking test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
		if (ret) {
			pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Chunking test %d "
			       "failed for %s\n", j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* Pass 3: partial update exercise via export()/import(). */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (template[i].np < 2)
			continue;

		j++;
		memset(result, 0, digest_size);

		ret = -EINVAL;
		hash_buff = xbuf[0];
		memcpy(hash_buff, template[i].plaintext,
			template[i].tap[0]);
		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
					j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
					j, algo, -ret);
				goto out;
			}
		}

		/* Hash the first chunk normally... */
		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
		ret = crypto_wait_req(crypto_ahash_init(req), &wait);
		if (ret) {
			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}
		ret = crypto_wait_req(crypto_ahash_update(req), &wait);
		if (ret) {
			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}

		/* ...then each remaining chunk through an export/import cycle. */
		temp = template[i].tap[0];
		for (k = 1; k < template[i].np; k++) {
			ret = ahash_partial_update(&req, tfm, &template[i],
				hash_buff, k, temp, &sg[0], algo, result,
				&wait);
			if (ret) {
				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
					j, algo, -ret);
				/* req was freed by ahash_partial_update() */
				goto out_noreq;
			}
			temp += template[i].tap[k];
		}
		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
		if (ret) {
			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}
		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			pr_err("alg: hash: Partial Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}
540 
541 static int test_hash(struct crypto_ahash *tfm,
542 		     const struct hash_testvec *template,
543 		     unsigned int tcount, bool use_digest)
544 {
545 	unsigned int alignmask;
546 	int ret;
547 
548 	ret = __test_hash(tfm, template, tcount, use_digest, 0);
549 	if (ret)
550 		return ret;
551 
552 	/* test unaligned buffers, check with one byte offset */
553 	ret = __test_hash(tfm, template, tcount, use_digest, 1);
554 	if (ret)
555 		return ret;
556 
557 	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
558 	if (alignmask) {
559 		/* Check if alignment mask for tfm is correctly set. */
560 		ret = __test_hash(tfm, template, tcount, use_digest,
561 				  alignmask + 1);
562 		if (ret)
563 			return ret;
564 	}
565 
566 	return 0;
567 }
568 
/*
 * Run the AEAD vectors in @template against @tfm in direction @enc
 * (ENCRYPT/DECRYPT).  When @diff_dst is set, output goes to separate
 * destination buffers (the "-ddst" variants); @align_offset shifts the
 * data within its pages.  A first pass runs contiguous-buffer tests; a
 * second pass (only when @align_offset == 0) runs scatterlist "chunk"
 * tests, which additionally check for writes past the expected output.
 * Returns 0 on success or a negative errno on the first failure.
 */
static int __test_aead(struct crypto_aead *tfm, int enc,
		       const struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct crypto_wait wait;
	unsigned int authsize, iv_len;
	void *input;
	void *output;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_noxbuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(array3_size(sizeof(*sg), 8, (diff_dst ? 4 : 2)),
		     GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[16];

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	iv_len = crypto_aead_ivsize(tfm);

	/* Pass 1: contiguous-buffer (non-chunked) vectors. */
	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		/* some templates have no input data but they will
		 * touch input
		 */
		input = xbuf[0];
		input += align_offset;
		assoc = axbuf[0];

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen >
			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
			goto out;

		memcpy(input, template[i].input, template[i].ilen);
		memcpy(assoc, template[i].assoc, template[i].alen);
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, iv_len);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen,
			       MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		/* setkey must fail exactly when the vector is marked .fail. */
		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* Tag length is the difference between result and input lengths. */
		authsize = abs(template[i].rlen - template[i].ilen);
		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		/* sg[0] = assoc data (may be empty), sg[k] = payload. */
		k = !!template[i].alen;
		sg_init_table(sg, k + 1);
		sg_set_buf(&sg[0], assoc, template[i].alen);
		sg_set_buf(&sg[k], input,
			   template[i].ilen + (enc ? authsize : 0));
		output = input;

		if (diff_dst) {
			sg_init_table(sgout, k + 1);
			sg_set_buf(&sgout[0], assoc, template[i].alen);

			output = xoutbuf[0];
			output += align_offset;
			sg_set_buf(&sgout[k], output,
				   template[i].rlen + (enc ? 0 : authsize));
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen, iv);

		aead_request_set_ad(req, template[i].alen);

		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
				      : crypto_aead_decrypt(req), &wait);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = output;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	/* Pass 2: chunked vectors spread over scatterlists. */
	for (i = 0, j = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;

		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, MAX_IVLEN);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen, MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = abs(template[i].rlen - template[i].ilen);

		ret = -EINVAL;
		sg_init_table(sg, template[i].anp + template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].anp + template[i].np);

		/* Scatter the assoc data over template[i].anp chunks. */
		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].anp; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].atap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].assoc + temp,
					  template[i].atap[k]),
				   template[i].atap[k]);
			if (diff_dst)
				sg_set_buf(&sgout[k],
					   axbuf[IDX[k] >> PAGE_SHIFT] +
					   offset_in_page(IDX[k]),
					   template[i].atap[k]);
			temp += template[i].atap[k];
		}

		/* Scatter the payload over template[i].np chunks. */
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
			memcpy(q, template[i].input + temp, template[i].tap[k]);
			sg_set_buf(&sg[template[i].anp + k],
				   q, template[i].tap[k]);

			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memset(q, 0, template[i].tap[k]);

				sg_set_buf(&sgout[template[i].anp + k],
					   q, template[i].tap[k]);
			}

			/*
			 * Zero the byte just past the chunk (tag included when
			 * encrypting) so corruption past the end is visible in
			 * the checks after the operation.
			 */
			n = template[i].tap[k];
			if (k == template[i].np - 1 && enc)
				n += authsize;
			if (offset_in_page(q) + n < PAGE_SIZE)
				q[n] = 0;

			temp += template[i].tap[k];
		}

		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		if (enc) {
			/* The last payload chunk must also hold the tag. */
			if (WARN_ON(sg[template[i].anp + k - 1].offset +
				    sg[template[i].anp + k - 1].length +
				    authsize > PAGE_SIZE)) {
				ret = -EINVAL;
				goto out;
			}

			if (diff_dst)
				sgout[template[i].anp + k - 1].length +=
					authsize;
			sg[template[i].anp + k - 1].length += authsize;
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen,
				       iv);

		aead_request_set_ad(req, template[i].alen);

		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
				      : crypto_aead_decrypt(req), &wait);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* Compare each output chunk and check for overruns past it. */
		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			n = template[i].tap[k];
			if (k == template[i].np - 1)
				n += enc ? authsize : -authsize;

			if (memcmp(q, template[i].result + temp, n)) {
				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, n);
				goto out;
			}

			q += n;
			if (k == template[i].np - 1 && !enc) {
				if (!diff_dst &&
					memcmp(q, template[i].input +
					      temp + n, authsize))
					n = authsize;
				else
					n = 0;
			} else {
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
			}
			if (n) {
				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}

			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(key);
	kfree(iv);
	return ret;
}
948 
949 static int test_aead(struct crypto_aead *tfm, int enc,
950 		     const struct aead_testvec *template, unsigned int tcount)
951 {
952 	unsigned int alignmask;
953 	int ret;
954 
955 	/* test 'dst == src' case */
956 	ret = __test_aead(tfm, enc, template, tcount, false, 0);
957 	if (ret)
958 		return ret;
959 
960 	/* test 'dst != src' case */
961 	ret = __test_aead(tfm, enc, template, tcount, true, 0);
962 	if (ret)
963 		return ret;
964 
965 	/* test unaligned buffers, check with one byte offset */
966 	ret = __test_aead(tfm, enc, template, tcount, true, 1);
967 	if (ret)
968 		return ret;
969 
970 	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
971 	if (alignmask) {
972 		/* Check if alignment mask for tfm is correctly set. */
973 		ret = __test_aead(tfm, enc, template, tcount, true,
974 				  alignmask + 1);
975 		if (ret)
976 			return ret;
977 	}
978 
979 	return 0;
980 }
981 
/*
 * Test a single-block cipher (crypto_cipher) against @template:
 * each non-chunked vector is encrypted or decrypted in place, one
 * block at a time, and compared with the expected text.  Returns 0 on
 * success or a negative errno on the first failure.
 */
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       const struct cipher_testvec *template,
		       unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	const char *input, *result;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
	        e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		/* chunked vectors are not applicable to a block cipher */
		if (template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		/* Direction selects which side is input vs expected output. */
		input  = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, input, template[i].len);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		/* setkey must fail exactly when the vector is marked .fail. */
		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (template[i].fail == !ret) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* Process the vector one cipher block at a time, in place. */
		for (k = 0; k < template[i].len;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
1063 
/*
 * Run the given skcipher test vectors against @tfm in one direction
 * (@enc is ENCRYPT or DECRYPT).
 *
 * Two passes are made over @template:
 *
 *  1. Vectors presented as a single scatterlist entry.  The data is
 *     copied to offset @align_offset within the page to exercise
 *     (mis)aligned buffers, and when @diff_dst is set a separate
 *     destination buffer is used instead of operating in place.
 *
 *  2. "Chunked" vectors (template[i].np != 0), split across multiple
 *     scatterlist entries at the page offsets in IDX[] with the chunk
 *     sizes given by template[i].tap[].  This pass is skipped when
 *     @align_offset is nonzero (see the comment in the loop).
 *
 * Returns 0 on success or a negative errno on the first failure.
 */
static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
			   const struct cipher_testvec *template,
			   unsigned int tcount,
			   const bool diff_dst, const int align_offset)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct skcipher_request *req;
	struct scatterlist sg[8];
	struct scatterlist sgout[8];
	const char *e, *d;	/* log tags: direction name and "-ddst" marker */
	struct crypto_wait wait;
	const char *input, *result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	int ret = -ENOMEM;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
	        e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	/* Pass 1: single-scatterlist-entry tests. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* Chunked-only vectors are handled in the second pass. */
		if (template[i].np && !template[i].also_non_np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		/*
		 * For IV-generating ciphers in the encrypt direction the IV
		 * is an output, so start from zeroes rather than the
		 * vector's IV.
		 */
		if (template[i].iv && !(template[i].generates_iv && enc))
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		input  = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;
		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		data += align_offset;
		memcpy(data, input, template[i].len);

		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		/* template[i].fail set means setkey is expected to fail. */
		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		sg_init_one(&sg[0], data, template[i].len);
		if (diff_dst) {
			data = xoutbuf[0];
			data += align_offset;
			sg_init_one(&sgout[0], data, template[i].len);
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].len, iv);
		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
				      crypto_skcipher_decrypt(req), &wait);

		if (ret) {
			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* data now points at the destination buffer (src or dst). */
		q = data;
		if (memcmp(q, result, template[i].len)) {
			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}

		/* IV-generating ciphers must also emit the expected IV. */
		if (template[i].generates_iv && enc &&
		    memcmp(iv, template[i].iv, crypto_skcipher_ivsize(tfm))) {
			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
			       d, j, e, algo);
			hexdump(iv, crypto_skcipher_ivsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* Pass 2: scattered ("chunked") tests across multiple pages. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv && !(template[i].generates_iv && enc))
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		input  = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;
		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		temp = 0;
		ret = -EINVAL;
		sg_init_table(sg, template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].np);
		/* Scatter the input into np chunks at the IDX[] offsets. */
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);

			memcpy(q, input + temp, template[i].tap[k]);

			/*
			 * Zero the byte just past the chunk (if still inside
			 * the page) so buffer overruns can be detected below.
			 */
			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
				q[template[i].tap[k]] = 0;

			sg_set_buf(&sg[k], q, template[i].tap[k]);
			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				sg_set_buf(&sgout[k], q, template[i].tap[k]);

				memset(q, 0, template[i].tap[k]);
				if (offset_in_page(q) +
				    template[i].tap[k] < PAGE_SIZE)
					q[template[i].tap[k]] = 0;
			}

			temp += template[i].tap[k];
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].len, iv);

		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
				      crypto_skcipher_decrypt(req), &wait);

		if (ret) {
			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* Verify each output chunk and the guard byte after it. */
		temp = 0;
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			if (memcmp(q, result + temp, template[i].tap[k])) {
				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, template[i].tap[k]);
				goto out;
			}

			/* Any nonzero bytes after the chunk mean the cipher
			 * wrote past the end of its destination. */
			q += template[i].tap[k];
			for (n = 0; offset_in_page(q + n) && q[n]; n++)
				;
			if (n) {
				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}
			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	skcipher_request_free(req);
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
1309 
1310 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1311 			 const struct cipher_testvec *template,
1312 			 unsigned int tcount)
1313 {
1314 	unsigned int alignmask;
1315 	int ret;
1316 
1317 	/* test 'dst == src' case */
1318 	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1319 	if (ret)
1320 		return ret;
1321 
1322 	/* test 'dst != src' case */
1323 	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1324 	if (ret)
1325 		return ret;
1326 
1327 	/* test unaligned buffers, check with one byte offset */
1328 	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1329 	if (ret)
1330 		return ret;
1331 
1332 	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1333 	if (alignmask) {
1334 		/* Check if alignment mask for tfm is correctly set. */
1335 		ret = __test_skcipher(tfm, enc, template, tcount, true,
1336 				      alignmask + 1);
1337 		if (ret)
1338 			return ret;
1339 	}
1340 
1341 	return 0;
1342 }
1343 
1344 static int test_comp(struct crypto_comp *tfm,
1345 		     const struct comp_testvec *ctemplate,
1346 		     const struct comp_testvec *dtemplate,
1347 		     int ctcount, int dtcount)
1348 {
1349 	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1350 	char *output, *decomp_output;
1351 	unsigned int i;
1352 	int ret;
1353 
1354 	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1355 	if (!output)
1356 		return -ENOMEM;
1357 
1358 	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1359 	if (!decomp_output) {
1360 		kfree(output);
1361 		return -ENOMEM;
1362 	}
1363 
1364 	for (i = 0; i < ctcount; i++) {
1365 		int ilen;
1366 		unsigned int dlen = COMP_BUF_SIZE;
1367 
1368 		memset(output, 0, sizeof(COMP_BUF_SIZE));
1369 		memset(decomp_output, 0, sizeof(COMP_BUF_SIZE));
1370 
1371 		ilen = ctemplate[i].inlen;
1372 		ret = crypto_comp_compress(tfm, ctemplate[i].input,
1373 					   ilen, output, &dlen);
1374 		if (ret) {
1375 			printk(KERN_ERR "alg: comp: compression failed "
1376 			       "on test %d for %s: ret=%d\n", i + 1, algo,
1377 			       -ret);
1378 			goto out;
1379 		}
1380 
1381 		ilen = dlen;
1382 		dlen = COMP_BUF_SIZE;
1383 		ret = crypto_comp_decompress(tfm, output,
1384 					     ilen, decomp_output, &dlen);
1385 		if (ret) {
1386 			pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
1387 			       i + 1, algo, -ret);
1388 			goto out;
1389 		}
1390 
1391 		if (dlen != ctemplate[i].inlen) {
1392 			printk(KERN_ERR "alg: comp: Compression test %d "
1393 			       "failed for %s: output len = %d\n", i + 1, algo,
1394 			       dlen);
1395 			ret = -EINVAL;
1396 			goto out;
1397 		}
1398 
1399 		if (memcmp(decomp_output, ctemplate[i].input,
1400 			   ctemplate[i].inlen)) {
1401 			pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
1402 			       i + 1, algo);
1403 			hexdump(decomp_output, dlen);
1404 			ret = -EINVAL;
1405 			goto out;
1406 		}
1407 	}
1408 
1409 	for (i = 0; i < dtcount; i++) {
1410 		int ilen;
1411 		unsigned int dlen = COMP_BUF_SIZE;
1412 
1413 		memset(decomp_output, 0, sizeof(COMP_BUF_SIZE));
1414 
1415 		ilen = dtemplate[i].inlen;
1416 		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1417 					     ilen, decomp_output, &dlen);
1418 		if (ret) {
1419 			printk(KERN_ERR "alg: comp: decompression failed "
1420 			       "on test %d for %s: ret=%d\n", i + 1, algo,
1421 			       -ret);
1422 			goto out;
1423 		}
1424 
1425 		if (dlen != dtemplate[i].outlen) {
1426 			printk(KERN_ERR "alg: comp: Decompression test %d "
1427 			       "failed for %s: output len = %d\n", i + 1, algo,
1428 			       dlen);
1429 			ret = -EINVAL;
1430 			goto out;
1431 		}
1432 
1433 		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
1434 			printk(KERN_ERR "alg: comp: Decompression test %d "
1435 			       "failed for %s\n", i + 1, algo);
1436 			hexdump(decomp_output, dlen);
1437 			ret = -EINVAL;
1438 			goto out;
1439 		}
1440 	}
1441 
1442 	ret = 0;
1443 
1444 out:
1445 	kfree(decomp_output);
1446 	kfree(output);
1447 	return ret;
1448 }
1449 
/*
 * Test an asynchronous compression transform (acomp).
 *
 * Each compression vector is verified as a round trip: the input is
 * compressed and the result decompressed again, and the final output
 * must equal the original input (the compressed bytes themselves are
 * never compared, so non-unique encodings are fine).  Each
 * decompression vector is checked directly against its expected output.
 *
 * Returns 0 on success or a negative errno on the first failure.
 */
static int test_acomp(struct crypto_acomp *tfm,
			      const struct comp_testvec *ctemplate,
		      const struct comp_testvec *dtemplate,
		      int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
	unsigned int i;
	char *output, *decomp_out;
	int ret;
	struct scatterlist src, dst;
	struct acomp_req *req;
	struct crypto_wait wait;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_out) {
		kfree(output);
		return -ENOMEM;
	}

	/* Round-trip the compression vectors. */
	for (i = 0; i < ctcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = ctemplate[i].inlen;
		void *input_vec;

		/*
		 * Duplicate the input into kmalloc'd memory so it can be
		 * placed in a scatterlist (the const vector itself
		 * presumably cannot be -- TODO confirm it may be in rodata).
		 */
		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		/* Decompress the data just produced (req->dlen bytes). */
		ilen = req->dlen;
		dlen = COMP_BUF_SIZE;
		sg_init_one(&src, output, ilen);
		sg_init_one(&dst, decomp_out, dlen);
		crypto_init_wait(&wait);
		acomp_request_set_params(req, &src, &dst, ilen, dlen);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			/*
			 * NOTE(review): this message says "compression" but
			 * reports the decompression half of the round trip.
			 */
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != ctemplate[i].inlen) {
			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		/* The round trip must reproduce the original input. */
		if (memcmp(input_vec, decomp_out, req->dlen)) {
			pr_err("alg: acomp: Compression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		kfree(input_vec);
		acomp_request_free(req);
	}

	/* Check the decompression vectors against their known outputs. */
	for (i = 0; i < dtcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = dtemplate[i].inlen;
		void *input_vec;

		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != dtemplate[i].outlen) {
			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(output, dtemplate[i].output, req->dlen)) {
			pr_err("alg: acomp: Decompression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		kfree(input_vec);
		acomp_request_free(req);
	}

	ret = 0;

out:
	kfree(decomp_out);
	kfree(output);
	return ret;
}
1618 
1619 static int test_cprng(struct crypto_rng *tfm,
1620 		      const struct cprng_testvec *template,
1621 		      unsigned int tcount)
1622 {
1623 	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1624 	int err = 0, i, j, seedsize;
1625 	u8 *seed;
1626 	char result[32];
1627 
1628 	seedsize = crypto_rng_seedsize(tfm);
1629 
1630 	seed = kmalloc(seedsize, GFP_KERNEL);
1631 	if (!seed) {
1632 		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1633 		       "for %s\n", algo);
1634 		return -ENOMEM;
1635 	}
1636 
1637 	for (i = 0; i < tcount; i++) {
1638 		memset(result, 0, 32);
1639 
1640 		memcpy(seed, template[i].v, template[i].vlen);
1641 		memcpy(seed + template[i].vlen, template[i].key,
1642 		       template[i].klen);
1643 		memcpy(seed + template[i].vlen + template[i].klen,
1644 		       template[i].dt, template[i].dtlen);
1645 
1646 		err = crypto_rng_reset(tfm, seed, seedsize);
1647 		if (err) {
1648 			printk(KERN_ERR "alg: cprng: Failed to reset rng "
1649 			       "for %s\n", algo);
1650 			goto out;
1651 		}
1652 
1653 		for (j = 0; j < template[i].loops; j++) {
1654 			err = crypto_rng_get_bytes(tfm, result,
1655 						   template[i].rlen);
1656 			if (err < 0) {
1657 				printk(KERN_ERR "alg: cprng: Failed to obtain "
1658 				       "the correct amount of random data for "
1659 				       "%s (requested %d)\n", algo,
1660 				       template[i].rlen);
1661 				goto out;
1662 			}
1663 		}
1664 
1665 		err = memcmp(result, template[i].result,
1666 			     template[i].rlen);
1667 		if (err) {
1668 			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1669 			       i, algo);
1670 			hexdump(result, template[i].rlen);
1671 			err = -EINVAL;
1672 			goto out;
1673 		}
1674 	}
1675 
1676 out:
1677 	kfree(seed);
1678 	return err;
1679 }
1680 
1681 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1682 			 u32 type, u32 mask)
1683 {
1684 	struct crypto_aead *tfm;
1685 	int err = 0;
1686 
1687 	tfm = crypto_alloc_aead(driver, type, mask);
1688 	if (IS_ERR(tfm)) {
1689 		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1690 		       "%ld\n", driver, PTR_ERR(tfm));
1691 		return PTR_ERR(tfm);
1692 	}
1693 
1694 	if (desc->suite.aead.enc.vecs) {
1695 		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1696 				desc->suite.aead.enc.count);
1697 		if (err)
1698 			goto out;
1699 	}
1700 
1701 	if (!err && desc->suite.aead.dec.vecs)
1702 		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1703 				desc->suite.aead.dec.count);
1704 
1705 out:
1706 	crypto_free_aead(tfm);
1707 	return err;
1708 }
1709 
1710 static int alg_test_cipher(const struct alg_test_desc *desc,
1711 			   const char *driver, u32 type, u32 mask)
1712 {
1713 	const struct cipher_test_suite *suite = &desc->suite.cipher;
1714 	struct crypto_cipher *tfm;
1715 	int err;
1716 
1717 	tfm = crypto_alloc_cipher(driver, type, mask);
1718 	if (IS_ERR(tfm)) {
1719 		printk(KERN_ERR "alg: cipher: Failed to load transform for "
1720 		       "%s: %ld\n", driver, PTR_ERR(tfm));
1721 		return PTR_ERR(tfm);
1722 	}
1723 
1724 	err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
1725 	if (!err)
1726 		err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
1727 
1728 	crypto_free_cipher(tfm);
1729 	return err;
1730 }
1731 
1732 static int alg_test_skcipher(const struct alg_test_desc *desc,
1733 			     const char *driver, u32 type, u32 mask)
1734 {
1735 	const struct cipher_test_suite *suite = &desc->suite.cipher;
1736 	struct crypto_skcipher *tfm;
1737 	int err;
1738 
1739 	tfm = crypto_alloc_skcipher(driver, type, mask);
1740 	if (IS_ERR(tfm)) {
1741 		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1742 		       "%s: %ld\n", driver, PTR_ERR(tfm));
1743 		return PTR_ERR(tfm);
1744 	}
1745 
1746 	err = test_skcipher(tfm, ENCRYPT, suite->vecs, suite->count);
1747 	if (!err)
1748 		err = test_skcipher(tfm, DECRYPT, suite->vecs, suite->count);
1749 
1750 	crypto_free_skcipher(tfm);
1751 	return err;
1752 }
1753 
1754 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1755 			 u32 type, u32 mask)
1756 {
1757 	struct crypto_comp *comp;
1758 	struct crypto_acomp *acomp;
1759 	int err;
1760 	u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
1761 
1762 	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1763 		acomp = crypto_alloc_acomp(driver, type, mask);
1764 		if (IS_ERR(acomp)) {
1765 			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1766 			       driver, PTR_ERR(acomp));
1767 			return PTR_ERR(acomp);
1768 		}
1769 		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1770 				 desc->suite.comp.decomp.vecs,
1771 				 desc->suite.comp.comp.count,
1772 				 desc->suite.comp.decomp.count);
1773 		crypto_free_acomp(acomp);
1774 	} else {
1775 		comp = crypto_alloc_comp(driver, type, mask);
1776 		if (IS_ERR(comp)) {
1777 			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1778 			       driver, PTR_ERR(comp));
1779 			return PTR_ERR(comp);
1780 		}
1781 
1782 		err = test_comp(comp, desc->suite.comp.comp.vecs,
1783 				desc->suite.comp.decomp.vecs,
1784 				desc->suite.comp.comp.count,
1785 				desc->suite.comp.decomp.count);
1786 
1787 		crypto_free_comp(comp);
1788 	}
1789 	return err;
1790 }
1791 
1792 static int __alg_test_hash(const struct hash_testvec *template,
1793 			   unsigned int tcount, const char *driver,
1794 			   u32 type, u32 mask)
1795 {
1796 	struct crypto_ahash *tfm;
1797 	int err;
1798 
1799 	tfm = crypto_alloc_ahash(driver, type, mask);
1800 	if (IS_ERR(tfm)) {
1801 		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1802 		       "%ld\n", driver, PTR_ERR(tfm));
1803 		return PTR_ERR(tfm);
1804 	}
1805 
1806 	err = test_hash(tfm, template, tcount, true);
1807 	if (!err)
1808 		err = test_hash(tfm, template, tcount, false);
1809 	crypto_free_ahash(tfm);
1810 	return err;
1811 }
1812 
1813 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1814 			 u32 type, u32 mask)
1815 {
1816 	const struct hash_testvec *template = desc->suite.hash.vecs;
1817 	unsigned int tcount = desc->suite.hash.count;
1818 	unsigned int nr_unkeyed, nr_keyed;
1819 	int err;
1820 
1821 	/*
1822 	 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
1823 	 * first, before setting a key on the tfm.  To make this easier, we
1824 	 * require that the unkeyed test vectors (if any) are listed first.
1825 	 */
1826 
1827 	for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
1828 		if (template[nr_unkeyed].ksize)
1829 			break;
1830 	}
1831 	for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
1832 		if (!template[nr_unkeyed + nr_keyed].ksize) {
1833 			pr_err("alg: hash: test vectors for %s out of order, "
1834 			       "unkeyed ones must come first\n", desc->alg);
1835 			return -EINVAL;
1836 		}
1837 	}
1838 
1839 	err = 0;
1840 	if (nr_unkeyed) {
1841 		err = __alg_test_hash(template, nr_unkeyed, driver, type, mask);
1842 		template += nr_unkeyed;
1843 	}
1844 
1845 	if (!err && nr_keyed)
1846 		err = __alg_test_hash(template, nr_keyed, driver, type, mask);
1847 
1848 	return err;
1849 }
1850 
/*
 * crc32c gets an extra sanity check beyond the generic hash vectors:
 * seed the shash context with a known intermediate CRC state and verify
 * that final() returns the bitwise complement of that state.
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	/* First run the normal known-answer vectors. */
	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;
		shash->flags = 0;

		/*
		 * 420553207 (0x191121f7) is an arbitrary CRC state.
		 * NOTE(review): the state is written via le32_to_cpu() but
		 * compared below in host byte order — this looks
		 * inconsistent on big-endian; confirm against the driver's
		 * context layout.
		 */
		*ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		/* final() is expected to return the inverted state. */
		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}
1897 
1898 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1899 			  u32 type, u32 mask)
1900 {
1901 	struct crypto_rng *rng;
1902 	int err;
1903 
1904 	rng = crypto_alloc_rng(driver, type, mask);
1905 	if (IS_ERR(rng)) {
1906 		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1907 		       "%ld\n", driver, PTR_ERR(rng));
1908 		return PTR_ERR(rng);
1909 	}
1910 
1911 	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1912 
1913 	crypto_free_rng(rng);
1914 
1915 	return err;
1916 }
1917 
1918 
/*
 * Run a single DRBG CAVS known-answer vector.
 *
 * The DRBG is reset with the vector's test entropy and personalization
 * string, then asked for two blocks of random data with additional
 * input strings A and B respectively; only the second block is compared
 * against the expected output.  When @pr is nonzero (prediction
 * resistance), fresh test entropy is supplied before each generate
 * call via the *_test variants.
 *
 * Returns 0 on match, a negative errno on failure, or the nonzero
 * memcmp() result if the generated data does not match.
 */
static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		kzfree(buf);
		return -ENOMEM;
	}

	/* Seed the DRBG from the vector's entropy + personalization. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call, with additional input A. */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		/* Prediction resistance: inject fresh test entropy. */
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl,	&test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Second generate call, with additional input B; this output
	 * (overwriting buf) is the one compared below. */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);	/* buf may hold key-stream-like data; zeroize */
	return ret;
}
1985 
1986 
1987 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1988 			 u32 type, u32 mask)
1989 {
1990 	int err = 0;
1991 	int pr = 0;
1992 	int i = 0;
1993 	const struct drbg_testvec *template = desc->suite.drbg.vecs;
1994 	unsigned int tcount = desc->suite.drbg.count;
1995 
1996 	if (0 == memcmp(driver, "drbg_pr_", 8))
1997 		pr = 1;
1998 
1999 	for (i = 0; i < tcount; i++) {
2000 		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
2001 		if (err) {
2002 			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
2003 			       i, driver);
2004 			err = -EINVAL;
2005 			break;
2006 		}
2007 	}
2008 	return err;
2009 
2010 }
2011 
/*
 * Run one key-agreement (KPP, e.g. DH/ECDH) test vector.
 *
 * Party A's private key comes from vec->secret.  The test:
 *  1. generates A's public key and, unless vec->genkey is set, compares
 *     it against vec->expected_a_public;
 *  2. computes A's shared secret from B's public key (vec->b_public);
 *  3. when vec->genkey is set, additionally switches the tfm to B's
 *     private key (vec->b_secret), computes B's shared secret from A's
 *     freshly generated public key, and requires both parties to agree;
 *     otherwise the secret from step 2 is compared directly against
 *     vec->expected_ss.
 *
 * Returns 0 on success or a negative errno.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;	/* saved copy of A's generated public key */
	void *a_ss = NULL;	/* saved copy of A's shared secret */
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	/* Load party A's private key. */
	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kzalloc(out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
		memcpy(a_public, sg_virt(req->dst), out_len_max);
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	memcpy(input_buf, vec->b_public, vec->b_public_size);
	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}
		memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size);

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		/* Both parties must arrive at the same secret. */
		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}
2156 
2157 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2158 		    const struct kpp_testvec *vecs, unsigned int tcount)
2159 {
2160 	int ret, i;
2161 
2162 	for (i = 0; i < tcount; i++) {
2163 		ret = do_test_kpp(tfm, vecs++, alg);
2164 		if (ret) {
2165 			pr_err("alg: %s: test failed on vector %d, err=%d\n",
2166 			       alg, i + 1, ret);
2167 			return ret;
2168 		}
2169 	}
2170 	return 0;
2171 }
2172 
2173 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2174 			u32 type, u32 mask)
2175 {
2176 	struct crypto_kpp *tfm;
2177 	int err = 0;
2178 
2179 	tfm = crypto_alloc_kpp(driver, type, mask);
2180 	if (IS_ERR(tfm)) {
2181 		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2182 		       driver, PTR_ERR(tfm));
2183 		return PTR_ERR(tfm);
2184 	}
2185 	if (desc->suite.kpp.vecs)
2186 		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2187 			       desc->suite.kpp.count);
2188 
2189 	crypto_free_kpp(tfm);
2190 	return err;
2191 }
2192 
2193 static int test_akcipher_one(struct crypto_akcipher *tfm,
2194 			     const struct akcipher_testvec *vecs)
2195 {
2196 	char *xbuf[XBUFSIZE];
2197 	struct akcipher_request *req;
2198 	void *outbuf_enc = NULL;
2199 	void *outbuf_dec = NULL;
2200 	struct crypto_wait wait;
2201 	unsigned int out_len_max, out_len = 0;
2202 	int err = -ENOMEM;
2203 	struct scatterlist src, dst, src_tab[2];
2204 
2205 	if (testmgr_alloc_buf(xbuf))
2206 		return err;
2207 
2208 	req = akcipher_request_alloc(tfm, GFP_KERNEL);
2209 	if (!req)
2210 		goto free_xbuf;
2211 
2212 	crypto_init_wait(&wait);
2213 
2214 	if (vecs->public_key_vec)
2215 		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2216 						  vecs->key_len);
2217 	else
2218 		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2219 						   vecs->key_len);
2220 	if (err)
2221 		goto free_req;
2222 
2223 	err = -ENOMEM;
2224 	out_len_max = crypto_akcipher_maxsize(tfm);
2225 	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2226 	if (!outbuf_enc)
2227 		goto free_req;
2228 
2229 	if (WARN_ON(vecs->m_size > PAGE_SIZE))
2230 		goto free_all;
2231 
2232 	memcpy(xbuf[0], vecs->m, vecs->m_size);
2233 
2234 	sg_init_table(src_tab, 2);
2235 	sg_set_buf(&src_tab[0], xbuf[0], 8);
2236 	sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
2237 	sg_init_one(&dst, outbuf_enc, out_len_max);
2238 	akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
2239 				   out_len_max);
2240 	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2241 				      crypto_req_done, &wait);
2242 
2243 	err = crypto_wait_req(vecs->siggen_sigver_test ?
2244 			      /* Run asymmetric signature generation */
2245 			      crypto_akcipher_sign(req) :
2246 			      /* Run asymmetric encrypt */
2247 			      crypto_akcipher_encrypt(req), &wait);
2248 	if (err) {
2249 		pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
2250 		goto free_all;
2251 	}
2252 	if (req->dst_len != vecs->c_size) {
2253 		pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
2254 		err = -EINVAL;
2255 		goto free_all;
2256 	}
2257 	/* verify that encrypted message is equal to expected */
2258 	if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
2259 		pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
2260 		hexdump(outbuf_enc, vecs->c_size);
2261 		err = -EINVAL;
2262 		goto free_all;
2263 	}
2264 	/* Don't invoke decrypt for vectors with public key */
2265 	if (vecs->public_key_vec) {
2266 		err = 0;
2267 		goto free_all;
2268 	}
2269 	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2270 	if (!outbuf_dec) {
2271 		err = -ENOMEM;
2272 		goto free_all;
2273 	}
2274 
2275 	if (WARN_ON(vecs->c_size > PAGE_SIZE))
2276 		goto free_all;
2277 
2278 	memcpy(xbuf[0], vecs->c, vecs->c_size);
2279 
2280 	sg_init_one(&src, xbuf[0], vecs->c_size);
2281 	sg_init_one(&dst, outbuf_dec, out_len_max);
2282 	crypto_init_wait(&wait);
2283 	akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
2284 
2285 	err = crypto_wait_req(vecs->siggen_sigver_test ?
2286 			      /* Run asymmetric signature verification */
2287 			      crypto_akcipher_verify(req) :
2288 			      /* Run asymmetric decrypt */
2289 			      crypto_akcipher_decrypt(req), &wait);
2290 	if (err) {
2291 		pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
2292 		goto free_all;
2293 	}
2294 	out_len = req->dst_len;
2295 	if (out_len < vecs->m_size) {
2296 		pr_err("alg: akcipher: decrypt test failed. "
2297 		       "Invalid output len %u\n", out_len);
2298 		err = -EINVAL;
2299 		goto free_all;
2300 	}
2301 	/* verify that decrypted message is equal to the original msg */
2302 	if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2303 	    memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2304 		   vecs->m_size)) {
2305 		pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2306 		hexdump(outbuf_dec, out_len);
2307 		err = -EINVAL;
2308 	}
2309 free_all:
2310 	kfree(outbuf_dec);
2311 	kfree(outbuf_enc);
2312 free_req:
2313 	akcipher_request_free(req);
2314 free_xbuf:
2315 	testmgr_free_buf(xbuf);
2316 	return err;
2317 }
2318 
2319 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2320 			 const struct akcipher_testvec *vecs,
2321 			 unsigned int tcount)
2322 {
2323 	const char *algo =
2324 		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2325 	int ret, i;
2326 
2327 	for (i = 0; i < tcount; i++) {
2328 		ret = test_akcipher_one(tfm, vecs++);
2329 		if (!ret)
2330 			continue;
2331 
2332 		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2333 		       i + 1, algo, ret);
2334 		return ret;
2335 	}
2336 	return 0;
2337 }
2338 
2339 static int alg_test_akcipher(const struct alg_test_desc *desc,
2340 			     const char *driver, u32 type, u32 mask)
2341 {
2342 	struct crypto_akcipher *tfm;
2343 	int err = 0;
2344 
2345 	tfm = crypto_alloc_akcipher(driver, type, mask);
2346 	if (IS_ERR(tfm)) {
2347 		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2348 		       driver, PTR_ERR(tfm));
2349 		return PTR_ERR(tfm);
2350 	}
2351 	if (desc->suite.akcipher.vecs)
2352 		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2353 				    desc->suite.akcipher.count);
2354 
2355 	crypto_free_akcipher(tfm);
2356 	return err;
2357 }
2358 
/*
 * No-op test hook: used for algorithm entries that are deliberately
 * not self-tested here (e.g. covered by another entry, or key-indexed
 * "paes" variants), while still allowing them to be registered.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	return 0;
}
2364 
2365 #define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }
2366 
2367 /* Please keep this list sorted by algorithm name. */
2368 static const struct alg_test_desc alg_test_descs[] = {
2369 	{
2370 		.alg = "aegis128",
2371 		.test = alg_test_aead,
2372 		.suite = {
2373 			.aead = {
2374 				.enc = __VECS(aegis128_enc_tv_template),
2375 				.dec = __VECS(aegis128_dec_tv_template),
2376 			}
2377 		}
2378 	}, {
2379 		.alg = "aegis128l",
2380 		.test = alg_test_aead,
2381 		.suite = {
2382 			.aead = {
2383 				.enc = __VECS(aegis128l_enc_tv_template),
2384 				.dec = __VECS(aegis128l_dec_tv_template),
2385 			}
2386 		}
2387 	}, {
2388 		.alg = "aegis256",
2389 		.test = alg_test_aead,
2390 		.suite = {
2391 			.aead = {
2392 				.enc = __VECS(aegis256_enc_tv_template),
2393 				.dec = __VECS(aegis256_dec_tv_template),
2394 			}
2395 		}
2396 	}, {
2397 		.alg = "ansi_cprng",
2398 		.test = alg_test_cprng,
2399 		.suite = {
2400 			.cprng = __VECS(ansi_cprng_aes_tv_template)
2401 		}
2402 	}, {
2403 		.alg = "authenc(hmac(md5),ecb(cipher_null))",
2404 		.test = alg_test_aead,
2405 		.suite = {
2406 			.aead = {
2407 				.enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template),
2408 				.dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template)
2409 			}
2410 		}
2411 	}, {
2412 		.alg = "authenc(hmac(sha1),cbc(aes))",
2413 		.test = alg_test_aead,
2414 		.fips_allowed = 1,
2415 		.suite = {
2416 			.aead = {
2417 				.enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp)
2418 			}
2419 		}
2420 	}, {
2421 		.alg = "authenc(hmac(sha1),cbc(des))",
2422 		.test = alg_test_aead,
2423 		.suite = {
2424 			.aead = {
2425 				.enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp)
2426 			}
2427 		}
2428 	}, {
2429 		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
2430 		.test = alg_test_aead,
2431 		.fips_allowed = 1,
2432 		.suite = {
2433 			.aead = {
2434 				.enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp)
2435 			}
2436 		}
2437 	}, {
2438 		.alg = "authenc(hmac(sha1),ctr(aes))",
2439 		.test = alg_test_null,
2440 		.fips_allowed = 1,
2441 	}, {
2442 		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
2443 		.test = alg_test_aead,
2444 		.suite = {
2445 			.aead = {
2446 				.enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp),
2447 				.dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp)
2448 			}
2449 		}
2450 	}, {
2451 		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2452 		.test = alg_test_null,
2453 		.fips_allowed = 1,
2454 	}, {
2455 		.alg = "authenc(hmac(sha224),cbc(des))",
2456 		.test = alg_test_aead,
2457 		.suite = {
2458 			.aead = {
2459 				.enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp)
2460 			}
2461 		}
2462 	}, {
2463 		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
2464 		.test = alg_test_aead,
2465 		.fips_allowed = 1,
2466 		.suite = {
2467 			.aead = {
2468 				.enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp)
2469 			}
2470 		}
2471 	}, {
2472 		.alg = "authenc(hmac(sha256),cbc(aes))",
2473 		.test = alg_test_aead,
2474 		.fips_allowed = 1,
2475 		.suite = {
2476 			.aead = {
2477 				.enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp)
2478 			}
2479 		}
2480 	}, {
2481 		.alg = "authenc(hmac(sha256),cbc(des))",
2482 		.test = alg_test_aead,
2483 		.suite = {
2484 			.aead = {
2485 				.enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp)
2486 			}
2487 		}
2488 	}, {
2489 		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
2490 		.test = alg_test_aead,
2491 		.fips_allowed = 1,
2492 		.suite = {
2493 			.aead = {
2494 				.enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp)
2495 			}
2496 		}
2497 	}, {
2498 		.alg = "authenc(hmac(sha256),ctr(aes))",
2499 		.test = alg_test_null,
2500 		.fips_allowed = 1,
2501 	}, {
2502 		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2503 		.test = alg_test_null,
2504 		.fips_allowed = 1,
2505 	}, {
2506 		.alg = "authenc(hmac(sha384),cbc(des))",
2507 		.test = alg_test_aead,
2508 		.suite = {
2509 			.aead = {
2510 				.enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp)
2511 			}
2512 		}
2513 	}, {
2514 		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
2515 		.test = alg_test_aead,
2516 		.fips_allowed = 1,
2517 		.suite = {
2518 			.aead = {
2519 				.enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp)
2520 			}
2521 		}
2522 	}, {
2523 		.alg = "authenc(hmac(sha384),ctr(aes))",
2524 		.test = alg_test_null,
2525 		.fips_allowed = 1,
2526 	}, {
2527 		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2528 		.test = alg_test_null,
2529 		.fips_allowed = 1,
2530 	}, {
2531 		.alg = "authenc(hmac(sha512),cbc(aes))",
2532 		.fips_allowed = 1,
2533 		.test = alg_test_aead,
2534 		.suite = {
2535 			.aead = {
2536 				.enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp)
2537 			}
2538 		}
2539 	}, {
2540 		.alg = "authenc(hmac(sha512),cbc(des))",
2541 		.test = alg_test_aead,
2542 		.suite = {
2543 			.aead = {
2544 				.enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp)
2545 			}
2546 		}
2547 	}, {
2548 		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
2549 		.test = alg_test_aead,
2550 		.fips_allowed = 1,
2551 		.suite = {
2552 			.aead = {
2553 				.enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp)
2554 			}
2555 		}
2556 	}, {
2557 		.alg = "authenc(hmac(sha512),ctr(aes))",
2558 		.test = alg_test_null,
2559 		.fips_allowed = 1,
2560 	}, {
2561 		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2562 		.test = alg_test_null,
2563 		.fips_allowed = 1,
2564 	}, {
2565 		.alg = "cbc(aes)",
2566 		.test = alg_test_skcipher,
2567 		.fips_allowed = 1,
2568 		.suite = {
2569 			.cipher = __VECS(aes_cbc_tv_template)
2570 		},
2571 	}, {
2572 		.alg = "cbc(anubis)",
2573 		.test = alg_test_skcipher,
2574 		.suite = {
2575 			.cipher = __VECS(anubis_cbc_tv_template)
2576 		},
2577 	}, {
2578 		.alg = "cbc(blowfish)",
2579 		.test = alg_test_skcipher,
2580 		.suite = {
2581 			.cipher = __VECS(bf_cbc_tv_template)
2582 		},
2583 	}, {
2584 		.alg = "cbc(camellia)",
2585 		.test = alg_test_skcipher,
2586 		.suite = {
2587 			.cipher = __VECS(camellia_cbc_tv_template)
2588 		},
2589 	}, {
2590 		.alg = "cbc(cast5)",
2591 		.test = alg_test_skcipher,
2592 		.suite = {
2593 			.cipher = __VECS(cast5_cbc_tv_template)
2594 		},
2595 	}, {
2596 		.alg = "cbc(cast6)",
2597 		.test = alg_test_skcipher,
2598 		.suite = {
2599 			.cipher = __VECS(cast6_cbc_tv_template)
2600 		},
2601 	}, {
2602 		.alg = "cbc(des)",
2603 		.test = alg_test_skcipher,
2604 		.suite = {
2605 			.cipher = __VECS(des_cbc_tv_template)
2606 		},
2607 	}, {
2608 		.alg = "cbc(des3_ede)",
2609 		.test = alg_test_skcipher,
2610 		.fips_allowed = 1,
2611 		.suite = {
2612 			.cipher = __VECS(des3_ede_cbc_tv_template)
2613 		},
2614 	}, {
2615 		/* Same as cbc(aes) except the key is stored in
2616 		 * hardware secure memory which we reference by index
2617 		 */
2618 		.alg = "cbc(paes)",
2619 		.test = alg_test_null,
2620 		.fips_allowed = 1,
2621 	}, {
2622 		.alg = "cbc(serpent)",
2623 		.test = alg_test_skcipher,
2624 		.suite = {
2625 			.cipher = __VECS(serpent_cbc_tv_template)
2626 		},
2627 	}, {
2628 		.alg = "cbc(twofish)",
2629 		.test = alg_test_skcipher,
2630 		.suite = {
2631 			.cipher = __VECS(tf_cbc_tv_template)
2632 		},
2633 	}, {
2634 		.alg = "cbcmac(aes)",
2635 		.fips_allowed = 1,
2636 		.test = alg_test_hash,
2637 		.suite = {
2638 			.hash = __VECS(aes_cbcmac_tv_template)
2639 		}
2640 	}, {
2641 		.alg = "ccm(aes)",
2642 		.test = alg_test_aead,
2643 		.fips_allowed = 1,
2644 		.suite = {
2645 			.aead = {
2646 				.enc = __VECS(aes_ccm_enc_tv_template),
2647 				.dec = __VECS(aes_ccm_dec_tv_template)
2648 			}
2649 		}
2650 	}, {
2651 		.alg = "chacha20",
2652 		.test = alg_test_skcipher,
2653 		.suite = {
2654 			.cipher = __VECS(chacha20_tv_template)
2655 		},
2656 	}, {
2657 		.alg = "cmac(aes)",
2658 		.fips_allowed = 1,
2659 		.test = alg_test_hash,
2660 		.suite = {
2661 			.hash = __VECS(aes_cmac128_tv_template)
2662 		}
2663 	}, {
2664 		.alg = "cmac(des3_ede)",
2665 		.fips_allowed = 1,
2666 		.test = alg_test_hash,
2667 		.suite = {
2668 			.hash = __VECS(des3_ede_cmac64_tv_template)
2669 		}
2670 	}, {
2671 		.alg = "compress_null",
2672 		.test = alg_test_null,
2673 	}, {
2674 		.alg = "crc32",
2675 		.test = alg_test_hash,
2676 		.suite = {
2677 			.hash = __VECS(crc32_tv_template)
2678 		}
2679 	}, {
2680 		.alg = "crc32c",
2681 		.test = alg_test_crc32c,
2682 		.fips_allowed = 1,
2683 		.suite = {
2684 			.hash = __VECS(crc32c_tv_template)
2685 		}
2686 	}, {
2687 		.alg = "crct10dif",
2688 		.test = alg_test_hash,
2689 		.fips_allowed = 1,
2690 		.suite = {
2691 			.hash = __VECS(crct10dif_tv_template)
2692 		}
2693 	}, {
2694 		.alg = "ctr(aes)",
2695 		.test = alg_test_skcipher,
2696 		.fips_allowed = 1,
2697 		.suite = {
2698 			.cipher = __VECS(aes_ctr_tv_template)
2699 		}
2700 	}, {
2701 		.alg = "ctr(blowfish)",
2702 		.test = alg_test_skcipher,
2703 		.suite = {
2704 			.cipher = __VECS(bf_ctr_tv_template)
2705 		}
2706 	}, {
2707 		.alg = "ctr(camellia)",
2708 		.test = alg_test_skcipher,
2709 		.suite = {
2710 			.cipher = __VECS(camellia_ctr_tv_template)
2711 		}
2712 	}, {
2713 		.alg = "ctr(cast5)",
2714 		.test = alg_test_skcipher,
2715 		.suite = {
2716 			.cipher = __VECS(cast5_ctr_tv_template)
2717 		}
2718 	}, {
2719 		.alg = "ctr(cast6)",
2720 		.test = alg_test_skcipher,
2721 		.suite = {
2722 			.cipher = __VECS(cast6_ctr_tv_template)
2723 		}
2724 	}, {
2725 		.alg = "ctr(des)",
2726 		.test = alg_test_skcipher,
2727 		.suite = {
2728 			.cipher = __VECS(des_ctr_tv_template)
2729 		}
2730 	}, {
2731 		.alg = "ctr(des3_ede)",
2732 		.test = alg_test_skcipher,
2733 		.fips_allowed = 1,
2734 		.suite = {
2735 			.cipher = __VECS(des3_ede_ctr_tv_template)
2736 		}
2737 	}, {
2738 		/* Same as ctr(aes) except the key is stored in
2739 		 * hardware secure memory which we reference by index
2740 		 */
2741 		.alg = "ctr(paes)",
2742 		.test = alg_test_null,
2743 		.fips_allowed = 1,
2744 	}, {
2745 		.alg = "ctr(serpent)",
2746 		.test = alg_test_skcipher,
2747 		.suite = {
2748 			.cipher = __VECS(serpent_ctr_tv_template)
2749 		}
2750 	}, {
2751 		.alg = "ctr(twofish)",
2752 		.test = alg_test_skcipher,
2753 		.suite = {
2754 			.cipher = __VECS(tf_ctr_tv_template)
2755 		}
2756 	}, {
2757 		.alg = "cts(cbc(aes))",
2758 		.test = alg_test_skcipher,
2759 		.suite = {
2760 			.cipher = __VECS(cts_mode_tv_template)
2761 		}
2762 	}, {
2763 		.alg = "deflate",
2764 		.test = alg_test_comp,
2765 		.fips_allowed = 1,
2766 		.suite = {
2767 			.comp = {
2768 				.comp = __VECS(deflate_comp_tv_template),
2769 				.decomp = __VECS(deflate_decomp_tv_template)
2770 			}
2771 		}
2772 	}, {
2773 		.alg = "dh",
2774 		.test = alg_test_kpp,
2775 		.fips_allowed = 1,
2776 		.suite = {
2777 			.kpp = __VECS(dh_tv_template)
2778 		}
2779 	}, {
2780 		.alg = "digest_null",
2781 		.test = alg_test_null,
2782 	}, {
2783 		.alg = "drbg_nopr_ctr_aes128",
2784 		.test = alg_test_drbg,
2785 		.fips_allowed = 1,
2786 		.suite = {
2787 			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
2788 		}
2789 	}, {
2790 		.alg = "drbg_nopr_ctr_aes192",
2791 		.test = alg_test_drbg,
2792 		.fips_allowed = 1,
2793 		.suite = {
2794 			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
2795 		}
2796 	}, {
2797 		.alg = "drbg_nopr_ctr_aes256",
2798 		.test = alg_test_drbg,
2799 		.fips_allowed = 1,
2800 		.suite = {
2801 			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
2802 		}
2803 	}, {
2804 		/*
2805 		 * There is no need to specifically test the DRBG with every
2806 		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2807 		 */
2808 		.alg = "drbg_nopr_hmac_sha1",
2809 		.fips_allowed = 1,
2810 		.test = alg_test_null,
2811 	}, {
2812 		.alg = "drbg_nopr_hmac_sha256",
2813 		.test = alg_test_drbg,
2814 		.fips_allowed = 1,
2815 		.suite = {
2816 			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
2817 		}
2818 	}, {
2819 		/* covered by drbg_nopr_hmac_sha256 test */
2820 		.alg = "drbg_nopr_hmac_sha384",
2821 		.fips_allowed = 1,
2822 		.test = alg_test_null,
2823 	}, {
2824 		.alg = "drbg_nopr_hmac_sha512",
2825 		.test = alg_test_null,
2826 		.fips_allowed = 1,
2827 	}, {
2828 		.alg = "drbg_nopr_sha1",
2829 		.fips_allowed = 1,
2830 		.test = alg_test_null,
2831 	}, {
2832 		.alg = "drbg_nopr_sha256",
2833 		.test = alg_test_drbg,
2834 		.fips_allowed = 1,
2835 		.suite = {
2836 			.drbg = __VECS(drbg_nopr_sha256_tv_template)
2837 		}
2838 	}, {
2839 		/* covered by drbg_nopr_sha256 test */
2840 		.alg = "drbg_nopr_sha384",
2841 		.fips_allowed = 1,
2842 		.test = alg_test_null,
2843 	}, {
2844 		.alg = "drbg_nopr_sha512",
2845 		.fips_allowed = 1,
2846 		.test = alg_test_null,
2847 	}, {
2848 		.alg = "drbg_pr_ctr_aes128",
2849 		.test = alg_test_drbg,
2850 		.fips_allowed = 1,
2851 		.suite = {
2852 			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
2853 		}
2854 	}, {
2855 		/* covered by drbg_pr_ctr_aes128 test */
2856 		.alg = "drbg_pr_ctr_aes192",
2857 		.fips_allowed = 1,
2858 		.test = alg_test_null,
2859 	}, {
2860 		.alg = "drbg_pr_ctr_aes256",
2861 		.fips_allowed = 1,
2862 		.test = alg_test_null,
2863 	}, {
2864 		.alg = "drbg_pr_hmac_sha1",
2865 		.fips_allowed = 1,
2866 		.test = alg_test_null,
2867 	}, {
2868 		.alg = "drbg_pr_hmac_sha256",
2869 		.test = alg_test_drbg,
2870 		.fips_allowed = 1,
2871 		.suite = {
2872 			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
2873 		}
2874 	}, {
2875 		/* covered by drbg_pr_hmac_sha256 test */
2876 		.alg = "drbg_pr_hmac_sha384",
2877 		.fips_allowed = 1,
2878 		.test = alg_test_null,
2879 	}, {
2880 		.alg = "drbg_pr_hmac_sha512",
2881 		.test = alg_test_null,
2882 		.fips_allowed = 1,
2883 	}, {
2884 		.alg = "drbg_pr_sha1",
2885 		.fips_allowed = 1,
2886 		.test = alg_test_null,
2887 	}, {
2888 		.alg = "drbg_pr_sha256",
2889 		.test = alg_test_drbg,
2890 		.fips_allowed = 1,
2891 		.suite = {
2892 			.drbg = __VECS(drbg_pr_sha256_tv_template)
2893 		}
2894 	}, {
2895 		/* covered by drbg_pr_sha256 test */
2896 		.alg = "drbg_pr_sha384",
2897 		.fips_allowed = 1,
2898 		.test = alg_test_null,
2899 	}, {
2900 		.alg = "drbg_pr_sha512",
2901 		.fips_allowed = 1,
2902 		.test = alg_test_null,
2903 	}, {
2904 		.alg = "ecb(aes)",
2905 		.test = alg_test_skcipher,
2906 		.fips_allowed = 1,
2907 		.suite = {
2908 			.cipher = __VECS(aes_tv_template)
2909 		}
2910 	}, {
2911 		.alg = "ecb(anubis)",
2912 		.test = alg_test_skcipher,
2913 		.suite = {
2914 			.cipher = __VECS(anubis_tv_template)
2915 		}
2916 	}, {
2917 		.alg = "ecb(arc4)",
2918 		.test = alg_test_skcipher,
2919 		.suite = {
2920 			.cipher = __VECS(arc4_tv_template)
2921 		}
2922 	}, {
2923 		.alg = "ecb(blowfish)",
2924 		.test = alg_test_skcipher,
2925 		.suite = {
2926 			.cipher = __VECS(bf_tv_template)
2927 		}
2928 	}, {
2929 		.alg = "ecb(camellia)",
2930 		.test = alg_test_skcipher,
2931 		.suite = {
2932 			.cipher = __VECS(camellia_tv_template)
2933 		}
2934 	}, {
2935 		.alg = "ecb(cast5)",
2936 		.test = alg_test_skcipher,
2937 		.suite = {
2938 			.cipher = __VECS(cast5_tv_template)
2939 		}
2940 	}, {
2941 		.alg = "ecb(cast6)",
2942 		.test = alg_test_skcipher,
2943 		.suite = {
2944 			.cipher = __VECS(cast6_tv_template)
2945 		}
2946 	}, {
2947 		.alg = "ecb(cipher_null)",
2948 		.test = alg_test_null,
2949 		.fips_allowed = 1,
2950 	}, {
2951 		.alg = "ecb(des)",
2952 		.test = alg_test_skcipher,
2953 		.suite = {
2954 			.cipher = __VECS(des_tv_template)
2955 		}
2956 	}, {
2957 		.alg = "ecb(des3_ede)",
2958 		.test = alg_test_skcipher,
2959 		.fips_allowed = 1,
2960 		.suite = {
2961 			.cipher = __VECS(des3_ede_tv_template)
2962 		}
2963 	}, {
2964 		.alg = "ecb(fcrypt)",
2965 		.test = alg_test_skcipher,
2966 		.suite = {
2967 			.cipher = {
2968 				.vecs = fcrypt_pcbc_tv_template,
2969 				.count = 1
2970 			}
2971 		}
2972 	}, {
2973 		.alg = "ecb(khazad)",
2974 		.test = alg_test_skcipher,
2975 		.suite = {
2976 			.cipher = __VECS(khazad_tv_template)
2977 		}
2978 	}, {
2979 		/* Same as ecb(aes) except the key is stored in
2980 		 * hardware secure memory which we reference by index
2981 		 */
2982 		.alg = "ecb(paes)",
2983 		.test = alg_test_null,
2984 		.fips_allowed = 1,
2985 	}, {
2986 		.alg = "ecb(seed)",
2987 		.test = alg_test_skcipher,
2988 		.suite = {
2989 			.cipher = __VECS(seed_tv_template)
2990 		}
2991 	}, {
2992 		.alg = "ecb(serpent)",
2993 		.test = alg_test_skcipher,
2994 		.suite = {
2995 			.cipher = __VECS(serpent_tv_template)
2996 		}
2997 	}, {
2998 		.alg = "ecb(sm4)",
2999 		.test = alg_test_skcipher,
3000 		.suite = {
3001 			.cipher = __VECS(sm4_tv_template)
3002 		}
3003 	}, {
3004 		.alg = "ecb(speck128)",
3005 		.test = alg_test_skcipher,
3006 		.suite = {
3007 			.cipher = __VECS(speck128_tv_template)
3008 		}
3009 	}, {
3010 		.alg = "ecb(speck64)",
3011 		.test = alg_test_skcipher,
3012 		.suite = {
3013 			.cipher = __VECS(speck64_tv_template)
3014 		}
3015 	}, {
3016 		.alg = "ecb(tea)",
3017 		.test = alg_test_skcipher,
3018 		.suite = {
3019 			.cipher = __VECS(tea_tv_template)
3020 		}
3021 	}, {
3022 		.alg = "ecb(tnepres)",
3023 		.test = alg_test_skcipher,
3024 		.suite = {
3025 			.cipher = __VECS(tnepres_tv_template)
3026 		}
3027 	}, {
3028 		.alg = "ecb(twofish)",
3029 		.test = alg_test_skcipher,
3030 		.suite = {
3031 			.cipher = __VECS(tf_tv_template)
3032 		}
3033 	}, {
3034 		.alg = "ecb(xeta)",
3035 		.test = alg_test_skcipher,
3036 		.suite = {
3037 			.cipher = __VECS(xeta_tv_template)
3038 		}
3039 	}, {
3040 		.alg = "ecb(xtea)",
3041 		.test = alg_test_skcipher,
3042 		.suite = {
3043 			.cipher = __VECS(xtea_tv_template)
3044 		}
3045 	}, {
3046 		.alg = "ecdh",
3047 		.test = alg_test_kpp,
3048 		.fips_allowed = 1,
3049 		.suite = {
3050 			.kpp = __VECS(ecdh_tv_template)
3051 		}
3052 	}, {
3053 		.alg = "gcm(aes)",
3054 		.test = alg_test_aead,
3055 		.fips_allowed = 1,
3056 		.suite = {
3057 			.aead = {
3058 				.enc = __VECS(aes_gcm_enc_tv_template),
3059 				.dec = __VECS(aes_gcm_dec_tv_template)
3060 			}
3061 		}
3062 	}, {
3063 		.alg = "ghash",
3064 		.test = alg_test_hash,
3065 		.fips_allowed = 1,
3066 		.suite = {
3067 			.hash = __VECS(ghash_tv_template)
3068 		}
3069 	}, {
3070 		.alg = "hmac(md5)",
3071 		.test = alg_test_hash,
3072 		.suite = {
3073 			.hash = __VECS(hmac_md5_tv_template)
3074 		}
3075 	}, {
3076 		.alg = "hmac(rmd128)",
3077 		.test = alg_test_hash,
3078 		.suite = {
3079 			.hash = __VECS(hmac_rmd128_tv_template)
3080 		}
3081 	}, {
3082 		.alg = "hmac(rmd160)",
3083 		.test = alg_test_hash,
3084 		.suite = {
3085 			.hash = __VECS(hmac_rmd160_tv_template)
3086 		}
3087 	}, {
3088 		.alg = "hmac(sha1)",
3089 		.test = alg_test_hash,
3090 		.fips_allowed = 1,
3091 		.suite = {
3092 			.hash = __VECS(hmac_sha1_tv_template)
3093 		}
3094 	}, {
3095 		.alg = "hmac(sha224)",
3096 		.test = alg_test_hash,
3097 		.fips_allowed = 1,
3098 		.suite = {
3099 			.hash = __VECS(hmac_sha224_tv_template)
3100 		}
3101 	}, {
3102 		.alg = "hmac(sha256)",
3103 		.test = alg_test_hash,
3104 		.fips_allowed = 1,
3105 		.suite = {
3106 			.hash = __VECS(hmac_sha256_tv_template)
3107 		}
3108 	}, {
3109 		.alg = "hmac(sha3-224)",
3110 		.test = alg_test_hash,
3111 		.fips_allowed = 1,
3112 		.suite = {
3113 			.hash = __VECS(hmac_sha3_224_tv_template)
3114 		}
3115 	}, {
3116 		.alg = "hmac(sha3-256)",
3117 		.test = alg_test_hash,
3118 		.fips_allowed = 1,
3119 		.suite = {
3120 			.hash = __VECS(hmac_sha3_256_tv_template)
3121 		}
3122 	}, {
3123 		.alg = "hmac(sha3-384)",
3124 		.test = alg_test_hash,
3125 		.fips_allowed = 1,
3126 		.suite = {
3127 			.hash = __VECS(hmac_sha3_384_tv_template)
3128 		}
3129 	}, {
3130 		.alg = "hmac(sha3-512)",
3131 		.test = alg_test_hash,
3132 		.fips_allowed = 1,
3133 		.suite = {
3134 			.hash = __VECS(hmac_sha3_512_tv_template)
3135 		}
3136 	}, {
3137 		.alg = "hmac(sha384)",
3138 		.test = alg_test_hash,
3139 		.fips_allowed = 1,
3140 		.suite = {
3141 			.hash = __VECS(hmac_sha384_tv_template)
3142 		}
3143 	}, {
3144 		.alg = "hmac(sha512)",
3145 		.test = alg_test_hash,
3146 		.fips_allowed = 1,
3147 		.suite = {
3148 			.hash = __VECS(hmac_sha512_tv_template)
3149 		}
3150 	}, {
3151 		.alg = "jitterentropy_rng",
3152 		.fips_allowed = 1,
3153 		.test = alg_test_null,
3154 	}, {
3155 		.alg = "kw(aes)",
3156 		.test = alg_test_skcipher,
3157 		.fips_allowed = 1,
3158 		.suite = {
3159 			.cipher = __VECS(aes_kw_tv_template)
3160 		}
3161 	}, {
3162 		.alg = "lrw(aes)",
3163 		.test = alg_test_skcipher,
3164 		.suite = {
3165 			.cipher = __VECS(aes_lrw_tv_template)
3166 		}
3167 	}, {
3168 		.alg = "lrw(camellia)",
3169 		.test = alg_test_skcipher,
3170 		.suite = {
3171 			.cipher = __VECS(camellia_lrw_tv_template)
3172 		}
3173 	}, {
3174 		.alg = "lrw(cast6)",
3175 		.test = alg_test_skcipher,
3176 		.suite = {
3177 			.cipher = __VECS(cast6_lrw_tv_template)
3178 		}
3179 	}, {
3180 		.alg = "lrw(serpent)",
3181 		.test = alg_test_skcipher,
3182 		.suite = {
3183 			.cipher = __VECS(serpent_lrw_tv_template)
3184 		}
3185 	}, {
3186 		.alg = "lrw(twofish)",
3187 		.test = alg_test_skcipher,
3188 		.suite = {
3189 			.cipher = __VECS(tf_lrw_tv_template)
3190 		}
3191 	}, {
3192 		.alg = "lz4",
3193 		.test = alg_test_comp,
3194 		.fips_allowed = 1,
3195 		.suite = {
3196 			.comp = {
3197 				.comp = __VECS(lz4_comp_tv_template),
3198 				.decomp = __VECS(lz4_decomp_tv_template)
3199 			}
3200 		}
3201 	}, {
3202 		.alg = "lz4hc",
3203 		.test = alg_test_comp,
3204 		.fips_allowed = 1,
3205 		.suite = {
3206 			.comp = {
3207 				.comp = __VECS(lz4hc_comp_tv_template),
3208 				.decomp = __VECS(lz4hc_decomp_tv_template)
3209 			}
3210 		}
3211 	}, {
3212 		.alg = "lzo",
3213 		.test = alg_test_comp,
3214 		.fips_allowed = 1,
3215 		.suite = {
3216 			.comp = {
3217 				.comp = __VECS(lzo_comp_tv_template),
3218 				.decomp = __VECS(lzo_decomp_tv_template)
3219 			}
3220 		}
3221 	}, {
3222 		.alg = "md4",
3223 		.test = alg_test_hash,
3224 		.suite = {
3225 			.hash = __VECS(md4_tv_template)
3226 		}
3227 	}, {
3228 		.alg = "md5",
3229 		.test = alg_test_hash,
3230 		.suite = {
3231 			.hash = __VECS(md5_tv_template)
3232 		}
3233 	}, {
3234 		.alg = "michael_mic",
3235 		.test = alg_test_hash,
3236 		.suite = {
3237 			.hash = __VECS(michael_mic_tv_template)
3238 		}
3239 	}, {
3240 		.alg = "morus1280",
3241 		.test = alg_test_aead,
3242 		.suite = {
3243 			.aead = {
3244 				.enc = __VECS(morus1280_enc_tv_template),
3245 				.dec = __VECS(morus1280_dec_tv_template),
3246 			}
3247 		}
3248 	}, {
3249 		.alg = "morus640",
3250 		.test = alg_test_aead,
3251 		.suite = {
3252 			.aead = {
3253 				.enc = __VECS(morus640_enc_tv_template),
3254 				.dec = __VECS(morus640_dec_tv_template),
3255 			}
3256 		}
3257 	}, {
3258 		.alg = "ofb(aes)",
3259 		.test = alg_test_skcipher,
3260 		.fips_allowed = 1,
3261 		.suite = {
3262 			.cipher = __VECS(aes_ofb_tv_template)
3263 		}
3264 	}, {
3265 		/* Same as ofb(aes) except the key is stored in
3266 		 * hardware secure memory which we reference by index
3267 		 */
3268 		.alg = "ofb(paes)",
3269 		.test = alg_test_null,
3270 		.fips_allowed = 1,
3271 	}, {
3272 		.alg = "pcbc(fcrypt)",
3273 		.test = alg_test_skcipher,
3274 		.suite = {
3275 			.cipher = __VECS(fcrypt_pcbc_tv_template)
3276 		}
3277 	}, {
3278 		.alg = "pkcs1pad(rsa,sha224)",
3279 		.test = alg_test_null,
3280 		.fips_allowed = 1,
3281 	}, {
3282 		.alg = "pkcs1pad(rsa,sha256)",
3283 		.test = alg_test_akcipher,
3284 		.fips_allowed = 1,
3285 		.suite = {
3286 			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
3287 		}
3288 	}, {
3289 		.alg = "pkcs1pad(rsa,sha384)",
3290 		.test = alg_test_null,
3291 		.fips_allowed = 1,
3292 	}, {
3293 		.alg = "pkcs1pad(rsa,sha512)",
3294 		.test = alg_test_null,
3295 		.fips_allowed = 1,
3296 	}, {
3297 		.alg = "poly1305",
3298 		.test = alg_test_hash,
3299 		.suite = {
3300 			.hash = __VECS(poly1305_tv_template)
3301 		}
3302 	}, {
3303 		.alg = "rfc3686(ctr(aes))",
3304 		.test = alg_test_skcipher,
3305 		.fips_allowed = 1,
3306 		.suite = {
3307 			.cipher = __VECS(aes_ctr_rfc3686_tv_template)
3308 		}
3309 	}, {
3310 		.alg = "rfc4106(gcm(aes))",
3311 		.test = alg_test_aead,
3312 		.fips_allowed = 1,
3313 		.suite = {
3314 			.aead = {
3315 				.enc = __VECS(aes_gcm_rfc4106_enc_tv_template),
3316 				.dec = __VECS(aes_gcm_rfc4106_dec_tv_template)
3317 			}
3318 		}
3319 	}, {
3320 		.alg = "rfc4309(ccm(aes))",
3321 		.test = alg_test_aead,
3322 		.fips_allowed = 1,
3323 		.suite = {
3324 			.aead = {
3325 				.enc = __VECS(aes_ccm_rfc4309_enc_tv_template),
3326 				.dec = __VECS(aes_ccm_rfc4309_dec_tv_template)
3327 			}
3328 		}
3329 	}, {
3330 		.alg = "rfc4543(gcm(aes))",
3331 		.test = alg_test_aead,
3332 		.suite = {
3333 			.aead = {
3334 				.enc = __VECS(aes_gcm_rfc4543_enc_tv_template),
3335 				.dec = __VECS(aes_gcm_rfc4543_dec_tv_template),
3336 			}
3337 		}
3338 	}, {
3339 		.alg = "rfc7539(chacha20,poly1305)",
3340 		.test = alg_test_aead,
3341 		.suite = {
3342 			.aead = {
3343 				.enc = __VECS(rfc7539_enc_tv_template),
3344 				.dec = __VECS(rfc7539_dec_tv_template),
3345 			}
3346 		}
3347 	}, {
3348 		.alg = "rfc7539esp(chacha20,poly1305)",
3349 		.test = alg_test_aead,
3350 		.suite = {
3351 			.aead = {
3352 				.enc = __VECS(rfc7539esp_enc_tv_template),
3353 				.dec = __VECS(rfc7539esp_dec_tv_template),
3354 			}
3355 		}
3356 	}, {
3357 		.alg = "rmd128",
3358 		.test = alg_test_hash,
3359 		.suite = {
3360 			.hash = __VECS(rmd128_tv_template)
3361 		}
3362 	}, {
3363 		.alg = "rmd160",
3364 		.test = alg_test_hash,
3365 		.suite = {
3366 			.hash = __VECS(rmd160_tv_template)
3367 		}
3368 	}, {
3369 		.alg = "rmd256",
3370 		.test = alg_test_hash,
3371 		.suite = {
3372 			.hash = __VECS(rmd256_tv_template)
3373 		}
3374 	}, {
3375 		.alg = "rmd320",
3376 		.test = alg_test_hash,
3377 		.suite = {
3378 			.hash = __VECS(rmd320_tv_template)
3379 		}
3380 	}, {
3381 		.alg = "rsa",
3382 		.test = alg_test_akcipher,
3383 		.fips_allowed = 1,
3384 		.suite = {
3385 			.akcipher = __VECS(rsa_tv_template)
3386 		}
3387 	}, {
3388 		.alg = "salsa20",
3389 		.test = alg_test_skcipher,
3390 		.suite = {
3391 			.cipher = __VECS(salsa20_stream_tv_template)
3392 		}
3393 	}, {
3394 		.alg = "sha1",
3395 		.test = alg_test_hash,
3396 		.fips_allowed = 1,
3397 		.suite = {
3398 			.hash = __VECS(sha1_tv_template)
3399 		}
3400 	}, {
3401 		.alg = "sha224",
3402 		.test = alg_test_hash,
3403 		.fips_allowed = 1,
3404 		.suite = {
3405 			.hash = __VECS(sha224_tv_template)
3406 		}
3407 	}, {
3408 		.alg = "sha256",
3409 		.test = alg_test_hash,
3410 		.fips_allowed = 1,
3411 		.suite = {
3412 			.hash = __VECS(sha256_tv_template)
3413 		}
3414 	}, {
3415 		.alg = "sha3-224",
3416 		.test = alg_test_hash,
3417 		.fips_allowed = 1,
3418 		.suite = {
3419 			.hash = __VECS(sha3_224_tv_template)
3420 		}
3421 	}, {
3422 		.alg = "sha3-256",
3423 		.test = alg_test_hash,
3424 		.fips_allowed = 1,
3425 		.suite = {
3426 			.hash = __VECS(sha3_256_tv_template)
3427 		}
3428 	}, {
3429 		.alg = "sha3-384",
3430 		.test = alg_test_hash,
3431 		.fips_allowed = 1,
3432 		.suite = {
3433 			.hash = __VECS(sha3_384_tv_template)
3434 		}
3435 	}, {
3436 		.alg = "sha3-512",
3437 		.test = alg_test_hash,
3438 		.fips_allowed = 1,
3439 		.suite = {
3440 			.hash = __VECS(sha3_512_tv_template)
3441 		}
3442 	}, {
3443 		.alg = "sha384",
3444 		.test = alg_test_hash,
3445 		.fips_allowed = 1,
3446 		.suite = {
3447 			.hash = __VECS(sha384_tv_template)
3448 		}
3449 	}, {
3450 		.alg = "sha512",
3451 		.test = alg_test_hash,
3452 		.fips_allowed = 1,
3453 		.suite = {
3454 			.hash = __VECS(sha512_tv_template)
3455 		}
3456 	}, {
3457 		.alg = "sm3",
3458 		.test = alg_test_hash,
3459 		.suite = {
3460 			.hash = __VECS(sm3_tv_template)
3461 		}
3462 	}, {
3463 		.alg = "tgr128",
3464 		.test = alg_test_hash,
3465 		.suite = {
3466 			.hash = __VECS(tgr128_tv_template)
3467 		}
3468 	}, {
3469 		.alg = "tgr160",
3470 		.test = alg_test_hash,
3471 		.suite = {
3472 			.hash = __VECS(tgr160_tv_template)
3473 		}
3474 	}, {
3475 		.alg = "tgr192",
3476 		.test = alg_test_hash,
3477 		.suite = {
3478 			.hash = __VECS(tgr192_tv_template)
3479 		}
3480 	}, {
3481 		.alg = "vmac(aes)",
3482 		.test = alg_test_hash,
3483 		.suite = {
3484 			.hash = __VECS(aes_vmac128_tv_template)
3485 		}
3486 	}, {
3487 		.alg = "wp256",
3488 		.test = alg_test_hash,
3489 		.suite = {
3490 			.hash = __VECS(wp256_tv_template)
3491 		}
3492 	}, {
3493 		.alg = "wp384",
3494 		.test = alg_test_hash,
3495 		.suite = {
3496 			.hash = __VECS(wp384_tv_template)
3497 		}
3498 	}, {
3499 		.alg = "wp512",
3500 		.test = alg_test_hash,
3501 		.suite = {
3502 			.hash = __VECS(wp512_tv_template)
3503 		}
3504 	}, {
3505 		.alg = "xcbc(aes)",
3506 		.test = alg_test_hash,
3507 		.suite = {
3508 			.hash = __VECS(aes_xcbc128_tv_template)
3509 		}
3510 	}, {
3511 		.alg = "xts(aes)",
3512 		.test = alg_test_skcipher,
3513 		.fips_allowed = 1,
3514 		.suite = {
3515 			.cipher = __VECS(aes_xts_tv_template)
3516 		}
3517 	}, {
3518 		.alg = "xts(camellia)",
3519 		.test = alg_test_skcipher,
3520 		.suite = {
3521 			.cipher = __VECS(camellia_xts_tv_template)
3522 		}
3523 	}, {
3524 		.alg = "xts(cast6)",
3525 		.test = alg_test_skcipher,
3526 		.suite = {
3527 			.cipher = __VECS(cast6_xts_tv_template)
3528 		}
3529 	}, {
3530 		/* Same as xts(aes) except the key is stored in
3531 		 * hardware secure memory which we reference by index
3532 		 */
3533 		.alg = "xts(paes)",
3534 		.test = alg_test_null,
3535 		.fips_allowed = 1,
3536 	}, {
3537 		.alg = "xts(serpent)",
3538 		.test = alg_test_skcipher,
3539 		.suite = {
3540 			.cipher = __VECS(serpent_xts_tv_template)
3541 		}
3542 	}, {
3543 		.alg = "xts(speck128)",
3544 		.test = alg_test_skcipher,
3545 		.suite = {
3546 			.cipher = __VECS(speck128_xts_tv_template)
3547 		}
3548 	}, {
3549 		.alg = "xts(speck64)",
3550 		.test = alg_test_skcipher,
3551 		.suite = {
3552 			.cipher = __VECS(speck64_xts_tv_template)
3553 		}
3554 	}, {
3555 		.alg = "xts(twofish)",
3556 		.test = alg_test_skcipher,
3557 		.suite = {
3558 			.cipher = __VECS(tf_xts_tv_template)
3559 		}
3560 	}, {
3561 		.alg = "xts4096(paes)",
3562 		.test = alg_test_null,
3563 		.fips_allowed = 1,
3564 	}, {
3565 		.alg = "xts512(paes)",
3566 		.test = alg_test_null,
3567 		.fips_allowed = 1,
3568 	}, {
3569 		.alg = "zlib-deflate",
3570 		.test = alg_test_comp,
3571 		.fips_allowed = 1,
3572 		.suite = {
3573 			.comp = {
3574 				.comp = __VECS(zlib_deflate_comp_tv_template),
3575 				.decomp = __VECS(zlib_deflate_decomp_tv_template)
3576 			}
3577 		}
3578 	}, {
3579 		.alg = "zstd",
3580 		.test = alg_test_comp,
3581 		.fips_allowed = 1,
3582 		.suite = {
3583 			.comp = {
3584 				.comp = __VECS(zstd_comp_tv_template),
3585 				.decomp = __VECS(zstd_decomp_tv_template)
3586 			}
3587 		}
3588 	}
3589 };
3590 
/* Set once alg_test_descs_check_order() has run, so the (expensive) table
 * ordering sanity check is performed only on the first call to alg_test().
 */
static bool alg_test_descs_checked;
3592 
3593 static void alg_test_descs_check_order(void)
3594 {
3595 	int i;
3596 
3597 	/* only check once */
3598 	if (alg_test_descs_checked)
3599 		return;
3600 
3601 	alg_test_descs_checked = true;
3602 
3603 	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3604 		int diff = strcmp(alg_test_descs[i - 1].alg,
3605 				  alg_test_descs[i].alg);
3606 
3607 		if (WARN_ON(diff > 0)) {
3608 			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3609 				alg_test_descs[i - 1].alg,
3610 				alg_test_descs[i].alg);
3611 		}
3612 
3613 		if (WARN_ON(diff == 0)) {
3614 			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3615 				alg_test_descs[i].alg);
3616 		}
3617 	}
3618 }
3619 
3620 static int alg_find_test(const char *alg)
3621 {
3622 	int start = 0;
3623 	int end = ARRAY_SIZE(alg_test_descs);
3624 
3625 	while (start < end) {
3626 		int i = (start + end) / 2;
3627 		int diff = strcmp(alg_test_descs[i].alg, alg);
3628 
3629 		if (diff > 0) {
3630 			end = i;
3631 			continue;
3632 		}
3633 
3634 		if (diff < 0) {
3635 			start = i + 1;
3636 			continue;
3637 		}
3638 
3639 		return i;
3640 	}
3641 
3642 	return -1;
3643 }
3644 
/*
 * alg_test() - run the registered self-tests for an algorithm instance.
 * @driver: driver (implementation) name, e.g. "aes-generic"
 * @alg: algorithm name, e.g. "aes"
 * @type: crypto algorithm type flags (CRYPTO_ALG_TYPE_*)
 * @mask: crypto algorithm type mask
 *
 * Looks up test descriptors for both @alg and @driver in alg_test_descs[]
 * and runs whichever are found.  Bare single-block ciphers are tested via
 * their "ecb(<alg>)" descriptor instead of a dedicated entry.
 *
 * Return: 0 if the tests passed or no tests exist for the algorithm,
 * -EINVAL if FIPS mode is enabled and the algorithm is not FIPS-approved,
 * -ENAMETOOLONG if the constructed "ecb(...)" name overflows, or a
 * negative/nonzero error from the test routines.  In FIPS mode a test
 * failure panics the kernel instead of returning.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* "notests" only skips testing outside of FIPS mode; in FIPS mode
	 * the self-tests are mandatory.
	 */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	/* Validate table ordering once before binary-searching it. */
	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/* Bare ciphers have no table entry of their own; they are
		 * tested through the vectors of the "ecb(<alg>)" entry.
		 */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* An instance may be covered by an entry for the generic algorithm
	 * name, the specific driver name, or both; run every match.
	 */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	/* rc accumulates failures from both descriptor runs; the j != i
	 * check avoids running the same descriptor twice when alg and
	 * driver resolve to the same entry.
	 */
	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	/* FIPS requires a hard failure when a self-test does not pass. */
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
3708 
3709 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3710 
3711 EXPORT_SYMBOL_GPL(alg_test);
3712