/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>

#include "internal.h"
#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

static int do_one_async_hash_op(struct ahash_request *req,
				struct tcrypt_result *tr,
				int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		INIT_COMPLETION(tr->completion);
	}
	return ret;
}
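
/*
 * Usage sketch (illustrative only): each hash step is funnelled through
 * do_one_async_hash_op() so that synchronous and asynchronous drivers are
 * handled the same way, e.g.
 *
 *	ret = do_one_async_hash_op(req, &tresult, crypto_ahash_init(req));
 *	if (!ret)
 *		ret = do_one_async_hash_op(req, &tresult,
 *					   crypto_ahash_update(req));
 *
 * An asynchronous driver returns -EINPROGRESS or -EBUSY; the real status is
 * then taken from the tcrypt_result filled in by tcrypt_complete(), and the
 * completion is re-armed for the next operation.
 */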

static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char result[64];
	struct ahash_request *req;
	struct tcrypt_result tresult;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;
		memset(result, 0, 64);

		hash_buff = xbuf[0];

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
						  template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_digest(req));
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		} else {
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_init(req));
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_update(req));
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_final(req));
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;
			memset(result, 0, 64);

			temp = 0;
			sg_init_table(sg, template[i].np);
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;
				sg_set_buf(&sg[k],
					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].plaintext + temp,
						  template[i].tap[k]),
					   template[i].tap[k]);
				temp += template[i].tap[k];
			}

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
							  template[i].ksize);

				if (ret) {
					printk(KERN_ERR "alg: hash: setkey "
					       "failed on chunking test %d "
					       "for %s: ret=%d\n", j, algo,
					       -ret);
					goto out;
				}
			}

			ahash_request_set_crypt(req, sg, result,
						template[i].psize);
			ret = crypto_ahash_digest(req);
			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: hash: digest failed "
				       "on chunking test %d for %s: "
				       "ret=%d\n", j, algo, -ret);
				goto out;
			}

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d "
				       "failed for %s\n", j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));
				ret = -EINVAL;
				goto out;
			}
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
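
/*
 * test_hash() makes two passes over the vectors: plain vectors (np == 0)
 * are hashed from a single linear buffer, either via ->digest() or via an
 * explicit init/update/final sequence depending on @use_digest; vectors
 * with np set are split into np chunks of tap[] bytes each, copied to the
 * IDX[] offsets inside xbuf so that the scatterlist walk crosses page
 * boundaries.  For example (illustrative only), a vector with psize = 64,
 * np = 2 and tap = { 32, 32 } is digested from two scatterlist entries
 * living on different pages.
 */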

static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist sg[8];
	struct scatterlist asg[8];
	const char *e;
	struct tcrypt_result result;
	unsigned int authsize;
	void *input;
	void *assoc;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: aead: Failed to allocate request for "
		       "%s\n", algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			j++;

			/* some templates have no input data but they will
			 * touch input
			 */
			input = xbuf[0];
			assoc = axbuf[0];

			ret = -EINVAL;
			if (WARN_ON(template[i].ilen > PAGE_SIZE ||
				    template[i].alen > PAGE_SIZE))
				goto out;

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "test %d for %s: flags=%x\n", j, algo,
				       crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on test %d for %s\n",
				       authsize, j, algo);
				goto out;
			}

			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					printk(KERN_ERR "alg: aead: %s failed "
					       "on test %d for %s: ret was 0, "
					       "expected -EBADMSG\n",
					       e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on test "
				       "%d for %s: ret=%d\n", e, j, algo, -ret);
				goto out;
			}

			q = input;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: aead: Test %d failed on "
				       "%s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;

			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "chunk test %d for %s: flags=%x\n", j,
				       algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);

			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
					n += authsize;
				if (offset_in_page(q) + n < PAGE_SIZE)
					q[n] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);
				temp += template[i].tap[k];
			}

			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on chunk test %d for "
				       "%s\n", authsize, j, algo);
				goto out;
			}

			if (enc) {
				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >
					    PAGE_SIZE)) {
					ret = -EINVAL;
					goto out;
				}

				sg[k - 1].length += authsize;
			}

			sg_init_table(asg, template[i].anp);
			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].atap[k] > PAGE_SIZE))
					goto out;
				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];
			}

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen,
					       iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					printk(KERN_ERR "alg: aead: %s failed "
					       "on chunk test %d for %s: ret "
					       "was 0, expected -EBADMSG\n",
					       e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					printk(KERN_ERR "alg: aead: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, n);
					goto out;
				}

				q += n;
				if (k == template[i].np - 1 && !enc) {
					if (memcmp(q, template[i].input +
						      temp + n, authsize))
						n = authsize;
					else
						n = 0;
				} else {
					for (n = 0; offset_in_page(q + n) &&
						    q[n]; n++)
						;
				}
				if (n) {
					printk(KERN_ERR "alg: aead: Result "
					       "buffer corruption in chunk "
					       "test %d on %s at page %u for "
					       "%s: %u bytes:\n", j, e, k,
					       algo, n);
					hexdump(q, n);
					goto out;
				}

				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	return ret;
}
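
/*
 * For AEAD vectors the authentication tag size is implied by the vectors
 * themselves: authsize = abs(rlen - ilen).  On encryption the output
 * (ciphertext plus tag) is rlen bytes, so the buffer is mapped with
 * ilen + authsize bytes and, in the chunked pass, the last scatterlist
 * entry is extended by authsize bytes.  Decryption vectors marked .novrfy
 * are expected to fail verification with -EBADMSG rather than produce
 * plaintext.
 */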

static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (!ret == template[i].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
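
/*
 * test_cipher() drives the single-block cipher interface directly: the
 * input is transformed in place, one crypto_cipher_blocksize() chunk at a
 * time, so only vectors without a chunking description (np == 0) are used
 * here.  Scatterlist handling and chained modes go through test_skcipher()
 * below instead.
 */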

static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	const char *e;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: skcipher: Failed to allocate request "
		       "for %s\n", algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np)) {
			j++;

			ret = -EINVAL;
			if (WARN_ON(template[i].ilen > PAGE_SIZE))
				goto out;

			data = xbuf[0];
			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on test %d for %s: flags=%x\n", j,
				       algo, crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			sg_init_one(&sg[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);
			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "test %d for %s: ret=%d\n", e, j, algo,
				       -ret);
				goto out;
			}

			q = data;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d "
				       "failed on %s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {

		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			j++;

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			temp = 0;
			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				if (offset_in_page(q) + template[i].tap[k] <
				    PAGE_SIZE)
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				temp += template[i].tap[k];
			}

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);

			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			temp = 0;
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, template[i].tap[k]);
					goto out;
				}

				q += template[i].tap[k];
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
				if (n) {
					printk(KERN_ERR "alg: skcipher: "
					       "Result buffer corruption in "
					       "chunk test %d on %s at page "
					       "%u for %s: %u bytes:\n", j, e,
					       k, algo, n);
					hexdump(q, n);
					goto out;
				}
				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	ablkcipher_request_free(req);
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
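
/*
 * In the chunked pass each tap[] segment is zero-terminated (when it does
 * not already end at a page boundary) before the request runs; afterwards
 * the bytes just past every segment are scanned, so a driver that writes
 * beyond the scatterlist entries it was given shows up as "Result buffer
 * corruption" instead of being silently accepted.
 */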

static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof(result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof(result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}

static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
		      int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int res;

	for (i = 0; i < ctcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_compress_setup(tfm, ctemplate[i].params,
					    ctemplate[i].paramsize);
		if (res) {
			pr_err("alg: pcomp: compression setup failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		res = crypto_compress_init(tfm);
		if (res) {
			pr_err("alg: pcomp: compression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		memset(result, 0, sizeof(result));

		req.next_in = ctemplate[i].input;
		req.avail_in = ctemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = ctemplate[i].outlen / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Add remaining input data */
		req.avail_in += (ctemplate[i].inlen + 1) / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;

		res = crypto_compress_final(tfm, &req);
		if (res < 0) {
			pr_err("alg: pcomp: compression final failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		produced += res;

		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s: output len = %d (expected %d)\n", i + 1,
			       algo, COMP_BUF_SIZE - req.avail_out,
			       ctemplate[i].outlen);
			return -EINVAL;
		}

		if (produced != ctemplate[i].outlen) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s: returned len = %u (expected %d)\n", i + 1,
			       algo, produced, ctemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, ctemplate[i].outlen);
			return -EINVAL;
		}
	}

	for (i = 0; i < dtcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_decompress_setup(tfm, dtemplate[i].params,
					      dtemplate[i].paramsize);
		if (res) {
			pr_err("alg: pcomp: decompression setup failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		res = crypto_decompress_init(tfm);
		if (res) {
			pr_err("alg: pcomp: decompression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		memset(result, 0, sizeof(result));

		req.next_in = dtemplate[i].input;
		req.avail_in = dtemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = dtemplate[i].outlen / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Add remaining input data */
		req.avail_in += (dtemplate[i].inlen + 1) / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;

		res = crypto_decompress_final(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression final failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s: output len = %d (expected %d)\n", i + 1,
			       algo, COMP_BUF_SIZE - req.avail_out,
			       dtemplate[i].outlen);
			return -EINVAL;
		}

		if (produced != dtemplate[i].outlen) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s: returned len = %u (expected %d)\n", i + 1,
			       algo, produced, dtemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, dtemplate[i].outlen);
			return -EINVAL;
		}
	}

	return 0;
}


static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
		      unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
	int err = 0, i, j, seedsize;
	u8 *seed;
	char result[32];

	seedsize = crypto_rng_seedsize(tfm);

	seed = kmalloc(seedsize, GFP_KERNEL);
	if (!seed) {
		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
		       "for %s\n", algo);
		return -ENOMEM;
	}

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 32);

		memcpy(seed, template[i].v, template[i].vlen);
		memcpy(seed + template[i].vlen, template[i].key,
		       template[i].klen);
		memcpy(seed + template[i].vlen + template[i].klen,
		       template[i].dt, template[i].dtlen);

		err = crypto_rng_reset(tfm, seed, seedsize);
		if (err) {
			printk(KERN_ERR "alg: cprng: Failed to reset rng "
			       "for %s\n", algo);
			goto out;
		}

		for (j = 0; j < template[i].loops; j++) {
			err = crypto_rng_get_bytes(tfm, result,
						   template[i].rlen);
			if (err != template[i].rlen) {
				printk(KERN_ERR "alg: cprng: Failed to obtain "
				       "the correct amount of random data for "
				       "%s (requested %d, got %d)\n", algo,
				       template[i].rlen, err);
				goto out;
			}
		}

		err = memcmp(result, template[i].result,
			     template[i].rlen);
		if (err) {
			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
			       i, algo);
			hexdump(result, template[i].rlen);
			err = -EINVAL;
			goto out;
		}
	}

out:
	kfree(seed);
	return err;
}
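
/*
 * The CPRNG seed handed to crypto_rng_reset() is the concatenation
 * V || key || DT taken from the test vector (the layout expected by the
 * ansi_cprng vectors in the table below); crypto_rng_get_bytes() is then
 * called .loops times and only the last generated block is compared
 * against the expected result.
 */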

static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_aead *tfm;
	int err = 0;

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.aead.enc.vecs) {
		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
				desc->suite.aead.enc.count);
		if (err)
			goto out;
	}

	if (!err && desc->suite.aead.dec.vecs)
		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
				desc->suite.aead.dec.count);

out:
	crypto_free_aead(tfm);
	return err;
}

static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_cipher *tfm;
	int err = 0;

	tfm = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: cipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				  desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				  desc->suite.cipher.dec.count);

out:
	crypto_free_cipher(tfm);
	return err;
}

static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_ablkcipher *tfm;
	int err = 0;

	tfm = crypto_alloc_ablkcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				    desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				    desc->suite.cipher.dec.count);

out:
	crypto_free_ablkcipher(tfm);
	return err;
}

static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_comp *tfm;
	int err;

	tfm = crypto_alloc_comp(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_comp(tfm, desc->suite.comp.comp.vecs,
			desc->suite.comp.decomp.vecs,
			desc->suite.comp.comp.count,
			desc->suite.comp.decomp.count);

	crypto_free_comp(tfm);
	return err;
}

static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_pcomp *tfm;
	int err;

	tfm = crypto_alloc_pcomp(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
			 desc->suite.pcomp.decomp.vecs,
			 desc->suite.pcomp.comp.count,
			 desc->suite.pcomp.decomp.count);

	crypto_free_pcomp(tfm);
	return err;
}

static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_ahash *tfm;
	int err;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_hash(tfm, desc->suite.hash.vecs,
			desc->suite.hash.count, true);
	if (!err)
		err = test_hash(tfm, desc->suite.hash.vecs,
				desc->suite.hash.count, false);

	crypto_free_ahash(tfm);
	return err;
}

static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		struct {
			struct shash_desc shash;
			char ctx[crypto_shash_descsize(tfm)];
		} sdesc;

		sdesc.shash.tfm = tfm;
		sdesc.shash.flags = 0;

		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}

static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_rng *rng;
	int err;

	rng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(rng)) {
		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(rng));
		return PTR_ERR(rng);
	}

	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);

	crypto_free_rng(rng);

	return err;
}

static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}

/* Please keep this list sorted by algorithm name. */
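/*
 * This table is the single source of truth for which vectors an algorithm
 * is tested against.  alg_find_test() further down performs a binary
 * search over it, which is why the entries must stay sorted by .alg.
 * Entries wired to alg_test_null() register an algorithm name without
 * running any vectors.
 */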
static const struct alg_test_desc alg_test_descs[] = {
	{
		.alg = "__driver-cbc-aes-aesni",
		.test = alg_test_null,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = NULL,
					.count = 0
				},
				.dec = {
					.vecs = NULL,
					.count = 0
				}
			}
		}
	}, {
		.alg = "__driver-ecb-aes-aesni",
		.test = alg_test_null,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = NULL,
					.count = 0
				},
				.dec = {
					.vecs = NULL,
					.count = 0
				}
			}
		}
	}, {
		.alg = "__ghash-pclmulqdqni",
		.test = alg_test_null,
		.suite = {
			.hash = {
				.vecs = NULL,
				.count = 0
			}
		}
	}, {
		.alg = "ansi_cprng",
		.test = alg_test_cprng,
		.fips_allowed = 1,
		.suite = {
			.cprng = {
				.vecs = ansi_cprng_aes_tv_template,
				.count = ANSI_CPRNG_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "cbc(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_cbc_enc_tv_template,
					.count = AES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_cbc_dec_tv_template,
					.count = AES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_cbc_enc_tv_template,
					.count = ANUBIS_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_cbc_dec_tv_template,
					.count = ANUBIS_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_cbc_enc_tv_template,
					.count = BF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_cbc_dec_tv_template,
					.count = BF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_cbc_enc_tv_template,
					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_cbc_dec_tv_template,
					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_cbc_enc_tv_template,
					.count = DES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_cbc_dec_tv_template,
					.count = DES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_cbc_enc_tv_template,
					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_cbc_dec_tv_template,
					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_cbc_enc_tv_template,
					.count = TF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_cbc_dec_tv_template,
					.count = TF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ccm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_enc_tv_template,
					.count = AES_CCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_dec_tv_template,
					.count = AES_CCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "crc32c",
		.test = alg_test_crc32c,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = crc32c_tv_template,
				.count = CRC32C_TEST_VECTORS
			}
		}
	}, {
		.alg = "cryptd(__driver-ecb-aes-aesni)",
		.test = alg_test_null,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = NULL,
					.count = 0
				},
				.dec = {
					.vecs = NULL,
					.count = 0
				}
			}
		}
	}, {
		.alg = "cryptd(__ghash-pclmulqdqni)",
		.test = alg_test_null,
		.suite = {
			.hash = {
				.vecs = NULL,
				.count = 0
			}
		}
	}, {
		.alg = "ctr(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_enc_tv_template,
					.count = AES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_dec_tv_template,
					.count = AES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cts_mode_enc_tv_template,
					.count = CTS_MODE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cts_mode_dec_tv_template,
					.count = CTS_MODE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = deflate_comp_tv_template,
					.count = DEFLATE_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = deflate_decomp_tv_template,
					.count = DEFLATE_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(__aes-aesni)",
		.test = alg_test_null,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = NULL,
					.count = 0
				},
				.dec = {
					.vecs = NULL,
					.count = 0
				}
			}
		}
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_enc_tv_template,
					.count = AES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_dec_tv_template,
					.count = AES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_enc_tv_template,
					.count = ANUBIS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_dec_tv_template,
					.count = ANUBIS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = arc4_enc_tv_template,
					.count = ARC4_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = arc4_dec_tv_template,
					.count = ARC4_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_enc_tv_template,
					.count = BF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_dec_tv_template,
					.count = BF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_enc_tv_template,
					.count = CAMELLIA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_dec_tv_template,
					.count = CAMELLIA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_enc_tv_template,
					.count = CAST5_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_dec_tv_template,
					.count = CAST5_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_enc_tv_template,
					.count = CAST6_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_dec_tv_template,
					.count = CAST6_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_enc_tv_template,
					.count = DES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_dec_tv_template,
					.count = DES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_enc_tv_template,
					.count = DES3_EDE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_dec_tv_template,
					.count = DES3_EDE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = khazad_enc_tv_template,
					.count = KHAZAD_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = khazad_dec_tv_template,
					.count = KHAZAD_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = seed_enc_tv_template,
					.count = SEED_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = seed_dec_tv_template,
					.count = SEED_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_enc_tv_template,
					.count = SERPENT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_dec_tv_template,
					.count = SERPENT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tea_enc_tv_template,
					.count = TEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tea_dec_tv_template,
					.count = TEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tnepres_enc_tv_template,
					.count = TNEPRES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tnepres_dec_tv_template,
					.count = TNEPRES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_enc_tv_template,
					.count = TF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_dec_tv_template,
					.count = TF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xeta_enc_tv_template,
					.count = XETA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xeta_dec_tv_template,
					.count = XETA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xtea_enc_tv_template,
					.count = XTEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xtea_dec_tv_template,
					.count = XTEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = ghash_tv_template,
				.count = GHASH_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_rfc3686_enc_tv_template,
					.count = AES_CTR_3686_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_rfc3686_dec_tv_template,
					.count = AES_CTR_3686_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "vmac(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_vmac128_tv_template,
				.count = VMAC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "zlib",
		.test = alg_test_pcomp,
		.suite = {
			.pcomp = {
				.comp = {
					.vecs = zlib_comp_tv_template,
					.count = ZLIB_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = zlib_decomp_tv_template,
					.count = ZLIB_DECOMP_TEST_VECTORS
				}
			}
		}
	}
};
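
/*
 * alg_find_test() below performs a binary search on alg_test_descs[], and
 * alg_test() is the entry point used when an algorithm is registered: bare
 * ciphers (CRYPTO_ALG_TYPE_CIPHER) are looked up under their "ecb(alg)"
 * name, all other algorithms are looked up both by generic name and by
 * driver name, and in FIPS mode a failing self-test panics the machine.
 */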

static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
		       driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
EXPORT_SYMBOL_GPL(alg_test);