/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>

#include "internal.h"

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else

#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
		       16, 1,
		       buf, len, false);
}

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}
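
/*
 * All test data is staged in arrays of XBUFSIZE separately allocated
 * pages rather than in one linear buffer, so that the "chunked" tests
 * below can hand the driver scatterlist entries that sit at awkward
 * offsets and run up against page boundaries.  An IDX[] value encodes
 * both the page and the offset within it:
 *
 *	page   = IDX[k] >> PAGE_SHIFT;
 *	offset = offset_in_page(IDX[k]);
 *
 * so with 4 KiB pages, for example, IDX2 (32400) lands 3728 bytes into
 * the eighth page of the buffer array.
 */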
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

static int do_one_async_hash_op(struct ahash_request *req,
				struct tcrypt_result *tr,
				int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		INIT_COMPLETION(tr->completion);
	}
	return ret;
}
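
/*
 * test_hash() runs each vector twice over the same ahash transform.
 * Vectors without a chunk count (np == 0) are hashed from a single
 * scatterlist entry, either via crypto_ahash_digest() or via an explicit
 * init/update/final sequence depending on @use_digest.  Vectors with
 * np > 0 are then replayed with the plaintext scattered over np chunks of
 * tap[] bytes each, placed at the cross-page IDX[] offsets, to exercise
 * the driver's scatterlist walking.  For example, a hypothetical vector
 * with .psize = 64, .np = 2 and .tap = { 61, 3 } would be fed to the
 * driver as a 61-byte chunk followed by a 3-byte chunk.
 */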
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char result[64];
	struct ahash_request *req;
	struct tcrypt_result tresult;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;
		memset(result, 0, 64);

		hash_buff = xbuf[0];

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
						  template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_digest(req));
			if (ret) {
				pr_err("alg: hash: digest failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
		} else {
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_init(req));
			if (ret) {
				pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_update(req));
			if (ret) {
				pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_final(req));
			if (ret) {
				pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;
			memset(result, 0, 64);

			temp = 0;
			sg_init_table(sg, template[i].np);
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;
				sg_set_buf(&sg[k],
					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].plaintext + temp,
						  template[i].tap[k]),
					   template[i].tap[k]);
				temp += template[i].tap[k];
			}

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
							  template[i].ksize);

				if (ret) {
					printk(KERN_ERR "alg: hash: setkey failed on chunking test %d for %s: ret=%d\n",
					       j, algo, -ret);
					goto out;
				}
			}

			ahash_request_set_crypt(req, sg, result,
						template[i].psize);
			ret = crypto_ahash_digest(req);
			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d failed for %s\n",
				       j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));
				ret = -EINVAL;
				goto out;
			}
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
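
/*
 * test_aead() covers one direction (@enc) of an AEAD transform.  For
 * each vector the tag size is inferred from the difference between the
 * result and input lengths, the key is set (optionally with
 * CRYPTO_TFM_REQ_WEAK_KEY), and the output plus tag is compared against
 * the expected result.  Vectors marked novrfy are expected to make
 * decryption fail with -EBADMSG; a return value of 0 is then itself an
 * error.  A second pass repeats the vectors with the input and the
 * associated data split across scatterlist chunks as described by
 * tap[] and atap[].
 */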
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist sg[8];
	struct scatterlist asg[8];
	const char *e;
	struct tcrypt_result result;
	unsigned int authsize;
	void *input;
	void *assoc;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			j++;

			/* some templates have no input data but they will
			 * touch input
			 */
			input = xbuf[0];
			assoc = axbuf[0];

			ret = -EINVAL;
			if (WARN_ON(template[i].ilen > PAGE_SIZE ||
				    template[i].alen > PAGE_SIZE))
				goto out;

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on test %d for %s: flags=%x\n",
				       j, algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set authsize to %u on test %d for %s\n",
				       authsize, j, algo);
				goto out;
			}

			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					printk(KERN_ERR "alg: aead: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
					       e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on test %d for %s: ret=%d\n",
				       e, j, algo, -ret);
				goto out;
			}

			q = input;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: aead: Test %d failed on %s for %s\n",
				       j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;

			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on chunk test %d for %s: flags=%x\n",
				       j, algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);

			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
					n += authsize;
				if (offset_in_page(q) + n < PAGE_SIZE)
					q[n] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);
				temp += template[i].tap[k];
			}

			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set authsize to %u on chunk test %d for %s\n",
				       authsize, j, algo);
				goto out;
			}

			if (enc) {
				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >
					    PAGE_SIZE)) {
					ret = -EINVAL;
					goto out;
				}

				sg[k - 1].length += authsize;
			}

			sg_init_table(asg, template[i].anp);
			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].atap[k] > PAGE_SIZE))
					goto out;
				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];
			}

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen,
					       iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					printk(KERN_ERR "alg: aead: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
					       e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on chunk test %d for %s: ret=%d\n",
				       e, j, algo, -ret);
				goto out;
			}

			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					printk(KERN_ERR "alg: aead: Chunk test %d failed on %s at page %u for %s\n",
					       j, e, k, algo);
					hexdump(q, n);
					goto out;
				}

				q += n;
				if (k == template[i].np - 1 && !enc) {
					if (memcmp(q, template[i].input +
						      temp + n, authsize))
						n = authsize;
					else
						n = 0;
				} else {
					for (n = 0; offset_in_page(q + n) &&
						    q[n]; n++)
						;
				}
				if (n) {
					printk(KERN_ERR "alg: aead: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
					       j, e, k, algo, n);
					hexdump(q, n);
					goto out;
				}

				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	return ret;
}
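
/*
 * test_cipher() exercises the bare single-block cipher interface: the
 * input is transformed in place, one crypto_cipher_blocksize() chunk at
 * a time, with no IV or chaining involved.  Vectors that carry a chunk
 * layout (np != 0) are skipped here; they are only meaningful for the
 * scatterlist-based tests.
 */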
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (!ret == template[i].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed on test %d for %s: flags=%x\n",
			       j, algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed on %s for %s\n",
			       j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
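
/*
 * test_skcipher() drives the ablkcipher interface asynchronously: the
 * completion callback fills in a tcrypt_result and the test waits on it
 * whenever the request returns -EINPROGRESS or -EBUSY.  As above, a
 * first pass uses a single contiguous buffer and a second pass re-runs
 * the np != 0 vectors with the input scattered at the IDX[] offsets,
 * additionally checking that the driver did not write past the end of
 * any chunk.
 */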
static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	const char *e;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np)) {
			j++;

			ret = -EINVAL;
			if (WARN_ON(template[i].ilen > PAGE_SIZE))
				goto out;

			data = xbuf[0];
			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed on test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			sg_init_one(&sg[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);
			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on test %d for %s: ret=%d\n",
				       e, j, algo, -ret);
				goto out;
			}

			q = data;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d failed on %s for %s\n",
				       j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {

		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			j++;

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			temp = 0;
			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				if (offset_in_page(q) + template[i].tap[k] <
				    PAGE_SIZE)
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				temp += template[i].tap[k];
			}

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);

			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on chunk test %d for %s: ret=%d\n",
				       e, j, algo, -ret);
				goto out;
			}

			temp = 0;
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk test %d failed on %s at page %u for %s\n",
					       j, e, k, algo);
					hexdump(q, template[i].tap[k]);
					goto out;
				}

				q += template[i].tap[k];
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
				if (n) {
					printk(KERN_ERR "alg: skcipher: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
					       j, e, k, algo, n);
					hexdump(q, n);
					goto out;
				}
				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	ablkcipher_request_free(req);
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
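
/*
 * test_comp() is a one-shot check of the synchronous crypto_comp
 * interface: each vector is compressed (or decompressed) into a
 * COMP_BUF_SIZE buffer and both the produced length and the contents
 * must match the template.
 */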
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof(result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d failed for %s: output len = %d\n",
			       i + 1, algo, dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof(result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d failed for %s: output len = %d\n",
			       i + 1, algo, dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}
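
/*
 * test_pcomp() checks the partial (streaming) compression interface.
 * Each vector is deliberately fed in two halves, with only half of the
 * output buffer available at first, so the algorithm has to cope with
 * partial progress (-EAGAIN while input is still pending is treated as
 * a failure).  The byte counts returned by the update and final steps
 * are summed and must match the expected output length exactly.
 */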
static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
		      int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int res;

	for (i = 0; i < ctcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_compress_setup(tfm, ctemplate[i].params,
					    ctemplate[i].paramsize);
		if (res) {
			pr_err("alg: pcomp: compression setup failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}

		res = crypto_compress_init(tfm);
		if (res) {
			pr_err("alg: pcomp: compression init failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}

		memset(result, 0, sizeof(result));

		req.next_in = ctemplate[i].input;
		req.avail_in = ctemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = ctemplate[i].outlen / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Add remaining input data */
		req.avail_in += (ctemplate[i].inlen + 1) / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;

		res = crypto_compress_final(tfm, &req);
		if (res < 0) {
			pr_err("alg: pcomp: compression final failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}
		produced += res;

		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
			pr_err("alg: pcomp: Compression test %d failed for %s: output len = %d (expected %d)\n",
			       i + 1, algo, COMP_BUF_SIZE - req.avail_out,
			       ctemplate[i].outlen);
			return -EINVAL;
		}

		if (produced != ctemplate[i].outlen) {
			pr_err("alg: pcomp: Compression test %d failed for %s: returned len = %u (expected %d)\n",
			       i + 1, algo, produced, ctemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
			pr_err("alg: pcomp: Compression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(result, ctemplate[i].outlen);
			return -EINVAL;
		}
	}

	for (i = 0; i < dtcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_decompress_setup(tfm, dtemplate[i].params,
					      dtemplate[i].paramsize);
		if (res) {
			pr_err("alg: pcomp: decompression setup failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}

		res = crypto_decompress_init(tfm);
		if (res) {
			pr_err("alg: pcomp: decompression init failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}

		memset(result, 0, sizeof(result));

		req.next_in = dtemplate[i].input;
		req.avail_in = dtemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = dtemplate[i].outlen / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Add remaining input data */
		req.avail_in += (dtemplate[i].inlen + 1) / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;

		res = crypto_decompress_final(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression final failed on test %d for %s: error=%d\n",
			       i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
			pr_err("alg: pcomp: Decompression test %d failed for %s: output len = %d (expected %d)\n",
			       i + 1, algo, COMP_BUF_SIZE - req.avail_out,
			       dtemplate[i].outlen);
			return -EINVAL;
		}

		if (produced != dtemplate[i].outlen) {
			pr_err("alg: pcomp: Decompression test %d failed for %s: returned len = %u (expected %d)\n",
			       i + 1, algo, produced, dtemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
			pr_err("alg: pcomp: Decompression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(result, dtemplate[i].outlen);
			return -EINVAL;
		}
	}

	return 0;
}
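
/*
 * test_cprng() reseeds the RNG for every vector with the concatenation
 * of V, the key and DT from the template, then reads rlen bytes `loops'
 * times and compares the last block generated against the expected
 * output.
 */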
static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
		      unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
	int err = 0, i, j, seedsize;
	u8 *seed;
	char result[32];

	seedsize = crypto_rng_seedsize(tfm);

	seed = kmalloc(seedsize, GFP_KERNEL);
	if (!seed) {
		printk(KERN_ERR "alg: cprng: Failed to allocate seed space for %s\n",
		       algo);
		return -ENOMEM;
	}

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 32);

		memcpy(seed, template[i].v, template[i].vlen);
		memcpy(seed + template[i].vlen, template[i].key,
		       template[i].klen);
		memcpy(seed + template[i].vlen + template[i].klen,
		       template[i].dt, template[i].dtlen);

		err = crypto_rng_reset(tfm, seed, seedsize);
		if (err) {
			printk(KERN_ERR "alg: cprng: Failed to reset rng for %s\n",
			       algo);
			goto out;
		}

		for (j = 0; j < template[i].loops; j++) {
			err = crypto_rng_get_bytes(tfm, result,
						   template[i].rlen);
			if (err != template[i].rlen) {
				printk(KERN_ERR "alg: cprng: Failed to obtain the correct amount of random data for %s (requested %d, got %d)\n",
				       algo, template[i].rlen, err);
				goto out;
			}
		}

		err = memcmp(result, template[i].result,
			     template[i].rlen);
		if (err) {
			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
			       i, algo);
			hexdump(result, template[i].rlen);
			err = -EINVAL;
			goto out;
		}
	}

out:
	kfree(seed);
	return err;
}

static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_aead *tfm;
	int err = 0;

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.aead.enc.vecs) {
		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
				desc->suite.aead.enc.count);
		if (err)
			goto out;
	}

	if (!err && desc->suite.aead.dec.vecs)
		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
				desc->suite.aead.dec.count);

out:
	crypto_free_aead(tfm);
	return err;
}

static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_cipher *tfm;
	int err = 0;

	tfm = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: cipher: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				  desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				  desc->suite.cipher.dec.count);

out:
	crypto_free_cipher(tfm);
	return err;
}

static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_ablkcipher *tfm;
	int err = 0;

	tfm = crypto_alloc_ablkcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: skcipher: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				    desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				    desc->suite.cipher.dec.count);

out:
	crypto_free_ablkcipher(tfm);
	return err;
}

static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_comp *tfm;
	int err;

	tfm = crypto_alloc_comp(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: comp: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_comp(tfm, desc->suite.comp.comp.vecs,
			desc->suite.comp.decomp.vecs,
			desc->suite.comp.comp.count,
			desc->suite.comp.decomp.count);

	crypto_free_comp(tfm);
	return err;
}

static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_pcomp *tfm;
	int err;

	tfm = crypto_alloc_pcomp(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
			 desc->suite.pcomp.decomp.vecs,
			 desc->suite.pcomp.comp.count,
			 desc->suite.pcomp.decomp.count);

	crypto_free_pcomp(tfm);
	return err;
}

static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_ahash *tfm;
	int err;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_hash(tfm, desc->suite.hash.vecs,
			desc->suite.hash.count, true);
	if (!err)
		err = test_hash(tfm, desc->suite.hash.vecs,
				desc->suite.hash.count, false);

	crypto_free_ahash(tfm);
	return err;
}
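
/*
 * crc32c gets one extra check beyond the generic hash vectors: the shash
 * state is seeded with a known 32-bit value and finalized with no data,
 * which for crc32c must simply return the bitwise complement of that
 * seed.  A driver that misinterprets the exported partial-CRC state
 * would fail here.
 */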
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		struct {
			struct shash_desc shash;
			char ctx[crypto_shash_descsize(tfm)];
		} sdesc;

		sdesc.shash.tfm = tfm;
		sdesc.shash.flags = 0;

		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for %s: %d\n",
			       driver, err);
			break;
		}

		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: %d\n",
			       driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}

static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_rng *rng;
	int err;

	rng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(rng)) {
		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(rng));
		return PTR_ERR(rng);
	}

	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);

	crypto_free_rng(rng);

	return err;
}

static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}

/* Please keep this list sorted by algorithm name. */
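/*
 * alg_find_test() below bisects this array, so an entry that is out of
 * order would simply never be found.
 */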
static const struct alg_test_desc alg_test_descs[] = {
	{
		.alg = "__driver-cbc-aes-aesni",
		.test = alg_test_null,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = NULL,
					.count = 0
				},
				.dec = {
					.vecs = NULL,
					.count = 0
				}
			}
		}
	}, {
		.alg = "__driver-ecb-aes-aesni",
		.test = alg_test_null,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = NULL,
					.count = 0
				},
				.dec = {
					.vecs = NULL,
					.count = 0
				}
			}
		}
	}, {
		.alg = "__ghash-pclmulqdqni",
		.test = alg_test_null,
		.suite = {
			.hash = {
				.vecs = NULL,
				.count = 0
			}
		}
	}, {
		.alg = "ansi_cprng",
		.test = alg_test_cprng,
		.fips_allowed = 1,
		.suite = {
			.cprng = {
				.vecs = ansi_cprng_aes_tv_template,
				.count = ANSI_CPRNG_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "cbc(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_cbc_enc_tv_template,
					.count = AES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_cbc_dec_tv_template,
					.count = AES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_cbc_enc_tv_template,
					.count = ANUBIS_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_cbc_dec_tv_template,
					.count = ANUBIS_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_cbc_enc_tv_template,
					.count = BF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_cbc_dec_tv_template,
					.count = BF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_cbc_enc_tv_template,
					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_cbc_dec_tv_template,
					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_cbc_enc_tv_template,
					.count = DES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_cbc_dec_tv_template,
					.count = DES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_cbc_enc_tv_template,
					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_cbc_dec_tv_template,
					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_cbc_enc_tv_template,
					.count = TF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_cbc_dec_tv_template,
					.count = TF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ccm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_enc_tv_template,
					.count = AES_CCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_dec_tv_template,
					.count = AES_CCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "crc32c",
		.test = alg_test_crc32c,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = crc32c_tv_template,
				.count = CRC32C_TEST_VECTORS
			}
		}
	}, {
		.alg = "cryptd(__driver-ecb-aes-aesni)",
		.test = alg_test_null,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = NULL,
					.count = 0
				},
				.dec = {
					.vecs = NULL,
					.count = 0
				}
			}
		}
	}, {
		.alg = "cryptd(__ghash-pclmulqdqni)",
		.test = alg_test_null,
		.suite = {
			.hash = {
				.vecs = NULL,
				.count = 0
			}
		}
	}, {
		.alg = "ctr(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_enc_tv_template,
					.count = AES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_dec_tv_template,
					.count = AES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cts_mode_enc_tv_template,
					.count = CTS_MODE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cts_mode_dec_tv_template,
					.count = CTS_MODE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = deflate_comp_tv_template,
					.count = DEFLATE_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = deflate_decomp_tv_template,
					.count = DEFLATE_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(__aes-aesni)",
		.test = alg_test_null,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = NULL,
					.count = 0
				},
				.dec = {
					.vecs = NULL,
					.count = 0
				}
			}
		}
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_enc_tv_template,
					.count = AES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_dec_tv_template,
					.count = AES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_enc_tv_template,
					.count = ANUBIS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_dec_tv_template,
					.count = ANUBIS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = arc4_enc_tv_template,
					.count = ARC4_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = arc4_dec_tv_template,
					.count = ARC4_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_enc_tv_template,
					.count = BF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_dec_tv_template,
					.count = BF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_enc_tv_template,
					.count = CAMELLIA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_dec_tv_template,
					.count = CAMELLIA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_enc_tv_template,
					.count = CAST5_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_dec_tv_template,
					.count = CAST5_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_enc_tv_template,
					.count = CAST6_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_dec_tv_template,
					.count = CAST6_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_enc_tv_template,
					.count = DES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_dec_tv_template,
					.count = DES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_enc_tv_template,
					.count = DES3_EDE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_dec_tv_template,
					.count = DES3_EDE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = khazad_enc_tv_template,
					.count = KHAZAD_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = khazad_dec_tv_template,
					.count = KHAZAD_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = seed_enc_tv_template,
					.count = SEED_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = seed_dec_tv_template,
					.count = SEED_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_enc_tv_template,
					.count = SERPENT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_dec_tv_template,
					.count = SERPENT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tea_enc_tv_template,
					.count = TEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tea_dec_tv_template,
					.count = TEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tnepres_enc_tv_template,
					.count = TNEPRES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tnepres_dec_tv_template,
					.count = TNEPRES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_enc_tv_template,
					.count = TF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_dec_tv_template,
					.count = TF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xeta_enc_tv_template,
					.count = XETA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xeta_dec_tv_template,
					.count = XETA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xtea_enc_tv_template,
					.count = XTEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xtea_dec_tv_template,
					.count = XTEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead =
			{
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = ghash_tv_template,
				.count = GHASH_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ofb_enc_tv_template,
					.count = AES_OFB_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ofb_dec_tv_template,
					.count = AES_OFB_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_rfc3686_enc_tv_template,
					.count = AES_CTR_3686_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_rfc3686_dec_tv_template,
					.count = AES_CTR_3686_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4106_enc_tv_template,
					.count = AES_GCM_4106_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4106_dec_tv_template,
					.count = AES_GCM_4106_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "vmac(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_vmac128_tv_template,
				.count = VMAC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "zlib",
		.test = alg_test_pcomp,
		.suite = {
			.pcomp = {
				.comp = {
					.vecs = zlib_comp_tv_template,
					.count = ZLIB_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = zlib_decomp_tv_template,
					.count = ZLIB_DECOMP_TEST_VECTORS
				}
			}
		}
	}
};
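
/*
 * Test dispatch.  alg_find_test() looks a name up in the sorted table
 * above by binary search.  alg_test() is the entry point used by the
 * crypto manager: plain block ciphers are wrapped as "ecb(alg)" before
 * the lookup, both the algorithm name and the driver name are tried,
 * and in FIPS mode a failed self-test panics the kernel while an
 * algorithm without fips_allowed set is rejected with -EINVAL.
 */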
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
		       driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);