1 /* 2 * Algorithm testing framework and tests. 3 * 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 6 * Copyright (c) 2007 Nokia Siemens Networks 7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * Updated RFC4106 AES-GCM testing. 10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) 11 * Adrian Hoban <adrian.hoban@intel.com> 12 * Gabriele Paoloni <gabriele.paoloni@intel.com> 13 * Tadeusz Struk (tadeusz.struk@intel.com) 14 * Copyright (c) 2010, Intel Corporation. 15 * 16 * This program is free software; you can redistribute it and/or modify it 17 * under the terms of the GNU General Public License as published by the Free 18 * Software Foundation; either version 2 of the License, or (at your option) 19 * any later version. 20 * 21 */ 22 23 #include <crypto/aead.h> 24 #include <crypto/hash.h> 25 #include <crypto/skcipher.h> 26 #include <linux/err.h> 27 #include <linux/fips.h> 28 #include <linux/module.h> 29 #include <linux/scatterlist.h> 30 #include <linux/slab.h> 31 #include <linux/string.h> 32 #include <crypto/rng.h> 33 #include <crypto/drbg.h> 34 #include <crypto/akcipher.h> 35 36 #include "internal.h" 37 38 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS 39 40 /* a perfect nop */ 41 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 42 { 43 return 0; 44 } 45 46 #else 47 48 #include "testmgr.h" 49 50 /* 51 * Need slab memory for testing (size in number of pages). 52 */ 53 #define XBUFSIZE 8 54 55 /* 56 * Indexes into the xbuf to simulate cross-page access. 57 */ 58 #define IDX1 32 59 #define IDX2 32400 60 #define IDX3 1 61 #define IDX4 8193 62 #define IDX5 22222 63 #define IDX6 17101 64 #define IDX7 27333 65 #define IDX8 3000 66 67 /* 68 * Used by test_cipher() 69 */ 70 #define ENCRYPT 1 71 #define DECRYPT 0 72 73 struct tcrypt_result { 74 struct completion completion; 75 int err; 76 }; 77 78 struct aead_test_suite { 79 struct { 80 struct aead_testvec *vecs; 81 unsigned int count; 82 } enc, dec; 83 }; 84 85 struct cipher_test_suite { 86 struct { 87 struct cipher_testvec *vecs; 88 unsigned int count; 89 } enc, dec; 90 }; 91 92 struct comp_test_suite { 93 struct { 94 struct comp_testvec *vecs; 95 unsigned int count; 96 } comp, decomp; 97 }; 98 99 struct pcomp_test_suite { 100 struct { 101 struct pcomp_testvec *vecs; 102 unsigned int count; 103 } comp, decomp; 104 }; 105 106 struct hash_test_suite { 107 struct hash_testvec *vecs; 108 unsigned int count; 109 }; 110 111 struct cprng_test_suite { 112 struct cprng_testvec *vecs; 113 unsigned int count; 114 }; 115 116 struct drbg_test_suite { 117 struct drbg_testvec *vecs; 118 unsigned int count; 119 }; 120 121 struct akcipher_test_suite { 122 struct akcipher_testvec *vecs; 123 unsigned int count; 124 }; 125 126 struct alg_test_desc { 127 const char *alg; 128 int (*test)(const struct alg_test_desc *desc, const char *driver, 129 u32 type, u32 mask); 130 int fips_allowed; /* set if alg is allowed in fips mode */ 131 132 union { 133 struct aead_test_suite aead; 134 struct cipher_test_suite cipher; 135 struct comp_test_suite comp; 136 struct pcomp_test_suite pcomp; 137 struct hash_test_suite hash; 138 struct cprng_test_suite cprng; 139 struct drbg_test_suite drbg; 140 struct akcipher_test_suite akcipher; 141 } suite; 142 }; 143 144 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 145 146 static void hexdump(unsigned char *buf, unsigned int len) 147 { 148 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, 149 16, 1, 150 buf, len, false); 151 } 152 153 static void tcrypt_complete(struct crypto_async_request *req, int err) 154 { 155 struct tcrypt_result *res = req->data; 156 157 if (err == -EINPROGRESS) 158 return; 159 160 res->err = err; 161 complete(&res->completion); 162 } 163 164 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 165 { 166 int i; 167 168 for (i = 0; i < XBUFSIZE; i++) { 169 buf[i] = (void *)__get_free_page(GFP_KERNEL); 170 if (!buf[i]) 171 goto err_free_buf; 172 } 173 174 return 0; 175 176 err_free_buf: 177 while (i-- > 0) 178 free_page((unsigned long)buf[i]); 179 180 return -ENOMEM; 181 } 182 183 static void testmgr_free_buf(char *buf[XBUFSIZE]) 184 { 185 int i; 186 187 for (i = 0; i < XBUFSIZE; i++) 188 free_page((unsigned long)buf[i]); 189 } 190 191 static int wait_async_op(struct tcrypt_result *tr, int ret) 192 { 193 if (ret == -EINPROGRESS || ret == -EBUSY) { 194 wait_for_completion(&tr->completion); 195 reinit_completion(&tr->completion); 196 ret = tr->err; 197 } 198 return ret; 199 } 200 201 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 202 unsigned int tcount, bool use_digest, 203 const int align_offset) 204 { 205 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 206 unsigned int i, j, k, temp; 207 struct scatterlist sg[8]; 208 char *result; 209 char *key; 210 struct ahash_request *req; 211 struct tcrypt_result tresult; 212 void *hash_buff; 213 char *xbuf[XBUFSIZE]; 214 int ret = -ENOMEM; 215 216 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL); 217 if (!result) 218 return ret; 219 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 220 if (!key) 221 goto out_nobuf; 222 if (testmgr_alloc_buf(xbuf)) 223 goto out_nobuf; 224 225 init_completion(&tresult.completion); 226 227 req = ahash_request_alloc(tfm, GFP_KERNEL); 228 if (!req) { 229 printk(KERN_ERR "alg: hash: Failed to allocate request for " 230 "%s\n", algo); 231 goto out_noreq; 232 } 233 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 234 tcrypt_complete, &tresult); 235 236 j = 0; 237 for (i = 0; i < tcount; i++) { 238 if (template[i].np) 239 continue; 240 241 ret = -EINVAL; 242 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 243 goto out; 244 245 j++; 246 memset(result, 0, MAX_DIGEST_SIZE); 247 248 hash_buff = xbuf[0]; 249 hash_buff += align_offset; 250 251 memcpy(hash_buff, template[i].plaintext, template[i].psize); 252 sg_init_one(&sg[0], hash_buff, template[i].psize); 253 254 if (template[i].ksize) { 255 crypto_ahash_clear_flags(tfm, ~0); 256 if (template[i].ksize > MAX_KEYLEN) { 257 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 258 j, algo, template[i].ksize, MAX_KEYLEN); 259 ret = -EINVAL; 260 goto out; 261 } 262 memcpy(key, template[i].key, template[i].ksize); 263 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 264 if (ret) { 265 printk(KERN_ERR "alg: hash: setkey failed on " 266 "test %d for %s: ret=%d\n", j, algo, 267 -ret); 268 goto out; 269 } 270 } 271 272 ahash_request_set_crypt(req, sg, result, template[i].psize); 273 if (use_digest) { 274 ret = wait_async_op(&tresult, crypto_ahash_digest(req)); 275 if (ret) { 276 pr_err("alg: hash: digest failed on test %d " 277 "for %s: ret=%d\n", j, algo, -ret); 278 goto out; 279 } 280 } else { 281 ret = wait_async_op(&tresult, crypto_ahash_init(req)); 282 if (ret) { 283 pr_err("alt: hash: init failed on test %d " 284 "for %s: ret=%d\n", j, algo, -ret); 285 goto out; 286 } 287 ret = wait_async_op(&tresult, crypto_ahash_update(req)); 288 if (ret) { 289 pr_err("alt: hash: update failed on test %d " 290 "for %s: ret=%d\n", j, algo, -ret); 291 goto out; 292 } 293 ret = wait_async_op(&tresult, crypto_ahash_final(req)); 294 if (ret) { 295 pr_err("alt: hash: final failed on test %d " 296 "for %s: ret=%d\n", j, algo, -ret); 297 goto out; 298 } 299 } 300 301 if (memcmp(result, template[i].digest, 302 crypto_ahash_digestsize(tfm))) { 303 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 304 j, algo); 305 hexdump(result, crypto_ahash_digestsize(tfm)); 306 ret = -EINVAL; 307 goto out; 308 } 309 } 310 311 j = 0; 312 for (i = 0; i < tcount; i++) { 313 /* alignment tests are only done with continuous buffers */ 314 if (align_offset != 0) 315 break; 316 317 if (!template[i].np) 318 continue; 319 320 j++; 321 memset(result, 0, MAX_DIGEST_SIZE); 322 323 temp = 0; 324 sg_init_table(sg, template[i].np); 325 ret = -EINVAL; 326 for (k = 0; k < template[i].np; k++) { 327 if (WARN_ON(offset_in_page(IDX[k]) + 328 template[i].tap[k] > PAGE_SIZE)) 329 goto out; 330 sg_set_buf(&sg[k], 331 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 332 offset_in_page(IDX[k]), 333 template[i].plaintext + temp, 334 template[i].tap[k]), 335 template[i].tap[k]); 336 temp += template[i].tap[k]; 337 } 338 339 if (template[i].ksize) { 340 if (template[i].ksize > MAX_KEYLEN) { 341 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 342 j, algo, template[i].ksize, MAX_KEYLEN); 343 ret = -EINVAL; 344 goto out; 345 } 346 crypto_ahash_clear_flags(tfm, ~0); 347 memcpy(key, template[i].key, template[i].ksize); 348 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 349 350 if (ret) { 351 printk(KERN_ERR "alg: hash: setkey " 352 "failed on chunking test %d " 353 "for %s: ret=%d\n", j, algo, -ret); 354 goto out; 355 } 356 } 357 358 ahash_request_set_crypt(req, sg, result, template[i].psize); 359 ret = crypto_ahash_digest(req); 360 switch (ret) { 361 case 0: 362 break; 363 case -EINPROGRESS: 364 case -EBUSY: 365 wait_for_completion(&tresult.completion); 366 reinit_completion(&tresult.completion); 367 ret = tresult.err; 368 if (!ret) 369 break; 370 /* fall through */ 371 default: 372 printk(KERN_ERR "alg: hash: digest failed " 373 "on chunking test %d for %s: " 374 "ret=%d\n", j, algo, -ret); 375 goto out; 376 } 377 378 if (memcmp(result, template[i].digest, 379 crypto_ahash_digestsize(tfm))) { 380 printk(KERN_ERR "alg: hash: Chunking test %d " 381 "failed for %s\n", j, algo); 382 hexdump(result, crypto_ahash_digestsize(tfm)); 383 ret = -EINVAL; 384 goto out; 385 } 386 } 387 388 ret = 0; 389 390 out: 391 ahash_request_free(req); 392 out_noreq: 393 testmgr_free_buf(xbuf); 394 out_nobuf: 395 kfree(key); 396 kfree(result); 397 return ret; 398 } 399 400 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 401 unsigned int tcount, bool use_digest) 402 { 403 unsigned int alignmask; 404 int ret; 405 406 ret = __test_hash(tfm, template, tcount, use_digest, 0); 407 if (ret) 408 return ret; 409 410 /* test unaligned buffers, check with one byte offset */ 411 ret = __test_hash(tfm, template, tcount, use_digest, 1); 412 if (ret) 413 return ret; 414 415 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 416 if (alignmask) { 417 /* Check if alignment mask for tfm is correctly set. */ 418 ret = __test_hash(tfm, template, tcount, use_digest, 419 alignmask + 1); 420 if (ret) 421 return ret; 422 } 423 424 return 0; 425 } 426 427 static int __test_aead(struct crypto_aead *tfm, int enc, 428 struct aead_testvec *template, unsigned int tcount, 429 const bool diff_dst, const int align_offset) 430 { 431 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 432 unsigned int i, j, k, n, temp; 433 int ret = -ENOMEM; 434 char *q; 435 char *key; 436 struct aead_request *req; 437 struct scatterlist *sg; 438 struct scatterlist *sgout; 439 const char *e, *d; 440 struct tcrypt_result result; 441 unsigned int authsize, iv_len; 442 void *input; 443 void *output; 444 void *assoc; 445 char *iv; 446 char *xbuf[XBUFSIZE]; 447 char *xoutbuf[XBUFSIZE]; 448 char *axbuf[XBUFSIZE]; 449 450 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 451 if (!iv) 452 return ret; 453 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 454 if (!key) 455 goto out_noxbuf; 456 if (testmgr_alloc_buf(xbuf)) 457 goto out_noxbuf; 458 if (testmgr_alloc_buf(axbuf)) 459 goto out_noaxbuf; 460 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 461 goto out_nooutbuf; 462 463 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 464 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL); 465 if (!sg) 466 goto out_nosg; 467 sgout = &sg[16]; 468 469 if (diff_dst) 470 d = "-ddst"; 471 else 472 d = ""; 473 474 if (enc == ENCRYPT) 475 e = "encryption"; 476 else 477 e = "decryption"; 478 479 init_completion(&result.completion); 480 481 req = aead_request_alloc(tfm, GFP_KERNEL); 482 if (!req) { 483 pr_err("alg: aead%s: Failed to allocate request for %s\n", 484 d, algo); 485 goto out; 486 } 487 488 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 489 tcrypt_complete, &result); 490 491 for (i = 0, j = 0; i < tcount; i++) { 492 if (template[i].np) 493 continue; 494 495 j++; 496 497 /* some templates have no input data but they will 498 * touch input 499 */ 500 input = xbuf[0]; 501 input += align_offset; 502 assoc = axbuf[0]; 503 504 ret = -EINVAL; 505 if (WARN_ON(align_offset + template[i].ilen > 506 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 507 goto out; 508 509 memcpy(input, template[i].input, template[i].ilen); 510 memcpy(assoc, template[i].assoc, template[i].alen); 511 iv_len = crypto_aead_ivsize(tfm); 512 if (template[i].iv) 513 memcpy(iv, template[i].iv, iv_len); 514 else 515 memset(iv, 0, iv_len); 516 517 crypto_aead_clear_flags(tfm, ~0); 518 if (template[i].wk) 519 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 520 521 if (template[i].klen > MAX_KEYLEN) { 522 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 523 d, j, algo, template[i].klen, 524 MAX_KEYLEN); 525 ret = -EINVAL; 526 goto out; 527 } 528 memcpy(key, template[i].key, template[i].klen); 529 530 ret = crypto_aead_setkey(tfm, key, template[i].klen); 531 if (!ret == template[i].fail) { 532 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 533 d, j, algo, crypto_aead_get_flags(tfm)); 534 goto out; 535 } else if (ret) 536 continue; 537 538 authsize = abs(template[i].rlen - template[i].ilen); 539 ret = crypto_aead_setauthsize(tfm, authsize); 540 if (ret) { 541 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 542 d, authsize, j, algo); 543 goto out; 544 } 545 546 k = !!template[i].alen; 547 sg_init_table(sg, k + 1); 548 sg_set_buf(&sg[0], assoc, template[i].alen); 549 sg_set_buf(&sg[k], input, 550 template[i].ilen + (enc ? authsize : 0)); 551 output = input; 552 553 if (diff_dst) { 554 sg_init_table(sgout, k + 1); 555 sg_set_buf(&sgout[0], assoc, template[i].alen); 556 557 output = xoutbuf[0]; 558 output += align_offset; 559 sg_set_buf(&sgout[k], output, 560 template[i].rlen + (enc ? 0 : authsize)); 561 } 562 563 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 564 template[i].ilen, iv); 565 566 aead_request_set_ad(req, template[i].alen); 567 568 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 569 570 switch (ret) { 571 case 0: 572 if (template[i].novrfy) { 573 /* verification was supposed to fail */ 574 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 575 d, e, j, algo); 576 /* so really, we got a bad message */ 577 ret = -EBADMSG; 578 goto out; 579 } 580 break; 581 case -EINPROGRESS: 582 case -EBUSY: 583 wait_for_completion(&result.completion); 584 reinit_completion(&result.completion); 585 ret = result.err; 586 if (!ret) 587 break; 588 case -EBADMSG: 589 if (template[i].novrfy) 590 /* verification failure was expected */ 591 continue; 592 /* fall through */ 593 default: 594 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 595 d, e, j, algo, -ret); 596 goto out; 597 } 598 599 q = output; 600 if (memcmp(q, template[i].result, template[i].rlen)) { 601 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 602 d, j, e, algo); 603 hexdump(q, template[i].rlen); 604 ret = -EINVAL; 605 goto out; 606 } 607 } 608 609 for (i = 0, j = 0; i < tcount; i++) { 610 /* alignment tests are only done with continuous buffers */ 611 if (align_offset != 0) 612 break; 613 614 if (!template[i].np) 615 continue; 616 617 j++; 618 619 if (template[i].iv) 620 memcpy(iv, template[i].iv, MAX_IVLEN); 621 else 622 memset(iv, 0, MAX_IVLEN); 623 624 crypto_aead_clear_flags(tfm, ~0); 625 if (template[i].wk) 626 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 627 if (template[i].klen > MAX_KEYLEN) { 628 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 629 d, j, algo, template[i].klen, MAX_KEYLEN); 630 ret = -EINVAL; 631 goto out; 632 } 633 memcpy(key, template[i].key, template[i].klen); 634 635 ret = crypto_aead_setkey(tfm, key, template[i].klen); 636 if (!ret == template[i].fail) { 637 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 638 d, j, algo, crypto_aead_get_flags(tfm)); 639 goto out; 640 } else if (ret) 641 continue; 642 643 authsize = abs(template[i].rlen - template[i].ilen); 644 645 ret = -EINVAL; 646 sg_init_table(sg, template[i].anp + template[i].np); 647 if (diff_dst) 648 sg_init_table(sgout, template[i].anp + template[i].np); 649 650 ret = -EINVAL; 651 for (k = 0, temp = 0; k < template[i].anp; k++) { 652 if (WARN_ON(offset_in_page(IDX[k]) + 653 template[i].atap[k] > PAGE_SIZE)) 654 goto out; 655 sg_set_buf(&sg[k], 656 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 657 offset_in_page(IDX[k]), 658 template[i].assoc + temp, 659 template[i].atap[k]), 660 template[i].atap[k]); 661 if (diff_dst) 662 sg_set_buf(&sgout[k], 663 axbuf[IDX[k] >> PAGE_SHIFT] + 664 offset_in_page(IDX[k]), 665 template[i].atap[k]); 666 temp += template[i].atap[k]; 667 } 668 669 for (k = 0, temp = 0; k < template[i].np; k++) { 670 if (WARN_ON(offset_in_page(IDX[k]) + 671 template[i].tap[k] > PAGE_SIZE)) 672 goto out; 673 674 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 675 memcpy(q, template[i].input + temp, template[i].tap[k]); 676 sg_set_buf(&sg[template[i].anp + k], 677 q, template[i].tap[k]); 678 679 if (diff_dst) { 680 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 681 offset_in_page(IDX[k]); 682 683 memset(q, 0, template[i].tap[k]); 684 685 sg_set_buf(&sgout[template[i].anp + k], 686 q, template[i].tap[k]); 687 } 688 689 n = template[i].tap[k]; 690 if (k == template[i].np - 1 && enc) 691 n += authsize; 692 if (offset_in_page(q) + n < PAGE_SIZE) 693 q[n] = 0; 694 695 temp += template[i].tap[k]; 696 } 697 698 ret = crypto_aead_setauthsize(tfm, authsize); 699 if (ret) { 700 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 701 d, authsize, j, algo); 702 goto out; 703 } 704 705 if (enc) { 706 if (WARN_ON(sg[template[i].anp + k - 1].offset + 707 sg[template[i].anp + k - 1].length + 708 authsize > PAGE_SIZE)) { 709 ret = -EINVAL; 710 goto out; 711 } 712 713 if (diff_dst) 714 sgout[template[i].anp + k - 1].length += 715 authsize; 716 sg[template[i].anp + k - 1].length += authsize; 717 } 718 719 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 720 template[i].ilen, 721 iv); 722 723 aead_request_set_ad(req, template[i].alen); 724 725 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 726 727 switch (ret) { 728 case 0: 729 if (template[i].novrfy) { 730 /* verification was supposed to fail */ 731 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 732 d, e, j, algo); 733 /* so really, we got a bad message */ 734 ret = -EBADMSG; 735 goto out; 736 } 737 break; 738 case -EINPROGRESS: 739 case -EBUSY: 740 wait_for_completion(&result.completion); 741 reinit_completion(&result.completion); 742 ret = result.err; 743 if (!ret) 744 break; 745 case -EBADMSG: 746 if (template[i].novrfy) 747 /* verification failure was expected */ 748 continue; 749 /* fall through */ 750 default: 751 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 752 d, e, j, algo, -ret); 753 goto out; 754 } 755 756 ret = -EINVAL; 757 for (k = 0, temp = 0; k < template[i].np; k++) { 758 if (diff_dst) 759 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 760 offset_in_page(IDX[k]); 761 else 762 q = xbuf[IDX[k] >> PAGE_SHIFT] + 763 offset_in_page(IDX[k]); 764 765 n = template[i].tap[k]; 766 if (k == template[i].np - 1) 767 n += enc ? authsize : -authsize; 768 769 if (memcmp(q, template[i].result + temp, n)) { 770 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 771 d, j, e, k, algo); 772 hexdump(q, n); 773 goto out; 774 } 775 776 q += n; 777 if (k == template[i].np - 1 && !enc) { 778 if (!diff_dst && 779 memcmp(q, template[i].input + 780 temp + n, authsize)) 781 n = authsize; 782 else 783 n = 0; 784 } else { 785 for (n = 0; offset_in_page(q + n) && q[n]; n++) 786 ; 787 } 788 if (n) { 789 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 790 d, j, e, k, algo, n); 791 hexdump(q, n); 792 goto out; 793 } 794 795 temp += template[i].tap[k]; 796 } 797 } 798 799 ret = 0; 800 801 out: 802 aead_request_free(req); 803 kfree(sg); 804 out_nosg: 805 if (diff_dst) 806 testmgr_free_buf(xoutbuf); 807 out_nooutbuf: 808 testmgr_free_buf(axbuf); 809 out_noaxbuf: 810 testmgr_free_buf(xbuf); 811 out_noxbuf: 812 kfree(key); 813 kfree(iv); 814 return ret; 815 } 816 817 static int test_aead(struct crypto_aead *tfm, int enc, 818 struct aead_testvec *template, unsigned int tcount) 819 { 820 unsigned int alignmask; 821 int ret; 822 823 /* test 'dst == src' case */ 824 ret = __test_aead(tfm, enc, template, tcount, false, 0); 825 if (ret) 826 return ret; 827 828 /* test 'dst != src' case */ 829 ret = __test_aead(tfm, enc, template, tcount, true, 0); 830 if (ret) 831 return ret; 832 833 /* test unaligned buffers, check with one byte offset */ 834 ret = __test_aead(tfm, enc, template, tcount, true, 1); 835 if (ret) 836 return ret; 837 838 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 839 if (alignmask) { 840 /* Check if alignment mask for tfm is correctly set. */ 841 ret = __test_aead(tfm, enc, template, tcount, true, 842 alignmask + 1); 843 if (ret) 844 return ret; 845 } 846 847 return 0; 848 } 849 850 static int test_cipher(struct crypto_cipher *tfm, int enc, 851 struct cipher_testvec *template, unsigned int tcount) 852 { 853 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 854 unsigned int i, j, k; 855 char *q; 856 const char *e; 857 void *data; 858 char *xbuf[XBUFSIZE]; 859 int ret = -ENOMEM; 860 861 if (testmgr_alloc_buf(xbuf)) 862 goto out_nobuf; 863 864 if (enc == ENCRYPT) 865 e = "encryption"; 866 else 867 e = "decryption"; 868 869 j = 0; 870 for (i = 0; i < tcount; i++) { 871 if (template[i].np) 872 continue; 873 874 j++; 875 876 ret = -EINVAL; 877 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 878 goto out; 879 880 data = xbuf[0]; 881 memcpy(data, template[i].input, template[i].ilen); 882 883 crypto_cipher_clear_flags(tfm, ~0); 884 if (template[i].wk) 885 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 886 887 ret = crypto_cipher_setkey(tfm, template[i].key, 888 template[i].klen); 889 if (!ret == template[i].fail) { 890 printk(KERN_ERR "alg: cipher: setkey failed " 891 "on test %d for %s: flags=%x\n", j, 892 algo, crypto_cipher_get_flags(tfm)); 893 goto out; 894 } else if (ret) 895 continue; 896 897 for (k = 0; k < template[i].ilen; 898 k += crypto_cipher_blocksize(tfm)) { 899 if (enc) 900 crypto_cipher_encrypt_one(tfm, data + k, 901 data + k); 902 else 903 crypto_cipher_decrypt_one(tfm, data + k, 904 data + k); 905 } 906 907 q = data; 908 if (memcmp(q, template[i].result, template[i].rlen)) { 909 printk(KERN_ERR "alg: cipher: Test %d failed " 910 "on %s for %s\n", j, e, algo); 911 hexdump(q, template[i].rlen); 912 ret = -EINVAL; 913 goto out; 914 } 915 } 916 917 ret = 0; 918 919 out: 920 testmgr_free_buf(xbuf); 921 out_nobuf: 922 return ret; 923 } 924 925 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 926 struct cipher_testvec *template, unsigned int tcount, 927 const bool diff_dst, const int align_offset) 928 { 929 const char *algo = 930 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 931 unsigned int i, j, k, n, temp; 932 char *q; 933 struct skcipher_request *req; 934 struct scatterlist sg[8]; 935 struct scatterlist sgout[8]; 936 const char *e, *d; 937 struct tcrypt_result result; 938 void *data; 939 char iv[MAX_IVLEN]; 940 char *xbuf[XBUFSIZE]; 941 char *xoutbuf[XBUFSIZE]; 942 int ret = -ENOMEM; 943 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 944 945 if (testmgr_alloc_buf(xbuf)) 946 goto out_nobuf; 947 948 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 949 goto out_nooutbuf; 950 951 if (diff_dst) 952 d = "-ddst"; 953 else 954 d = ""; 955 956 if (enc == ENCRYPT) 957 e = "encryption"; 958 else 959 e = "decryption"; 960 961 init_completion(&result.completion); 962 963 req = skcipher_request_alloc(tfm, GFP_KERNEL); 964 if (!req) { 965 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 966 d, algo); 967 goto out; 968 } 969 970 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 971 tcrypt_complete, &result); 972 973 j = 0; 974 for (i = 0; i < tcount; i++) { 975 if (template[i].np && !template[i].also_non_np) 976 continue; 977 978 if (template[i].iv) 979 memcpy(iv, template[i].iv, ivsize); 980 else 981 memset(iv, 0, MAX_IVLEN); 982 983 j++; 984 ret = -EINVAL; 985 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE)) 986 goto out; 987 988 data = xbuf[0]; 989 data += align_offset; 990 memcpy(data, template[i].input, template[i].ilen); 991 992 crypto_skcipher_clear_flags(tfm, ~0); 993 if (template[i].wk) 994 crypto_skcipher_set_flags(tfm, 995 CRYPTO_TFM_REQ_WEAK_KEY); 996 997 ret = crypto_skcipher_setkey(tfm, template[i].key, 998 template[i].klen); 999 if (!ret == template[i].fail) { 1000 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 1001 d, j, algo, crypto_skcipher_get_flags(tfm)); 1002 goto out; 1003 } else if (ret) 1004 continue; 1005 1006 sg_init_one(&sg[0], data, template[i].ilen); 1007 if (diff_dst) { 1008 data = xoutbuf[0]; 1009 data += align_offset; 1010 sg_init_one(&sgout[0], data, template[i].ilen); 1011 } 1012 1013 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1014 template[i].ilen, iv); 1015 ret = enc ? crypto_skcipher_encrypt(req) : 1016 crypto_skcipher_decrypt(req); 1017 1018 switch (ret) { 1019 case 0: 1020 break; 1021 case -EINPROGRESS: 1022 case -EBUSY: 1023 wait_for_completion(&result.completion); 1024 reinit_completion(&result.completion); 1025 ret = result.err; 1026 if (!ret) 1027 break; 1028 /* fall through */ 1029 default: 1030 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 1031 d, e, j, algo, -ret); 1032 goto out; 1033 } 1034 1035 q = data; 1036 if (memcmp(q, template[i].result, template[i].rlen)) { 1037 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n", 1038 d, j, e, algo); 1039 hexdump(q, template[i].rlen); 1040 ret = -EINVAL; 1041 goto out; 1042 } 1043 1044 if (template[i].iv_out && 1045 memcmp(iv, template[i].iv_out, 1046 crypto_skcipher_ivsize(tfm))) { 1047 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n", 1048 d, j, e, algo); 1049 hexdump(iv, crypto_skcipher_ivsize(tfm)); 1050 ret = -EINVAL; 1051 goto out; 1052 } 1053 } 1054 1055 j = 0; 1056 for (i = 0; i < tcount; i++) { 1057 /* alignment tests are only done with continuous buffers */ 1058 if (align_offset != 0) 1059 break; 1060 1061 if (!template[i].np) 1062 continue; 1063 1064 if (template[i].iv) 1065 memcpy(iv, template[i].iv, ivsize); 1066 else 1067 memset(iv, 0, MAX_IVLEN); 1068 1069 j++; 1070 crypto_skcipher_clear_flags(tfm, ~0); 1071 if (template[i].wk) 1072 crypto_skcipher_set_flags(tfm, 1073 CRYPTO_TFM_REQ_WEAK_KEY); 1074 1075 ret = crypto_skcipher_setkey(tfm, template[i].key, 1076 template[i].klen); 1077 if (!ret == template[i].fail) { 1078 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1079 d, j, algo, crypto_skcipher_get_flags(tfm)); 1080 goto out; 1081 } else if (ret) 1082 continue; 1083 1084 temp = 0; 1085 ret = -EINVAL; 1086 sg_init_table(sg, template[i].np); 1087 if (diff_dst) 1088 sg_init_table(sgout, template[i].np); 1089 for (k = 0; k < template[i].np; k++) { 1090 if (WARN_ON(offset_in_page(IDX[k]) + 1091 template[i].tap[k] > PAGE_SIZE)) 1092 goto out; 1093 1094 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 1095 1096 memcpy(q, template[i].input + temp, template[i].tap[k]); 1097 1098 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 1099 q[template[i].tap[k]] = 0; 1100 1101 sg_set_buf(&sg[k], q, template[i].tap[k]); 1102 if (diff_dst) { 1103 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1104 offset_in_page(IDX[k]); 1105 1106 sg_set_buf(&sgout[k], q, template[i].tap[k]); 1107 1108 memset(q, 0, template[i].tap[k]); 1109 if (offset_in_page(q) + 1110 template[i].tap[k] < PAGE_SIZE) 1111 q[template[i].tap[k]] = 0; 1112 } 1113 1114 temp += template[i].tap[k]; 1115 } 1116 1117 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1118 template[i].ilen, iv); 1119 1120 ret = enc ? crypto_skcipher_encrypt(req) : 1121 crypto_skcipher_decrypt(req); 1122 1123 switch (ret) { 1124 case 0: 1125 break; 1126 case -EINPROGRESS: 1127 case -EBUSY: 1128 wait_for_completion(&result.completion); 1129 reinit_completion(&result.completion); 1130 ret = result.err; 1131 if (!ret) 1132 break; 1133 /* fall through */ 1134 default: 1135 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1136 d, e, j, algo, -ret); 1137 goto out; 1138 } 1139 1140 temp = 0; 1141 ret = -EINVAL; 1142 for (k = 0; k < template[i].np; k++) { 1143 if (diff_dst) 1144 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1145 offset_in_page(IDX[k]); 1146 else 1147 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1148 offset_in_page(IDX[k]); 1149 1150 if (memcmp(q, template[i].result + temp, 1151 template[i].tap[k])) { 1152 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1153 d, j, e, k, algo); 1154 hexdump(q, template[i].tap[k]); 1155 goto out; 1156 } 1157 1158 q += template[i].tap[k]; 1159 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1160 ; 1161 if (n) { 1162 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1163 d, j, e, k, algo, n); 1164 hexdump(q, n); 1165 goto out; 1166 } 1167 temp += template[i].tap[k]; 1168 } 1169 } 1170 1171 ret = 0; 1172 1173 out: 1174 skcipher_request_free(req); 1175 if (diff_dst) 1176 testmgr_free_buf(xoutbuf); 1177 out_nooutbuf: 1178 testmgr_free_buf(xbuf); 1179 out_nobuf: 1180 return ret; 1181 } 1182 1183 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 1184 struct cipher_testvec *template, unsigned int tcount) 1185 { 1186 unsigned int alignmask; 1187 int ret; 1188 1189 /* test 'dst == src' case */ 1190 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1191 if (ret) 1192 return ret; 1193 1194 /* test 'dst != src' case */ 1195 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1196 if (ret) 1197 return ret; 1198 1199 /* test unaligned buffers, check with one byte offset */ 1200 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1201 if (ret) 1202 return ret; 1203 1204 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1205 if (alignmask) { 1206 /* Check if alignment mask for tfm is correctly set. */ 1207 ret = __test_skcipher(tfm, enc, template, tcount, true, 1208 alignmask + 1); 1209 if (ret) 1210 return ret; 1211 } 1212 1213 return 0; 1214 } 1215 1216 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 1217 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1218 { 1219 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1220 unsigned int i; 1221 char result[COMP_BUF_SIZE]; 1222 int ret; 1223 1224 for (i = 0; i < ctcount; i++) { 1225 int ilen; 1226 unsigned int dlen = COMP_BUF_SIZE; 1227 1228 memset(result, 0, sizeof (result)); 1229 1230 ilen = ctemplate[i].inlen; 1231 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1232 ilen, result, &dlen); 1233 if (ret) { 1234 printk(KERN_ERR "alg: comp: compression failed " 1235 "on test %d for %s: ret=%d\n", i + 1, algo, 1236 -ret); 1237 goto out; 1238 } 1239 1240 if (dlen != ctemplate[i].outlen) { 1241 printk(KERN_ERR "alg: comp: Compression test %d " 1242 "failed for %s: output len = %d\n", i + 1, algo, 1243 dlen); 1244 ret = -EINVAL; 1245 goto out; 1246 } 1247 1248 if (memcmp(result, ctemplate[i].output, dlen)) { 1249 printk(KERN_ERR "alg: comp: Compression test %d " 1250 "failed for %s\n", i + 1, algo); 1251 hexdump(result, dlen); 1252 ret = -EINVAL; 1253 goto out; 1254 } 1255 } 1256 1257 for (i = 0; i < dtcount; i++) { 1258 int ilen; 1259 unsigned int dlen = COMP_BUF_SIZE; 1260 1261 memset(result, 0, sizeof (result)); 1262 1263 ilen = dtemplate[i].inlen; 1264 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1265 ilen, result, &dlen); 1266 if (ret) { 1267 printk(KERN_ERR "alg: comp: decompression failed " 1268 "on test %d for %s: ret=%d\n", i + 1, algo, 1269 -ret); 1270 goto out; 1271 } 1272 1273 if (dlen != dtemplate[i].outlen) { 1274 printk(KERN_ERR "alg: comp: Decompression test %d " 1275 "failed for %s: output len = %d\n", i + 1, algo, 1276 dlen); 1277 ret = -EINVAL; 1278 goto out; 1279 } 1280 1281 if (memcmp(result, dtemplate[i].output, dlen)) { 1282 printk(KERN_ERR "alg: comp: Decompression test %d " 1283 "failed for %s\n", i + 1, algo); 1284 hexdump(result, dlen); 1285 ret = -EINVAL; 1286 goto out; 1287 } 1288 } 1289 1290 ret = 0; 1291 1292 out: 1293 return ret; 1294 } 1295 1296 static int test_pcomp(struct crypto_pcomp *tfm, 1297 struct pcomp_testvec *ctemplate, 1298 struct pcomp_testvec *dtemplate, int ctcount, 1299 int dtcount) 1300 { 1301 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm)); 1302 unsigned int i; 1303 char result[COMP_BUF_SIZE]; 1304 int res; 1305 1306 for (i = 0; i < ctcount; i++) { 1307 struct comp_request req; 1308 unsigned int produced = 0; 1309 1310 res = crypto_compress_setup(tfm, ctemplate[i].params, 1311 ctemplate[i].paramsize); 1312 if (res) { 1313 pr_err("alg: pcomp: compression setup failed on test " 1314 "%d for %s: error=%d\n", i + 1, algo, res); 1315 return res; 1316 } 1317 1318 res = crypto_compress_init(tfm); 1319 if (res) { 1320 pr_err("alg: pcomp: compression init failed on test " 1321 "%d for %s: error=%d\n", i + 1, algo, res); 1322 return res; 1323 } 1324 1325 memset(result, 0, sizeof(result)); 1326 1327 req.next_in = ctemplate[i].input; 1328 req.avail_in = ctemplate[i].inlen / 2; 1329 req.next_out = result; 1330 req.avail_out = ctemplate[i].outlen / 2; 1331 1332 res = crypto_compress_update(tfm, &req); 1333 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1334 pr_err("alg: pcomp: compression update failed on test " 1335 "%d for %s: error=%d\n", i + 1, algo, res); 1336 return res; 1337 } 1338 if (res > 0) 1339 produced += res; 1340 1341 /* Add remaining input data */ 1342 req.avail_in += (ctemplate[i].inlen + 1) / 2; 1343 1344 res = crypto_compress_update(tfm, &req); 1345 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1346 pr_err("alg: pcomp: compression update failed on test " 1347 "%d for %s: error=%d\n", i + 1, algo, res); 1348 return res; 1349 } 1350 if (res > 0) 1351 produced += res; 1352 1353 /* Provide remaining output space */ 1354 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2; 1355 1356 res = crypto_compress_final(tfm, &req); 1357 if (res < 0) { 1358 pr_err("alg: pcomp: compression final failed on test " 1359 "%d for %s: error=%d\n", i + 1, algo, res); 1360 return res; 1361 } 1362 produced += res; 1363 1364 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) { 1365 pr_err("alg: comp: Compression test %d failed for %s: " 1366 "output len = %d (expected %d)\n", i + 1, algo, 1367 COMP_BUF_SIZE - req.avail_out, 1368 ctemplate[i].outlen); 1369 return -EINVAL; 1370 } 1371 1372 if (produced != ctemplate[i].outlen) { 1373 pr_err("alg: comp: Compression test %d failed for %s: " 1374 "returned len = %u (expected %d)\n", i + 1, 1375 algo, produced, ctemplate[i].outlen); 1376 return -EINVAL; 1377 } 1378 1379 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) { 1380 pr_err("alg: pcomp: Compression test %d failed for " 1381 "%s\n", i + 1, algo); 1382 hexdump(result, ctemplate[i].outlen); 1383 return -EINVAL; 1384 } 1385 } 1386 1387 for (i = 0; i < dtcount; i++) { 1388 struct comp_request req; 1389 unsigned int produced = 0; 1390 1391 res = crypto_decompress_setup(tfm, dtemplate[i].params, 1392 dtemplate[i].paramsize); 1393 if (res) { 1394 pr_err("alg: pcomp: decompression setup failed on " 1395 "test %d for %s: error=%d\n", i + 1, algo, res); 1396 return res; 1397 } 1398 1399 res = crypto_decompress_init(tfm); 1400 if (res) { 1401 pr_err("alg: pcomp: decompression init failed on test " 1402 "%d for %s: error=%d\n", i + 1, algo, res); 1403 return res; 1404 } 1405 1406 memset(result, 0, sizeof(result)); 1407 1408 req.next_in = dtemplate[i].input; 1409 req.avail_in = dtemplate[i].inlen / 2; 1410 req.next_out = result; 1411 req.avail_out = dtemplate[i].outlen / 2; 1412 1413 res = crypto_decompress_update(tfm, &req); 1414 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1415 pr_err("alg: pcomp: decompression update failed on " 1416 "test %d for %s: error=%d\n", i + 1, algo, res); 1417 return res; 1418 } 1419 if (res > 0) 1420 produced += res; 1421 1422 /* Add remaining input data */ 1423 req.avail_in += (dtemplate[i].inlen + 1) / 2; 1424 1425 res = crypto_decompress_update(tfm, &req); 1426 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1427 pr_err("alg: pcomp: decompression update failed on " 1428 "test %d for %s: error=%d\n", i + 1, algo, res); 1429 return res; 1430 } 1431 if (res > 0) 1432 produced += res; 1433 1434 /* Provide remaining output space */ 1435 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2; 1436 1437 res = crypto_decompress_final(tfm, &req); 1438 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1439 pr_err("alg: pcomp: decompression final failed on " 1440 "test %d for %s: error=%d\n", i + 1, algo, res); 1441 return res; 1442 } 1443 if (res > 0) 1444 produced += res; 1445 1446 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) { 1447 pr_err("alg: comp: Decompression test %d failed for " 1448 "%s: output len = %d (expected %d)\n", i + 1, 1449 algo, COMP_BUF_SIZE - req.avail_out, 1450 dtemplate[i].outlen); 1451 return -EINVAL; 1452 } 1453 1454 if (produced != dtemplate[i].outlen) { 1455 pr_err("alg: comp: Decompression test %d failed for " 1456 "%s: returned len = %u (expected %d)\n", i + 1, 1457 algo, produced, dtemplate[i].outlen); 1458 return -EINVAL; 1459 } 1460 1461 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) { 1462 pr_err("alg: pcomp: Decompression test %d failed for " 1463 "%s\n", i + 1, algo); 1464 hexdump(result, dtemplate[i].outlen); 1465 return -EINVAL; 1466 } 1467 } 1468 1469 return 0; 1470 } 1471 1472 1473 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1474 unsigned int tcount) 1475 { 1476 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1477 int err = 0, i, j, seedsize; 1478 u8 *seed; 1479 char result[32]; 1480 1481 seedsize = crypto_rng_seedsize(tfm); 1482 1483 seed = kmalloc(seedsize, GFP_KERNEL); 1484 if (!seed) { 1485 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1486 "for %s\n", algo); 1487 return -ENOMEM; 1488 } 1489 1490 for (i = 0; i < tcount; i++) { 1491 memset(result, 0, 32); 1492 1493 memcpy(seed, template[i].v, template[i].vlen); 1494 memcpy(seed + template[i].vlen, template[i].key, 1495 template[i].klen); 1496 memcpy(seed + template[i].vlen + template[i].klen, 1497 template[i].dt, template[i].dtlen); 1498 1499 err = crypto_rng_reset(tfm, seed, seedsize); 1500 if (err) { 1501 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1502 "for %s\n", algo); 1503 goto out; 1504 } 1505 1506 for (j = 0; j < template[i].loops; j++) { 1507 err = crypto_rng_get_bytes(tfm, result, 1508 template[i].rlen); 1509 if (err < 0) { 1510 printk(KERN_ERR "alg: cprng: Failed to obtain " 1511 "the correct amount of random data for " 1512 "%s (requested %d)\n", algo, 1513 template[i].rlen); 1514 goto out; 1515 } 1516 } 1517 1518 err = memcmp(result, template[i].result, 1519 template[i].rlen); 1520 if (err) { 1521 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1522 i, algo); 1523 hexdump(result, template[i].rlen); 1524 err = -EINVAL; 1525 goto out; 1526 } 1527 } 1528 1529 out: 1530 kfree(seed); 1531 return err; 1532 } 1533 1534 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1535 u32 type, u32 mask) 1536 { 1537 struct crypto_aead *tfm; 1538 int err = 0; 1539 1540 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask); 1541 if (IS_ERR(tfm)) { 1542 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1543 "%ld\n", driver, PTR_ERR(tfm)); 1544 return PTR_ERR(tfm); 1545 } 1546 1547 if (desc->suite.aead.enc.vecs) { 1548 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1549 desc->suite.aead.enc.count); 1550 if (err) 1551 goto out; 1552 } 1553 1554 if (!err && desc->suite.aead.dec.vecs) 1555 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1556 desc->suite.aead.dec.count); 1557 1558 out: 1559 crypto_free_aead(tfm); 1560 return err; 1561 } 1562 1563 static int alg_test_cipher(const struct alg_test_desc *desc, 1564 const char *driver, u32 type, u32 mask) 1565 { 1566 struct crypto_cipher *tfm; 1567 int err = 0; 1568 1569 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1570 if (IS_ERR(tfm)) { 1571 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1572 "%s: %ld\n", driver, PTR_ERR(tfm)); 1573 return PTR_ERR(tfm); 1574 } 1575 1576 if (desc->suite.cipher.enc.vecs) { 1577 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1578 desc->suite.cipher.enc.count); 1579 if (err) 1580 goto out; 1581 } 1582 1583 if (desc->suite.cipher.dec.vecs) 1584 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1585 desc->suite.cipher.dec.count); 1586 1587 out: 1588 crypto_free_cipher(tfm); 1589 return err; 1590 } 1591 1592 static int alg_test_skcipher(const struct alg_test_desc *desc, 1593 const char *driver, u32 type, u32 mask) 1594 { 1595 struct crypto_skcipher *tfm; 1596 int err = 0; 1597 1598 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1599 if (IS_ERR(tfm)) { 1600 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1601 "%s: %ld\n", driver, PTR_ERR(tfm)); 1602 return PTR_ERR(tfm); 1603 } 1604 1605 if (desc->suite.cipher.enc.vecs) { 1606 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1607 desc->suite.cipher.enc.count); 1608 if (err) 1609 goto out; 1610 } 1611 1612 if (desc->suite.cipher.dec.vecs) 1613 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1614 desc->suite.cipher.dec.count); 1615 1616 out: 1617 crypto_free_skcipher(tfm); 1618 return err; 1619 } 1620 1621 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1622 u32 type, u32 mask) 1623 { 1624 struct crypto_comp *tfm; 1625 int err; 1626 1627 tfm = crypto_alloc_comp(driver, type, mask); 1628 if (IS_ERR(tfm)) { 1629 printk(KERN_ERR "alg: comp: Failed to load transform for %s: " 1630 "%ld\n", driver, PTR_ERR(tfm)); 1631 return PTR_ERR(tfm); 1632 } 1633 1634 err = test_comp(tfm, desc->suite.comp.comp.vecs, 1635 desc->suite.comp.decomp.vecs, 1636 desc->suite.comp.comp.count, 1637 desc->suite.comp.decomp.count); 1638 1639 crypto_free_comp(tfm); 1640 return err; 1641 } 1642 1643 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver, 1644 u32 type, u32 mask) 1645 { 1646 struct crypto_pcomp *tfm; 1647 int err; 1648 1649 tfm = crypto_alloc_pcomp(driver, type, mask); 1650 if (IS_ERR(tfm)) { 1651 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n", 1652 driver, PTR_ERR(tfm)); 1653 return PTR_ERR(tfm); 1654 } 1655 1656 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs, 1657 desc->suite.pcomp.decomp.vecs, 1658 desc->suite.pcomp.comp.count, 1659 desc->suite.pcomp.decomp.count); 1660 1661 crypto_free_pcomp(tfm); 1662 return err; 1663 } 1664 1665 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1666 u32 type, u32 mask) 1667 { 1668 struct crypto_ahash *tfm; 1669 int err; 1670 1671 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask); 1672 if (IS_ERR(tfm)) { 1673 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1674 "%ld\n", driver, PTR_ERR(tfm)); 1675 return PTR_ERR(tfm); 1676 } 1677 1678 err = test_hash(tfm, desc->suite.hash.vecs, 1679 desc->suite.hash.count, true); 1680 if (!err) 1681 err = test_hash(tfm, desc->suite.hash.vecs, 1682 desc->suite.hash.count, false); 1683 1684 crypto_free_ahash(tfm); 1685 return err; 1686 } 1687 1688 static int alg_test_crc32c(const struct alg_test_desc *desc, 1689 const char *driver, u32 type, u32 mask) 1690 { 1691 struct crypto_shash *tfm; 1692 u32 val; 1693 int err; 1694 1695 err = alg_test_hash(desc, driver, type, mask); 1696 if (err) 1697 goto out; 1698 1699 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask); 1700 if (IS_ERR(tfm)) { 1701 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1702 "%ld\n", driver, PTR_ERR(tfm)); 1703 err = PTR_ERR(tfm); 1704 goto out; 1705 } 1706 1707 do { 1708 SHASH_DESC_ON_STACK(shash, tfm); 1709 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1710 1711 shash->tfm = tfm; 1712 shash->flags = 0; 1713 1714 *ctx = le32_to_cpu(420553207); 1715 err = crypto_shash_final(shash, (u8 *)&val); 1716 if (err) { 1717 printk(KERN_ERR "alg: crc32c: Operation failed for " 1718 "%s: %d\n", driver, err); 1719 break; 1720 } 1721 1722 if (val != ~420553207) { 1723 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1724 "%d\n", driver, val); 1725 err = -EINVAL; 1726 } 1727 } while (0); 1728 1729 crypto_free_shash(tfm); 1730 1731 out: 1732 return err; 1733 } 1734 1735 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1736 u32 type, u32 mask) 1737 { 1738 struct crypto_rng *rng; 1739 int err; 1740 1741 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask); 1742 if (IS_ERR(rng)) { 1743 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1744 "%ld\n", driver, PTR_ERR(rng)); 1745 return PTR_ERR(rng); 1746 } 1747 1748 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1749 1750 crypto_free_rng(rng); 1751 1752 return err; 1753 } 1754 1755 1756 static int drbg_cavs_test(struct drbg_testvec *test, int pr, 1757 const char *driver, u32 type, u32 mask) 1758 { 1759 int ret = -EAGAIN; 1760 struct crypto_rng *drng; 1761 struct drbg_test_data test_data; 1762 struct drbg_string addtl, pers, testentropy; 1763 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 1764 1765 if (!buf) 1766 return -ENOMEM; 1767 1768 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask); 1769 if (IS_ERR(drng)) { 1770 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 1771 "%s\n", driver); 1772 kzfree(buf); 1773 return -ENOMEM; 1774 } 1775 1776 test_data.testentropy = &testentropy; 1777 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 1778 drbg_string_fill(&pers, test->pers, test->perslen); 1779 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 1780 if (ret) { 1781 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 1782 goto outbuf; 1783 } 1784 1785 drbg_string_fill(&addtl, test->addtla, test->addtllen); 1786 if (pr) { 1787 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 1788 ret = crypto_drbg_get_bytes_addtl_test(drng, 1789 buf, test->expectedlen, &addtl, &test_data); 1790 } else { 1791 ret = crypto_drbg_get_bytes_addtl(drng, 1792 buf, test->expectedlen, &addtl); 1793 } 1794 if (ret < 0) { 1795 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1796 "driver %s\n", driver); 1797 goto outbuf; 1798 } 1799 1800 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 1801 if (pr) { 1802 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 1803 ret = crypto_drbg_get_bytes_addtl_test(drng, 1804 buf, test->expectedlen, &addtl, &test_data); 1805 } else { 1806 ret = crypto_drbg_get_bytes_addtl(drng, 1807 buf, test->expectedlen, &addtl); 1808 } 1809 if (ret < 0) { 1810 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1811 "driver %s\n", driver); 1812 goto outbuf; 1813 } 1814 1815 ret = memcmp(test->expected, buf, test->expectedlen); 1816 1817 outbuf: 1818 crypto_free_rng(drng); 1819 kzfree(buf); 1820 return ret; 1821 } 1822 1823 1824 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 1825 u32 type, u32 mask) 1826 { 1827 int err = 0; 1828 int pr = 0; 1829 int i = 0; 1830 struct drbg_testvec *template = desc->suite.drbg.vecs; 1831 unsigned int tcount = desc->suite.drbg.count; 1832 1833 if (0 == memcmp(driver, "drbg_pr_", 8)) 1834 pr = 1; 1835 1836 for (i = 0; i < tcount; i++) { 1837 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 1838 if (err) { 1839 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 1840 i, driver); 1841 err = -EINVAL; 1842 break; 1843 } 1844 } 1845 return err; 1846 1847 } 1848 1849 static int do_test_rsa(struct crypto_akcipher *tfm, 1850 struct akcipher_testvec *vecs) 1851 { 1852 struct akcipher_request *req; 1853 void *outbuf_enc = NULL; 1854 void *outbuf_dec = NULL; 1855 struct tcrypt_result result; 1856 unsigned int out_len_max, out_len = 0; 1857 int err = -ENOMEM; 1858 struct scatterlist src, dst, src_tab[2]; 1859 1860 req = akcipher_request_alloc(tfm, GFP_KERNEL); 1861 if (!req) 1862 return err; 1863 1864 init_completion(&result.completion); 1865 1866 if (vecs->public_key_vec) 1867 err = crypto_akcipher_set_pub_key(tfm, vecs->key, 1868 vecs->key_len); 1869 else 1870 err = crypto_akcipher_set_priv_key(tfm, vecs->key, 1871 vecs->key_len); 1872 if (err) 1873 goto free_req; 1874 1875 out_len_max = crypto_akcipher_maxsize(tfm); 1876 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 1877 if (!outbuf_enc) 1878 goto free_req; 1879 1880 sg_init_table(src_tab, 2); 1881 sg_set_buf(&src_tab[0], vecs->m, 8); 1882 sg_set_buf(&src_tab[1], vecs->m + 8, vecs->m_size - 8); 1883 sg_init_one(&dst, outbuf_enc, out_len_max); 1884 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size, 1885 out_len_max); 1886 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1887 tcrypt_complete, &result); 1888 1889 /* Run RSA encrypt - c = m^e mod n;*/ 1890 err = wait_async_op(&result, crypto_akcipher_encrypt(req)); 1891 if (err) { 1892 pr_err("alg: rsa: encrypt test failed. err %d\n", err); 1893 goto free_all; 1894 } 1895 if (req->dst_len != vecs->c_size) { 1896 pr_err("alg: rsa: encrypt test failed. Invalid output len\n"); 1897 err = -EINVAL; 1898 goto free_all; 1899 } 1900 /* verify that encrypted message is equal to expected */ 1901 if (memcmp(vecs->c, sg_virt(req->dst), vecs->c_size)) { 1902 pr_err("alg: rsa: encrypt test failed. Invalid output\n"); 1903 err = -EINVAL; 1904 goto free_all; 1905 } 1906 /* Don't invoke decrypt for vectors with public key */ 1907 if (vecs->public_key_vec) { 1908 err = 0; 1909 goto free_all; 1910 } 1911 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 1912 if (!outbuf_dec) { 1913 err = -ENOMEM; 1914 goto free_all; 1915 } 1916 sg_init_one(&src, vecs->c, vecs->c_size); 1917 sg_init_one(&dst, outbuf_dec, out_len_max); 1918 init_completion(&result.completion); 1919 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max); 1920 1921 /* Run RSA decrypt - m = c^d mod n;*/ 1922 err = wait_async_op(&result, crypto_akcipher_decrypt(req)); 1923 if (err) { 1924 pr_err("alg: rsa: decrypt test failed. err %d\n", err); 1925 goto free_all; 1926 } 1927 out_len = req->dst_len; 1928 if (out_len != vecs->m_size) { 1929 pr_err("alg: rsa: decrypt test failed. Invalid output len\n"); 1930 err = -EINVAL; 1931 goto free_all; 1932 } 1933 /* verify that decrypted message is equal to the original msg */ 1934 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) { 1935 pr_err("alg: rsa: decrypt test failed. Invalid output\n"); 1936 err = -EINVAL; 1937 } 1938 free_all: 1939 kfree(outbuf_dec); 1940 kfree(outbuf_enc); 1941 free_req: 1942 akcipher_request_free(req); 1943 return err; 1944 } 1945 1946 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs, 1947 unsigned int tcount) 1948 { 1949 int ret, i; 1950 1951 for (i = 0; i < tcount; i++) { 1952 ret = do_test_rsa(tfm, vecs++); 1953 if (ret) { 1954 pr_err("alg: rsa: test failed on vector %d, err=%d\n", 1955 i + 1, ret); 1956 return ret; 1957 } 1958 } 1959 return 0; 1960 } 1961 1962 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 1963 struct akcipher_testvec *vecs, unsigned int tcount) 1964 { 1965 if (strncmp(alg, "rsa", 3) == 0) 1966 return test_rsa(tfm, vecs, tcount); 1967 1968 return 0; 1969 } 1970 1971 static int alg_test_akcipher(const struct alg_test_desc *desc, 1972 const char *driver, u32 type, u32 mask) 1973 { 1974 struct crypto_akcipher *tfm; 1975 int err = 0; 1976 1977 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1978 if (IS_ERR(tfm)) { 1979 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 1980 driver, PTR_ERR(tfm)); 1981 return PTR_ERR(tfm); 1982 } 1983 if (desc->suite.akcipher.vecs) 1984 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 1985 desc->suite.akcipher.count); 1986 1987 crypto_free_akcipher(tfm); 1988 return err; 1989 } 1990 1991 static int alg_test_null(const struct alg_test_desc *desc, 1992 const char *driver, u32 type, u32 mask) 1993 { 1994 return 0; 1995 } 1996 1997 /* Please keep this list sorted by algorithm name. */ 1998 static const struct alg_test_desc alg_test_descs[] = { 1999 { 2000 .alg = "__cbc-cast5-avx", 2001 .test = alg_test_null, 2002 }, { 2003 .alg = "__cbc-cast6-avx", 2004 .test = alg_test_null, 2005 }, { 2006 .alg = "__cbc-serpent-avx", 2007 .test = alg_test_null, 2008 }, { 2009 .alg = "__cbc-serpent-avx2", 2010 .test = alg_test_null, 2011 }, { 2012 .alg = "__cbc-serpent-sse2", 2013 .test = alg_test_null, 2014 }, { 2015 .alg = "__cbc-twofish-avx", 2016 .test = alg_test_null, 2017 }, { 2018 .alg = "__driver-cbc-aes-aesni", 2019 .test = alg_test_null, 2020 .fips_allowed = 1, 2021 }, { 2022 .alg = "__driver-cbc-camellia-aesni", 2023 .test = alg_test_null, 2024 }, { 2025 .alg = "__driver-cbc-camellia-aesni-avx2", 2026 .test = alg_test_null, 2027 }, { 2028 .alg = "__driver-cbc-cast5-avx", 2029 .test = alg_test_null, 2030 }, { 2031 .alg = "__driver-cbc-cast6-avx", 2032 .test = alg_test_null, 2033 }, { 2034 .alg = "__driver-cbc-serpent-avx", 2035 .test = alg_test_null, 2036 }, { 2037 .alg = "__driver-cbc-serpent-avx2", 2038 .test = alg_test_null, 2039 }, { 2040 .alg = "__driver-cbc-serpent-sse2", 2041 .test = alg_test_null, 2042 }, { 2043 .alg = "__driver-cbc-twofish-avx", 2044 .test = alg_test_null, 2045 }, { 2046 .alg = "__driver-ecb-aes-aesni", 2047 .test = alg_test_null, 2048 .fips_allowed = 1, 2049 }, { 2050 .alg = "__driver-ecb-camellia-aesni", 2051 .test = alg_test_null, 2052 }, { 2053 .alg = "__driver-ecb-camellia-aesni-avx2", 2054 .test = alg_test_null, 2055 }, { 2056 .alg = "__driver-ecb-cast5-avx", 2057 .test = alg_test_null, 2058 }, { 2059 .alg = "__driver-ecb-cast6-avx", 2060 .test = alg_test_null, 2061 }, { 2062 .alg = "__driver-ecb-serpent-avx", 2063 .test = alg_test_null, 2064 }, { 2065 .alg = "__driver-ecb-serpent-avx2", 2066 .test = alg_test_null, 2067 }, { 2068 .alg = "__driver-ecb-serpent-sse2", 2069 .test = alg_test_null, 2070 }, { 2071 .alg = "__driver-ecb-twofish-avx", 2072 .test = alg_test_null, 2073 }, { 2074 .alg = "__driver-gcm-aes-aesni", 2075 .test = alg_test_null, 2076 .fips_allowed = 1, 2077 }, { 2078 .alg = "__ghash-pclmulqdqni", 2079 .test = alg_test_null, 2080 .fips_allowed = 1, 2081 }, { 2082 .alg = "ansi_cprng", 2083 .test = alg_test_cprng, 2084 .fips_allowed = 1, 2085 .suite = { 2086 .cprng = { 2087 .vecs = ansi_cprng_aes_tv_template, 2088 .count = ANSI_CPRNG_AES_TEST_VECTORS 2089 } 2090 } 2091 }, { 2092 .alg = "authenc(hmac(md5),ecb(cipher_null))", 2093 .test = alg_test_aead, 2094 .suite = { 2095 .aead = { 2096 .enc = { 2097 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template, 2098 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS 2099 }, 2100 .dec = { 2101 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template, 2102 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS 2103 } 2104 } 2105 } 2106 }, { 2107 .alg = "authenc(hmac(sha1),cbc(aes))", 2108 .test = alg_test_aead, 2109 .suite = { 2110 .aead = { 2111 .enc = { 2112 .vecs = 2113 hmac_sha1_aes_cbc_enc_tv_temp, 2114 .count = 2115 HMAC_SHA1_AES_CBC_ENC_TEST_VEC 2116 } 2117 } 2118 } 2119 }, { 2120 .alg = "authenc(hmac(sha1),cbc(des))", 2121 .test = alg_test_aead, 2122 .suite = { 2123 .aead = { 2124 .enc = { 2125 .vecs = 2126 hmac_sha1_des_cbc_enc_tv_temp, 2127 .count = 2128 HMAC_SHA1_DES_CBC_ENC_TEST_VEC 2129 } 2130 } 2131 } 2132 }, { 2133 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 2134 .test = alg_test_aead, 2135 .suite = { 2136 .aead = { 2137 .enc = { 2138 .vecs = 2139 hmac_sha1_des3_ede_cbc_enc_tv_temp, 2140 .count = 2141 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC 2142 } 2143 } 2144 } 2145 }, { 2146 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 2147 .test = alg_test_aead, 2148 .suite = { 2149 .aead = { 2150 .enc = { 2151 .vecs = 2152 hmac_sha1_ecb_cipher_null_enc_tv_temp, 2153 .count = 2154 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC 2155 }, 2156 .dec = { 2157 .vecs = 2158 hmac_sha1_ecb_cipher_null_dec_tv_temp, 2159 .count = 2160 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC 2161 } 2162 } 2163 } 2164 }, { 2165 .alg = "authenc(hmac(sha224),cbc(des))", 2166 .test = alg_test_aead, 2167 .suite = { 2168 .aead = { 2169 .enc = { 2170 .vecs = 2171 hmac_sha224_des_cbc_enc_tv_temp, 2172 .count = 2173 HMAC_SHA224_DES_CBC_ENC_TEST_VEC 2174 } 2175 } 2176 } 2177 }, { 2178 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 2179 .test = alg_test_aead, 2180 .suite = { 2181 .aead = { 2182 .enc = { 2183 .vecs = 2184 hmac_sha224_des3_ede_cbc_enc_tv_temp, 2185 .count = 2186 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC 2187 } 2188 } 2189 } 2190 }, { 2191 .alg = "authenc(hmac(sha256),cbc(aes))", 2192 .test = alg_test_aead, 2193 .suite = { 2194 .aead = { 2195 .enc = { 2196 .vecs = 2197 hmac_sha256_aes_cbc_enc_tv_temp, 2198 .count = 2199 HMAC_SHA256_AES_CBC_ENC_TEST_VEC 2200 } 2201 } 2202 } 2203 }, { 2204 .alg = "authenc(hmac(sha256),cbc(des))", 2205 .test = alg_test_aead, 2206 .suite = { 2207 .aead = { 2208 .enc = { 2209 .vecs = 2210 hmac_sha256_des_cbc_enc_tv_temp, 2211 .count = 2212 HMAC_SHA256_DES_CBC_ENC_TEST_VEC 2213 } 2214 } 2215 } 2216 }, { 2217 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 2218 .test = alg_test_aead, 2219 .suite = { 2220 .aead = { 2221 .enc = { 2222 .vecs = 2223 hmac_sha256_des3_ede_cbc_enc_tv_temp, 2224 .count = 2225 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC 2226 } 2227 } 2228 } 2229 }, { 2230 .alg = "authenc(hmac(sha384),cbc(des))", 2231 .test = alg_test_aead, 2232 .suite = { 2233 .aead = { 2234 .enc = { 2235 .vecs = 2236 hmac_sha384_des_cbc_enc_tv_temp, 2237 .count = 2238 HMAC_SHA384_DES_CBC_ENC_TEST_VEC 2239 } 2240 } 2241 } 2242 }, { 2243 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 2244 .test = alg_test_aead, 2245 .suite = { 2246 .aead = { 2247 .enc = { 2248 .vecs = 2249 hmac_sha384_des3_ede_cbc_enc_tv_temp, 2250 .count = 2251 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC 2252 } 2253 } 2254 } 2255 }, { 2256 .alg = "authenc(hmac(sha512),cbc(aes))", 2257 .test = alg_test_aead, 2258 .suite = { 2259 .aead = { 2260 .enc = { 2261 .vecs = 2262 hmac_sha512_aes_cbc_enc_tv_temp, 2263 .count = 2264 HMAC_SHA512_AES_CBC_ENC_TEST_VEC 2265 } 2266 } 2267 } 2268 }, { 2269 .alg = "authenc(hmac(sha512),cbc(des))", 2270 .test = alg_test_aead, 2271 .suite = { 2272 .aead = { 2273 .enc = { 2274 .vecs = 2275 hmac_sha512_des_cbc_enc_tv_temp, 2276 .count = 2277 HMAC_SHA512_DES_CBC_ENC_TEST_VEC 2278 } 2279 } 2280 } 2281 }, { 2282 .alg = "authenc(hmac(sha512),cbc(des3_ede))", 2283 .test = alg_test_aead, 2284 .suite = { 2285 .aead = { 2286 .enc = { 2287 .vecs = 2288 hmac_sha512_des3_ede_cbc_enc_tv_temp, 2289 .count = 2290 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC 2291 } 2292 } 2293 } 2294 }, { 2295 .alg = "cbc(aes)", 2296 .test = alg_test_skcipher, 2297 .fips_allowed = 1, 2298 .suite = { 2299 .cipher = { 2300 .enc = { 2301 .vecs = aes_cbc_enc_tv_template, 2302 .count = AES_CBC_ENC_TEST_VECTORS 2303 }, 2304 .dec = { 2305 .vecs = aes_cbc_dec_tv_template, 2306 .count = AES_CBC_DEC_TEST_VECTORS 2307 } 2308 } 2309 } 2310 }, { 2311 .alg = "cbc(anubis)", 2312 .test = alg_test_skcipher, 2313 .suite = { 2314 .cipher = { 2315 .enc = { 2316 .vecs = anubis_cbc_enc_tv_template, 2317 .count = ANUBIS_CBC_ENC_TEST_VECTORS 2318 }, 2319 .dec = { 2320 .vecs = anubis_cbc_dec_tv_template, 2321 .count = ANUBIS_CBC_DEC_TEST_VECTORS 2322 } 2323 } 2324 } 2325 }, { 2326 .alg = "cbc(blowfish)", 2327 .test = alg_test_skcipher, 2328 .suite = { 2329 .cipher = { 2330 .enc = { 2331 .vecs = bf_cbc_enc_tv_template, 2332 .count = BF_CBC_ENC_TEST_VECTORS 2333 }, 2334 .dec = { 2335 .vecs = bf_cbc_dec_tv_template, 2336 .count = BF_CBC_DEC_TEST_VECTORS 2337 } 2338 } 2339 } 2340 }, { 2341 .alg = "cbc(camellia)", 2342 .test = alg_test_skcipher, 2343 .suite = { 2344 .cipher = { 2345 .enc = { 2346 .vecs = camellia_cbc_enc_tv_template, 2347 .count = CAMELLIA_CBC_ENC_TEST_VECTORS 2348 }, 2349 .dec = { 2350 .vecs = camellia_cbc_dec_tv_template, 2351 .count = CAMELLIA_CBC_DEC_TEST_VECTORS 2352 } 2353 } 2354 } 2355 }, { 2356 .alg = "cbc(cast5)", 2357 .test = alg_test_skcipher, 2358 .suite = { 2359 .cipher = { 2360 .enc = { 2361 .vecs = cast5_cbc_enc_tv_template, 2362 .count = CAST5_CBC_ENC_TEST_VECTORS 2363 }, 2364 .dec = { 2365 .vecs = cast5_cbc_dec_tv_template, 2366 .count = CAST5_CBC_DEC_TEST_VECTORS 2367 } 2368 } 2369 } 2370 }, { 2371 .alg = "cbc(cast6)", 2372 .test = alg_test_skcipher, 2373 .suite = { 2374 .cipher = { 2375 .enc = { 2376 .vecs = cast6_cbc_enc_tv_template, 2377 .count = CAST6_CBC_ENC_TEST_VECTORS 2378 }, 2379 .dec = { 2380 .vecs = cast6_cbc_dec_tv_template, 2381 .count = CAST6_CBC_DEC_TEST_VECTORS 2382 } 2383 } 2384 } 2385 }, { 2386 .alg = "cbc(des)", 2387 .test = alg_test_skcipher, 2388 .suite = { 2389 .cipher = { 2390 .enc = { 2391 .vecs = des_cbc_enc_tv_template, 2392 .count = DES_CBC_ENC_TEST_VECTORS 2393 }, 2394 .dec = { 2395 .vecs = des_cbc_dec_tv_template, 2396 .count = DES_CBC_DEC_TEST_VECTORS 2397 } 2398 } 2399 } 2400 }, { 2401 .alg = "cbc(des3_ede)", 2402 .test = alg_test_skcipher, 2403 .fips_allowed = 1, 2404 .suite = { 2405 .cipher = { 2406 .enc = { 2407 .vecs = des3_ede_cbc_enc_tv_template, 2408 .count = DES3_EDE_CBC_ENC_TEST_VECTORS 2409 }, 2410 .dec = { 2411 .vecs = des3_ede_cbc_dec_tv_template, 2412 .count = DES3_EDE_CBC_DEC_TEST_VECTORS 2413 } 2414 } 2415 } 2416 }, { 2417 .alg = "cbc(serpent)", 2418 .test = alg_test_skcipher, 2419 .suite = { 2420 .cipher = { 2421 .enc = { 2422 .vecs = serpent_cbc_enc_tv_template, 2423 .count = SERPENT_CBC_ENC_TEST_VECTORS 2424 }, 2425 .dec = { 2426 .vecs = serpent_cbc_dec_tv_template, 2427 .count = SERPENT_CBC_DEC_TEST_VECTORS 2428 } 2429 } 2430 } 2431 }, { 2432 .alg = "cbc(twofish)", 2433 .test = alg_test_skcipher, 2434 .suite = { 2435 .cipher = { 2436 .enc = { 2437 .vecs = tf_cbc_enc_tv_template, 2438 .count = TF_CBC_ENC_TEST_VECTORS 2439 }, 2440 .dec = { 2441 .vecs = tf_cbc_dec_tv_template, 2442 .count = TF_CBC_DEC_TEST_VECTORS 2443 } 2444 } 2445 } 2446 }, { 2447 .alg = "ccm(aes)", 2448 .test = alg_test_aead, 2449 .fips_allowed = 1, 2450 .suite = { 2451 .aead = { 2452 .enc = { 2453 .vecs = aes_ccm_enc_tv_template, 2454 .count = AES_CCM_ENC_TEST_VECTORS 2455 }, 2456 .dec = { 2457 .vecs = aes_ccm_dec_tv_template, 2458 .count = AES_CCM_DEC_TEST_VECTORS 2459 } 2460 } 2461 } 2462 }, { 2463 .alg = "chacha20", 2464 .test = alg_test_skcipher, 2465 .suite = { 2466 .cipher = { 2467 .enc = { 2468 .vecs = chacha20_enc_tv_template, 2469 .count = CHACHA20_ENC_TEST_VECTORS 2470 }, 2471 .dec = { 2472 .vecs = chacha20_enc_tv_template, 2473 .count = CHACHA20_ENC_TEST_VECTORS 2474 }, 2475 } 2476 } 2477 }, { 2478 .alg = "cmac(aes)", 2479 .fips_allowed = 1, 2480 .test = alg_test_hash, 2481 .suite = { 2482 .hash = { 2483 .vecs = aes_cmac128_tv_template, 2484 .count = CMAC_AES_TEST_VECTORS 2485 } 2486 } 2487 }, { 2488 .alg = "cmac(des3_ede)", 2489 .fips_allowed = 1, 2490 .test = alg_test_hash, 2491 .suite = { 2492 .hash = { 2493 .vecs = des3_ede_cmac64_tv_template, 2494 .count = CMAC_DES3_EDE_TEST_VECTORS 2495 } 2496 } 2497 }, { 2498 .alg = "compress_null", 2499 .test = alg_test_null, 2500 }, { 2501 .alg = "crc32", 2502 .test = alg_test_hash, 2503 .suite = { 2504 .hash = { 2505 .vecs = crc32_tv_template, 2506 .count = CRC32_TEST_VECTORS 2507 } 2508 } 2509 }, { 2510 .alg = "crc32c", 2511 .test = alg_test_crc32c, 2512 .fips_allowed = 1, 2513 .suite = { 2514 .hash = { 2515 .vecs = crc32c_tv_template, 2516 .count = CRC32C_TEST_VECTORS 2517 } 2518 } 2519 }, { 2520 .alg = "crct10dif", 2521 .test = alg_test_hash, 2522 .fips_allowed = 1, 2523 .suite = { 2524 .hash = { 2525 .vecs = crct10dif_tv_template, 2526 .count = CRCT10DIF_TEST_VECTORS 2527 } 2528 } 2529 }, { 2530 .alg = "cryptd(__driver-cbc-aes-aesni)", 2531 .test = alg_test_null, 2532 .fips_allowed = 1, 2533 }, { 2534 .alg = "cryptd(__driver-cbc-camellia-aesni)", 2535 .test = alg_test_null, 2536 }, { 2537 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)", 2538 .test = alg_test_null, 2539 }, { 2540 .alg = "cryptd(__driver-cbc-serpent-avx2)", 2541 .test = alg_test_null, 2542 }, { 2543 .alg = "cryptd(__driver-ecb-aes-aesni)", 2544 .test = alg_test_null, 2545 .fips_allowed = 1, 2546 }, { 2547 .alg = "cryptd(__driver-ecb-camellia-aesni)", 2548 .test = alg_test_null, 2549 }, { 2550 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)", 2551 .test = alg_test_null, 2552 }, { 2553 .alg = "cryptd(__driver-ecb-cast5-avx)", 2554 .test = alg_test_null, 2555 }, { 2556 .alg = "cryptd(__driver-ecb-cast6-avx)", 2557 .test = alg_test_null, 2558 }, { 2559 .alg = "cryptd(__driver-ecb-serpent-avx)", 2560 .test = alg_test_null, 2561 }, { 2562 .alg = "cryptd(__driver-ecb-serpent-avx2)", 2563 .test = alg_test_null, 2564 }, { 2565 .alg = "cryptd(__driver-ecb-serpent-sse2)", 2566 .test = alg_test_null, 2567 }, { 2568 .alg = "cryptd(__driver-ecb-twofish-avx)", 2569 .test = alg_test_null, 2570 }, { 2571 .alg = "cryptd(__driver-gcm-aes-aesni)", 2572 .test = alg_test_null, 2573 .fips_allowed = 1, 2574 }, { 2575 .alg = "cryptd(__ghash-pclmulqdqni)", 2576 .test = alg_test_null, 2577 .fips_allowed = 1, 2578 }, { 2579 .alg = "ctr(aes)", 2580 .test = alg_test_skcipher, 2581 .fips_allowed = 1, 2582 .suite = { 2583 .cipher = { 2584 .enc = { 2585 .vecs = aes_ctr_enc_tv_template, 2586 .count = AES_CTR_ENC_TEST_VECTORS 2587 }, 2588 .dec = { 2589 .vecs = aes_ctr_dec_tv_template, 2590 .count = AES_CTR_DEC_TEST_VECTORS 2591 } 2592 } 2593 } 2594 }, { 2595 .alg = "ctr(blowfish)", 2596 .test = alg_test_skcipher, 2597 .suite = { 2598 .cipher = { 2599 .enc = { 2600 .vecs = bf_ctr_enc_tv_template, 2601 .count = BF_CTR_ENC_TEST_VECTORS 2602 }, 2603 .dec = { 2604 .vecs = bf_ctr_dec_tv_template, 2605 .count = BF_CTR_DEC_TEST_VECTORS 2606 } 2607 } 2608 } 2609 }, { 2610 .alg = "ctr(camellia)", 2611 .test = alg_test_skcipher, 2612 .suite = { 2613 .cipher = { 2614 .enc = { 2615 .vecs = camellia_ctr_enc_tv_template, 2616 .count = CAMELLIA_CTR_ENC_TEST_VECTORS 2617 }, 2618 .dec = { 2619 .vecs = camellia_ctr_dec_tv_template, 2620 .count = CAMELLIA_CTR_DEC_TEST_VECTORS 2621 } 2622 } 2623 } 2624 }, { 2625 .alg = "ctr(cast5)", 2626 .test = alg_test_skcipher, 2627 .suite = { 2628 .cipher = { 2629 .enc = { 2630 .vecs = cast5_ctr_enc_tv_template, 2631 .count = CAST5_CTR_ENC_TEST_VECTORS 2632 }, 2633 .dec = { 2634 .vecs = cast5_ctr_dec_tv_template, 2635 .count = CAST5_CTR_DEC_TEST_VECTORS 2636 } 2637 } 2638 } 2639 }, { 2640 .alg = "ctr(cast6)", 2641 .test = alg_test_skcipher, 2642 .suite = { 2643 .cipher = { 2644 .enc = { 2645 .vecs = cast6_ctr_enc_tv_template, 2646 .count = CAST6_CTR_ENC_TEST_VECTORS 2647 }, 2648 .dec = { 2649 .vecs = cast6_ctr_dec_tv_template, 2650 .count = CAST6_CTR_DEC_TEST_VECTORS 2651 } 2652 } 2653 } 2654 }, { 2655 .alg = "ctr(des)", 2656 .test = alg_test_skcipher, 2657 .suite = { 2658 .cipher = { 2659 .enc = { 2660 .vecs = des_ctr_enc_tv_template, 2661 .count = DES_CTR_ENC_TEST_VECTORS 2662 }, 2663 .dec = { 2664 .vecs = des_ctr_dec_tv_template, 2665 .count = DES_CTR_DEC_TEST_VECTORS 2666 } 2667 } 2668 } 2669 }, { 2670 .alg = "ctr(des3_ede)", 2671 .test = alg_test_skcipher, 2672 .suite = { 2673 .cipher = { 2674 .enc = { 2675 .vecs = des3_ede_ctr_enc_tv_template, 2676 .count = DES3_EDE_CTR_ENC_TEST_VECTORS 2677 }, 2678 .dec = { 2679 .vecs = des3_ede_ctr_dec_tv_template, 2680 .count = DES3_EDE_CTR_DEC_TEST_VECTORS 2681 } 2682 } 2683 } 2684 }, { 2685 .alg = "ctr(serpent)", 2686 .test = alg_test_skcipher, 2687 .suite = { 2688 .cipher = { 2689 .enc = { 2690 .vecs = serpent_ctr_enc_tv_template, 2691 .count = SERPENT_CTR_ENC_TEST_VECTORS 2692 }, 2693 .dec = { 2694 .vecs = serpent_ctr_dec_tv_template, 2695 .count = SERPENT_CTR_DEC_TEST_VECTORS 2696 } 2697 } 2698 } 2699 }, { 2700 .alg = "ctr(twofish)", 2701 .test = alg_test_skcipher, 2702 .suite = { 2703 .cipher = { 2704 .enc = { 2705 .vecs = tf_ctr_enc_tv_template, 2706 .count = TF_CTR_ENC_TEST_VECTORS 2707 }, 2708 .dec = { 2709 .vecs = tf_ctr_dec_tv_template, 2710 .count = TF_CTR_DEC_TEST_VECTORS 2711 } 2712 } 2713 } 2714 }, { 2715 .alg = "cts(cbc(aes))", 2716 .test = alg_test_skcipher, 2717 .suite = { 2718 .cipher = { 2719 .enc = { 2720 .vecs = cts_mode_enc_tv_template, 2721 .count = CTS_MODE_ENC_TEST_VECTORS 2722 }, 2723 .dec = { 2724 .vecs = cts_mode_dec_tv_template, 2725 .count = CTS_MODE_DEC_TEST_VECTORS 2726 } 2727 } 2728 } 2729 }, { 2730 .alg = "deflate", 2731 .test = alg_test_comp, 2732 .fips_allowed = 1, 2733 .suite = { 2734 .comp = { 2735 .comp = { 2736 .vecs = deflate_comp_tv_template, 2737 .count = DEFLATE_COMP_TEST_VECTORS 2738 }, 2739 .decomp = { 2740 .vecs = deflate_decomp_tv_template, 2741 .count = DEFLATE_DECOMP_TEST_VECTORS 2742 } 2743 } 2744 } 2745 }, { 2746 .alg = "digest_null", 2747 .test = alg_test_null, 2748 }, { 2749 .alg = "drbg_nopr_ctr_aes128", 2750 .test = alg_test_drbg, 2751 .fips_allowed = 1, 2752 .suite = { 2753 .drbg = { 2754 .vecs = drbg_nopr_ctr_aes128_tv_template, 2755 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template) 2756 } 2757 } 2758 }, { 2759 .alg = "drbg_nopr_ctr_aes192", 2760 .test = alg_test_drbg, 2761 .fips_allowed = 1, 2762 .suite = { 2763 .drbg = { 2764 .vecs = drbg_nopr_ctr_aes192_tv_template, 2765 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template) 2766 } 2767 } 2768 }, { 2769 .alg = "drbg_nopr_ctr_aes256", 2770 .test = alg_test_drbg, 2771 .fips_allowed = 1, 2772 .suite = { 2773 .drbg = { 2774 .vecs = drbg_nopr_ctr_aes256_tv_template, 2775 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template) 2776 } 2777 } 2778 }, { 2779 /* 2780 * There is no need to specifically test the DRBG with every 2781 * backend cipher -- covered by drbg_nopr_hmac_sha256 test 2782 */ 2783 .alg = "drbg_nopr_hmac_sha1", 2784 .fips_allowed = 1, 2785 .test = alg_test_null, 2786 }, { 2787 .alg = "drbg_nopr_hmac_sha256", 2788 .test = alg_test_drbg, 2789 .fips_allowed = 1, 2790 .suite = { 2791 .drbg = { 2792 .vecs = drbg_nopr_hmac_sha256_tv_template, 2793 .count = 2794 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template) 2795 } 2796 } 2797 }, { 2798 /* covered by drbg_nopr_hmac_sha256 test */ 2799 .alg = "drbg_nopr_hmac_sha384", 2800 .fips_allowed = 1, 2801 .test = alg_test_null, 2802 }, { 2803 .alg = "drbg_nopr_hmac_sha512", 2804 .test = alg_test_null, 2805 .fips_allowed = 1, 2806 }, { 2807 .alg = "drbg_nopr_sha1", 2808 .fips_allowed = 1, 2809 .test = alg_test_null, 2810 }, { 2811 .alg = "drbg_nopr_sha256", 2812 .test = alg_test_drbg, 2813 .fips_allowed = 1, 2814 .suite = { 2815 .drbg = { 2816 .vecs = drbg_nopr_sha256_tv_template, 2817 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template) 2818 } 2819 } 2820 }, { 2821 /* covered by drbg_nopr_sha256 test */ 2822 .alg = "drbg_nopr_sha384", 2823 .fips_allowed = 1, 2824 .test = alg_test_null, 2825 }, { 2826 .alg = "drbg_nopr_sha512", 2827 .fips_allowed = 1, 2828 .test = alg_test_null, 2829 }, { 2830 .alg = "drbg_pr_ctr_aes128", 2831 .test = alg_test_drbg, 2832 .fips_allowed = 1, 2833 .suite = { 2834 .drbg = { 2835 .vecs = drbg_pr_ctr_aes128_tv_template, 2836 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template) 2837 } 2838 } 2839 }, { 2840 /* covered by drbg_pr_ctr_aes128 test */ 2841 .alg = "drbg_pr_ctr_aes192", 2842 .fips_allowed = 1, 2843 .test = alg_test_null, 2844 }, { 2845 .alg = "drbg_pr_ctr_aes256", 2846 .fips_allowed = 1, 2847 .test = alg_test_null, 2848 }, { 2849 .alg = "drbg_pr_hmac_sha1", 2850 .fips_allowed = 1, 2851 .test = alg_test_null, 2852 }, { 2853 .alg = "drbg_pr_hmac_sha256", 2854 .test = alg_test_drbg, 2855 .fips_allowed = 1, 2856 .suite = { 2857 .drbg = { 2858 .vecs = drbg_pr_hmac_sha256_tv_template, 2859 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template) 2860 } 2861 } 2862 }, { 2863 /* covered by drbg_pr_hmac_sha256 test */ 2864 .alg = "drbg_pr_hmac_sha384", 2865 .fips_allowed = 1, 2866 .test = alg_test_null, 2867 }, { 2868 .alg = "drbg_pr_hmac_sha512", 2869 .test = alg_test_null, 2870 .fips_allowed = 1, 2871 }, { 2872 .alg = "drbg_pr_sha1", 2873 .fips_allowed = 1, 2874 .test = alg_test_null, 2875 }, { 2876 .alg = "drbg_pr_sha256", 2877 .test = alg_test_drbg, 2878 .fips_allowed = 1, 2879 .suite = { 2880 .drbg = { 2881 .vecs = drbg_pr_sha256_tv_template, 2882 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template) 2883 } 2884 } 2885 }, { 2886 /* covered by drbg_pr_sha256 test */ 2887 .alg = "drbg_pr_sha384", 2888 .fips_allowed = 1, 2889 .test = alg_test_null, 2890 }, { 2891 .alg = "drbg_pr_sha512", 2892 .fips_allowed = 1, 2893 .test = alg_test_null, 2894 }, { 2895 .alg = "ecb(__aes-aesni)", 2896 .test = alg_test_null, 2897 .fips_allowed = 1, 2898 }, { 2899 .alg = "ecb(aes)", 2900 .test = alg_test_skcipher, 2901 .fips_allowed = 1, 2902 .suite = { 2903 .cipher = { 2904 .enc = { 2905 .vecs = aes_enc_tv_template, 2906 .count = AES_ENC_TEST_VECTORS 2907 }, 2908 .dec = { 2909 .vecs = aes_dec_tv_template, 2910 .count = AES_DEC_TEST_VECTORS 2911 } 2912 } 2913 } 2914 }, { 2915 .alg = "ecb(anubis)", 2916 .test = alg_test_skcipher, 2917 .suite = { 2918 .cipher = { 2919 .enc = { 2920 .vecs = anubis_enc_tv_template, 2921 .count = ANUBIS_ENC_TEST_VECTORS 2922 }, 2923 .dec = { 2924 .vecs = anubis_dec_tv_template, 2925 .count = ANUBIS_DEC_TEST_VECTORS 2926 } 2927 } 2928 } 2929 }, { 2930 .alg = "ecb(arc4)", 2931 .test = alg_test_skcipher, 2932 .suite = { 2933 .cipher = { 2934 .enc = { 2935 .vecs = arc4_enc_tv_template, 2936 .count = ARC4_ENC_TEST_VECTORS 2937 }, 2938 .dec = { 2939 .vecs = arc4_dec_tv_template, 2940 .count = ARC4_DEC_TEST_VECTORS 2941 } 2942 } 2943 } 2944 }, { 2945 .alg = "ecb(blowfish)", 2946 .test = alg_test_skcipher, 2947 .suite = { 2948 .cipher = { 2949 .enc = { 2950 .vecs = bf_enc_tv_template, 2951 .count = BF_ENC_TEST_VECTORS 2952 }, 2953 .dec = { 2954 .vecs = bf_dec_tv_template, 2955 .count = BF_DEC_TEST_VECTORS 2956 } 2957 } 2958 } 2959 }, { 2960 .alg = "ecb(camellia)", 2961 .test = alg_test_skcipher, 2962 .suite = { 2963 .cipher = { 2964 .enc = { 2965 .vecs = camellia_enc_tv_template, 2966 .count = CAMELLIA_ENC_TEST_VECTORS 2967 }, 2968 .dec = { 2969 .vecs = camellia_dec_tv_template, 2970 .count = CAMELLIA_DEC_TEST_VECTORS 2971 } 2972 } 2973 } 2974 }, { 2975 .alg = "ecb(cast5)", 2976 .test = alg_test_skcipher, 2977 .suite = { 2978 .cipher = { 2979 .enc = { 2980 .vecs = cast5_enc_tv_template, 2981 .count = CAST5_ENC_TEST_VECTORS 2982 }, 2983 .dec = { 2984 .vecs = cast5_dec_tv_template, 2985 .count = CAST5_DEC_TEST_VECTORS 2986 } 2987 } 2988 } 2989 }, { 2990 .alg = "ecb(cast6)", 2991 .test = alg_test_skcipher, 2992 .suite = { 2993 .cipher = { 2994 .enc = { 2995 .vecs = cast6_enc_tv_template, 2996 .count = CAST6_ENC_TEST_VECTORS 2997 }, 2998 .dec = { 2999 .vecs = cast6_dec_tv_template, 3000 .count = CAST6_DEC_TEST_VECTORS 3001 } 3002 } 3003 } 3004 }, { 3005 .alg = "ecb(cipher_null)", 3006 .test = alg_test_null, 3007 }, { 3008 .alg = "ecb(des)", 3009 .test = alg_test_skcipher, 3010 .suite = { 3011 .cipher = { 3012 .enc = { 3013 .vecs = des_enc_tv_template, 3014 .count = DES_ENC_TEST_VECTORS 3015 }, 3016 .dec = { 3017 .vecs = des_dec_tv_template, 3018 .count = DES_DEC_TEST_VECTORS 3019 } 3020 } 3021 } 3022 }, { 3023 .alg = "ecb(des3_ede)", 3024 .test = alg_test_skcipher, 3025 .fips_allowed = 1, 3026 .suite = { 3027 .cipher = { 3028 .enc = { 3029 .vecs = des3_ede_enc_tv_template, 3030 .count = DES3_EDE_ENC_TEST_VECTORS 3031 }, 3032 .dec = { 3033 .vecs = des3_ede_dec_tv_template, 3034 .count = DES3_EDE_DEC_TEST_VECTORS 3035 } 3036 } 3037 } 3038 }, { 3039 .alg = "ecb(fcrypt)", 3040 .test = alg_test_skcipher, 3041 .suite = { 3042 .cipher = { 3043 .enc = { 3044 .vecs = fcrypt_pcbc_enc_tv_template, 3045 .count = 1 3046 }, 3047 .dec = { 3048 .vecs = fcrypt_pcbc_dec_tv_template, 3049 .count = 1 3050 } 3051 } 3052 } 3053 }, { 3054 .alg = "ecb(khazad)", 3055 .test = alg_test_skcipher, 3056 .suite = { 3057 .cipher = { 3058 .enc = { 3059 .vecs = khazad_enc_tv_template, 3060 .count = KHAZAD_ENC_TEST_VECTORS 3061 }, 3062 .dec = { 3063 .vecs = khazad_dec_tv_template, 3064 .count = KHAZAD_DEC_TEST_VECTORS 3065 } 3066 } 3067 } 3068 }, { 3069 .alg = "ecb(seed)", 3070 .test = alg_test_skcipher, 3071 .suite = { 3072 .cipher = { 3073 .enc = { 3074 .vecs = seed_enc_tv_template, 3075 .count = SEED_ENC_TEST_VECTORS 3076 }, 3077 .dec = { 3078 .vecs = seed_dec_tv_template, 3079 .count = SEED_DEC_TEST_VECTORS 3080 } 3081 } 3082 } 3083 }, { 3084 .alg = "ecb(serpent)", 3085 .test = alg_test_skcipher, 3086 .suite = { 3087 .cipher = { 3088 .enc = { 3089 .vecs = serpent_enc_tv_template, 3090 .count = SERPENT_ENC_TEST_VECTORS 3091 }, 3092 .dec = { 3093 .vecs = serpent_dec_tv_template, 3094 .count = SERPENT_DEC_TEST_VECTORS 3095 } 3096 } 3097 } 3098 }, { 3099 .alg = "ecb(tea)", 3100 .test = alg_test_skcipher, 3101 .suite = { 3102 .cipher = { 3103 .enc = { 3104 .vecs = tea_enc_tv_template, 3105 .count = TEA_ENC_TEST_VECTORS 3106 }, 3107 .dec = { 3108 .vecs = tea_dec_tv_template, 3109 .count = TEA_DEC_TEST_VECTORS 3110 } 3111 } 3112 } 3113 }, { 3114 .alg = "ecb(tnepres)", 3115 .test = alg_test_skcipher, 3116 .suite = { 3117 .cipher = { 3118 .enc = { 3119 .vecs = tnepres_enc_tv_template, 3120 .count = TNEPRES_ENC_TEST_VECTORS 3121 }, 3122 .dec = { 3123 .vecs = tnepres_dec_tv_template, 3124 .count = TNEPRES_DEC_TEST_VECTORS 3125 } 3126 } 3127 } 3128 }, { 3129 .alg = "ecb(twofish)", 3130 .test = alg_test_skcipher, 3131 .suite = { 3132 .cipher = { 3133 .enc = { 3134 .vecs = tf_enc_tv_template, 3135 .count = TF_ENC_TEST_VECTORS 3136 }, 3137 .dec = { 3138 .vecs = tf_dec_tv_template, 3139 .count = TF_DEC_TEST_VECTORS 3140 } 3141 } 3142 } 3143 }, { 3144 .alg = "ecb(xeta)", 3145 .test = alg_test_skcipher, 3146 .suite = { 3147 .cipher = { 3148 .enc = { 3149 .vecs = xeta_enc_tv_template, 3150 .count = XETA_ENC_TEST_VECTORS 3151 }, 3152 .dec = { 3153 .vecs = xeta_dec_tv_template, 3154 .count = XETA_DEC_TEST_VECTORS 3155 } 3156 } 3157 } 3158 }, { 3159 .alg = "ecb(xtea)", 3160 .test = alg_test_skcipher, 3161 .suite = { 3162 .cipher = { 3163 .enc = { 3164 .vecs = xtea_enc_tv_template, 3165 .count = XTEA_ENC_TEST_VECTORS 3166 }, 3167 .dec = { 3168 .vecs = xtea_dec_tv_template, 3169 .count = XTEA_DEC_TEST_VECTORS 3170 } 3171 } 3172 } 3173 }, { 3174 .alg = "gcm(aes)", 3175 .test = alg_test_aead, 3176 .fips_allowed = 1, 3177 .suite = { 3178 .aead = { 3179 .enc = { 3180 .vecs = aes_gcm_enc_tv_template, 3181 .count = AES_GCM_ENC_TEST_VECTORS 3182 }, 3183 .dec = { 3184 .vecs = aes_gcm_dec_tv_template, 3185 .count = AES_GCM_DEC_TEST_VECTORS 3186 } 3187 } 3188 } 3189 }, { 3190 .alg = "ghash", 3191 .test = alg_test_hash, 3192 .fips_allowed = 1, 3193 .suite = { 3194 .hash = { 3195 .vecs = ghash_tv_template, 3196 .count = GHASH_TEST_VECTORS 3197 } 3198 } 3199 }, { 3200 .alg = "hmac(crc32)", 3201 .test = alg_test_hash, 3202 .suite = { 3203 .hash = { 3204 .vecs = bfin_crc_tv_template, 3205 .count = BFIN_CRC_TEST_VECTORS 3206 } 3207 } 3208 }, { 3209 .alg = "hmac(md5)", 3210 .test = alg_test_hash, 3211 .suite = { 3212 .hash = { 3213 .vecs = hmac_md5_tv_template, 3214 .count = HMAC_MD5_TEST_VECTORS 3215 } 3216 } 3217 }, { 3218 .alg = "hmac(rmd128)", 3219 .test = alg_test_hash, 3220 .suite = { 3221 .hash = { 3222 .vecs = hmac_rmd128_tv_template, 3223 .count = HMAC_RMD128_TEST_VECTORS 3224 } 3225 } 3226 }, { 3227 .alg = "hmac(rmd160)", 3228 .test = alg_test_hash, 3229 .suite = { 3230 .hash = { 3231 .vecs = hmac_rmd160_tv_template, 3232 .count = HMAC_RMD160_TEST_VECTORS 3233 } 3234 } 3235 }, { 3236 .alg = "hmac(sha1)", 3237 .test = alg_test_hash, 3238 .fips_allowed = 1, 3239 .suite = { 3240 .hash = { 3241 .vecs = hmac_sha1_tv_template, 3242 .count = HMAC_SHA1_TEST_VECTORS 3243 } 3244 } 3245 }, { 3246 .alg = "hmac(sha224)", 3247 .test = alg_test_hash, 3248 .fips_allowed = 1, 3249 .suite = { 3250 .hash = { 3251 .vecs = hmac_sha224_tv_template, 3252 .count = HMAC_SHA224_TEST_VECTORS 3253 } 3254 } 3255 }, { 3256 .alg = "hmac(sha256)", 3257 .test = alg_test_hash, 3258 .fips_allowed = 1, 3259 .suite = { 3260 .hash = { 3261 .vecs = hmac_sha256_tv_template, 3262 .count = HMAC_SHA256_TEST_VECTORS 3263 } 3264 } 3265 }, { 3266 .alg = "hmac(sha384)", 3267 .test = alg_test_hash, 3268 .fips_allowed = 1, 3269 .suite = { 3270 .hash = { 3271 .vecs = hmac_sha384_tv_template, 3272 .count = HMAC_SHA384_TEST_VECTORS 3273 } 3274 } 3275 }, { 3276 .alg = "hmac(sha512)", 3277 .test = alg_test_hash, 3278 .fips_allowed = 1, 3279 .suite = { 3280 .hash = { 3281 .vecs = hmac_sha512_tv_template, 3282 .count = HMAC_SHA512_TEST_VECTORS 3283 } 3284 } 3285 }, { 3286 .alg = "jitterentropy_rng", 3287 .fips_allowed = 1, 3288 .test = alg_test_null, 3289 }, { 3290 .alg = "kw(aes)", 3291 .test = alg_test_skcipher, 3292 .fips_allowed = 1, 3293 .suite = { 3294 .cipher = { 3295 .enc = { 3296 .vecs = aes_kw_enc_tv_template, 3297 .count = ARRAY_SIZE(aes_kw_enc_tv_template) 3298 }, 3299 .dec = { 3300 .vecs = aes_kw_dec_tv_template, 3301 .count = ARRAY_SIZE(aes_kw_dec_tv_template) 3302 } 3303 } 3304 } 3305 }, { 3306 .alg = "lrw(aes)", 3307 .test = alg_test_skcipher, 3308 .suite = { 3309 .cipher = { 3310 .enc = { 3311 .vecs = aes_lrw_enc_tv_template, 3312 .count = AES_LRW_ENC_TEST_VECTORS 3313 }, 3314 .dec = { 3315 .vecs = aes_lrw_dec_tv_template, 3316 .count = AES_LRW_DEC_TEST_VECTORS 3317 } 3318 } 3319 } 3320 }, { 3321 .alg = "lrw(camellia)", 3322 .test = alg_test_skcipher, 3323 .suite = { 3324 .cipher = { 3325 .enc = { 3326 .vecs = camellia_lrw_enc_tv_template, 3327 .count = CAMELLIA_LRW_ENC_TEST_VECTORS 3328 }, 3329 .dec = { 3330 .vecs = camellia_lrw_dec_tv_template, 3331 .count = CAMELLIA_LRW_DEC_TEST_VECTORS 3332 } 3333 } 3334 } 3335 }, { 3336 .alg = "lrw(cast6)", 3337 .test = alg_test_skcipher, 3338 .suite = { 3339 .cipher = { 3340 .enc = { 3341 .vecs = cast6_lrw_enc_tv_template, 3342 .count = CAST6_LRW_ENC_TEST_VECTORS 3343 }, 3344 .dec = { 3345 .vecs = cast6_lrw_dec_tv_template, 3346 .count = CAST6_LRW_DEC_TEST_VECTORS 3347 } 3348 } 3349 } 3350 }, { 3351 .alg = "lrw(serpent)", 3352 .test = alg_test_skcipher, 3353 .suite = { 3354 .cipher = { 3355 .enc = { 3356 .vecs = serpent_lrw_enc_tv_template, 3357 .count = SERPENT_LRW_ENC_TEST_VECTORS 3358 }, 3359 .dec = { 3360 .vecs = serpent_lrw_dec_tv_template, 3361 .count = SERPENT_LRW_DEC_TEST_VECTORS 3362 } 3363 } 3364 } 3365 }, { 3366 .alg = "lrw(twofish)", 3367 .test = alg_test_skcipher, 3368 .suite = { 3369 .cipher = { 3370 .enc = { 3371 .vecs = tf_lrw_enc_tv_template, 3372 .count = TF_LRW_ENC_TEST_VECTORS 3373 }, 3374 .dec = { 3375 .vecs = tf_lrw_dec_tv_template, 3376 .count = TF_LRW_DEC_TEST_VECTORS 3377 } 3378 } 3379 } 3380 }, { 3381 .alg = "lz4", 3382 .test = alg_test_comp, 3383 .fips_allowed = 1, 3384 .suite = { 3385 .comp = { 3386 .comp = { 3387 .vecs = lz4_comp_tv_template, 3388 .count = LZ4_COMP_TEST_VECTORS 3389 }, 3390 .decomp = { 3391 .vecs = lz4_decomp_tv_template, 3392 .count = LZ4_DECOMP_TEST_VECTORS 3393 } 3394 } 3395 } 3396 }, { 3397 .alg = "lz4hc", 3398 .test = alg_test_comp, 3399 .fips_allowed = 1, 3400 .suite = { 3401 .comp = { 3402 .comp = { 3403 .vecs = lz4hc_comp_tv_template, 3404 .count = LZ4HC_COMP_TEST_VECTORS 3405 }, 3406 .decomp = { 3407 .vecs = lz4hc_decomp_tv_template, 3408 .count = LZ4HC_DECOMP_TEST_VECTORS 3409 } 3410 } 3411 } 3412 }, { 3413 .alg = "lzo", 3414 .test = alg_test_comp, 3415 .fips_allowed = 1, 3416 .suite = { 3417 .comp = { 3418 .comp = { 3419 .vecs = lzo_comp_tv_template, 3420 .count = LZO_COMP_TEST_VECTORS 3421 }, 3422 .decomp = { 3423 .vecs = lzo_decomp_tv_template, 3424 .count = LZO_DECOMP_TEST_VECTORS 3425 } 3426 } 3427 } 3428 }, { 3429 .alg = "md4", 3430 .test = alg_test_hash, 3431 .suite = { 3432 .hash = { 3433 .vecs = md4_tv_template, 3434 .count = MD4_TEST_VECTORS 3435 } 3436 } 3437 }, { 3438 .alg = "md5", 3439 .test = alg_test_hash, 3440 .suite = { 3441 .hash = { 3442 .vecs = md5_tv_template, 3443 .count = MD5_TEST_VECTORS 3444 } 3445 } 3446 }, { 3447 .alg = "michael_mic", 3448 .test = alg_test_hash, 3449 .suite = { 3450 .hash = { 3451 .vecs = michael_mic_tv_template, 3452 .count = MICHAEL_MIC_TEST_VECTORS 3453 } 3454 } 3455 }, { 3456 .alg = "ofb(aes)", 3457 .test = alg_test_skcipher, 3458 .fips_allowed = 1, 3459 .suite = { 3460 .cipher = { 3461 .enc = { 3462 .vecs = aes_ofb_enc_tv_template, 3463 .count = AES_OFB_ENC_TEST_VECTORS 3464 }, 3465 .dec = { 3466 .vecs = aes_ofb_dec_tv_template, 3467 .count = AES_OFB_DEC_TEST_VECTORS 3468 } 3469 } 3470 } 3471 }, { 3472 .alg = "pcbc(fcrypt)", 3473 .test = alg_test_skcipher, 3474 .suite = { 3475 .cipher = { 3476 .enc = { 3477 .vecs = fcrypt_pcbc_enc_tv_template, 3478 .count = FCRYPT_ENC_TEST_VECTORS 3479 }, 3480 .dec = { 3481 .vecs = fcrypt_pcbc_dec_tv_template, 3482 .count = FCRYPT_DEC_TEST_VECTORS 3483 } 3484 } 3485 } 3486 }, { 3487 .alg = "poly1305", 3488 .test = alg_test_hash, 3489 .suite = { 3490 .hash = { 3491 .vecs = poly1305_tv_template, 3492 .count = POLY1305_TEST_VECTORS 3493 } 3494 } 3495 }, { 3496 .alg = "rfc3686(ctr(aes))", 3497 .test = alg_test_skcipher, 3498 .fips_allowed = 1, 3499 .suite = { 3500 .cipher = { 3501 .enc = { 3502 .vecs = aes_ctr_rfc3686_enc_tv_template, 3503 .count = AES_CTR_3686_ENC_TEST_VECTORS 3504 }, 3505 .dec = { 3506 .vecs = aes_ctr_rfc3686_dec_tv_template, 3507 .count = AES_CTR_3686_DEC_TEST_VECTORS 3508 } 3509 } 3510 } 3511 }, { 3512 .alg = "rfc4106(gcm(aes))", 3513 .test = alg_test_aead, 3514 .fips_allowed = 1, 3515 .suite = { 3516 .aead = { 3517 .enc = { 3518 .vecs = aes_gcm_rfc4106_enc_tv_template, 3519 .count = AES_GCM_4106_ENC_TEST_VECTORS 3520 }, 3521 .dec = { 3522 .vecs = aes_gcm_rfc4106_dec_tv_template, 3523 .count = AES_GCM_4106_DEC_TEST_VECTORS 3524 } 3525 } 3526 } 3527 }, { 3528 .alg = "rfc4309(ccm(aes))", 3529 .test = alg_test_aead, 3530 .fips_allowed = 1, 3531 .suite = { 3532 .aead = { 3533 .enc = { 3534 .vecs = aes_ccm_rfc4309_enc_tv_template, 3535 .count = AES_CCM_4309_ENC_TEST_VECTORS 3536 }, 3537 .dec = { 3538 .vecs = aes_ccm_rfc4309_dec_tv_template, 3539 .count = AES_CCM_4309_DEC_TEST_VECTORS 3540 } 3541 } 3542 } 3543 }, { 3544 .alg = "rfc4543(gcm(aes))", 3545 .test = alg_test_aead, 3546 .suite = { 3547 .aead = { 3548 .enc = { 3549 .vecs = aes_gcm_rfc4543_enc_tv_template, 3550 .count = AES_GCM_4543_ENC_TEST_VECTORS 3551 }, 3552 .dec = { 3553 .vecs = aes_gcm_rfc4543_dec_tv_template, 3554 .count = AES_GCM_4543_DEC_TEST_VECTORS 3555 }, 3556 } 3557 } 3558 }, { 3559 .alg = "rfc7539(chacha20,poly1305)", 3560 .test = alg_test_aead, 3561 .suite = { 3562 .aead = { 3563 .enc = { 3564 .vecs = rfc7539_enc_tv_template, 3565 .count = RFC7539_ENC_TEST_VECTORS 3566 }, 3567 .dec = { 3568 .vecs = rfc7539_dec_tv_template, 3569 .count = RFC7539_DEC_TEST_VECTORS 3570 }, 3571 } 3572 } 3573 }, { 3574 .alg = "rfc7539esp(chacha20,poly1305)", 3575 .test = alg_test_aead, 3576 .suite = { 3577 .aead = { 3578 .enc = { 3579 .vecs = rfc7539esp_enc_tv_template, 3580 .count = RFC7539ESP_ENC_TEST_VECTORS 3581 }, 3582 .dec = { 3583 .vecs = rfc7539esp_dec_tv_template, 3584 .count = RFC7539ESP_DEC_TEST_VECTORS 3585 }, 3586 } 3587 } 3588 }, { 3589 .alg = "rmd128", 3590 .test = alg_test_hash, 3591 .suite = { 3592 .hash = { 3593 .vecs = rmd128_tv_template, 3594 .count = RMD128_TEST_VECTORS 3595 } 3596 } 3597 }, { 3598 .alg = "rmd160", 3599 .test = alg_test_hash, 3600 .suite = { 3601 .hash = { 3602 .vecs = rmd160_tv_template, 3603 .count = RMD160_TEST_VECTORS 3604 } 3605 } 3606 }, { 3607 .alg = "rmd256", 3608 .test = alg_test_hash, 3609 .suite = { 3610 .hash = { 3611 .vecs = rmd256_tv_template, 3612 .count = RMD256_TEST_VECTORS 3613 } 3614 } 3615 }, { 3616 .alg = "rmd320", 3617 .test = alg_test_hash, 3618 .suite = { 3619 .hash = { 3620 .vecs = rmd320_tv_template, 3621 .count = RMD320_TEST_VECTORS 3622 } 3623 } 3624 }, { 3625 .alg = "rsa", 3626 .test = alg_test_akcipher, 3627 .fips_allowed = 1, 3628 .suite = { 3629 .akcipher = { 3630 .vecs = rsa_tv_template, 3631 .count = RSA_TEST_VECTORS 3632 } 3633 } 3634 }, { 3635 .alg = "salsa20", 3636 .test = alg_test_skcipher, 3637 .suite = { 3638 .cipher = { 3639 .enc = { 3640 .vecs = salsa20_stream_enc_tv_template, 3641 .count = SALSA20_STREAM_ENC_TEST_VECTORS 3642 } 3643 } 3644 } 3645 }, { 3646 .alg = "sha1", 3647 .test = alg_test_hash, 3648 .fips_allowed = 1, 3649 .suite = { 3650 .hash = { 3651 .vecs = sha1_tv_template, 3652 .count = SHA1_TEST_VECTORS 3653 } 3654 } 3655 }, { 3656 .alg = "sha224", 3657 .test = alg_test_hash, 3658 .fips_allowed = 1, 3659 .suite = { 3660 .hash = { 3661 .vecs = sha224_tv_template, 3662 .count = SHA224_TEST_VECTORS 3663 } 3664 } 3665 }, { 3666 .alg = "sha256", 3667 .test = alg_test_hash, 3668 .fips_allowed = 1, 3669 .suite = { 3670 .hash = { 3671 .vecs = sha256_tv_template, 3672 .count = SHA256_TEST_VECTORS 3673 } 3674 } 3675 }, { 3676 .alg = "sha384", 3677 .test = alg_test_hash, 3678 .fips_allowed = 1, 3679 .suite = { 3680 .hash = { 3681 .vecs = sha384_tv_template, 3682 .count = SHA384_TEST_VECTORS 3683 } 3684 } 3685 }, { 3686 .alg = "sha512", 3687 .test = alg_test_hash, 3688 .fips_allowed = 1, 3689 .suite = { 3690 .hash = { 3691 .vecs = sha512_tv_template, 3692 .count = SHA512_TEST_VECTORS 3693 } 3694 } 3695 }, { 3696 .alg = "tgr128", 3697 .test = alg_test_hash, 3698 .suite = { 3699 .hash = { 3700 .vecs = tgr128_tv_template, 3701 .count = TGR128_TEST_VECTORS 3702 } 3703 } 3704 }, { 3705 .alg = "tgr160", 3706 .test = alg_test_hash, 3707 .suite = { 3708 .hash = { 3709 .vecs = tgr160_tv_template, 3710 .count = TGR160_TEST_VECTORS 3711 } 3712 } 3713 }, { 3714 .alg = "tgr192", 3715 .test = alg_test_hash, 3716 .suite = { 3717 .hash = { 3718 .vecs = tgr192_tv_template, 3719 .count = TGR192_TEST_VECTORS 3720 } 3721 } 3722 }, { 3723 .alg = "vmac(aes)", 3724 .test = alg_test_hash, 3725 .suite = { 3726 .hash = { 3727 .vecs = aes_vmac128_tv_template, 3728 .count = VMAC_AES_TEST_VECTORS 3729 } 3730 } 3731 }, { 3732 .alg = "wp256", 3733 .test = alg_test_hash, 3734 .suite = { 3735 .hash = { 3736 .vecs = wp256_tv_template, 3737 .count = WP256_TEST_VECTORS 3738 } 3739 } 3740 }, { 3741 .alg = "wp384", 3742 .test = alg_test_hash, 3743 .suite = { 3744 .hash = { 3745 .vecs = wp384_tv_template, 3746 .count = WP384_TEST_VECTORS 3747 } 3748 } 3749 }, { 3750 .alg = "wp512", 3751 .test = alg_test_hash, 3752 .suite = { 3753 .hash = { 3754 .vecs = wp512_tv_template, 3755 .count = WP512_TEST_VECTORS 3756 } 3757 } 3758 }, { 3759 .alg = "xcbc(aes)", 3760 .test = alg_test_hash, 3761 .suite = { 3762 .hash = { 3763 .vecs = aes_xcbc128_tv_template, 3764 .count = XCBC_AES_TEST_VECTORS 3765 } 3766 } 3767 }, { 3768 .alg = "xts(aes)", 3769 .test = alg_test_skcipher, 3770 .fips_allowed = 1, 3771 .suite = { 3772 .cipher = { 3773 .enc = { 3774 .vecs = aes_xts_enc_tv_template, 3775 .count = AES_XTS_ENC_TEST_VECTORS 3776 }, 3777 .dec = { 3778 .vecs = aes_xts_dec_tv_template, 3779 .count = AES_XTS_DEC_TEST_VECTORS 3780 } 3781 } 3782 } 3783 }, { 3784 .alg = "xts(camellia)", 3785 .test = alg_test_skcipher, 3786 .suite = { 3787 .cipher = { 3788 .enc = { 3789 .vecs = camellia_xts_enc_tv_template, 3790 .count = CAMELLIA_XTS_ENC_TEST_VECTORS 3791 }, 3792 .dec = { 3793 .vecs = camellia_xts_dec_tv_template, 3794 .count = CAMELLIA_XTS_DEC_TEST_VECTORS 3795 } 3796 } 3797 } 3798 }, { 3799 .alg = "xts(cast6)", 3800 .test = alg_test_skcipher, 3801 .suite = { 3802 .cipher = { 3803 .enc = { 3804 .vecs = cast6_xts_enc_tv_template, 3805 .count = CAST6_XTS_ENC_TEST_VECTORS 3806 }, 3807 .dec = { 3808 .vecs = cast6_xts_dec_tv_template, 3809 .count = CAST6_XTS_DEC_TEST_VECTORS 3810 } 3811 } 3812 } 3813 }, { 3814 .alg = "xts(serpent)", 3815 .test = alg_test_skcipher, 3816 .suite = { 3817 .cipher = { 3818 .enc = { 3819 .vecs = serpent_xts_enc_tv_template, 3820 .count = SERPENT_XTS_ENC_TEST_VECTORS 3821 }, 3822 .dec = { 3823 .vecs = serpent_xts_dec_tv_template, 3824 .count = SERPENT_XTS_DEC_TEST_VECTORS 3825 } 3826 } 3827 } 3828 }, { 3829 .alg = "xts(twofish)", 3830 .test = alg_test_skcipher, 3831 .suite = { 3832 .cipher = { 3833 .enc = { 3834 .vecs = tf_xts_enc_tv_template, 3835 .count = TF_XTS_ENC_TEST_VECTORS 3836 }, 3837 .dec = { 3838 .vecs = tf_xts_dec_tv_template, 3839 .count = TF_XTS_DEC_TEST_VECTORS 3840 } 3841 } 3842 } 3843 }, { 3844 .alg = "zlib", 3845 .test = alg_test_pcomp, 3846 .fips_allowed = 1, 3847 .suite = { 3848 .pcomp = { 3849 .comp = { 3850 .vecs = zlib_comp_tv_template, 3851 .count = ZLIB_COMP_TEST_VECTORS 3852 }, 3853 .decomp = { 3854 .vecs = zlib_decomp_tv_template, 3855 .count = ZLIB_DECOMP_TEST_VECTORS 3856 } 3857 } 3858 } 3859 } 3860 }; 3861 3862 static bool alg_test_descs_checked; 3863 3864 static void alg_test_descs_check_order(void) 3865 { 3866 int i; 3867 3868 /* only check once */ 3869 if (alg_test_descs_checked) 3870 return; 3871 3872 alg_test_descs_checked = true; 3873 3874 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { 3875 int diff = strcmp(alg_test_descs[i - 1].alg, 3876 alg_test_descs[i].alg); 3877 3878 if (WARN_ON(diff > 0)) { 3879 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", 3880 alg_test_descs[i - 1].alg, 3881 alg_test_descs[i].alg); 3882 } 3883 3884 if (WARN_ON(diff == 0)) { 3885 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n", 3886 alg_test_descs[i].alg); 3887 } 3888 } 3889 } 3890 3891 static int alg_find_test(const char *alg) 3892 { 3893 int start = 0; 3894 int end = ARRAY_SIZE(alg_test_descs); 3895 3896 while (start < end) { 3897 int i = (start + end) / 2; 3898 int diff = strcmp(alg_test_descs[i].alg, alg); 3899 3900 if (diff > 0) { 3901 end = i; 3902 continue; 3903 } 3904 3905 if (diff < 0) { 3906 start = i + 1; 3907 continue; 3908 } 3909 3910 return i; 3911 } 3912 3913 return -1; 3914 } 3915 3916 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 3917 { 3918 int i; 3919 int j; 3920 int rc; 3921 3922 alg_test_descs_check_order(); 3923 3924 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { 3925 char nalg[CRYPTO_MAX_ALG_NAME]; 3926 3927 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >= 3928 sizeof(nalg)) 3929 return -ENAMETOOLONG; 3930 3931 i = alg_find_test(nalg); 3932 if (i < 0) 3933 goto notest; 3934 3935 if (fips_enabled && !alg_test_descs[i].fips_allowed) 3936 goto non_fips_alg; 3937 3938 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask); 3939 goto test_done; 3940 } 3941 3942 i = alg_find_test(alg); 3943 j = alg_find_test(driver); 3944 if (i < 0 && j < 0) 3945 goto notest; 3946 3947 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) || 3948 (j >= 0 && !alg_test_descs[j].fips_allowed))) 3949 goto non_fips_alg; 3950 3951 rc = 0; 3952 if (i >= 0) 3953 rc |= alg_test_descs[i].test(alg_test_descs + i, driver, 3954 type, mask); 3955 if (j >= 0 && j != i) 3956 rc |= alg_test_descs[j].test(alg_test_descs + j, driver, 3957 type, mask); 3958 3959 test_done: 3960 if (fips_enabled && rc) 3961 panic("%s: %s alg self test failed in fips mode!\n", driver, alg); 3962 3963 if (fips_enabled && !rc) 3964 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg); 3965 3966 return rc; 3967 3968 notest: 3969 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); 3970 return 0; 3971 non_fips_alg: 3972 return -EINVAL; 3973 } 3974 3975 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */ 3976 3977 EXPORT_SYMBOL_GPL(alg_test); 3978