/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>

#include "internal.h"

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else

#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	struct akcipher_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}
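/*
 * Note on the helpers below: they implement the asynchronous completion
 * pattern used throughout this file.  A request's callback
 * (tcrypt_complete()) stores the final status in a struct tcrypt_result and
 * signals its completion; wait_async_op() then turns an -EINPROGRESS or
 * -EBUSY return value into the real result by waiting on that completion
 * and re-arming it for the next operation.
 */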
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

static int wait_async_op(struct tcrypt_result *tr, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		       unsigned int tcount, bool use_digest,
		       const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char *result;
	char *key;
	struct ahash_request *req;
	struct tcrypt_result tresult;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!result)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_nobuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
			goto out;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		hash_buff = xbuf[0];
		hash_buff += align_offset;

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = wait_async_op(&tresult, crypto_ahash_digest(req));
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		} else {
			ret = wait_async_op(&tresult, crypto_ahash_init(req));
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = wait_async_op(&tresult, crypto_ahash_update(req));
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = wait_async_op(&tresult, crypto_ahash_final(req));
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}
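	/*
	 * Second pass: the same vectors, but with the plaintext split into
	 * template[i].np chunks placed at the cross-page IDX[] offsets, so
	 * that digesting over a multi-entry scatterlist is exercised too.
	 */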
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		temp = 0;
		sg_init_table(sg, template[i].np);
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].plaintext + temp,
					  template[i].tap[k]),
				   template[i].tap[k]);
			temp += template[i].tap[k];
		}

		if (template[i].ksize) {
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			crypto_ahash_clear_flags(tfm, ~0);
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);

			if (ret) {
				printk(KERN_ERR "alg: hash: setkey "
				       "failed on chunking test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_ahash_digest(req);
		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&tresult.completion);
			reinit_completion(&tresult.completion);
			ret = tresult.err;
			if (!ret)
				break;
			/* fall through */
		default:
			printk(KERN_ERR "alg: hash: digest failed "
			       "on chunking test %d for %s: "
			       "ret=%d\n", j, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Chunking test %d "
			       "failed for %s\n", j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}

static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
{
	unsigned int alignmask;
	int ret;

	ret = __test_hash(tfm, template, tcount, use_digest, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_hash(tfm, template, tcount, use_digest, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set.
*/ 418 ret = __test_hash(tfm, template, tcount, use_digest, 419 alignmask + 1); 420 if (ret) 421 return ret; 422 } 423 424 return 0; 425 } 426 427 static int __test_aead(struct crypto_aead *tfm, int enc, 428 struct aead_testvec *template, unsigned int tcount, 429 const bool diff_dst, const int align_offset) 430 { 431 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 432 unsigned int i, j, k, n, temp; 433 int ret = -ENOMEM; 434 char *q; 435 char *key; 436 struct aead_request *req; 437 struct scatterlist *sg; 438 struct scatterlist *sgout; 439 const char *e, *d; 440 struct tcrypt_result result; 441 unsigned int authsize, iv_len; 442 void *input; 443 void *output; 444 void *assoc; 445 char *iv; 446 char *xbuf[XBUFSIZE]; 447 char *xoutbuf[XBUFSIZE]; 448 char *axbuf[XBUFSIZE]; 449 450 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 451 if (!iv) 452 return ret; 453 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 454 if (!key) 455 goto out_noxbuf; 456 if (testmgr_alloc_buf(xbuf)) 457 goto out_noxbuf; 458 if (testmgr_alloc_buf(axbuf)) 459 goto out_noaxbuf; 460 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 461 goto out_nooutbuf; 462 463 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 464 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL); 465 if (!sg) 466 goto out_nosg; 467 sgout = &sg[16]; 468 469 if (diff_dst) 470 d = "-ddst"; 471 else 472 d = ""; 473 474 if (enc == ENCRYPT) 475 e = "encryption"; 476 else 477 e = "decryption"; 478 479 init_completion(&result.completion); 480 481 req = aead_request_alloc(tfm, GFP_KERNEL); 482 if (!req) { 483 pr_err("alg: aead%s: Failed to allocate request for %s\n", 484 d, algo); 485 goto out; 486 } 487 488 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 489 tcrypt_complete, &result); 490 491 for (i = 0, j = 0; i < tcount; i++) { 492 if (template[i].np) 493 continue; 494 495 j++; 496 497 /* some templates have no input data but they will 498 * touch input 499 */ 500 input = xbuf[0]; 501 input += align_offset; 502 assoc = axbuf[0]; 503 504 ret = -EINVAL; 505 if (WARN_ON(align_offset + template[i].ilen > 506 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 507 goto out; 508 509 memcpy(input, template[i].input, template[i].ilen); 510 memcpy(assoc, template[i].assoc, template[i].alen); 511 iv_len = crypto_aead_ivsize(tfm); 512 if (template[i].iv) 513 memcpy(iv, template[i].iv, iv_len); 514 else 515 memset(iv, 0, iv_len); 516 517 crypto_aead_clear_flags(tfm, ~0); 518 if (template[i].wk) 519 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 520 521 if (template[i].klen > MAX_KEYLEN) { 522 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 523 d, j, algo, template[i].klen, 524 MAX_KEYLEN); 525 ret = -EINVAL; 526 goto out; 527 } 528 memcpy(key, template[i].key, template[i].klen); 529 530 ret = crypto_aead_setkey(tfm, key, template[i].klen); 531 if (!ret == template[i].fail) { 532 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 533 d, j, algo, crypto_aead_get_flags(tfm)); 534 goto out; 535 } else if (ret) 536 continue; 537 538 authsize = abs(template[i].rlen - template[i].ilen); 539 ret = crypto_aead_setauthsize(tfm, authsize); 540 if (ret) { 541 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 542 d, authsize, j, algo); 543 goto out; 544 } 545 546 k = !!template[i].alen; 547 sg_init_table(sg, k + 1); 548 sg_set_buf(&sg[0], assoc, template[i].alen); 549 sg_set_buf(&sg[k], input, 550 template[i].ilen + (enc ? 
authsize : 0)); 551 output = input; 552 553 if (diff_dst) { 554 sg_init_table(sgout, k + 1); 555 sg_set_buf(&sgout[0], assoc, template[i].alen); 556 557 output = xoutbuf[0]; 558 output += align_offset; 559 sg_set_buf(&sgout[k], output, 560 template[i].rlen + (enc ? 0 : authsize)); 561 } 562 563 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 564 template[i].ilen, iv); 565 566 aead_request_set_ad(req, template[i].alen); 567 568 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 569 570 switch (ret) { 571 case 0: 572 if (template[i].novrfy) { 573 /* verification was supposed to fail */ 574 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 575 d, e, j, algo); 576 /* so really, we got a bad message */ 577 ret = -EBADMSG; 578 goto out; 579 } 580 break; 581 case -EINPROGRESS: 582 case -EBUSY: 583 wait_for_completion(&result.completion); 584 reinit_completion(&result.completion); 585 ret = result.err; 586 if (!ret) 587 break; 588 case -EBADMSG: 589 if (template[i].novrfy) 590 /* verification failure was expected */ 591 continue; 592 /* fall through */ 593 default: 594 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 595 d, e, j, algo, -ret); 596 goto out; 597 } 598 599 q = output; 600 if (memcmp(q, template[i].result, template[i].rlen)) { 601 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 602 d, j, e, algo); 603 hexdump(q, template[i].rlen); 604 ret = -EINVAL; 605 goto out; 606 } 607 } 608 609 for (i = 0, j = 0; i < tcount; i++) { 610 /* alignment tests are only done with continuous buffers */ 611 if (align_offset != 0) 612 break; 613 614 if (!template[i].np) 615 continue; 616 617 j++; 618 619 if (template[i].iv) 620 memcpy(iv, template[i].iv, MAX_IVLEN); 621 else 622 memset(iv, 0, MAX_IVLEN); 623 624 crypto_aead_clear_flags(tfm, ~0); 625 if (template[i].wk) 626 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 627 if (template[i].klen > MAX_KEYLEN) { 628 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 629 d, j, algo, template[i].klen, MAX_KEYLEN); 630 ret = -EINVAL; 631 goto out; 632 } 633 memcpy(key, template[i].key, template[i].klen); 634 635 ret = crypto_aead_setkey(tfm, key, template[i].klen); 636 if (!ret == template[i].fail) { 637 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 638 d, j, algo, crypto_aead_get_flags(tfm)); 639 goto out; 640 } else if (ret) 641 continue; 642 643 authsize = abs(template[i].rlen - template[i].ilen); 644 645 ret = -EINVAL; 646 sg_init_table(sg, template[i].anp + template[i].np); 647 if (diff_dst) 648 sg_init_table(sgout, template[i].anp + template[i].np); 649 650 ret = -EINVAL; 651 for (k = 0, temp = 0; k < template[i].anp; k++) { 652 if (WARN_ON(offset_in_page(IDX[k]) + 653 template[i].atap[k] > PAGE_SIZE)) 654 goto out; 655 sg_set_buf(&sg[k], 656 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 657 offset_in_page(IDX[k]), 658 template[i].assoc + temp, 659 template[i].atap[k]), 660 template[i].atap[k]); 661 if (diff_dst) 662 sg_set_buf(&sgout[k], 663 axbuf[IDX[k] >> PAGE_SHIFT] + 664 offset_in_page(IDX[k]), 665 template[i].atap[k]); 666 temp += template[i].atap[k]; 667 } 668 669 for (k = 0, temp = 0; k < template[i].np; k++) { 670 if (WARN_ON(offset_in_page(IDX[k]) + 671 template[i].tap[k] > PAGE_SIZE)) 672 goto out; 673 674 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 675 memcpy(q, template[i].input + temp, template[i].tap[k]); 676 sg_set_buf(&sg[template[i].anp + k], 677 q, template[i].tap[k]); 678 679 if (diff_dst) { 680 q = 
xoutbuf[IDX[k] >> PAGE_SHIFT] + 681 offset_in_page(IDX[k]); 682 683 memset(q, 0, template[i].tap[k]); 684 685 sg_set_buf(&sgout[template[i].anp + k], 686 q, template[i].tap[k]); 687 } 688 689 n = template[i].tap[k]; 690 if (k == template[i].np - 1 && enc) 691 n += authsize; 692 if (offset_in_page(q) + n < PAGE_SIZE) 693 q[n] = 0; 694 695 temp += template[i].tap[k]; 696 } 697 698 ret = crypto_aead_setauthsize(tfm, authsize); 699 if (ret) { 700 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 701 d, authsize, j, algo); 702 goto out; 703 } 704 705 if (enc) { 706 if (WARN_ON(sg[template[i].anp + k - 1].offset + 707 sg[template[i].anp + k - 1].length + 708 authsize > PAGE_SIZE)) { 709 ret = -EINVAL; 710 goto out; 711 } 712 713 if (diff_dst) 714 sgout[template[i].anp + k - 1].length += 715 authsize; 716 sg[template[i].anp + k - 1].length += authsize; 717 } 718 719 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 720 template[i].ilen, 721 iv); 722 723 aead_request_set_ad(req, template[i].alen); 724 725 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 726 727 switch (ret) { 728 case 0: 729 if (template[i].novrfy) { 730 /* verification was supposed to fail */ 731 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 732 d, e, j, algo); 733 /* so really, we got a bad message */ 734 ret = -EBADMSG; 735 goto out; 736 } 737 break; 738 case -EINPROGRESS: 739 case -EBUSY: 740 wait_for_completion(&result.completion); 741 reinit_completion(&result.completion); 742 ret = result.err; 743 if (!ret) 744 break; 745 case -EBADMSG: 746 if (template[i].novrfy) 747 /* verification failure was expected */ 748 continue; 749 /* fall through */ 750 default: 751 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 752 d, e, j, algo, -ret); 753 goto out; 754 } 755 756 ret = -EINVAL; 757 for (k = 0, temp = 0; k < template[i].np; k++) { 758 if (diff_dst) 759 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 760 offset_in_page(IDX[k]); 761 else 762 q = xbuf[IDX[k] >> PAGE_SHIFT] + 763 offset_in_page(IDX[k]); 764 765 n = template[i].tap[k]; 766 if (k == template[i].np - 1) 767 n += enc ? 
authsize : -authsize; 768 769 if (memcmp(q, template[i].result + temp, n)) { 770 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 771 d, j, e, k, algo); 772 hexdump(q, n); 773 goto out; 774 } 775 776 q += n; 777 if (k == template[i].np - 1 && !enc) { 778 if (!diff_dst && 779 memcmp(q, template[i].input + 780 temp + n, authsize)) 781 n = authsize; 782 else 783 n = 0; 784 } else { 785 for (n = 0; offset_in_page(q + n) && q[n]; n++) 786 ; 787 } 788 if (n) { 789 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 790 d, j, e, k, algo, n); 791 hexdump(q, n); 792 goto out; 793 } 794 795 temp += template[i].tap[k]; 796 } 797 } 798 799 ret = 0; 800 801 out: 802 aead_request_free(req); 803 kfree(sg); 804 out_nosg: 805 if (diff_dst) 806 testmgr_free_buf(xoutbuf); 807 out_nooutbuf: 808 testmgr_free_buf(axbuf); 809 out_noaxbuf: 810 testmgr_free_buf(xbuf); 811 out_noxbuf: 812 kfree(key); 813 kfree(iv); 814 return ret; 815 } 816 817 static int test_aead(struct crypto_aead *tfm, int enc, 818 struct aead_testvec *template, unsigned int tcount) 819 { 820 unsigned int alignmask; 821 int ret; 822 823 /* test 'dst == src' case */ 824 ret = __test_aead(tfm, enc, template, tcount, false, 0); 825 if (ret) 826 return ret; 827 828 /* test 'dst != src' case */ 829 ret = __test_aead(tfm, enc, template, tcount, true, 0); 830 if (ret) 831 return ret; 832 833 /* test unaligned buffers, check with one byte offset */ 834 ret = __test_aead(tfm, enc, template, tcount, true, 1); 835 if (ret) 836 return ret; 837 838 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 839 if (alignmask) { 840 /* Check if alignment mask for tfm is correctly set. */ 841 ret = __test_aead(tfm, enc, template, tcount, true, 842 alignmask + 1); 843 if (ret) 844 return ret; 845 } 846 847 return 0; 848 } 849 850 static int test_cipher(struct crypto_cipher *tfm, int enc, 851 struct cipher_testvec *template, unsigned int tcount) 852 { 853 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 854 unsigned int i, j, k; 855 char *q; 856 const char *e; 857 void *data; 858 char *xbuf[XBUFSIZE]; 859 int ret = -ENOMEM; 860 861 if (testmgr_alloc_buf(xbuf)) 862 goto out_nobuf; 863 864 if (enc == ENCRYPT) 865 e = "encryption"; 866 else 867 e = "decryption"; 868 869 j = 0; 870 for (i = 0; i < tcount; i++) { 871 if (template[i].np) 872 continue; 873 874 j++; 875 876 ret = -EINVAL; 877 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 878 goto out; 879 880 data = xbuf[0]; 881 memcpy(data, template[i].input, template[i].ilen); 882 883 crypto_cipher_clear_flags(tfm, ~0); 884 if (template[i].wk) 885 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 886 887 ret = crypto_cipher_setkey(tfm, template[i].key, 888 template[i].klen); 889 if (!ret == template[i].fail) { 890 printk(KERN_ERR "alg: cipher: setkey failed " 891 "on test %d for %s: flags=%x\n", j, 892 algo, crypto_cipher_get_flags(tfm)); 893 goto out; 894 } else if (ret) 895 continue; 896 897 for (k = 0; k < template[i].ilen; 898 k += crypto_cipher_blocksize(tfm)) { 899 if (enc) 900 crypto_cipher_encrypt_one(tfm, data + k, 901 data + k); 902 else 903 crypto_cipher_decrypt_one(tfm, data + k, 904 data + k); 905 } 906 907 q = data; 908 if (memcmp(q, template[i].result, template[i].rlen)) { 909 printk(KERN_ERR "alg: cipher: Test %d failed " 910 "on %s for %s\n", j, e, algo); 911 hexdump(q, template[i].rlen); 912 ret = -EINVAL; 913 goto out; 914 } 915 } 916 917 ret = 0; 918 919 out: 920 testmgr_free_buf(xbuf); 921 out_nobuf: 922 return 
ret; 923 } 924 925 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 926 struct cipher_testvec *template, unsigned int tcount, 927 const bool diff_dst, const int align_offset) 928 { 929 const char *algo = 930 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 931 unsigned int i, j, k, n, temp; 932 char *q; 933 struct skcipher_request *req; 934 struct scatterlist sg[8]; 935 struct scatterlist sgout[8]; 936 const char *e, *d; 937 struct tcrypt_result result; 938 void *data; 939 char iv[MAX_IVLEN]; 940 char *xbuf[XBUFSIZE]; 941 char *xoutbuf[XBUFSIZE]; 942 int ret = -ENOMEM; 943 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 944 945 if (testmgr_alloc_buf(xbuf)) 946 goto out_nobuf; 947 948 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 949 goto out_nooutbuf; 950 951 if (diff_dst) 952 d = "-ddst"; 953 else 954 d = ""; 955 956 if (enc == ENCRYPT) 957 e = "encryption"; 958 else 959 e = "decryption"; 960 961 init_completion(&result.completion); 962 963 req = skcipher_request_alloc(tfm, GFP_KERNEL); 964 if (!req) { 965 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 966 d, algo); 967 goto out; 968 } 969 970 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 971 tcrypt_complete, &result); 972 973 j = 0; 974 for (i = 0; i < tcount; i++) { 975 if (template[i].np && !template[i].also_non_np) 976 continue; 977 978 if (template[i].iv) 979 memcpy(iv, template[i].iv, ivsize); 980 else 981 memset(iv, 0, MAX_IVLEN); 982 983 j++; 984 ret = -EINVAL; 985 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE)) 986 goto out; 987 988 data = xbuf[0]; 989 data += align_offset; 990 memcpy(data, template[i].input, template[i].ilen); 991 992 crypto_skcipher_clear_flags(tfm, ~0); 993 if (template[i].wk) 994 crypto_skcipher_set_flags(tfm, 995 CRYPTO_TFM_REQ_WEAK_KEY); 996 997 ret = crypto_skcipher_setkey(tfm, template[i].key, 998 template[i].klen); 999 if (!ret == template[i].fail) { 1000 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 1001 d, j, algo, crypto_skcipher_get_flags(tfm)); 1002 goto out; 1003 } else if (ret) 1004 continue; 1005 1006 sg_init_one(&sg[0], data, template[i].ilen); 1007 if (diff_dst) { 1008 data = xoutbuf[0]; 1009 data += align_offset; 1010 sg_init_one(&sgout[0], data, template[i].ilen); 1011 } 1012 1013 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1014 template[i].ilen, iv); 1015 ret = enc ? 
crypto_skcipher_encrypt(req) : 1016 crypto_skcipher_decrypt(req); 1017 1018 switch (ret) { 1019 case 0: 1020 break; 1021 case -EINPROGRESS: 1022 case -EBUSY: 1023 wait_for_completion(&result.completion); 1024 reinit_completion(&result.completion); 1025 ret = result.err; 1026 if (!ret) 1027 break; 1028 /* fall through */ 1029 default: 1030 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 1031 d, e, j, algo, -ret); 1032 goto out; 1033 } 1034 1035 q = data; 1036 if (memcmp(q, template[i].result, template[i].rlen)) { 1037 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n", 1038 d, j, e, algo); 1039 hexdump(q, template[i].rlen); 1040 ret = -EINVAL; 1041 goto out; 1042 } 1043 } 1044 1045 j = 0; 1046 for (i = 0; i < tcount; i++) { 1047 /* alignment tests are only done with continuous buffers */ 1048 if (align_offset != 0) 1049 break; 1050 1051 if (!template[i].np) 1052 continue; 1053 1054 if (template[i].iv) 1055 memcpy(iv, template[i].iv, ivsize); 1056 else 1057 memset(iv, 0, MAX_IVLEN); 1058 1059 j++; 1060 crypto_skcipher_clear_flags(tfm, ~0); 1061 if (template[i].wk) 1062 crypto_skcipher_set_flags(tfm, 1063 CRYPTO_TFM_REQ_WEAK_KEY); 1064 1065 ret = crypto_skcipher_setkey(tfm, template[i].key, 1066 template[i].klen); 1067 if (!ret == template[i].fail) { 1068 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1069 d, j, algo, crypto_skcipher_get_flags(tfm)); 1070 goto out; 1071 } else if (ret) 1072 continue; 1073 1074 temp = 0; 1075 ret = -EINVAL; 1076 sg_init_table(sg, template[i].np); 1077 if (diff_dst) 1078 sg_init_table(sgout, template[i].np); 1079 for (k = 0; k < template[i].np; k++) { 1080 if (WARN_ON(offset_in_page(IDX[k]) + 1081 template[i].tap[k] > PAGE_SIZE)) 1082 goto out; 1083 1084 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 1085 1086 memcpy(q, template[i].input + temp, template[i].tap[k]); 1087 1088 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 1089 q[template[i].tap[k]] = 0; 1090 1091 sg_set_buf(&sg[k], q, template[i].tap[k]); 1092 if (diff_dst) { 1093 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1094 offset_in_page(IDX[k]); 1095 1096 sg_set_buf(&sgout[k], q, template[i].tap[k]); 1097 1098 memset(q, 0, template[i].tap[k]); 1099 if (offset_in_page(q) + 1100 template[i].tap[k] < PAGE_SIZE) 1101 q[template[i].tap[k]] = 0; 1102 } 1103 1104 temp += template[i].tap[k]; 1105 } 1106 1107 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1108 template[i].ilen, iv); 1109 1110 ret = enc ? 
crypto_skcipher_encrypt(req) : 1111 crypto_skcipher_decrypt(req); 1112 1113 switch (ret) { 1114 case 0: 1115 break; 1116 case -EINPROGRESS: 1117 case -EBUSY: 1118 wait_for_completion(&result.completion); 1119 reinit_completion(&result.completion); 1120 ret = result.err; 1121 if (!ret) 1122 break; 1123 /* fall through */ 1124 default: 1125 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1126 d, e, j, algo, -ret); 1127 goto out; 1128 } 1129 1130 temp = 0; 1131 ret = -EINVAL; 1132 for (k = 0; k < template[i].np; k++) { 1133 if (diff_dst) 1134 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1135 offset_in_page(IDX[k]); 1136 else 1137 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1138 offset_in_page(IDX[k]); 1139 1140 if (memcmp(q, template[i].result + temp, 1141 template[i].tap[k])) { 1142 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1143 d, j, e, k, algo); 1144 hexdump(q, template[i].tap[k]); 1145 goto out; 1146 } 1147 1148 q += template[i].tap[k]; 1149 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1150 ; 1151 if (n) { 1152 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1153 d, j, e, k, algo, n); 1154 hexdump(q, n); 1155 goto out; 1156 } 1157 temp += template[i].tap[k]; 1158 } 1159 } 1160 1161 ret = 0; 1162 1163 out: 1164 skcipher_request_free(req); 1165 if (diff_dst) 1166 testmgr_free_buf(xoutbuf); 1167 out_nooutbuf: 1168 testmgr_free_buf(xbuf); 1169 out_nobuf: 1170 return ret; 1171 } 1172 1173 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 1174 struct cipher_testvec *template, unsigned int tcount) 1175 { 1176 unsigned int alignmask; 1177 int ret; 1178 1179 /* test 'dst == src' case */ 1180 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1181 if (ret) 1182 return ret; 1183 1184 /* test 'dst != src' case */ 1185 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1186 if (ret) 1187 return ret; 1188 1189 /* test unaligned buffers, check with one byte offset */ 1190 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1191 if (ret) 1192 return ret; 1193 1194 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1195 if (alignmask) { 1196 /* Check if alignment mask for tfm is correctly set. 
*/ 1197 ret = __test_skcipher(tfm, enc, template, tcount, true, 1198 alignmask + 1); 1199 if (ret) 1200 return ret; 1201 } 1202 1203 return 0; 1204 } 1205 1206 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 1207 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1208 { 1209 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1210 unsigned int i; 1211 char result[COMP_BUF_SIZE]; 1212 int ret; 1213 1214 for (i = 0; i < ctcount; i++) { 1215 int ilen; 1216 unsigned int dlen = COMP_BUF_SIZE; 1217 1218 memset(result, 0, sizeof (result)); 1219 1220 ilen = ctemplate[i].inlen; 1221 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1222 ilen, result, &dlen); 1223 if (ret) { 1224 printk(KERN_ERR "alg: comp: compression failed " 1225 "on test %d for %s: ret=%d\n", i + 1, algo, 1226 -ret); 1227 goto out; 1228 } 1229 1230 if (dlen != ctemplate[i].outlen) { 1231 printk(KERN_ERR "alg: comp: Compression test %d " 1232 "failed for %s: output len = %d\n", i + 1, algo, 1233 dlen); 1234 ret = -EINVAL; 1235 goto out; 1236 } 1237 1238 if (memcmp(result, ctemplate[i].output, dlen)) { 1239 printk(KERN_ERR "alg: comp: Compression test %d " 1240 "failed for %s\n", i + 1, algo); 1241 hexdump(result, dlen); 1242 ret = -EINVAL; 1243 goto out; 1244 } 1245 } 1246 1247 for (i = 0; i < dtcount; i++) { 1248 int ilen; 1249 unsigned int dlen = COMP_BUF_SIZE; 1250 1251 memset(result, 0, sizeof (result)); 1252 1253 ilen = dtemplate[i].inlen; 1254 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1255 ilen, result, &dlen); 1256 if (ret) { 1257 printk(KERN_ERR "alg: comp: decompression failed " 1258 "on test %d for %s: ret=%d\n", i + 1, algo, 1259 -ret); 1260 goto out; 1261 } 1262 1263 if (dlen != dtemplate[i].outlen) { 1264 printk(KERN_ERR "alg: comp: Decompression test %d " 1265 "failed for %s: output len = %d\n", i + 1, algo, 1266 dlen); 1267 ret = -EINVAL; 1268 goto out; 1269 } 1270 1271 if (memcmp(result, dtemplate[i].output, dlen)) { 1272 printk(KERN_ERR "alg: comp: Decompression test %d " 1273 "failed for %s\n", i + 1, algo); 1274 hexdump(result, dlen); 1275 ret = -EINVAL; 1276 goto out; 1277 } 1278 } 1279 1280 ret = 0; 1281 1282 out: 1283 return ret; 1284 } 1285 1286 static int test_pcomp(struct crypto_pcomp *tfm, 1287 struct pcomp_testvec *ctemplate, 1288 struct pcomp_testvec *dtemplate, int ctcount, 1289 int dtcount) 1290 { 1291 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm)); 1292 unsigned int i; 1293 char result[COMP_BUF_SIZE]; 1294 int res; 1295 1296 for (i = 0; i < ctcount; i++) { 1297 struct comp_request req; 1298 unsigned int produced = 0; 1299 1300 res = crypto_compress_setup(tfm, ctemplate[i].params, 1301 ctemplate[i].paramsize); 1302 if (res) { 1303 pr_err("alg: pcomp: compression setup failed on test " 1304 "%d for %s: error=%d\n", i + 1, algo, res); 1305 return res; 1306 } 1307 1308 res = crypto_compress_init(tfm); 1309 if (res) { 1310 pr_err("alg: pcomp: compression init failed on test " 1311 "%d for %s: error=%d\n", i + 1, algo, res); 1312 return res; 1313 } 1314 1315 memset(result, 0, sizeof(result)); 1316 1317 req.next_in = ctemplate[i].input; 1318 req.avail_in = ctemplate[i].inlen / 2; 1319 req.next_out = result; 1320 req.avail_out = ctemplate[i].outlen / 2; 1321 1322 res = crypto_compress_update(tfm, &req); 1323 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1324 pr_err("alg: pcomp: compression update failed on test " 1325 "%d for %s: error=%d\n", i + 1, algo, res); 1326 return res; 1327 } 1328 if (res > 0) 1329 
produced += res; 1330 1331 /* Add remaining input data */ 1332 req.avail_in += (ctemplate[i].inlen + 1) / 2; 1333 1334 res = crypto_compress_update(tfm, &req); 1335 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1336 pr_err("alg: pcomp: compression update failed on test " 1337 "%d for %s: error=%d\n", i + 1, algo, res); 1338 return res; 1339 } 1340 if (res > 0) 1341 produced += res; 1342 1343 /* Provide remaining output space */ 1344 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2; 1345 1346 res = crypto_compress_final(tfm, &req); 1347 if (res < 0) { 1348 pr_err("alg: pcomp: compression final failed on test " 1349 "%d for %s: error=%d\n", i + 1, algo, res); 1350 return res; 1351 } 1352 produced += res; 1353 1354 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) { 1355 pr_err("alg: comp: Compression test %d failed for %s: " 1356 "output len = %d (expected %d)\n", i + 1, algo, 1357 COMP_BUF_SIZE - req.avail_out, 1358 ctemplate[i].outlen); 1359 return -EINVAL; 1360 } 1361 1362 if (produced != ctemplate[i].outlen) { 1363 pr_err("alg: comp: Compression test %d failed for %s: " 1364 "returned len = %u (expected %d)\n", i + 1, 1365 algo, produced, ctemplate[i].outlen); 1366 return -EINVAL; 1367 } 1368 1369 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) { 1370 pr_err("alg: pcomp: Compression test %d failed for " 1371 "%s\n", i + 1, algo); 1372 hexdump(result, ctemplate[i].outlen); 1373 return -EINVAL; 1374 } 1375 } 1376 1377 for (i = 0; i < dtcount; i++) { 1378 struct comp_request req; 1379 unsigned int produced = 0; 1380 1381 res = crypto_decompress_setup(tfm, dtemplate[i].params, 1382 dtemplate[i].paramsize); 1383 if (res) { 1384 pr_err("alg: pcomp: decompression setup failed on " 1385 "test %d for %s: error=%d\n", i + 1, algo, res); 1386 return res; 1387 } 1388 1389 res = crypto_decompress_init(tfm); 1390 if (res) { 1391 pr_err("alg: pcomp: decompression init failed on test " 1392 "%d for %s: error=%d\n", i + 1, algo, res); 1393 return res; 1394 } 1395 1396 memset(result, 0, sizeof(result)); 1397 1398 req.next_in = dtemplate[i].input; 1399 req.avail_in = dtemplate[i].inlen / 2; 1400 req.next_out = result; 1401 req.avail_out = dtemplate[i].outlen / 2; 1402 1403 res = crypto_decompress_update(tfm, &req); 1404 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1405 pr_err("alg: pcomp: decompression update failed on " 1406 "test %d for %s: error=%d\n", i + 1, algo, res); 1407 return res; 1408 } 1409 if (res > 0) 1410 produced += res; 1411 1412 /* Add remaining input data */ 1413 req.avail_in += (dtemplate[i].inlen + 1) / 2; 1414 1415 res = crypto_decompress_update(tfm, &req); 1416 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1417 pr_err("alg: pcomp: decompression update failed on " 1418 "test %d for %s: error=%d\n", i + 1, algo, res); 1419 return res; 1420 } 1421 if (res > 0) 1422 produced += res; 1423 1424 /* Provide remaining output space */ 1425 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2; 1426 1427 res = crypto_decompress_final(tfm, &req); 1428 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1429 pr_err("alg: pcomp: decompression final failed on " 1430 "test %d for %s: error=%d\n", i + 1, algo, res); 1431 return res; 1432 } 1433 if (res > 0) 1434 produced += res; 1435 1436 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) { 1437 pr_err("alg: comp: Decompression test %d failed for " 1438 "%s: output len = %d (expected %d)\n", i + 1, 1439 algo, COMP_BUF_SIZE - req.avail_out, 1440 dtemplate[i].outlen); 1441 return -EINVAL; 1442 } 
1443 1444 if (produced != dtemplate[i].outlen) { 1445 pr_err("alg: comp: Decompression test %d failed for " 1446 "%s: returned len = %u (expected %d)\n", i + 1, 1447 algo, produced, dtemplate[i].outlen); 1448 return -EINVAL; 1449 } 1450 1451 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) { 1452 pr_err("alg: pcomp: Decompression test %d failed for " 1453 "%s\n", i + 1, algo); 1454 hexdump(result, dtemplate[i].outlen); 1455 return -EINVAL; 1456 } 1457 } 1458 1459 return 0; 1460 } 1461 1462 1463 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1464 unsigned int tcount) 1465 { 1466 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1467 int err = 0, i, j, seedsize; 1468 u8 *seed; 1469 char result[32]; 1470 1471 seedsize = crypto_rng_seedsize(tfm); 1472 1473 seed = kmalloc(seedsize, GFP_KERNEL); 1474 if (!seed) { 1475 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1476 "for %s\n", algo); 1477 return -ENOMEM; 1478 } 1479 1480 for (i = 0; i < tcount; i++) { 1481 memset(result, 0, 32); 1482 1483 memcpy(seed, template[i].v, template[i].vlen); 1484 memcpy(seed + template[i].vlen, template[i].key, 1485 template[i].klen); 1486 memcpy(seed + template[i].vlen + template[i].klen, 1487 template[i].dt, template[i].dtlen); 1488 1489 err = crypto_rng_reset(tfm, seed, seedsize); 1490 if (err) { 1491 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1492 "for %s\n", algo); 1493 goto out; 1494 } 1495 1496 for (j = 0; j < template[i].loops; j++) { 1497 err = crypto_rng_get_bytes(tfm, result, 1498 template[i].rlen); 1499 if (err < 0) { 1500 printk(KERN_ERR "alg: cprng: Failed to obtain " 1501 "the correct amount of random data for " 1502 "%s (requested %d)\n", algo, 1503 template[i].rlen); 1504 goto out; 1505 } 1506 } 1507 1508 err = memcmp(result, template[i].result, 1509 template[i].rlen); 1510 if (err) { 1511 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1512 i, algo); 1513 hexdump(result, template[i].rlen); 1514 err = -EINVAL; 1515 goto out; 1516 } 1517 } 1518 1519 out: 1520 kfree(seed); 1521 return err; 1522 } 1523 1524 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1525 u32 type, u32 mask) 1526 { 1527 struct crypto_aead *tfm; 1528 int err = 0; 1529 1530 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask); 1531 if (IS_ERR(tfm)) { 1532 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1533 "%ld\n", driver, PTR_ERR(tfm)); 1534 return PTR_ERR(tfm); 1535 } 1536 1537 if (desc->suite.aead.enc.vecs) { 1538 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1539 desc->suite.aead.enc.count); 1540 if (err) 1541 goto out; 1542 } 1543 1544 if (!err && desc->suite.aead.dec.vecs) 1545 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1546 desc->suite.aead.dec.count); 1547 1548 out: 1549 crypto_free_aead(tfm); 1550 return err; 1551 } 1552 1553 static int alg_test_cipher(const struct alg_test_desc *desc, 1554 const char *driver, u32 type, u32 mask) 1555 { 1556 struct crypto_cipher *tfm; 1557 int err = 0; 1558 1559 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1560 if (IS_ERR(tfm)) { 1561 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1562 "%s: %ld\n", driver, PTR_ERR(tfm)); 1563 return PTR_ERR(tfm); 1564 } 1565 1566 if (desc->suite.cipher.enc.vecs) { 1567 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1568 desc->suite.cipher.enc.count); 1569 if (err) 1570 goto out; 1571 } 1572 1573 if (desc->suite.cipher.dec.vecs) 1574 
err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1575 desc->suite.cipher.dec.count); 1576 1577 out: 1578 crypto_free_cipher(tfm); 1579 return err; 1580 } 1581 1582 static int alg_test_skcipher(const struct alg_test_desc *desc, 1583 const char *driver, u32 type, u32 mask) 1584 { 1585 struct crypto_skcipher *tfm; 1586 int err = 0; 1587 1588 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1589 if (IS_ERR(tfm)) { 1590 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1591 "%s: %ld\n", driver, PTR_ERR(tfm)); 1592 return PTR_ERR(tfm); 1593 } 1594 1595 if (desc->suite.cipher.enc.vecs) { 1596 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1597 desc->suite.cipher.enc.count); 1598 if (err) 1599 goto out; 1600 } 1601 1602 if (desc->suite.cipher.dec.vecs) 1603 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1604 desc->suite.cipher.dec.count); 1605 1606 out: 1607 crypto_free_skcipher(tfm); 1608 return err; 1609 } 1610 1611 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1612 u32 type, u32 mask) 1613 { 1614 struct crypto_comp *tfm; 1615 int err; 1616 1617 tfm = crypto_alloc_comp(driver, type, mask); 1618 if (IS_ERR(tfm)) { 1619 printk(KERN_ERR "alg: comp: Failed to load transform for %s: " 1620 "%ld\n", driver, PTR_ERR(tfm)); 1621 return PTR_ERR(tfm); 1622 } 1623 1624 err = test_comp(tfm, desc->suite.comp.comp.vecs, 1625 desc->suite.comp.decomp.vecs, 1626 desc->suite.comp.comp.count, 1627 desc->suite.comp.decomp.count); 1628 1629 crypto_free_comp(tfm); 1630 return err; 1631 } 1632 1633 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver, 1634 u32 type, u32 mask) 1635 { 1636 struct crypto_pcomp *tfm; 1637 int err; 1638 1639 tfm = crypto_alloc_pcomp(driver, type, mask); 1640 if (IS_ERR(tfm)) { 1641 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n", 1642 driver, PTR_ERR(tfm)); 1643 return PTR_ERR(tfm); 1644 } 1645 1646 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs, 1647 desc->suite.pcomp.decomp.vecs, 1648 desc->suite.pcomp.comp.count, 1649 desc->suite.pcomp.decomp.count); 1650 1651 crypto_free_pcomp(tfm); 1652 return err; 1653 } 1654 1655 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1656 u32 type, u32 mask) 1657 { 1658 struct crypto_ahash *tfm; 1659 int err; 1660 1661 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask); 1662 if (IS_ERR(tfm)) { 1663 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1664 "%ld\n", driver, PTR_ERR(tfm)); 1665 return PTR_ERR(tfm); 1666 } 1667 1668 err = test_hash(tfm, desc->suite.hash.vecs, 1669 desc->suite.hash.count, true); 1670 if (!err) 1671 err = test_hash(tfm, desc->suite.hash.vecs, 1672 desc->suite.hash.count, false); 1673 1674 crypto_free_ahash(tfm); 1675 return err; 1676 } 1677 1678 static int alg_test_crc32c(const struct alg_test_desc *desc, 1679 const char *driver, u32 type, u32 mask) 1680 { 1681 struct crypto_shash *tfm; 1682 u32 val; 1683 int err; 1684 1685 err = alg_test_hash(desc, driver, type, mask); 1686 if (err) 1687 goto out; 1688 1689 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask); 1690 if (IS_ERR(tfm)) { 1691 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1692 "%ld\n", driver, PTR_ERR(tfm)); 1693 err = PTR_ERR(tfm); 1694 goto out; 1695 } 1696 1697 do { 1698 SHASH_DESC_ON_STACK(shash, tfm); 1699 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1700 1701 shash->tfm = tfm; 1702 shash->flags = 0; 1703 1704 *ctx = 
le32_to_cpu(420553207); 1705 err = crypto_shash_final(shash, (u8 *)&val); 1706 if (err) { 1707 printk(KERN_ERR "alg: crc32c: Operation failed for " 1708 "%s: %d\n", driver, err); 1709 break; 1710 } 1711 1712 if (val != ~420553207) { 1713 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1714 "%d\n", driver, val); 1715 err = -EINVAL; 1716 } 1717 } while (0); 1718 1719 crypto_free_shash(tfm); 1720 1721 out: 1722 return err; 1723 } 1724 1725 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1726 u32 type, u32 mask) 1727 { 1728 struct crypto_rng *rng; 1729 int err; 1730 1731 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask); 1732 if (IS_ERR(rng)) { 1733 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1734 "%ld\n", driver, PTR_ERR(rng)); 1735 return PTR_ERR(rng); 1736 } 1737 1738 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1739 1740 crypto_free_rng(rng); 1741 1742 return err; 1743 } 1744 1745 1746 static int drbg_cavs_test(struct drbg_testvec *test, int pr, 1747 const char *driver, u32 type, u32 mask) 1748 { 1749 int ret = -EAGAIN; 1750 struct crypto_rng *drng; 1751 struct drbg_test_data test_data; 1752 struct drbg_string addtl, pers, testentropy; 1753 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 1754 1755 if (!buf) 1756 return -ENOMEM; 1757 1758 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask); 1759 if (IS_ERR(drng)) { 1760 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 1761 "%s\n", driver); 1762 kzfree(buf); 1763 return -ENOMEM; 1764 } 1765 1766 test_data.testentropy = &testentropy; 1767 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 1768 drbg_string_fill(&pers, test->pers, test->perslen); 1769 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 1770 if (ret) { 1771 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 1772 goto outbuf; 1773 } 1774 1775 drbg_string_fill(&addtl, test->addtla, test->addtllen); 1776 if (pr) { 1777 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 1778 ret = crypto_drbg_get_bytes_addtl_test(drng, 1779 buf, test->expectedlen, &addtl, &test_data); 1780 } else { 1781 ret = crypto_drbg_get_bytes_addtl(drng, 1782 buf, test->expectedlen, &addtl); 1783 } 1784 if (ret < 0) { 1785 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1786 "driver %s\n", driver); 1787 goto outbuf; 1788 } 1789 1790 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 1791 if (pr) { 1792 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 1793 ret = crypto_drbg_get_bytes_addtl_test(drng, 1794 buf, test->expectedlen, &addtl, &test_data); 1795 } else { 1796 ret = crypto_drbg_get_bytes_addtl(drng, 1797 buf, test->expectedlen, &addtl); 1798 } 1799 if (ret < 0) { 1800 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1801 "driver %s\n", driver); 1802 goto outbuf; 1803 } 1804 1805 ret = memcmp(test->expected, buf, test->expectedlen); 1806 1807 outbuf: 1808 crypto_free_rng(drng); 1809 kzfree(buf); 1810 return ret; 1811 } 1812 1813 1814 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 1815 u32 type, u32 mask) 1816 { 1817 int err = 0; 1818 int pr = 0; 1819 int i = 0; 1820 struct drbg_testvec *template = desc->suite.drbg.vecs; 1821 unsigned int tcount = desc->suite.drbg.count; 1822 1823 if (0 == memcmp(driver, "drbg_pr_", 8)) 1824 pr = 1; 1825 1826 for (i = 0; i < tcount; i++) { 1827 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 1828 if (err) { 1829 
printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 1830 i, driver); 1831 err = -EINVAL; 1832 break; 1833 } 1834 } 1835 return err; 1836 1837 } 1838 1839 static int do_test_rsa(struct crypto_akcipher *tfm, 1840 struct akcipher_testvec *vecs) 1841 { 1842 struct akcipher_request *req; 1843 void *outbuf_enc = NULL; 1844 void *outbuf_dec = NULL; 1845 struct tcrypt_result result; 1846 unsigned int out_len_max, out_len = 0; 1847 int err = -ENOMEM; 1848 1849 req = akcipher_request_alloc(tfm, GFP_KERNEL); 1850 if (!req) 1851 return err; 1852 1853 init_completion(&result.completion); 1854 err = crypto_akcipher_setkey(tfm, vecs->key, vecs->key_len); 1855 if (err) 1856 goto free_req; 1857 1858 akcipher_request_set_crypt(req, vecs->m, outbuf_enc, vecs->m_size, 1859 out_len); 1860 /* expect this to fail, and update the required buf len */ 1861 crypto_akcipher_encrypt(req); 1862 out_len = req->dst_len; 1863 if (!out_len) { 1864 err = -EINVAL; 1865 goto free_req; 1866 } 1867 1868 out_len_max = out_len; 1869 err = -ENOMEM; 1870 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 1871 if (!outbuf_enc) 1872 goto free_req; 1873 1874 akcipher_request_set_crypt(req, vecs->m, outbuf_enc, vecs->m_size, 1875 out_len); 1876 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1877 tcrypt_complete, &result); 1878 1879 /* Run RSA encrypt - c = m^e mod n;*/ 1880 err = wait_async_op(&result, crypto_akcipher_encrypt(req)); 1881 if (err) { 1882 pr_err("alg: rsa: encrypt test failed. err %d\n", err); 1883 goto free_all; 1884 } 1885 if (out_len != vecs->c_size) { 1886 pr_err("alg: rsa: encrypt test failed. Invalid output len\n"); 1887 err = -EINVAL; 1888 goto free_all; 1889 } 1890 /* verify that encrypted message is equal to expected */ 1891 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) { 1892 pr_err("alg: rsa: encrypt test failed. Invalid output\n"); 1893 err = -EINVAL; 1894 goto free_all; 1895 } 1896 /* Don't invoke decrypt for vectors with public key */ 1897 if (vecs->public_key_vec) { 1898 err = 0; 1899 goto free_all; 1900 } 1901 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 1902 if (!outbuf_dec) { 1903 err = -ENOMEM; 1904 goto free_all; 1905 } 1906 init_completion(&result.completion); 1907 akcipher_request_set_crypt(req, outbuf_enc, outbuf_dec, vecs->c_size, 1908 out_len); 1909 1910 /* Run RSA decrypt - m = c^d mod n;*/ 1911 err = wait_async_op(&result, crypto_akcipher_decrypt(req)); 1912 if (err) { 1913 pr_err("alg: rsa: decrypt test failed. err %d\n", err); 1914 goto free_all; 1915 } 1916 out_len = req->dst_len; 1917 if (out_len != vecs->m_size) { 1918 pr_err("alg: rsa: decrypt test failed. Invalid output len\n"); 1919 err = -EINVAL; 1920 goto free_all; 1921 } 1922 /* verify that decrypted message is equal to the original msg */ 1923 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) { 1924 pr_err("alg: rsa: decrypt test failed. 
Invalid output\n"); 1925 err = -EINVAL; 1926 } 1927 free_all: 1928 kfree(outbuf_dec); 1929 kfree(outbuf_enc); 1930 free_req: 1931 akcipher_request_free(req); 1932 return err; 1933 } 1934 1935 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs, 1936 unsigned int tcount) 1937 { 1938 int ret, i; 1939 1940 for (i = 0; i < tcount; i++) { 1941 ret = do_test_rsa(tfm, vecs++); 1942 if (ret) { 1943 pr_err("alg: rsa: test failed on vector %d, err=%d\n", 1944 i + 1, ret); 1945 return ret; 1946 } 1947 } 1948 return 0; 1949 } 1950 1951 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 1952 struct akcipher_testvec *vecs, unsigned int tcount) 1953 { 1954 if (strncmp(alg, "rsa", 3) == 0) 1955 return test_rsa(tfm, vecs, tcount); 1956 1957 return 0; 1958 } 1959 1960 static int alg_test_akcipher(const struct alg_test_desc *desc, 1961 const char *driver, u32 type, u32 mask) 1962 { 1963 struct crypto_akcipher *tfm; 1964 int err = 0; 1965 1966 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1967 if (IS_ERR(tfm)) { 1968 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 1969 driver, PTR_ERR(tfm)); 1970 return PTR_ERR(tfm); 1971 } 1972 if (desc->suite.akcipher.vecs) 1973 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 1974 desc->suite.akcipher.count); 1975 1976 crypto_free_akcipher(tfm); 1977 return err; 1978 } 1979 1980 static int alg_test_null(const struct alg_test_desc *desc, 1981 const char *driver, u32 type, u32 mask) 1982 { 1983 return 0; 1984 } 1985 1986 /* Please keep this list sorted by algorithm name. */ 1987 static const struct alg_test_desc alg_test_descs[] = { 1988 { 1989 .alg = "__cbc-cast5-avx", 1990 .test = alg_test_null, 1991 }, { 1992 .alg = "__cbc-cast6-avx", 1993 .test = alg_test_null, 1994 }, { 1995 .alg = "__cbc-serpent-avx", 1996 .test = alg_test_null, 1997 }, { 1998 .alg = "__cbc-serpent-avx2", 1999 .test = alg_test_null, 2000 }, { 2001 .alg = "__cbc-serpent-sse2", 2002 .test = alg_test_null, 2003 }, { 2004 .alg = "__cbc-twofish-avx", 2005 .test = alg_test_null, 2006 }, { 2007 .alg = "__driver-cbc-aes-aesni", 2008 .test = alg_test_null, 2009 .fips_allowed = 1, 2010 }, { 2011 .alg = "__driver-cbc-camellia-aesni", 2012 .test = alg_test_null, 2013 }, { 2014 .alg = "__driver-cbc-camellia-aesni-avx2", 2015 .test = alg_test_null, 2016 }, { 2017 .alg = "__driver-cbc-cast5-avx", 2018 .test = alg_test_null, 2019 }, { 2020 .alg = "__driver-cbc-cast6-avx", 2021 .test = alg_test_null, 2022 }, { 2023 .alg = "__driver-cbc-serpent-avx", 2024 .test = alg_test_null, 2025 }, { 2026 .alg = "__driver-cbc-serpent-avx2", 2027 .test = alg_test_null, 2028 }, { 2029 .alg = "__driver-cbc-serpent-sse2", 2030 .test = alg_test_null, 2031 }, { 2032 .alg = "__driver-cbc-twofish-avx", 2033 .test = alg_test_null, 2034 }, { 2035 .alg = "__driver-ecb-aes-aesni", 2036 .test = alg_test_null, 2037 .fips_allowed = 1, 2038 }, { 2039 .alg = "__driver-ecb-camellia-aesni", 2040 .test = alg_test_null, 2041 }, { 2042 .alg = "__driver-ecb-camellia-aesni-avx2", 2043 .test = alg_test_null, 2044 }, { 2045 .alg = "__driver-ecb-cast5-avx", 2046 .test = alg_test_null, 2047 }, { 2048 .alg = "__driver-ecb-cast6-avx", 2049 .test = alg_test_null, 2050 }, { 2051 .alg = "__driver-ecb-serpent-avx", 2052 .test = alg_test_null, 2053 }, { 2054 .alg = "__driver-ecb-serpent-avx2", 2055 .test = alg_test_null, 2056 }, { 2057 .alg = "__driver-ecb-serpent-sse2", 2058 .test = alg_test_null, 2059 }, { 2060 .alg = "__driver-ecb-twofish-avx", 2061 .test = 
/* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = {
	{ .alg = "__cbc-cast5-avx", .test = alg_test_null },
	{ .alg = "__cbc-cast6-avx", .test = alg_test_null },
	{ .alg = "__cbc-serpent-avx", .test = alg_test_null },
	{ .alg = "__cbc-serpent-avx2", .test = alg_test_null },
	{ .alg = "__cbc-serpent-sse2", .test = alg_test_null },
	{ .alg = "__cbc-twofish-avx", .test = alg_test_null },
	{ .alg = "__driver-cbc-aes-aesni", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "__driver-cbc-camellia-aesni", .test = alg_test_null },
	{ .alg = "__driver-cbc-camellia-aesni-avx2", .test = alg_test_null },
	{ .alg = "__driver-cbc-cast5-avx", .test = alg_test_null },
	{ .alg = "__driver-cbc-cast6-avx", .test = alg_test_null },
	{ .alg = "__driver-cbc-serpent-avx", .test = alg_test_null },
	{ .alg = "__driver-cbc-serpent-avx2", .test = alg_test_null },
	{ .alg = "__driver-cbc-serpent-sse2", .test = alg_test_null },
	{ .alg = "__driver-cbc-twofish-avx", .test = alg_test_null },
	{ .alg = "__driver-ecb-aes-aesni", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "__driver-ecb-camellia-aesni", .test = alg_test_null },
	{ .alg = "__driver-ecb-camellia-aesni-avx2", .test = alg_test_null },
	{ .alg = "__driver-ecb-cast5-avx", .test = alg_test_null },
	{ .alg = "__driver-ecb-cast6-avx", .test = alg_test_null },
	{ .alg = "__driver-ecb-serpent-avx", .test = alg_test_null },
	{ .alg = "__driver-ecb-serpent-avx2", .test = alg_test_null },
	{ .alg = "__driver-ecb-serpent-sse2", .test = alg_test_null },
	{ .alg = "__driver-ecb-twofish-avx", .test = alg_test_null },
	{ .alg = "__driver-gcm-aes-aesni", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "__ghash-pclmulqdqni", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "ansi_cprng", .test = alg_test_cprng, .fips_allowed = 1,
	  .suite = { .cprng = { .vecs = ansi_cprng_aes_tv_template,
				.count = ANSI_CPRNG_AES_TEST_VECTORS } } },
	{ .alg = "authenc(hmac(md5),ecb(cipher_null))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = {
		.enc = { .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
			 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS },
		.dec = { .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
			 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS } } } },
	{ .alg = "authenc(hmac(sha1),cbc(aes))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha1_aes_cbc_enc_tv_temp,
					.count = HMAC_SHA1_AES_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha1),cbc(des))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha1_des_cbc_enc_tv_temp,
					.count = HMAC_SHA1_DES_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha1),cbc(des3_ede))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha1_des3_ede_cbc_enc_tv_temp,
					.count = HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha1),ecb(cipher_null))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = {
		.enc = { .vecs = hmac_sha1_ecb_cipher_null_enc_tv_temp,
			 .count = HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC },
		.dec = { .vecs = hmac_sha1_ecb_cipher_null_dec_tv_temp,
			 .count = HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha224),cbc(des))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha224_des_cbc_enc_tv_temp,
					.count = HMAC_SHA224_DES_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha224),cbc(des3_ede))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha224_des3_ede_cbc_enc_tv_temp,
					.count = HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha256),cbc(aes))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha256_aes_cbc_enc_tv_temp,
					.count = HMAC_SHA256_AES_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha256),cbc(des))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha256_des_cbc_enc_tv_temp,
					.count = HMAC_SHA256_DES_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha256),cbc(des3_ede))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha256_des3_ede_cbc_enc_tv_temp,
					.count = HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha384),cbc(des))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha384_des_cbc_enc_tv_temp,
					.count = HMAC_SHA384_DES_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha384),cbc(des3_ede))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha384_des3_ede_cbc_enc_tv_temp,
					.count = HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha512),cbc(aes))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha512_aes_cbc_enc_tv_temp,
					.count = HMAC_SHA512_AES_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha512),cbc(des))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha512_des_cbc_enc_tv_temp,
					.count = HMAC_SHA512_DES_CBC_ENC_TEST_VEC } } } },
	{ .alg = "authenc(hmac(sha512),cbc(des3_ede))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = { .enc = { .vecs = hmac_sha512_des3_ede_cbc_enc_tv_temp,
					.count = HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC } } } },
	{ .alg = "cbc(aes)", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = aes_cbc_enc_tv_template, .count = AES_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_cbc_dec_tv_template, .count = AES_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(anubis)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = anubis_cbc_enc_tv_template, .count = ANUBIS_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = anubis_cbc_dec_tv_template, .count = ANUBIS_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(blowfish)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = bf_cbc_enc_tv_template, .count = BF_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = bf_cbc_dec_tv_template, .count = BF_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(camellia)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = camellia_cbc_enc_tv_template, .count = CAMELLIA_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = camellia_cbc_dec_tv_template, .count = CAMELLIA_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(cast5)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cast5_cbc_enc_tv_template, .count = CAST5_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = cast5_cbc_dec_tv_template, .count = CAST5_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(cast6)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cast6_cbc_enc_tv_template, .count = CAST6_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = cast6_cbc_dec_tv_template, .count = CAST6_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(des)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = des_cbc_enc_tv_template, .count = DES_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = des_cbc_dec_tv_template, .count = DES_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(des3_ede)", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = des3_ede_cbc_enc_tv_template, .count = DES3_EDE_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = des3_ede_cbc_dec_tv_template, .count = DES3_EDE_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(serpent)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = serpent_cbc_enc_tv_template, .count = SERPENT_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = serpent_cbc_dec_tv_template, .count = SERPENT_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "cbc(twofish)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = tf_cbc_enc_tv_template, .count = TF_CBC_ENC_TEST_VECTORS },
		.dec = { .vecs = tf_cbc_dec_tv_template, .count = TF_CBC_DEC_TEST_VECTORS } } } },
	{ .alg = "ccm(aes)", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = {
		.enc = { .vecs = aes_ccm_enc_tv_template, .count = AES_CCM_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_ccm_dec_tv_template, .count = AES_CCM_DEC_TEST_VECTORS } } } },
	/* ChaCha20 decryption is the same keystream XOR, so the enc vectors cover both. */
	{ .alg = "chacha20", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = chacha20_enc_tv_template, .count = CHACHA20_ENC_TEST_VECTORS },
		.dec = { .vecs = chacha20_enc_tv_template, .count = CHACHA20_ENC_TEST_VECTORS } } } },
	{ .alg = "cmac(aes)", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = aes_cmac128_tv_template, .count = CMAC_AES_TEST_VECTORS } } },
	{ .alg = "cmac(des3_ede)", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = des3_ede_cmac64_tv_template, .count = CMAC_DES3_EDE_TEST_VECTORS } } },
	{ .alg = "compress_null", .test = alg_test_null },
	{ .alg = "crc32", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = crc32_tv_template, .count = CRC32_TEST_VECTORS } } },
	{ .alg = "crc32c", .test = alg_test_crc32c, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = crc32c_tv_template, .count = CRC32C_TEST_VECTORS } } },
	{ .alg = "crct10dif", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = crct10dif_tv_template, .count = CRCT10DIF_TEST_VECTORS } } },
	{ .alg = "cryptd(__driver-cbc-aes-aesni)", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "cryptd(__driver-cbc-camellia-aesni)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-cbc-serpent-avx2)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-ecb-aes-aesni)", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "cryptd(__driver-ecb-camellia-aesni)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-ecb-cast5-avx)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-ecb-cast6-avx)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-ecb-serpent-avx)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-ecb-serpent-avx2)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-ecb-serpent-sse2)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-ecb-twofish-avx)", .test = alg_test_null },
	{ .alg = "cryptd(__driver-gcm-aes-aesni)", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "cryptd(__ghash-pclmulqdqni)", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "ctr(aes)", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = aes_ctr_enc_tv_template, .count = AES_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_ctr_dec_tv_template, .count = AES_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "ctr(blowfish)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = bf_ctr_enc_tv_template, .count = BF_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = bf_ctr_dec_tv_template, .count = BF_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "ctr(camellia)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = camellia_ctr_enc_tv_template, .count = CAMELLIA_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = camellia_ctr_dec_tv_template, .count = CAMELLIA_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "ctr(cast5)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cast5_ctr_enc_tv_template, .count = CAST5_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = cast5_ctr_dec_tv_template, .count = CAST5_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "ctr(cast6)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cast6_ctr_enc_tv_template, .count = CAST6_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = cast6_ctr_dec_tv_template, .count = CAST6_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "ctr(des)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = des_ctr_enc_tv_template, .count = DES_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = des_ctr_dec_tv_template, .count = DES_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "ctr(des3_ede)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = des3_ede_ctr_enc_tv_template, .count = DES3_EDE_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = des3_ede_ctr_dec_tv_template, .count = DES3_EDE_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "ctr(serpent)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = serpent_ctr_enc_tv_template, .count = SERPENT_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = serpent_ctr_dec_tv_template, .count = SERPENT_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "ctr(twofish)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = tf_ctr_enc_tv_template, .count = TF_CTR_ENC_TEST_VECTORS },
		.dec = { .vecs = tf_ctr_dec_tv_template, .count = TF_CTR_DEC_TEST_VECTORS } } } },
	{ .alg = "cts(cbc(aes))", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cts_mode_enc_tv_template, .count = CTS_MODE_ENC_TEST_VECTORS },
		.dec = { .vecs = cts_mode_dec_tv_template, .count = CTS_MODE_DEC_TEST_VECTORS } } } },
	{ .alg = "deflate", .test = alg_test_comp, .fips_allowed = 1,
	  .suite = { .comp = {
		.comp = { .vecs = deflate_comp_tv_template, .count = DEFLATE_COMP_TEST_VECTORS },
		.decomp = { .vecs = deflate_decomp_tv_template, .count = DEFLATE_DECOMP_TEST_VECTORS } } } },
	{ .alg = "digest_null", .test = alg_test_null },
	{ .alg = "drbg_nopr_ctr_aes128", .test = alg_test_drbg, .fips_allowed = 1,
	  .suite = { .drbg = { .vecs = drbg_nopr_ctr_aes128_tv_template,
			       .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template) } } },
	{ .alg = "drbg_nopr_ctr_aes192", .test = alg_test_drbg, .fips_allowed = 1,
	  .suite = { .drbg = { .vecs = drbg_nopr_ctr_aes192_tv_template,
			       .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template) } } },
	{ .alg = "drbg_nopr_ctr_aes256", .test = alg_test_drbg, .fips_allowed = 1,
	  .suite = { .drbg = { .vecs = drbg_nopr_ctr_aes256_tv_template,
			       .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template) } } },
	/*
	 * There is no need to specifically test the DRBG with every
	 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
	 */
	{ .alg = "drbg_nopr_hmac_sha1", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_nopr_hmac_sha256", .test = alg_test_drbg, .fips_allowed = 1,
	  .suite = { .drbg = { .vecs = drbg_nopr_hmac_sha256_tv_template,
			       .count = ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template) } } },
	/* covered by drbg_nopr_hmac_sha256 test */
	{ .alg = "drbg_nopr_hmac_sha384", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_nopr_hmac_sha512", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_nopr_sha1", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_nopr_sha256", .test = alg_test_drbg, .fips_allowed = 1,
	  .suite = { .drbg = { .vecs = drbg_nopr_sha256_tv_template,
			       .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template) } } },
	/* covered by drbg_nopr_sha256 test */
	{ .alg = "drbg_nopr_sha384", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_nopr_sha512", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_pr_ctr_aes128", .test = alg_test_drbg, .fips_allowed = 1,
	  .suite = { .drbg = { .vecs = drbg_pr_ctr_aes128_tv_template,
			       .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template) } } },
	/* covered by drbg_pr_ctr_aes128 test */
	{ .alg = "drbg_pr_ctr_aes192", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_pr_ctr_aes256", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_pr_hmac_sha1", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_pr_hmac_sha256", .test = alg_test_drbg, .fips_allowed = 1,
	  .suite = { .drbg = { .vecs = drbg_pr_hmac_sha256_tv_template,
			       .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template) } } },
	/* covered by drbg_pr_hmac_sha256 test */
	{ .alg = "drbg_pr_hmac_sha384", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_pr_hmac_sha512", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_pr_sha1", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_pr_sha256", .test = alg_test_drbg, .fips_allowed = 1,
	  .suite = { .drbg = { .vecs = drbg_pr_sha256_tv_template,
			       .count = ARRAY_SIZE(drbg_pr_sha256_tv_template) } } },
	/* covered by drbg_pr_sha256 test */
	{ .alg = "drbg_pr_sha384", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "drbg_pr_sha512", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "ecb(__aes-aesni)", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "ecb(aes)", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = aes_enc_tv_template, .count = AES_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_dec_tv_template, .count = AES_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(anubis)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = anubis_enc_tv_template, .count = ANUBIS_ENC_TEST_VECTORS },
		.dec = { .vecs = anubis_dec_tv_template, .count = ANUBIS_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(arc4)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = arc4_enc_tv_template, .count = ARC4_ENC_TEST_VECTORS },
		.dec = { .vecs = arc4_dec_tv_template, .count = ARC4_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(blowfish)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = bf_enc_tv_template, .count = BF_ENC_TEST_VECTORS },
		.dec = { .vecs = bf_dec_tv_template, .count = BF_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(camellia)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = camellia_enc_tv_template, .count = CAMELLIA_ENC_TEST_VECTORS },
		.dec = { .vecs = camellia_dec_tv_template, .count = CAMELLIA_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(cast5)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cast5_enc_tv_template, .count = CAST5_ENC_TEST_VECTORS },
		.dec = { .vecs = cast5_dec_tv_template, .count = CAST5_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(cast6)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cast6_enc_tv_template, .count = CAST6_ENC_TEST_VECTORS },
		.dec = { .vecs = cast6_dec_tv_template, .count = CAST6_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(cipher_null)", .test = alg_test_null },
	{ .alg = "ecb(des)", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = des_enc_tv_template, .count = DES_ENC_TEST_VECTORS },
		.dec = { .vecs = des_dec_tv_template, .count = DES_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(des3_ede)", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = des3_ede_enc_tv_template, .count = DES3_EDE_ENC_TEST_VECTORS },
		.dec = { .vecs = des3_ede_dec_tv_template, .count = DES3_EDE_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(fcrypt)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = fcrypt_pcbc_enc_tv_template, .count = 1 },
		.dec = { .vecs = fcrypt_pcbc_dec_tv_template, .count = 1 } } } },
	{ .alg = "ecb(khazad)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = khazad_enc_tv_template, .count = KHAZAD_ENC_TEST_VECTORS },
		.dec = { .vecs = khazad_dec_tv_template, .count = KHAZAD_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(seed)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = seed_enc_tv_template, .count = SEED_ENC_TEST_VECTORS },
		.dec = { .vecs = seed_dec_tv_template, .count = SEED_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(serpent)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = serpent_enc_tv_template, .count = SERPENT_ENC_TEST_VECTORS },
		.dec = { .vecs = serpent_dec_tv_template, .count = SERPENT_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(tea)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = tea_enc_tv_template, .count = TEA_ENC_TEST_VECTORS },
		.dec = { .vecs = tea_dec_tv_template, .count = TEA_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(tnepres)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = tnepres_enc_tv_template, .count = TNEPRES_ENC_TEST_VECTORS },
		.dec = { .vecs = tnepres_dec_tv_template, .count = TNEPRES_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(twofish)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = tf_enc_tv_template, .count = TF_ENC_TEST_VECTORS },
		.dec = { .vecs = tf_dec_tv_template, .count = TF_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(xeta)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = xeta_enc_tv_template, .count = XETA_ENC_TEST_VECTORS },
		.dec = { .vecs = xeta_dec_tv_template, .count = XETA_DEC_TEST_VECTORS } } } },
	{ .alg = "ecb(xtea)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = xtea_enc_tv_template, .count = XTEA_ENC_TEST_VECTORS },
		.dec = { .vecs = xtea_dec_tv_template, .count = XTEA_DEC_TEST_VECTORS } } } },
	{ .alg = "gcm(aes)", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = {
		.enc = { .vecs = aes_gcm_enc_tv_template, .count = AES_GCM_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_gcm_dec_tv_template, .count = AES_GCM_DEC_TEST_VECTORS } } } },
	{ .alg = "ghash", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = ghash_tv_template, .count = GHASH_TEST_VECTORS } } },
	{ .alg = "hmac(crc32)", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = bfin_crc_tv_template, .count = BFIN_CRC_TEST_VECTORS } } },
	{ .alg = "hmac(md5)", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = hmac_md5_tv_template, .count = HMAC_MD5_TEST_VECTORS } } },
	{ .alg = "hmac(rmd128)", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = hmac_rmd128_tv_template, .count = HMAC_RMD128_TEST_VECTORS } } },
	{ .alg = "hmac(rmd160)", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = hmac_rmd160_tv_template, .count = HMAC_RMD160_TEST_VECTORS } } },
	{ .alg = "hmac(sha1)", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = hmac_sha1_tv_template, .count = HMAC_SHA1_TEST_VECTORS } } },
	{ .alg = "hmac(sha224)", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = hmac_sha224_tv_template, .count = HMAC_SHA224_TEST_VECTORS } } },
	{ .alg = "hmac(sha256)", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = hmac_sha256_tv_template, .count = HMAC_SHA256_TEST_VECTORS } } },
	{ .alg = "hmac(sha384)", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = hmac_sha384_tv_template, .count = HMAC_SHA384_TEST_VECTORS } } },
	{ .alg = "hmac(sha512)", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = hmac_sha512_tv_template, .count = HMAC_SHA512_TEST_VECTORS } } },
	{ .alg = "jitterentropy_rng", .test = alg_test_null, .fips_allowed = 1 },
	{ .alg = "lrw(aes)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = aes_lrw_enc_tv_template, .count = AES_LRW_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_lrw_dec_tv_template, .count = AES_LRW_DEC_TEST_VECTORS } } } },
	{ .alg = "lrw(camellia)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = camellia_lrw_enc_tv_template, .count = CAMELLIA_LRW_ENC_TEST_VECTORS },
		.dec = { .vecs = camellia_lrw_dec_tv_template, .count = CAMELLIA_LRW_DEC_TEST_VECTORS } } } },
	{ .alg = "lrw(cast6)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cast6_lrw_enc_tv_template, .count = CAST6_LRW_ENC_TEST_VECTORS },
		.dec = { .vecs = cast6_lrw_dec_tv_template, .count = CAST6_LRW_DEC_TEST_VECTORS } } } },
	{ .alg = "lrw(serpent)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = serpent_lrw_enc_tv_template, .count = SERPENT_LRW_ENC_TEST_VECTORS },
		.dec = { .vecs = serpent_lrw_dec_tv_template, .count = SERPENT_LRW_DEC_TEST_VECTORS } } } },
	{ .alg = "lrw(twofish)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = tf_lrw_enc_tv_template, .count = TF_LRW_ENC_TEST_VECTORS },
		.dec = { .vecs = tf_lrw_dec_tv_template, .count = TF_LRW_DEC_TEST_VECTORS } } } },
	{ .alg = "lz4", .test = alg_test_comp, .fips_allowed = 1,
	  .suite = { .comp = {
		.comp = { .vecs = lz4_comp_tv_template, .count = LZ4_COMP_TEST_VECTORS },
		.decomp = { .vecs = lz4_decomp_tv_template, .count = LZ4_DECOMP_TEST_VECTORS } } } },
	{ .alg = "lz4hc", .test = alg_test_comp, .fips_allowed = 1,
	  .suite = { .comp = {
		.comp = { .vecs = lz4hc_comp_tv_template, .count = LZ4HC_COMP_TEST_VECTORS },
		.decomp = { .vecs = lz4hc_decomp_tv_template, .count = LZ4HC_DECOMP_TEST_VECTORS } } } },
	{ .alg = "lzo", .test = alg_test_comp, .fips_allowed = 1,
	  .suite = { .comp = {
		.comp = { .vecs = lzo_comp_tv_template, .count = LZO_COMP_TEST_VECTORS },
		.decomp = { .vecs = lzo_decomp_tv_template, .count = LZO_DECOMP_TEST_VECTORS } } } },
	{ .alg = "md4", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = md4_tv_template, .count = MD4_TEST_VECTORS } } },
	{ .alg = "md5", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = md5_tv_template, .count = MD5_TEST_VECTORS } } },
	{ .alg = "michael_mic", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = michael_mic_tv_template, .count = MICHAEL_MIC_TEST_VECTORS } } },
	{ .alg = "ofb(aes)", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = aes_ofb_enc_tv_template, .count = AES_OFB_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_ofb_dec_tv_template, .count = AES_OFB_DEC_TEST_VECTORS } } } },
	{ .alg = "pcbc(fcrypt)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = fcrypt_pcbc_enc_tv_template, .count = FCRYPT_ENC_TEST_VECTORS },
		.dec = { .vecs = fcrypt_pcbc_dec_tv_template, .count = FCRYPT_DEC_TEST_VECTORS } } } },
	{ .alg = "poly1305", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = poly1305_tv_template, .count = POLY1305_TEST_VECTORS } } },
	{ .alg = "rfc3686(ctr(aes))", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = aes_ctr_rfc3686_enc_tv_template, .count = AES_CTR_3686_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_ctr_rfc3686_dec_tv_template, .count = AES_CTR_3686_DEC_TEST_VECTORS } } } },
	{ .alg = "rfc4106(gcm(aes))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = {
		.enc = { .vecs = aes_gcm_rfc4106_enc_tv_template, .count = AES_GCM_4106_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_gcm_rfc4106_dec_tv_template, .count = AES_GCM_4106_DEC_TEST_VECTORS } } } },
	{ .alg = "rfc4309(ccm(aes))", .test = alg_test_aead, .fips_allowed = 1,
	  .suite = { .aead = {
		.enc = { .vecs = aes_ccm_rfc4309_enc_tv_template, .count = AES_CCM_4309_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_ccm_rfc4309_dec_tv_template, .count = AES_CCM_4309_DEC_TEST_VECTORS } } } },
	{ .alg = "rfc4543(gcm(aes))", .test = alg_test_aead,
	  .suite = { .aead = {
		.enc = { .vecs = aes_gcm_rfc4543_enc_tv_template, .count = AES_GCM_4543_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_gcm_rfc4543_dec_tv_template, .count = AES_GCM_4543_DEC_TEST_VECTORS } } } },
	{ .alg = "rfc7539(chacha20,poly1305)", .test = alg_test_aead,
	  .suite = { .aead = {
		.enc = { .vecs = rfc7539_enc_tv_template, .count = RFC7539_ENC_TEST_VECTORS },
		.dec = { .vecs = rfc7539_dec_tv_template, .count = RFC7539_DEC_TEST_VECTORS } } } },
	{ .alg = "rfc7539esp(chacha20,poly1305)", .test = alg_test_aead,
	  .suite = { .aead = {
		.enc = { .vecs = rfc7539esp_enc_tv_template, .count = RFC7539ESP_ENC_TEST_VECTORS },
		.dec = { .vecs = rfc7539esp_dec_tv_template, .count = RFC7539ESP_DEC_TEST_VECTORS } } } },
	{ .alg = "rmd128", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = rmd128_tv_template, .count = RMD128_TEST_VECTORS } } },
	{ .alg = "rmd160", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = rmd160_tv_template, .count = RMD160_TEST_VECTORS } } },
	{ .alg = "rmd256", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = rmd256_tv_template, .count = RMD256_TEST_VECTORS } } },
	{ .alg = "rmd320", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = rmd320_tv_template, .count = RMD320_TEST_VECTORS } } },
	{ .alg = "rsa", .test = alg_test_akcipher, .fips_allowed = 1,
	  .suite = { .akcipher = { .vecs = rsa_tv_template, .count = RSA_TEST_VECTORS } } },
	{ .alg = "salsa20", .test = alg_test_skcipher,
	  .suite = { .cipher = { .enc = { .vecs = salsa20_stream_enc_tv_template,
					  .count = SALSA20_STREAM_ENC_TEST_VECTORS } } } },
	{ .alg = "sha1", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = sha1_tv_template, .count = SHA1_TEST_VECTORS } } },
	{ .alg = "sha224", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = sha224_tv_template, .count = SHA224_TEST_VECTORS } } },
	{ .alg = "sha256", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = sha256_tv_template, .count = SHA256_TEST_VECTORS } } },
	{ .alg = "sha384", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = sha384_tv_template, .count = SHA384_TEST_VECTORS } } },
	{ .alg = "sha512", .test = alg_test_hash, .fips_allowed = 1,
	  .suite = { .hash = { .vecs = sha512_tv_template, .count = SHA512_TEST_VECTORS } } },
	{ .alg = "tgr128", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = tgr128_tv_template, .count = TGR128_TEST_VECTORS } } },
	{ .alg = "tgr160", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = tgr160_tv_template, .count = TGR160_TEST_VECTORS } } },
	{ .alg = "tgr192", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = tgr192_tv_template, .count = TGR192_TEST_VECTORS } } },
	{ .alg = "vmac(aes)", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = aes_vmac128_tv_template, .count = VMAC_AES_TEST_VECTORS } } },
	{ .alg = "wp256", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = wp256_tv_template, .count = WP256_TEST_VECTORS } } },
	{ .alg = "wp384", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = wp384_tv_template, .count = WP384_TEST_VECTORS } } },
	{ .alg = "wp512", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = wp512_tv_template, .count = WP512_TEST_VECTORS } } },
	{ .alg = "xcbc(aes)", .test = alg_test_hash,
	  .suite = { .hash = { .vecs = aes_xcbc128_tv_template, .count = XCBC_AES_TEST_VECTORS } } },
	{ .alg = "xts(aes)", .test = alg_test_skcipher, .fips_allowed = 1,
	  .suite = { .cipher = {
		.enc = { .vecs = aes_xts_enc_tv_template, .count = AES_XTS_ENC_TEST_VECTORS },
		.dec = { .vecs = aes_xts_dec_tv_template, .count = AES_XTS_DEC_TEST_VECTORS } } } },
	{ .alg = "xts(camellia)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = camellia_xts_enc_tv_template, .count = CAMELLIA_XTS_ENC_TEST_VECTORS },
		.dec = { .vecs = camellia_xts_dec_tv_template, .count = CAMELLIA_XTS_DEC_TEST_VECTORS } } } },
	{ .alg = "xts(cast6)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = cast6_xts_enc_tv_template, .count = CAST6_XTS_ENC_TEST_VECTORS },
		.dec = { .vecs = cast6_xts_dec_tv_template, .count = CAST6_XTS_DEC_TEST_VECTORS } } } },
	{ .alg = "xts(serpent)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = serpent_xts_enc_tv_template, .count = SERPENT_XTS_ENC_TEST_VECTORS },
		.dec = { .vecs = serpent_xts_dec_tv_template, .count = SERPENT_XTS_DEC_TEST_VECTORS } } } },
	{ .alg = "xts(twofish)", .test = alg_test_skcipher,
	  .suite = { .cipher = {
		.enc = { .vecs = tf_xts_enc_tv_template, .count = TF_XTS_ENC_TEST_VECTORS },
		.dec = { .vecs = tf_xts_dec_tv_template, .count = TF_XTS_DEC_TEST_VECTORS } } } },
	{ .alg = "zlib", .test = alg_test_pcomp, .fips_allowed = 1,
	  .suite = { .pcomp = {
		.comp = { .vecs = zlib_comp_tv_template, .count = ZLIB_COMP_TEST_VECTORS },
		.decomp = { .vecs = zlib_decomp_tv_template, .count = ZLIB_DECOMP_TEST_VECTORS } } } },
};

static bool alg_test_descs_checked;

static void alg_test_descs_check_order(void)
{
	int i;

	/* only check once */
	if (alg_test_descs_checked)
		return;

	alg_test_descs_checked = true;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}
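/*
 * Binary search over the alphabetically sorted alg_test_descs[] table;
 * returns the index of the matching entry, or -1 if the algorithm has no
 * test descriptor.
 */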
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/* Bare ciphers are looked up through their ecb() wrapper. */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Run the tests registered for both the generic and the driver name. */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);