1 /* 2 * Algorithm testing framework and tests. 3 * 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 6 * Copyright (c) 2007 Nokia Siemens Networks 7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * Updated RFC4106 AES-GCM testing. 10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) 11 * Adrian Hoban <adrian.hoban@intel.com> 12 * Gabriele Paoloni <gabriele.paoloni@intel.com> 13 * Tadeusz Struk (tadeusz.struk@intel.com) 14 * Copyright (c) 2010, Intel Corporation. 15 * 16 * This program is free software; you can redistribute it and/or modify it 17 * under the terms of the GNU General Public License as published by the Free 18 * Software Foundation; either version 2 of the License, or (at your option) 19 * any later version. 20 * 21 */ 22 23 #include <crypto/aead.h> 24 #include <crypto/hash.h> 25 #include <crypto/skcipher.h> 26 #include <linux/err.h> 27 #include <linux/fips.h> 28 #include <linux/module.h> 29 #include <linux/scatterlist.h> 30 #include <linux/slab.h> 31 #include <linux/string.h> 32 #include <crypto/rng.h> 33 #include <crypto/drbg.h> 34 #include <crypto/akcipher.h> 35 #include <crypto/kpp.h> 36 #include <crypto/acompress.h> 37 38 #include "internal.h" 39 40 static bool notests; 41 module_param(notests, bool, 0644); 42 MODULE_PARM_DESC(notests, "disable crypto self-tests"); 43 44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS 45 46 /* a perfect nop */ 47 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 48 { 49 return 0; 50 } 51 52 #else 53 54 #include "testmgr.h" 55 56 /* 57 * Need slab memory for testing (size in number of pages). 58 */ 59 #define XBUFSIZE 8 60 61 /* 62 * Indexes into the xbuf to simulate cross-page access. 63 */ 64 #define IDX1 32 65 #define IDX2 32400 66 #define IDX3 1511 67 #define IDX4 8193 68 #define IDX5 22222 69 #define IDX6 17101 70 #define IDX7 27333 71 #define IDX8 3000 72 73 /* 74 * Used by test_cipher() 75 */ 76 #define ENCRYPT 1 77 #define DECRYPT 0 78 79 struct aead_test_suite { 80 struct { 81 const struct aead_testvec *vecs; 82 unsigned int count; 83 } enc, dec; 84 }; 85 86 struct cipher_test_suite { 87 const struct cipher_testvec *vecs; 88 unsigned int count; 89 }; 90 91 struct comp_test_suite { 92 struct { 93 const struct comp_testvec *vecs; 94 unsigned int count; 95 } comp, decomp; 96 }; 97 98 struct hash_test_suite { 99 const struct hash_testvec *vecs; 100 unsigned int count; 101 }; 102 103 struct cprng_test_suite { 104 const struct cprng_testvec *vecs; 105 unsigned int count; 106 }; 107 108 struct drbg_test_suite { 109 const struct drbg_testvec *vecs; 110 unsigned int count; 111 }; 112 113 struct akcipher_test_suite { 114 const struct akcipher_testvec *vecs; 115 unsigned int count; 116 }; 117 118 struct kpp_test_suite { 119 const struct kpp_testvec *vecs; 120 unsigned int count; 121 }; 122 123 struct alg_test_desc { 124 const char *alg; 125 int (*test)(const struct alg_test_desc *desc, const char *driver, 126 u32 type, u32 mask); 127 int fips_allowed; /* set if alg is allowed in fips mode */ 128 129 union { 130 struct aead_test_suite aead; 131 struct cipher_test_suite cipher; 132 struct comp_test_suite comp; 133 struct hash_test_suite hash; 134 struct cprng_test_suite cprng; 135 struct drbg_test_suite drbg; 136 struct akcipher_test_suite akcipher; 137 struct kpp_test_suite kpp; 138 } suite; 139 }; 140 141 static const unsigned int IDX[8] = { 142 IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 143 144 static void hexdump(unsigned char *buf, unsigned int len) 145 { 146 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, 147 16, 1, 148 buf, len, false); 149 } 150 151 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 152 { 153 int i; 154 155 for (i = 0; i < XBUFSIZE; i++) { 156 buf[i] = (void *)__get_free_page(GFP_KERNEL); 157 if (!buf[i]) 158 goto err_free_buf; 159 } 160 161 return 0; 162 163 err_free_buf: 164 while (i-- > 0) 165 free_page((unsigned long)buf[i]); 166 167 return -ENOMEM; 168 } 169 170 static void testmgr_free_buf(char *buf[XBUFSIZE]) 171 { 172 int i; 173 174 for (i = 0; i < XBUFSIZE; i++) 175 free_page((unsigned long)buf[i]); 176 } 177 178 static int ahash_guard_result(char *result, char c, int size) 179 { 180 int i; 181 182 for (i = 0; i < size; i++) { 183 if (result[i] != c) 184 return -EINVAL; 185 } 186 187 return 0; 188 } 189 190 static int ahash_partial_update(struct ahash_request **preq, 191 struct crypto_ahash *tfm, const struct hash_testvec *template, 192 void *hash_buff, int k, int temp, struct scatterlist *sg, 193 const char *algo, char *result, struct crypto_wait *wait) 194 { 195 char *state; 196 struct ahash_request *req; 197 int statesize, ret = -EINVAL; 198 static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 }; 199 int digestsize = crypto_ahash_digestsize(tfm); 200 201 req = *preq; 202 statesize = crypto_ahash_statesize( 203 crypto_ahash_reqtfm(req)); 204 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL); 205 if (!state) { 206 pr_err("alg: hash: Failed to alloc state for %s\n", algo); 207 goto out_nostate; 208 } 209 memcpy(state + statesize, guard, sizeof(guard)); 210 memset(result, 1, digestsize); 211 ret = crypto_ahash_export(req, state); 212 WARN_ON(memcmp(state + statesize, guard, sizeof(guard))); 213 if (ret) { 214 pr_err("alg: hash: Failed to export() for %s\n", algo); 215 goto out; 216 } 217 ret = ahash_guard_result(result, 1, digestsize); 218 if (ret) { 219 pr_err("alg: hash: Failed, export used req->result for %s\n", 220 algo); 221 goto out; 222 } 223 ahash_request_free(req); 224 req = ahash_request_alloc(tfm, GFP_KERNEL); 225 if (!req) { 226 pr_err("alg: hash: Failed to alloc request for %s\n", algo); 227 goto out_noreq; 228 } 229 ahash_request_set_callback(req, 230 CRYPTO_TFM_REQ_MAY_BACKLOG, 231 crypto_req_done, wait); 232 233 memcpy(hash_buff, template->plaintext + temp, 234 template->tap[k]); 235 sg_init_one(&sg[0], hash_buff, template->tap[k]); 236 ahash_request_set_crypt(req, sg, result, template->tap[k]); 237 ret = crypto_ahash_import(req, state); 238 if (ret) { 239 pr_err("alg: hash: Failed to import() for %s\n", algo); 240 goto out; 241 } 242 ret = ahash_guard_result(result, 1, digestsize); 243 if (ret) { 244 pr_err("alg: hash: Failed, import used req->result for %s\n", 245 algo); 246 goto out; 247 } 248 ret = crypto_wait_req(crypto_ahash_update(req), wait); 249 if (ret) 250 goto out; 251 *preq = req; 252 ret = 0; 253 goto out_noreq; 254 out: 255 ahash_request_free(req); 256 out_noreq: 257 kfree(state); 258 out_nostate: 259 return ret; 260 } 261 262 static int __test_hash(struct crypto_ahash *tfm, 263 const struct hash_testvec *template, unsigned int tcount, 264 bool use_digest, const int align_offset) 265 { 266 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 267 size_t digest_size = crypto_ahash_digestsize(tfm); 268 unsigned int i, j, k, temp; 269 struct scatterlist sg[8]; 270 char *result; 271 char *key; 272 struct ahash_request *req; 273 struct crypto_wait wait; 274 void *hash_buff; 275 char *xbuf[XBUFSIZE]; 276 int ret = -ENOMEM; 277 278 result = kmalloc(digest_size, GFP_KERNEL); 279 if (!result) 280 return ret; 281 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 282 if (!key) 283 goto out_nobuf; 284 if (testmgr_alloc_buf(xbuf)) 285 goto out_nobuf; 286 287 crypto_init_wait(&wait); 288 289 req = ahash_request_alloc(tfm, GFP_KERNEL); 290 if (!req) { 291 printk(KERN_ERR "alg: hash: Failed to allocate request for " 292 "%s\n", algo); 293 goto out_noreq; 294 } 295 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 296 crypto_req_done, &wait); 297 298 j = 0; 299 for (i = 0; i < tcount; i++) { 300 if (template[i].np) 301 continue; 302 303 ret = -EINVAL; 304 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 305 goto out; 306 307 j++; 308 memset(result, 0, digest_size); 309 310 hash_buff = xbuf[0]; 311 hash_buff += align_offset; 312 313 memcpy(hash_buff, template[i].plaintext, template[i].psize); 314 sg_init_one(&sg[0], hash_buff, template[i].psize); 315 316 if (template[i].ksize) { 317 crypto_ahash_clear_flags(tfm, ~0); 318 if (template[i].ksize > MAX_KEYLEN) { 319 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 320 j, algo, template[i].ksize, MAX_KEYLEN); 321 ret = -EINVAL; 322 goto out; 323 } 324 memcpy(key, template[i].key, template[i].ksize); 325 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 326 if (ret) { 327 printk(KERN_ERR "alg: hash: setkey failed on " 328 "test %d for %s: ret=%d\n", j, algo, 329 -ret); 330 goto out; 331 } 332 } 333 334 ahash_request_set_crypt(req, sg, result, template[i].psize); 335 if (use_digest) { 336 ret = crypto_wait_req(crypto_ahash_digest(req), &wait); 337 if (ret) { 338 pr_err("alg: hash: digest failed on test %d " 339 "for %s: ret=%d\n", j, algo, -ret); 340 goto out; 341 } 342 } else { 343 memset(result, 1, digest_size); 344 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 345 if (ret) { 346 pr_err("alg: hash: init failed on test %d " 347 "for %s: ret=%d\n", j, algo, -ret); 348 goto out; 349 } 350 ret = ahash_guard_result(result, 1, digest_size); 351 if (ret) { 352 pr_err("alg: hash: init failed on test %d " 353 "for %s: used req->result\n", j, algo); 354 goto out; 355 } 356 ret = crypto_wait_req(crypto_ahash_update(req), &wait); 357 if (ret) { 358 pr_err("alg: hash: update failed on test %d " 359 "for %s: ret=%d\n", j, algo, -ret); 360 goto out; 361 } 362 ret = ahash_guard_result(result, 1, digest_size); 363 if (ret) { 364 pr_err("alg: hash: update failed on test %d " 365 "for %s: used req->result\n", j, algo); 366 goto out; 367 } 368 ret = crypto_wait_req(crypto_ahash_final(req), &wait); 369 if (ret) { 370 pr_err("alg: hash: final failed on test %d " 371 "for %s: ret=%d\n", j, algo, -ret); 372 goto out; 373 } 374 } 375 376 if (memcmp(result, template[i].digest, 377 crypto_ahash_digestsize(tfm))) { 378 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 379 j, algo); 380 hexdump(result, crypto_ahash_digestsize(tfm)); 381 ret = -EINVAL; 382 goto out; 383 } 384 } 385 386 j = 0; 387 for (i = 0; i < tcount; i++) { 388 /* alignment tests are only done with continuous buffers */ 389 if (align_offset != 0) 390 break; 391 392 if (!template[i].np) 393 continue; 394 395 j++; 396 memset(result, 0, digest_size); 397 398 temp = 0; 399 sg_init_table(sg, template[i].np); 400 ret = -EINVAL; 401 for (k = 0; k < template[i].np; k++) { 402 if (WARN_ON(offset_in_page(IDX[k]) + 403 template[i].tap[k] > PAGE_SIZE)) 404 goto out; 405 sg_set_buf(&sg[k], 406 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 407 offset_in_page(IDX[k]), 408 template[i].plaintext + temp, 409 template[i].tap[k]), 410 template[i].tap[k]); 411 temp += template[i].tap[k]; 412 } 413 414 if (template[i].ksize) { 415 if (template[i].ksize > MAX_KEYLEN) { 416 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 417 j, algo, template[i].ksize, MAX_KEYLEN); 418 ret = -EINVAL; 419 goto out; 420 } 421 crypto_ahash_clear_flags(tfm, ~0); 422 memcpy(key, template[i].key, template[i].ksize); 423 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 424 425 if (ret) { 426 printk(KERN_ERR "alg: hash: setkey " 427 "failed on chunking test %d " 428 "for %s: ret=%d\n", j, algo, -ret); 429 goto out; 430 } 431 } 432 433 ahash_request_set_crypt(req, sg, result, template[i].psize); 434 ret = crypto_wait_req(crypto_ahash_digest(req), &wait); 435 if (ret) { 436 pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n", 437 j, algo, -ret); 438 goto out; 439 } 440 441 if (memcmp(result, template[i].digest, 442 crypto_ahash_digestsize(tfm))) { 443 printk(KERN_ERR "alg: hash: Chunking test %d " 444 "failed for %s\n", j, algo); 445 hexdump(result, crypto_ahash_digestsize(tfm)); 446 ret = -EINVAL; 447 goto out; 448 } 449 } 450 451 /* partial update exercise */ 452 j = 0; 453 for (i = 0; i < tcount; i++) { 454 /* alignment tests are only done with continuous buffers */ 455 if (align_offset != 0) 456 break; 457 458 if (template[i].np < 2) 459 continue; 460 461 j++; 462 memset(result, 0, digest_size); 463 464 ret = -EINVAL; 465 hash_buff = xbuf[0]; 466 memcpy(hash_buff, template[i].plaintext, 467 template[i].tap[0]); 468 sg_init_one(&sg[0], hash_buff, template[i].tap[0]); 469 470 if (template[i].ksize) { 471 crypto_ahash_clear_flags(tfm, ~0); 472 if (template[i].ksize > MAX_KEYLEN) { 473 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 474 j, algo, template[i].ksize, MAX_KEYLEN); 475 ret = -EINVAL; 476 goto out; 477 } 478 memcpy(key, template[i].key, template[i].ksize); 479 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 480 if (ret) { 481 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n", 482 j, algo, -ret); 483 goto out; 484 } 485 } 486 487 ahash_request_set_crypt(req, sg, result, template[i].tap[0]); 488 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 489 if (ret) { 490 pr_err("alg: hash: init failed on test %d for %s: ret=%d\n", 491 j, algo, -ret); 492 goto out; 493 } 494 ret = crypto_wait_req(crypto_ahash_update(req), &wait); 495 if (ret) { 496 pr_err("alg: hash: update failed on test %d for %s: ret=%d\n", 497 j, algo, -ret); 498 goto out; 499 } 500 501 temp = template[i].tap[0]; 502 for (k = 1; k < template[i].np; k++) { 503 ret = ahash_partial_update(&req, tfm, &template[i], 504 hash_buff, k, temp, &sg[0], algo, result, 505 &wait); 506 if (ret) { 507 pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n", 508 j, algo, -ret); 509 goto out_noreq; 510 } 511 temp += template[i].tap[k]; 512 } 513 ret = crypto_wait_req(crypto_ahash_final(req), &wait); 514 if (ret) { 515 pr_err("alg: hash: final failed on test %d for %s: ret=%d\n", 516 j, algo, -ret); 517 goto out; 518 } 519 if (memcmp(result, template[i].digest, 520 crypto_ahash_digestsize(tfm))) { 521 pr_err("alg: hash: Partial Test %d failed for %s\n", 522 j, algo); 523 hexdump(result, crypto_ahash_digestsize(tfm)); 524 ret = -EINVAL; 525 goto out; 526 } 527 } 528 529 ret = 0; 530 531 out: 532 ahash_request_free(req); 533 out_noreq: 534 testmgr_free_buf(xbuf); 535 out_nobuf: 536 kfree(key); 537 kfree(result); 538 return ret; 539 } 540 541 static int test_hash(struct crypto_ahash *tfm, 542 const struct hash_testvec *template, 543 unsigned int tcount, bool use_digest) 544 { 545 unsigned int alignmask; 546 int ret; 547 548 ret = __test_hash(tfm, template, tcount, use_digest, 0); 549 if (ret) 550 return ret; 551 552 /* test unaligned buffers, check with one byte offset */ 553 ret = __test_hash(tfm, template, tcount, use_digest, 1); 554 if (ret) 555 return ret; 556 557 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 558 if (alignmask) { 559 /* Check if alignment mask for tfm is correctly set. */ 560 ret = __test_hash(tfm, template, tcount, use_digest, 561 alignmask + 1); 562 if (ret) 563 return ret; 564 } 565 566 return 0; 567 } 568 569 static int __test_aead(struct crypto_aead *tfm, int enc, 570 const struct aead_testvec *template, unsigned int tcount, 571 const bool diff_dst, const int align_offset) 572 { 573 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 574 unsigned int i, j, k, n, temp; 575 int ret = -ENOMEM; 576 char *q; 577 char *key; 578 struct aead_request *req; 579 struct scatterlist *sg; 580 struct scatterlist *sgout; 581 const char *e, *d; 582 struct crypto_wait wait; 583 unsigned int authsize, iv_len; 584 void *input; 585 void *output; 586 void *assoc; 587 char *iv; 588 char *xbuf[XBUFSIZE]; 589 char *xoutbuf[XBUFSIZE]; 590 char *axbuf[XBUFSIZE]; 591 592 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 593 if (!iv) 594 return ret; 595 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 596 if (!key) 597 goto out_noxbuf; 598 if (testmgr_alloc_buf(xbuf)) 599 goto out_noxbuf; 600 if (testmgr_alloc_buf(axbuf)) 601 goto out_noaxbuf; 602 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 603 goto out_nooutbuf; 604 605 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 606 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL); 607 if (!sg) 608 goto out_nosg; 609 sgout = &sg[16]; 610 611 if (diff_dst) 612 d = "-ddst"; 613 else 614 d = ""; 615 616 if (enc == ENCRYPT) 617 e = "encryption"; 618 else 619 e = "decryption"; 620 621 crypto_init_wait(&wait); 622 623 req = aead_request_alloc(tfm, GFP_KERNEL); 624 if (!req) { 625 pr_err("alg: aead%s: Failed to allocate request for %s\n", 626 d, algo); 627 goto out; 628 } 629 630 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 631 crypto_req_done, &wait); 632 633 iv_len = crypto_aead_ivsize(tfm); 634 635 for (i = 0, j = 0; i < tcount; i++) { 636 if (template[i].np) 637 continue; 638 639 j++; 640 641 /* some templates have no input data but they will 642 * touch input 643 */ 644 input = xbuf[0]; 645 input += align_offset; 646 assoc = axbuf[0]; 647 648 ret = -EINVAL; 649 if (WARN_ON(align_offset + template[i].ilen > 650 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 651 goto out; 652 653 memcpy(input, template[i].input, template[i].ilen); 654 memcpy(assoc, template[i].assoc, template[i].alen); 655 if (template[i].iv) 656 memcpy(iv, template[i].iv, iv_len); 657 else 658 memset(iv, 0, iv_len); 659 660 crypto_aead_clear_flags(tfm, ~0); 661 if (template[i].wk) 662 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 663 664 if (template[i].klen > MAX_KEYLEN) { 665 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 666 d, j, algo, template[i].klen, 667 MAX_KEYLEN); 668 ret = -EINVAL; 669 goto out; 670 } 671 memcpy(key, template[i].key, template[i].klen); 672 673 ret = crypto_aead_setkey(tfm, key, template[i].klen); 674 if (template[i].fail == !ret) { 675 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 676 d, j, algo, crypto_aead_get_flags(tfm)); 677 goto out; 678 } else if (ret) 679 continue; 680 681 authsize = abs(template[i].rlen - template[i].ilen); 682 ret = crypto_aead_setauthsize(tfm, authsize); 683 if (ret) { 684 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 685 d, authsize, j, algo); 686 goto out; 687 } 688 689 k = !!template[i].alen; 690 sg_init_table(sg, k + 1); 691 sg_set_buf(&sg[0], assoc, template[i].alen); 692 sg_set_buf(&sg[k], input, 693 template[i].ilen + (enc ? authsize : 0)); 694 output = input; 695 696 if (diff_dst) { 697 sg_init_table(sgout, k + 1); 698 sg_set_buf(&sgout[0], assoc, template[i].alen); 699 700 output = xoutbuf[0]; 701 output += align_offset; 702 sg_set_buf(&sgout[k], output, 703 template[i].rlen + (enc ? 0 : authsize)); 704 } 705 706 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 707 template[i].ilen, iv); 708 709 aead_request_set_ad(req, template[i].alen); 710 711 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req) 712 : crypto_aead_decrypt(req), &wait); 713 714 switch (ret) { 715 case 0: 716 if (template[i].novrfy) { 717 /* verification was supposed to fail */ 718 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 719 d, e, j, algo); 720 /* so really, we got a bad message */ 721 ret = -EBADMSG; 722 goto out; 723 } 724 break; 725 case -EBADMSG: 726 if (template[i].novrfy) 727 /* verification failure was expected */ 728 continue; 729 /* fall through */ 730 default: 731 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 732 d, e, j, algo, -ret); 733 goto out; 734 } 735 736 q = output; 737 if (memcmp(q, template[i].result, template[i].rlen)) { 738 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 739 d, j, e, algo); 740 hexdump(q, template[i].rlen); 741 ret = -EINVAL; 742 goto out; 743 } 744 } 745 746 for (i = 0, j = 0; i < tcount; i++) { 747 /* alignment tests are only done with continuous buffers */ 748 if (align_offset != 0) 749 break; 750 751 if (!template[i].np) 752 continue; 753 754 j++; 755 756 if (template[i].iv) 757 memcpy(iv, template[i].iv, iv_len); 758 else 759 memset(iv, 0, MAX_IVLEN); 760 761 crypto_aead_clear_flags(tfm, ~0); 762 if (template[i].wk) 763 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 764 if (template[i].klen > MAX_KEYLEN) { 765 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 766 d, j, algo, template[i].klen, MAX_KEYLEN); 767 ret = -EINVAL; 768 goto out; 769 } 770 memcpy(key, template[i].key, template[i].klen); 771 772 ret = crypto_aead_setkey(tfm, key, template[i].klen); 773 if (template[i].fail == !ret) { 774 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 775 d, j, algo, crypto_aead_get_flags(tfm)); 776 goto out; 777 } else if (ret) 778 continue; 779 780 authsize = abs(template[i].rlen - template[i].ilen); 781 782 ret = -EINVAL; 783 sg_init_table(sg, template[i].anp + template[i].np); 784 if (diff_dst) 785 sg_init_table(sgout, template[i].anp + template[i].np); 786 787 ret = -EINVAL; 788 for (k = 0, temp = 0; k < template[i].anp; k++) { 789 if (WARN_ON(offset_in_page(IDX[k]) + 790 template[i].atap[k] > PAGE_SIZE)) 791 goto out; 792 sg_set_buf(&sg[k], 793 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 794 offset_in_page(IDX[k]), 795 template[i].assoc + temp, 796 template[i].atap[k]), 797 template[i].atap[k]); 798 if (diff_dst) 799 sg_set_buf(&sgout[k], 800 axbuf[IDX[k] >> PAGE_SHIFT] + 801 offset_in_page(IDX[k]), 802 template[i].atap[k]); 803 temp += template[i].atap[k]; 804 } 805 806 for (k = 0, temp = 0; k < template[i].np; k++) { 807 if (WARN_ON(offset_in_page(IDX[k]) + 808 template[i].tap[k] > PAGE_SIZE)) 809 goto out; 810 811 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 812 memcpy(q, template[i].input + temp, template[i].tap[k]); 813 sg_set_buf(&sg[template[i].anp + k], 814 q, template[i].tap[k]); 815 816 if (diff_dst) { 817 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 818 offset_in_page(IDX[k]); 819 820 memset(q, 0, template[i].tap[k]); 821 822 sg_set_buf(&sgout[template[i].anp + k], 823 q, template[i].tap[k]); 824 } 825 826 n = template[i].tap[k]; 827 if (k == template[i].np - 1 && enc) 828 n += authsize; 829 if (offset_in_page(q) + n < PAGE_SIZE) 830 q[n] = 0; 831 832 temp += template[i].tap[k]; 833 } 834 835 ret = crypto_aead_setauthsize(tfm, authsize); 836 if (ret) { 837 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 838 d, authsize, j, algo); 839 goto out; 840 } 841 842 if (enc) { 843 if (WARN_ON(sg[template[i].anp + k - 1].offset + 844 sg[template[i].anp + k - 1].length + 845 authsize > PAGE_SIZE)) { 846 ret = -EINVAL; 847 goto out; 848 } 849 850 if (diff_dst) 851 sgout[template[i].anp + k - 1].length += 852 authsize; 853 sg[template[i].anp + k - 1].length += authsize; 854 } 855 856 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 857 template[i].ilen, 858 iv); 859 860 aead_request_set_ad(req, template[i].alen); 861 862 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req) 863 : crypto_aead_decrypt(req), &wait); 864 865 switch (ret) { 866 case 0: 867 if (template[i].novrfy) { 868 /* verification was supposed to fail */ 869 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 870 d, e, j, algo); 871 /* so really, we got a bad message */ 872 ret = -EBADMSG; 873 goto out; 874 } 875 break; 876 case -EBADMSG: 877 if (template[i].novrfy) 878 /* verification failure was expected */ 879 continue; 880 /* fall through */ 881 default: 882 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 883 d, e, j, algo, -ret); 884 goto out; 885 } 886 887 ret = -EINVAL; 888 for (k = 0, temp = 0; k < template[i].np; k++) { 889 if (diff_dst) 890 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 891 offset_in_page(IDX[k]); 892 else 893 q = xbuf[IDX[k] >> PAGE_SHIFT] + 894 offset_in_page(IDX[k]); 895 896 n = template[i].tap[k]; 897 if (k == template[i].np - 1) 898 n += enc ? authsize : -authsize; 899 900 if (memcmp(q, template[i].result + temp, n)) { 901 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 902 d, j, e, k, algo); 903 hexdump(q, n); 904 goto out; 905 } 906 907 q += n; 908 if (k == template[i].np - 1 && !enc) { 909 if (!diff_dst && 910 memcmp(q, template[i].input + 911 temp + n, authsize)) 912 n = authsize; 913 else 914 n = 0; 915 } else { 916 for (n = 0; offset_in_page(q + n) && q[n]; n++) 917 ; 918 } 919 if (n) { 920 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 921 d, j, e, k, algo, n); 922 hexdump(q, n); 923 goto out; 924 } 925 926 temp += template[i].tap[k]; 927 } 928 } 929 930 ret = 0; 931 932 out: 933 aead_request_free(req); 934 kfree(sg); 935 out_nosg: 936 if (diff_dst) 937 testmgr_free_buf(xoutbuf); 938 out_nooutbuf: 939 testmgr_free_buf(axbuf); 940 out_noaxbuf: 941 testmgr_free_buf(xbuf); 942 out_noxbuf: 943 kfree(key); 944 kfree(iv); 945 return ret; 946 } 947 948 static int test_aead(struct crypto_aead *tfm, int enc, 949 const struct aead_testvec *template, unsigned int tcount) 950 { 951 unsigned int alignmask; 952 int ret; 953 954 /* test 'dst == src' case */ 955 ret = __test_aead(tfm, enc, template, tcount, false, 0); 956 if (ret) 957 return ret; 958 959 /* test 'dst != src' case */ 960 ret = __test_aead(tfm, enc, template, tcount, true, 0); 961 if (ret) 962 return ret; 963 964 /* test unaligned buffers, check with one byte offset */ 965 ret = __test_aead(tfm, enc, template, tcount, true, 1); 966 if (ret) 967 return ret; 968 969 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 970 if (alignmask) { 971 /* Check if alignment mask for tfm is correctly set. */ 972 ret = __test_aead(tfm, enc, template, tcount, true, 973 alignmask + 1); 974 if (ret) 975 return ret; 976 } 977 978 return 0; 979 } 980 981 static int test_cipher(struct crypto_cipher *tfm, int enc, 982 const struct cipher_testvec *template, 983 unsigned int tcount) 984 { 985 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 986 unsigned int i, j, k; 987 char *q; 988 const char *e; 989 const char *input, *result; 990 void *data; 991 char *xbuf[XBUFSIZE]; 992 int ret = -ENOMEM; 993 994 if (testmgr_alloc_buf(xbuf)) 995 goto out_nobuf; 996 997 if (enc == ENCRYPT) 998 e = "encryption"; 999 else 1000 e = "decryption"; 1001 1002 j = 0; 1003 for (i = 0; i < tcount; i++) { 1004 if (template[i].np) 1005 continue; 1006 1007 if (fips_enabled && template[i].fips_skip) 1008 continue; 1009 1010 input = enc ? template[i].ptext : template[i].ctext; 1011 result = enc ? template[i].ctext : template[i].ptext; 1012 j++; 1013 1014 ret = -EINVAL; 1015 if (WARN_ON(template[i].len > PAGE_SIZE)) 1016 goto out; 1017 1018 data = xbuf[0]; 1019 memcpy(data, input, template[i].len); 1020 1021 crypto_cipher_clear_flags(tfm, ~0); 1022 if (template[i].wk) 1023 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 1024 1025 ret = crypto_cipher_setkey(tfm, template[i].key, 1026 template[i].klen); 1027 if (template[i].fail == !ret) { 1028 printk(KERN_ERR "alg: cipher: setkey failed " 1029 "on test %d for %s: flags=%x\n", j, 1030 algo, crypto_cipher_get_flags(tfm)); 1031 goto out; 1032 } else if (ret) 1033 continue; 1034 1035 for (k = 0; k < template[i].len; 1036 k += crypto_cipher_blocksize(tfm)) { 1037 if (enc) 1038 crypto_cipher_encrypt_one(tfm, data + k, 1039 data + k); 1040 else 1041 crypto_cipher_decrypt_one(tfm, data + k, 1042 data + k); 1043 } 1044 1045 q = data; 1046 if (memcmp(q, result, template[i].len)) { 1047 printk(KERN_ERR "alg: cipher: Test %d failed " 1048 "on %s for %s\n", j, e, algo); 1049 hexdump(q, template[i].len); 1050 ret = -EINVAL; 1051 goto out; 1052 } 1053 } 1054 1055 ret = 0; 1056 1057 out: 1058 testmgr_free_buf(xbuf); 1059 out_nobuf: 1060 return ret; 1061 } 1062 1063 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 1064 const struct cipher_testvec *template, 1065 unsigned int tcount, 1066 const bool diff_dst, const int align_offset) 1067 { 1068 const char *algo = 1069 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 1070 unsigned int i, j, k, n, temp; 1071 char *q; 1072 struct skcipher_request *req; 1073 struct scatterlist sg[8]; 1074 struct scatterlist sgout[8]; 1075 const char *e, *d; 1076 struct crypto_wait wait; 1077 const char *input, *result; 1078 void *data; 1079 char iv[MAX_IVLEN]; 1080 char *xbuf[XBUFSIZE]; 1081 char *xoutbuf[XBUFSIZE]; 1082 int ret = -ENOMEM; 1083 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 1084 1085 if (testmgr_alloc_buf(xbuf)) 1086 goto out_nobuf; 1087 1088 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 1089 goto out_nooutbuf; 1090 1091 if (diff_dst) 1092 d = "-ddst"; 1093 else 1094 d = ""; 1095 1096 if (enc == ENCRYPT) 1097 e = "encryption"; 1098 else 1099 e = "decryption"; 1100 1101 crypto_init_wait(&wait); 1102 1103 req = skcipher_request_alloc(tfm, GFP_KERNEL); 1104 if (!req) { 1105 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 1106 d, algo); 1107 goto out; 1108 } 1109 1110 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1111 crypto_req_done, &wait); 1112 1113 j = 0; 1114 for (i = 0; i < tcount; i++) { 1115 if (template[i].np && !template[i].also_non_np) 1116 continue; 1117 1118 if (fips_enabled && template[i].fips_skip) 1119 continue; 1120 1121 if (template[i].iv && !(template[i].generates_iv && enc)) 1122 memcpy(iv, template[i].iv, ivsize); 1123 else 1124 memset(iv, 0, MAX_IVLEN); 1125 1126 input = enc ? template[i].ptext : template[i].ctext; 1127 result = enc ? template[i].ctext : template[i].ptext; 1128 j++; 1129 ret = -EINVAL; 1130 if (WARN_ON(align_offset + template[i].len > PAGE_SIZE)) 1131 goto out; 1132 1133 data = xbuf[0]; 1134 data += align_offset; 1135 memcpy(data, input, template[i].len); 1136 1137 crypto_skcipher_clear_flags(tfm, ~0); 1138 if (template[i].wk) 1139 crypto_skcipher_set_flags(tfm, 1140 CRYPTO_TFM_REQ_WEAK_KEY); 1141 1142 ret = crypto_skcipher_setkey(tfm, template[i].key, 1143 template[i].klen); 1144 if (template[i].fail == !ret) { 1145 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 1146 d, j, algo, crypto_skcipher_get_flags(tfm)); 1147 goto out; 1148 } else if (ret) 1149 continue; 1150 1151 sg_init_one(&sg[0], data, template[i].len); 1152 if (diff_dst) { 1153 data = xoutbuf[0]; 1154 data += align_offset; 1155 sg_init_one(&sgout[0], data, template[i].len); 1156 } 1157 1158 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1159 template[i].len, iv); 1160 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) : 1161 crypto_skcipher_decrypt(req), &wait); 1162 1163 if (ret) { 1164 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 1165 d, e, j, algo, -ret); 1166 goto out; 1167 } 1168 1169 q = data; 1170 if (memcmp(q, result, template[i].len)) { 1171 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n", 1172 d, j, e, algo); 1173 hexdump(q, template[i].len); 1174 ret = -EINVAL; 1175 goto out; 1176 } 1177 1178 if (template[i].generates_iv && enc && 1179 memcmp(iv, template[i].iv, crypto_skcipher_ivsize(tfm))) { 1180 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n", 1181 d, j, e, algo); 1182 hexdump(iv, crypto_skcipher_ivsize(tfm)); 1183 ret = -EINVAL; 1184 goto out; 1185 } 1186 } 1187 1188 j = 0; 1189 for (i = 0; i < tcount; i++) { 1190 /* alignment tests are only done with continuous buffers */ 1191 if (align_offset != 0) 1192 break; 1193 1194 if (!template[i].np) 1195 continue; 1196 1197 if (fips_enabled && template[i].fips_skip) 1198 continue; 1199 1200 if (template[i].iv && !(template[i].generates_iv && enc)) 1201 memcpy(iv, template[i].iv, ivsize); 1202 else 1203 memset(iv, 0, MAX_IVLEN); 1204 1205 input = enc ? template[i].ptext : template[i].ctext; 1206 result = enc ? template[i].ctext : template[i].ptext; 1207 j++; 1208 crypto_skcipher_clear_flags(tfm, ~0); 1209 if (template[i].wk) 1210 crypto_skcipher_set_flags(tfm, 1211 CRYPTO_TFM_REQ_WEAK_KEY); 1212 1213 ret = crypto_skcipher_setkey(tfm, template[i].key, 1214 template[i].klen); 1215 if (template[i].fail == !ret) { 1216 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1217 d, j, algo, crypto_skcipher_get_flags(tfm)); 1218 goto out; 1219 } else if (ret) 1220 continue; 1221 1222 temp = 0; 1223 ret = -EINVAL; 1224 sg_init_table(sg, template[i].np); 1225 if (diff_dst) 1226 sg_init_table(sgout, template[i].np); 1227 for (k = 0; k < template[i].np; k++) { 1228 if (WARN_ON(offset_in_page(IDX[k]) + 1229 template[i].tap[k] > PAGE_SIZE)) 1230 goto out; 1231 1232 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 1233 1234 memcpy(q, input + temp, template[i].tap[k]); 1235 1236 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 1237 q[template[i].tap[k]] = 0; 1238 1239 sg_set_buf(&sg[k], q, template[i].tap[k]); 1240 if (diff_dst) { 1241 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1242 offset_in_page(IDX[k]); 1243 1244 sg_set_buf(&sgout[k], q, template[i].tap[k]); 1245 1246 memset(q, 0, template[i].tap[k]); 1247 if (offset_in_page(q) + 1248 template[i].tap[k] < PAGE_SIZE) 1249 q[template[i].tap[k]] = 0; 1250 } 1251 1252 temp += template[i].tap[k]; 1253 } 1254 1255 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1256 template[i].len, iv); 1257 1258 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) : 1259 crypto_skcipher_decrypt(req), &wait); 1260 1261 if (ret) { 1262 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1263 d, e, j, algo, -ret); 1264 goto out; 1265 } 1266 1267 temp = 0; 1268 ret = -EINVAL; 1269 for (k = 0; k < template[i].np; k++) { 1270 if (diff_dst) 1271 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1272 offset_in_page(IDX[k]); 1273 else 1274 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1275 offset_in_page(IDX[k]); 1276 1277 if (memcmp(q, result + temp, template[i].tap[k])) { 1278 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1279 d, j, e, k, algo); 1280 hexdump(q, template[i].tap[k]); 1281 goto out; 1282 } 1283 1284 q += template[i].tap[k]; 1285 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1286 ; 1287 if (n) { 1288 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1289 d, j, e, k, algo, n); 1290 hexdump(q, n); 1291 goto out; 1292 } 1293 temp += template[i].tap[k]; 1294 } 1295 } 1296 1297 ret = 0; 1298 1299 out: 1300 skcipher_request_free(req); 1301 if (diff_dst) 1302 testmgr_free_buf(xoutbuf); 1303 out_nooutbuf: 1304 testmgr_free_buf(xbuf); 1305 out_nobuf: 1306 return ret; 1307 } 1308 1309 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 1310 const struct cipher_testvec *template, 1311 unsigned int tcount) 1312 { 1313 unsigned int alignmask; 1314 int ret; 1315 1316 /* test 'dst == src' case */ 1317 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1318 if (ret) 1319 return ret; 1320 1321 /* test 'dst != src' case */ 1322 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1323 if (ret) 1324 return ret; 1325 1326 /* test unaligned buffers, check with one byte offset */ 1327 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1328 if (ret) 1329 return ret; 1330 1331 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1332 if (alignmask) { 1333 /* Check if alignment mask for tfm is correctly set. */ 1334 ret = __test_skcipher(tfm, enc, template, tcount, true, 1335 alignmask + 1); 1336 if (ret) 1337 return ret; 1338 } 1339 1340 return 0; 1341 } 1342 1343 static int test_comp(struct crypto_comp *tfm, 1344 const struct comp_testvec *ctemplate, 1345 const struct comp_testvec *dtemplate, 1346 int ctcount, int dtcount) 1347 { 1348 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1349 char *output, *decomp_output; 1350 unsigned int i; 1351 int ret; 1352 1353 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1354 if (!output) 1355 return -ENOMEM; 1356 1357 decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1358 if (!decomp_output) { 1359 kfree(output); 1360 return -ENOMEM; 1361 } 1362 1363 for (i = 0; i < ctcount; i++) { 1364 int ilen; 1365 unsigned int dlen = COMP_BUF_SIZE; 1366 1367 memset(output, 0, sizeof(COMP_BUF_SIZE)); 1368 memset(decomp_output, 0, sizeof(COMP_BUF_SIZE)); 1369 1370 ilen = ctemplate[i].inlen; 1371 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1372 ilen, output, &dlen); 1373 if (ret) { 1374 printk(KERN_ERR "alg: comp: compression failed " 1375 "on test %d for %s: ret=%d\n", i + 1, algo, 1376 -ret); 1377 goto out; 1378 } 1379 1380 ilen = dlen; 1381 dlen = COMP_BUF_SIZE; 1382 ret = crypto_comp_decompress(tfm, output, 1383 ilen, decomp_output, &dlen); 1384 if (ret) { 1385 pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n", 1386 i + 1, algo, -ret); 1387 goto out; 1388 } 1389 1390 if (dlen != ctemplate[i].inlen) { 1391 printk(KERN_ERR "alg: comp: Compression test %d " 1392 "failed for %s: output len = %d\n", i + 1, algo, 1393 dlen); 1394 ret = -EINVAL; 1395 goto out; 1396 } 1397 1398 if (memcmp(decomp_output, ctemplate[i].input, 1399 ctemplate[i].inlen)) { 1400 pr_err("alg: comp: compression failed: output differs: on test %d for %s\n", 1401 i + 1, algo); 1402 hexdump(decomp_output, dlen); 1403 ret = -EINVAL; 1404 goto out; 1405 } 1406 } 1407 1408 for (i = 0; i < dtcount; i++) { 1409 int ilen; 1410 unsigned int dlen = COMP_BUF_SIZE; 1411 1412 memset(decomp_output, 0, sizeof(COMP_BUF_SIZE)); 1413 1414 ilen = dtemplate[i].inlen; 1415 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1416 ilen, decomp_output, &dlen); 1417 if (ret) { 1418 printk(KERN_ERR "alg: comp: decompression failed " 1419 "on test %d for %s: ret=%d\n", i + 1, algo, 1420 -ret); 1421 goto out; 1422 } 1423 1424 if (dlen != dtemplate[i].outlen) { 1425 printk(KERN_ERR "alg: comp: Decompression test %d " 1426 "failed for %s: output len = %d\n", i + 1, algo, 1427 dlen); 1428 ret = -EINVAL; 1429 goto out; 1430 } 1431 1432 if (memcmp(decomp_output, dtemplate[i].output, dlen)) { 1433 printk(KERN_ERR "alg: comp: Decompression test %d " 1434 "failed for %s\n", i + 1, algo); 1435 hexdump(decomp_output, dlen); 1436 ret = -EINVAL; 1437 goto out; 1438 } 1439 } 1440 1441 ret = 0; 1442 1443 out: 1444 kfree(decomp_output); 1445 kfree(output); 1446 return ret; 1447 } 1448 1449 static int test_acomp(struct crypto_acomp *tfm, 1450 const struct comp_testvec *ctemplate, 1451 const struct comp_testvec *dtemplate, 1452 int ctcount, int dtcount) 1453 { 1454 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm)); 1455 unsigned int i; 1456 char *output, *decomp_out; 1457 int ret; 1458 struct scatterlist src, dst; 1459 struct acomp_req *req; 1460 struct crypto_wait wait; 1461 1462 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1463 if (!output) 1464 return -ENOMEM; 1465 1466 decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1467 if (!decomp_out) { 1468 kfree(output); 1469 return -ENOMEM; 1470 } 1471 1472 for (i = 0; i < ctcount; i++) { 1473 unsigned int dlen = COMP_BUF_SIZE; 1474 int ilen = ctemplate[i].inlen; 1475 void *input_vec; 1476 1477 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL); 1478 if (!input_vec) { 1479 ret = -ENOMEM; 1480 goto out; 1481 } 1482 1483 memset(output, 0, dlen); 1484 crypto_init_wait(&wait); 1485 sg_init_one(&src, input_vec, ilen); 1486 sg_init_one(&dst, output, dlen); 1487 1488 req = acomp_request_alloc(tfm); 1489 if (!req) { 1490 pr_err("alg: acomp: request alloc failed for %s\n", 1491 algo); 1492 kfree(input_vec); 1493 ret = -ENOMEM; 1494 goto out; 1495 } 1496 1497 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1498 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1499 crypto_req_done, &wait); 1500 1501 ret = crypto_wait_req(crypto_acomp_compress(req), &wait); 1502 if (ret) { 1503 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 1504 i + 1, algo, -ret); 1505 kfree(input_vec); 1506 acomp_request_free(req); 1507 goto out; 1508 } 1509 1510 ilen = req->dlen; 1511 dlen = COMP_BUF_SIZE; 1512 sg_init_one(&src, output, ilen); 1513 sg_init_one(&dst, decomp_out, dlen); 1514 crypto_init_wait(&wait); 1515 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1516 1517 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait); 1518 if (ret) { 1519 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 1520 i + 1, algo, -ret); 1521 kfree(input_vec); 1522 acomp_request_free(req); 1523 goto out; 1524 } 1525 1526 if (req->dlen != ctemplate[i].inlen) { 1527 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n", 1528 i + 1, algo, req->dlen); 1529 ret = -EINVAL; 1530 kfree(input_vec); 1531 acomp_request_free(req); 1532 goto out; 1533 } 1534 1535 if (memcmp(input_vec, decomp_out, req->dlen)) { 1536 pr_err("alg: acomp: Compression test %d failed for %s\n", 1537 i + 1, algo); 1538 hexdump(output, req->dlen); 1539 ret = -EINVAL; 1540 kfree(input_vec); 1541 acomp_request_free(req); 1542 goto out; 1543 } 1544 1545 kfree(input_vec); 1546 acomp_request_free(req); 1547 } 1548 1549 for (i = 0; i < dtcount; i++) { 1550 unsigned int dlen = COMP_BUF_SIZE; 1551 int ilen = dtemplate[i].inlen; 1552 void *input_vec; 1553 1554 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL); 1555 if (!input_vec) { 1556 ret = -ENOMEM; 1557 goto out; 1558 } 1559 1560 memset(output, 0, dlen); 1561 crypto_init_wait(&wait); 1562 sg_init_one(&src, input_vec, ilen); 1563 sg_init_one(&dst, output, dlen); 1564 1565 req = acomp_request_alloc(tfm); 1566 if (!req) { 1567 pr_err("alg: acomp: request alloc failed for %s\n", 1568 algo); 1569 kfree(input_vec); 1570 ret = -ENOMEM; 1571 goto out; 1572 } 1573 1574 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1575 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1576 crypto_req_done, &wait); 1577 1578 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait); 1579 if (ret) { 1580 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n", 1581 i + 1, algo, -ret); 1582 kfree(input_vec); 1583 acomp_request_free(req); 1584 goto out; 1585 } 1586 1587 if (req->dlen != dtemplate[i].outlen) { 1588 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n", 1589 i + 1, algo, req->dlen); 1590 ret = -EINVAL; 1591 kfree(input_vec); 1592 acomp_request_free(req); 1593 goto out; 1594 } 1595 1596 if (memcmp(output, dtemplate[i].output, req->dlen)) { 1597 pr_err("alg: acomp: Decompression test %d failed for %s\n", 1598 i + 1, algo); 1599 hexdump(output, req->dlen); 1600 ret = -EINVAL; 1601 kfree(input_vec); 1602 acomp_request_free(req); 1603 goto out; 1604 } 1605 1606 kfree(input_vec); 1607 acomp_request_free(req); 1608 } 1609 1610 ret = 0; 1611 1612 out: 1613 kfree(decomp_out); 1614 kfree(output); 1615 return ret; 1616 } 1617 1618 static int test_cprng(struct crypto_rng *tfm, 1619 const struct cprng_testvec *template, 1620 unsigned int tcount) 1621 { 1622 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1623 int err = 0, i, j, seedsize; 1624 u8 *seed; 1625 char result[32]; 1626 1627 seedsize = crypto_rng_seedsize(tfm); 1628 1629 seed = kmalloc(seedsize, GFP_KERNEL); 1630 if (!seed) { 1631 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1632 "for %s\n", algo); 1633 return -ENOMEM; 1634 } 1635 1636 for (i = 0; i < tcount; i++) { 1637 memset(result, 0, 32); 1638 1639 memcpy(seed, template[i].v, template[i].vlen); 1640 memcpy(seed + template[i].vlen, template[i].key, 1641 template[i].klen); 1642 memcpy(seed + template[i].vlen + template[i].klen, 1643 template[i].dt, template[i].dtlen); 1644 1645 err = crypto_rng_reset(tfm, seed, seedsize); 1646 if (err) { 1647 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1648 "for %s\n", algo); 1649 goto out; 1650 } 1651 1652 for (j = 0; j < template[i].loops; j++) { 1653 err = crypto_rng_get_bytes(tfm, result, 1654 template[i].rlen); 1655 if (err < 0) { 1656 printk(KERN_ERR "alg: cprng: Failed to obtain " 1657 "the correct amount of random data for " 1658 "%s (requested %d)\n", algo, 1659 template[i].rlen); 1660 goto out; 1661 } 1662 } 1663 1664 err = memcmp(result, template[i].result, 1665 template[i].rlen); 1666 if (err) { 1667 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1668 i, algo); 1669 hexdump(result, template[i].rlen); 1670 err = -EINVAL; 1671 goto out; 1672 } 1673 } 1674 1675 out: 1676 kfree(seed); 1677 return err; 1678 } 1679 1680 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1681 u32 type, u32 mask) 1682 { 1683 struct crypto_aead *tfm; 1684 int err = 0; 1685 1686 tfm = crypto_alloc_aead(driver, type, mask); 1687 if (IS_ERR(tfm)) { 1688 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1689 "%ld\n", driver, PTR_ERR(tfm)); 1690 return PTR_ERR(tfm); 1691 } 1692 1693 if (desc->suite.aead.enc.vecs) { 1694 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1695 desc->suite.aead.enc.count); 1696 if (err) 1697 goto out; 1698 } 1699 1700 if (!err && desc->suite.aead.dec.vecs) 1701 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1702 desc->suite.aead.dec.count); 1703 1704 out: 1705 crypto_free_aead(tfm); 1706 return err; 1707 } 1708 1709 static int alg_test_cipher(const struct alg_test_desc *desc, 1710 const char *driver, u32 type, u32 mask) 1711 { 1712 const struct cipher_test_suite *suite = &desc->suite.cipher; 1713 struct crypto_cipher *tfm; 1714 int err; 1715 1716 tfm = crypto_alloc_cipher(driver, type, mask); 1717 if (IS_ERR(tfm)) { 1718 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1719 "%s: %ld\n", driver, PTR_ERR(tfm)); 1720 return PTR_ERR(tfm); 1721 } 1722 1723 err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count); 1724 if (!err) 1725 err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count); 1726 1727 crypto_free_cipher(tfm); 1728 return err; 1729 } 1730 1731 static int alg_test_skcipher(const struct alg_test_desc *desc, 1732 const char *driver, u32 type, u32 mask) 1733 { 1734 const struct cipher_test_suite *suite = &desc->suite.cipher; 1735 struct crypto_skcipher *tfm; 1736 int err; 1737 1738 tfm = crypto_alloc_skcipher(driver, type, mask); 1739 if (IS_ERR(tfm)) { 1740 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1741 "%s: %ld\n", driver, PTR_ERR(tfm)); 1742 return PTR_ERR(tfm); 1743 } 1744 1745 err = test_skcipher(tfm, ENCRYPT, suite->vecs, suite->count); 1746 if (!err) 1747 err = test_skcipher(tfm, DECRYPT, suite->vecs, suite->count); 1748 1749 crypto_free_skcipher(tfm); 1750 return err; 1751 } 1752 1753 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1754 u32 type, u32 mask) 1755 { 1756 struct crypto_comp *comp; 1757 struct crypto_acomp *acomp; 1758 int err; 1759 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK; 1760 1761 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) { 1762 acomp = crypto_alloc_acomp(driver, type, mask); 1763 if (IS_ERR(acomp)) { 1764 pr_err("alg: acomp: Failed to load transform for %s: %ld\n", 1765 driver, PTR_ERR(acomp)); 1766 return PTR_ERR(acomp); 1767 } 1768 err = test_acomp(acomp, desc->suite.comp.comp.vecs, 1769 desc->suite.comp.decomp.vecs, 1770 desc->suite.comp.comp.count, 1771 desc->suite.comp.decomp.count); 1772 crypto_free_acomp(acomp); 1773 } else { 1774 comp = crypto_alloc_comp(driver, type, mask); 1775 if (IS_ERR(comp)) { 1776 pr_err("alg: comp: Failed to load transform for %s: %ld\n", 1777 driver, PTR_ERR(comp)); 1778 return PTR_ERR(comp); 1779 } 1780 1781 err = test_comp(comp, desc->suite.comp.comp.vecs, 1782 desc->suite.comp.decomp.vecs, 1783 desc->suite.comp.comp.count, 1784 desc->suite.comp.decomp.count); 1785 1786 crypto_free_comp(comp); 1787 } 1788 return err; 1789 } 1790 1791 static int __alg_test_hash(const struct hash_testvec *template, 1792 unsigned int tcount, const char *driver, 1793 u32 type, u32 mask) 1794 { 1795 struct crypto_ahash *tfm; 1796 int err; 1797 1798 tfm = crypto_alloc_ahash(driver, type, mask); 1799 if (IS_ERR(tfm)) { 1800 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1801 "%ld\n", driver, PTR_ERR(tfm)); 1802 return PTR_ERR(tfm); 1803 } 1804 1805 err = test_hash(tfm, template, tcount, true); 1806 if (!err) 1807 err = test_hash(tfm, template, tcount, false); 1808 crypto_free_ahash(tfm); 1809 return err; 1810 } 1811 1812 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1813 u32 type, u32 mask) 1814 { 1815 const struct hash_testvec *template = desc->suite.hash.vecs; 1816 unsigned int tcount = desc->suite.hash.count; 1817 unsigned int nr_unkeyed, nr_keyed; 1818 int err; 1819 1820 /* 1821 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests 1822 * first, before setting a key on the tfm. To make this easier, we 1823 * require that the unkeyed test vectors (if any) are listed first. 1824 */ 1825 1826 for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) { 1827 if (template[nr_unkeyed].ksize) 1828 break; 1829 } 1830 for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) { 1831 if (!template[nr_unkeyed + nr_keyed].ksize) { 1832 pr_err("alg: hash: test vectors for %s out of order, " 1833 "unkeyed ones must come first\n", desc->alg); 1834 return -EINVAL; 1835 } 1836 } 1837 1838 err = 0; 1839 if (nr_unkeyed) { 1840 err = __alg_test_hash(template, nr_unkeyed, driver, type, mask); 1841 template += nr_unkeyed; 1842 } 1843 1844 if (!err && nr_keyed) 1845 err = __alg_test_hash(template, nr_keyed, driver, type, mask); 1846 1847 return err; 1848 } 1849 1850 static int alg_test_crc32c(const struct alg_test_desc *desc, 1851 const char *driver, u32 type, u32 mask) 1852 { 1853 struct crypto_shash *tfm; 1854 u32 val; 1855 int err; 1856 1857 err = alg_test_hash(desc, driver, type, mask); 1858 if (err) 1859 goto out; 1860 1861 tfm = crypto_alloc_shash(driver, type, mask); 1862 if (IS_ERR(tfm)) { 1863 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1864 "%ld\n", driver, PTR_ERR(tfm)); 1865 err = PTR_ERR(tfm); 1866 goto out; 1867 } 1868 1869 do { 1870 SHASH_DESC_ON_STACK(shash, tfm); 1871 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1872 1873 shash->tfm = tfm; 1874 shash->flags = 0; 1875 1876 *ctx = le32_to_cpu(420553207); 1877 err = crypto_shash_final(shash, (u8 *)&val); 1878 if (err) { 1879 printk(KERN_ERR "alg: crc32c: Operation failed for " 1880 "%s: %d\n", driver, err); 1881 break; 1882 } 1883 1884 if (val != ~420553207) { 1885 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1886 "%d\n", driver, val); 1887 err = -EINVAL; 1888 } 1889 } while (0); 1890 1891 crypto_free_shash(tfm); 1892 1893 out: 1894 return err; 1895 } 1896 1897 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1898 u32 type, u32 mask) 1899 { 1900 struct crypto_rng *rng; 1901 int err; 1902 1903 rng = crypto_alloc_rng(driver, type, mask); 1904 if (IS_ERR(rng)) { 1905 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1906 "%ld\n", driver, PTR_ERR(rng)); 1907 return PTR_ERR(rng); 1908 } 1909 1910 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1911 1912 crypto_free_rng(rng); 1913 1914 return err; 1915 } 1916 1917 1918 static int drbg_cavs_test(const struct drbg_testvec *test, int pr, 1919 const char *driver, u32 type, u32 mask) 1920 { 1921 int ret = -EAGAIN; 1922 struct crypto_rng *drng; 1923 struct drbg_test_data test_data; 1924 struct drbg_string addtl, pers, testentropy; 1925 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 1926 1927 if (!buf) 1928 return -ENOMEM; 1929 1930 drng = crypto_alloc_rng(driver, type, mask); 1931 if (IS_ERR(drng)) { 1932 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 1933 "%s\n", driver); 1934 kzfree(buf); 1935 return -ENOMEM; 1936 } 1937 1938 test_data.testentropy = &testentropy; 1939 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 1940 drbg_string_fill(&pers, test->pers, test->perslen); 1941 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 1942 if (ret) { 1943 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 1944 goto outbuf; 1945 } 1946 1947 drbg_string_fill(&addtl, test->addtla, test->addtllen); 1948 if (pr) { 1949 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 1950 ret = crypto_drbg_get_bytes_addtl_test(drng, 1951 buf, test->expectedlen, &addtl, &test_data); 1952 } else { 1953 ret = crypto_drbg_get_bytes_addtl(drng, 1954 buf, test->expectedlen, &addtl); 1955 } 1956 if (ret < 0) { 1957 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1958 "driver %s\n", driver); 1959 goto outbuf; 1960 } 1961 1962 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 1963 if (pr) { 1964 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 1965 ret = crypto_drbg_get_bytes_addtl_test(drng, 1966 buf, test->expectedlen, &addtl, &test_data); 1967 } else { 1968 ret = crypto_drbg_get_bytes_addtl(drng, 1969 buf, test->expectedlen, &addtl); 1970 } 1971 if (ret < 0) { 1972 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1973 "driver %s\n", driver); 1974 goto outbuf; 1975 } 1976 1977 ret = memcmp(test->expected, buf, test->expectedlen); 1978 1979 outbuf: 1980 crypto_free_rng(drng); 1981 kzfree(buf); 1982 return ret; 1983 } 1984 1985 1986 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 1987 u32 type, u32 mask) 1988 { 1989 int err = 0; 1990 int pr = 0; 1991 int i = 0; 1992 const struct drbg_testvec *template = desc->suite.drbg.vecs; 1993 unsigned int tcount = desc->suite.drbg.count; 1994 1995 if (0 == memcmp(driver, "drbg_pr_", 8)) 1996 pr = 1; 1997 1998 for (i = 0; i < tcount; i++) { 1999 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 2000 if (err) { 2001 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 2002 i, driver); 2003 err = -EINVAL; 2004 break; 2005 } 2006 } 2007 return err; 2008 2009 } 2010 2011 static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec, 2012 const char *alg) 2013 { 2014 struct kpp_request *req; 2015 void *input_buf = NULL; 2016 void *output_buf = NULL; 2017 void *a_public = NULL; 2018 void *a_ss = NULL; 2019 void *shared_secret = NULL; 2020 struct crypto_wait wait; 2021 unsigned int out_len_max; 2022 int err = -ENOMEM; 2023 struct scatterlist src, dst; 2024 2025 req = kpp_request_alloc(tfm, GFP_KERNEL); 2026 if (!req) 2027 return err; 2028 2029 crypto_init_wait(&wait); 2030 2031 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size); 2032 if (err < 0) 2033 goto free_req; 2034 2035 out_len_max = crypto_kpp_maxsize(tfm); 2036 output_buf = kzalloc(out_len_max, GFP_KERNEL); 2037 if (!output_buf) { 2038 err = -ENOMEM; 2039 goto free_req; 2040 } 2041 2042 /* Use appropriate parameter as base */ 2043 kpp_request_set_input(req, NULL, 0); 2044 sg_init_one(&dst, output_buf, out_len_max); 2045 kpp_request_set_output(req, &dst, out_len_max); 2046 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2047 crypto_req_done, &wait); 2048 2049 /* Compute party A's public key */ 2050 err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait); 2051 if (err) { 2052 pr_err("alg: %s: Party A: generate public key test failed. err %d\n", 2053 alg, err); 2054 goto free_output; 2055 } 2056 2057 if (vec->genkey) { 2058 /* Save party A's public key */ 2059 a_public = kzalloc(out_len_max, GFP_KERNEL); 2060 if (!a_public) { 2061 err = -ENOMEM; 2062 goto free_output; 2063 } 2064 memcpy(a_public, sg_virt(req->dst), out_len_max); 2065 } else { 2066 /* Verify calculated public key */ 2067 if (memcmp(vec->expected_a_public, sg_virt(req->dst), 2068 vec->expected_a_public_size)) { 2069 pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n", 2070 alg); 2071 err = -EINVAL; 2072 goto free_output; 2073 } 2074 } 2075 2076 /* Calculate shared secret key by using counter part (b) public key. */ 2077 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL); 2078 if (!input_buf) { 2079 err = -ENOMEM; 2080 goto free_output; 2081 } 2082 2083 memcpy(input_buf, vec->b_public, vec->b_public_size); 2084 sg_init_one(&src, input_buf, vec->b_public_size); 2085 sg_init_one(&dst, output_buf, out_len_max); 2086 kpp_request_set_input(req, &src, vec->b_public_size); 2087 kpp_request_set_output(req, &dst, out_len_max); 2088 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2089 crypto_req_done, &wait); 2090 err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait); 2091 if (err) { 2092 pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n", 2093 alg, err); 2094 goto free_all; 2095 } 2096 2097 if (vec->genkey) { 2098 /* Save the shared secret obtained by party A */ 2099 a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL); 2100 if (!a_ss) { 2101 err = -ENOMEM; 2102 goto free_all; 2103 } 2104 memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size); 2105 2106 /* 2107 * Calculate party B's shared secret by using party A's 2108 * public key. 2109 */ 2110 err = crypto_kpp_set_secret(tfm, vec->b_secret, 2111 vec->b_secret_size); 2112 if (err < 0) 2113 goto free_all; 2114 2115 sg_init_one(&src, a_public, vec->expected_a_public_size); 2116 sg_init_one(&dst, output_buf, out_len_max); 2117 kpp_request_set_input(req, &src, vec->expected_a_public_size); 2118 kpp_request_set_output(req, &dst, out_len_max); 2119 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2120 crypto_req_done, &wait); 2121 err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), 2122 &wait); 2123 if (err) { 2124 pr_err("alg: %s: Party B: compute shared secret failed. err %d\n", 2125 alg, err); 2126 goto free_all; 2127 } 2128 2129 shared_secret = a_ss; 2130 } else { 2131 shared_secret = (void *)vec->expected_ss; 2132 } 2133 2134 /* 2135 * verify shared secret from which the user will derive 2136 * secret key by executing whatever hash it has chosen 2137 */ 2138 if (memcmp(shared_secret, sg_virt(req->dst), 2139 vec->expected_ss_size)) { 2140 pr_err("alg: %s: compute shared secret test failed. Invalid output\n", 2141 alg); 2142 err = -EINVAL; 2143 } 2144 2145 free_all: 2146 kfree(a_ss); 2147 kfree(input_buf); 2148 free_output: 2149 kfree(a_public); 2150 kfree(output_buf); 2151 free_req: 2152 kpp_request_free(req); 2153 return err; 2154 } 2155 2156 static int test_kpp(struct crypto_kpp *tfm, const char *alg, 2157 const struct kpp_testvec *vecs, unsigned int tcount) 2158 { 2159 int ret, i; 2160 2161 for (i = 0; i < tcount; i++) { 2162 ret = do_test_kpp(tfm, vecs++, alg); 2163 if (ret) { 2164 pr_err("alg: %s: test failed on vector %d, err=%d\n", 2165 alg, i + 1, ret); 2166 return ret; 2167 } 2168 } 2169 return 0; 2170 } 2171 2172 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver, 2173 u32 type, u32 mask) 2174 { 2175 struct crypto_kpp *tfm; 2176 int err = 0; 2177 2178 tfm = crypto_alloc_kpp(driver, type, mask); 2179 if (IS_ERR(tfm)) { 2180 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n", 2181 driver, PTR_ERR(tfm)); 2182 return PTR_ERR(tfm); 2183 } 2184 if (desc->suite.kpp.vecs) 2185 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs, 2186 desc->suite.kpp.count); 2187 2188 crypto_free_kpp(tfm); 2189 return err; 2190 } 2191 2192 static int test_akcipher_one(struct crypto_akcipher *tfm, 2193 const struct akcipher_testvec *vecs) 2194 { 2195 char *xbuf[XBUFSIZE]; 2196 struct akcipher_request *req; 2197 void *outbuf_enc = NULL; 2198 void *outbuf_dec = NULL; 2199 struct crypto_wait wait; 2200 unsigned int out_len_max, out_len = 0; 2201 int err = -ENOMEM; 2202 struct scatterlist src, dst, src_tab[2]; 2203 2204 if (testmgr_alloc_buf(xbuf)) 2205 return err; 2206 2207 req = akcipher_request_alloc(tfm, GFP_KERNEL); 2208 if (!req) 2209 goto free_xbuf; 2210 2211 crypto_init_wait(&wait); 2212 2213 if (vecs->public_key_vec) 2214 err = crypto_akcipher_set_pub_key(tfm, vecs->key, 2215 vecs->key_len); 2216 else 2217 err = crypto_akcipher_set_priv_key(tfm, vecs->key, 2218 vecs->key_len); 2219 if (err) 2220 goto free_req; 2221 2222 err = -ENOMEM; 2223 out_len_max = crypto_akcipher_maxsize(tfm); 2224 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 2225 if (!outbuf_enc) 2226 goto free_req; 2227 2228 if (WARN_ON(vecs->m_size > PAGE_SIZE)) 2229 goto free_all; 2230 2231 memcpy(xbuf[0], vecs->m, vecs->m_size); 2232 2233 sg_init_table(src_tab, 2); 2234 sg_set_buf(&src_tab[0], xbuf[0], 8); 2235 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8); 2236 sg_init_one(&dst, outbuf_enc, out_len_max); 2237 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size, 2238 out_len_max); 2239 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2240 crypto_req_done, &wait); 2241 2242 err = crypto_wait_req(vecs->siggen_sigver_test ? 2243 /* Run asymmetric signature generation */ 2244 crypto_akcipher_sign(req) : 2245 /* Run asymmetric encrypt */ 2246 crypto_akcipher_encrypt(req), &wait); 2247 if (err) { 2248 pr_err("alg: akcipher: encrypt test failed. err %d\n", err); 2249 goto free_all; 2250 } 2251 if (req->dst_len != vecs->c_size) { 2252 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n"); 2253 err = -EINVAL; 2254 goto free_all; 2255 } 2256 /* verify that encrypted message is equal to expected */ 2257 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) { 2258 pr_err("alg: akcipher: encrypt test failed. Invalid output\n"); 2259 hexdump(outbuf_enc, vecs->c_size); 2260 err = -EINVAL; 2261 goto free_all; 2262 } 2263 /* Don't invoke decrypt for vectors with public key */ 2264 if (vecs->public_key_vec) { 2265 err = 0; 2266 goto free_all; 2267 } 2268 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 2269 if (!outbuf_dec) { 2270 err = -ENOMEM; 2271 goto free_all; 2272 } 2273 2274 if (WARN_ON(vecs->c_size > PAGE_SIZE)) 2275 goto free_all; 2276 2277 memcpy(xbuf[0], vecs->c, vecs->c_size); 2278 2279 sg_init_one(&src, xbuf[0], vecs->c_size); 2280 sg_init_one(&dst, outbuf_dec, out_len_max); 2281 crypto_init_wait(&wait); 2282 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max); 2283 2284 err = crypto_wait_req(vecs->siggen_sigver_test ? 2285 /* Run asymmetric signature verification */ 2286 crypto_akcipher_verify(req) : 2287 /* Run asymmetric decrypt */ 2288 crypto_akcipher_decrypt(req), &wait); 2289 if (err) { 2290 pr_err("alg: akcipher: decrypt test failed. err %d\n", err); 2291 goto free_all; 2292 } 2293 out_len = req->dst_len; 2294 if (out_len < vecs->m_size) { 2295 pr_err("alg: akcipher: decrypt test failed. " 2296 "Invalid output len %u\n", out_len); 2297 err = -EINVAL; 2298 goto free_all; 2299 } 2300 /* verify that decrypted message is equal to the original msg */ 2301 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) || 2302 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size, 2303 vecs->m_size)) { 2304 pr_err("alg: akcipher: decrypt test failed. Invalid output\n"); 2305 hexdump(outbuf_dec, out_len); 2306 err = -EINVAL; 2307 } 2308 free_all: 2309 kfree(outbuf_dec); 2310 kfree(outbuf_enc); 2311 free_req: 2312 akcipher_request_free(req); 2313 free_xbuf: 2314 testmgr_free_buf(xbuf); 2315 return err; 2316 } 2317 2318 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 2319 const struct akcipher_testvec *vecs, 2320 unsigned int tcount) 2321 { 2322 const char *algo = 2323 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm)); 2324 int ret, i; 2325 2326 for (i = 0; i < tcount; i++) { 2327 ret = test_akcipher_one(tfm, vecs++); 2328 if (!ret) 2329 continue; 2330 2331 pr_err("alg: akcipher: test %d failed for %s, err=%d\n", 2332 i + 1, algo, ret); 2333 return ret; 2334 } 2335 return 0; 2336 } 2337 2338 static int alg_test_akcipher(const struct alg_test_desc *desc, 2339 const char *driver, u32 type, u32 mask) 2340 { 2341 struct crypto_akcipher *tfm; 2342 int err = 0; 2343 2344 tfm = crypto_alloc_akcipher(driver, type, mask); 2345 if (IS_ERR(tfm)) { 2346 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 2347 driver, PTR_ERR(tfm)); 2348 return PTR_ERR(tfm); 2349 } 2350 if (desc->suite.akcipher.vecs) 2351 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 2352 desc->suite.akcipher.count); 2353 2354 crypto_free_akcipher(tfm); 2355 return err; 2356 } 2357 2358 static int alg_test_null(const struct alg_test_desc *desc, 2359 const char *driver, u32 type, u32 mask) 2360 { 2361 return 0; 2362 } 2363 2364 #define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) } 2365 2366 /* Please keep this list sorted by algorithm name. */ 2367 static const struct alg_test_desc alg_test_descs[] = { 2368 { 2369 .alg = "aegis128", 2370 .test = alg_test_aead, 2371 .suite = { 2372 .aead = { 2373 .enc = __VECS(aegis128_enc_tv_template), 2374 .dec = __VECS(aegis128_dec_tv_template), 2375 } 2376 } 2377 }, { 2378 .alg = "aegis128l", 2379 .test = alg_test_aead, 2380 .suite = { 2381 .aead = { 2382 .enc = __VECS(aegis128l_enc_tv_template), 2383 .dec = __VECS(aegis128l_dec_tv_template), 2384 } 2385 } 2386 }, { 2387 .alg = "aegis256", 2388 .test = alg_test_aead, 2389 .suite = { 2390 .aead = { 2391 .enc = __VECS(aegis256_enc_tv_template), 2392 .dec = __VECS(aegis256_dec_tv_template), 2393 } 2394 } 2395 }, { 2396 .alg = "ansi_cprng", 2397 .test = alg_test_cprng, 2398 .suite = { 2399 .cprng = __VECS(ansi_cprng_aes_tv_template) 2400 } 2401 }, { 2402 .alg = "authenc(hmac(md5),ecb(cipher_null))", 2403 .test = alg_test_aead, 2404 .suite = { 2405 .aead = { 2406 .enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template), 2407 .dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template) 2408 } 2409 } 2410 }, { 2411 .alg = "authenc(hmac(sha1),cbc(aes))", 2412 .test = alg_test_aead, 2413 .fips_allowed = 1, 2414 .suite = { 2415 .aead = { 2416 .enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp) 2417 } 2418 } 2419 }, { 2420 .alg = "authenc(hmac(sha1),cbc(des))", 2421 .test = alg_test_aead, 2422 .suite = { 2423 .aead = { 2424 .enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp) 2425 } 2426 } 2427 }, { 2428 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 2429 .test = alg_test_aead, 2430 .fips_allowed = 1, 2431 .suite = { 2432 .aead = { 2433 .enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp) 2434 } 2435 } 2436 }, { 2437 .alg = "authenc(hmac(sha1),ctr(aes))", 2438 .test = alg_test_null, 2439 .fips_allowed = 1, 2440 }, { 2441 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 2442 .test = alg_test_aead, 2443 .suite = { 2444 .aead = { 2445 .enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp), 2446 .dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp) 2447 } 2448 } 2449 }, { 2450 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))", 2451 .test = alg_test_null, 2452 .fips_allowed = 1, 2453 }, { 2454 .alg = "authenc(hmac(sha224),cbc(des))", 2455 .test = alg_test_aead, 2456 .suite = { 2457 .aead = { 2458 .enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp) 2459 } 2460 } 2461 }, { 2462 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 2463 .test = alg_test_aead, 2464 .fips_allowed = 1, 2465 .suite = { 2466 .aead = { 2467 .enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp) 2468 } 2469 } 2470 }, { 2471 .alg = "authenc(hmac(sha256),cbc(aes))", 2472 .test = alg_test_aead, 2473 .fips_allowed = 1, 2474 .suite = { 2475 .aead = { 2476 .enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp) 2477 } 2478 } 2479 }, { 2480 .alg = "authenc(hmac(sha256),cbc(des))", 2481 .test = alg_test_aead, 2482 .suite = { 2483 .aead = { 2484 .enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp) 2485 } 2486 } 2487 }, { 2488 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 2489 .test = alg_test_aead, 2490 .fips_allowed = 1, 2491 .suite = { 2492 .aead = { 2493 .enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp) 2494 } 2495 } 2496 }, { 2497 .alg = "authenc(hmac(sha256),ctr(aes))", 2498 .test = alg_test_null, 2499 .fips_allowed = 1, 2500 }, { 2501 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))", 2502 .test = alg_test_null, 2503 .fips_allowed = 1, 2504 }, { 2505 .alg = "authenc(hmac(sha384),cbc(des))", 2506 .test = alg_test_aead, 2507 .suite = { 2508 .aead = { 2509 .enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp) 2510 } 2511 } 2512 }, { 2513 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 2514 .test = alg_test_aead, 2515 .fips_allowed = 1, 2516 .suite = { 2517 .aead = { 2518 .enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp) 2519 } 2520 } 2521 }, { 2522 .alg = "authenc(hmac(sha384),ctr(aes))", 2523 .test = alg_test_null, 2524 .fips_allowed = 1, 2525 }, { 2526 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))", 2527 .test = alg_test_null, 2528 .fips_allowed = 1, 2529 }, { 2530 .alg = "authenc(hmac(sha512),cbc(aes))", 2531 .fips_allowed = 1, 2532 .test = alg_test_aead, 2533 .suite = { 2534 .aead = { 2535 .enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp) 2536 } 2537 } 2538 }, { 2539 .alg = "authenc(hmac(sha512),cbc(des))", 2540 .test = alg_test_aead, 2541 .suite = { 2542 .aead = { 2543 .enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp) 2544 } 2545 } 2546 }, { 2547 .alg = "authenc(hmac(sha512),cbc(des3_ede))", 2548 .test = alg_test_aead, 2549 .fips_allowed = 1, 2550 .suite = { 2551 .aead = { 2552 .enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp) 2553 } 2554 } 2555 }, { 2556 .alg = "authenc(hmac(sha512),ctr(aes))", 2557 .test = alg_test_null, 2558 .fips_allowed = 1, 2559 }, { 2560 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))", 2561 .test = alg_test_null, 2562 .fips_allowed = 1, 2563 }, { 2564 .alg = "cbc(aes)", 2565 .test = alg_test_skcipher, 2566 .fips_allowed = 1, 2567 .suite = { 2568 .cipher = __VECS(aes_cbc_tv_template) 2569 }, 2570 }, { 2571 .alg = "cbc(anubis)", 2572 .test = alg_test_skcipher, 2573 .suite = { 2574 .cipher = __VECS(anubis_cbc_tv_template) 2575 }, 2576 }, { 2577 .alg = "cbc(blowfish)", 2578 .test = alg_test_skcipher, 2579 .suite = { 2580 .cipher = __VECS(bf_cbc_tv_template) 2581 }, 2582 }, { 2583 .alg = "cbc(camellia)", 2584 .test = alg_test_skcipher, 2585 .suite = { 2586 .cipher = __VECS(camellia_cbc_tv_template) 2587 }, 2588 }, { 2589 .alg = "cbc(cast5)", 2590 .test = alg_test_skcipher, 2591 .suite = { 2592 .cipher = __VECS(cast5_cbc_tv_template) 2593 }, 2594 }, { 2595 .alg = "cbc(cast6)", 2596 .test = alg_test_skcipher, 2597 .suite = { 2598 .cipher = __VECS(cast6_cbc_tv_template) 2599 }, 2600 }, { 2601 .alg = "cbc(des)", 2602 .test = alg_test_skcipher, 2603 .suite = { 2604 .cipher = __VECS(des_cbc_tv_template) 2605 }, 2606 }, { 2607 .alg = "cbc(des3_ede)", 2608 .test = alg_test_skcipher, 2609 .fips_allowed = 1, 2610 .suite = { 2611 .cipher = __VECS(des3_ede_cbc_tv_template) 2612 }, 2613 }, { 2614 /* Same as cbc(aes) except the key is stored in 2615 * hardware secure memory which we reference by index 2616 */ 2617 .alg = "cbc(paes)", 2618 .test = alg_test_null, 2619 .fips_allowed = 1, 2620 }, { 2621 .alg = "cbc(serpent)", 2622 .test = alg_test_skcipher, 2623 .suite = { 2624 .cipher = __VECS(serpent_cbc_tv_template) 2625 }, 2626 }, { 2627 .alg = "cbc(twofish)", 2628 .test = alg_test_skcipher, 2629 .suite = { 2630 .cipher = __VECS(tf_cbc_tv_template) 2631 }, 2632 }, { 2633 .alg = "cbcmac(aes)", 2634 .fips_allowed = 1, 2635 .test = alg_test_hash, 2636 .suite = { 2637 .hash = __VECS(aes_cbcmac_tv_template) 2638 } 2639 }, { 2640 .alg = "ccm(aes)", 2641 .test = alg_test_aead, 2642 .fips_allowed = 1, 2643 .suite = { 2644 .aead = { 2645 .enc = __VECS(aes_ccm_enc_tv_template), 2646 .dec = __VECS(aes_ccm_dec_tv_template) 2647 } 2648 } 2649 }, { 2650 .alg = "chacha20", 2651 .test = alg_test_skcipher, 2652 .suite = { 2653 .cipher = __VECS(chacha20_tv_template) 2654 }, 2655 }, { 2656 .alg = "cmac(aes)", 2657 .fips_allowed = 1, 2658 .test = alg_test_hash, 2659 .suite = { 2660 .hash = __VECS(aes_cmac128_tv_template) 2661 } 2662 }, { 2663 .alg = "cmac(des3_ede)", 2664 .fips_allowed = 1, 2665 .test = alg_test_hash, 2666 .suite = { 2667 .hash = __VECS(des3_ede_cmac64_tv_template) 2668 } 2669 }, { 2670 .alg = "compress_null", 2671 .test = alg_test_null, 2672 }, { 2673 .alg = "crc32", 2674 .test = alg_test_hash, 2675 .suite = { 2676 .hash = __VECS(crc32_tv_template) 2677 } 2678 }, { 2679 .alg = "crc32c", 2680 .test = alg_test_crc32c, 2681 .fips_allowed = 1, 2682 .suite = { 2683 .hash = __VECS(crc32c_tv_template) 2684 } 2685 }, { 2686 .alg = "crct10dif", 2687 .test = alg_test_hash, 2688 .fips_allowed = 1, 2689 .suite = { 2690 .hash = __VECS(crct10dif_tv_template) 2691 } 2692 }, { 2693 .alg = "ctr(aes)", 2694 .test = alg_test_skcipher, 2695 .fips_allowed = 1, 2696 .suite = { 2697 .cipher = __VECS(aes_ctr_tv_template) 2698 } 2699 }, { 2700 .alg = "ctr(blowfish)", 2701 .test = alg_test_skcipher, 2702 .suite = { 2703 .cipher = __VECS(bf_ctr_tv_template) 2704 } 2705 }, { 2706 .alg = "ctr(camellia)", 2707 .test = alg_test_skcipher, 2708 .suite = { 2709 .cipher = __VECS(camellia_ctr_tv_template) 2710 } 2711 }, { 2712 .alg = "ctr(cast5)", 2713 .test = alg_test_skcipher, 2714 .suite = { 2715 .cipher = __VECS(cast5_ctr_tv_template) 2716 } 2717 }, { 2718 .alg = "ctr(cast6)", 2719 .test = alg_test_skcipher, 2720 .suite = { 2721 .cipher = __VECS(cast6_ctr_tv_template) 2722 } 2723 }, { 2724 .alg = "ctr(des)", 2725 .test = alg_test_skcipher, 2726 .suite = { 2727 .cipher = __VECS(des_ctr_tv_template) 2728 } 2729 }, { 2730 .alg = "ctr(des3_ede)", 2731 .test = alg_test_skcipher, 2732 .fips_allowed = 1, 2733 .suite = { 2734 .cipher = __VECS(des3_ede_ctr_tv_template) 2735 } 2736 }, { 2737 /* Same as ctr(aes) except the key is stored in 2738 * hardware secure memory which we reference by index 2739 */ 2740 .alg = "ctr(paes)", 2741 .test = alg_test_null, 2742 .fips_allowed = 1, 2743 }, { 2744 .alg = "ctr(serpent)", 2745 .test = alg_test_skcipher, 2746 .suite = { 2747 .cipher = __VECS(serpent_ctr_tv_template) 2748 } 2749 }, { 2750 .alg = "ctr(twofish)", 2751 .test = alg_test_skcipher, 2752 .suite = { 2753 .cipher = __VECS(tf_ctr_tv_template) 2754 } 2755 }, { 2756 .alg = "cts(cbc(aes))", 2757 .test = alg_test_skcipher, 2758 .suite = { 2759 .cipher = __VECS(cts_mode_tv_template) 2760 } 2761 }, { 2762 .alg = "deflate", 2763 .test = alg_test_comp, 2764 .fips_allowed = 1, 2765 .suite = { 2766 .comp = { 2767 .comp = __VECS(deflate_comp_tv_template), 2768 .decomp = __VECS(deflate_decomp_tv_template) 2769 } 2770 } 2771 }, { 2772 .alg = "dh", 2773 .test = alg_test_kpp, 2774 .fips_allowed = 1, 2775 .suite = { 2776 .kpp = __VECS(dh_tv_template) 2777 } 2778 }, { 2779 .alg = "digest_null", 2780 .test = alg_test_null, 2781 }, { 2782 .alg = "drbg_nopr_ctr_aes128", 2783 .test = alg_test_drbg, 2784 .fips_allowed = 1, 2785 .suite = { 2786 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template) 2787 } 2788 }, { 2789 .alg = "drbg_nopr_ctr_aes192", 2790 .test = alg_test_drbg, 2791 .fips_allowed = 1, 2792 .suite = { 2793 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template) 2794 } 2795 }, { 2796 .alg = "drbg_nopr_ctr_aes256", 2797 .test = alg_test_drbg, 2798 .fips_allowed = 1, 2799 .suite = { 2800 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template) 2801 } 2802 }, { 2803 /* 2804 * There is no need to specifically test the DRBG with every 2805 * backend cipher -- covered by drbg_nopr_hmac_sha256 test 2806 */ 2807 .alg = "drbg_nopr_hmac_sha1", 2808 .fips_allowed = 1, 2809 .test = alg_test_null, 2810 }, { 2811 .alg = "drbg_nopr_hmac_sha256", 2812 .test = alg_test_drbg, 2813 .fips_allowed = 1, 2814 .suite = { 2815 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template) 2816 } 2817 }, { 2818 /* covered by drbg_nopr_hmac_sha256 test */ 2819 .alg = "drbg_nopr_hmac_sha384", 2820 .fips_allowed = 1, 2821 .test = alg_test_null, 2822 }, { 2823 .alg = "drbg_nopr_hmac_sha512", 2824 .test = alg_test_null, 2825 .fips_allowed = 1, 2826 }, { 2827 .alg = "drbg_nopr_sha1", 2828 .fips_allowed = 1, 2829 .test = alg_test_null, 2830 }, { 2831 .alg = "drbg_nopr_sha256", 2832 .test = alg_test_drbg, 2833 .fips_allowed = 1, 2834 .suite = { 2835 .drbg = __VECS(drbg_nopr_sha256_tv_template) 2836 } 2837 }, { 2838 /* covered by drbg_nopr_sha256 test */ 2839 .alg = "drbg_nopr_sha384", 2840 .fips_allowed = 1, 2841 .test = alg_test_null, 2842 }, { 2843 .alg = "drbg_nopr_sha512", 2844 .fips_allowed = 1, 2845 .test = alg_test_null, 2846 }, { 2847 .alg = "drbg_pr_ctr_aes128", 2848 .test = alg_test_drbg, 2849 .fips_allowed = 1, 2850 .suite = { 2851 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template) 2852 } 2853 }, { 2854 /* covered by drbg_pr_ctr_aes128 test */ 2855 .alg = "drbg_pr_ctr_aes192", 2856 .fips_allowed = 1, 2857 .test = alg_test_null, 2858 }, { 2859 .alg = "drbg_pr_ctr_aes256", 2860 .fips_allowed = 1, 2861 .test = alg_test_null, 2862 }, { 2863 .alg = "drbg_pr_hmac_sha1", 2864 .fips_allowed = 1, 2865 .test = alg_test_null, 2866 }, { 2867 .alg = "drbg_pr_hmac_sha256", 2868 .test = alg_test_drbg, 2869 .fips_allowed = 1, 2870 .suite = { 2871 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template) 2872 } 2873 }, { 2874 /* covered by drbg_pr_hmac_sha256 test */ 2875 .alg = "drbg_pr_hmac_sha384", 2876 .fips_allowed = 1, 2877 .test = alg_test_null, 2878 }, { 2879 .alg = "drbg_pr_hmac_sha512", 2880 .test = alg_test_null, 2881 .fips_allowed = 1, 2882 }, { 2883 .alg = "drbg_pr_sha1", 2884 .fips_allowed = 1, 2885 .test = alg_test_null, 2886 }, { 2887 .alg = "drbg_pr_sha256", 2888 .test = alg_test_drbg, 2889 .fips_allowed = 1, 2890 .suite = { 2891 .drbg = __VECS(drbg_pr_sha256_tv_template) 2892 } 2893 }, { 2894 /* covered by drbg_pr_sha256 test */ 2895 .alg = "drbg_pr_sha384", 2896 .fips_allowed = 1, 2897 .test = alg_test_null, 2898 }, { 2899 .alg = "drbg_pr_sha512", 2900 .fips_allowed = 1, 2901 .test = alg_test_null, 2902 }, { 2903 .alg = "ecb(aes)", 2904 .test = alg_test_skcipher, 2905 .fips_allowed = 1, 2906 .suite = { 2907 .cipher = __VECS(aes_tv_template) 2908 } 2909 }, { 2910 .alg = "ecb(anubis)", 2911 .test = alg_test_skcipher, 2912 .suite = { 2913 .cipher = __VECS(anubis_tv_template) 2914 } 2915 }, { 2916 .alg = "ecb(arc4)", 2917 .test = alg_test_skcipher, 2918 .suite = { 2919 .cipher = __VECS(arc4_tv_template) 2920 } 2921 }, { 2922 .alg = "ecb(blowfish)", 2923 .test = alg_test_skcipher, 2924 .suite = { 2925 .cipher = __VECS(bf_tv_template) 2926 } 2927 }, { 2928 .alg = "ecb(camellia)", 2929 .test = alg_test_skcipher, 2930 .suite = { 2931 .cipher = __VECS(camellia_tv_template) 2932 } 2933 }, { 2934 .alg = "ecb(cast5)", 2935 .test = alg_test_skcipher, 2936 .suite = { 2937 .cipher = __VECS(cast5_tv_template) 2938 } 2939 }, { 2940 .alg = "ecb(cast6)", 2941 .test = alg_test_skcipher, 2942 .suite = { 2943 .cipher = __VECS(cast6_tv_template) 2944 } 2945 }, { 2946 .alg = "ecb(cipher_null)", 2947 .test = alg_test_null, 2948 .fips_allowed = 1, 2949 }, { 2950 .alg = "ecb(des)", 2951 .test = alg_test_skcipher, 2952 .suite = { 2953 .cipher = __VECS(des_tv_template) 2954 } 2955 }, { 2956 .alg = "ecb(des3_ede)", 2957 .test = alg_test_skcipher, 2958 .fips_allowed = 1, 2959 .suite = { 2960 .cipher = __VECS(des3_ede_tv_template) 2961 } 2962 }, { 2963 .alg = "ecb(fcrypt)", 2964 .test = alg_test_skcipher, 2965 .suite = { 2966 .cipher = { 2967 .vecs = fcrypt_pcbc_tv_template, 2968 .count = 1 2969 } 2970 } 2971 }, { 2972 .alg = "ecb(khazad)", 2973 .test = alg_test_skcipher, 2974 .suite = { 2975 .cipher = __VECS(khazad_tv_template) 2976 } 2977 }, { 2978 /* Same as ecb(aes) except the key is stored in 2979 * hardware secure memory which we reference by index 2980 */ 2981 .alg = "ecb(paes)", 2982 .test = alg_test_null, 2983 .fips_allowed = 1, 2984 }, { 2985 .alg = "ecb(seed)", 2986 .test = alg_test_skcipher, 2987 .suite = { 2988 .cipher = __VECS(seed_tv_template) 2989 } 2990 }, { 2991 .alg = "ecb(serpent)", 2992 .test = alg_test_skcipher, 2993 .suite = { 2994 .cipher = __VECS(serpent_tv_template) 2995 } 2996 }, { 2997 .alg = "ecb(sm4)", 2998 .test = alg_test_skcipher, 2999 .suite = { 3000 .cipher = __VECS(sm4_tv_template) 3001 } 3002 }, { 3003 .alg = "ecb(speck128)", 3004 .test = alg_test_skcipher, 3005 .suite = { 3006 .cipher = __VECS(speck128_tv_template) 3007 } 3008 }, { 3009 .alg = "ecb(speck64)", 3010 .test = alg_test_skcipher, 3011 .suite = { 3012 .cipher = __VECS(speck64_tv_template) 3013 } 3014 }, { 3015 .alg = "ecb(tea)", 3016 .test = alg_test_skcipher, 3017 .suite = { 3018 .cipher = __VECS(tea_tv_template) 3019 } 3020 }, { 3021 .alg = "ecb(tnepres)", 3022 .test = alg_test_skcipher, 3023 .suite = { 3024 .cipher = __VECS(tnepres_tv_template) 3025 } 3026 }, { 3027 .alg = "ecb(twofish)", 3028 .test = alg_test_skcipher, 3029 .suite = { 3030 .cipher = __VECS(tf_tv_template) 3031 } 3032 }, { 3033 .alg = "ecb(xeta)", 3034 .test = alg_test_skcipher, 3035 .suite = { 3036 .cipher = __VECS(xeta_tv_template) 3037 } 3038 }, { 3039 .alg = "ecb(xtea)", 3040 .test = alg_test_skcipher, 3041 .suite = { 3042 .cipher = __VECS(xtea_tv_template) 3043 } 3044 }, { 3045 .alg = "ecdh", 3046 .test = alg_test_kpp, 3047 .fips_allowed = 1, 3048 .suite = { 3049 .kpp = __VECS(ecdh_tv_template) 3050 } 3051 }, { 3052 .alg = "gcm(aes)", 3053 .test = alg_test_aead, 3054 .fips_allowed = 1, 3055 .suite = { 3056 .aead = { 3057 .enc = __VECS(aes_gcm_enc_tv_template), 3058 .dec = __VECS(aes_gcm_dec_tv_template) 3059 } 3060 } 3061 }, { 3062 .alg = "ghash", 3063 .test = alg_test_hash, 3064 .fips_allowed = 1, 3065 .suite = { 3066 .hash = __VECS(ghash_tv_template) 3067 } 3068 }, { 3069 .alg = "hmac(md5)", 3070 .test = alg_test_hash, 3071 .suite = { 3072 .hash = __VECS(hmac_md5_tv_template) 3073 } 3074 }, { 3075 .alg = "hmac(rmd128)", 3076 .test = alg_test_hash, 3077 .suite = { 3078 .hash = __VECS(hmac_rmd128_tv_template) 3079 } 3080 }, { 3081 .alg = "hmac(rmd160)", 3082 .test = alg_test_hash, 3083 .suite = { 3084 .hash = __VECS(hmac_rmd160_tv_template) 3085 } 3086 }, { 3087 .alg = "hmac(sha1)", 3088 .test = alg_test_hash, 3089 .fips_allowed = 1, 3090 .suite = { 3091 .hash = __VECS(hmac_sha1_tv_template) 3092 } 3093 }, { 3094 .alg = "hmac(sha224)", 3095 .test = alg_test_hash, 3096 .fips_allowed = 1, 3097 .suite = { 3098 .hash = __VECS(hmac_sha224_tv_template) 3099 } 3100 }, { 3101 .alg = "hmac(sha256)", 3102 .test = alg_test_hash, 3103 .fips_allowed = 1, 3104 .suite = { 3105 .hash = __VECS(hmac_sha256_tv_template) 3106 } 3107 }, { 3108 .alg = "hmac(sha3-224)", 3109 .test = alg_test_hash, 3110 .fips_allowed = 1, 3111 .suite = { 3112 .hash = __VECS(hmac_sha3_224_tv_template) 3113 } 3114 }, { 3115 .alg = "hmac(sha3-256)", 3116 .test = alg_test_hash, 3117 .fips_allowed = 1, 3118 .suite = { 3119 .hash = __VECS(hmac_sha3_256_tv_template) 3120 } 3121 }, { 3122 .alg = "hmac(sha3-384)", 3123 .test = alg_test_hash, 3124 .fips_allowed = 1, 3125 .suite = { 3126 .hash = __VECS(hmac_sha3_384_tv_template) 3127 } 3128 }, { 3129 .alg = "hmac(sha3-512)", 3130 .test = alg_test_hash, 3131 .fips_allowed = 1, 3132 .suite = { 3133 .hash = __VECS(hmac_sha3_512_tv_template) 3134 } 3135 }, { 3136 .alg = "hmac(sha384)", 3137 .test = alg_test_hash, 3138 .fips_allowed = 1, 3139 .suite = { 3140 .hash = __VECS(hmac_sha384_tv_template) 3141 } 3142 }, { 3143 .alg = "hmac(sha512)", 3144 .test = alg_test_hash, 3145 .fips_allowed = 1, 3146 .suite = { 3147 .hash = __VECS(hmac_sha512_tv_template) 3148 } 3149 }, { 3150 .alg = "jitterentropy_rng", 3151 .fips_allowed = 1, 3152 .test = alg_test_null, 3153 }, { 3154 .alg = "kw(aes)", 3155 .test = alg_test_skcipher, 3156 .fips_allowed = 1, 3157 .suite = { 3158 .cipher = __VECS(aes_kw_tv_template) 3159 } 3160 }, { 3161 .alg = "lrw(aes)", 3162 .test = alg_test_skcipher, 3163 .suite = { 3164 .cipher = __VECS(aes_lrw_tv_template) 3165 } 3166 }, { 3167 .alg = "lrw(camellia)", 3168 .test = alg_test_skcipher, 3169 .suite = { 3170 .cipher = __VECS(camellia_lrw_tv_template) 3171 } 3172 }, { 3173 .alg = "lrw(cast6)", 3174 .test = alg_test_skcipher, 3175 .suite = { 3176 .cipher = __VECS(cast6_lrw_tv_template) 3177 } 3178 }, { 3179 .alg = "lrw(serpent)", 3180 .test = alg_test_skcipher, 3181 .suite = { 3182 .cipher = __VECS(serpent_lrw_tv_template) 3183 } 3184 }, { 3185 .alg = "lrw(twofish)", 3186 .test = alg_test_skcipher, 3187 .suite = { 3188 .cipher = __VECS(tf_lrw_tv_template) 3189 } 3190 }, { 3191 .alg = "lz4", 3192 .test = alg_test_comp, 3193 .fips_allowed = 1, 3194 .suite = { 3195 .comp = { 3196 .comp = __VECS(lz4_comp_tv_template), 3197 .decomp = __VECS(lz4_decomp_tv_template) 3198 } 3199 } 3200 }, { 3201 .alg = "lz4hc", 3202 .test = alg_test_comp, 3203 .fips_allowed = 1, 3204 .suite = { 3205 .comp = { 3206 .comp = __VECS(lz4hc_comp_tv_template), 3207 .decomp = __VECS(lz4hc_decomp_tv_template) 3208 } 3209 } 3210 }, { 3211 .alg = "lzo", 3212 .test = alg_test_comp, 3213 .fips_allowed = 1, 3214 .suite = { 3215 .comp = { 3216 .comp = __VECS(lzo_comp_tv_template), 3217 .decomp = __VECS(lzo_decomp_tv_template) 3218 } 3219 } 3220 }, { 3221 .alg = "md4", 3222 .test = alg_test_hash, 3223 .suite = { 3224 .hash = __VECS(md4_tv_template) 3225 } 3226 }, { 3227 .alg = "md5", 3228 .test = alg_test_hash, 3229 .suite = { 3230 .hash = __VECS(md5_tv_template) 3231 } 3232 }, { 3233 .alg = "michael_mic", 3234 .test = alg_test_hash, 3235 .suite = { 3236 .hash = __VECS(michael_mic_tv_template) 3237 } 3238 }, { 3239 .alg = "morus1280", 3240 .test = alg_test_aead, 3241 .suite = { 3242 .aead = { 3243 .enc = __VECS(morus1280_enc_tv_template), 3244 .dec = __VECS(morus1280_dec_tv_template), 3245 } 3246 } 3247 }, { 3248 .alg = "morus640", 3249 .test = alg_test_aead, 3250 .suite = { 3251 .aead = { 3252 .enc = __VECS(morus640_enc_tv_template), 3253 .dec = __VECS(morus640_dec_tv_template), 3254 } 3255 } 3256 }, { 3257 .alg = "ofb(aes)", 3258 .test = alg_test_skcipher, 3259 .fips_allowed = 1, 3260 .suite = { 3261 .cipher = __VECS(aes_ofb_tv_template) 3262 } 3263 }, { 3264 /* Same as ofb(aes) except the key is stored in 3265 * hardware secure memory which we reference by index 3266 */ 3267 .alg = "ofb(paes)", 3268 .test = alg_test_null, 3269 .fips_allowed = 1, 3270 }, { 3271 .alg = "pcbc(fcrypt)", 3272 .test = alg_test_skcipher, 3273 .suite = { 3274 .cipher = __VECS(fcrypt_pcbc_tv_template) 3275 } 3276 }, { 3277 .alg = "pkcs1pad(rsa,sha224)", 3278 .test = alg_test_null, 3279 .fips_allowed = 1, 3280 }, { 3281 .alg = "pkcs1pad(rsa,sha256)", 3282 .test = alg_test_akcipher, 3283 .fips_allowed = 1, 3284 .suite = { 3285 .akcipher = __VECS(pkcs1pad_rsa_tv_template) 3286 } 3287 }, { 3288 .alg = "pkcs1pad(rsa,sha384)", 3289 .test = alg_test_null, 3290 .fips_allowed = 1, 3291 }, { 3292 .alg = "pkcs1pad(rsa,sha512)", 3293 .test = alg_test_null, 3294 .fips_allowed = 1, 3295 }, { 3296 .alg = "poly1305", 3297 .test = alg_test_hash, 3298 .suite = { 3299 .hash = __VECS(poly1305_tv_template) 3300 } 3301 }, { 3302 .alg = "rfc3686(ctr(aes))", 3303 .test = alg_test_skcipher, 3304 .fips_allowed = 1, 3305 .suite = { 3306 .cipher = __VECS(aes_ctr_rfc3686_tv_template) 3307 } 3308 }, { 3309 .alg = "rfc4106(gcm(aes))", 3310 .test = alg_test_aead, 3311 .fips_allowed = 1, 3312 .suite = { 3313 .aead = { 3314 .enc = __VECS(aes_gcm_rfc4106_enc_tv_template), 3315 .dec = __VECS(aes_gcm_rfc4106_dec_tv_template) 3316 } 3317 } 3318 }, { 3319 .alg = "rfc4309(ccm(aes))", 3320 .test = alg_test_aead, 3321 .fips_allowed = 1, 3322 .suite = { 3323 .aead = { 3324 .enc = __VECS(aes_ccm_rfc4309_enc_tv_template), 3325 .dec = __VECS(aes_ccm_rfc4309_dec_tv_template) 3326 } 3327 } 3328 }, { 3329 .alg = "rfc4543(gcm(aes))", 3330 .test = alg_test_aead, 3331 .suite = { 3332 .aead = { 3333 .enc = __VECS(aes_gcm_rfc4543_enc_tv_template), 3334 .dec = __VECS(aes_gcm_rfc4543_dec_tv_template), 3335 } 3336 } 3337 }, { 3338 .alg = "rfc7539(chacha20,poly1305)", 3339 .test = alg_test_aead, 3340 .suite = { 3341 .aead = { 3342 .enc = __VECS(rfc7539_enc_tv_template), 3343 .dec = __VECS(rfc7539_dec_tv_template), 3344 } 3345 } 3346 }, { 3347 .alg = "rfc7539esp(chacha20,poly1305)", 3348 .test = alg_test_aead, 3349 .suite = { 3350 .aead = { 3351 .enc = __VECS(rfc7539esp_enc_tv_template), 3352 .dec = __VECS(rfc7539esp_dec_tv_template), 3353 } 3354 } 3355 }, { 3356 .alg = "rmd128", 3357 .test = alg_test_hash, 3358 .suite = { 3359 .hash = __VECS(rmd128_tv_template) 3360 } 3361 }, { 3362 .alg = "rmd160", 3363 .test = alg_test_hash, 3364 .suite = { 3365 .hash = __VECS(rmd160_tv_template) 3366 } 3367 }, { 3368 .alg = "rmd256", 3369 .test = alg_test_hash, 3370 .suite = { 3371 .hash = __VECS(rmd256_tv_template) 3372 } 3373 }, { 3374 .alg = "rmd320", 3375 .test = alg_test_hash, 3376 .suite = { 3377 .hash = __VECS(rmd320_tv_template) 3378 } 3379 }, { 3380 .alg = "rsa", 3381 .test = alg_test_akcipher, 3382 .fips_allowed = 1, 3383 .suite = { 3384 .akcipher = __VECS(rsa_tv_template) 3385 } 3386 }, { 3387 .alg = "salsa20", 3388 .test = alg_test_skcipher, 3389 .suite = { 3390 .cipher = __VECS(salsa20_stream_tv_template) 3391 } 3392 }, { 3393 .alg = "sha1", 3394 .test = alg_test_hash, 3395 .fips_allowed = 1, 3396 .suite = { 3397 .hash = __VECS(sha1_tv_template) 3398 } 3399 }, { 3400 .alg = "sha224", 3401 .test = alg_test_hash, 3402 .fips_allowed = 1, 3403 .suite = { 3404 .hash = __VECS(sha224_tv_template) 3405 } 3406 }, { 3407 .alg = "sha256", 3408 .test = alg_test_hash, 3409 .fips_allowed = 1, 3410 .suite = { 3411 .hash = __VECS(sha256_tv_template) 3412 } 3413 }, { 3414 .alg = "sha3-224", 3415 .test = alg_test_hash, 3416 .fips_allowed = 1, 3417 .suite = { 3418 .hash = __VECS(sha3_224_tv_template) 3419 } 3420 }, { 3421 .alg = "sha3-256", 3422 .test = alg_test_hash, 3423 .fips_allowed = 1, 3424 .suite = { 3425 .hash = __VECS(sha3_256_tv_template) 3426 } 3427 }, { 3428 .alg = "sha3-384", 3429 .test = alg_test_hash, 3430 .fips_allowed = 1, 3431 .suite = { 3432 .hash = __VECS(sha3_384_tv_template) 3433 } 3434 }, { 3435 .alg = "sha3-512", 3436 .test = alg_test_hash, 3437 .fips_allowed = 1, 3438 .suite = { 3439 .hash = __VECS(sha3_512_tv_template) 3440 } 3441 }, { 3442 .alg = "sha384", 3443 .test = alg_test_hash, 3444 .fips_allowed = 1, 3445 .suite = { 3446 .hash = __VECS(sha384_tv_template) 3447 } 3448 }, { 3449 .alg = "sha512", 3450 .test = alg_test_hash, 3451 .fips_allowed = 1, 3452 .suite = { 3453 .hash = __VECS(sha512_tv_template) 3454 } 3455 }, { 3456 .alg = "sm3", 3457 .test = alg_test_hash, 3458 .suite = { 3459 .hash = __VECS(sm3_tv_template) 3460 } 3461 }, { 3462 .alg = "tgr128", 3463 .test = alg_test_hash, 3464 .suite = { 3465 .hash = __VECS(tgr128_tv_template) 3466 } 3467 }, { 3468 .alg = "tgr160", 3469 .test = alg_test_hash, 3470 .suite = { 3471 .hash = __VECS(tgr160_tv_template) 3472 } 3473 }, { 3474 .alg = "tgr192", 3475 .test = alg_test_hash, 3476 .suite = { 3477 .hash = __VECS(tgr192_tv_template) 3478 } 3479 }, { 3480 .alg = "vmac(aes)", 3481 .test = alg_test_hash, 3482 .suite = { 3483 .hash = __VECS(aes_vmac128_tv_template) 3484 } 3485 }, { 3486 .alg = "wp256", 3487 .test = alg_test_hash, 3488 .suite = { 3489 .hash = __VECS(wp256_tv_template) 3490 } 3491 }, { 3492 .alg = "wp384", 3493 .test = alg_test_hash, 3494 .suite = { 3495 .hash = __VECS(wp384_tv_template) 3496 } 3497 }, { 3498 .alg = "wp512", 3499 .test = alg_test_hash, 3500 .suite = { 3501 .hash = __VECS(wp512_tv_template) 3502 } 3503 }, { 3504 .alg = "xcbc(aes)", 3505 .test = alg_test_hash, 3506 .suite = { 3507 .hash = __VECS(aes_xcbc128_tv_template) 3508 } 3509 }, { 3510 .alg = "xts(aes)", 3511 .test = alg_test_skcipher, 3512 .fips_allowed = 1, 3513 .suite = { 3514 .cipher = __VECS(aes_xts_tv_template) 3515 } 3516 }, { 3517 .alg = "xts(camellia)", 3518 .test = alg_test_skcipher, 3519 .suite = { 3520 .cipher = __VECS(camellia_xts_tv_template) 3521 } 3522 }, { 3523 .alg = "xts(cast6)", 3524 .test = alg_test_skcipher, 3525 .suite = { 3526 .cipher = __VECS(cast6_xts_tv_template) 3527 } 3528 }, { 3529 /* Same as xts(aes) except the key is stored in 3530 * hardware secure memory which we reference by index 3531 */ 3532 .alg = "xts(paes)", 3533 .test = alg_test_null, 3534 .fips_allowed = 1, 3535 }, { 3536 .alg = "xts(serpent)", 3537 .test = alg_test_skcipher, 3538 .suite = { 3539 .cipher = __VECS(serpent_xts_tv_template) 3540 } 3541 }, { 3542 .alg = "xts(speck128)", 3543 .test = alg_test_skcipher, 3544 .suite = { 3545 .cipher = __VECS(speck128_xts_tv_template) 3546 } 3547 }, { 3548 .alg = "xts(speck64)", 3549 .test = alg_test_skcipher, 3550 .suite = { 3551 .cipher = __VECS(speck64_xts_tv_template) 3552 } 3553 }, { 3554 .alg = "xts(twofish)", 3555 .test = alg_test_skcipher, 3556 .suite = { 3557 .cipher = __VECS(tf_xts_tv_template) 3558 } 3559 }, { 3560 .alg = "xts4096(paes)", 3561 .test = alg_test_null, 3562 .fips_allowed = 1, 3563 }, { 3564 .alg = "xts512(paes)", 3565 .test = alg_test_null, 3566 .fips_allowed = 1, 3567 }, { 3568 .alg = "zlib-deflate", 3569 .test = alg_test_comp, 3570 .fips_allowed = 1, 3571 .suite = { 3572 .comp = { 3573 .comp = __VECS(zlib_deflate_comp_tv_template), 3574 .decomp = __VECS(zlib_deflate_decomp_tv_template) 3575 } 3576 } 3577 }, { 3578 .alg = "zstd", 3579 .test = alg_test_comp, 3580 .fips_allowed = 1, 3581 .suite = { 3582 .comp = { 3583 .comp = __VECS(zstd_comp_tv_template), 3584 .decomp = __VECS(zstd_decomp_tv_template) 3585 } 3586 } 3587 } 3588 }; 3589 3590 static bool alg_test_descs_checked; 3591 3592 static void alg_test_descs_check_order(void) 3593 { 3594 int i; 3595 3596 /* only check once */ 3597 if (alg_test_descs_checked) 3598 return; 3599 3600 alg_test_descs_checked = true; 3601 3602 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { 3603 int diff = strcmp(alg_test_descs[i - 1].alg, 3604 alg_test_descs[i].alg); 3605 3606 if (WARN_ON(diff > 0)) { 3607 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", 3608 alg_test_descs[i - 1].alg, 3609 alg_test_descs[i].alg); 3610 } 3611 3612 if (WARN_ON(diff == 0)) { 3613 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n", 3614 alg_test_descs[i].alg); 3615 } 3616 } 3617 } 3618 3619 static int alg_find_test(const char *alg) 3620 { 3621 int start = 0; 3622 int end = ARRAY_SIZE(alg_test_descs); 3623 3624 while (start < end) { 3625 int i = (start + end) / 2; 3626 int diff = strcmp(alg_test_descs[i].alg, alg); 3627 3628 if (diff > 0) { 3629 end = i; 3630 continue; 3631 } 3632 3633 if (diff < 0) { 3634 start = i + 1; 3635 continue; 3636 } 3637 3638 return i; 3639 } 3640 3641 return -1; 3642 } 3643 3644 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 3645 { 3646 int i; 3647 int j; 3648 int rc; 3649 3650 if (!fips_enabled && notests) { 3651 printk_once(KERN_INFO "alg: self-tests disabled\n"); 3652 return 0; 3653 } 3654 3655 alg_test_descs_check_order(); 3656 3657 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { 3658 char nalg[CRYPTO_MAX_ALG_NAME]; 3659 3660 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >= 3661 sizeof(nalg)) 3662 return -ENAMETOOLONG; 3663 3664 i = alg_find_test(nalg); 3665 if (i < 0) 3666 goto notest; 3667 3668 if (fips_enabled && !alg_test_descs[i].fips_allowed) 3669 goto non_fips_alg; 3670 3671 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask); 3672 goto test_done; 3673 } 3674 3675 i = alg_find_test(alg); 3676 j = alg_find_test(driver); 3677 if (i < 0 && j < 0) 3678 goto notest; 3679 3680 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) || 3681 (j >= 0 && !alg_test_descs[j].fips_allowed))) 3682 goto non_fips_alg; 3683 3684 rc = 0; 3685 if (i >= 0) 3686 rc |= alg_test_descs[i].test(alg_test_descs + i, driver, 3687 type, mask); 3688 if (j >= 0 && j != i) 3689 rc |= alg_test_descs[j].test(alg_test_descs + j, driver, 3690 type, mask); 3691 3692 test_done: 3693 if (fips_enabled && rc) 3694 panic("%s: %s alg self test failed in fips mode!\n", driver, alg); 3695 3696 if (fips_enabled && !rc) 3697 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg); 3698 3699 return rc; 3700 3701 notest: 3702 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); 3703 return 0; 3704 non_fips_alg: 3705 return -EINVAL; 3706 } 3707 3708 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */ 3709 3710 EXPORT_SYMBOL_GPL(alg_test); 3711