1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */ 2 3 /*- 4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu) 5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting 6 * 7 * This code was written by Angelos D. Keromytis in Athens, Greece, in 8 * February 2000. Network Security Technologies Inc. (NSTI) kindly 9 * supported the development of this code. 10 * 11 * Copyright (c) 2000, 2001 Angelos D. Keromytis 12 * Copyright (c) 2014 The FreeBSD Foundation 13 * All rights reserved. 14 * 15 * Portions of this software were developed by John-Mark Gurney 16 * under sponsorship of the FreeBSD Foundation and 17 * Rubicon Communications, LLC (Netgate). 18 * 19 * Permission to use, copy, and modify this software with or without fee 20 * is hereby granted, provided that this entire notice is included in 21 * all source code copies of any software which is or includes a copy or 22 * modification of this software. 23 * 24 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR 25 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY 26 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE 27 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR 28 * PURPOSE. 
29 */ 30 31 #include <sys/cdefs.h> 32 __FBSDID("$FreeBSD$"); 33 34 #include <sys/param.h> 35 #include <sys/systm.h> 36 #include <sys/malloc.h> 37 #include <sys/mbuf.h> 38 #include <sys/module.h> 39 #include <sys/sysctl.h> 40 #include <sys/errno.h> 41 #include <sys/random.h> 42 #include <sys/kernel.h> 43 #include <sys/uio.h> 44 #include <sys/lock.h> 45 #include <sys/rwlock.h> 46 #include <sys/endian.h> 47 #include <sys/limits.h> 48 49 #include <crypto/blowfish/blowfish.h> 50 #include <crypto/sha1.h> 51 #include <opencrypto/rmd160.h> 52 #include <opencrypto/cast.h> 53 #include <opencrypto/skipjack.h> 54 #include <sys/md5.h> 55 56 #include <opencrypto/cryptodev.h> 57 #include <opencrypto/cryptosoft.h> 58 #include <opencrypto/xform.h> 59 60 #include <sys/kobj.h> 61 #include <sys/bus.h> 62 #include "cryptodev_if.h" 63 64 static int32_t swcr_id; 65 66 u_int8_t hmac_ipad_buffer[HMAC_MAX_BLOCK_LEN]; 67 u_int8_t hmac_opad_buffer[HMAC_MAX_BLOCK_LEN]; 68 69 static int swcr_encdec(struct cryptodesc *, struct swcr_data *, caddr_t, int); 70 static int swcr_authcompute(struct cryptodesc *, struct swcr_data *, caddr_t, int); 71 static int swcr_authenc(struct cryptop *crp); 72 static int swcr_compdec(struct cryptodesc *, struct swcr_data *, caddr_t, int); 73 static void swcr_freesession(device_t dev, crypto_session_t cses); 74 75 /* 76 * Apply a symmetric encryption/decryption algorithm. 
*/
static int
swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
    int flags)
{
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	/* iv/iv2 alternate as the CBC chaining block on decrypt; ivp points
	 * at whichever holds the previous ciphertext block. */
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	struct enc_xform *exf;
	int i, j, k, blks, ind, count, ivlen;
	struct uio *uio, uiolcl;
	struct iovec iovlcl[4];
	struct iovec *iov;
	int iovcnt, iovalloc;
	int error;

	error = 0;

	exf = sw->sw_exf;
	blks = exf->blocksize;
	ivlen = exf->ivsize;

	/* Check for non-padded data */
	if (crd->crd_len % blks)
		return EINVAL;

	/* ICM (counter mode) has no IV stored in the buffer; the caller
	 * must supply it explicitly. */
	if (crd->crd_alg == CRYPTO_AES_ICM &&
	    (crd->crd_flags & CRD_F_IV_EXPLICIT) == 0)
		return (EINVAL);

	/* Initialize the IV */
	if (crd->crd_flags & CRD_F_ENCRYPT) {
		/* IV explicitly provided ? */
		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crd->crd_iv, iv, ivlen);
		else
			arc4rand(iv, ivlen, 0);

		/* Do we need to write the IV */
		if (!(crd->crd_flags & CRD_F_IV_PRESENT))
			crypto_copyback(flags, buf, crd->crd_inject, ivlen, iv);

	} else {	/* Decryption */
		/* IV explicitly provided ? */
		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crd->crd_iv, iv, ivlen);
		else {
			/* Get IV off buf */
			crypto_copydata(flags, buf, crd->crd_inject, ivlen, iv);
		}
	}

	/* Per-request key: throw away the session schedule and rebuild.
	 * NOTE(review): the inner "error" shadows the function-scope one;
	 * harmless here since both paths return immediately. */
	if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
		int error;

		if (sw->sw_kschedule)
			exf->zerokey(&(sw->sw_kschedule));

		error = exf->setkey(&sw->sw_kschedule,
		    crd->crd_key, crd->crd_klen / 8);
		if (error)
			return (error);
	}

	/* Normalize all three buffer flavors (mbuf chain, uio, flat
	 * buffer) into a struct uio so one walking loop handles them. */
	iov = iovlcl;
	iovcnt = nitems(iovlcl);
	iovalloc = 0;
	uio = &uiolcl;
	if ((flags & CRYPTO_F_IMBUF) != 0) {
		error = crypto_mbuftoiov((struct mbuf *)buf, &iov, &iovcnt,
		    &iovalloc);
		if (error)
			return (error);
		uio->uio_iov = iov;
		uio->uio_iovcnt = iovcnt;
	} else if ((flags & CRYPTO_F_IOV) != 0)
		uio = (struct uio *)buf;
	else {
		iov[0].iov_base = buf;
		iov[0].iov_len = crd->crd_skip + crd->crd_len;
		uio->uio_iov = iov;
		uio->uio_iovcnt = 1;
	}

	ivp = iv;

	if (exf->reinit) {
		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv);
	}

	/* count = absolute byte offset in the buffer; ind/k = current
	 * iovec index and offset within it. */
	count = crd->crd_skip;
	ind = cuio_getptr(uio, count, &k);
	if (ind == -1) {
		error = EINVAL;
		goto out;
	}

	i = crd->crd_len;	/* bytes remaining */

	while (i > 0) {
		/*
		 * If there's insufficient data at the end of
		 * an iovec, we have to do some copying.
		 */
		if (uio->uio_iov[ind].iov_len < k + blks &&
		    uio->uio_iov[ind].iov_len != k) {
			/* Gather one block that straddles iovecs. */
			cuio_copydata(uio, count, blks, blk);

			/* Actual encryption/decryption */
			if (exf->reinit) {
				if (crd->crd_flags & CRD_F_ENCRYPT) {
					exf->encrypt(sw->sw_kschedule,
					    blk);
				} else {
					exf->decrypt(sw->sw_kschedule,
					    blk);
				}
			} else if (crd->crd_flags & CRD_F_ENCRYPT) {
				/* XOR with previous block */
				for (j = 0; j < blks; j++)
					blk[j] ^= ivp[j];

				exf->encrypt(sw->sw_kschedule, blk);

				/*
				 * Keep encrypted block for XOR'ing
				 * with next block
				 */
				bcopy(blk, iv, blks);
				ivp = iv;
			} else {	/* decrypt */
				/*
				 * Keep encrypted block for XOR'ing
				 * with next block
				 */
				nivp = (ivp == iv) ? iv2 : iv;
				bcopy(blk, nivp, blks);

				exf->decrypt(sw->sw_kschedule, blk);

				/* XOR with previous block */
				for (j = 0; j < blks; j++)
					blk[j] ^= ivp[j];

				ivp = nivp;
			}

			/* Copy back decrypted block */
			cuio_copyback(uio, count, blks, blk);

			count += blks;

			/* Advance pointer */
			ind = cuio_getptr(uio, count, &k);
			if (ind == -1) {
				error = EINVAL;
				goto out;
			}

			i -= blks;

			/* Could be done... */
			if (i == 0)
				break;
		}

		/* Fast path: whole blocks processed in place within the
		 * current iovec, no staging copy. */
		while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
			uint8_t *idat;
			size_t nb, rem;

			nb = blks;
			rem = MIN((size_t)i,
			    uio->uio_iov[ind].iov_len - (size_t)k);
			idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;

			if (exf->reinit) {
				/* Multi-block entry points, when present,
				 * consume rounddown(rem, blks) at once. */
				if ((crd->crd_flags & CRD_F_ENCRYPT) != 0 &&
				    exf->encrypt_multi == NULL)
					exf->encrypt(sw->sw_kschedule,
					    idat);
				else if ((crd->crd_flags & CRD_F_ENCRYPT) != 0) {
					nb = rounddown(rem, blks);
					exf->encrypt_multi(sw->sw_kschedule,
					    idat, nb);
				} else if (exf->decrypt_multi == NULL)
					exf->decrypt(sw->sw_kschedule,
					    idat);
				else {
					nb = rounddown(rem, blks);
					exf->decrypt_multi(sw->sw_kschedule,
					    idat, nb);
				}
			} else if (crd->crd_flags & CRD_F_ENCRYPT) {
				/* XOR with previous block/IV */
				for (j = 0; j < blks; j++)
					idat[j] ^= ivp[j];

				exf->encrypt(sw->sw_kschedule, idat);
				ivp = idat;
			} else {	/* decrypt */
				/*
				 * Keep encrypted block to be used
				 * in next block's processing.
				 */
				nivp = (ivp == iv) ? iv2 : iv;
				bcopy(idat, nivp, blks);

				exf->decrypt(sw->sw_kschedule, idat);

				/* XOR with previous block/IV */
				for (j = 0; j < blks; j++)
					idat[j] ^= ivp[j];

				ivp = nivp;
			}

			count += nb;
			k += nb;
			i -= nb;
		}

		/*
		 * Advance to the next iov if the end of the current iov
		 * is aligned with the end of a cipher block.
		 * Note that the code is equivalent to calling:
		 *	ind = cuio_getptr(uio, count, &k);
		 */
		if (i > 0 && k == uio->uio_iov[ind].iov_len) {
			k = 0;
			ind++;
			if (ind >= uio->uio_iovcnt) {
				error = EINVAL;
				goto out;
			}
		}
	}

out:
	if (iovalloc)
		free(iov, M_CRYPTO_DATA);

	return (error);
}

/*
 * Derive the per-session inner/outer hash contexts (or cipher key state)
 * from a raw key.  For HMAC algorithms this precomputes the ipad/opad
 * states; for KPDK it hashes the key and stashes a copy in sw_octx.
 * NOTE: the HMAC cases XOR the pad values into the caller's key buffer
 * in place and restore it before returning.
 */
static void
swcr_authprepare(struct auth_hash *axf, struct swcr_data *sw, u_char *key,
    int klen)
{
	int k;

	klen /= 8;	/* bits -> bytes */

	switch (axf->type) {
	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		/* Inner context: H(key ^ ipad || ...) */
		for (k = 0; k < klen; k++)
			key[k] ^= HMAC_IPAD_VAL;

		axf->Init(sw->sw_ictx);
		axf->Update(sw->sw_ictx, key, klen);
		axf->Update(sw->sw_ictx, hmac_ipad_buffer, axf->blocksize - klen);

		/* Flip to opad in one pass (ipad ^ ipad ^ opad == opad). */
		for (k = 0; k < klen; k++)
			key[k] ^= (HMAC_IPAD_VAL ^ HMAC_OPAD_VAL);

		/* Outer context: H(key ^ opad || ...) */
		axf->Init(sw->sw_octx);
		axf->Update(sw->sw_octx, key, klen);
		axf->Update(sw->sw_octx, hmac_opad_buffer, axf->blocksize - klen);

		/* Restore the caller's key bytes. */
		for (k = 0; k < klen; k++)
			key[k] ^= HMAC_OPAD_VAL;
		break;
	case CRYPTO_MD5_KPDK:
	case CRYPTO_SHA1_KPDK:
	{
		/*
		 * We need a buffer that can hold an md5 and a sha1 result
		 * just to throw it away.
		 * What we do here is the initial part of:
		 *   ALGO( key, keyfill, .. )
		 * adding the key to sw_ictx and abusing Final() to get the
		 * "keyfill" padding.
		 * In addition we abuse the sw_octx to save the key to have
		 * it to be able to append it at the end in swcr_authcompute().
		 */
		u_char buf[SHA1_RESULTLEN];

		sw->sw_klen = klen;
		bcopy(key, sw->sw_octx, klen);
		axf->Init(sw->sw_ictx);
		axf->Update(sw->sw_ictx, key, klen);
		axf->Final(buf, sw->sw_ictx);
		break;
	}
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/* Keyed BLAKE2: key goes into the context, no opad state. */
		axf->Setkey(sw->sw_ictx, key, klen);
		axf->Init(sw->sw_ictx);
		break;
	default:
		printf("%s: CRD_F_KEY_EXPLICIT flag given, but algorithm %d "
		    "doesn't use keys.\n", __func__, axf->type);
	}
}

/*
 * Compute keyed-hash authenticator.
 */
static int
swcr_authcompute(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
    int flags)
{
	unsigned char aalg[HASH_MAX_LEN];
	struct auth_hash *axf;
	union authctx ctx;
	int err;

	if (sw->sw_ictx == 0)
		return EINVAL;

	axf = sw->sw_axf;

	/* Per-request key overrides the session key. */
	if (crd->crd_flags & CRD_F_KEY_EXPLICIT)
		swcr_authprepare(axf, sw, crd->crd_key, crd->crd_klen);

	/* Work on a copy so the precomputed session context is reusable. */
	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	err = crypto_apply(flags, buf, crd->crd_skip, crd->crd_len,
	    (int (*)(void *, void *, unsigned int))axf->Update, (caddr_t)&ctx);
	if (err)
		return err;

	switch (sw->sw_alg) {
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		/* Plain (unkeyed) hash. */
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		if (sw->sw_octx == NULL)
			return EINVAL;

		/* Outer pass: H(opad-state || inner digest). */
		axf->Final(aalg, &ctx);
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_MD5_KPDK:
	case CRYPTO_SHA1_KPDK:
		/* If we have no key saved, return error. */
		if (sw->sw_octx == NULL)
			return EINVAL;

		/*
		 * Add the trailing copy of the key (see comment in
		 * swcr_authprepare()) after the data:
		 *   ALGO( .., key, algofill )
		 * and let Final() do the proper, natural "algofill"
		 * padding.
		 */
		axf->Update(&ctx, sw->sw_octx, sw->sw_klen);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
	case CRYPTO_NULL_HMAC:
		axf->Final(aalg, &ctx);
		break;
	}

	/* Inject the authentication data */
	crypto_copyback(flags, buf, crd->crd_inject,
	    sw->sw_mlen == 0 ? axf->hashsize : sw->sw_mlen, aalg);
	return 0;
}

CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */

/*
 * Apply a combined encryption-authentication transformation
 */
static int
swcr_authenc(struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char uaalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	union authctx ctx;
	struct cryptodesc *crd, *crda = NULL, *crde = NULL;
	struct swcr_data *sw, *swa, *swe = NULL;
	struct auth_hash *axf = NULL;
	struct enc_xform *exf = NULL;
	caddr_t buf = (caddr_t)crp->crp_buf;
	uint32_t *blkp;
	int aadlen, blksz, i, ivlen, len, iskip, oskip, r;

	ivlen = blksz = iskip = oskip = 0;

	/* Pair up the cipher (crde/swe) and auth (crda/swa) descriptors. */
	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
		for (sw = crypto_get_driver_session(crp->crp_session);
		    sw && sw->sw_alg != crd->crd_alg;
		    sw = sw->sw_next)
			;
		if (sw == NULL)
			return (EINVAL);

		switch (sw->sw_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_NIST_GMAC:
			swe = sw;
			crde = crd;
			exf = swe->sw_exf;
			ivlen = 12;	/* 96-bit GCM nonce */
			break;
		case CRYPTO_AES_128_NIST_GMAC:
		case CRYPTO_AES_192_NIST_GMAC:
		case CRYPTO_AES_256_NIST_GMAC:
			swa = sw;
			crda = crd;
			axf = swa->sw_axf;
			if (swa->sw_ictx == 0)
				return (EINVAL);
			bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
			blksz = axf->blocksize;
			break;
		default:
			return (EINVAL);
		}
	}
	if (crde == NULL || crda == NULL)
		return (EINVAL);

	if (crde->crd_alg == CRYPTO_AES_NIST_GCM_16 &&
	    (crde->crd_flags & CRD_F_IV_EXPLICIT) == 0)
		return (EINVAL);

	if (crde->crd_klen != crda->crd_klen)
		return (EINVAL);

	/* Initialize the IV */
	if (crde->crd_flags & CRD_F_ENCRYPT) {
		/* IV explicitly provided ? */
		if (crde->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crde->crd_iv, iv, ivlen);
		else
			arc4rand(iv, ivlen, 0);

		/* Do we need to write the IV */
		if (!(crde->crd_flags & CRD_F_IV_PRESENT))
			crypto_copyback(crp->crp_flags, buf, crde->crd_inject,
			    ivlen, iv);

	} else {	/* Decryption */
		/* IV explicitly provided ? */
		if (crde->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crde->crd_iv, iv, ivlen);
		else {
			/* Get IV off buf */
			crypto_copydata(crp->crp_flags, buf, crde->crd_inject,
			    ivlen, iv);
		}
	}

	/* Supply MAC with IV */
	if (axf->Reinit)
		axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	aadlen = crda->crd_len;

	for (i = iskip; i < crda->crd_len; i += blksz) {
		len = MIN(crda->crd_len - i, blksz - oskip);
		crypto_copydata(crp->crp_flags, buf, crda->crd_skip + i, len,
		    blk + oskip);
		/* Zero-pad a short final AAD block. */
		bzero(blk + len + oskip, blksz - len - oskip);
		axf->Update(&ctx, blk, blksz);
		oskip = 0; /* reset initial output offset */
	}

	if (exf->reinit)
		exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption/decryption with MAC */
	for (i = 0; i < crde->crd_len; i += len) {
		if (exf->encrypt_multi != NULL) {
			len = rounddown(crde->crd_len - i, blksz);
			if (len == 0)
				len = blksz;
			else
				len = MIN(len, sizeof(blkbuf));
		} else
			len = blksz;
		len = MIN(crde->crd_len - i, len);
		if (len < blksz)
			bzero(blk, blksz);
		crypto_copydata(crp->crp_flags, buf, crde->crd_skip + i, len,
		    blk);
		if (crde->crd_flags & CRD_F_ENCRYPT) {
			if (exf->encrypt_multi != NULL)
				exf->encrypt_multi(swe->sw_kschedule, blk,
				    len);
			else
				exf->encrypt(swe->sw_kschedule, blk);
			axf->Update(&ctx, blk, len);
			crypto_copyback(crp->crp_flags, buf,
			    crde->crd_skip + i, len, blk);
		} else {
			/* Decrypt pass is deferred until the tag checks out
			 * below; here we only MAC the ciphertext. */
			axf->Update(&ctx, blk, len);
		}
	}

	/* Do any required special finalization */
	switch (crda->crd_alg) {
	case CRYPTO_AES_128_NIST_GMAC:
	case CRYPTO_AES_192_NIST_GMAC:
	case CRYPTO_AES_256_NIST_GMAC:
		/* length block */
		/* NOTE(review): aadlen * 8 and crd_len * 8 are computed in
		 * int before widening; lengths >= 2^28 bytes would overflow.
		 * Presumably bounded by callers — verify. */
		bzero(blk, blksz);
		blkp = (uint32_t *)blk + 1;
		*blkp = htobe32(aadlen * 8);
		blkp = (uint32_t *)blk + 3;
		*blkp = htobe32(crde->crd_len * 8);
		axf->Update(&ctx, blk, blksz);
		break;
	}

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	/* Validate tag */
	if (!(crde->crd_flags & CRD_F_ENCRYPT)) {
		crypto_copydata(crp->crp_flags, buf, crda->crd_inject,
		    axf->hashsize, uaalg);

		/* Constant-time compare to avoid a timing oracle. */
		r = timingsafe_bcmp(aalg, uaalg, axf->hashsize);
		if (r == 0) {
			/* tag matches, decrypt data */
			for (i = 0; i < crde->crd_len; i += blksz) {
				len = MIN(crde->crd_len - i, blksz);
				if (len < blksz)
					bzero(blk, blksz);
				crypto_copydata(crp->crp_flags, buf,
				    crde->crd_skip + i, len, blk);
				exf->decrypt(swe->sw_kschedule, blk);
				crypto_copyback(crp->crp_flags, buf,
				    crde->crd_skip + i, len, blk);
			}
		} else
			return (EBADMSG);
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp->crp_flags, buf, crda->crd_inject,
		    axf->hashsize, aalg);
	}

	return (0);
}

/*
 * Apply a compression/decompression algorithm
 */
static int
swcr_compdec(struct cryptodesc *crd, struct swcr_data *sw,
    caddr_t buf, int flags)
{
	u_int8_t *data, *out;
	struct comp_algo *cxf;
	int adj;
	u_int32_t result;

	cxf = sw->sw_cxf;

	/* We must handle the whole buffer of data in one time
	 * then if there is not all the data in the mbuf, we must
	 * copy in a buffer.
	 */

	data = malloc(crd->crd_len, M_CRYPTO_DATA, M_NOWAIT);
	if (data == NULL)
		return (EINVAL);
	crypto_copydata(flags, buf, crd->crd_skip, crd->crd_len, data);

	if (crd->crd_flags & CRD_F_COMP)
		result = cxf->compress(data, crd->crd_len, &out);
	else
		result = cxf->decompress(data, crd->crd_len, &out);

	free(data, M_CRYPTO_DATA);
	if (result == 0)
		return EINVAL;

	/* Copy back the (de)compressed data. m_copyback is
	 * extending the mbuf as necessary.
	 */
	sw->sw_size = result;
	/* Check the compressed size when doing compression */
	if (crd->crd_flags & CRD_F_COMP) {
		if (result >= crd->crd_len) {
			/* Compression was useless, we lost time */
			free(out, M_CRYPTO_DATA);
			return 0;
		}
	}

	crypto_copyback(flags, buf, crd->crd_skip, result, out);
	if (result < crd->crd_len) {
		/* Output shrank: trim the buffer down to "result" bytes. */
		adj = result - crd->crd_len;
		if (flags & CRYPTO_F_IMBUF) {
			adj = result - crd->crd_len;	/* negative: m_adj trims tail */
			m_adj((struct mbuf *)buf, adj);
		} else if (flags & CRYPTO_F_IOV) {
			struct uio *uio = (struct uio *)buf;
			int ind;

			/* Walk iovecs backward, shortening/dropping tails. */
			adj = crd->crd_len - result;
			ind = uio->uio_iovcnt - 1;

			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;
					break;
				}

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;
				ind--;
				uio->uio_iovcnt--;
			}
		}
	}
	free(out, M_CRYPTO_DATA);
	return 0;
}

/*
 * Generate a new software session.
736 */ 737 static int 738 swcr_newsession(device_t dev, crypto_session_t cses, struct cryptoini *cri) 739 { 740 struct swcr_data **swd, *ses; 741 struct auth_hash *axf; 742 struct enc_xform *txf; 743 struct comp_algo *cxf; 744 int len; 745 int error; 746 747 if (cses == NULL || cri == NULL) 748 return EINVAL; 749 750 ses = crypto_get_driver_session(cses); 751 swd = &ses; 752 753 while (cri) { 754 if (*swd == NULL) 755 *swd = malloc(sizeof(struct swcr_data), 756 M_CRYPTO_DATA, M_WAITOK | M_ZERO); 757 if (*swd == NULL) { 758 swcr_freesession(dev, cses); 759 return ENOBUFS; 760 } 761 762 switch (cri->cri_alg) { 763 case CRYPTO_DES_CBC: 764 txf = &enc_xform_des; 765 goto enccommon; 766 case CRYPTO_3DES_CBC: 767 txf = &enc_xform_3des; 768 goto enccommon; 769 case CRYPTO_BLF_CBC: 770 txf = &enc_xform_blf; 771 goto enccommon; 772 case CRYPTO_CAST_CBC: 773 txf = &enc_xform_cast5; 774 goto enccommon; 775 case CRYPTO_SKIPJACK_CBC: 776 txf = &enc_xform_skipjack; 777 goto enccommon; 778 case CRYPTO_RIJNDAEL128_CBC: 779 txf = &enc_xform_rijndael128; 780 goto enccommon; 781 case CRYPTO_AES_XTS: 782 txf = &enc_xform_aes_xts; 783 goto enccommon; 784 case CRYPTO_AES_ICM: 785 txf = &enc_xform_aes_icm; 786 goto enccommon; 787 case CRYPTO_AES_NIST_GCM_16: 788 txf = &enc_xform_aes_nist_gcm; 789 goto enccommon; 790 case CRYPTO_AES_NIST_GMAC: 791 txf = &enc_xform_aes_nist_gmac; 792 (*swd)->sw_exf = txf; 793 break; 794 case CRYPTO_CAMELLIA_CBC: 795 txf = &enc_xform_camellia; 796 goto enccommon; 797 case CRYPTO_NULL_CBC: 798 txf = &enc_xform_null; 799 goto enccommon; 800 case CRYPTO_CHACHA20: 801 txf = &enc_xform_chacha20; 802 goto enccommon; 803 enccommon: 804 if (cri->cri_key != NULL) { 805 error = txf->setkey(&((*swd)->sw_kschedule), 806 cri->cri_key, cri->cri_klen / 8); 807 if (error) { 808 swcr_freesession(dev, cses); 809 return error; 810 } 811 } 812 (*swd)->sw_exf = txf; 813 break; 814 815 case CRYPTO_MD5_HMAC: 816 axf = &auth_hash_hmac_md5; 817 goto authcommon; 818 case 
CRYPTO_SHA1_HMAC: 819 axf = &auth_hash_hmac_sha1; 820 goto authcommon; 821 case CRYPTO_SHA2_224_HMAC: 822 axf = &auth_hash_hmac_sha2_224; 823 goto authcommon; 824 case CRYPTO_SHA2_256_HMAC: 825 axf = &auth_hash_hmac_sha2_256; 826 goto authcommon; 827 case CRYPTO_SHA2_384_HMAC: 828 axf = &auth_hash_hmac_sha2_384; 829 goto authcommon; 830 case CRYPTO_SHA2_512_HMAC: 831 axf = &auth_hash_hmac_sha2_512; 832 goto authcommon; 833 case CRYPTO_NULL_HMAC: 834 axf = &auth_hash_null; 835 goto authcommon; 836 case CRYPTO_RIPEMD160_HMAC: 837 axf = &auth_hash_hmac_ripemd_160; 838 authcommon: 839 (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, 840 M_NOWAIT); 841 if ((*swd)->sw_ictx == NULL) { 842 swcr_freesession(dev, cses); 843 return ENOBUFS; 844 } 845 846 (*swd)->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA, 847 M_NOWAIT); 848 if ((*swd)->sw_octx == NULL) { 849 swcr_freesession(dev, cses); 850 return ENOBUFS; 851 } 852 853 if (cri->cri_key != NULL) { 854 swcr_authprepare(axf, *swd, cri->cri_key, 855 cri->cri_klen); 856 } 857 858 (*swd)->sw_mlen = cri->cri_mlen; 859 (*swd)->sw_axf = axf; 860 break; 861 862 case CRYPTO_MD5_KPDK: 863 axf = &auth_hash_key_md5; 864 goto auth2common; 865 866 case CRYPTO_SHA1_KPDK: 867 axf = &auth_hash_key_sha1; 868 auth2common: 869 (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, 870 M_NOWAIT); 871 if ((*swd)->sw_ictx == NULL) { 872 swcr_freesession(dev, cses); 873 return ENOBUFS; 874 } 875 876 (*swd)->sw_octx = malloc(cri->cri_klen / 8, 877 M_CRYPTO_DATA, M_NOWAIT); 878 if ((*swd)->sw_octx == NULL) { 879 swcr_freesession(dev, cses); 880 return ENOBUFS; 881 } 882 883 /* Store the key so we can "append" it to the payload */ 884 if (cri->cri_key != NULL) { 885 swcr_authprepare(axf, *swd, cri->cri_key, 886 cri->cri_klen); 887 } 888 889 (*swd)->sw_mlen = cri->cri_mlen; 890 (*swd)->sw_axf = axf; 891 break; 892 #ifdef notdef 893 case CRYPTO_MD5: 894 axf = &auth_hash_md5; 895 goto auth3common; 896 #endif 897 898 case CRYPTO_SHA1: 899 axf = 
&auth_hash_sha1; 900 goto auth3common; 901 case CRYPTO_SHA2_224: 902 axf = &auth_hash_sha2_224; 903 goto auth3common; 904 case CRYPTO_SHA2_256: 905 axf = &auth_hash_sha2_256; 906 goto auth3common; 907 case CRYPTO_SHA2_384: 908 axf = &auth_hash_sha2_384; 909 goto auth3common; 910 case CRYPTO_SHA2_512: 911 axf = &auth_hash_sha2_512; 912 913 auth3common: 914 (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, 915 M_NOWAIT); 916 if ((*swd)->sw_ictx == NULL) { 917 swcr_freesession(dev, cses); 918 return ENOBUFS; 919 } 920 921 axf->Init((*swd)->sw_ictx); 922 (*swd)->sw_mlen = cri->cri_mlen; 923 (*swd)->sw_axf = axf; 924 break; 925 926 case CRYPTO_AES_128_NIST_GMAC: 927 axf = &auth_hash_nist_gmac_aes_128; 928 goto auth4common; 929 930 case CRYPTO_AES_192_NIST_GMAC: 931 axf = &auth_hash_nist_gmac_aes_192; 932 goto auth4common; 933 934 case CRYPTO_AES_256_NIST_GMAC: 935 axf = &auth_hash_nist_gmac_aes_256; 936 auth4common: 937 len = cri->cri_klen / 8; 938 if (len != 16 && len != 24 && len != 32) { 939 swcr_freesession(dev, cses); 940 return EINVAL; 941 } 942 943 (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, 944 M_NOWAIT); 945 if ((*swd)->sw_ictx == NULL) { 946 swcr_freesession(dev, cses); 947 return ENOBUFS; 948 } 949 axf->Init((*swd)->sw_ictx); 950 axf->Setkey((*swd)->sw_ictx, cri->cri_key, len); 951 (*swd)->sw_axf = axf; 952 break; 953 954 case CRYPTO_BLAKE2B: 955 axf = &auth_hash_blake2b; 956 goto auth5common; 957 case CRYPTO_BLAKE2S: 958 axf = &auth_hash_blake2s; 959 auth5common: 960 (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, 961 M_NOWAIT); 962 if ((*swd)->sw_ictx == NULL) { 963 swcr_freesession(dev, cses); 964 return ENOBUFS; 965 } 966 axf->Setkey((*swd)->sw_ictx, cri->cri_key, 967 cri->cri_klen / 8); 968 axf->Init((*swd)->sw_ictx); 969 (*swd)->sw_axf = axf; 970 break; 971 972 case CRYPTO_DEFLATE_COMP: 973 cxf = &comp_algo_deflate; 974 (*swd)->sw_cxf = cxf; 975 break; 976 default: 977 swcr_freesession(dev, cses); 978 return EINVAL; 979 } 980 981 
(*swd)->sw_alg = cri->cri_alg; 982 cri = cri->cri_next; 983 swd = &((*swd)->sw_next); 984 } 985 return 0; 986 } 987 988 static void 989 swcr_freesession(device_t dev, crypto_session_t cses) 990 { 991 struct swcr_data *ses, *swd, *next; 992 struct enc_xform *txf; 993 struct auth_hash *axf; 994 995 ses = crypto_get_driver_session(cses); 996 997 for (swd = ses; swd != NULL; swd = next) { 998 next = swd->sw_next; 999 1000 switch (swd->sw_alg) { 1001 case CRYPTO_DES_CBC: 1002 case CRYPTO_3DES_CBC: 1003 case CRYPTO_BLF_CBC: 1004 case CRYPTO_CAST_CBC: 1005 case CRYPTO_SKIPJACK_CBC: 1006 case CRYPTO_RIJNDAEL128_CBC: 1007 case CRYPTO_AES_XTS: 1008 case CRYPTO_AES_ICM: 1009 case CRYPTO_AES_NIST_GCM_16: 1010 case CRYPTO_AES_NIST_GMAC: 1011 case CRYPTO_CAMELLIA_CBC: 1012 case CRYPTO_NULL_CBC: 1013 case CRYPTO_CHACHA20: 1014 txf = swd->sw_exf; 1015 1016 if (swd->sw_kschedule) 1017 txf->zerokey(&(swd->sw_kschedule)); 1018 break; 1019 1020 case CRYPTO_MD5_HMAC: 1021 case CRYPTO_SHA1_HMAC: 1022 case CRYPTO_SHA2_224_HMAC: 1023 case CRYPTO_SHA2_256_HMAC: 1024 case CRYPTO_SHA2_384_HMAC: 1025 case CRYPTO_SHA2_512_HMAC: 1026 case CRYPTO_RIPEMD160_HMAC: 1027 case CRYPTO_NULL_HMAC: 1028 axf = swd->sw_axf; 1029 1030 if (swd->sw_ictx) { 1031 bzero(swd->sw_ictx, axf->ctxsize); 1032 free(swd->sw_ictx, M_CRYPTO_DATA); 1033 } 1034 if (swd->sw_octx) { 1035 bzero(swd->sw_octx, axf->ctxsize); 1036 free(swd->sw_octx, M_CRYPTO_DATA); 1037 } 1038 break; 1039 1040 case CRYPTO_MD5_KPDK: 1041 case CRYPTO_SHA1_KPDK: 1042 axf = swd->sw_axf; 1043 1044 if (swd->sw_ictx) { 1045 bzero(swd->sw_ictx, axf->ctxsize); 1046 free(swd->sw_ictx, M_CRYPTO_DATA); 1047 } 1048 if (swd->sw_octx) { 1049 bzero(swd->sw_octx, swd->sw_klen); 1050 free(swd->sw_octx, M_CRYPTO_DATA); 1051 } 1052 break; 1053 1054 case CRYPTO_BLAKE2B: 1055 case CRYPTO_BLAKE2S: 1056 case CRYPTO_MD5: 1057 case CRYPTO_SHA1: 1058 case CRYPTO_SHA2_224: 1059 case CRYPTO_SHA2_256: 1060 case CRYPTO_SHA2_384: 1061 case CRYPTO_SHA2_512: 1062 axf = 
swd->sw_axf; 1063 1064 if (swd->sw_ictx) { 1065 explicit_bzero(swd->sw_ictx, axf->ctxsize); 1066 free(swd->sw_ictx, M_CRYPTO_DATA); 1067 } 1068 break; 1069 1070 case CRYPTO_DEFLATE_COMP: 1071 /* Nothing to do */ 1072 break; 1073 } 1074 1075 /* OCF owns and frees the primary session object */ 1076 if (swd != ses) 1077 free(swd, M_CRYPTO_DATA); 1078 } 1079 } 1080 1081 /* 1082 * Process a software request. 1083 */ 1084 static int 1085 swcr_process(device_t dev, struct cryptop *crp, int hint) 1086 { 1087 struct cryptodesc *crd; 1088 struct swcr_data *sw, *ses; 1089 1090 /* Sanity check */ 1091 if (crp == NULL) 1092 return EINVAL; 1093 1094 if (crp->crp_desc == NULL || crp->crp_buf == NULL) { 1095 crp->crp_etype = EINVAL; 1096 goto done; 1097 } 1098 1099 ses = crypto_get_driver_session(crp->crp_session); 1100 1101 /* Go through crypto descriptors, processing as we go */ 1102 for (crd = crp->crp_desc; crd; crd = crd->crd_next) { 1103 /* 1104 * Find the crypto context. 1105 * 1106 * XXX Note that the logic here prevents us from having 1107 * XXX the same algorithm multiple times in a session 1108 * XXX (or rather, we can but it won't give us the right 1109 * XXX results). To do that, we'd need some way of differentiating 1110 * XXX between the various instances of an algorithm (so we can 1111 * XXX locate the correct crypto context). 1112 */ 1113 for (sw = ses; sw && sw->sw_alg != crd->crd_alg; 1114 sw = sw->sw_next) 1115 ; 1116 1117 /* No such context ? 
*/ 1118 if (sw == NULL) { 1119 crp->crp_etype = EINVAL; 1120 goto done; 1121 } 1122 switch (sw->sw_alg) { 1123 case CRYPTO_DES_CBC: 1124 case CRYPTO_3DES_CBC: 1125 case CRYPTO_BLF_CBC: 1126 case CRYPTO_CAST_CBC: 1127 case CRYPTO_SKIPJACK_CBC: 1128 case CRYPTO_RIJNDAEL128_CBC: 1129 case CRYPTO_AES_XTS: 1130 case CRYPTO_AES_ICM: 1131 case CRYPTO_CAMELLIA_CBC: 1132 case CRYPTO_CHACHA20: 1133 if ((crp->crp_etype = swcr_encdec(crd, sw, 1134 crp->crp_buf, crp->crp_flags)) != 0) 1135 goto done; 1136 break; 1137 case CRYPTO_NULL_CBC: 1138 crp->crp_etype = 0; 1139 break; 1140 case CRYPTO_MD5_HMAC: 1141 case CRYPTO_SHA1_HMAC: 1142 case CRYPTO_SHA2_224_HMAC: 1143 case CRYPTO_SHA2_256_HMAC: 1144 case CRYPTO_SHA2_384_HMAC: 1145 case CRYPTO_SHA2_512_HMAC: 1146 case CRYPTO_RIPEMD160_HMAC: 1147 case CRYPTO_NULL_HMAC: 1148 case CRYPTO_MD5_KPDK: 1149 case CRYPTO_SHA1_KPDK: 1150 case CRYPTO_MD5: 1151 case CRYPTO_SHA1: 1152 case CRYPTO_SHA2_224: 1153 case CRYPTO_SHA2_256: 1154 case CRYPTO_SHA2_384: 1155 case CRYPTO_SHA2_512: 1156 case CRYPTO_BLAKE2B: 1157 case CRYPTO_BLAKE2S: 1158 if ((crp->crp_etype = swcr_authcompute(crd, sw, 1159 crp->crp_buf, crp->crp_flags)) != 0) 1160 goto done; 1161 break; 1162 1163 case CRYPTO_AES_NIST_GCM_16: 1164 case CRYPTO_AES_NIST_GMAC: 1165 case CRYPTO_AES_128_NIST_GMAC: 1166 case CRYPTO_AES_192_NIST_GMAC: 1167 case CRYPTO_AES_256_NIST_GMAC: 1168 crp->crp_etype = swcr_authenc(crp); 1169 goto done; 1170 1171 case CRYPTO_DEFLATE_COMP: 1172 if ((crp->crp_etype = swcr_compdec(crd, sw, 1173 crp->crp_buf, crp->crp_flags)) != 0) 1174 goto done; 1175 else 1176 crp->crp_olen = (int)sw->sw_size; 1177 break; 1178 1179 default: 1180 /* Unknown/unsupported algorithm */ 1181 crp->crp_etype = EINVAL; 1182 goto done; 1183 } 1184 } 1185 1186 done: 1187 crypto_done(crp); 1188 return 0; 1189 } 1190 1191 static void 1192 swcr_identify(driver_t *drv, device_t parent) 1193 { 1194 /* NB: order 10 is so we get attached after h/w devices */ 1195 if (device_find_child(parent, 
"cryptosoft", -1) == NULL && 1196 BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0) 1197 panic("cryptosoft: could not attach"); 1198 } 1199 1200 static int 1201 swcr_probe(device_t dev) 1202 { 1203 device_set_desc(dev, "software crypto"); 1204 return (BUS_PROBE_NOWILDCARD); 1205 } 1206 1207 static int 1208 swcr_attach(device_t dev) 1209 { 1210 memset(hmac_ipad_buffer, HMAC_IPAD_VAL, HMAC_MAX_BLOCK_LEN); 1211 memset(hmac_opad_buffer, HMAC_OPAD_VAL, HMAC_MAX_BLOCK_LEN); 1212 1213 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_data), 1214 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC); 1215 if (swcr_id < 0) { 1216 device_printf(dev, "cannot initialize!"); 1217 return ENOMEM; 1218 } 1219 #define REGISTER(alg) \ 1220 crypto_register(swcr_id, alg, 0,0) 1221 REGISTER(CRYPTO_DES_CBC); 1222 REGISTER(CRYPTO_3DES_CBC); 1223 REGISTER(CRYPTO_BLF_CBC); 1224 REGISTER(CRYPTO_CAST_CBC); 1225 REGISTER(CRYPTO_SKIPJACK_CBC); 1226 REGISTER(CRYPTO_NULL_CBC); 1227 REGISTER(CRYPTO_MD5_HMAC); 1228 REGISTER(CRYPTO_SHA1_HMAC); 1229 REGISTER(CRYPTO_SHA2_224_HMAC); 1230 REGISTER(CRYPTO_SHA2_256_HMAC); 1231 REGISTER(CRYPTO_SHA2_384_HMAC); 1232 REGISTER(CRYPTO_SHA2_512_HMAC); 1233 REGISTER(CRYPTO_RIPEMD160_HMAC); 1234 REGISTER(CRYPTO_NULL_HMAC); 1235 REGISTER(CRYPTO_MD5_KPDK); 1236 REGISTER(CRYPTO_SHA1_KPDK); 1237 REGISTER(CRYPTO_MD5); 1238 REGISTER(CRYPTO_SHA1); 1239 REGISTER(CRYPTO_SHA2_224); 1240 REGISTER(CRYPTO_SHA2_256); 1241 REGISTER(CRYPTO_SHA2_384); 1242 REGISTER(CRYPTO_SHA2_512); 1243 REGISTER(CRYPTO_RIJNDAEL128_CBC); 1244 REGISTER(CRYPTO_AES_XTS); 1245 REGISTER(CRYPTO_AES_ICM); 1246 REGISTER(CRYPTO_AES_NIST_GCM_16); 1247 REGISTER(CRYPTO_AES_NIST_GMAC); 1248 REGISTER(CRYPTO_AES_128_NIST_GMAC); 1249 REGISTER(CRYPTO_AES_192_NIST_GMAC); 1250 REGISTER(CRYPTO_AES_256_NIST_GMAC); 1251 REGISTER(CRYPTO_CAMELLIA_CBC); 1252 REGISTER(CRYPTO_DEFLATE_COMP); 1253 REGISTER(CRYPTO_BLAKE2B); 1254 REGISTER(CRYPTO_BLAKE2S); 1255 REGISTER(CRYPTO_CHACHA20); 1256 #undef REGISTER 1257 1258 return 0; 1259 } 

/*
 * Detach: deregister every algorithm this driver registered.
 */
static int
swcr_detach(device_t dev)
{
	crypto_unregister_all(swcr_id);
	return 0;
}

/* newbus + OCF dispatch table for the software crypto device. */
static device_method_t swcr_methods[] = {
	DEVMETHOD(device_identify,	swcr_identify),
	DEVMETHOD(device_probe,		swcr_probe),
	DEVMETHOD(device_attach,	swcr_attach),
	DEVMETHOD(device_detach,	swcr_detach),

	DEVMETHOD(cryptodev_newsession,	swcr_newsession),
	DEVMETHOD(cryptodev_freesession,swcr_freesession),
	DEVMETHOD(cryptodev_process,	swcr_process),

	{0, 0},
};

static driver_t swcr_driver = {
	"cryptosoft",
	swcr_methods,
	0,		/* NB: no softc */
};
static devclass_t swcr_devclass;

/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module.  This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);
/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);