/*	$OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $	*/

/*-
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/uio.h>
#include <sys/lock.h>
#include <sys/rwlock.h>
#include <sys/endian.h>
#include <sys/limits.h>
#include <sys/mutex.h>

#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include <sys/kobj.h>
#include <sys/bus.h>
#include "cryptodev_if.h"

struct swcr_auth {
	void		*sw_ictx;
	void		*sw_octx;
	struct auth_hash *sw_axf;
	uint16_t	sw_mlen;
};

struct swcr_encdec {
	void		*sw_kschedule;
	struct enc_xform *sw_exf;
};

struct swcr_compdec {
	struct comp_algo *sw_cxf;
};

struct swcr_session {
	struct mtx	swcr_lock;
	int		(*swcr_process)(struct swcr_session *, struct cryptop *);

	struct swcr_auth swcr_auth;
	struct swcr_encdec swcr_encdec;
	struct swcr_compdec swcr_compdec;
};
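
/*
 * Requests are dispatched through ses->swcr_process; the swcr_setup_*()
 * routines below install one of the handlers that follow (for example
 * swcr_authcompute, swcr_gmac or swcr_ccm_cbc_mac for digest sessions)
 * depending on the session parameters.
 */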

static int32_t swcr_id;

static void swcr_freesession(device_t dev, crypto_session_t cses);

/* Used for CRYPTO_NULL_CBC. */
static int
swcr_null(struct swcr_session *ses, struct cryptop *crp)
{

	return (0);
}

/*
 * Apply a symmetric encryption/decryption algorithm.
 */
static int
swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
{
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	const struct crypto_session_params *csp;
	struct swcr_encdec *sw;
	struct enc_xform *exf;
	int i, blks, inlen, ivlen, outlen, resid;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *inblk;
	unsigned char *outblk;
	int error;
	bool encrypting;

	error = 0;

	sw = &ses->swcr_encdec;
	exf = sw->sw_exf;
	ivlen = exf->ivsize;

	if (exf->native_blocksize == 0) {
		/* Check for non-padded data */
		if ((crp->crp_payload_length % exf->blocksize) != 0)
			return (EINVAL);

		blks = exf->blocksize;
	} else
		blks = exf->native_blocksize;

	if (exf == &enc_xform_aes_icm &&
	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	if (crp->crp_cipher_key != NULL) {
		csp = crypto_get_params(crp->crp_session);
		error = exf->setkey(sw->sw_kschedule,
		    crp->crp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}

	crypto_read_iv(crp, iv);

	if (exf->reinit) {
		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv);
	}

	ivp = iv;

	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inlen = crypto_cursor_seglen(&cc_in);
	inblk = crypto_cursor_segbase(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outlen = crypto_cursor_seglen(&cc_out);
	outblk = crypto_cursor_segbase(&cc_out);

	resid = crp->crp_payload_length;
	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

	/*
	 * Loop through encrypting blocks.  'inlen' is the remaining
	 * length of the current segment in the input buffer.
	 * 'outlen' is the remaining length of the current segment in
	 * the output buffer.
	 */
	while (resid >= blks) {
		/*
		 * If the current block is not contained within the
		 * current input/output segment, use 'blk' as a local
		 * buffer.
		 */
		if (inlen < blks) {
			crypto_cursor_copydata(&cc_in, blks, blk);
			inblk = blk;
		}
		if (outlen < blks)
			outblk = blk;

		/*
		 * Ciphers without a 'reinit' hook are assumed to be
		 * used in CBC mode where the chaining is done here.
		 */
		if (exf->reinit != NULL) {
			if (encrypting)
				exf->encrypt(sw->sw_kschedule, inblk, outblk);
			else
				exf->decrypt(sw->sw_kschedule, inblk, outblk);
		} else if (encrypting) {
			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] = inblk[i] ^ ivp[i];

			exf->encrypt(sw->sw_kschedule, outblk, outblk);

			/*
			 * Keep encrypted block for XOR'ing
			 * with next block
			 */
			memcpy(iv, outblk, blks);
			ivp = iv;
		} else {	/* decrypt */
			/*
			 * Keep encrypted block for XOR'ing
			 * with next block
			 */
			nivp = (ivp == iv) ? iv2 : iv;
			memcpy(nivp, inblk, blks);

			exf->decrypt(sw->sw_kschedule, inblk, outblk);

			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] ^= ivp[i];

			ivp = nivp;
		}

		if (inlen < blks) {
			inlen = crypto_cursor_seglen(&cc_in);
			inblk = crypto_cursor_segbase(&cc_in);
		} else {
			crypto_cursor_advance(&cc_in, blks);
			inlen -= blks;
			inblk += blks;
		}

		if (outlen < blks) {
			crypto_cursor_copyback(&cc_out, blks, blk);
			outlen = crypto_cursor_seglen(&cc_out);
			outblk = crypto_cursor_segbase(&cc_out);
		} else {
			crypto_cursor_advance(&cc_out, blks);
			outlen -= blks;
			outblk += blks;
		}

		resid -= blks;
	}

	/* Handle trailing partial block for stream ciphers. */
	if (resid > 0) {
		KASSERT(exf->native_blocksize != 0,
		    ("%s: partial block of %d bytes for cipher %s",
		    __func__, resid, exf->name));
		KASSERT(exf->reinit != NULL,
		    ("%s: partial block cipher %s without reinit hook",
		    __func__, exf->name));
		KASSERT(resid < blks, ("%s: partial block too big", __func__));

		inlen = crypto_cursor_seglen(&cc_in);
		outlen = crypto_cursor_seglen(&cc_out);
		if (inlen < resid) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			inblk = blk;
		} else
			inblk = crypto_cursor_segbase(&cc_in);
		if (outlen < resid)
			outblk = blk;
		else
			outblk = crypto_cursor_segbase(&cc_out);
		if (encrypting)
			exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);
		else
			exf->decrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);
		if (outlen < resid)
			crypto_cursor_copyback(&cc_out, resid, blk);
	}

	explicit_bzero(blk, sizeof(blk));
	explicit_bzero(iv, sizeof(iv));
	explicit_bzero(iv2, sizeof(iv2));
	return (0);
}
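
/*
 * Note on the CBC path above: with C[-1] = IV, encryption computes
 * C[i] = E_K(P[i] ^ C[i-1]) and decryption computes
 * P[i] = D_K(C[i]) ^ C[i-1].  The decrypt branch copies the ciphertext
 * block aside (nivp) before calling exf->decrypt so that the chaining
 * value survives when the operation is done in place.
 */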

static void
swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
    const uint8_t *key, int klen)
{

	switch (axf->type) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		hmac_init_ipad(axf, key, klen, sw->sw_ictx);
		hmac_init_opad(axf, key, klen, sw->sw_octx);
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		axf->Setkey(sw->sw_ictx, key, klen);
		axf->Init(sw->sw_ictx);
		break;
	default:
		panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
	}
}

/*
 * Compute or verify hash.
 */
static int
swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
{
	u_char aalg[HASH_MAX_LEN];
	const struct crypto_session_params *csp;
	struct swcr_auth *sw;
	struct auth_hash *axf;
	union authctx ctx;
	int err;

	sw = &ses->swcr_auth;

	axf = sw->sw_axf;

	csp = crypto_get_params(crp->crp_session);
	if (crp->crp_auth_key != NULL) {
		swcr_authprepare(axf, sw, crp->crp_auth_key,
		    csp->csp_auth_klen);
	}

	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	if (crp->crp_aad != NULL)
		err = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    axf->Update, &ctx);
	if (err)
		goto out;

	if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
	    CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
		err = crypto_apply_buf(&crp->crp_obuf,
		    crp->crp_payload_output_start, crp->crp_payload_length,
		    axf->Update, &ctx);
	else
		err = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &ctx);
	if (err)
		goto out;

	if (csp->csp_flags & CSP_F_ESN)
		axf->Update(&ctx, crp->crp_esn, 4);

	axf->Final(aalg, &ctx);
	if (sw->sw_octx != NULL) {
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);
	}

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char uaalg[HASH_MAX_LEN];

		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
		if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
			err = EBADMSG;
		explicit_bzero(uaalg, sizeof(uaalg));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
	}
	explicit_bzero(aalg, sizeof(aalg));
out:
	explicit_bzero(&ctx, sizeof(ctx));
	return (err);
}
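
/*
 * For the HMAC algorithms, sw_ictx and sw_octx hold hash states pre-keyed
 * with (K ^ ipad) and (K ^ opad), so the code above implements the usual
 * HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)): the inner digest is
 * produced first and then fed through the outer context.
 */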

CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */

static int
swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[GMAC_DIGEST_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc;
	const u_char *inblk;
	union authctx ctx;
	struct swcr_auth *swa;
	struct auth_hash *axf;
	uint32_t *blkp;
	int blksz, error, ivlen, len, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	crypto_read_iv(crp, iv);

	axf->Reinit(&ctx, iv, ivlen);
	crypto_cursor_init(&cc, &crp->crp_buf);
	crypto_cursor_advance(&cc, crp->crp_payload_start);
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
		len = crypto_cursor_seglen(&cc);
		if (len >= blksz) {
			inblk = crypto_cursor_segbase(&cc);
			len = rounddown(MIN(len, resid), blksz);
			crypto_cursor_advance(&cc, len);
		} else {
			len = blksz;
			crypto_cursor_copydata(&cc, len, blk);
			inblk = blk;
		}
		axf->Update(&ctx, inblk, len);
	}
	if (resid > 0) {
		memset(blk, 0, blksz);
		crypto_cursor_copydata(&cc, resid, blk);
		axf->Update(&ctx, blk, blksz);
	}

	/* length block */
	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	error = 0;
	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char tag2[GMAC_DIGEST_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);
		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
			error = EBADMSG;
		explicit_bzero(tag2, sizeof(tag2));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}

static int
swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[GMAC_DIGEST_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	const u_char *inblk;
	u_char *outblk;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	uint32_t *blkp;
	int blksz, error, ivlen, len, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL) {
		len = rounddown(crp->crp_aad_length, blksz);
		if (len != 0)
			axf->Update(&ctx, crp->crp_aad, len);
		if (crp->crp_aad_length != len) {
			memset(blk, 0, blksz);
			memcpy(blk, (char *)crp->crp_aad + len,
			    crp->crp_aad_length - len);
			axf->Update(&ctx, blk, blksz);
		}
	} else {
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_aad_start);
		for (resid = crp->crp_aad_length; resid >= blksz;
		    resid -= len) {
			len = crypto_cursor_seglen(&cc_in);
			if (len >= blksz) {
				inblk = crypto_cursor_segbase(&cc_in);
				len = rounddown(MIN(len, resid), blksz);
				crypto_cursor_advance(&cc_in, len);
			} else {
				len = blksz;
				crypto_cursor_copydata(&cc_in, len, blk);
				inblk = blk;
			}
			axf->Update(&ctx, inblk, len);
		}
		if (resid > 0) {
			memset(blk, 0, blksz);
			crypto_cursor_copydata(&cc_in, resid, blk);
			axf->Update(&ctx, blk, blksz);
		}
	}

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		if (crypto_cursor_seglen(&cc_in) < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);
			inblk = blk;
		} else {
			inblk = crypto_cursor_segbase(&cc_in);
			crypto_cursor_advance(&cc_in, blksz);
		}
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			if (crypto_cursor_seglen(&cc_out) < blksz)
				outblk = blk;
			else
				outblk = crypto_cursor_segbase(&cc_out);
			exf->encrypt(swe->sw_kschedule, inblk, outblk);
			axf->Update(&ctx, outblk, blksz);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		} else {
			axf->Update(&ctx, inblk, blksz);
		}
	}
	if (resid > 0) {
		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
		axf->Update(&ctx, blk, resid);
	}

	/* length block */
	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_aad_length * 8);
	blkp = (uint32_t *)blk + 3;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[GMAC_DIGEST_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			if (crypto_cursor_seglen(&cc_in) < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);
				inblk = blk;
			} else {
				inblk = crypto_cursor_segbase(&cc_in);
				crypto_cursor_advance(&cc_in, blksz);
			}
			if (crypto_cursor_seglen(&cc_out) < blksz)
				outblk = blk;
			else
				outblk = crypto_cursor_segbase(&cc_out);
			exf->decrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		}
		if (resid > 0) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));

	return (error);
}
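
/*
 * The final GHASH block above is the standard GCM length block,
 * len(A) || len(C): the AAD and ciphertext lengths in bits, each as a
 * 64-bit big-endian value.  Request lengths are ints (see the CTASSERTs
 * before swcr_gmac()), so only the low 32-bit word of each half is
 * written, at byte offsets 4 and 12 of the zeroed block.
 */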

static int
swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
{
	u_char tag[AES_CBC_MAC_HASH_LEN];
	u_char iv[AES_BLOCK_LEN];
	union authctx ctx;
	struct swcr_auth *swa;
	struct auth_hash *axf;
	int error, ivlen;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	crypto_read_iv(crp, iv);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = 0;

	axf->Reinit(&ctx, iv, ivlen);
	if (crp->crp_aad != NULL)
		error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		error = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &ctx);
	if (error)
		return (error);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char tag2[AES_CBC_MAC_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);
		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
			error = EBADMSG;
		explicit_bzero(tag2, sizeof(tag2));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}

static int
swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[AES_CBC_MAC_HASH_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	const u_char *inblk;
	u_char *outblk;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	int blksz, error, ivlen, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = AES_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL)
		error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		error = crypto_apply(crp, crp->crp_aad_start,
		    crp->crp_aad_length, axf->Update, &ctx);
	if (error)
		return (error);

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption/decryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		if (crypto_cursor_seglen(&cc_in) < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);
			inblk = blk;
		} else {
			inblk = crypto_cursor_segbase(&cc_in);
			crypto_cursor_advance(&cc_in, blksz);
		}
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			if (crypto_cursor_seglen(&cc_out) < blksz)
				outblk = blk;
			else
				outblk = crypto_cursor_segbase(&cc_out);
			axf->Update(&ctx, inblk, blksz);
			exf->encrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		} else {
			/*
			 * One of the problems with CCM+CBC is that
			 * the authentication is done on the
			 * unencrypted data.  As a result, we have to
			 * decrypt the data twice: once to generate
			 * the tag and a second time after the tag is
			 * verified.
			 */
			exf->decrypt(swe->sw_kschedule, inblk, blk);
			axf->Update(&ctx, blk, blksz);
		}
	}
	if (resid > 0) {
		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			axf->Update(&ctx, blk, resid);
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		} else {
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			axf->Update(&ctx, blk, resid);
		}
	}

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[AES_CBC_MAC_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
		exf->reinit(swe->sw_kschedule, iv);
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			if (crypto_cursor_seglen(&cc_in) < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);
				inblk = blk;
			} else {
				inblk = crypto_cursor_segbase(&cc_in);
				crypto_cursor_advance(&cc_in, blksz);
			}
			if (crypto_cursor_seglen(&cc_out) < blksz)
				outblk = blk;
			else
				outblk = crypto_cursor_segbase(&cc_out);
			exf->decrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		}
		if (resid > 0) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}
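
/*
 * Unlike GCM above, which authenticates the ciphertext, CCM's CBC-MAC is
 * computed over the plaintext: on encryption each block is fed to the MAC
 * before it is encrypted, and on decryption it must be decrypted before it
 * can be MAC'ed, which is why the verify path decrypts the payload a second
 * time once the tag has been checked.
 */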
85126d292d3SJohn Baldwin crypto_cursor_advance(&cc_out, blksz); 85226d292d3SJohn Baldwin } 85326d292d3SJohn Baldwin if (resid > 0) { 85426d292d3SJohn Baldwin crypto_cursor_copydata(&cc_in, resid, blk); 85526d292d3SJohn Baldwin exf->decrypt_last(swe->sw_kschedule, blk, blk, resid); 85626d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, resid, blk); 85708fca7a5SJohn-Mark Gurney } 85808fca7a5SJohn-Mark Gurney } else { 85908fca7a5SJohn-Mark Gurney /* Inject the authentication data */ 86026d292d3SJohn Baldwin crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag); 86108fca7a5SJohn-Mark Gurney } 86208fca7a5SJohn-Mark Gurney 86320c128daSJohn Baldwin out: 86420c128daSJohn Baldwin explicit_bzero(blkbuf, sizeof(blkbuf)); 86526d292d3SJohn Baldwin explicit_bzero(tag, sizeof(tag)); 86620c128daSJohn Baldwin explicit_bzero(iv, sizeof(iv)); 86720c128daSJohn Baldwin return (error); 86808fca7a5SJohn-Mark Gurney } 86908fca7a5SJohn-Mark Gurney 870*dd2e1352SJohn Baldwin static int 871*dd2e1352SJohn Baldwin swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp) 872*dd2e1352SJohn Baldwin { 873*dd2e1352SJohn Baldwin const struct crypto_session_params *csp; 874*dd2e1352SJohn Baldwin uint64_t blkbuf[howmany(CHACHA20_NATIVE_BLOCK_LEN, sizeof(uint64_t))]; 875*dd2e1352SJohn Baldwin u_char *blk = (u_char *)blkbuf; 876*dd2e1352SJohn Baldwin u_char tag[POLY1305_HASH_LEN]; 877*dd2e1352SJohn Baldwin struct crypto_buffer_cursor cc_in, cc_out; 878*dd2e1352SJohn Baldwin const u_char *inblk; 879*dd2e1352SJohn Baldwin u_char *outblk; 880*dd2e1352SJohn Baldwin uint64_t *blkp; 881*dd2e1352SJohn Baldwin union authctx ctx; 882*dd2e1352SJohn Baldwin struct swcr_auth *swa; 883*dd2e1352SJohn Baldwin struct swcr_encdec *swe; 884*dd2e1352SJohn Baldwin struct auth_hash *axf; 885*dd2e1352SJohn Baldwin struct enc_xform *exf; 886*dd2e1352SJohn Baldwin int blksz, error, r, resid; 887*dd2e1352SJohn Baldwin 888*dd2e1352SJohn Baldwin swa = &ses->swcr_auth; 889*dd2e1352SJohn Baldwin axf = swa->sw_axf; 890*dd2e1352SJohn Baldwin 891*dd2e1352SJohn Baldwin swe = &ses->swcr_encdec; 892*dd2e1352SJohn Baldwin exf = swe->sw_exf; 893*dd2e1352SJohn Baldwin blksz = exf->native_blocksize; 894*dd2e1352SJohn Baldwin KASSERT(blksz <= sizeof(blkbuf), ("%s: blocksize mismatch", __func__)); 895*dd2e1352SJohn Baldwin 896*dd2e1352SJohn Baldwin if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0) 897*dd2e1352SJohn Baldwin return (EINVAL); 898*dd2e1352SJohn Baldwin 899*dd2e1352SJohn Baldwin csp = crypto_get_params(crp->crp_session); 900*dd2e1352SJohn Baldwin 901*dd2e1352SJohn Baldwin /* Generate Poly1305 key. 
*/ 902*dd2e1352SJohn Baldwin if (crp->crp_cipher_key != NULL) 903*dd2e1352SJohn Baldwin axf->Setkey(&ctx, crp->crp_cipher_key, csp->csp_cipher_klen); 904*dd2e1352SJohn Baldwin else 905*dd2e1352SJohn Baldwin axf->Setkey(&ctx, csp->csp_cipher_key, csp->csp_cipher_klen); 906*dd2e1352SJohn Baldwin axf->Reinit(&ctx, crp->crp_iv, csp->csp_ivlen); 907*dd2e1352SJohn Baldwin 908*dd2e1352SJohn Baldwin /* Supply MAC with AAD */ 909*dd2e1352SJohn Baldwin if (crp->crp_aad != NULL) 910*dd2e1352SJohn Baldwin axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length); 911*dd2e1352SJohn Baldwin else 912*dd2e1352SJohn Baldwin crypto_apply(crp, crp->crp_aad_start, 913*dd2e1352SJohn Baldwin crp->crp_aad_length, axf->Update, &ctx); 914*dd2e1352SJohn Baldwin if (crp->crp_aad_length % 16 != 0) { 915*dd2e1352SJohn Baldwin /* padding1 */ 916*dd2e1352SJohn Baldwin memset(blk, 0, 16); 917*dd2e1352SJohn Baldwin axf->Update(&ctx, blk, 16 - crp->crp_aad_length % 16); 918*dd2e1352SJohn Baldwin } 919*dd2e1352SJohn Baldwin 920*dd2e1352SJohn Baldwin if (crp->crp_cipher_key != NULL) 921*dd2e1352SJohn Baldwin exf->setkey(swe->sw_kschedule, crp->crp_cipher_key, 922*dd2e1352SJohn Baldwin csp->csp_cipher_klen); 923*dd2e1352SJohn Baldwin exf->reinit(swe->sw_kschedule, crp->crp_iv); 924*dd2e1352SJohn Baldwin 925*dd2e1352SJohn Baldwin /* Do encryption with MAC */ 926*dd2e1352SJohn Baldwin crypto_cursor_init(&cc_in, &crp->crp_buf); 927*dd2e1352SJohn Baldwin crypto_cursor_advance(&cc_in, crp->crp_payload_start); 928*dd2e1352SJohn Baldwin if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) { 929*dd2e1352SJohn Baldwin crypto_cursor_init(&cc_out, &crp->crp_obuf); 930*dd2e1352SJohn Baldwin crypto_cursor_advance(&cc_out, crp->crp_payload_output_start); 931*dd2e1352SJohn Baldwin } else 932*dd2e1352SJohn Baldwin cc_out = cc_in; 933*dd2e1352SJohn Baldwin for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) { 934*dd2e1352SJohn Baldwin if (crypto_cursor_seglen(&cc_in) < blksz) { 935*dd2e1352SJohn Baldwin crypto_cursor_copydata(&cc_in, blksz, blk); 936*dd2e1352SJohn Baldwin inblk = blk; 937*dd2e1352SJohn Baldwin } else { 938*dd2e1352SJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 939*dd2e1352SJohn Baldwin crypto_cursor_advance(&cc_in, blksz); 940*dd2e1352SJohn Baldwin } 941*dd2e1352SJohn Baldwin if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 942*dd2e1352SJohn Baldwin if (crypto_cursor_seglen(&cc_out) < blksz) 943*dd2e1352SJohn Baldwin outblk = blk; 944*dd2e1352SJohn Baldwin else 945*dd2e1352SJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 946*dd2e1352SJohn Baldwin exf->encrypt(swe->sw_kschedule, inblk, outblk); 947*dd2e1352SJohn Baldwin axf->Update(&ctx, outblk, blksz); 948*dd2e1352SJohn Baldwin if (outblk == blk) 949*dd2e1352SJohn Baldwin crypto_cursor_copyback(&cc_out, blksz, blk); 950*dd2e1352SJohn Baldwin else 951*dd2e1352SJohn Baldwin crypto_cursor_advance(&cc_out, blksz); 952*dd2e1352SJohn Baldwin } else { 953*dd2e1352SJohn Baldwin axf->Update(&ctx, inblk, blksz); 954*dd2e1352SJohn Baldwin } 955*dd2e1352SJohn Baldwin } 956*dd2e1352SJohn Baldwin if (resid > 0) { 957*dd2e1352SJohn Baldwin crypto_cursor_copydata(&cc_in, resid, blk); 958*dd2e1352SJohn Baldwin if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 959*dd2e1352SJohn Baldwin exf->encrypt_last(swe->sw_kschedule, blk, blk, resid); 960*dd2e1352SJohn Baldwin crypto_cursor_copyback(&cc_out, resid, blk); 961*dd2e1352SJohn Baldwin } 962*dd2e1352SJohn Baldwin axf->Update(&ctx, blk, resid); 963*dd2e1352SJohn Baldwin if (resid % 16 != 0) { 964*dd2e1352SJohn Baldwin /* padding2 */ 965*dd2e1352SJohn 
Baldwin memset(blk, 0, 16); 966*dd2e1352SJohn Baldwin axf->Update(&ctx, blk, 16 - resid % 16); 967*dd2e1352SJohn Baldwin } 968*dd2e1352SJohn Baldwin } 969*dd2e1352SJohn Baldwin 970*dd2e1352SJohn Baldwin /* lengths */ 971*dd2e1352SJohn Baldwin blkp = (uint64_t *)blk; 972*dd2e1352SJohn Baldwin blkp[0] = htole64(crp->crp_aad_length); 973*dd2e1352SJohn Baldwin blkp[1] = htole64(crp->crp_payload_length); 974*dd2e1352SJohn Baldwin axf->Update(&ctx, blk, sizeof(uint64_t) * 2); 975*dd2e1352SJohn Baldwin 976*dd2e1352SJohn Baldwin /* Finalize MAC */ 977*dd2e1352SJohn Baldwin axf->Final(tag, &ctx); 978*dd2e1352SJohn Baldwin 979*dd2e1352SJohn Baldwin /* Validate tag */ 980*dd2e1352SJohn Baldwin error = 0; 981*dd2e1352SJohn Baldwin if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 982*dd2e1352SJohn Baldwin u_char tag2[POLY1305_HASH_LEN]; 983*dd2e1352SJohn Baldwin 984*dd2e1352SJohn Baldwin crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2); 985*dd2e1352SJohn Baldwin 986*dd2e1352SJohn Baldwin r = timingsafe_bcmp(tag, tag2, swa->sw_mlen); 987*dd2e1352SJohn Baldwin explicit_bzero(tag2, sizeof(tag2)); 988*dd2e1352SJohn Baldwin if (r != 0) { 989*dd2e1352SJohn Baldwin error = EBADMSG; 990*dd2e1352SJohn Baldwin goto out; 991*dd2e1352SJohn Baldwin } 992*dd2e1352SJohn Baldwin 993*dd2e1352SJohn Baldwin /* tag matches, decrypt data */ 994*dd2e1352SJohn Baldwin crypto_cursor_init(&cc_in, &crp->crp_buf); 995*dd2e1352SJohn Baldwin crypto_cursor_advance(&cc_in, crp->crp_payload_start); 996*dd2e1352SJohn Baldwin for (resid = crp->crp_payload_length; resid > blksz; 997*dd2e1352SJohn Baldwin resid -= blksz) { 998*dd2e1352SJohn Baldwin if (crypto_cursor_seglen(&cc_in) < blksz) { 999*dd2e1352SJohn Baldwin crypto_cursor_copydata(&cc_in, blksz, blk); 1000*dd2e1352SJohn Baldwin inblk = blk; 1001*dd2e1352SJohn Baldwin } else { 1002*dd2e1352SJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 1003*dd2e1352SJohn Baldwin crypto_cursor_advance(&cc_in, blksz); 1004*dd2e1352SJohn Baldwin } 1005*dd2e1352SJohn Baldwin if (crypto_cursor_seglen(&cc_out) < blksz) 1006*dd2e1352SJohn Baldwin outblk = blk; 1007*dd2e1352SJohn Baldwin else 1008*dd2e1352SJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 1009*dd2e1352SJohn Baldwin exf->decrypt(swe->sw_kschedule, inblk, outblk); 1010*dd2e1352SJohn Baldwin if (outblk == blk) 1011*dd2e1352SJohn Baldwin crypto_cursor_copyback(&cc_out, blksz, blk); 1012*dd2e1352SJohn Baldwin else 1013*dd2e1352SJohn Baldwin crypto_cursor_advance(&cc_out, blksz); 1014*dd2e1352SJohn Baldwin } 1015*dd2e1352SJohn Baldwin if (resid > 0) { 1016*dd2e1352SJohn Baldwin crypto_cursor_copydata(&cc_in, resid, blk); 1017*dd2e1352SJohn Baldwin exf->decrypt_last(swe->sw_kschedule, blk, blk, resid); 1018*dd2e1352SJohn Baldwin crypto_cursor_copyback(&cc_out, resid, blk); 1019*dd2e1352SJohn Baldwin } 1020*dd2e1352SJohn Baldwin } else { 1021*dd2e1352SJohn Baldwin /* Inject the authentication data */ 1022*dd2e1352SJohn Baldwin crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag); 1023*dd2e1352SJohn Baldwin } 1024*dd2e1352SJohn Baldwin 1025*dd2e1352SJohn Baldwin out: 1026*dd2e1352SJohn Baldwin explicit_bzero(blkbuf, sizeof(blkbuf)); 1027*dd2e1352SJohn Baldwin explicit_bzero(tag, sizeof(tag)); 1028*dd2e1352SJohn Baldwin explicit_bzero(&ctx, sizeof(ctx)); 1029*dd2e1352SJohn Baldwin return (error); 1030*dd2e1352SJohn Baldwin } 1031*dd2e1352SJohn Baldwin 1032091d81d1SSam Leffler /* 1033c0341432SJohn Baldwin * Apply a cipher and a digest to perform EtA. 
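
/*
 * This follows the ChaCha20-Poly1305 AEAD construction of RFC 8439: a
 * one-time Poly1305 key is derived from the cipher key and nonce, and the
 * MAC input is the AAD zero-padded to a 16-byte boundary, the ciphertext
 * zero-padded to a 16-byte boundary, and finally the AAD and ciphertext
 * lengths as two 64-bit little-endian values.
 */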

/*
 * Apply a cipher and a digest to perform EtA.
 */
static int
swcr_eta(struct swcr_session *ses, struct cryptop *crp)
{
	int error;

	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		error = swcr_encdec(ses, crp);
		if (error == 0)
			error = swcr_authcompute(ses, crp);
	} else {
		error = swcr_authcompute(ses, crp);
		if (error == 0)
			error = swcr_encdec(ses, crp);
	}
	return (error);
}

/*
 * Apply a compression/decompression algorithm.
 */
static int
swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
{
	uint8_t *data, *out;
	struct comp_algo *cxf;
	int adj;
	uint32_t result;

	cxf = ses->swcr_compdec.sw_cxf;

	/*
	 * We must handle the whole buffer of data at once; if the data
	 * is not contiguous, copy it into a temporary buffer first.
	 */

	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
	if (data == NULL)
		return (EINVAL);
	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
	    data);

	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
		result = cxf->compress(data, crp->crp_payload_length, &out);
	else
		result = cxf->decompress(data, crp->crp_payload_length, &out);

	free(data, M_CRYPTO_DATA);
	if (result == 0)
		return (EINVAL);
	crp->crp_olen = result;

	/* Check the compressed size when doing compression */
	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
		if (result >= crp->crp_payload_length) {
			/* Compression was useless, we lost time */
			free(out, M_CRYPTO_DATA);
			return (0);
		}
	}

	/*
	 * Copy back the (de)compressed data.  m_copyback extends the
	 * mbuf as necessary.
	 */
	crypto_copyback(crp, crp->crp_payload_start, result, out);
	if (result < crp->crp_payload_length) {
		switch (crp->crp_buf.cb_type) {
		case CRYPTO_BUF_MBUF:
			adj = result - crp->crp_payload_length;
			m_adj(crp->crp_buf.cb_mbuf, adj);
			break;
		case CRYPTO_BUF_UIO: {
			struct uio *uio = crp->crp_buf.cb_uio;
			int ind;

			adj = crp->crp_payload_length - result;
			ind = uio->uio_iovcnt - 1;

			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;
					break;
				}

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;
				ind--;
				uio->uio_iovcnt--;
			}
			}
			break;
		case CRYPTO_BUF_VMPAGE:
			adj = crp->crp_payload_length - result;
			crp->crp_buf.cb_vm_page_len -= adj;
			break;
		default:
			break;
		}
	}
	free(out, M_CRYPTO_DATA);
	return 0;
}

static int
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_encdec *swe;
	struct enc_xform *txf;
	int error;

	swe = &ses->swcr_encdec;
	txf = crypto_cipher(csp);
	MPASS(txf->ivsize == csp->csp_ivlen);
	if (txf->ctxsize != 0) {
		swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swe->sw_kschedule == NULL)
			return (ENOMEM);
	}
	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}
	swe->sw_exf = txf;
	return (0);
}
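
/*
 * A cipher key supplied in the session parameters is scheduled once here
 * at session setup; a request that carries its own key in crp_cipher_key
 * instead causes swcr_encdec() or swcr_chacha20_poly1305() to re-run
 * setkey on the per-session schedule before processing.
 */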
1175c0341432SJohn Baldwin if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize) 1176c0341432SJohn Baldwin return (EINVAL); 1177c0341432SJohn Baldwin if (csp->csp_auth_mlen == 0) 1178c0341432SJohn Baldwin swa->sw_mlen = axf->hashsize; 1179c0341432SJohn Baldwin else 1180c0341432SJohn Baldwin swa->sw_mlen = csp->csp_auth_mlen; 1181c0341432SJohn Baldwin swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT); 1182c0341432SJohn Baldwin if (swa->sw_ictx == NULL) 1183c0341432SJohn Baldwin return (ENOBUFS); 1184c0341432SJohn Baldwin 1185c0341432SJohn Baldwin switch (csp->csp_auth_alg) { 1186091d81d1SSam Leffler case CRYPTO_SHA1_HMAC: 1187c97f39ceSConrad Meyer case CRYPTO_SHA2_224_HMAC: 1188f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_256_HMAC: 1189f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_384_HMAC: 1190f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_512_HMAC: 1191091d81d1SSam Leffler case CRYPTO_NULL_HMAC: 1192091d81d1SSam Leffler case CRYPTO_RIPEMD160_HMAC: 11933a0b6a93SJohn Baldwin swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA, 1194091d81d1SSam Leffler M_NOWAIT); 1195c0341432SJohn Baldwin if (swa->sw_octx == NULL) 1196c0341432SJohn Baldwin return (ENOBUFS); 1197c0341432SJohn Baldwin 1198c0341432SJohn Baldwin if (csp->csp_auth_key != NULL) { 1199c0341432SJohn Baldwin swcr_authprepare(axf, swa, csp->csp_auth_key, 1200c0341432SJohn Baldwin csp->csp_auth_klen); 1201091d81d1SSam Leffler } 1202091d81d1SSam Leffler 1203c0341432SJohn Baldwin if (csp->csp_mode == CSP_MODE_DIGEST) 1204c0341432SJohn Baldwin ses->swcr_process = swcr_authcompute; 1205091d81d1SSam Leffler break; 1206091d81d1SSam Leffler case CRYPTO_SHA1: 1207c4729f6eSConrad Meyer case CRYPTO_SHA2_224: 1208c4729f6eSConrad Meyer case CRYPTO_SHA2_256: 1209c4729f6eSConrad Meyer case CRYPTO_SHA2_384: 1210c4729f6eSConrad Meyer case CRYPTO_SHA2_512: 1211c0341432SJohn Baldwin axf->Init(swa->sw_ictx); 1212c0341432SJohn Baldwin if (csp->csp_mode == CSP_MODE_DIGEST) 1213c0341432SJohn Baldwin ses->swcr_process = swcr_authcompute; 1214c0341432SJohn Baldwin break; 1215c0341432SJohn Baldwin case CRYPTO_AES_NIST_GMAC: 1216c0341432SJohn Baldwin axf->Init(swa->sw_ictx); 1217c0341432SJohn Baldwin axf->Setkey(swa->sw_ictx, csp->csp_auth_key, 1218c0341432SJohn Baldwin csp->csp_auth_klen); 1219c0341432SJohn Baldwin if (csp->csp_mode == CSP_MODE_DIGEST) 1220c0341432SJohn Baldwin ses->swcr_process = swcr_gmac; 1221c0341432SJohn Baldwin break; 1222c0341432SJohn Baldwin case CRYPTO_POLY1305: 1223c0341432SJohn Baldwin case CRYPTO_BLAKE2B: 1224c0341432SJohn Baldwin case CRYPTO_BLAKE2S: 1225c0341432SJohn Baldwin /* 1226c0341432SJohn Baldwin * Blake2b and Blake2s support an optional key but do 1227c0341432SJohn Baldwin * not require one. 
1228c0341432SJohn Baldwin */ 1229c0341432SJohn Baldwin if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL) 1230c0341432SJohn Baldwin axf->Setkey(swa->sw_ictx, csp->csp_auth_key, 1231c0341432SJohn Baldwin csp->csp_auth_klen); 1232c0341432SJohn Baldwin axf->Init(swa->sw_ictx); 1233c0341432SJohn Baldwin if (csp->csp_mode == CSP_MODE_DIGEST) 1234c0341432SJohn Baldwin ses->swcr_process = swcr_authcompute; 1235c0341432SJohn Baldwin break; 1236c0341432SJohn Baldwin case CRYPTO_AES_CCM_CBC_MAC: 1237c0341432SJohn Baldwin axf->Init(swa->sw_ictx); 1238c0341432SJohn Baldwin axf->Setkey(swa->sw_ictx, csp->csp_auth_key, 1239c0341432SJohn Baldwin csp->csp_auth_klen); 1240c0341432SJohn Baldwin if (csp->csp_mode == CSP_MODE_DIGEST) 1241c0341432SJohn Baldwin ses->swcr_process = swcr_ccm_cbc_mac; 1242c0341432SJohn Baldwin break; 1243091d81d1SSam Leffler } 1244091d81d1SSam Leffler 1245c0341432SJohn Baldwin return (0); 1246c0341432SJohn Baldwin } 124708fca7a5SJohn-Mark Gurney 1248c0341432SJohn Baldwin static int 1249c0341432SJohn Baldwin swcr_setup_gcm(struct swcr_session *ses, 1250c0341432SJohn Baldwin const struct crypto_session_params *csp) 1251c0341432SJohn Baldwin { 1252c0341432SJohn Baldwin struct swcr_auth *swa; 1253c0341432SJohn Baldwin struct auth_hash *axf; 1254c0341432SJohn Baldwin 1255c0341432SJohn Baldwin if (csp->csp_ivlen != AES_GCM_IV_LEN) 1256c0341432SJohn Baldwin return (EINVAL); 1257c0341432SJohn Baldwin 1258c0341432SJohn Baldwin /* First, setup the auth side. */ 1259c0341432SJohn Baldwin swa = &ses->swcr_auth; 1260c0341432SJohn Baldwin switch (csp->csp_cipher_klen * 8) { 1261c0341432SJohn Baldwin case 128: 1262c0341432SJohn Baldwin axf = &auth_hash_nist_gmac_aes_128; 1263c0341432SJohn Baldwin break; 1264c0341432SJohn Baldwin case 192: 1265c0341432SJohn Baldwin axf = &auth_hash_nist_gmac_aes_192; 1266c0341432SJohn Baldwin break; 1267c0341432SJohn Baldwin case 256: 1268c0341432SJohn Baldwin axf = &auth_hash_nist_gmac_aes_256; 1269c0341432SJohn Baldwin break; 1270c0341432SJohn Baldwin default: 1271c0341432SJohn Baldwin return (EINVAL); 1272c0341432SJohn Baldwin } 1273c0341432SJohn Baldwin swa->sw_axf = axf; 1274c0341432SJohn Baldwin if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize) 1275c0341432SJohn Baldwin return (EINVAL); 1276c0341432SJohn Baldwin if (csp->csp_auth_mlen == 0) 1277c0341432SJohn Baldwin swa->sw_mlen = axf->hashsize; 1278c0341432SJohn Baldwin else 1279c0341432SJohn Baldwin swa->sw_mlen = csp->csp_auth_mlen; 1280c0341432SJohn Baldwin swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT); 1281c0341432SJohn Baldwin if (swa->sw_ictx == NULL) 1282c0341432SJohn Baldwin return (ENOBUFS); 1283c0341432SJohn Baldwin axf->Init(swa->sw_ictx); 1284c0341432SJohn Baldwin if (csp->csp_cipher_key != NULL) 1285c0341432SJohn Baldwin axf->Setkey(swa->sw_ictx, csp->csp_cipher_key, 1286c0341432SJohn Baldwin csp->csp_cipher_klen); 1287c0341432SJohn Baldwin 1288c0341432SJohn Baldwin /* Second, setup the cipher side. 
*/ 12893e947048SJohn Baldwin return (swcr_setup_cipher(ses, csp)); 1290c0341432SJohn Baldwin } 1291c0341432SJohn Baldwin 1292c0341432SJohn Baldwin static int 1293c0341432SJohn Baldwin swcr_setup_ccm(struct swcr_session *ses, 1294c0341432SJohn Baldwin const struct crypto_session_params *csp) 1295c0341432SJohn Baldwin { 1296c0341432SJohn Baldwin struct swcr_auth *swa; 1297c0341432SJohn Baldwin struct auth_hash *axf; 1298c0341432SJohn Baldwin 1299c0341432SJohn Baldwin if (csp->csp_ivlen != AES_CCM_IV_LEN) 1300c0341432SJohn Baldwin return (EINVAL); 1301c0341432SJohn Baldwin 1302c0341432SJohn Baldwin /* First, setup the auth side. */ 1303c0341432SJohn Baldwin swa = &ses->swcr_auth; 1304c0341432SJohn Baldwin switch (csp->csp_cipher_klen * 8) { 1305507281e5SSean Eric Fagan case 128: 1306507281e5SSean Eric Fagan axf = &auth_hash_ccm_cbc_mac_128; 1307507281e5SSean Eric Fagan break; 1308507281e5SSean Eric Fagan case 192: 1309507281e5SSean Eric Fagan axf = &auth_hash_ccm_cbc_mac_192; 1310507281e5SSean Eric Fagan break; 1311507281e5SSean Eric Fagan case 256: 1312507281e5SSean Eric Fagan axf = &auth_hash_ccm_cbc_mac_256; 1313507281e5SSean Eric Fagan break; 1314507281e5SSean Eric Fagan default: 1315c0341432SJohn Baldwin return (EINVAL); 1316507281e5SSean Eric Fagan } 1317c0341432SJohn Baldwin swa->sw_axf = axf; 1318c0341432SJohn Baldwin if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize) 1319c0341432SJohn Baldwin return (EINVAL); 1320c0341432SJohn Baldwin if (csp->csp_auth_mlen == 0) 1321c0341432SJohn Baldwin swa->sw_mlen = axf->hashsize; 1322c0341432SJohn Baldwin else 1323c0341432SJohn Baldwin swa->sw_mlen = csp->csp_auth_mlen; 1324c0341432SJohn Baldwin swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT); 1325c0341432SJohn Baldwin if (swa->sw_ictx == NULL) 1326c0341432SJohn Baldwin return (ENOBUFS); 1327c0341432SJohn Baldwin axf->Init(swa->sw_ictx); 1328c0341432SJohn Baldwin if (csp->csp_cipher_key != NULL) 1329c0341432SJohn Baldwin axf->Setkey(swa->sw_ictx, csp->csp_cipher_key, 1330c0341432SJohn Baldwin csp->csp_cipher_klen); 133108fca7a5SJohn-Mark Gurney 1332c0341432SJohn Baldwin /* Second, setup the cipher side. */ 13333e947048SJohn Baldwin return (swcr_setup_cipher(ses, csp)); 13342e2e26d1SJohn Baldwin } 1335a2bc81bfSJohn-Mark Gurney 1336*dd2e1352SJohn Baldwin static int 1337*dd2e1352SJohn Baldwin swcr_setup_chacha20_poly1305(struct swcr_session *ses, 1338*dd2e1352SJohn Baldwin const struct crypto_session_params *csp) 1339*dd2e1352SJohn Baldwin { 1340*dd2e1352SJohn Baldwin struct swcr_auth *swa; 1341*dd2e1352SJohn Baldwin struct auth_hash *axf; 1342*dd2e1352SJohn Baldwin 1343*dd2e1352SJohn Baldwin if (csp->csp_ivlen != CHACHA20_POLY1305_IV_LEN) 1344*dd2e1352SJohn Baldwin return (EINVAL); 1345*dd2e1352SJohn Baldwin 1346*dd2e1352SJohn Baldwin /* First, setup the auth side. */ 1347*dd2e1352SJohn Baldwin swa = &ses->swcr_auth; 1348*dd2e1352SJohn Baldwin axf = &auth_hash_chacha20_poly1305; 1349*dd2e1352SJohn Baldwin swa->sw_axf = axf; 1350*dd2e1352SJohn Baldwin if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize) 1351*dd2e1352SJohn Baldwin return (EINVAL); 1352*dd2e1352SJohn Baldwin if (csp->csp_auth_mlen == 0) 1353*dd2e1352SJohn Baldwin swa->sw_mlen = axf->hashsize; 1354*dd2e1352SJohn Baldwin else 1355*dd2e1352SJohn Baldwin swa->sw_mlen = csp->csp_auth_mlen; 1356*dd2e1352SJohn Baldwin 1357*dd2e1352SJohn Baldwin /* The auth state is regenerated for each nonce. */ 1358*dd2e1352SJohn Baldwin 1359*dd2e1352SJohn Baldwin /* Second, setup the cipher side. 
*/ 1360*dd2e1352SJohn Baldwin return (swcr_setup_cipher(ses, csp)); 1361*dd2e1352SJohn Baldwin } 1362*dd2e1352SJohn Baldwin 1363c0341432SJohn Baldwin static bool 1364c0341432SJohn Baldwin swcr_auth_supported(const struct crypto_session_params *csp) 1365109919c6SBenno Rice { 1366091d81d1SSam Leffler struct auth_hash *axf; 1367091d81d1SSam Leffler 1368c0341432SJohn Baldwin axf = crypto_auth_hash(csp); 1369c0341432SJohn Baldwin if (axf == NULL) 1370c0341432SJohn Baldwin return (false); 1371c0341432SJohn Baldwin switch (csp->csp_auth_alg) { 1372091d81d1SSam Leffler case CRYPTO_SHA1_HMAC: 1373c97f39ceSConrad Meyer case CRYPTO_SHA2_224_HMAC: 1374f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_256_HMAC: 1375f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_384_HMAC: 1376f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_512_HMAC: 1377091d81d1SSam Leffler case CRYPTO_NULL_HMAC: 1378c0341432SJohn Baldwin case CRYPTO_RIPEMD160_HMAC: 1379091d81d1SSam Leffler break; 1380c0341432SJohn Baldwin case CRYPTO_AES_NIST_GMAC: 1381c0341432SJohn Baldwin switch (csp->csp_auth_klen * 8) { 1382c0341432SJohn Baldwin case 128: 1383c0341432SJohn Baldwin case 192: 1384c0341432SJohn Baldwin case 256: 1385c0341432SJohn Baldwin break; 1386c0341432SJohn Baldwin default: 1387c0341432SJohn Baldwin return (false); 1388c0341432SJohn Baldwin } 1389c0341432SJohn Baldwin if (csp->csp_auth_key == NULL) 1390c0341432SJohn Baldwin return (false); 1391c0341432SJohn Baldwin if (csp->csp_ivlen != AES_GCM_IV_LEN) 1392c0341432SJohn Baldwin return (false); 1393c0341432SJohn Baldwin break; 139425b7033bSConrad Meyer case CRYPTO_POLY1305: 1395c0341432SJohn Baldwin if (csp->csp_auth_klen != POLY1305_KEY_LEN) 1396c0341432SJohn Baldwin return (false); 1397c0341432SJohn Baldwin break; 1398c0341432SJohn Baldwin case CRYPTO_AES_CCM_CBC_MAC: 1399c0341432SJohn Baldwin switch (csp->csp_auth_klen * 8) { 1400c0341432SJohn Baldwin case 128: 1401c0341432SJohn Baldwin case 192: 1402c0341432SJohn Baldwin case 256: 1403c0341432SJohn Baldwin break; 1404c0341432SJohn Baldwin default: 1405c0341432SJohn Baldwin return (false); 1406c0341432SJohn Baldwin } 1407c0341432SJohn Baldwin if (csp->csp_auth_key == NULL) 1408c0341432SJohn Baldwin return (false); 1409c0341432SJohn Baldwin if (csp->csp_ivlen != AES_CCM_IV_LEN) 1410c0341432SJohn Baldwin return (false); 1411c0341432SJohn Baldwin break; 1412c0341432SJohn Baldwin } 1413c0341432SJohn Baldwin return (true); 1414c0341432SJohn Baldwin } 1415091d81d1SSam Leffler 1416c0341432SJohn Baldwin static bool 1417c0341432SJohn Baldwin swcr_cipher_supported(const struct crypto_session_params *csp) 1418c0341432SJohn Baldwin { 1419c0341432SJohn Baldwin struct enc_xform *txf; 1420c0341432SJohn Baldwin 1421c0341432SJohn Baldwin txf = crypto_cipher(csp); 1422c0341432SJohn Baldwin if (txf == NULL) 1423c0341432SJohn Baldwin return (false); 1424c0341432SJohn Baldwin if (csp->csp_cipher_alg != CRYPTO_NULL_CBC && 1425c0341432SJohn Baldwin txf->ivsize != csp->csp_ivlen) 1426c0341432SJohn Baldwin return (false); 1427c0341432SJohn Baldwin return (true); 1428c0341432SJohn Baldwin } 1429c0341432SJohn Baldwin 14306038018aSMarcin Wojtas #define SUPPORTED_SES (CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD | CSP_F_ESN) 14316038018aSMarcin Wojtas 1432c0341432SJohn Baldwin static int 1433c0341432SJohn Baldwin swcr_probesession(device_t dev, const struct crypto_session_params *csp) 1434c0341432SJohn Baldwin { 14356038018aSMarcin Wojtas if ((csp->csp_flags & ~(SUPPORTED_SES)) != 0) 1436c0341432SJohn Baldwin return (EINVAL); 1437c0341432SJohn Baldwin switch 
(csp->csp_mode) { 1438c0341432SJohn Baldwin case CSP_MODE_COMPRESS: 1439c0341432SJohn Baldwin switch (csp->csp_cipher_alg) { 1440c0341432SJohn Baldwin case CRYPTO_DEFLATE_COMP: 1441c0341432SJohn Baldwin break; 1442c0341432SJohn Baldwin default: 1443c0341432SJohn Baldwin return (EINVAL); 14445fbc5b5aSConrad Meyer } 1445091d81d1SSam Leffler break; 1446c0341432SJohn Baldwin case CSP_MODE_CIPHER: 1447c0341432SJohn Baldwin switch (csp->csp_cipher_alg) { 1448c0341432SJohn Baldwin case CRYPTO_AES_NIST_GCM_16: 1449c0341432SJohn Baldwin case CRYPTO_AES_CCM_16: 1450*dd2e1352SJohn Baldwin case CRYPTO_CHACHA20_POLY1305: 1451c0341432SJohn Baldwin return (EINVAL); 1452c0341432SJohn Baldwin default: 1453c0341432SJohn Baldwin if (!swcr_cipher_supported(csp)) 1454c0341432SJohn Baldwin return (EINVAL); 1455091d81d1SSam Leffler break; 1456091d81d1SSam Leffler } 1457c0341432SJohn Baldwin break; 1458c0341432SJohn Baldwin case CSP_MODE_DIGEST: 1459c0341432SJohn Baldwin if (!swcr_auth_supported(csp)) 1460c0341432SJohn Baldwin return (EINVAL); 1461c0341432SJohn Baldwin break; 1462c0341432SJohn Baldwin case CSP_MODE_AEAD: 1463c0341432SJohn Baldwin switch (csp->csp_cipher_alg) { 1464c0341432SJohn Baldwin case CRYPTO_AES_NIST_GCM_16: 1465c0341432SJohn Baldwin case CRYPTO_AES_CCM_16: 1466*dd2e1352SJohn Baldwin case CRYPTO_CHACHA20_POLY1305: 1467c0341432SJohn Baldwin break; 1468c0341432SJohn Baldwin default: 1469c0341432SJohn Baldwin return (EINVAL); 1470c0341432SJohn Baldwin } 1471c0341432SJohn Baldwin break; 1472c0341432SJohn Baldwin case CSP_MODE_ETA: 1473c0341432SJohn Baldwin /* AEAD algorithms cannot be used for EtA. */ 1474c0341432SJohn Baldwin switch (csp->csp_cipher_alg) { 1475c0341432SJohn Baldwin case CRYPTO_AES_NIST_GCM_16: 1476c0341432SJohn Baldwin case CRYPTO_AES_CCM_16: 1477*dd2e1352SJohn Baldwin case CRYPTO_CHACHA20_POLY1305: 1478c0341432SJohn Baldwin return (EINVAL); 1479c0341432SJohn Baldwin } 1480c0341432SJohn Baldwin switch (csp->csp_auth_alg) { 1481c0341432SJohn Baldwin case CRYPTO_AES_NIST_GMAC: 1482c0341432SJohn Baldwin case CRYPTO_AES_CCM_CBC_MAC: 1483c0341432SJohn Baldwin return (EINVAL); 1484c0341432SJohn Baldwin } 1485c0341432SJohn Baldwin 1486c0341432SJohn Baldwin if (!swcr_cipher_supported(csp) || 1487c0341432SJohn Baldwin !swcr_auth_supported(csp)) 1488c0341432SJohn Baldwin return (EINVAL); 1489c0341432SJohn Baldwin break; 1490c0341432SJohn Baldwin default: 1491c0341432SJohn Baldwin return (EINVAL); 1492c0341432SJohn Baldwin } 1493c0341432SJohn Baldwin 1494c0341432SJohn Baldwin return (CRYPTODEV_PROBE_SOFTWARE); 1495c0341432SJohn Baldwin } 1496c0341432SJohn Baldwin 1497c0341432SJohn Baldwin /* 1498c0341432SJohn Baldwin * Generate a new software session. 
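 *
 * For illustration, a consumer might request an AEAD session that this
 * driver services with swcr_chacha20_poly1305(), as sketched below.  The
 * example_* name, key handling, and error handling are hypothetical; only
 * the crypto_session_params fields consumed by swcr_newsession() below
 * are meaningful, and the crypto_newsession(9) call is assumed.
 */
#ifdef notdef
static int
example_chacha20_poly1305_session(crypto_session_t *csesp,
    const uint8_t key[32])
{
	struct crypto_session_params csp;

	memset(&csp, 0, sizeof(csp));
	csp.csp_mode = CSP_MODE_AEAD;
	csp.csp_cipher_alg = CRYPTO_CHACHA20_POLY1305;
	csp.csp_cipher_key = key;
	csp.csp_cipher_klen = 32;			/* 256-bit key */
	csp.csp_ivlen = CHACHA20_POLY1305_IV_LEN;	/* 12-byte nonce */
	/* csp_auth_mlen == 0 requests the full Poly1305 tag. */
	return (crypto_newsession(csesp, &csp, CRYPTOCAP_F_SOFTWARE));
}
#endif /* notdef */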
1499c0341432SJohn Baldwin */ 1500c0341432SJohn Baldwin static int 1501c0341432SJohn Baldwin swcr_newsession(device_t dev, crypto_session_t cses, 1502c0341432SJohn Baldwin const struct crypto_session_params *csp) 1503c0341432SJohn Baldwin { 1504c0341432SJohn Baldwin struct swcr_session *ses; 1505c0341432SJohn Baldwin struct swcr_encdec *swe; 1506c0341432SJohn Baldwin struct swcr_auth *swa; 1507c0341432SJohn Baldwin struct comp_algo *cxf; 1508c0341432SJohn Baldwin int error; 1509c0341432SJohn Baldwin 1510c0341432SJohn Baldwin ses = crypto_get_driver_session(cses); 1511c0341432SJohn Baldwin mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF); 1512c0341432SJohn Baldwin 1513c0341432SJohn Baldwin error = 0; 1514c0341432SJohn Baldwin swe = &ses->swcr_encdec; 1515c0341432SJohn Baldwin swa = &ses->swcr_auth; 1516c0341432SJohn Baldwin switch (csp->csp_mode) { 1517c0341432SJohn Baldwin case CSP_MODE_COMPRESS: 1518c0341432SJohn Baldwin switch (csp->csp_cipher_alg) { 1519c0341432SJohn Baldwin case CRYPTO_DEFLATE_COMP: 1520c0341432SJohn Baldwin cxf = &comp_algo_deflate; 1521c0341432SJohn Baldwin break; 1522c0341432SJohn Baldwin #ifdef INVARIANTS 1523c0341432SJohn Baldwin default: 1524c0341432SJohn Baldwin panic("bad compression algo"); 1525c0341432SJohn Baldwin #endif 1526c0341432SJohn Baldwin } 1527c0341432SJohn Baldwin ses->swcr_compdec.sw_cxf = cxf; 1528c0341432SJohn Baldwin ses->swcr_process = swcr_compdec; 1529c0341432SJohn Baldwin break; 1530c0341432SJohn Baldwin case CSP_MODE_CIPHER: 1531c0341432SJohn Baldwin switch (csp->csp_cipher_alg) { 1532c0341432SJohn Baldwin case CRYPTO_NULL_CBC: 1533c0341432SJohn Baldwin ses->swcr_process = swcr_null; 1534c0341432SJohn Baldwin break; 1535c0341432SJohn Baldwin #ifdef INVARIANTS 1536c0341432SJohn Baldwin case CRYPTO_AES_NIST_GCM_16: 1537c0341432SJohn Baldwin case CRYPTO_AES_CCM_16: 1538*dd2e1352SJohn Baldwin case CRYPTO_CHACHA20_POLY1305: 1539c0341432SJohn Baldwin panic("bad cipher algo"); 1540c0341432SJohn Baldwin #endif 1541c0341432SJohn Baldwin default: 15423e947048SJohn Baldwin error = swcr_setup_cipher(ses, csp); 1543c0341432SJohn Baldwin if (error == 0) 1544c0341432SJohn Baldwin ses->swcr_process = swcr_encdec; 1545c0341432SJohn Baldwin } 1546c0341432SJohn Baldwin break; 1547c0341432SJohn Baldwin case CSP_MODE_DIGEST: 1548c0341432SJohn Baldwin error = swcr_setup_auth(ses, csp); 1549c0341432SJohn Baldwin break; 1550c0341432SJohn Baldwin case CSP_MODE_AEAD: 1551c0341432SJohn Baldwin switch (csp->csp_cipher_alg) { 1552c0341432SJohn Baldwin case CRYPTO_AES_NIST_GCM_16: 1553c0341432SJohn Baldwin error = swcr_setup_gcm(ses, csp); 1554c0341432SJohn Baldwin if (error == 0) 1555c0341432SJohn Baldwin ses->swcr_process = swcr_gcm; 1556c0341432SJohn Baldwin break; 1557c0341432SJohn Baldwin case CRYPTO_AES_CCM_16: 1558c0341432SJohn Baldwin error = swcr_setup_ccm(ses, csp); 1559c0341432SJohn Baldwin if (error == 0) 1560c0341432SJohn Baldwin ses->swcr_process = swcr_ccm; 1561c0341432SJohn Baldwin break; 1562*dd2e1352SJohn Baldwin case CRYPTO_CHACHA20_POLY1305: 1563*dd2e1352SJohn Baldwin error = swcr_setup_chacha20_poly1305(ses, csp); 1564*dd2e1352SJohn Baldwin if (error == 0) 1565*dd2e1352SJohn Baldwin ses->swcr_process = swcr_chacha20_poly1305; 1566*dd2e1352SJohn Baldwin break; 1567c0341432SJohn Baldwin #ifdef INVARIANTS 1568c0341432SJohn Baldwin default: 1569c0341432SJohn Baldwin panic("bad aead algo"); 1570c0341432SJohn Baldwin #endif 1571c0341432SJohn Baldwin } 1572c0341432SJohn Baldwin break; 1573c0341432SJohn Baldwin case CSP_MODE_ETA: 
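	/*
	 * ETA ("encrypt-then-authenticate") pairs an independent cipher and
	 * MAC: swcr_eta() encrypts before computing the MAC on encryption
	 * and handles the MAC before decrypting on the reverse path.  A
	 * hedged sketch of a consumer requesting such a session (AES-CBC
	 * with HMAC-SHA2-256; the example_* name, key buffers and the
	 * truncation length are illustrative assumptions):
	 */
#ifdef notdef
static int
example_eta_session(crypto_session_t *csesp, const uint8_t *ekey,
    const uint8_t *akey)
{
	struct crypto_session_params csp;

	memset(&csp, 0, sizeof(csp));
	csp.csp_mode = CSP_MODE_ETA;
	csp.csp_cipher_alg = CRYPTO_AES_CBC;
	csp.csp_cipher_key = ekey;
	csp.csp_cipher_klen = 16;	/* AES-128 */
	csp.csp_ivlen = 16;		/* AES-CBC IV is one cipher block */
	csp.csp_auth_alg = CRYPTO_SHA2_256_HMAC;
	csp.csp_auth_key = akey;
	csp.csp_auth_klen = 32;
	csp.csp_auth_mlen = 16;		/* truncated ICV, IPsec-style */
	return (crypto_newsession(csesp, &csp, CRYPTOCAP_F_SOFTWARE));
}
#endif /* notdef */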
1574c0341432SJohn Baldwin #ifdef INVARIANTS 1575c0341432SJohn Baldwin switch (csp->csp_cipher_alg) { 1576c0341432SJohn Baldwin case CRYPTO_AES_NIST_GCM_16: 1577c0341432SJohn Baldwin case CRYPTO_AES_CCM_16: 1578*dd2e1352SJohn Baldwin case CRYPTO_CHACHA20_POLY1305: 1579c0341432SJohn Baldwin panic("bad eta cipher algo"); 1580c0341432SJohn Baldwin } 1581c0341432SJohn Baldwin switch (csp->csp_auth_alg) { 1582c0341432SJohn Baldwin case CRYPTO_AES_NIST_GMAC: 1583c0341432SJohn Baldwin case CRYPTO_AES_CCM_CBC_MAC: 1584c0341432SJohn Baldwin panic("bad eta auth algo"); 1585c0341432SJohn Baldwin } 1586c0341432SJohn Baldwin #endif 1587c0341432SJohn Baldwin 1588c0341432SJohn Baldwin error = swcr_setup_auth(ses, csp); 1589c0341432SJohn Baldwin if (error) 1590c0341432SJohn Baldwin break; 1591c0341432SJohn Baldwin if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) { 1592c0341432SJohn Baldwin /* Effectively degrade to digest mode. */ 1593c0341432SJohn Baldwin ses->swcr_process = swcr_authcompute; 1594c0341432SJohn Baldwin break; 1595c0341432SJohn Baldwin } 1596c0341432SJohn Baldwin 15973e947048SJohn Baldwin error = swcr_setup_cipher(ses, csp); 1598c0341432SJohn Baldwin if (error == 0) 1599c0341432SJohn Baldwin ses->swcr_process = swcr_eta; 1600c0341432SJohn Baldwin break; 1601c0341432SJohn Baldwin default: 1602c0341432SJohn Baldwin error = EINVAL; 1603c0341432SJohn Baldwin } 1604c0341432SJohn Baldwin 1605c0341432SJohn Baldwin if (error) 1606c0341432SJohn Baldwin swcr_freesession(dev, cses); 1607c0341432SJohn Baldwin return (error); 1608c0341432SJohn Baldwin } 1609c0341432SJohn Baldwin 1610c0341432SJohn Baldwin static void 1611c0341432SJohn Baldwin swcr_freesession(device_t dev, crypto_session_t cses) 1612c0341432SJohn Baldwin { 1613c0341432SJohn Baldwin struct swcr_session *ses; 1614c0341432SJohn Baldwin 1615c0341432SJohn Baldwin ses = crypto_get_driver_session(cses); 1616c0341432SJohn Baldwin 1617c0341432SJohn Baldwin mtx_destroy(&ses->swcr_lock); 1618c0341432SJohn Baldwin 16193e947048SJohn Baldwin zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA); 16204a711b8dSJohn Baldwin zfree(ses->swcr_auth.sw_ictx, M_CRYPTO_DATA); 16214a711b8dSJohn Baldwin zfree(ses->swcr_auth.sw_octx, M_CRYPTO_DATA); 1622091d81d1SSam Leffler } 1623091d81d1SSam Leffler 1624091d81d1SSam Leffler /* 1625091d81d1SSam Leffler * Process a software request. 
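 *
 * Requests reach swcr_process() via crypto_dispatch() after a consumer
 * fills in a struct cryptop against an established session; completion is
 * reported through crp_etype and crypto_done(), which invokes the
 * consumer's crp_callback.  A hedged sketch of submitting a digest
 * request follows.  The example_* names and the buffer layout (payload
 * followed by room for the MAC) are illustrative, and the
 * crypto_getreq()/crypto_use_buf() helpers of this OCF revision are
 * assumed.
 */
#ifdef notdef
static int
example_digest_done(struct cryptop *crp)
{

	/* crp_etype carries the error code set by swcr_process(). */
	if (crp->crp_etype != 0)
		printf("digest request failed: %d\n", crp->crp_etype);
	crypto_freereq(crp);
	return (0);
}

static int
example_submit_digest(crypto_session_t cses, void *buf, int paylen, int maclen)
{
	struct cryptop *crp;

	crp = crypto_getreq(cses, M_NOWAIT);
	if (crp == NULL)
		return (ENOMEM);
	crp->crp_op = CRYPTO_OP_COMPUTE_DIGEST;
	crypto_use_buf(crp, buf, paylen + maclen);
	crp->crp_payload_start = 0;
	crp->crp_payload_length = paylen;
	crp->crp_digest_start = paylen;	/* MAC is written after the payload */
	crp->crp_callback = example_digest_done;
	return (crypto_dispatch(crp));
}
#endif /* notdef */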
1626091d81d1SSam Leffler */ 1627091d81d1SSam Leffler static int 16286810ad6fSSam Leffler swcr_process(device_t dev, struct cryptop *crp, int hint) 1629091d81d1SSam Leffler { 1630c0341432SJohn Baldwin struct swcr_session *ses; 1631091d81d1SSam Leffler 16321b0909d5SConrad Meyer ses = crypto_get_driver_session(crp->crp_session); 1633a7fcb1afSSean Eric Fagan mtx_lock(&ses->swcr_lock); 1634091d81d1SSam Leffler 1635c0341432SJohn Baldwin crp->crp_etype = ses->swcr_process(ses, crp); 1636091d81d1SSam Leffler 1637a7fcb1afSSean Eric Fagan mtx_unlock(&ses->swcr_lock); 1638091d81d1SSam Leffler crypto_done(crp); 1639c0341432SJohn Baldwin return (0); 1640091d81d1SSam Leffler } 1641091d81d1SSam Leffler 1642091d81d1SSam Leffler static void 16433f147ab2SWarner Losh swcr_identify(driver_t *drv, device_t parent) 1644091d81d1SSam Leffler { 16456810ad6fSSam Leffler /* NB: order 10 is so we get attached after h/w devices */ 16466810ad6fSSam Leffler if (device_find_child(parent, "cryptosoft", -1) == NULL && 164786c585d9SMarius Strobl BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0) 16486810ad6fSSam Leffler panic("cryptosoft: could not attach"); 16496810ad6fSSam Leffler } 1650f6c4bc3bSPawel Jakub Dawidek 16516810ad6fSSam Leffler static int 16526810ad6fSSam Leffler swcr_probe(device_t dev) 16536810ad6fSSam Leffler { 16546810ad6fSSam Leffler device_set_desc(dev, "software crypto"); 165586c585d9SMarius Strobl return (BUS_PROBE_NOWILDCARD); 16566810ad6fSSam Leffler } 1657f6c4bc3bSPawel Jakub Dawidek 16586810ad6fSSam Leffler static int 16596810ad6fSSam Leffler swcr_attach(device_t dev) 16606810ad6fSSam Leffler { 16616810ad6fSSam Leffler 16629ebbebe4SConrad Meyer swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session), 16636810ad6fSSam Leffler CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC); 16646810ad6fSSam Leffler if (swcr_id < 0) { 16656810ad6fSSam Leffler device_printf(dev, "cannot initialize!"); 1666c0341432SJohn Baldwin return (ENXIO); 16676810ad6fSSam Leffler } 16686810ad6fSSam Leffler 1669c0341432SJohn Baldwin return (0); 1670091d81d1SSam Leffler } 16714b465da2SPawel Jakub Dawidek 16723f147ab2SWarner Losh static int 16736810ad6fSSam Leffler swcr_detach(device_t dev) 16744b465da2SPawel Jakub Dawidek { 16756810ad6fSSam Leffler crypto_unregister_all(swcr_id); 16763f147ab2SWarner Losh return 0; 16774b465da2SPawel Jakub Dawidek } 16786810ad6fSSam Leffler 16796810ad6fSSam Leffler static device_method_t swcr_methods[] = { 16806810ad6fSSam Leffler DEVMETHOD(device_identify, swcr_identify), 16816810ad6fSSam Leffler DEVMETHOD(device_probe, swcr_probe), 16826810ad6fSSam Leffler DEVMETHOD(device_attach, swcr_attach), 16836810ad6fSSam Leffler DEVMETHOD(device_detach, swcr_detach), 16846810ad6fSSam Leffler 1685c0341432SJohn Baldwin DEVMETHOD(cryptodev_probesession, swcr_probesession), 16866810ad6fSSam Leffler DEVMETHOD(cryptodev_newsession, swcr_newsession), 16876810ad6fSSam Leffler DEVMETHOD(cryptodev_freesession,swcr_freesession), 16886810ad6fSSam Leffler DEVMETHOD(cryptodev_process, swcr_process), 16896810ad6fSSam Leffler 16906810ad6fSSam Leffler {0, 0}, 16916810ad6fSSam Leffler }; 16926810ad6fSSam Leffler 16936810ad6fSSam Leffler static driver_t swcr_driver = { 16946810ad6fSSam Leffler "cryptosoft", 16956810ad6fSSam Leffler swcr_methods, 16966810ad6fSSam Leffler 0, /* NB: no softc */ 16976810ad6fSSam Leffler }; 16986810ad6fSSam Leffler static devclass_t swcr_devclass; 16996810ad6fSSam Leffler 17006810ad6fSSam Leffler /* 17016810ad6fSSam Leffler * NB: We explicitly reference the crypto module so we 17026810ad6fSSam 
Leffler * get the necessary ordering when built as a loadable
17036810ad6fSSam Leffler * module. This is required because we bundle the crypto
17046810ad6fSSam Leffler * module code together with the cryptosoft driver (otherwise
17056810ad6fSSam Leffler * normal module dependencies would handle things).
17066810ad6fSSam Leffler */
17076810ad6fSSam Leffler extern int crypto_modevent(struct module *, int, void *);
17086810ad6fSSam Leffler /* XXX where to attach */
17096810ad6fSSam Leffler DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
17106810ad6fSSam Leffler MODULE_VERSION(cryptosoft, 1);
17116810ad6fSSam Leffler MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);
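/*
 * By contrast, an ordinary consumer of this driver does not need the
 * explicit crypto_modevent reference above; as the comment notes, a
 * normal module dependency is enough to guarantee load ordering.  A
 * minimal, hedged consumer skeleton follows.  The example_* names,
 * SI_SUB_DRIVERS placement, and the choice of a keyless DEFLATE
 * compression session are illustrative assumptions, not part of this
 * driver.
 */
#ifdef notdef
static crypto_session_t example_cses;

static int
example_modevent(module_t mod, int type, void *arg)
{
	struct crypto_session_params csp;

	switch (type) {
	case MOD_LOAD:
		memset(&csp, 0, sizeof(csp));
		csp.csp_mode = CSP_MODE_COMPRESS;
		csp.csp_cipher_alg = CRYPTO_DEFLATE_COMP;
		return (crypto_newsession(&example_cses, &csp,
		    CRYPTOCAP_F_SOFTWARE));
	case MOD_UNLOAD:
		crypto_freesession(example_cses);
		return (0);
	default:
		return (EOPNOTSUPP);
	}
}

static moduledata_t example_mod = {
	"crypto_example",
	example_modevent,
	NULL
};
DECLARE_MODULE(crypto_example, example_mod, SI_SUB_DRIVERS, SI_ORDER_ANY);
MODULE_DEPEND(crypto_example, crypto, 1, 1, 1);
#endif /* notdef */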