xref: /freebsd/sys/opencrypto/cryptosoft.c (revision 26d292d3e2fc0b7883d246f7a0834aa802be4ef5)
1091d81d1SSam Leffler /*	$OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $	*/
2091d81d1SSam Leffler 
360727d8bSWarner Losh /*-
4091d81d1SSam Leffler  * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
56810ad6fSSam Leffler  * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
6091d81d1SSam Leffler  *
7091d81d1SSam Leffler  * This code was written by Angelos D. Keromytis in Athens, Greece, in
8091d81d1SSam Leffler  * February 2000. Network Security Technologies Inc. (NSTI) kindly
9091d81d1SSam Leffler  * supported the development of this code.
10091d81d1SSam Leffler  *
11091d81d1SSam Leffler  * Copyright (c) 2000, 2001 Angelos D. Keromytis
1208fca7a5SJohn-Mark Gurney  * Copyright (c) 2014 The FreeBSD Foundation
1308fca7a5SJohn-Mark Gurney  * All rights reserved.
1408fca7a5SJohn-Mark Gurney  *
1508fca7a5SJohn-Mark Gurney  * Portions of this software were developed by John-Mark Gurney
1608fca7a5SJohn-Mark Gurney  * under sponsorship of the FreeBSD Foundation and
1708fca7a5SJohn-Mark Gurney  * Rubicon Communications, LLC (Netgate).
18091d81d1SSam Leffler  *
19091d81d1SSam Leffler  * Permission to use, copy, and modify this software with or without fee
20091d81d1SSam Leffler  * is hereby granted, provided that this entire notice is included in
21091d81d1SSam Leffler  * all source code copies of any software which is or includes a copy or
22091d81d1SSam Leffler  * modification of this software.
23091d81d1SSam Leffler  *
24091d81d1SSam Leffler  * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25091d81d1SSam Leffler  * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26091d81d1SSam Leffler  * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27091d81d1SSam Leffler  * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
28091d81d1SSam Leffler  * PURPOSE.
29091d81d1SSam Leffler  */
30091d81d1SSam Leffler 
312c446514SDavid E. O'Brien #include <sys/cdefs.h>
322c446514SDavid E. O'Brien __FBSDID("$FreeBSD$");
332c446514SDavid E. O'Brien 
34091d81d1SSam Leffler #include <sys/param.h>
35091d81d1SSam Leffler #include <sys/systm.h>
36091d81d1SSam Leffler #include <sys/malloc.h>
37091d81d1SSam Leffler #include <sys/mbuf.h>
386810ad6fSSam Leffler #include <sys/module.h>
39091d81d1SSam Leffler #include <sys/sysctl.h>
40091d81d1SSam Leffler #include <sys/errno.h>
41091d81d1SSam Leffler #include <sys/random.h>
42091d81d1SSam Leffler #include <sys/kernel.h>
43091d81d1SSam Leffler #include <sys/uio.h>
44109919c6SBenno Rice #include <sys/lock.h>
45109919c6SBenno Rice #include <sys/rwlock.h>
4608fca7a5SJohn-Mark Gurney #include <sys/endian.h>
4708fca7a5SJohn-Mark Gurney #include <sys/limits.h>
48a7fcb1afSSean Eric Fagan #include <sys/mutex.h>
49091d81d1SSam Leffler 
50091d81d1SSam Leffler #include <crypto/sha1.h>
51091d81d1SSam Leffler #include <opencrypto/rmd160.h>
52091d81d1SSam Leffler 
53091d81d1SSam Leffler #include <opencrypto/cryptodev.h>
54091d81d1SSam Leffler #include <opencrypto/xform.h>
55091d81d1SSam Leffler 
566810ad6fSSam Leffler #include <sys/kobj.h>
576810ad6fSSam Leffler #include <sys/bus.h>
586810ad6fSSam Leffler #include "cryptodev_if.h"
59091d81d1SSam Leffler 
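/*
 * Per-session software state: swcr_auth holds the (keyed) hash
 * contexts and the requested tag length, swcr_encdec the expanded
 * cipher key schedule, and swcr_compdec the compression transform.
 */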
60c0341432SJohn Baldwin struct swcr_auth {
61c0341432SJohn Baldwin 	void		*sw_ictx;
62c0341432SJohn Baldwin 	void		*sw_octx;
63c0341432SJohn Baldwin 	struct auth_hash *sw_axf;
64c0341432SJohn Baldwin 	uint16_t	sw_mlen;
65c0341432SJohn Baldwin };
66c0341432SJohn Baldwin 
67c0341432SJohn Baldwin struct swcr_encdec {
683e947048SJohn Baldwin 	void		*sw_kschedule;
69c0341432SJohn Baldwin 	struct enc_xform *sw_exf;
70c0341432SJohn Baldwin };
71c0341432SJohn Baldwin 
72c0341432SJohn Baldwin struct swcr_compdec {
73c0341432SJohn Baldwin 	struct comp_algo *sw_cxf;
74c0341432SJohn Baldwin };
75c0341432SJohn Baldwin 
76c0341432SJohn Baldwin struct swcr_session {
77c0341432SJohn Baldwin 	struct mtx	swcr_lock;
78c0341432SJohn Baldwin 	int	(*swcr_process)(struct swcr_session *, struct cryptop *);
79c0341432SJohn Baldwin 
80c0341432SJohn Baldwin 	struct swcr_auth swcr_auth;
81c0341432SJohn Baldwin 	struct swcr_encdec swcr_encdec;
82c0341432SJohn Baldwin 	struct swcr_compdec swcr_compdec;
83c0341432SJohn Baldwin };
84507281e5SSean Eric Fagan 
856810ad6fSSam Leffler static	int32_t swcr_id;
866810ad6fSSam Leffler 
871b0909d5SConrad Meyer static	void swcr_freesession(device_t dev, crypto_session_t cses);
88091d81d1SSam Leffler 
89c0341432SJohn Baldwin /* Used for CRYPTO_NULL_CBC. */
90c0341432SJohn Baldwin static int
91c0341432SJohn Baldwin swcr_null(struct swcr_session *ses, struct cryptop *crp)
92c0341432SJohn Baldwin {
93c0341432SJohn Baldwin 
94c0341432SJohn Baldwin 	return (0);
95c0341432SJohn Baldwin }
96c0341432SJohn Baldwin 
97091d81d1SSam Leffler /*
98091d81d1SSam Leffler  * Apply a symmetric encryption/decryption algorithm.
99091d81d1SSam Leffler  */
100091d81d1SSam Leffler static int
101c0341432SJohn Baldwin swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
102091d81d1SSam Leffler {
1035d7ae54aSConrad Meyer 	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
10408fca7a5SJohn-Mark Gurney 	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
105c0341432SJohn Baldwin 	const struct crypto_session_params *csp;
106c0341432SJohn Baldwin 	struct swcr_encdec *sw;
107091d81d1SSam Leffler 	struct enc_xform *exf;
1089c0e3d3aSJohn Baldwin 	int i, blks, inlen, ivlen, outlen, resid;
1099c0e3d3aSJohn Baldwin 	struct crypto_buffer_cursor cc_in, cc_out;
110*26d292d3SJohn Baldwin 	const unsigned char *inblk;
111*26d292d3SJohn Baldwin 	unsigned char *outblk;
11208fca7a5SJohn-Mark Gurney 	int error;
113c0341432SJohn Baldwin 	bool encrypting;
11408fca7a5SJohn-Mark Gurney 
11508fca7a5SJohn-Mark Gurney 	error = 0;
116091d81d1SSam Leffler 
117c0341432SJohn Baldwin 	sw = &ses->swcr_encdec;
118091d81d1SSam Leffler 	exf = sw->sw_exf;
11908fca7a5SJohn-Mark Gurney 	ivlen = exf->ivsize;
120091d81d1SSam Leffler 
121723d8764SJohn Baldwin 	if (exf->native_blocksize == 0) {
122091d81d1SSam Leffler 		/* Check for non-padded data */
123723d8764SJohn Baldwin 		if ((crp->crp_payload_length % exf->blocksize) != 0)
124723d8764SJohn Baldwin 			return (EINVAL);
125723d8764SJohn Baldwin 
126723d8764SJohn Baldwin 		blks = exf->blocksize;
127723d8764SJohn Baldwin 	} else
128723d8764SJohn Baldwin 		blks = exf->native_blocksize;
129091d81d1SSam Leffler 
130c0341432SJohn Baldwin 	if (exf == &enc_xform_aes_icm &&
131c0341432SJohn Baldwin 	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
13208fca7a5SJohn-Mark Gurney 		return (EINVAL);
13308fca7a5SJohn-Mark Gurney 
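	/* A request-supplied key overrides the session key schedule. */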
134c0341432SJohn Baldwin 	if (crp->crp_cipher_key != NULL) {
135c0341432SJohn Baldwin 		csp = crypto_get_params(crp->crp_session);
1363e947048SJohn Baldwin 		error = exf->setkey(sw->sw_kschedule,
137c0341432SJohn Baldwin 		    crp->crp_cipher_key, csp->csp_cipher_klen);
138c740ae4bSPoul-Henning Kamp 		if (error)
139c740ae4bSPoul-Henning Kamp 			return (error);
140c740ae4bSPoul-Henning Kamp 	}
141d295bdeeSPawel Jakub Dawidek 
14220c128daSJohn Baldwin 	crypto_read_iv(crp, iv);
14320c128daSJohn Baldwin 
14408fca7a5SJohn-Mark Gurney 	if (exf->reinit) {
145d295bdeeSPawel Jakub Dawidek 		/*
146d295bdeeSPawel Jakub Dawidek 		 * xforms that provide a reinit method perform all IV
147d295bdeeSPawel Jakub Dawidek 		 * handling themselves.
148d295bdeeSPawel Jakub Dawidek 		 */
149d295bdeeSPawel Jakub Dawidek 		exf->reinit(sw->sw_kschedule, iv);
150091d81d1SSam Leffler 	}
151091d81d1SSam Leffler 
1529c0e3d3aSJohn Baldwin 	ivp = iv;
153091d81d1SSam Leffler 
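	/*
	 * Set up cursors over the input and (possibly separate)
	 * output buffers, positioned at the start of the payload.
	 */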
1549c0e3d3aSJohn Baldwin 	crypto_cursor_init(&cc_in, &crp->crp_buf);
1559c0e3d3aSJohn Baldwin 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
1569c0e3d3aSJohn Baldwin 	inlen = crypto_cursor_seglen(&cc_in);
1579c0e3d3aSJohn Baldwin 	inblk = crypto_cursor_segbase(&cc_in);
1589c0e3d3aSJohn Baldwin 	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
1599c0e3d3aSJohn Baldwin 		crypto_cursor_init(&cc_out, &crp->crp_obuf);
1609c0e3d3aSJohn Baldwin 		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
1619c0e3d3aSJohn Baldwin 	} else
1629c0e3d3aSJohn Baldwin 		cc_out = cc_in;
1639c0e3d3aSJohn Baldwin 	outlen = crypto_cursor_seglen(&cc_out);
1649c0e3d3aSJohn Baldwin 	outblk = crypto_cursor_segbase(&cc_out);
1659c0e3d3aSJohn Baldwin 
1669c0e3d3aSJohn Baldwin 	resid = crp->crp_payload_length;
167c0341432SJohn Baldwin 	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
168091d81d1SSam Leffler 
169091d81d1SSam Leffler 	/*
1709c0e3d3aSJohn Baldwin 	 * Loop through encrypting/decrypting blocks.  'inlen' is the remaining
1719c0e3d3aSJohn Baldwin 	 * length of the current segment in the input buffer.
1729c0e3d3aSJohn Baldwin 	 * 'outlen' is the remaining length of current segment in the
1739c0e3d3aSJohn Baldwin 	 * output buffer.
174091d81d1SSam Leffler 	 */
1759c0e3d3aSJohn Baldwin 	while (resid >= blks) {
1769c0e3d3aSJohn Baldwin 		/*
1779c0e3d3aSJohn Baldwin 		 * If the current block is not contained within the
1789c0e3d3aSJohn Baldwin 		 * current input/output segment, use 'blk' as a local
1799c0e3d3aSJohn Baldwin 		 * buffer.
1809c0e3d3aSJohn Baldwin 		 */
1819c0e3d3aSJohn Baldwin 		if (inlen < blks) {
1829c0e3d3aSJohn Baldwin 			crypto_cursor_copydata(&cc_in, blks, blk);
1839c0e3d3aSJohn Baldwin 			inblk = blk;
184d295bdeeSPawel Jakub Dawidek 		}
1859c0e3d3aSJohn Baldwin 		if (outlen < blks)
1869c0e3d3aSJohn Baldwin 			outblk = blk;
1879c0e3d3aSJohn Baldwin 
1889c0e3d3aSJohn Baldwin 		/*
1899c0e3d3aSJohn Baldwin 		 * Ciphers without a 'reinit' hook are assumed to be
1909c0e3d3aSJohn Baldwin 		 * used in CBC mode where the chaining is done here.
1919c0e3d3aSJohn Baldwin 		 */
1929c0e3d3aSJohn Baldwin 		if (exf->reinit != NULL) {
1939c0e3d3aSJohn Baldwin 			if (encrypting)
1949c0e3d3aSJohn Baldwin 				exf->encrypt(sw->sw_kschedule, inblk, outblk);
1959c0e3d3aSJohn Baldwin 			else
1969c0e3d3aSJohn Baldwin 				exf->decrypt(sw->sw_kschedule, inblk, outblk);
197c0341432SJohn Baldwin 		} else if (encrypting) {
198091d81d1SSam Leffler 			/* XOR with previous block */
1999c0e3d3aSJohn Baldwin 			for (i = 0; i < blks; i++)
2009c0e3d3aSJohn Baldwin 				outblk[i] = inblk[i] ^ ivp[i];
201091d81d1SSam Leffler 
2029c0e3d3aSJohn Baldwin 			exf->encrypt(sw->sw_kschedule, outblk, outblk);
203091d81d1SSam Leffler 
204091d81d1SSam Leffler 			/*
205091d81d1SSam Leffler 			 * Keep encrypted block for XOR'ing
206091d81d1SSam Leffler 			 * with next block
207091d81d1SSam Leffler 			 */
2089c0e3d3aSJohn Baldwin 			memcpy(iv, outblk, blks);
209091d81d1SSam Leffler 			ivp = iv;
210091d81d1SSam Leffler 		} else {	/* decrypt */
211091d81d1SSam Leffler 			/*
212091d81d1SSam Leffler 			 * Keep encrypted block for XOR'ing
213091d81d1SSam Leffler 			 * with next block
214091d81d1SSam Leffler 			 */
21508fca7a5SJohn-Mark Gurney 			nivp = (ivp == iv) ? iv2 : iv;
2169c0e3d3aSJohn Baldwin 			memcpy(nivp, inblk, blks);
217091d81d1SSam Leffler 
2189c0e3d3aSJohn Baldwin 			exf->decrypt(sw->sw_kschedule, inblk, outblk);
219091d81d1SSam Leffler 
220091d81d1SSam Leffler 			/* XOR with previous block */
2219c0e3d3aSJohn Baldwin 			for (i = 0; i < blks; i++)
2229c0e3d3aSJohn Baldwin 				outblk[i] ^= ivp[i];
223091d81d1SSam Leffler 
22408fca7a5SJohn-Mark Gurney 			ivp = nivp;
225091d81d1SSam Leffler 		}
226091d81d1SSam Leffler 
2279c0e3d3aSJohn Baldwin 		if (inlen < blks) {
2289c0e3d3aSJohn Baldwin 			inlen = crypto_cursor_seglen(&cc_in);
2299c0e3d3aSJohn Baldwin 			inblk = crypto_cursor_segbase(&cc_in);
2309c0e3d3aSJohn Baldwin 		} else {
2319c0e3d3aSJohn Baldwin 			crypto_cursor_advance(&cc_in, blks);
2329c0e3d3aSJohn Baldwin 			inlen -= blks;
2339c0e3d3aSJohn Baldwin 			inblk += blks;
23408fca7a5SJohn-Mark Gurney 		}
235091d81d1SSam Leffler 
2369c0e3d3aSJohn Baldwin 		if (outlen < blks) {
2379c0e3d3aSJohn Baldwin 			crypto_cursor_copyback(&cc_out, blks, blk);
2389c0e3d3aSJohn Baldwin 			outlen = crypto_cursor_seglen(&cc_out);
2399c0e3d3aSJohn Baldwin 			outblk = crypto_cursor_segbase(&cc_out);
2409c0e3d3aSJohn Baldwin 		} else {
2419c0e3d3aSJohn Baldwin 			crypto_cursor_advance(&cc_out, blks);
2429c0e3d3aSJohn Baldwin 			outlen -= blks;
2439c0e3d3aSJohn Baldwin 			outblk += blks;
244091d81d1SSam Leffler 		}
245091d81d1SSam Leffler 
2469c0e3d3aSJohn Baldwin 		resid -= blks;
247f34a967bSPawel Jakub Dawidek 	}
248f34a967bSPawel Jakub Dawidek 
249723d8764SJohn Baldwin 	/* Handle trailing partial block for stream ciphers. */
2509c0e3d3aSJohn Baldwin 	if (resid > 0) {
251723d8764SJohn Baldwin 		KASSERT(exf->native_blocksize != 0,
252723d8764SJohn Baldwin 		    ("%s: partial block of %d bytes for cipher %s",
253723d8764SJohn Baldwin 		    __func__, resid, exf->name));
254723d8764SJohn Baldwin 		KASSERT(exf->reinit != NULL,
255723d8764SJohn Baldwin 		    ("%s: partial block cipher %s without reinit hook",
256723d8764SJohn Baldwin 		    __func__, exf->name));
2579c0e3d3aSJohn Baldwin 		KASSERT(resid < blks, ("%s: partial block too big", __func__));
258723d8764SJohn Baldwin 
2599c0e3d3aSJohn Baldwin 		inlen = crypto_cursor_seglen(&cc_in);
2609c0e3d3aSJohn Baldwin 		outlen = crypto_cursor_seglen(&cc_out);
2619c0e3d3aSJohn Baldwin 		if (inlen < resid) {
2629c0e3d3aSJohn Baldwin 			crypto_cursor_copydata(&cc_in, resid, blk);
2639c0e3d3aSJohn Baldwin 			inblk = blk;
2649c0e3d3aSJohn Baldwin 		} else
2659c0e3d3aSJohn Baldwin 			inblk = crypto_cursor_segbase(&cc_in);
2669c0e3d3aSJohn Baldwin 		if (outlen < resid)
2679c0e3d3aSJohn Baldwin 			outblk = blk;
2689c0e3d3aSJohn Baldwin 		else
2699c0e3d3aSJohn Baldwin 			outblk = crypto_cursor_segbase(&cc_out);
2709c0e3d3aSJohn Baldwin 		if (encrypting)
2719c0e3d3aSJohn Baldwin 			exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
2729c0e3d3aSJohn Baldwin 			    resid);
2739c0e3d3aSJohn Baldwin 		else
2749c0e3d3aSJohn Baldwin 			exf->decrypt_last(sw->sw_kschedule, inblk, outblk,
2759c0e3d3aSJohn Baldwin 			    resid);
2769c0e3d3aSJohn Baldwin 		if (outlen < resid)
2779c0e3d3aSJohn Baldwin 			crypto_cursor_copyback(&cc_out, resid, blk);
278723d8764SJohn Baldwin 	}
279723d8764SJohn Baldwin 
28020c128daSJohn Baldwin 	explicit_bzero(blk, sizeof(blk));
28120c128daSJohn Baldwin 	explicit_bzero(iv, sizeof(iv));
28220c128daSJohn Baldwin 	explicit_bzero(iv2, sizeof(iv2));
2839c0e3d3aSJohn Baldwin 	return (0);
284091d81d1SSam Leffler }
285091d81d1SSam Leffler 
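/*
 * Key an authentication transform: precompute the HMAC inner/outer
 * contexts, or set the key for keyed hashes such as Poly1305 and
 * Blake2.
 */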
286c0341432SJohn Baldwin static void
287c0341432SJohn Baldwin swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
288c0341432SJohn Baldwin     const uint8_t *key, int klen)
289f6c4bc3bSPawel Jakub Dawidek {
290f6c4bc3bSPawel Jakub Dawidek 
291f6c4bc3bSPawel Jakub Dawidek 	switch (axf->type) {
292f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA1_HMAC:
293c97f39ceSConrad Meyer 	case CRYPTO_SHA2_224_HMAC:
294f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_256_HMAC:
295f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_384_HMAC:
296f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_512_HMAC:
297f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_NULL_HMAC:
298f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_RIPEMD160_HMAC:
299c0341432SJohn Baldwin 		hmac_init_ipad(axf, key, klen, sw->sw_ictx);
300c0341432SJohn Baldwin 		hmac_init_opad(axf, key, klen, sw->sw_octx);
301f6c4bc3bSPawel Jakub Dawidek 		break;
30225b7033bSConrad Meyer 	case CRYPTO_POLY1305:
3030e33efe4SConrad Meyer 	case CRYPTO_BLAKE2B:
3040e33efe4SConrad Meyer 	case CRYPTO_BLAKE2S:
3050e33efe4SConrad Meyer 		axf->Setkey(sw->sw_ictx, key, klen);
3060e33efe4SConrad Meyer 		axf->Init(sw->sw_ictx);
3070e33efe4SConrad Meyer 		break;
308f6c4bc3bSPawel Jakub Dawidek 	default:
309c0341432SJohn Baldwin 		panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
310f6c4bc3bSPawel Jakub Dawidek 	}
311f6c4bc3bSPawel Jakub Dawidek }
312f6c4bc3bSPawel Jakub Dawidek 
313091d81d1SSam Leffler /*
314c0341432SJohn Baldwin  * Compute or verify hash.
315091d81d1SSam Leffler  */
316091d81d1SSam Leffler static int
317c0341432SJohn Baldwin swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
318091d81d1SSam Leffler {
319c0341432SJohn Baldwin 	u_char aalg[HASH_MAX_LEN];
320c0341432SJohn Baldwin 	const struct crypto_session_params *csp;
321c0341432SJohn Baldwin 	struct swcr_auth *sw;
322091d81d1SSam Leffler 	struct auth_hash *axf;
323091d81d1SSam Leffler 	union authctx ctx;
324091d81d1SSam Leffler 	int err;
325091d81d1SSam Leffler 
326c0341432SJohn Baldwin 	sw = &ses->swcr_auth;
327091d81d1SSam Leffler 
328091d81d1SSam Leffler 	axf = sw->sw_axf;
329091d81d1SSam Leffler 
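	/* A request-supplied key overrides the precomputed contexts. */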
330c0341432SJohn Baldwin 	if (crp->crp_auth_key != NULL) {
331c0341432SJohn Baldwin 		csp = crypto_get_params(crp->crp_session);
332c0341432SJohn Baldwin 		swcr_authprepare(axf, sw, crp->crp_auth_key,
333c0341432SJohn Baldwin 		    csp->csp_auth_klen);
33425b7033bSConrad Meyer 	}
335f6c4bc3bSPawel Jakub Dawidek 
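	/* Start from the precomputed initial hash context. */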
336091d81d1SSam Leffler 	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);
337091d81d1SSam Leffler 
338c0341432SJohn Baldwin 	err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
3399b6b2f86SJohn Baldwin 	    axf->Update, &ctx);
340091d81d1SSam Leffler 	if (err)
341091d81d1SSam Leffler 		return err;
342091d81d1SSam Leffler 
3439c0e3d3aSJohn Baldwin 	if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
3449c0e3d3aSJohn Baldwin 	    CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
3459c0e3d3aSJohn Baldwin 		err = crypto_apply_buf(&crp->crp_obuf,
3469c0e3d3aSJohn Baldwin 		    crp->crp_payload_output_start, crp->crp_payload_length,
3479b6b2f86SJohn Baldwin 		    axf->Update, &ctx);
3489c0e3d3aSJohn Baldwin 	else
3499c0e3d3aSJohn Baldwin 		err = crypto_apply(crp, crp->crp_payload_start,
3509b6b2f86SJohn Baldwin 		    crp->crp_payload_length, axf->Update, &ctx);
351c0341432SJohn Baldwin 	if (err)
352c0341432SJohn Baldwin 		return err;
353c0341432SJohn Baldwin 
354c0341432SJohn Baldwin 	switch (axf->type) {
355c4729f6eSConrad Meyer 	case CRYPTO_SHA1:
356c4729f6eSConrad Meyer 	case CRYPTO_SHA2_224:
357c4729f6eSConrad Meyer 	case CRYPTO_SHA2_256:
358c4729f6eSConrad Meyer 	case CRYPTO_SHA2_384:
359c4729f6eSConrad Meyer 	case CRYPTO_SHA2_512:
360c4729f6eSConrad Meyer 		axf->Final(aalg, &ctx);
361c4729f6eSConrad Meyer 		break;
362c4729f6eSConrad Meyer 
363091d81d1SSam Leffler 	case CRYPTO_SHA1_HMAC:
364c97f39ceSConrad Meyer 	case CRYPTO_SHA2_224_HMAC:
365f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_256_HMAC:
366f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_384_HMAC:
367f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_512_HMAC:
368091d81d1SSam Leffler 	case CRYPTO_RIPEMD160_HMAC:
369091d81d1SSam Leffler 		if (sw->sw_octx == NULL)
370091d81d1SSam Leffler 			return EINVAL;
371091d81d1SSam Leffler 
372091d81d1SSam Leffler 		axf->Final(aalg, &ctx);
373091d81d1SSam Leffler 		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
374091d81d1SSam Leffler 		axf->Update(&ctx, aalg, axf->hashsize);
375091d81d1SSam Leffler 		axf->Final(aalg, &ctx);
376091d81d1SSam Leffler 		break;
377091d81d1SSam Leffler 
3780e33efe4SConrad Meyer 	case CRYPTO_BLAKE2B:
3790e33efe4SConrad Meyer 	case CRYPTO_BLAKE2S:
380091d81d1SSam Leffler 	case CRYPTO_NULL_HMAC:
38125b7033bSConrad Meyer 	case CRYPTO_POLY1305:
382091d81d1SSam Leffler 		axf->Final(aalg, &ctx);
383091d81d1SSam Leffler 		break;
384091d81d1SSam Leffler 	}
385091d81d1SSam Leffler 
386c0341432SJohn Baldwin 	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
38720c128daSJohn Baldwin 		u_char uaalg[HASH_MAX_LEN];
38820c128daSJohn Baldwin 
389c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
390c0341432SJohn Baldwin 		if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
39120c128daSJohn Baldwin 			err = EBADMSG;
39220c128daSJohn Baldwin 		explicit_bzero(uaalg, sizeof(uaalg));
393c0341432SJohn Baldwin 	} else {
394091d81d1SSam Leffler 		/* Inject the authentication data */
395c0341432SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
396c0341432SJohn Baldwin 	}
39720c128daSJohn Baldwin 	explicit_bzero(aalg, sizeof(aalg));
39820c128daSJohn Baldwin 	return (err);
399091d81d1SSam Leffler }
400091d81d1SSam Leffler 
40108fca7a5SJohn-Mark Gurney CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
40208fca7a5SJohn-Mark Gurney CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */
40308fca7a5SJohn-Mark Gurney 
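/*
 * Compute or verify an AES-GMAC tag.  The payload is authenticated as
 * AAD (nothing is encrypted), then the length block is hashed and the
 * final tag is emitted or compared against the supplied digest.
 */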
40408fca7a5SJohn-Mark Gurney static int
405c0341432SJohn Baldwin swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
40608fca7a5SJohn-Mark Gurney {
407*26d292d3SJohn Baldwin 	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
40808fca7a5SJohn-Mark Gurney 	u_char *blk = (u_char *)blkbuf;
409*26d292d3SJohn Baldwin 	u_char tag[GMAC_DIGEST_LEN];
410*26d292d3SJohn Baldwin 	u_char iv[AES_BLOCK_LEN];
4119c0e3d3aSJohn Baldwin 	struct crypto_buffer_cursor cc;
412*26d292d3SJohn Baldwin 	const u_char *inblk;
41308fca7a5SJohn-Mark Gurney 	union authctx ctx;
414c0341432SJohn Baldwin 	struct swcr_auth *swa;
415c0341432SJohn Baldwin 	struct auth_hash *axf;
41608fca7a5SJohn-Mark Gurney 	uint32_t *blkp;
41720c128daSJohn Baldwin 	int blksz, error, ivlen, len, resid;
41808fca7a5SJohn-Mark Gurney 
419c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
42008fca7a5SJohn-Mark Gurney 	axf = swa->sw_axf;
421c0341432SJohn Baldwin 
42208fca7a5SJohn-Mark Gurney 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
423*26d292d3SJohn Baldwin 	blksz = GMAC_BLOCK_LEN;
424*26d292d3SJohn Baldwin 	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
425*26d292d3SJohn Baldwin 	    __func__));
42608fca7a5SJohn-Mark Gurney 
42708fca7a5SJohn-Mark Gurney 	/* Initialize the IV */
428c0341432SJohn Baldwin 	ivlen = AES_GCM_IV_LEN;
42929fe41ddSJohn Baldwin 	crypto_read_iv(crp, iv);
43008fca7a5SJohn-Mark Gurney 
43108fca7a5SJohn-Mark Gurney 	axf->Reinit(&ctx, iv, ivlen);
4329c0e3d3aSJohn Baldwin 	crypto_cursor_init(&cc, &crp->crp_buf);
4339c0e3d3aSJohn Baldwin 	crypto_cursor_advance(&cc, crp->crp_payload_start);
434*26d292d3SJohn Baldwin 	for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
435*26d292d3SJohn Baldwin 		len = crypto_cursor_seglen(&cc);
436*26d292d3SJohn Baldwin 		if (len >= blksz) {
437*26d292d3SJohn Baldwin 			inblk = crypto_cursor_segbase(&cc);
438*26d292d3SJohn Baldwin 			len = rounddown(MIN(len, resid), blksz);
439*26d292d3SJohn Baldwin 			crypto_cursor_advance(&cc, len);
440*26d292d3SJohn Baldwin 		} else {
441*26d292d3SJohn Baldwin 			len = blksz;
4429c0e3d3aSJohn Baldwin 			crypto_cursor_copydata(&cc, len, blk);
443*26d292d3SJohn Baldwin 			inblk = blk;
444*26d292d3SJohn Baldwin 		}
445*26d292d3SJohn Baldwin 		axf->Update(&ctx, inblk, len);
446*26d292d3SJohn Baldwin 	}
447*26d292d3SJohn Baldwin 	if (resid > 0) {
448*26d292d3SJohn Baldwin 		memset(blk, 0, blksz);
449*26d292d3SJohn Baldwin 		crypto_cursor_copydata(&cc, resid, blk);
45008fca7a5SJohn-Mark Gurney 		axf->Update(&ctx, blk, blksz);
45108fca7a5SJohn-Mark Gurney 	}
45208fca7a5SJohn-Mark Gurney 
45308fca7a5SJohn-Mark Gurney 	/* length block */
454*26d292d3SJohn Baldwin 	memset(blk, 0, blksz);
45508fca7a5SJohn-Mark Gurney 	blkp = (uint32_t *)blk + 1;
456c0341432SJohn Baldwin 	*blkp = htobe32(crp->crp_payload_length * 8);
45708fca7a5SJohn-Mark Gurney 	axf->Update(&ctx, blk, blksz);
458c0341432SJohn Baldwin 
459c0341432SJohn Baldwin 	/* Finalize MAC */
460*26d292d3SJohn Baldwin 	axf->Final(tag, &ctx);
461c0341432SJohn Baldwin 
46220c128daSJohn Baldwin 	error = 0;
463c0341432SJohn Baldwin 	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
464*26d292d3SJohn Baldwin 		u_char tag2[GMAC_DIGEST_LEN];
46520c128daSJohn Baldwin 
466c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
467*26d292d3SJohn Baldwin 		    tag2);
468*26d292d3SJohn Baldwin 		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
46920c128daSJohn Baldwin 			error = EBADMSG;
470*26d292d3SJohn Baldwin 		explicit_bzero(tag2, sizeof(tag2));
471c0341432SJohn Baldwin 	} else {
472c0341432SJohn Baldwin 		/* Inject the authentication data */
473*26d292d3SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
474c0341432SJohn Baldwin 	}
47520c128daSJohn Baldwin 	explicit_bzero(blkbuf, sizeof(blkbuf));
476*26d292d3SJohn Baldwin 	explicit_bzero(tag, sizeof(tag));
47720c128daSJohn Baldwin 	explicit_bzero(iv, sizeof(iv));
47820c128daSJohn Baldwin 	return (error);
479c0341432SJohn Baldwin }
480c0341432SJohn Baldwin 
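/*
 * Perform AES-GCM: authenticate the AAD, process the payload in
 * counter mode while updating the hash, then emit or verify the tag.
 * For decryption, the payload is only decrypted after the tag has
 * been validated.
 */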
481c0341432SJohn Baldwin static int
482c0341432SJohn Baldwin swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
483c0341432SJohn Baldwin {
484*26d292d3SJohn Baldwin 	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
485c0341432SJohn Baldwin 	u_char *blk = (u_char *)blkbuf;
486*26d292d3SJohn Baldwin 	u_char tag[GMAC_DIGEST_LEN];
487*26d292d3SJohn Baldwin 	u_char iv[AES_BLOCK_LEN];
4889c0e3d3aSJohn Baldwin 	struct crypto_buffer_cursor cc_in, cc_out;
489*26d292d3SJohn Baldwin 	const u_char *inblk;
490*26d292d3SJohn Baldwin 	u_char *outblk;
491c0341432SJohn Baldwin 	union authctx ctx;
492c0341432SJohn Baldwin 	struct swcr_auth *swa;
493c0341432SJohn Baldwin 	struct swcr_encdec *swe;
494c0341432SJohn Baldwin 	struct auth_hash *axf;
495c0341432SJohn Baldwin 	struct enc_xform *exf;
496c0341432SJohn Baldwin 	uint32_t *blkp;
49720c128daSJohn Baldwin 	int blksz, error, ivlen, len, r, resid;
498c0341432SJohn Baldwin 
499c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
500c0341432SJohn Baldwin 	axf = swa->sw_axf;
501c0341432SJohn Baldwin 
502c0341432SJohn Baldwin 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
503*26d292d3SJohn Baldwin 	blksz = GMAC_BLOCK_LEN;
504*26d292d3SJohn Baldwin 	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
505*26d292d3SJohn Baldwin 	    __func__));
506c0341432SJohn Baldwin 
507c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
508c0341432SJohn Baldwin 	exf = swe->sw_exf;
509723d8764SJohn Baldwin 	KASSERT(axf->blocksize == exf->native_blocksize,
510723d8764SJohn Baldwin 	    ("%s: blocksize mismatch", __func__));
511c0341432SJohn Baldwin 
512c0341432SJohn Baldwin 	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
513c0341432SJohn Baldwin 		return (EINVAL);
514c0341432SJohn Baldwin 
515c0341432SJohn Baldwin 	/* Initialize the IV */
516c0341432SJohn Baldwin 	ivlen = AES_GCM_IV_LEN;
517c0341432SJohn Baldwin 	bcopy(crp->crp_iv, iv, ivlen);
518c0341432SJohn Baldwin 
519c0341432SJohn Baldwin 	/* Supply MAC with IV */
520c0341432SJohn Baldwin 	axf->Reinit(&ctx, iv, ivlen);
521c0341432SJohn Baldwin 
522c0341432SJohn Baldwin 	/* Supply MAC with AAD */
5239c0e3d3aSJohn Baldwin 	crypto_cursor_init(&cc_in, &crp->crp_buf);
5249c0e3d3aSJohn Baldwin 	crypto_cursor_advance(&cc_in, crp->crp_aad_start);
525*26d292d3SJohn Baldwin 	for (resid = crp->crp_aad_length; resid >= blksz; resid -= len) {
526*26d292d3SJohn Baldwin 		len = crypto_cursor_seglen(&cc_in);
527*26d292d3SJohn Baldwin 		if (len >= blksz) {
528*26d292d3SJohn Baldwin 			inblk = crypto_cursor_segbase(&cc_in);
529*26d292d3SJohn Baldwin 			len = rounddown(MIN(len, resid), blksz);
530*26d292d3SJohn Baldwin 			crypto_cursor_advance(&cc_in, len);
531*26d292d3SJohn Baldwin 		} else {
532*26d292d3SJohn Baldwin 			len = blksz;
5339c0e3d3aSJohn Baldwin 			crypto_cursor_copydata(&cc_in, len, blk);
534*26d292d3SJohn Baldwin 			inblk = blk;
535*26d292d3SJohn Baldwin 		}
536*26d292d3SJohn Baldwin 		axf->Update(&ctx, inblk, len);
537*26d292d3SJohn Baldwin 	}
538*26d292d3SJohn Baldwin 	if (resid > 0) {
539*26d292d3SJohn Baldwin 		memset(blk, 0, blksz);
540*26d292d3SJohn Baldwin 		crypto_cursor_copydata(&cc_in, resid, blk);
541c0341432SJohn Baldwin 		axf->Update(&ctx, blk, blksz);
542c0341432SJohn Baldwin 	}
543c0341432SJohn Baldwin 
544c0341432SJohn Baldwin 	exf->reinit(swe->sw_kschedule, iv);
545c0341432SJohn Baldwin 
546c0341432SJohn Baldwin 	/* Do encryption with MAC */
5479c0e3d3aSJohn Baldwin 	crypto_cursor_init(&cc_in, &crp->crp_buf);
5489c0e3d3aSJohn Baldwin 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
5499c0e3d3aSJohn Baldwin 	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
5509c0e3d3aSJohn Baldwin 		crypto_cursor_init(&cc_out, &crp->crp_obuf);
5519c0e3d3aSJohn Baldwin 		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
5529c0e3d3aSJohn Baldwin 	} else
5539c0e3d3aSJohn Baldwin 		cc_out = cc_in;
554*26d292d3SJohn Baldwin 	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
555*26d292d3SJohn Baldwin 		if (crypto_cursor_seglen(&cc_in) < blksz) {
556*26d292d3SJohn Baldwin 			crypto_cursor_copydata(&cc_in, blksz, blk);
557*26d292d3SJohn Baldwin 			inblk = blk;
558c0341432SJohn Baldwin 		} else {
559*26d292d3SJohn Baldwin 			inblk = crypto_cursor_segbase(&cc_in);
560*26d292d3SJohn Baldwin 			crypto_cursor_advance(&cc_in, blksz);
561c0341432SJohn Baldwin 		}
562*26d292d3SJohn Baldwin 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
563*26d292d3SJohn Baldwin 			if (crypto_cursor_seglen(&cc_out) < blksz)
564*26d292d3SJohn Baldwin 				outblk = blk;
565*26d292d3SJohn Baldwin 			else
566*26d292d3SJohn Baldwin 				outblk = crypto_cursor_segbase(&cc_out);
567*26d292d3SJohn Baldwin 			exf->encrypt(swe->sw_kschedule, inblk, outblk);
568*26d292d3SJohn Baldwin 			axf->Update(&ctx, outblk, blksz);
569*26d292d3SJohn Baldwin 			if (outblk == blk)
570*26d292d3SJohn Baldwin 				crypto_cursor_copyback(&cc_out, blksz, blk);
571*26d292d3SJohn Baldwin 			else
572*26d292d3SJohn Baldwin 				crypto_cursor_advance(&cc_out, blksz);
573*26d292d3SJohn Baldwin 		} else {
574*26d292d3SJohn Baldwin 			axf->Update(&ctx, inblk, blksz);
575*26d292d3SJohn Baldwin 		}
576*26d292d3SJohn Baldwin 	}
577*26d292d3SJohn Baldwin 	if (resid > 0) {
578*26d292d3SJohn Baldwin 		crypto_cursor_copydata(&cc_in, resid, blk);
579*26d292d3SJohn Baldwin 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
580*26d292d3SJohn Baldwin 			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
581*26d292d3SJohn Baldwin 			crypto_cursor_copyback(&cc_out, resid, blk);
582*26d292d3SJohn Baldwin 		}
583*26d292d3SJohn Baldwin 		axf->Update(&ctx, blk, resid);
584c0341432SJohn Baldwin 	}
585c0341432SJohn Baldwin 
586c0341432SJohn Baldwin 	/* length block */
587*26d292d3SJohn Baldwin 	memset(blk, 0, blksz);
588c0341432SJohn Baldwin 	blkp = (uint32_t *)blk + 1;
589c0341432SJohn Baldwin 	*blkp = htobe32(crp->crp_aad_length * 8);
590c0341432SJohn Baldwin 	blkp = (uint32_t *)blk + 3;
591c0341432SJohn Baldwin 	*blkp = htobe32(crp->crp_payload_length * 8);
592c0341432SJohn Baldwin 	axf->Update(&ctx, blk, blksz);
593c0341432SJohn Baldwin 
594c0341432SJohn Baldwin 	/* Finalize MAC */
595*26d292d3SJohn Baldwin 	axf->Final(tag, &ctx);
596c0341432SJohn Baldwin 
597c0341432SJohn Baldwin 	/* Validate tag */
59820c128daSJohn Baldwin 	error = 0;
599c0341432SJohn Baldwin 	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
600*26d292d3SJohn Baldwin 		u_char tag2[GMAC_DIGEST_LEN];
60120c128daSJohn Baldwin 
602*26d292d3SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2);
603c0341432SJohn Baldwin 
604*26d292d3SJohn Baldwin 		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
605*26d292d3SJohn Baldwin 		explicit_bzero(tag2, sizeof(tag2));
60620c128daSJohn Baldwin 		if (r != 0) {
60720c128daSJohn Baldwin 			error = EBADMSG;
60820c128daSJohn Baldwin 			goto out;
60920c128daSJohn Baldwin 		}
610c0341432SJohn Baldwin 
611c0341432SJohn Baldwin 		/* tag matches, decrypt data */
6129c0e3d3aSJohn Baldwin 		crypto_cursor_init(&cc_in, &crp->crp_buf);
6139c0e3d3aSJohn Baldwin 		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
614*26d292d3SJohn Baldwin 		for (resid = crp->crp_payload_length; resid > blksz;
615*26d292d3SJohn Baldwin 		     resid -= blksz) {
616*26d292d3SJohn Baldwin 			if (crypto_cursor_seglen(&cc_in) < blksz) {
617*26d292d3SJohn Baldwin 				crypto_cursor_copydata(&cc_in, blksz, blk);
618*26d292d3SJohn Baldwin 				inblk = blk;
619*26d292d3SJohn Baldwin 			} else {
620*26d292d3SJohn Baldwin 				inblk = crypto_cursor_segbase(&cc_in);
621*26d292d3SJohn Baldwin 				crypto_cursor_advance(&cc_in, blksz);
622*26d292d3SJohn Baldwin 			}
623*26d292d3SJohn Baldwin 			if (crypto_cursor_seglen(&cc_out) < blksz)
624*26d292d3SJohn Baldwin 				outblk = blk;
625*26d292d3SJohn Baldwin 			else
626*26d292d3SJohn Baldwin 				outblk = crypto_cursor_segbase(&cc_out);
627*26d292d3SJohn Baldwin 			exf->decrypt(swe->sw_kschedule, inblk, outblk);
628*26d292d3SJohn Baldwin 			if (outblk == blk)
629*26d292d3SJohn Baldwin 				crypto_cursor_copyback(&cc_out, blksz, blk);
630*26d292d3SJohn Baldwin 			else
631*26d292d3SJohn Baldwin 				crypto_cursor_advance(&cc_out, blksz);
632*26d292d3SJohn Baldwin 		}
633*26d292d3SJohn Baldwin 		if (resid > 0) {
634*26d292d3SJohn Baldwin 			crypto_cursor_copydata(&cc_in, resid, blk);
635*26d292d3SJohn Baldwin 			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
636*26d292d3SJohn Baldwin 			crypto_cursor_copyback(&cc_out, resid, blk);
637c0341432SJohn Baldwin 		}
638c0341432SJohn Baldwin 	} else {
639c0341432SJohn Baldwin 		/* Inject the authentication data */
640*26d292d3SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
641c0341432SJohn Baldwin 	}
642c0341432SJohn Baldwin 
64320c128daSJohn Baldwin out:
64420c128daSJohn Baldwin 	explicit_bzero(blkbuf, sizeof(blkbuf));
645*26d292d3SJohn Baldwin 	explicit_bzero(tag, sizeof(tag));
64620c128daSJohn Baldwin 	explicit_bzero(iv, sizeof(iv));
64720c128daSJohn Baldwin 
64820c128daSJohn Baldwin 	return (error);
649c0341432SJohn Baldwin }
650c0341432SJohn Baldwin 
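/*
 * Compute or verify an AES-CBC-MAC (the authentication piece of CCM)
 * over the payload only; nothing is encrypted.
 */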
651c0341432SJohn Baldwin static int
652c0341432SJohn Baldwin swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
653c0341432SJohn Baldwin {
654*26d292d3SJohn Baldwin 	u_char tag[AES_CBC_MAC_HASH_LEN];
655*26d292d3SJohn Baldwin 	u_char iv[AES_BLOCK_LEN];
656c0341432SJohn Baldwin 	union authctx ctx;
657c0341432SJohn Baldwin 	struct swcr_auth *swa;
658c0341432SJohn Baldwin 	struct auth_hash *axf;
659*26d292d3SJohn Baldwin 	int error, ivlen;
660c0341432SJohn Baldwin 
661c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
662c0341432SJohn Baldwin 	axf = swa->sw_axf;
663c0341432SJohn Baldwin 
664c0341432SJohn Baldwin 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
665c0341432SJohn Baldwin 
666c0341432SJohn Baldwin 	/* Initialize the IV */
667c0341432SJohn Baldwin 	ivlen = AES_CCM_IV_LEN;
66829fe41ddSJohn Baldwin 	crypto_read_iv(crp, iv);
669c0341432SJohn Baldwin 
670c0341432SJohn Baldwin 	/*
671c0341432SJohn Baldwin 	 * AES CCM-CBC-MAC needs to know the length of both the auth
672c0341432SJohn Baldwin 	 * data and payload data before doing the auth computation.
673c0341432SJohn Baldwin 	 */
674c0341432SJohn Baldwin 	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
675c0341432SJohn Baldwin 	ctx.aes_cbc_mac_ctx.cryptDataLength = 0;
676c0341432SJohn Baldwin 
677c0341432SJohn Baldwin 	axf->Reinit(&ctx, iv, ivlen);
678*26d292d3SJohn Baldwin 	error = crypto_apply(crp, crp->crp_payload_start,
679*26d292d3SJohn Baldwin 	    crp->crp_payload_length, axf->Update, &ctx);
680*26d292d3SJohn Baldwin 	if (error)
681*26d292d3SJohn Baldwin 		return (error);
682c0341432SJohn Baldwin 
683c0341432SJohn Baldwin 	/* Finalize MAC */
684*26d292d3SJohn Baldwin 	axf->Final(tag, &ctx);
685c0341432SJohn Baldwin 
686c0341432SJohn Baldwin 	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
687*26d292d3SJohn Baldwin 		u_char tag2[AES_CBC_MAC_HASH_LEN];
68820c128daSJohn Baldwin 
689c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
690*26d292d3SJohn Baldwin 		    tag2);
691*26d292d3SJohn Baldwin 		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
69220c128daSJohn Baldwin 			error = EBADMSG;
693*26d292d3SJohn Baldwin 		explicit_bzero(tag2, sizeof(tag2));
694c0341432SJohn Baldwin 	} else {
695c0341432SJohn Baldwin 		/* Inject the authentication data */
696*26d292d3SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
697c0341432SJohn Baldwin 	}
698*26d292d3SJohn Baldwin 	explicit_bzero(tag, sizeof(tag));
69920c128daSJohn Baldwin 	explicit_bzero(iv, sizeof(iv));
70020c128daSJohn Baldwin 	return (error);
701c0341432SJohn Baldwin }
702c0341432SJohn Baldwin 
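/*
 * Perform AES-CCM: CBC-MAC authentication over the AAD and payload
 * combined with counter-mode encryption.  For decryption the payload
 * is decrypted a second time once the tag has been verified.
 */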
703c0341432SJohn Baldwin static int
704c0341432SJohn Baldwin swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
705c0341432SJohn Baldwin {
706*26d292d3SJohn Baldwin 	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
707c0341432SJohn Baldwin 	u_char *blk = (u_char *)blkbuf;
708*26d292d3SJohn Baldwin 	u_char tag[AES_CBC_MAC_HASH_LEN];
709*26d292d3SJohn Baldwin 	u_char iv[AES_BLOCK_LEN];
7109c0e3d3aSJohn Baldwin 	struct crypto_buffer_cursor cc_in, cc_out;
711*26d292d3SJohn Baldwin 	const u_char *inblk;
712*26d292d3SJohn Baldwin 	u_char *outblk;
713c0341432SJohn Baldwin 	union authctx ctx;
714c0341432SJohn Baldwin 	struct swcr_auth *swa;
715c0341432SJohn Baldwin 	struct swcr_encdec *swe;
716c0341432SJohn Baldwin 	struct auth_hash *axf;
717c0341432SJohn Baldwin 	struct enc_xform *exf;
718*26d292d3SJohn Baldwin 	int blksz, error, ivlen, r, resid;
719c0341432SJohn Baldwin 
720c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
721c0341432SJohn Baldwin 	axf = swa->sw_axf;
722c0341432SJohn Baldwin 
723c0341432SJohn Baldwin 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
724*26d292d3SJohn Baldwin 	blksz = AES_BLOCK_LEN;
725*26d292d3SJohn Baldwin 	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
726*26d292d3SJohn Baldwin 	    __func__));
727c0341432SJohn Baldwin 
728c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
729c0341432SJohn Baldwin 	exf = swe->sw_exf;
730723d8764SJohn Baldwin 	KASSERT(axf->blocksize == exf->native_blocksize,
731723d8764SJohn Baldwin 	    ("%s: blocksize mismatch", __func__));
732c0341432SJohn Baldwin 
733c0341432SJohn Baldwin 	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
734c0341432SJohn Baldwin 		return (EINVAL);
735c0341432SJohn Baldwin 
736c0341432SJohn Baldwin 	/* Initialize the IV */
737c0341432SJohn Baldwin 	ivlen = AES_CCM_IV_LEN;
738c0341432SJohn Baldwin 	bcopy(crp->crp_iv, iv, ivlen);
739c0341432SJohn Baldwin 
740c0341432SJohn Baldwin 	/*
741c0341432SJohn Baldwin 	 * AES CCM-CBC-MAC needs to know the length of both the auth
742c0341432SJohn Baldwin 	 * data and payload data before doing the auth computation.
743c0341432SJohn Baldwin 	 */
744c0341432SJohn Baldwin 	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
745c0341432SJohn Baldwin 	ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;
746c0341432SJohn Baldwin 
747c0341432SJohn Baldwin 	/* Supply MAC with IV */
748c0341432SJohn Baldwin 	axf->Reinit(&ctx, iv, ivlen);
749c0341432SJohn Baldwin 
750c0341432SJohn Baldwin 	/* Supply MAC with AAD */
751*26d292d3SJohn Baldwin 	error = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
752*26d292d3SJohn Baldwin 	    axf->Update, &ctx);
753*26d292d3SJohn Baldwin 	if (error)
754*26d292d3SJohn Baldwin 		return (error);
755c0341432SJohn Baldwin 
756c0341432SJohn Baldwin 	exf->reinit(swe->sw_kschedule, iv);
757c0341432SJohn Baldwin 
758c0341432SJohn Baldwin 	/* Do encryption/decryption with MAC */
7599c0e3d3aSJohn Baldwin 	crypto_cursor_init(&cc_in, &crp->crp_buf);
7609c0e3d3aSJohn Baldwin 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
7619c0e3d3aSJohn Baldwin 	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
7629c0e3d3aSJohn Baldwin 		crypto_cursor_init(&cc_out, &crp->crp_obuf);
7639c0e3d3aSJohn Baldwin 		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
7649c0e3d3aSJohn Baldwin 	} else
7659c0e3d3aSJohn Baldwin 		cc_out = cc_in;
766*26d292d3SJohn Baldwin 	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
767*26d292d3SJohn Baldwin 		if (crypto_cursor_seglen(&cc_in) < blksz) {
768*26d292d3SJohn Baldwin 			crypto_cursor_copydata(&cc_in, blksz, blk);
769*26d292d3SJohn Baldwin 			inblk = blk;
770*26d292d3SJohn Baldwin 		} else {
771*26d292d3SJohn Baldwin 			inblk = crypto_cursor_segbase(&cc_in);
772*26d292d3SJohn Baldwin 			crypto_cursor_advance(&cc_in, blksz);
773*26d292d3SJohn Baldwin 		}
774c0341432SJohn Baldwin 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
775*26d292d3SJohn Baldwin 			if (crypto_cursor_seglen(&cc_out) < blksz)
776*26d292d3SJohn Baldwin 				outblk = blk;
777*26d292d3SJohn Baldwin 			else
778*26d292d3SJohn Baldwin 				outblk = crypto_cursor_segbase(&cc_out);
779*26d292d3SJohn Baldwin 			axf->Update(&ctx, inblk, blksz);
780*26d292d3SJohn Baldwin 			exf->encrypt(swe->sw_kschedule, inblk, outblk);
781*26d292d3SJohn Baldwin 			if (outblk == blk)
782*26d292d3SJohn Baldwin 				crypto_cursor_copyback(&cc_out, blksz, blk);
783*26d292d3SJohn Baldwin 			else
784*26d292d3SJohn Baldwin 				crypto_cursor_advance(&cc_out, blksz);
785c0341432SJohn Baldwin 		} else {
786c0341432SJohn Baldwin 			/*
787c0341432SJohn Baldwin 			 * One of the problems with CCM+CBC is that
788c0341432SJohn Baldwin 			 * the authentication is done on the
789*26d292d3SJohn Baldwin 			 * unencrypted data.  As a result, we have to
790c0341432SJohn Baldwin 			 * decrypt the data twice: once to generate
791c0341432SJohn Baldwin 			 * the tag and a second time after the tag is
792c0341432SJohn Baldwin 			 * verified.
793c0341432SJohn Baldwin 			 */
794*26d292d3SJohn Baldwin 			exf->decrypt(swe->sw_kschedule, inblk, blk);
795*26d292d3SJohn Baldwin 			axf->Update(&ctx, blk, blksz);
796*26d292d3SJohn Baldwin 		}
797*26d292d3SJohn Baldwin 	}
798*26d292d3SJohn Baldwin 	if (resid > 0) {
799*26d292d3SJohn Baldwin 		crypto_cursor_copydata(&cc_in, resid, blk);
800*26d292d3SJohn Baldwin 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
801*26d292d3SJohn Baldwin 			axf->Update(&ctx, blk, resid);
802*26d292d3SJohn Baldwin 			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
803*26d292d3SJohn Baldwin 			crypto_cursor_copyback(&cc_out, resid, blk);
804*26d292d3SJohn Baldwin 		} else {
805*26d292d3SJohn Baldwin 			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
806*26d292d3SJohn Baldwin 			axf->Update(&ctx, blk, resid);
807c0341432SJohn Baldwin 		}
80808fca7a5SJohn-Mark Gurney 	}
80908fca7a5SJohn-Mark Gurney 
81008fca7a5SJohn-Mark Gurney 	/* Finalize MAC */
811*26d292d3SJohn Baldwin 	axf->Final(tag, &ctx);
81208fca7a5SJohn-Mark Gurney 
81308fca7a5SJohn-Mark Gurney 	/* Validate tag */
81420c128daSJohn Baldwin 	error = 0;
815c0341432SJohn Baldwin 	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
816*26d292d3SJohn Baldwin 		u_char tag2[AES_CBC_MAC_HASH_LEN];
81720c128daSJohn Baldwin 
818c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
819*26d292d3SJohn Baldwin 		    tag2);
82008fca7a5SJohn-Mark Gurney 
821*26d292d3SJohn Baldwin 		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
822*26d292d3SJohn Baldwin 		explicit_bzero(tag2, sizeof(tag2));
82320c128daSJohn Baldwin 		if (r != 0) {
82420c128daSJohn Baldwin 			error = EBADMSG;
82520c128daSJohn Baldwin 			goto out;
82620c128daSJohn Baldwin 		}
827c0341432SJohn Baldwin 
82808fca7a5SJohn-Mark Gurney 		/* tag matches, decrypt data */
829507281e5SSean Eric Fagan 		exf->reinit(swe->sw_kschedule, iv);
8309c0e3d3aSJohn Baldwin 		crypto_cursor_init(&cc_in, &crp->crp_buf);
8319c0e3d3aSJohn Baldwin 		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
832*26d292d3SJohn Baldwin 		for (resid = crp->crp_payload_length; resid > blksz;
833*26d292d3SJohn Baldwin 		     resid -= blksz) {
834*26d292d3SJohn Baldwin 			if (crypto_cursor_seglen(&cc_in) < blksz) {
835*26d292d3SJohn Baldwin 				crypto_cursor_copydata(&cc_in, blksz, blk);
836*26d292d3SJohn Baldwin 				inblk = blk;
837*26d292d3SJohn Baldwin 			} else {
838*26d292d3SJohn Baldwin 				inblk = crypto_cursor_segbase(&cc_in);
839*26d292d3SJohn Baldwin 				crypto_cursor_advance(&cc_in, blksz);
840*26d292d3SJohn Baldwin 			}
841*26d292d3SJohn Baldwin 			if (crypto_cursor_seglen(&cc_out) < blksz)
842*26d292d3SJohn Baldwin 				outblk = blk;
843*26d292d3SJohn Baldwin 			else
844*26d292d3SJohn Baldwin 				outblk = crypto_cursor_segbase(&cc_out);
845*26d292d3SJohn Baldwin 			exf->decrypt(swe->sw_kschedule, inblk, outblk);
846*26d292d3SJohn Baldwin 			if (outblk == blk)
847*26d292d3SJohn Baldwin 				crypto_cursor_copyback(&cc_out, blksz, blk);
848*26d292d3SJohn Baldwin 			else
849*26d292d3SJohn Baldwin 				crypto_cursor_advance(&cc_out, blksz);
850*26d292d3SJohn Baldwin 		}
851*26d292d3SJohn Baldwin 		if (resid > 0) {
852*26d292d3SJohn Baldwin 			crypto_cursor_copydata(&cc_in, resid, blk);
853*26d292d3SJohn Baldwin 			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
854*26d292d3SJohn Baldwin 			crypto_cursor_copyback(&cc_out, resid, blk);
85508fca7a5SJohn-Mark Gurney 		}
85608fca7a5SJohn-Mark Gurney 	} else {
85708fca7a5SJohn-Mark Gurney 		/* Inject the authentication data */
858*26d292d3SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
85908fca7a5SJohn-Mark Gurney 	}
86008fca7a5SJohn-Mark Gurney 
86120c128daSJohn Baldwin out:
86220c128daSJohn Baldwin 	explicit_bzero(blkbuf, sizeof(blkbuf));
863*26d292d3SJohn Baldwin 	explicit_bzero(tag, sizeof(tag));
86420c128daSJohn Baldwin 	explicit_bzero(iv, sizeof(iv));
86520c128daSJohn Baldwin 	return (error);
86608fca7a5SJohn-Mark Gurney }
86708fca7a5SJohn-Mark Gurney 
868091d81d1SSam Leffler /*
869c0341432SJohn Baldwin  * Apply a cipher and a digest to perform EtA (encrypt-then-authenticate).
870c0341432SJohn Baldwin  */
871c0341432SJohn Baldwin static int
872c0341432SJohn Baldwin swcr_eta(struct swcr_session *ses, struct cryptop *crp)
873c0341432SJohn Baldwin {
874c0341432SJohn Baldwin 	int error;
875c0341432SJohn Baldwin 
876c0341432SJohn Baldwin 	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
877c0341432SJohn Baldwin 		error = swcr_encdec(ses, crp);
878c0341432SJohn Baldwin 		if (error == 0)
879c0341432SJohn Baldwin 			error = swcr_authcompute(ses, crp);
880c0341432SJohn Baldwin 	} else {
881c0341432SJohn Baldwin 		error = swcr_authcompute(ses, crp);
882c0341432SJohn Baldwin 		if (error == 0)
883c0341432SJohn Baldwin 			error = swcr_encdec(ses, crp);
884c0341432SJohn Baldwin 	}
885c0341432SJohn Baldwin 	return (error);
886c0341432SJohn Baldwin }
887c0341432SJohn Baldwin 
888c0341432SJohn Baldwin /*
889091d81d1SSam Leffler  * Apply a compression/decompression algorithm
890091d81d1SSam Leffler  */
891091d81d1SSam Leffler static int
892c0341432SJohn Baldwin swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
893091d81d1SSam Leffler {
894091d81d1SSam Leffler 	u_int8_t *data, *out;
895091d81d1SSam Leffler 	struct comp_algo *cxf;
896091d81d1SSam Leffler 	int adj;
897091d81d1SSam Leffler 	u_int32_t result;
898091d81d1SSam Leffler 
899c0341432SJohn Baldwin 	cxf = ses->swcr_compdec.sw_cxf;
900091d81d1SSam Leffler 
901091d81d1SSam Leffler 	/* We must handle the whole buffer of data in one pass,
902091d81d1SSam Leffler 	 * so if the data is not contiguous in the buffer, we must
903091d81d1SSam Leffler 	 * copy it into a contiguous temporary buffer.
904091d81d1SSam Leffler 	 */
905091d81d1SSam Leffler 
906c0341432SJohn Baldwin 	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA,  M_NOWAIT);
907091d81d1SSam Leffler 	if (data == NULL)
908091d81d1SSam Leffler 		return (EINVAL);
909c0341432SJohn Baldwin 	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
910c0341432SJohn Baldwin 	    data);
911091d81d1SSam Leffler 
912c0341432SJohn Baldwin 	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
913c0341432SJohn Baldwin 		result = cxf->compress(data, crp->crp_payload_length, &out);
914091d81d1SSam Leffler 	else
915c0341432SJohn Baldwin 		result = cxf->decompress(data, crp->crp_payload_length, &out);
916091d81d1SSam Leffler 
9171ede983cSDag-Erling Smørgrav 	free(data, M_CRYPTO_DATA);
918091d81d1SSam Leffler 	if (result == 0)
919c0341432SJohn Baldwin 		return (EINVAL);
920c0341432SJohn Baldwin 	crp->crp_olen = result;
921c0341432SJohn Baldwin 
922c0341432SJohn Baldwin 	/* Check the compressed size when doing compression */
923c0341432SJohn Baldwin 	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
924c0341432SJohn Baldwin 		if (result >= crp->crp_payload_length) {
925c0341432SJohn Baldwin 			/* Compression was useless, we lost time */
926c0341432SJohn Baldwin 			free(out, M_CRYPTO_DATA);
927c0341432SJohn Baldwin 			return (0);
928c0341432SJohn Baldwin 		}
929c0341432SJohn Baldwin 	}
930091d81d1SSam Leffler 
931091d81d1SSam Leffler 	/* Copy back the (de)compressed data. m_copyback will
932091d81d1SSam Leffler 	 * extend the mbuf as necessary.
933091d81d1SSam Leffler 	 */
934c0341432SJohn Baldwin 	crypto_copyback(crp, crp->crp_payload_start, result, out);
935c0341432SJohn Baldwin 	if (result < crp->crp_payload_length) {
9369c0e3d3aSJohn Baldwin 		switch (crp->crp_buf.cb_type) {
937c0341432SJohn Baldwin 		case CRYPTO_BUF_MBUF:
938c0341432SJohn Baldwin 			adj = result - crp->crp_payload_length;
9399c0e3d3aSJohn Baldwin 			m_adj(crp->crp_buf.cb_mbuf, adj);
940c0341432SJohn Baldwin 			break;
941c0341432SJohn Baldwin 		case CRYPTO_BUF_UIO: {
9429c0e3d3aSJohn Baldwin 			struct uio *uio = crp->crp_buf.cb_uio;
943091d81d1SSam Leffler 			int ind;
944091d81d1SSam Leffler 
945c0341432SJohn Baldwin 			adj = crp->crp_payload_length - result;
946091d81d1SSam Leffler 			ind = uio->uio_iovcnt - 1;
947091d81d1SSam Leffler 
948091d81d1SSam Leffler 			while (adj > 0 && ind >= 0) {
949091d81d1SSam Leffler 				if (adj < uio->uio_iov[ind].iov_len) {
950091d81d1SSam Leffler 					uio->uio_iov[ind].iov_len -= adj;
951091d81d1SSam Leffler 					break;
952091d81d1SSam Leffler 				}
953091d81d1SSam Leffler 
954091d81d1SSam Leffler 				adj -= uio->uio_iov[ind].iov_len;
955091d81d1SSam Leffler 				uio->uio_iov[ind].iov_len = 0;
956091d81d1SSam Leffler 				ind--;
957091d81d1SSam Leffler 				uio->uio_iovcnt--;
958091d81d1SSam Leffler 			}
959091d81d1SSam Leffler 			}
960c0341432SJohn Baldwin 			break;
9619c0e3d3aSJohn Baldwin 		default:
9629c0e3d3aSJohn Baldwin 			break;
963c0341432SJohn Baldwin 		}
964091d81d1SSam Leffler 	}
9651ede983cSDag-Erling Smørgrav 	free(out, M_CRYPTO_DATA);
966091d81d1SSam Leffler 	return 0;
967091d81d1SSam Leffler }
968091d81d1SSam Leffler 
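/*
 * Allocate the cipher context for a new session and, if a key was
 * provided, expand the key schedule.
 */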
969091d81d1SSam Leffler static int
9703e947048SJohn Baldwin swcr_setup_cipher(struct swcr_session *ses,
971c0341432SJohn Baldwin     const struct crypto_session_params *csp)
972091d81d1SSam Leffler {
973c0341432SJohn Baldwin 	struct swcr_encdec *swe;
974091d81d1SSam Leffler 	struct enc_xform *txf;
975f6c4bc3bSPawel Jakub Dawidek 	int error;
976091d81d1SSam Leffler 
977c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
978c0341432SJohn Baldwin 	txf = crypto_cipher(csp);
979c0341432SJohn Baldwin 	MPASS(txf->ivsize == csp->csp_ivlen);
9803e947048SJohn Baldwin 	if (txf->ctxsize != 0) {
9813e947048SJohn Baldwin 		swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
9823e947048SJohn Baldwin 		    M_NOWAIT);
9833e947048SJohn Baldwin 		if (swe->sw_kschedule == NULL)
9843e947048SJohn Baldwin 			return (ENOMEM);
9853e947048SJohn Baldwin 	}
986c0341432SJohn Baldwin 	if (csp->csp_cipher_key != NULL) {
9873e947048SJohn Baldwin 		error = txf->setkey(swe->sw_kschedule,
988c0341432SJohn Baldwin 		    csp->csp_cipher_key, csp->csp_cipher_klen);
989c0341432SJohn Baldwin 		if (error)
990c0341432SJohn Baldwin 			return (error);
991091d81d1SSam Leffler 	}
992c0341432SJohn Baldwin 	swe->sw_exf = txf;
993c0341432SJohn Baldwin 	return (0);
994f6c4bc3bSPawel Jakub Dawidek }
995091d81d1SSam Leffler 
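/*
 * Set up the authentication state for a new session: validate the
 * requested tag length and precompute keyed hash contexts where
 * needed.
 */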
996c0341432SJohn Baldwin static int
997c0341432SJohn Baldwin swcr_setup_auth(struct swcr_session *ses,
998c0341432SJohn Baldwin     const struct crypto_session_params *csp)
999c0341432SJohn Baldwin {
1000c0341432SJohn Baldwin 	struct swcr_auth *swa;
1001c0341432SJohn Baldwin 	struct auth_hash *axf;
1002c0341432SJohn Baldwin 
1003c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
1004c0341432SJohn Baldwin 
1005c0341432SJohn Baldwin 	axf = crypto_auth_hash(csp);
1006c0341432SJohn Baldwin 	swa->sw_axf = axf;
1007c0341432SJohn Baldwin 	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1008c0341432SJohn Baldwin 		return (EINVAL);
1009c0341432SJohn Baldwin 	if (csp->csp_auth_mlen == 0)
1010c0341432SJohn Baldwin 		swa->sw_mlen = axf->hashsize;
1011c0341432SJohn Baldwin 	else
1012c0341432SJohn Baldwin 		swa->sw_mlen = csp->csp_auth_mlen;
1013c0341432SJohn Baldwin 	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1014c0341432SJohn Baldwin 	if (swa->sw_ictx == NULL)
1015c0341432SJohn Baldwin 		return (ENOBUFS);
1016c0341432SJohn Baldwin 
1017c0341432SJohn Baldwin 	switch (csp->csp_auth_alg) {
1018091d81d1SSam Leffler 	case CRYPTO_SHA1_HMAC:
1019c97f39ceSConrad Meyer 	case CRYPTO_SHA2_224_HMAC:
1020f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_256_HMAC:
1021f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_384_HMAC:
1022f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_512_HMAC:
1023091d81d1SSam Leffler 	case CRYPTO_NULL_HMAC:
1024091d81d1SSam Leffler 	case CRYPTO_RIPEMD160_HMAC:
10253a0b6a93SJohn Baldwin 		swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
1026091d81d1SSam Leffler 		    M_NOWAIT);
1027c0341432SJohn Baldwin 		if (swa->sw_octx == NULL)
1028c0341432SJohn Baldwin 			return (ENOBUFS);
1029c0341432SJohn Baldwin 
1030c0341432SJohn Baldwin 		if (csp->csp_auth_key != NULL) {
1031c0341432SJohn Baldwin 			swcr_authprepare(axf, swa, csp->csp_auth_key,
1032c0341432SJohn Baldwin 			    csp->csp_auth_klen);
1033091d81d1SSam Leffler 		}
1034091d81d1SSam Leffler 
1035c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
1036c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
1037091d81d1SSam Leffler 		break;
1038091d81d1SSam Leffler 	case CRYPTO_SHA1:
1039c4729f6eSConrad Meyer 	case CRYPTO_SHA2_224:
1040c4729f6eSConrad Meyer 	case CRYPTO_SHA2_256:
1041c4729f6eSConrad Meyer 	case CRYPTO_SHA2_384:
1042c4729f6eSConrad Meyer 	case CRYPTO_SHA2_512:
1043c0341432SJohn Baldwin 		axf->Init(swa->sw_ictx);
1044c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
1045c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
1046c0341432SJohn Baldwin 		break;
1047c0341432SJohn Baldwin 	case CRYPTO_AES_NIST_GMAC:
1048c0341432SJohn Baldwin 		axf->Init(swa->sw_ictx);
1049c0341432SJohn Baldwin 		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1050c0341432SJohn Baldwin 		    csp->csp_auth_klen);
1051c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
1052c0341432SJohn Baldwin 			ses->swcr_process = swcr_gmac;
1053c0341432SJohn Baldwin 		break;
1054c0341432SJohn Baldwin 	case CRYPTO_POLY1305:
1055c0341432SJohn Baldwin 	case CRYPTO_BLAKE2B:
1056c0341432SJohn Baldwin 	case CRYPTO_BLAKE2S:
1057c0341432SJohn Baldwin 		/*
1058c0341432SJohn Baldwin 		 * Blake2b and Blake2s support an optional key but do
1059c0341432SJohn Baldwin 		 * not require one.
1060c0341432SJohn Baldwin 		 */
1061c0341432SJohn Baldwin 		if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
1062c0341432SJohn Baldwin 			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1063c0341432SJohn Baldwin 			    csp->csp_auth_klen);
1064c0341432SJohn Baldwin 		axf->Init(swa->sw_ictx);
1065c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
1066c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
1067c0341432SJohn Baldwin 		break;
1068c0341432SJohn Baldwin 	case CRYPTO_AES_CCM_CBC_MAC:
1069c0341432SJohn Baldwin 		axf->Init(swa->sw_ictx);
1070c0341432SJohn Baldwin 		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1071c0341432SJohn Baldwin 		    csp->csp_auth_klen);
1072c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
1073c0341432SJohn Baldwin 			ses->swcr_process = swcr_ccm_cbc_mac;
1074c0341432SJohn Baldwin 		break;
1075091d81d1SSam Leffler 	}
1076091d81d1SSam Leffler 
1077c0341432SJohn Baldwin 	return (0);
1078c0341432SJohn Baldwin }
107908fca7a5SJohn-Mark Gurney 
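/*
 * Set up an AES-GCM session: select the GMAC transform matching the
 * key length, key the hash state, then set up the cipher side.
 */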
1080c0341432SJohn Baldwin static int
1081c0341432SJohn Baldwin swcr_setup_gcm(struct swcr_session *ses,
1082c0341432SJohn Baldwin     const struct crypto_session_params *csp)
1083c0341432SJohn Baldwin {
1084c0341432SJohn Baldwin 	struct swcr_auth *swa;
1085c0341432SJohn Baldwin 	struct auth_hash *axf;
1086c0341432SJohn Baldwin 
1087c0341432SJohn Baldwin 	if (csp->csp_ivlen != AES_GCM_IV_LEN)
1088c0341432SJohn Baldwin 		return (EINVAL);
1089c0341432SJohn Baldwin 
1090c0341432SJohn Baldwin 	/* First, setup the auth side. */
1091c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
1092c0341432SJohn Baldwin 	switch (csp->csp_cipher_klen * 8) {
1093c0341432SJohn Baldwin 	case 128:
1094c0341432SJohn Baldwin 		axf = &auth_hash_nist_gmac_aes_128;
1095c0341432SJohn Baldwin 		break;
1096c0341432SJohn Baldwin 	case 192:
1097c0341432SJohn Baldwin 		axf = &auth_hash_nist_gmac_aes_192;
1098c0341432SJohn Baldwin 		break;
1099c0341432SJohn Baldwin 	case 256:
1100c0341432SJohn Baldwin 		axf = &auth_hash_nist_gmac_aes_256;
1101c0341432SJohn Baldwin 		break;
1102c0341432SJohn Baldwin 	default:
1103c0341432SJohn Baldwin 		return (EINVAL);
1104c0341432SJohn Baldwin 	}
1105c0341432SJohn Baldwin 	swa->sw_axf = axf;
1106c0341432SJohn Baldwin 	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1107c0341432SJohn Baldwin 		return (EINVAL);
1108c0341432SJohn Baldwin 	if (csp->csp_auth_mlen == 0)
1109c0341432SJohn Baldwin 		swa->sw_mlen = axf->hashsize;
1110c0341432SJohn Baldwin 	else
1111c0341432SJohn Baldwin 		swa->sw_mlen = csp->csp_auth_mlen;
1112c0341432SJohn Baldwin 	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1113c0341432SJohn Baldwin 	if (swa->sw_ictx == NULL)
1114c0341432SJohn Baldwin 		return (ENOBUFS);
1115c0341432SJohn Baldwin 	axf->Init(swa->sw_ictx);
1116c0341432SJohn Baldwin 	if (csp->csp_cipher_key != NULL)
1117c0341432SJohn Baldwin 		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1118c0341432SJohn Baldwin 		    csp->csp_cipher_klen);
1119c0341432SJohn Baldwin 
1120c0341432SJohn Baldwin 	/* Second, setup the cipher side. */
11213e947048SJohn Baldwin 	return (swcr_setup_cipher(ses, csp));
1122c0341432SJohn Baldwin }
1123c0341432SJohn Baldwin 
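/*
 * Set up the authentication side of an AES-CCM AEAD session.  This
 * mirrors swcr_setup_gcm() but selects a CBC-MAC transform and
 * expects the CCM nonce length instead of the GCM IV length.
 */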
1124c0341432SJohn Baldwin static int
1125c0341432SJohn Baldwin swcr_setup_ccm(struct swcr_session *ses,
1126c0341432SJohn Baldwin     const struct crypto_session_params *csp)
1127c0341432SJohn Baldwin {
1128c0341432SJohn Baldwin 	struct swcr_auth *swa;
1129c0341432SJohn Baldwin 	struct auth_hash *axf;
1130c0341432SJohn Baldwin 
1131c0341432SJohn Baldwin 	if (csp->csp_ivlen != AES_CCM_IV_LEN)
1132c0341432SJohn Baldwin 		return (EINVAL);
1133c0341432SJohn Baldwin 
1134c0341432SJohn Baldwin 	/* First, setup the auth side. */
1135c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
1136c0341432SJohn Baldwin 	switch (csp->csp_cipher_klen * 8) {
1137507281e5SSean Eric Fagan 	case 128:
1138507281e5SSean Eric Fagan 		axf = &auth_hash_ccm_cbc_mac_128;
1139507281e5SSean Eric Fagan 		break;
1140507281e5SSean Eric Fagan 	case 192:
1141507281e5SSean Eric Fagan 		axf = &auth_hash_ccm_cbc_mac_192;
1142507281e5SSean Eric Fagan 		break;
1143507281e5SSean Eric Fagan 	case 256:
1144507281e5SSean Eric Fagan 		axf = &auth_hash_ccm_cbc_mac_256;
1145507281e5SSean Eric Fagan 		break;
1146507281e5SSean Eric Fagan 	default:
1147c0341432SJohn Baldwin 		return (EINVAL);
1148507281e5SSean Eric Fagan 	}
1149c0341432SJohn Baldwin 	swa->sw_axf = axf;
1150c0341432SJohn Baldwin 	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1151c0341432SJohn Baldwin 		return (EINVAL);
1152c0341432SJohn Baldwin 	if (csp->csp_auth_mlen == 0)
1153c0341432SJohn Baldwin 		swa->sw_mlen = axf->hashsize;
1154c0341432SJohn Baldwin 	else
1155c0341432SJohn Baldwin 		swa->sw_mlen = csp->csp_auth_mlen;
1156c0341432SJohn Baldwin 	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1157c0341432SJohn Baldwin 	if (swa->sw_ictx == NULL)
1158c0341432SJohn Baldwin 		return (ENOBUFS);
1159c0341432SJohn Baldwin 	axf->Init(swa->sw_ictx);
1160c0341432SJohn Baldwin 	if (csp->csp_cipher_key != NULL)
1161c0341432SJohn Baldwin 		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1162c0341432SJohn Baldwin 		    csp->csp_cipher_klen);
116308fca7a5SJohn-Mark Gurney 
1164c0341432SJohn Baldwin 	/* Second, setup the cipher side. */
11653e947048SJohn Baldwin 	return (swcr_setup_cipher(ses, csp));
11662e2e26d1SJohn Baldwin }
1167a2bc81bfSJohn-Mark Gurney 
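/*
 * Report whether the requested authentication algorithm and parameters
 * can be handled in software.  GMAC and CCM CBC-MAC additionally need
 * a session key of a supported AES size and the fixed GCM/CCM IV
 * length; Poly1305 needs a key of exactly POLY1305_KEY_LEN bytes.
 */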
1168c0341432SJohn Baldwin static bool
1169c0341432SJohn Baldwin swcr_auth_supported(const struct crypto_session_params *csp)
1170109919c6SBenno Rice {
1171091d81d1SSam Leffler 	struct auth_hash *axf;
1172091d81d1SSam Leffler 
1173c0341432SJohn Baldwin 	axf = crypto_auth_hash(csp);
1174c0341432SJohn Baldwin 	if (axf == NULL)
1175c0341432SJohn Baldwin 		return (false);
1176c0341432SJohn Baldwin 	switch (csp->csp_auth_alg) {
1177091d81d1SSam Leffler 	case CRYPTO_SHA1_HMAC:
1178c97f39ceSConrad Meyer 	case CRYPTO_SHA2_224_HMAC:
1179f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_256_HMAC:
1180f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_384_HMAC:
1181f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_512_HMAC:
1182091d81d1SSam Leffler 	case CRYPTO_NULL_HMAC:
1183c0341432SJohn Baldwin 	case CRYPTO_RIPEMD160_HMAC:
1184091d81d1SSam Leffler 		break;
1185c0341432SJohn Baldwin 	case CRYPTO_AES_NIST_GMAC:
1186c0341432SJohn Baldwin 		switch (csp->csp_auth_klen * 8) {
1187c0341432SJohn Baldwin 		case 128:
1188c0341432SJohn Baldwin 		case 192:
1189c0341432SJohn Baldwin 		case 256:
1190c0341432SJohn Baldwin 			break;
1191c0341432SJohn Baldwin 		default:
1192c0341432SJohn Baldwin 			return (false);
1193c0341432SJohn Baldwin 		}
1194c0341432SJohn Baldwin 		if (csp->csp_auth_key == NULL)
1195c0341432SJohn Baldwin 			return (false);
1196c0341432SJohn Baldwin 		if (csp->csp_ivlen != AES_GCM_IV_LEN)
1197c0341432SJohn Baldwin 			return (false);
1198c0341432SJohn Baldwin 		break;
119925b7033bSConrad Meyer 	case CRYPTO_POLY1305:
1200c0341432SJohn Baldwin 		if (csp->csp_auth_klen != POLY1305_KEY_LEN)
1201c0341432SJohn Baldwin 			return (false);
1202c0341432SJohn Baldwin 		break;
1203c0341432SJohn Baldwin 	case CRYPTO_AES_CCM_CBC_MAC:
1204c0341432SJohn Baldwin 		switch (csp->csp_auth_klen * 8) {
1205c0341432SJohn Baldwin 		case 128:
1206c0341432SJohn Baldwin 		case 192:
1207c0341432SJohn Baldwin 		case 256:
1208c0341432SJohn Baldwin 			break;
1209c0341432SJohn Baldwin 		default:
1210c0341432SJohn Baldwin 			return (false);
1211c0341432SJohn Baldwin 		}
1212c0341432SJohn Baldwin 		if (csp->csp_auth_key == NULL)
1213c0341432SJohn Baldwin 			return (false);
1214c0341432SJohn Baldwin 		if (csp->csp_ivlen != AES_CCM_IV_LEN)
1215c0341432SJohn Baldwin 			return (false);
1216c0341432SJohn Baldwin 		break;
1217c0341432SJohn Baldwin 	}
1218c0341432SJohn Baldwin 	return (true);
1219c0341432SJohn Baldwin }
1220091d81d1SSam Leffler 
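/*
 * Report whether the requested cipher is implemented and the supplied
 * IV length matches the transform; the null cipher is exempt from the
 * IV length check.
 */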
1221c0341432SJohn Baldwin static bool
1222c0341432SJohn Baldwin swcr_cipher_supported(const struct crypto_session_params *csp)
1223c0341432SJohn Baldwin {
1224c0341432SJohn Baldwin 	struct enc_xform *txf;
1225c0341432SJohn Baldwin 
1226c0341432SJohn Baldwin 	txf = crypto_cipher(csp);
1227c0341432SJohn Baldwin 	if (txf == NULL)
1228c0341432SJohn Baldwin 		return (false);
1229c0341432SJohn Baldwin 	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
1230c0341432SJohn Baldwin 	    txf->ivsize != csp->csp_ivlen)
1231c0341432SJohn Baldwin 		return (false);
1232c0341432SJohn Baldwin 	return (true);
1233c0341432SJohn Baldwin }
1234c0341432SJohn Baldwin 
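/*
 * Validate a session request without allocating anything.
 * CSP_F_SEPARATE_OUTPUT is the only session flag accepted, the AEAD
 * ciphers (GCM/CCM) are usable only in CSP_MODE_AEAD, and the helpers
 * above vet the individual cipher and auth parameters.  On success the
 * framework is told this is a software driver.
 */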
1235c0341432SJohn Baldwin static int
1236c0341432SJohn Baldwin swcr_probesession(device_t dev, const struct crypto_session_params *csp)
1237c0341432SJohn Baldwin {
1238c0341432SJohn Baldwin 
12399c0e3d3aSJohn Baldwin 	if ((csp->csp_flags & ~(CSP_F_SEPARATE_OUTPUT)) != 0)
1240c0341432SJohn Baldwin 		return (EINVAL);
1241c0341432SJohn Baldwin 	switch (csp->csp_mode) {
1242c0341432SJohn Baldwin 	case CSP_MODE_COMPRESS:
1243c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1244c0341432SJohn Baldwin 		case CRYPTO_DEFLATE_COMP:
1245c0341432SJohn Baldwin 			break;
1246c0341432SJohn Baldwin 		default:
1247c0341432SJohn Baldwin 			return (EINVAL);
12485fbc5b5aSConrad Meyer 		}
1249091d81d1SSam Leffler 		break;
1250c0341432SJohn Baldwin 	case CSP_MODE_CIPHER:
1251c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1252c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1253c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1254c0341432SJohn Baldwin 			return (EINVAL);
1255c0341432SJohn Baldwin 		default:
1256c0341432SJohn Baldwin 			if (!swcr_cipher_supported(csp))
1257c0341432SJohn Baldwin 				return (EINVAL);
1258091d81d1SSam Leffler 			break;
1259091d81d1SSam Leffler 		}
1260c0341432SJohn Baldwin 		break;
1261c0341432SJohn Baldwin 	case CSP_MODE_DIGEST:
1262c0341432SJohn Baldwin 		if (!swcr_auth_supported(csp))
1263c0341432SJohn Baldwin 			return (EINVAL);
1264c0341432SJohn Baldwin 		break;
1265c0341432SJohn Baldwin 	case CSP_MODE_AEAD:
1266c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1267c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1268c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1269c0341432SJohn Baldwin 			break;
1270c0341432SJohn Baldwin 		default:
1271c0341432SJohn Baldwin 			return (EINVAL);
1272c0341432SJohn Baldwin 		}
1273c0341432SJohn Baldwin 		break;
1274c0341432SJohn Baldwin 	case CSP_MODE_ETA:
1275c0341432SJohn Baldwin 		/* AEAD algorithms cannot be used for EtA. */
1276c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1277c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1278c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1279c0341432SJohn Baldwin 			return (EINVAL);
1280c0341432SJohn Baldwin 		}
1281c0341432SJohn Baldwin 		switch (csp->csp_auth_alg) {
1282c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GMAC:
1283c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_CBC_MAC:
1284c0341432SJohn Baldwin 			return (EINVAL);
1285c0341432SJohn Baldwin 		}
1286c0341432SJohn Baldwin 
1287c0341432SJohn Baldwin 		if (!swcr_cipher_supported(csp) ||
1288c0341432SJohn Baldwin 		    !swcr_auth_supported(csp))
1289c0341432SJohn Baldwin 			return (EINVAL);
1290c0341432SJohn Baldwin 		break;
1291c0341432SJohn Baldwin 	default:
1292c0341432SJohn Baldwin 		return (EINVAL);
1293c0341432SJohn Baldwin 	}
1294c0341432SJohn Baldwin 
1295c0341432SJohn Baldwin 	return (CRYPTODEV_PROBE_SOFTWARE);
1296c0341432SJohn Baldwin }
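
/*
 * Illustrative sketch (not part of the driver): a consumer request
 * that passes the checks above for AES-128-GCM in AEAD mode could be
 * built as
 *
 *	struct crypto_session_params csp = {
 *		.csp_mode = CSP_MODE_AEAD,
 *		.csp_cipher_alg = CRYPTO_AES_NIST_GCM_16,
 *		.csp_cipher_klen = 128 / 8,
 *		.csp_ivlen = AES_GCM_IV_LEN,
 *	};
 *
 * and handed to crypto_newsession(9); swcr_probesession() only checks
 * the parameters, swcr_newsession() below allocates the session state.
 */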
1297c0341432SJohn Baldwin 
1298c0341432SJohn Baldwin /*
1299c0341432SJohn Baldwin  * Generate a new software session.
1300c0341432SJohn Baldwin  */
1301c0341432SJohn Baldwin static int
1302c0341432SJohn Baldwin swcr_newsession(device_t dev, crypto_session_t cses,
1303c0341432SJohn Baldwin     const struct crypto_session_params *csp)
1304c0341432SJohn Baldwin {
1305c0341432SJohn Baldwin 	struct swcr_session *ses;
1306c0341432SJohn Baldwin 	struct swcr_encdec *swe;
1307c0341432SJohn Baldwin 	struct swcr_auth *swa;
1308c0341432SJohn Baldwin 	struct comp_algo *cxf;
1309c0341432SJohn Baldwin 	int error;
1310c0341432SJohn Baldwin 
1311c0341432SJohn Baldwin 	ses = crypto_get_driver_session(cses);
1312c0341432SJohn Baldwin 	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
1313c0341432SJohn Baldwin 
1314c0341432SJohn Baldwin 	error = 0;
1315c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
1316c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
1317c0341432SJohn Baldwin 	switch (csp->csp_mode) {
1318c0341432SJohn Baldwin 	case CSP_MODE_COMPRESS:
1319c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1320c0341432SJohn Baldwin 		case CRYPTO_DEFLATE_COMP:
1321c0341432SJohn Baldwin 			cxf = &comp_algo_deflate;
1322c0341432SJohn Baldwin 			break;
1323c0341432SJohn Baldwin #ifdef INVARIANTS
1324c0341432SJohn Baldwin 		default:
1325c0341432SJohn Baldwin 			panic("bad compression algo");
1326c0341432SJohn Baldwin #endif
1327c0341432SJohn Baldwin 		}
1328c0341432SJohn Baldwin 		ses->swcr_compdec.sw_cxf = cxf;
1329c0341432SJohn Baldwin 		ses->swcr_process = swcr_compdec;
1330c0341432SJohn Baldwin 		break;
1331c0341432SJohn Baldwin 	case CSP_MODE_CIPHER:
1332c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1333c0341432SJohn Baldwin 		case CRYPTO_NULL_CBC:
1334c0341432SJohn Baldwin 			ses->swcr_process = swcr_null;
1335c0341432SJohn Baldwin 			break;
1336c0341432SJohn Baldwin #ifdef INVARIANTS
1337c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1338c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1339c0341432SJohn Baldwin 			panic("bad cipher algo");
1340c0341432SJohn Baldwin #endif
1341c0341432SJohn Baldwin 		default:
13423e947048SJohn Baldwin 			error = swcr_setup_cipher(ses, csp);
1343c0341432SJohn Baldwin 			if (error == 0)
1344c0341432SJohn Baldwin 				ses->swcr_process = swcr_encdec;
1345c0341432SJohn Baldwin 		}
1346c0341432SJohn Baldwin 		break;
1347c0341432SJohn Baldwin 	case CSP_MODE_DIGEST:
1348c0341432SJohn Baldwin 		error = swcr_setup_auth(ses, csp);
1349c0341432SJohn Baldwin 		break;
1350c0341432SJohn Baldwin 	case CSP_MODE_AEAD:
1351c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1352c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1353c0341432SJohn Baldwin 			error = swcr_setup_gcm(ses, csp);
1354c0341432SJohn Baldwin 			if (error == 0)
1355c0341432SJohn Baldwin 				ses->swcr_process = swcr_gcm;
1356c0341432SJohn Baldwin 			break;
1357c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1358c0341432SJohn Baldwin 			error = swcr_setup_ccm(ses, csp);
1359c0341432SJohn Baldwin 			if (error == 0)
1360c0341432SJohn Baldwin 				ses->swcr_process = swcr_ccm;
1361c0341432SJohn Baldwin 			break;
1362c0341432SJohn Baldwin #ifdef INVARIANTS
1363c0341432SJohn Baldwin 		default:
1364c0341432SJohn Baldwin 			panic("bad aead algo");
1365c0341432SJohn Baldwin #endif
1366c0341432SJohn Baldwin 		}
1367c0341432SJohn Baldwin 		break;
1368c0341432SJohn Baldwin 	case CSP_MODE_ETA:
1369c0341432SJohn Baldwin #ifdef INVARIANTS
1370c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1371c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1372c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1373c0341432SJohn Baldwin 			panic("bad eta cipher algo");
1374c0341432SJohn Baldwin 		}
1375c0341432SJohn Baldwin 		switch (csp->csp_auth_alg) {
1376c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GMAC:
1377c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_CBC_MAC:
1378c0341432SJohn Baldwin 			panic("bad eta auth algo");
1379c0341432SJohn Baldwin 		}
1380c0341432SJohn Baldwin #endif
1381c0341432SJohn Baldwin 
1382c0341432SJohn Baldwin 		error = swcr_setup_auth(ses, csp);
1383c0341432SJohn Baldwin 		if (error)
1384c0341432SJohn Baldwin 			break;
1385c0341432SJohn Baldwin 		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
1386c0341432SJohn Baldwin 			/* Effectively degrade to digest mode. */
1387c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
1388c0341432SJohn Baldwin 			break;
1389c0341432SJohn Baldwin 		}
1390c0341432SJohn Baldwin 
13913e947048SJohn Baldwin 		error = swcr_setup_cipher(ses, csp);
1392c0341432SJohn Baldwin 		if (error == 0)
1393c0341432SJohn Baldwin 			ses->swcr_process = swcr_eta;
1394c0341432SJohn Baldwin 		break;
1395c0341432SJohn Baldwin 	default:
1396c0341432SJohn Baldwin 		error = EINVAL;
1397c0341432SJohn Baldwin 	}
1398c0341432SJohn Baldwin 
1399c0341432SJohn Baldwin 	if (error)
1400c0341432SJohn Baldwin 		swcr_freesession(dev, cses);
1401c0341432SJohn Baldwin 	return (error);
1402c0341432SJohn Baldwin }
1403c0341432SJohn Baldwin 
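/*
 * Tear down a session: destroy the per-session lock, scrub and free
 * the cipher key schedule, and zero and free any authentication
 * contexts.  The session memory itself belongs to the crypto
 * framework.
 */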
1404c0341432SJohn Baldwin static void
1405c0341432SJohn Baldwin swcr_freesession(device_t dev, crypto_session_t cses)
1406c0341432SJohn Baldwin {
1407c0341432SJohn Baldwin 	struct swcr_session *ses;
1408c0341432SJohn Baldwin 	struct swcr_auth *swa;
1409c0341432SJohn Baldwin 	struct auth_hash *axf;
1410c0341432SJohn Baldwin 
1411c0341432SJohn Baldwin 	ses = crypto_get_driver_session(cses);
1412c0341432SJohn Baldwin 
1413c0341432SJohn Baldwin 	mtx_destroy(&ses->swcr_lock);
1414c0341432SJohn Baldwin 
14153e947048SJohn Baldwin 	zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);
1416c0341432SJohn Baldwin 
1417c0341432SJohn Baldwin 	axf = ses->swcr_auth.sw_axf;
1418c0341432SJohn Baldwin 	if (axf != NULL) {
1419c0341432SJohn Baldwin 		swa = &ses->swcr_auth;
1420c0341432SJohn Baldwin 		if (swa->sw_ictx != NULL) {
1421c0341432SJohn Baldwin 			explicit_bzero(swa->sw_ictx, axf->ctxsize);
1422c0341432SJohn Baldwin 			free(swa->sw_ictx, M_CRYPTO_DATA);
1423c0341432SJohn Baldwin 		}
1424c0341432SJohn Baldwin 		if (swa->sw_octx != NULL) {
14253a0b6a93SJohn Baldwin 			explicit_bzero(swa->sw_octx, axf->ctxsize);
1426c0341432SJohn Baldwin 			free(swa->sw_octx, M_CRYPTO_DATA);
1427c0341432SJohn Baldwin 		}
1428091d81d1SSam Leffler 	}
1429091d81d1SSam Leffler }
1430091d81d1SSam Leffler 
1431091d81d1SSam Leffler /*
1432091d81d1SSam Leffler  * Process a software request.
1433091d81d1SSam Leffler  */
1434091d81d1SSam Leffler static int
14356810ad6fSSam Leffler swcr_process(device_t dev, struct cryptop *crp, int hint)
1436091d81d1SSam Leffler {
1437c0341432SJohn Baldwin 	struct swcr_session *ses;
1438091d81d1SSam Leffler 
14391b0909d5SConrad Meyer 	ses = crypto_get_driver_session(crp->crp_session);
1440a7fcb1afSSean Eric Fagan 	mtx_lock(&ses->swcr_lock);
1441091d81d1SSam Leffler 
1442c0341432SJohn Baldwin 	crp->crp_etype = ses->swcr_process(ses, crp);
1443091d81d1SSam Leffler 
1444a7fcb1afSSean Eric Fagan 	mtx_unlock(&ses->swcr_lock);
1445091d81d1SSam Leffler 	crypto_done(crp);
1446c0341432SJohn Baldwin 	return (0);
1447091d81d1SSam Leffler }
1448091d81d1SSam Leffler 
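/*
 * Newbus plumbing: cryptosoft adds itself as a child of nexus (see
 * DRIVER_MODULE() at the end of the file) and, on attach, registers
 * with the crypto framework as a synchronous software driver.
 */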
1449091d81d1SSam Leffler static void
14503f147ab2SWarner Losh swcr_identify(driver_t *drv, device_t parent)
1451091d81d1SSam Leffler {
14526810ad6fSSam Leffler 	/* NB: order 10 is so we get attached after h/w devices */
14536810ad6fSSam Leffler 	if (device_find_child(parent, "cryptosoft", -1) == NULL &&
145486c585d9SMarius Strobl 	    BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
14556810ad6fSSam Leffler 		panic("cryptosoft: could not attach");
14566810ad6fSSam Leffler }
1457f6c4bc3bSPawel Jakub Dawidek 
14586810ad6fSSam Leffler static int
14596810ad6fSSam Leffler swcr_probe(device_t dev)
14606810ad6fSSam Leffler {
14616810ad6fSSam Leffler 	device_set_desc(dev, "software crypto");
146286c585d9SMarius Strobl 	return (BUS_PROBE_NOWILDCARD);
14636810ad6fSSam Leffler }
1464f6c4bc3bSPawel Jakub Dawidek 
14656810ad6fSSam Leffler static int
14666810ad6fSSam Leffler swcr_attach(device_t dev)
14676810ad6fSSam Leffler {
14686810ad6fSSam Leffler 
14699ebbebe4SConrad Meyer 	swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
14706810ad6fSSam Leffler 			CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
14716810ad6fSSam Leffler 	if (swcr_id < 0) {
14726810ad6fSSam Leffler 		device_printf(dev, "cannot initialize!\n");
1473c0341432SJohn Baldwin 		return (ENXIO);
14746810ad6fSSam Leffler 	}
14756810ad6fSSam Leffler 
1476c0341432SJohn Baldwin 	return (0);
1477091d81d1SSam Leffler }
14784b465da2SPawel Jakub Dawidek 
14793f147ab2SWarner Losh static int
14806810ad6fSSam Leffler swcr_detach(device_t dev)
14814b465da2SPawel Jakub Dawidek {
14826810ad6fSSam Leffler 	crypto_unregister_all(swcr_id);
14833f147ab2SWarner Losh 	return (0);
14844b465da2SPawel Jakub Dawidek }
14856810ad6fSSam Leffler 
14866810ad6fSSam Leffler static device_method_t swcr_methods[] = {
14876810ad6fSSam Leffler 	DEVMETHOD(device_identify,	swcr_identify),
14886810ad6fSSam Leffler 	DEVMETHOD(device_probe,		swcr_probe),
14896810ad6fSSam Leffler 	DEVMETHOD(device_attach,	swcr_attach),
14906810ad6fSSam Leffler 	DEVMETHOD(device_detach,	swcr_detach),
14916810ad6fSSam Leffler 
1492c0341432SJohn Baldwin 	DEVMETHOD(cryptodev_probesession, swcr_probesession),
14936810ad6fSSam Leffler 	DEVMETHOD(cryptodev_newsession,	swcr_newsession),
14946810ad6fSSam Leffler 	DEVMETHOD(cryptodev_freesession,swcr_freesession),
14956810ad6fSSam Leffler 	DEVMETHOD(cryptodev_process,	swcr_process),
14966810ad6fSSam Leffler 
14976810ad6fSSam Leffler 	{0, 0},
14986810ad6fSSam Leffler };
14996810ad6fSSam Leffler 
15006810ad6fSSam Leffler static driver_t swcr_driver = {
15016810ad6fSSam Leffler 	"cryptosoft",
15026810ad6fSSam Leffler 	swcr_methods,
15036810ad6fSSam Leffler 	0,		/* NB: no softc */
15046810ad6fSSam Leffler };
15056810ad6fSSam Leffler static devclass_t swcr_devclass;
15066810ad6fSSam Leffler 
15076810ad6fSSam Leffler /*
15086810ad6fSSam Leffler  * NB: We explicitly reference the crypto module so we
15096810ad6fSSam Leffler  * get the necessary ordering when built as a loadable
15106810ad6fSSam Leffler  * module.  This is required because we bundle the crypto
15116810ad6fSSam Leffler  * module code together with the cryptosoft driver (otherwise
15126810ad6fSSam Leffler  * normal module dependencies would handle things).
15136810ad6fSSam Leffler  */
15146810ad6fSSam Leffler extern int crypto_modevent(struct module *, int, void *);
15156810ad6fSSam Leffler /* XXX where to attach */
15166810ad6fSSam Leffler DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
15176810ad6fSSam Leffler MODULE_VERSION(cryptosoft, 1);
15186810ad6fSSam Leffler MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);
1519