/* xref: /freebsd/sys/opencrypto/cryptosoft.c (revision 3a0b6a93a7002e7c8d1c7a86bae2d933834c3357) */
/*	$OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $	*/

/*-
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/uio.h>
#include <sys/lock.h>
#include <sys/rwlock.h>
#include <sys/endian.h>
#include <sys/limits.h>
#include <sys/mutex.h>

#include <crypto/blowfish/blowfish.h>
#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>
#include <sys/md5.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include <sys/kobj.h>
#include <sys/bus.h>
#include "cryptodev_if.h"

struct swcr_auth {
	void		*sw_ictx;
	void		*sw_octx;
	struct auth_hash *sw_axf;
	uint16_t	sw_mlen;
};

struct swcr_encdec {
	uint8_t		*sw_kschedule;
	struct enc_xform *sw_exf;
};

struct swcr_compdec {
	struct comp_algo *sw_cxf;
};

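/*
 * Per-session software state.  swcr_process points at the handler
 * (cipher, digest, AEAD, EtA or compression) selected at session
 * setup time for requests on this session.
 */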
struct swcr_session {
	struct mtx	swcr_lock;
	int	(*swcr_process)(struct swcr_session *, struct cryptop *);

	struct swcr_auth swcr_auth;
	struct swcr_encdec swcr_encdec;
	struct swcr_compdec swcr_compdec;
};

static	int32_t swcr_id;

static	void swcr_freesession(device_t dev, crypto_session_t cses);

/* Used for CRYPTO_NULL_CBC. */
static int
swcr_null(struct swcr_session *ses, struct cryptop *crp)
{

	return (0);
}

/*
 * Apply a symmetric encryption/decryption algorithm.
 */
static int
swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
{
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	const struct crypto_session_params *csp;
	struct swcr_encdec *sw;
	struct enc_xform *exf;
	int i, j, k, blks, ind, count, ivlen;
	struct uio *uio, uiolcl;
	struct iovec iovlcl[4];
	struct iovec *iov;
	int iovcnt, iovalloc;
	int error;
	bool encrypting;

	error = 0;

	sw = &ses->swcr_encdec;
	exf = sw->sw_exf;
	blks = exf->blocksize;
	ivlen = exf->ivsize;

	/* Check for non-padded data */
	if ((crp->crp_payload_length % blks) != 0)
		return EINVAL;

	if (exf == &enc_xform_aes_icm &&
	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	crypto_read_iv(crp, iv);

	if (crp->crp_cipher_key != NULL) {
		if (sw->sw_kschedule)
			exf->zerokey(&(sw->sw_kschedule));

		csp = crypto_get_params(crp->crp_session);
		error = exf->setkey(&sw->sw_kschedule,
		    crp->crp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}

	iov = iovlcl;
	iovcnt = nitems(iovlcl);
	iovalloc = 0;
	uio = &uiolcl;
	switch (crp->crp_buf_type) {
	case CRYPTO_BUF_MBUF:
		error = crypto_mbuftoiov(crp->crp_mbuf, &iov, &iovcnt,
		    &iovalloc);
		if (error)
			return (error);
		uio->uio_iov = iov;
		uio->uio_iovcnt = iovcnt;
		break;
	case CRYPTO_BUF_UIO:
		uio = crp->crp_uio;
		break;
	case CRYPTO_BUF_CONTIG:
		iov[0].iov_base = crp->crp_buf;
		iov[0].iov_len = crp->crp_ilen;
		uio->uio_iov = iov;
		uio->uio_iovcnt = 1;
		break;
	}

	ivp = iv;

	if (exf->reinit) {
		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv);
	}

	count = crp->crp_payload_start;
	ind = cuio_getptr(uio, count, &k);
	if (ind == -1) {
		error = EINVAL;
		goto out;
	}

	i = crp->crp_payload_length;
	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

	while (i > 0) {
		/*
		 * If there's insufficient data at the end of
		 * an iovec, we have to do some copying.
		 */
		if (uio->uio_iov[ind].iov_len < k + blks &&
		    uio->uio_iov[ind].iov_len != k) {
			cuio_copydata(uio, count, blks, blk);

			/* Actual encryption/decryption */
			if (exf->reinit) {
				if (encrypting) {
					exf->encrypt(sw->sw_kschedule,
					    blk);
				} else {
					exf->decrypt(sw->sw_kschedule,
					    blk);
				}
			} else if (encrypting) {
				/* XOR with previous block */
				for (j = 0; j < blks; j++)
					blk[j] ^= ivp[j];

				exf->encrypt(sw->sw_kschedule, blk);

				/*
				 * Keep encrypted block for XOR'ing
				 * with next block
				 */
				bcopy(blk, iv, blks);
				ivp = iv;
			} else {	/* decrypt */
				/*
				 * Keep encrypted block for XOR'ing
				 * with next block
				 */
				nivp = (ivp == iv) ? iv2 : iv;
				bcopy(blk, nivp, blks);

				exf->decrypt(sw->sw_kschedule, blk);

				/* XOR with previous block */
				for (j = 0; j < blks; j++)
					blk[j] ^= ivp[j];

				ivp = nivp;
			}

			/* Copy back decrypted block */
			cuio_copyback(uio, count, blks, blk);

			count += blks;

			/* Advance pointer */
			ind = cuio_getptr(uio, count, &k);
			if (ind == -1) {
				error = EINVAL;
				goto out;
			}

			i -= blks;

			/* Could be done... */
			if (i == 0)
				break;
		}

		while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
			uint8_t *idat;
			size_t nb, rem;

			nb = blks;
			rem = MIN((size_t)i,
			    uio->uio_iov[ind].iov_len - (size_t)k);
			idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;

			if (exf->reinit) {
				if (encrypting && exf->encrypt_multi == NULL)
					exf->encrypt(sw->sw_kschedule,
					    idat);
				else if (encrypting) {
					nb = rounddown(rem, blks);
					exf->encrypt_multi(sw->sw_kschedule,
					    idat, nb);
				} else if (exf->decrypt_multi == NULL)
					exf->decrypt(sw->sw_kschedule,
					    idat);
				else {
					nb = rounddown(rem, blks);
					exf->decrypt_multi(sw->sw_kschedule,
					    idat, nb);
				}
			} else if (encrypting) {
				/* XOR with previous block/IV */
				for (j = 0; j < blks; j++)
					idat[j] ^= ivp[j];

				exf->encrypt(sw->sw_kschedule, idat);
				ivp = idat;
			} else {	/* decrypt */
				/*
				 * Keep encrypted block to be used
				 * in next block's processing.
				 */
				nivp = (ivp == iv) ? iv2 : iv;
				bcopy(idat, nivp, blks);

				exf->decrypt(sw->sw_kschedule, idat);

				/* XOR with previous block/IV */
				for (j = 0; j < blks; j++)
					idat[j] ^= ivp[j];

				ivp = nivp;
			}

			count += nb;
			k += nb;
			i -= nb;
		}

		/*
		 * Advance to the next iov if the end of the current iov
		 * is aligned with the end of a cipher block.
		 * Note that the code is equivalent to calling:
		 *      ind = cuio_getptr(uio, count, &k);
		 */
		if (i > 0 && k == uio->uio_iov[ind].iov_len) {
			k = 0;
			ind++;
			if (ind >= uio->uio_iovcnt) {
				error = EINVAL;
				goto out;
			}
		}
	}

out:
	if (iovalloc)
		free(iov, M_CRYPTO_DATA);

	return (error);
}

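/*
 * (Re)key the per-session authentication context: HMAC variants
 * precompute the inner and outer pads, while keyed hashes (Poly1305,
 * Blake2b/Blake2s) set the key and reinitialize the context.
 */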
static void
swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
    const uint8_t *key, int klen)
{

	switch (axf->type) {
	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		hmac_init_ipad(axf, key, klen, sw->sw_ictx);
		hmac_init_opad(axf, key, klen, sw->sw_octx);
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		axf->Setkey(sw->sw_ictx, key, klen);
		axf->Init(sw->sw_ictx);
		break;
	default:
		panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
	}
}

/*
 * Compute or verify hash.
 */
static int
swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
{
	u_char aalg[HASH_MAX_LEN];
	u_char uaalg[HASH_MAX_LEN];
	const struct crypto_session_params *csp;
	struct swcr_auth *sw;
	struct auth_hash *axf;
	union authctx ctx;
	int err;

	sw = &ses->swcr_auth;

	axf = sw->sw_axf;

	if (crp->crp_auth_key != NULL) {
		csp = crypto_get_params(crp->crp_session);
		swcr_authprepare(axf, sw, crp->crp_auth_key,
		    csp->csp_auth_klen);
	}

	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
	    (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
	if (err)
		return err;

	err = crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
	    (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
	if (err)
		return err;

	switch (axf->type) {
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		if (sw->sw_octx == NULL)
			return EINVAL;

		axf->Final(aalg, &ctx);
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_POLY1305:
		axf->Final(aalg, &ctx);
		break;
	}

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
		if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
			return (EBADMSG);
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
	}
	return (0);
}

CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */

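/*
 * Compute or verify a standalone AES-GMAC tag: the payload is fed to
 * the MAC as authenticated data, followed by the length block, but
 * nothing is encrypted.
 */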
44508fca7a5SJohn-Mark Gurney static int
446c0341432SJohn Baldwin swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
44708fca7a5SJohn-Mark Gurney {
44808fca7a5SJohn-Mark Gurney 	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
44908fca7a5SJohn-Mark Gurney 	u_char *blk = (u_char *)blkbuf;
45008fca7a5SJohn-Mark Gurney 	u_char aalg[AALG_MAX_RESULT_LEN];
45108fca7a5SJohn-Mark Gurney 	u_char uaalg[AALG_MAX_RESULT_LEN];
45208fca7a5SJohn-Mark Gurney 	u_char iv[EALG_MAX_BLOCK_LEN];
45308fca7a5SJohn-Mark Gurney 	union authctx ctx;
454c0341432SJohn Baldwin 	struct swcr_auth *swa;
455c0341432SJohn Baldwin 	struct auth_hash *axf;
45608fca7a5SJohn-Mark Gurney 	uint32_t *blkp;
457c0341432SJohn Baldwin 	int blksz, i, ivlen, len;
45808fca7a5SJohn-Mark Gurney 
459c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
46008fca7a5SJohn-Mark Gurney 	axf = swa->sw_axf;
461c0341432SJohn Baldwin 
46208fca7a5SJohn-Mark Gurney 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
46308fca7a5SJohn-Mark Gurney 	blksz = axf->blocksize;
46408fca7a5SJohn-Mark Gurney 
46508fca7a5SJohn-Mark Gurney 	/* Initialize the IV */
466c0341432SJohn Baldwin 	ivlen = AES_GCM_IV_LEN;
46729fe41ddSJohn Baldwin 	crypto_read_iv(crp, iv);
46808fca7a5SJohn-Mark Gurney 
46908fca7a5SJohn-Mark Gurney 	axf->Reinit(&ctx, iv, ivlen);
470c0341432SJohn Baldwin 	for (i = 0; i < crp->crp_payload_length; i += blksz) {
471c0341432SJohn Baldwin 		len = MIN(crp->crp_payload_length - i, blksz);
472c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
473c0341432SJohn Baldwin 		bzero(blk + len, blksz - len);
47408fca7a5SJohn-Mark Gurney 		axf->Update(&ctx, blk, blksz);
47508fca7a5SJohn-Mark Gurney 	}
47608fca7a5SJohn-Mark Gurney 
47708fca7a5SJohn-Mark Gurney 	/* length block */
47808fca7a5SJohn-Mark Gurney 	bzero(blk, blksz);
47908fca7a5SJohn-Mark Gurney 	blkp = (uint32_t *)blk + 1;
480c0341432SJohn Baldwin 	*blkp = htobe32(crp->crp_payload_length * 8);
48108fca7a5SJohn-Mark Gurney 	axf->Update(&ctx, blk, blksz);
482c0341432SJohn Baldwin 
483c0341432SJohn Baldwin 	/* Finalize MAC */
484c0341432SJohn Baldwin 	axf->Final(aalg, &ctx);
485c0341432SJohn Baldwin 
486c0341432SJohn Baldwin 	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
487c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
488c0341432SJohn Baldwin 		    uaalg);
489c0341432SJohn Baldwin 		if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
490c0341432SJohn Baldwin 			return (EBADMSG);
491c0341432SJohn Baldwin 	} else {
492c0341432SJohn Baldwin 		/* Inject the authentication data */
493c0341432SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
494c0341432SJohn Baldwin 	}
495c0341432SJohn Baldwin 	return (0);
496c0341432SJohn Baldwin }
497c0341432SJohn Baldwin 
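/*
 * AES-GCM (AEAD): MAC the AAD, encrypt the payload while feeding the
 * ciphertext to the MAC, then hash the AAD/payload length block and
 * finalize.  On decryption the tag is verified before the payload is
 * decrypted in a second pass.
 */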
static int
swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char uaalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	uint32_t *blkp;
	int blksz, i, ivlen, len, r;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = axf->blocksize;

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	for (i = 0; i < crp->crp_aad_length; i += blksz) {
		len = MIN(crp->crp_aad_length - i, blksz);
		crypto_copydata(crp, crp->crp_aad_start + i, len, blk);
		bzero(blk + len, blksz - len);
		axf->Update(&ctx, blk, blksz);
	}

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption with MAC */
	for (i = 0; i < crp->crp_payload_length; i += len) {
		len = MIN(crp->crp_payload_length - i, blksz);
		if (len < blksz)
			bzero(blk, blksz);
		crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt(swe->sw_kschedule, blk);
			axf->Update(&ctx, blk, len);
			crypto_copyback(crp, crp->crp_payload_start + i, len,
			    blk);
		} else {
			axf->Update(&ctx, blk, len);
		}
	}

	/* length block */
	bzero(blk, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_aad_length * 8);
	blkp = (uint32_t *)blk + 3;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	/* Validate tag */
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    uaalg);

		r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
		if (r != 0)
			return (EBADMSG);

		/* tag matches, decrypt data */
		for (i = 0; i < crp->crp_payload_length; i += blksz) {
			len = MIN(crp->crp_payload_length - i, blksz);
			if (len < blksz)
				bzero(blk, blksz);
			crypto_copydata(crp, crp->crp_payload_start + i, len,
			    blk);
			exf->decrypt(swe->sw_kschedule, blk);
			crypto_copyback(crp, crp->crp_payload_start + i, len,
			    blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
		    aalg);
	}

	return (0);
}

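/*
 * Compute or verify a standalone AES-CCM CBC-MAC tag.  With no
 * encrypted payload, the entire payload is treated as authenticated
 * data, so authDataLength covers it and cryptDataLength is zero.
 */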
static int
swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char uaalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	union authctx ctx;
	struct swcr_auth *swa;
	struct auth_hash *axf;
	int blksz, i, ivlen, len;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = axf->blocksize;

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	crypto_read_iv(crp, iv);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = 0;

	axf->Reinit(&ctx, iv, ivlen);
	for (i = 0; i < crp->crp_payload_length; i += blksz) {
		len = MIN(crp->crp_payload_length - i, blksz);
		crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
		bzero(blk + len, blksz - len);
		axf->Update(&ctx, blk, blksz);
	}

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    uaalg);
		if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
			return (EBADMSG);
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
	}
	return (0);
}

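/*
 * AES-CCM (AEAD).  Unlike GCM, the CBC-MAC is computed over the
 * plaintext, so encryption MACs each block before encrypting it,
 * while decryption verifies the tag first and then decrypts the
 * payload a second time.
 */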
static int
swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char uaalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	int blksz, i, ivlen, len, r;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = axf->blocksize;

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	for (i = 0; i < crp->crp_aad_length; i += blksz) {
		len = MIN(crp->crp_aad_length - i, blksz);
		crypto_copydata(crp, crp->crp_aad_start + i, len, blk);
		bzero(blk + len, blksz - len);
		axf->Update(&ctx, blk, blksz);
	}

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption/decryption with MAC */
	for (i = 0; i < crp->crp_payload_length; i += len) {
		len = MIN(crp->crp_payload_length - i, blksz);
		if (len < blksz)
			bzero(blk, blksz);
		crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			axf->Update(&ctx, blk, len);
			exf->encrypt(swe->sw_kschedule, blk);
			crypto_copyback(crp, crp->crp_payload_start + i, len,
			    blk);
		} else {
			/*
			 * One of the problems with CCM+CBC is that
			 * the authentication is done on the
			 * unencrypted data.  As a result, we have to
			 * decrypt the data twice: once to generate
			 * the tag and a second time after the tag is
			 * verified.
			 */
			exf->decrypt(swe->sw_kschedule, blk);
			axf->Update(&ctx, blk, len);
		}
	}

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	/* Validate tag */
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    uaalg);

		r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
		if (r != 0)
			return (EBADMSG);

		/* tag matches, decrypt data */
		exf->reinit(swe->sw_kschedule, iv);
		for (i = 0; i < crp->crp_payload_length; i += blksz) {
			len = MIN(crp->crp_payload_length - i, blksz);
			if (len < blksz)
				bzero(blk, blksz);
			crypto_copydata(crp, crp->crp_payload_start + i, len,
			    blk);
			exf->decrypt(swe->sw_kschedule, blk);
			crypto_copyback(crp, crp->crp_payload_start + i, len,
			    blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
		    aalg);
	}

	return (0);
}

/*
 * Apply a cipher and a digest to perform EtA.
 */
static int
swcr_eta(struct swcr_session *ses, struct cryptop *crp)
{
	int error;

	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		error = swcr_encdec(ses, crp);
		if (error == 0)
			error = swcr_authcompute(ses, crp);
	} else {
		error = swcr_authcompute(ses, crp);
		if (error == 0)
			error = swcr_encdec(ses, crp);
	}
	return (error);
}

/*
 * Apply a compression/decompression algorithm
 */
static int
swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
{
	u_int8_t *data, *out;
	struct comp_algo *cxf;
	int adj;
	u_int32_t result;

	cxf = ses->swcr_compdec.sw_cxf;

	/*
	 * We must process the whole buffer in one pass, so if the
	 * data is not contiguous in the mbuf we first copy it into a
	 * temporary buffer.
	 */

	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
	if (data == NULL)
		return (EINVAL);
	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
	    data);

	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
		result = cxf->compress(data, crp->crp_payload_length, &out);
	else
		result = cxf->decompress(data, crp->crp_payload_length, &out);

	free(data, M_CRYPTO_DATA);
	if (result == 0)
		return (EINVAL);
	crp->crp_olen = result;

	/* Check the compressed size when doing compression */
	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
		if (result >= crp->crp_payload_length) {
			/* Compression was useless, we lost time */
			free(out, M_CRYPTO_DATA);
			return (0);
		}
	}

	/*
	 * Copy back the (de)compressed data.  For mbuf buffers,
	 * m_copyback extends the chain as necessary.
	 */
	crypto_copyback(crp, crp->crp_payload_start, result, out);
	if (result < crp->crp_payload_length) {
		switch (crp->crp_buf_type) {
		case CRYPTO_BUF_MBUF:
			adj = result - crp->crp_payload_length;
			m_adj(crp->crp_mbuf, adj);
			break;
		case CRYPTO_BUF_UIO: {
			struct uio *uio = crp->crp_uio;
			int ind;

			adj = crp->crp_payload_length - result;
			ind = uio->uio_iovcnt - 1;

			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;
					break;
				}

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;
				ind--;
				uio->uio_iovcnt--;
			}
			}
			break;
		}
	}
	free(out, M_CRYPTO_DATA);
	return 0;
}

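/*
 * Set up the cipher side of a session: record the transform and, if
 * a key was supplied with the session parameters, expand the key
 * schedule now (otherwise it is expanded per-request from
 * crp_cipher_key in swcr_encdec()).
 */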
static int
swcr_setup_encdec(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_encdec *swe;
	struct enc_xform *txf;
	int error;

	swe = &ses->swcr_encdec;
	txf = crypto_cipher(csp);
	MPASS(txf->ivsize == csp->csp_ivlen);
	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(&swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}
	swe->sw_exf = txf;
	return (0);
}

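/*
 * Set up the auth side of a session: allocate the hash context(s),
 * key them if a key was supplied, and for digest-only sessions select
 * the matching swcr_process handler.
 */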
static int
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	swa = &ses->swcr_auth;

	axf = crypto_auth_hash(csp);
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);

	switch (csp->csp_auth_alg) {
	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swa->sw_octx == NULL)
			return (ENOBUFS);

		if (csp->csp_auth_key != NULL) {
			swcr_authprepare(axf, swa, csp->csp_auth_key,
			    csp->csp_auth_klen);
		}

		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_NIST_GMAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_gmac;
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/*
		 * Blake2b and Blake2s support an optional key but do
		 * not require one.
		 */
		if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
			    csp->csp_auth_klen);
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_ccm_cbc_mac;
		break;
	}

	return (0);
}

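/*
 * Set up an AES-GCM AEAD session: the AES key length selects the
 * matching GMAC auth transform, and the cipher side uses the
 * AES-NIST-GCM transform keyed from the same cipher key.
 */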
static int
swcr_setup_gcm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_encdec *swe;
	struct swcr_auth *swa;
	struct enc_xform *txf;
	struct auth_hash *axf;
	int error;

	if (csp->csp_ivlen != AES_GCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_nist_gmac_aes_128;
		break;
	case 192:
		axf = &auth_hash_nist_gmac_aes_192;
		break;
	case 256:
		axf = &auth_hash_nist_gmac_aes_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	swe = &ses->swcr_encdec;
	txf = &enc_xform_aes_nist_gcm;
	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(&swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}
	swe->sw_exf = txf;

	return (0);
}

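/*
 * Set up an AES-CCM AEAD session, mirroring the GCM setup: the AES
 * key length selects the CBC-MAC auth transform, and the cipher side
 * uses the CCM counter-mode transform.
 */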
static int
swcr_setup_ccm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_encdec *swe;
	struct swcr_auth *swa;
	struct enc_xform *txf;
	struct auth_hash *axf;
	int error;

	if (csp->csp_ivlen != AES_CCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_ccm_cbc_mac_128;
		break;
	case 192:
		axf = &auth_hash_ccm_cbc_mac_192;
		break;
	case 256:
		axf = &auth_hash_ccm_cbc_mac_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	swe = &ses->swcr_encdec;
	txf = &enc_xform_ccm;
	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(&swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}
	swe->sw_exf = txf;

	return (0);
}

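/*
 * Capability check used by swcr_probesession(): reject auth
 * algorithms we do not implement or whose key/IV parameters are out
 * of range.
 */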
static bool
swcr_auth_supported(const struct crypto_session_params *csp)
{
	struct auth_hash *axf;

	axf = crypto_auth_hash(csp);
	if (axf == NULL)
		return (false);
	switch (csp->csp_auth_alg) {
	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		break;
	case CRYPTO_AES_NIST_GMAC:
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_GCM_IV_LEN)
			return (false);
		break;
	case CRYPTO_POLY1305:
		if (csp->csp_auth_klen != POLY1305_KEY_LEN)
			return (false);
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_CCM_IV_LEN)
			return (false);
		break;
	}
	return (true);
}

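/*
 * A cipher is supported if we have a transform for it and the
 * requested IV length matches the transform (NULL_CBC ignores the
 * IV length).
 */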
static bool
swcr_cipher_supported(const struct crypto_session_params *csp)
{
	struct enc_xform *txf;

	txf = crypto_cipher(csp);
	if (txf == NULL)
		return (false);
	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
	    txf->ivsize != csp->csp_ivlen)
		return (false);
	return (true);
}

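/*
 * Decide whether the software driver can handle a proposed session.
 * AEAD ciphers (GCM, CCM) are only accepted in CSP_MODE_AEAD, never
 * as a bare cipher or as half of an EtA session.  Supported sessions
 * are offered at CRYPTODEV_PROBE_SOFTWARE priority.
 */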
1148c0341432SJohn Baldwin static int
1149c0341432SJohn Baldwin swcr_probesession(device_t dev, const struct crypto_session_params *csp)
1150c0341432SJohn Baldwin {
1151c0341432SJohn Baldwin 
1152c0341432SJohn Baldwin 	if (csp->csp_flags != 0)
1153c0341432SJohn Baldwin 		return (EINVAL);
1154c0341432SJohn Baldwin 	switch (csp->csp_mode) {
1155c0341432SJohn Baldwin 	case CSP_MODE_COMPRESS:
1156c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1157c0341432SJohn Baldwin 		case CRYPTO_DEFLATE_COMP:
1158c0341432SJohn Baldwin 			break;
1159c0341432SJohn Baldwin 		default:
1160c0341432SJohn Baldwin 			return (EINVAL);
11615fbc5b5aSConrad Meyer 		}
1162091d81d1SSam Leffler 		break;
1163c0341432SJohn Baldwin 	case CSP_MODE_CIPHER:
1164c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1165c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1166c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1167c0341432SJohn Baldwin 			return (EINVAL);
1168c0341432SJohn Baldwin 		default:
1169c0341432SJohn Baldwin 			if (!swcr_cipher_supported(csp))
1170c0341432SJohn Baldwin 				return (EINVAL);
1171091d81d1SSam Leffler 			break;
1172091d81d1SSam Leffler 		}
1173c0341432SJohn Baldwin 		break;
1174c0341432SJohn Baldwin 	case CSP_MODE_DIGEST:
1175c0341432SJohn Baldwin 		if (!swcr_auth_supported(csp))
1176c0341432SJohn Baldwin 			return (EINVAL);
1177c0341432SJohn Baldwin 		break;
1178c0341432SJohn Baldwin 	case CSP_MODE_AEAD:
1179c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1180c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1181c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1182c0341432SJohn Baldwin 			break;
1183c0341432SJohn Baldwin 		default:
1184c0341432SJohn Baldwin 			return (EINVAL);
1185c0341432SJohn Baldwin 		}
1186c0341432SJohn Baldwin 		break;
1187c0341432SJohn Baldwin 	case CSP_MODE_ETA:
1188c0341432SJohn Baldwin 		/* AEAD algorithms cannot be used for EtA. */
1189c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1190c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1191c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1192c0341432SJohn Baldwin 			return (EINVAL);
1193c0341432SJohn Baldwin 		}
1194c0341432SJohn Baldwin 		switch (csp->csp_auth_alg) {
1195c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GMAC:
1196c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_CBC_MAC:
1197c0341432SJohn Baldwin 			return (EINVAL);
1198c0341432SJohn Baldwin 		}
1199c0341432SJohn Baldwin 
1200c0341432SJohn Baldwin 		if (!swcr_cipher_supported(csp) ||
1201c0341432SJohn Baldwin 		    !swcr_auth_supported(csp))
1202c0341432SJohn Baldwin 			return (EINVAL);
1203c0341432SJohn Baldwin 		break;
1204c0341432SJohn Baldwin 	default:
1205c0341432SJohn Baldwin 		return (EINVAL);
1206c0341432SJohn Baldwin 	}
1207c0341432SJohn Baldwin 
1208c0341432SJohn Baldwin 	return (CRYPTODEV_PROBE_SOFTWARE);
1209c0341432SJohn Baldwin }
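/*
 * Illustrative sketch (an assumption, not taken from this file): session
 * parameters for an encrypt-then-authenticate request that passes the
 * CSP_MODE_ETA checks above, pairing AES-CBC with HMAC-SHA1.  The key and
 * IV lengths shown are example values; 16 bytes matches the AES-CBC IV
 * size required by swcr_cipher_supported().
 *
 *	struct crypto_session_params csp = {
 *		.csp_mode = CSP_MODE_ETA,
 *		.csp_cipher_alg = CRYPTO_AES_CBC,
 *		.csp_cipher_klen = 16,
 *		.csp_ivlen = 16,
 *		.csp_auth_alg = CRYPTO_SHA1_HMAC,
 *		.csp_auth_klen = 20,
 *	};
 */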
1210c0341432SJohn Baldwin 
1211c0341432SJohn Baldwin /*
1212c0341432SJohn Baldwin  * Generate a new software session.
1213c0341432SJohn Baldwin  */
1214c0341432SJohn Baldwin static int
1215c0341432SJohn Baldwin swcr_newsession(device_t dev, crypto_session_t cses,
1216c0341432SJohn Baldwin     const struct crypto_session_params *csp)
1217c0341432SJohn Baldwin {
1218c0341432SJohn Baldwin 	struct swcr_session *ses;
1219c0341432SJohn Baldwin 	struct swcr_encdec *swe;
1220c0341432SJohn Baldwin 	struct swcr_auth *swa;
1221c0341432SJohn Baldwin 	struct comp_algo *cxf;
1222c0341432SJohn Baldwin 	int error;
1223c0341432SJohn Baldwin 
1224c0341432SJohn Baldwin 	ses = crypto_get_driver_session(cses);
1225c0341432SJohn Baldwin 	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
1226c0341432SJohn Baldwin 
1227c0341432SJohn Baldwin 	error = 0;
1228c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
1229c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
1230c0341432SJohn Baldwin 	switch (csp->csp_mode) {
1231c0341432SJohn Baldwin 	case CSP_MODE_COMPRESS:
1232c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1233c0341432SJohn Baldwin 		case CRYPTO_DEFLATE_COMP:
1234c0341432SJohn Baldwin 			cxf = &comp_algo_deflate;
1235c0341432SJohn Baldwin 			break;
1236c0341432SJohn Baldwin #ifdef INVARIANTS
1237c0341432SJohn Baldwin 		default:
1238c0341432SJohn Baldwin 			panic("bad compression algo");
1239c0341432SJohn Baldwin #endif
1240c0341432SJohn Baldwin 		}
1241c0341432SJohn Baldwin 		ses->swcr_compdec.sw_cxf = cxf;
1242c0341432SJohn Baldwin 		ses->swcr_process = swcr_compdec;
1243c0341432SJohn Baldwin 		break;
1244c0341432SJohn Baldwin 	case CSP_MODE_CIPHER:
1245c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1246c0341432SJohn Baldwin 		case CRYPTO_NULL_CBC:
1247c0341432SJohn Baldwin 			ses->swcr_process = swcr_null;
1248c0341432SJohn Baldwin 			break;
1249c0341432SJohn Baldwin #ifdef INVARIANTS
1250c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1251c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1252c0341432SJohn Baldwin 			panic("bad cipher algo");
1253c0341432SJohn Baldwin #endif
1254c0341432SJohn Baldwin 		default:
1255c0341432SJohn Baldwin 			error = swcr_setup_encdec(ses, csp);
1256c0341432SJohn Baldwin 			if (error == 0)
1257c0341432SJohn Baldwin 				ses->swcr_process = swcr_encdec;
1258c0341432SJohn Baldwin 		}
1259c0341432SJohn Baldwin 		break;
1260c0341432SJohn Baldwin 	case CSP_MODE_DIGEST:
1261c0341432SJohn Baldwin 		error = swcr_setup_auth(ses, csp);
1262c0341432SJohn Baldwin 		break;
1263c0341432SJohn Baldwin 	case CSP_MODE_AEAD:
1264c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1265c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1266c0341432SJohn Baldwin 			error = swcr_setup_gcm(ses, csp);
1267c0341432SJohn Baldwin 			if (error == 0)
1268c0341432SJohn Baldwin 				ses->swcr_process = swcr_gcm;
1269c0341432SJohn Baldwin 			break;
1270c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1271c0341432SJohn Baldwin 			error = swcr_setup_ccm(ses, csp);
1272c0341432SJohn Baldwin 			if (error == 0)
1273c0341432SJohn Baldwin 				ses->swcr_process = swcr_ccm;
1274c0341432SJohn Baldwin 			break;
1275c0341432SJohn Baldwin #ifdef INVARIANTS
1276c0341432SJohn Baldwin 		default:
1277c0341432SJohn Baldwin 			panic("bad aead algo");
1278c0341432SJohn Baldwin #endif
1279c0341432SJohn Baldwin 		}
1280c0341432SJohn Baldwin 		break;
1281c0341432SJohn Baldwin 	case CSP_MODE_ETA:
1282c0341432SJohn Baldwin #ifdef INVARIANTS
1283c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1284c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1285c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1286c0341432SJohn Baldwin 			panic("bad eta cipher algo");
1287c0341432SJohn Baldwin 		}
1288c0341432SJohn Baldwin 		switch (csp->csp_auth_alg) {
1289c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GMAC:
1290c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_CBC_MAC:
1291c0341432SJohn Baldwin 			panic("bad eta auth algo");
1292c0341432SJohn Baldwin 		}
1293c0341432SJohn Baldwin #endif
1294c0341432SJohn Baldwin 
1295c0341432SJohn Baldwin 		error = swcr_setup_auth(ses, csp);
1296c0341432SJohn Baldwin 		if (error)
1297c0341432SJohn Baldwin 			break;
1298c0341432SJohn Baldwin 		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
1299c0341432SJohn Baldwin 			/* Effectively degrade to digest mode. */
1300c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
1301c0341432SJohn Baldwin 			break;
1302c0341432SJohn Baldwin 		}
1303c0341432SJohn Baldwin 
1304c0341432SJohn Baldwin 		error = swcr_setup_encdec(ses, csp);
1305c0341432SJohn Baldwin 		if (error == 0)
1306c0341432SJohn Baldwin 			ses->swcr_process = swcr_eta;
1307c0341432SJohn Baldwin 		break;
1308c0341432SJohn Baldwin 	default:
1309c0341432SJohn Baldwin 		error = EINVAL;
1310c0341432SJohn Baldwin 	}
1311c0341432SJohn Baldwin 
1312c0341432SJohn Baldwin 	if (error)
1313c0341432SJohn Baldwin 		swcr_freesession(dev, cses);
1314c0341432SJohn Baldwin 	return (error);
1315c0341432SJohn Baldwin }
1316c0341432SJohn Baldwin 
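/*
 * Tear down a software session.  The cipher key schedule is wiped via the
 * transform's zerokey() hook, while the HMAC contexts, which were
 * allocated by this driver, are cleared with explicit_bzero() before
 * being freed.
 */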
1317c0341432SJohn Baldwin static void
1318c0341432SJohn Baldwin swcr_freesession(device_t dev, crypto_session_t cses)
1319c0341432SJohn Baldwin {
1320c0341432SJohn Baldwin 	struct swcr_session *ses;
1321c0341432SJohn Baldwin 	struct swcr_auth *swa;
1322c0341432SJohn Baldwin 	struct enc_xform *txf;
1323c0341432SJohn Baldwin 	struct auth_hash *axf;
1324c0341432SJohn Baldwin 
1325c0341432SJohn Baldwin 	ses = crypto_get_driver_session(cses);
1326c0341432SJohn Baldwin 
1327c0341432SJohn Baldwin 	mtx_destroy(&ses->swcr_lock);
1328c0341432SJohn Baldwin 
1329c0341432SJohn Baldwin 	txf = ses->swcr_encdec.sw_exf;
1330c0341432SJohn Baldwin 	if (txf != NULL) {
1331c0341432SJohn Baldwin 		if (ses->swcr_encdec.sw_kschedule != NULL)
1332c0341432SJohn Baldwin 			txf->zerokey(&(ses->swcr_encdec.sw_kschedule));
1333c0341432SJohn Baldwin 	}
1334c0341432SJohn Baldwin 
1335c0341432SJohn Baldwin 	axf = ses->swcr_auth.sw_axf;
1336c0341432SJohn Baldwin 	if (axf != NULL) {
1337c0341432SJohn Baldwin 		swa = &ses->swcr_auth;
1338c0341432SJohn Baldwin 		if (swa->sw_ictx != NULL) {
1339c0341432SJohn Baldwin 			explicit_bzero(swa->sw_ictx, axf->ctxsize);
1340c0341432SJohn Baldwin 			free(swa->sw_ictx, M_CRYPTO_DATA);
1341c0341432SJohn Baldwin 		}
1342c0341432SJohn Baldwin 		if (swa->sw_octx != NULL) {
1343*3a0b6a93SJohn Baldwin 			explicit_bzero(swa->sw_octx, axf->ctxsize);
1344c0341432SJohn Baldwin 			free(swa->sw_octx, M_CRYPTO_DATA);
1345c0341432SJohn Baldwin 		}
1346091d81d1SSam Leffler 	}
1347091d81d1SSam Leffler }
1348091d81d1SSam Leffler 
1349091d81d1SSam Leffler /*
1350091d81d1SSam Leffler  * Process a software request.
1351091d81d1SSam Leffler  */
1352091d81d1SSam Leffler static int
13536810ad6fSSam Leffler swcr_process(device_t dev, struct cryptop *crp, int hint)
1354091d81d1SSam Leffler {
1355c0341432SJohn Baldwin 	struct swcr_session *ses;
1356091d81d1SSam Leffler 
13571b0909d5SConrad Meyer 	ses = crypto_get_driver_session(crp->crp_session);
1358a7fcb1afSSean Eric Fagan 	mtx_lock(&ses->swcr_lock);
1359091d81d1SSam Leffler 
1360c0341432SJohn Baldwin 	crp->crp_etype = ses->swcr_process(ses, crp);
1361091d81d1SSam Leffler 
1362a7fcb1afSSean Eric Fagan 	mtx_unlock(&ses->swcr_lock);
1363091d81d1SSam Leffler 	crypto_done(crp);
1364c0341432SJohn Baldwin 	return (0);
1365091d81d1SSam Leffler }
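/*
 * Since the driver registers with CRYPTOCAP_F_SYNC, requests complete
 * inline: the handler installed in ses->swcr_process by swcr_newsession()
 * runs under the session lock, and crypto_done() fires before
 * swcr_process() returns to the framework.
 */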
1366091d81d1SSam Leffler 
1367091d81d1SSam Leffler static void
13683f147ab2SWarner Losh swcr_identify(driver_t *drv, device_t parent)
1369091d81d1SSam Leffler {
13706810ad6fSSam Leffler 	/* NB: order 10 is so we get attached after h/w devices */
13716810ad6fSSam Leffler 	if (device_find_child(parent, "cryptosoft", -1) == NULL &&
137286c585d9SMarius Strobl 	    BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
13736810ad6fSSam Leffler 		panic("cryptosoft: could not attach");
13746810ad6fSSam Leffler }
1375f6c4bc3bSPawel Jakub Dawidek 
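/*
 * BUS_PROBE_NOWILDCARD ensures this probe only succeeds for the
 * explicitly named "cryptosoft" child added by swcr_identify().
 */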
13766810ad6fSSam Leffler static int
13776810ad6fSSam Leffler swcr_probe(device_t dev)
13786810ad6fSSam Leffler {
13796810ad6fSSam Leffler 	device_set_desc(dev, "software crypto");
138086c585d9SMarius Strobl 	return (BUS_PROBE_NOWILDCARD);
13816810ad6fSSam Leffler }
1382f6c4bc3bSPawel Jakub Dawidek 
13836810ad6fSSam Leffler static int
13846810ad6fSSam Leffler swcr_attach(device_t dev)
13856810ad6fSSam Leffler {
13866810ad6fSSam Leffler 
13879ebbebe4SConrad Meyer 	swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
13886810ad6fSSam Leffler 			CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
13896810ad6fSSam Leffler 	if (swcr_id < 0) {
13906810ad6fSSam Leffler 		device_printf(dev, "cannot initialize!\n");
1391c0341432SJohn Baldwin 		return (ENXIO);
13926810ad6fSSam Leffler 	}
13936810ad6fSSam Leffler 
1394c0341432SJohn Baldwin 	return (0);
1395091d81d1SSam Leffler }
13964b465da2SPawel Jakub Dawidek 
13973f147ab2SWarner Losh static int
13986810ad6fSSam Leffler swcr_detach(device_t dev)
13994b465da2SPawel Jakub Dawidek {
14006810ad6fSSam Leffler 	crypto_unregister_all(swcr_id);
14013f147ab2SWarner Losh 	return (0);
14024b465da2SPawel Jakub Dawidek }
14036810ad6fSSam Leffler 
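/*
 * Newbus plumbing: swcr_identify() adds a single "cryptosoft" child under
 * nexus, swcr_attach() registers the driver with the crypto framework,
 * and swcr_detach() unregisters it; the cryptodev_* methods below provide
 * the session and request entry points.
 */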
14046810ad6fSSam Leffler static device_method_t swcr_methods[] = {
14056810ad6fSSam Leffler 	DEVMETHOD(device_identify,	swcr_identify),
14066810ad6fSSam Leffler 	DEVMETHOD(device_probe,		swcr_probe),
14076810ad6fSSam Leffler 	DEVMETHOD(device_attach,	swcr_attach),
14086810ad6fSSam Leffler 	DEVMETHOD(device_detach,	swcr_detach),
14096810ad6fSSam Leffler 
1410c0341432SJohn Baldwin 	DEVMETHOD(cryptodev_probesession, swcr_probesession),
14116810ad6fSSam Leffler 	DEVMETHOD(cryptodev_newsession,	swcr_newsession),
14126810ad6fSSam Leffler 	DEVMETHOD(cryptodev_freesession, swcr_freesession),
14136810ad6fSSam Leffler 	DEVMETHOD(cryptodev_process,	swcr_process),
14146810ad6fSSam Leffler 
14156810ad6fSSam Leffler 	{0, 0},
14166810ad6fSSam Leffler };
14176810ad6fSSam Leffler 
14186810ad6fSSam Leffler static driver_t swcr_driver = {
14196810ad6fSSam Leffler 	"cryptosoft",
14206810ad6fSSam Leffler 	swcr_methods,
14216810ad6fSSam Leffler 	0,		/* NB: no softc */
14226810ad6fSSam Leffler };
14236810ad6fSSam Leffler static devclass_t swcr_devclass;
14246810ad6fSSam Leffler 
14256810ad6fSSam Leffler /*
14266810ad6fSSam Leffler  * NB: We explicitly reference the crypto module so we
14276810ad6fSSam Leffler  * get the necessary ordering when built as a loadable
14286810ad6fSSam Leffler  * module.  This is required because we bundle the crypto
14296810ad6fSSam Leffler  * module code together with the cryptosoft driver (otherwise
14306810ad6fSSam Leffler  * normal module dependencies would handle things).
14316810ad6fSSam Leffler  */
14326810ad6fSSam Leffler extern int crypto_modevent(struct module *, int, void *);
14336810ad6fSSam Leffler /* XXX where to attach */
14346810ad6fSSam Leffler DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
14356810ad6fSSam Leffler MODULE_VERSION(cryptosoft, 1);
14366810ad6fSSam Leffler MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);
1437