xref: /freebsd/sys/opencrypto/cryptosoft.c (revision 3e9470482a1357eef90d007b27ec5d9725ae1111)
1091d81d1SSam Leffler /*	$OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $	*/
2091d81d1SSam Leffler 
360727d8bSWarner Losh /*-
4091d81d1SSam Leffler  * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
56810ad6fSSam Leffler  * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
6091d81d1SSam Leffler  *
7091d81d1SSam Leffler  * This code was written by Angelos D. Keromytis in Athens, Greece, in
8091d81d1SSam Leffler  * February 2000. Network Security Technologies Inc. (NSTI) kindly
9091d81d1SSam Leffler  * supported the development of this code.
10091d81d1SSam Leffler  *
11091d81d1SSam Leffler  * Copyright (c) 2000, 2001 Angelos D. Keromytis
1208fca7a5SJohn-Mark Gurney  * Copyright (c) 2014 The FreeBSD Foundation
1308fca7a5SJohn-Mark Gurney  * All rights reserved.
1408fca7a5SJohn-Mark Gurney  *
1508fca7a5SJohn-Mark Gurney  * Portions of this software were developed by John-Mark Gurney
1608fca7a5SJohn-Mark Gurney  * under sponsorship of the FreeBSD Foundation and
1708fca7a5SJohn-Mark Gurney  * Rubicon Communications, LLC (Netgate).
18091d81d1SSam Leffler  *
19091d81d1SSam Leffler  * Permission to use, copy, and modify this software with or without fee
20091d81d1SSam Leffler  * is hereby granted, provided that this entire notice is included in
21091d81d1SSam Leffler  * all source code copies of any software which is or includes a copy or
22091d81d1SSam Leffler  * modification of this software.
23091d81d1SSam Leffler  *
24091d81d1SSam Leffler  * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25091d81d1SSam Leffler  * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26091d81d1SSam Leffler  * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27091d81d1SSam Leffler  * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
28091d81d1SSam Leffler  * PURPOSE.
29091d81d1SSam Leffler  */
30091d81d1SSam Leffler 
312c446514SDavid E. O'Brien #include <sys/cdefs.h>
322c446514SDavid E. O'Brien __FBSDID("$FreeBSD$");
332c446514SDavid E. O'Brien 
34091d81d1SSam Leffler #include <sys/param.h>
35091d81d1SSam Leffler #include <sys/systm.h>
36091d81d1SSam Leffler #include <sys/malloc.h>
37091d81d1SSam Leffler #include <sys/mbuf.h>
386810ad6fSSam Leffler #include <sys/module.h>
39091d81d1SSam Leffler #include <sys/sysctl.h>
40091d81d1SSam Leffler #include <sys/errno.h>
41091d81d1SSam Leffler #include <sys/random.h>
42091d81d1SSam Leffler #include <sys/kernel.h>
43091d81d1SSam Leffler #include <sys/uio.h>
44109919c6SBenno Rice #include <sys/lock.h>
45109919c6SBenno Rice #include <sys/rwlock.h>
4608fca7a5SJohn-Mark Gurney #include <sys/endian.h>
4708fca7a5SJohn-Mark Gurney #include <sys/limits.h>
48a7fcb1afSSean Eric Fagan #include <sys/mutex.h>
49091d81d1SSam Leffler 
50091d81d1SSam Leffler #include <crypto/sha1.h>
51091d81d1SSam Leffler #include <opencrypto/rmd160.h>
52091d81d1SSam Leffler 
53091d81d1SSam Leffler #include <opencrypto/cryptodev.h>
54091d81d1SSam Leffler #include <opencrypto/xform.h>
55091d81d1SSam Leffler 
566810ad6fSSam Leffler #include <sys/kobj.h>
576810ad6fSSam Leffler #include <sys/bus.h>
586810ad6fSSam Leffler #include "cryptodev_if.h"
59091d81d1SSam Leffler 
60c0341432SJohn Baldwin struct swcr_auth {
61c0341432SJohn Baldwin 	void		*sw_ictx;
62c0341432SJohn Baldwin 	void		*sw_octx;
63c0341432SJohn Baldwin 	struct auth_hash *sw_axf;
64c0341432SJohn Baldwin 	uint16_t	sw_mlen;
65c0341432SJohn Baldwin };
66c0341432SJohn Baldwin 
67c0341432SJohn Baldwin struct swcr_encdec {
68*3e947048SJohn Baldwin 	void		*sw_kschedule;
69c0341432SJohn Baldwin 	struct enc_xform *sw_exf;
70c0341432SJohn Baldwin };
71c0341432SJohn Baldwin 
72c0341432SJohn Baldwin struct swcr_compdec {
73c0341432SJohn Baldwin 	struct comp_algo *sw_cxf;
74c0341432SJohn Baldwin };
75c0341432SJohn Baldwin 
76c0341432SJohn Baldwin struct swcr_session {
77c0341432SJohn Baldwin 	struct mtx	swcr_lock;
78c0341432SJohn Baldwin 	int	(*swcr_process)(struct swcr_session *, struct cryptop *);
79c0341432SJohn Baldwin 
80c0341432SJohn Baldwin 	struct swcr_auth swcr_auth;
81c0341432SJohn Baldwin 	struct swcr_encdec swcr_encdec;
82c0341432SJohn Baldwin 	struct swcr_compdec swcr_compdec;
83c0341432SJohn Baldwin };
84507281e5SSean Eric Fagan 
856810ad6fSSam Leffler static	int32_t swcr_id;
866810ad6fSSam Leffler 
871b0909d5SConrad Meyer static	void swcr_freesession(device_t dev, crypto_session_t cses);
88091d81d1SSam Leffler 
89c0341432SJohn Baldwin /* Used for CRYPTO_NULL_CBC. */
90c0341432SJohn Baldwin static int
91c0341432SJohn Baldwin swcr_null(struct swcr_session *ses, struct cryptop *crp)
92c0341432SJohn Baldwin {
93c0341432SJohn Baldwin 
94c0341432SJohn Baldwin 	return (0);
95c0341432SJohn Baldwin }
96c0341432SJohn Baldwin 
97091d81d1SSam Leffler /*
98091d81d1SSam Leffler  * Apply a symmetric encryption/decryption algorithm.
99091d81d1SSam Leffler  */
100091d81d1SSam Leffler static int
101c0341432SJohn Baldwin swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
102091d81d1SSam Leffler {
1035d7ae54aSConrad Meyer 	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
10408fca7a5SJohn-Mark Gurney 	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
105c0341432SJohn Baldwin 	const struct crypto_session_params *csp;
106c0341432SJohn Baldwin 	struct swcr_encdec *sw;
107091d81d1SSam Leffler 	struct enc_xform *exf;
10808fca7a5SJohn-Mark Gurney 	int i, j, k, blks, ind, count, ivlen;
10908fca7a5SJohn-Mark Gurney 	struct uio *uio, uiolcl;
11008fca7a5SJohn-Mark Gurney 	struct iovec iovlcl[4];
11108fca7a5SJohn-Mark Gurney 	struct iovec *iov;
11208fca7a5SJohn-Mark Gurney 	int iovcnt, iovalloc;
11308fca7a5SJohn-Mark Gurney 	int error;
114c0341432SJohn Baldwin 	bool encrypting;
11508fca7a5SJohn-Mark Gurney 
11608fca7a5SJohn-Mark Gurney 	error = 0;
117091d81d1SSam Leffler 
118c0341432SJohn Baldwin 	sw = &ses->swcr_encdec;
119091d81d1SSam Leffler 	exf = sw->sw_exf;
120091d81d1SSam Leffler 	blks = exf->blocksize;
12108fca7a5SJohn-Mark Gurney 	ivlen = exf->ivsize;
122091d81d1SSam Leffler 
123091d81d1SSam Leffler 	/* Check for non-padded data */
124c0341432SJohn Baldwin 	if ((crp->crp_payload_length % blks) != 0)
125091d81d1SSam Leffler 		return EINVAL;
126091d81d1SSam Leffler 
127c0341432SJohn Baldwin 	if (exf == &enc_xform_aes_icm &&
128c0341432SJohn Baldwin 	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
12908fca7a5SJohn-Mark Gurney 		return (EINVAL);
13008fca7a5SJohn-Mark Gurney 
13129fe41ddSJohn Baldwin 	crypto_read_iv(crp, iv);
132091d81d1SSam Leffler 
133c0341432SJohn Baldwin 	if (crp->crp_cipher_key != NULL) {
134c0341432SJohn Baldwin 		csp = crypto_get_params(crp->crp_session);
135*3e947048SJohn Baldwin 		error = exf->setkey(sw->sw_kschedule,
136c0341432SJohn Baldwin 		    crp->crp_cipher_key, csp->csp_cipher_klen);
137c740ae4bSPoul-Henning Kamp 		if (error)
138c740ae4bSPoul-Henning Kamp 			return (error);
139c740ae4bSPoul-Henning Kamp 	}
140d295bdeeSPawel Jakub Dawidek 
14108fca7a5SJohn-Mark Gurney 	iov = iovlcl;
14208fca7a5SJohn-Mark Gurney 	iovcnt = nitems(iovlcl);
14308fca7a5SJohn-Mark Gurney 	iovalloc = 0;
14408fca7a5SJohn-Mark Gurney 	uio = &uiolcl;
145c0341432SJohn Baldwin 	switch (crp->crp_buf_type) {
146c0341432SJohn Baldwin 	case CRYPTO_BUF_MBUF:
147c0341432SJohn Baldwin 		error = crypto_mbuftoiov(crp->crp_mbuf, &iov, &iovcnt,
14808fca7a5SJohn-Mark Gurney 		    &iovalloc);
149748a12e2SJohn-Mark Gurney 		if (error)
150748a12e2SJohn-Mark Gurney 			return (error);
15108fca7a5SJohn-Mark Gurney 		uio->uio_iov = iov;
15208fca7a5SJohn-Mark Gurney 		uio->uio_iovcnt = iovcnt;
153c0341432SJohn Baldwin 		break;
154c0341432SJohn Baldwin 	case CRYPTO_BUF_UIO:
155c0341432SJohn Baldwin 		uio = crp->crp_uio;
156c0341432SJohn Baldwin 		break;
157c0341432SJohn Baldwin 	case CRYPTO_BUF_CONTIG:
158c0341432SJohn Baldwin 		iov[0].iov_base = crp->crp_buf;
159c0341432SJohn Baldwin 		iov[0].iov_len = crp->crp_ilen;
16008fca7a5SJohn-Mark Gurney 		uio->uio_iov = iov;
16108fca7a5SJohn-Mark Gurney 		uio->uio_iovcnt = 1;
162c0341432SJohn Baldwin 		break;
16308fca7a5SJohn-Mark Gurney 	}
16408fca7a5SJohn-Mark Gurney 
165091d81d1SSam Leffler 	ivp = iv;
166091d81d1SSam Leffler 
16708fca7a5SJohn-Mark Gurney 	if (exf->reinit) {
168d295bdeeSPawel Jakub Dawidek 		/*
169d295bdeeSPawel Jakub Dawidek 		 * xforms that provide a reinit method perform all IV
170d295bdeeSPawel Jakub Dawidek 		 * handling themselves.
171d295bdeeSPawel Jakub Dawidek 		 */
172d295bdeeSPawel Jakub Dawidek 		exf->reinit(sw->sw_kschedule, iv);
173091d81d1SSam Leffler 	}
174091d81d1SSam Leffler 
175c0341432SJohn Baldwin 	count = crp->crp_payload_start;
17608fca7a5SJohn-Mark Gurney 	ind = cuio_getptr(uio, count, &k);
17708fca7a5SJohn-Mark Gurney 	if (ind == -1) {
17808fca7a5SJohn-Mark Gurney 		error = EINVAL;
17908fca7a5SJohn-Mark Gurney 		goto out;
180091d81d1SSam Leffler 	}
181091d81d1SSam Leffler 
182c0341432SJohn Baldwin 	i = crp->crp_payload_length;
183c0341432SJohn Baldwin 	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
184091d81d1SSam Leffler 
185091d81d1SSam Leffler 	while (i > 0) {
186091d81d1SSam Leffler 		/*
187091d81d1SSam Leffler 		 * If there's insufficient data at the end of
188091d81d1SSam Leffler 		 * an iovec, we have to do some copying.
189091d81d1SSam Leffler 		 */
19008fca7a5SJohn-Mark Gurney 		if (uio->uio_iov[ind].iov_len < k + blks &&
19108fca7a5SJohn-Mark Gurney 		    uio->uio_iov[ind].iov_len != k) {
19208fca7a5SJohn-Mark Gurney 			cuio_copydata(uio, count, blks, blk);
193091d81d1SSam Leffler 
194091d81d1SSam Leffler 			/* Actual encryption/decryption */
195d295bdeeSPawel Jakub Dawidek 			if (exf->reinit) {
196c0341432SJohn Baldwin 				if (encrypting) {
197*3e947048SJohn Baldwin 					exf->encrypt(sw->sw_kschedule, blk,
198d295bdeeSPawel Jakub Dawidek 					    blk);
199d295bdeeSPawel Jakub Dawidek 				} else {
200*3e947048SJohn Baldwin 					exf->decrypt(sw->sw_kschedule, blk,
201d295bdeeSPawel Jakub Dawidek 					    blk);
202d295bdeeSPawel Jakub Dawidek 				}
203c0341432SJohn Baldwin 			} else if (encrypting) {
204091d81d1SSam Leffler 				/* XOR with previous block */
205091d81d1SSam Leffler 				for (j = 0; j < blks; j++)
206091d81d1SSam Leffler 					blk[j] ^= ivp[j];
207091d81d1SSam Leffler 
208*3e947048SJohn Baldwin 				exf->encrypt(sw->sw_kschedule, blk, blk);
209091d81d1SSam Leffler 
210091d81d1SSam Leffler 				/*
211091d81d1SSam Leffler 				 * Keep encrypted block for XOR'ing
212091d81d1SSam Leffler 				 * with next block
213091d81d1SSam Leffler 				 */
214091d81d1SSam Leffler 				bcopy(blk, iv, blks);
215091d81d1SSam Leffler 				ivp = iv;
216091d81d1SSam Leffler 			} else {	/* decrypt */
217091d81d1SSam Leffler 				/*
218091d81d1SSam Leffler 				 * Keep encrypted block for XOR'ing
219091d81d1SSam Leffler 				 * with next block
220091d81d1SSam Leffler 				 */
22108fca7a5SJohn-Mark Gurney 				nivp = (ivp == iv) ? iv2 : iv;
22208fca7a5SJohn-Mark Gurney 				bcopy(blk, nivp, blks);
223091d81d1SSam Leffler 
224*3e947048SJohn Baldwin 				exf->decrypt(sw->sw_kschedule, blk, blk);
225091d81d1SSam Leffler 
226091d81d1SSam Leffler 				/* XOR with previous block */
227091d81d1SSam Leffler 				for (j = 0; j < blks; j++)
228091d81d1SSam Leffler 					blk[j] ^= ivp[j];
229091d81d1SSam Leffler 
23008fca7a5SJohn-Mark Gurney 				ivp = nivp;
231091d81d1SSam Leffler 			}
232091d81d1SSam Leffler 
233091d81d1SSam Leffler 			/* Copy back decrypted block */
23408fca7a5SJohn-Mark Gurney 			cuio_copyback(uio, count, blks, blk);
23508fca7a5SJohn-Mark Gurney 
23608fca7a5SJohn-Mark Gurney 			count += blks;
237091d81d1SSam Leffler 
238091d81d1SSam Leffler 			/* Advance pointer */
23908fca7a5SJohn-Mark Gurney 			ind = cuio_getptr(uio, count, &k);
24008fca7a5SJohn-Mark Gurney 			if (ind == -1) {
24108fca7a5SJohn-Mark Gurney 				error = EINVAL;
24208fca7a5SJohn-Mark Gurney 				goto out;
24308fca7a5SJohn-Mark Gurney 			}
244091d81d1SSam Leffler 
245091d81d1SSam Leffler 			i -= blks;
246091d81d1SSam Leffler 
247091d81d1SSam Leffler 			/* Could be done... */
248091d81d1SSam Leffler 			if (i == 0)
249091d81d1SSam Leffler 				break;
250091d81d1SSam Leffler 		}
251091d81d1SSam Leffler 
2522f1f9cceSConrad Meyer 		while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
2535d7ae54aSConrad Meyer 			uint8_t *idat;
2542f1f9cceSConrad Meyer 			size_t nb, rem;
2552f1f9cceSConrad Meyer 
2562f1f9cceSConrad Meyer 			nb = blks;
257179b21e8SConrad Meyer 			rem = MIN((size_t)i,
258179b21e8SConrad Meyer 			    uio->uio_iov[ind].iov_len - (size_t)k);
2595d7ae54aSConrad Meyer 			idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;
260091d81d1SSam Leffler 
261d295bdeeSPawel Jakub Dawidek 			if (exf->reinit) {
262c0341432SJohn Baldwin 				if (encrypting && exf->encrypt_multi == NULL)
263d295bdeeSPawel Jakub Dawidek 					exf->encrypt(sw->sw_kschedule,
264*3e947048SJohn Baldwin 					    idat, idat);
265c0341432SJohn Baldwin 				else if (encrypting) {
2662f1f9cceSConrad Meyer 					nb = rounddown(rem, blks);
2672f1f9cceSConrad Meyer 					exf->encrypt_multi(sw->sw_kschedule,
268*3e947048SJohn Baldwin 					    idat, idat, nb);
2692f1f9cceSConrad Meyer 				} else if (exf->decrypt_multi == NULL)
270d295bdeeSPawel Jakub Dawidek 					exf->decrypt(sw->sw_kschedule,
271*3e947048SJohn Baldwin 					    idat, idat);
2722f1f9cceSConrad Meyer 				else {
2732f1f9cceSConrad Meyer 					nb = rounddown(rem, blks);
2742f1f9cceSConrad Meyer 					exf->decrypt_multi(sw->sw_kschedule,
275*3e947048SJohn Baldwin 					    idat, idat, nb);
276d295bdeeSPawel Jakub Dawidek 				}
277c0341432SJohn Baldwin 			} else if (encrypting) {
278091d81d1SSam Leffler 				/* XOR with previous block/IV */
279091d81d1SSam Leffler 				for (j = 0; j < blks; j++)
280091d81d1SSam Leffler 					idat[j] ^= ivp[j];
281091d81d1SSam Leffler 
282*3e947048SJohn Baldwin 				exf->encrypt(sw->sw_kschedule, idat, idat);
283091d81d1SSam Leffler 				ivp = idat;
284091d81d1SSam Leffler 			} else {	/* decrypt */
285091d81d1SSam Leffler 				/*
286091d81d1SSam Leffler 				 * Keep encrypted block to be used
287091d81d1SSam Leffler 				 * in next block's processing.
288091d81d1SSam Leffler 				 */
28908fca7a5SJohn-Mark Gurney 				nivp = (ivp == iv) ? iv2 : iv;
29008fca7a5SJohn-Mark Gurney 				bcopy(idat, nivp, blks);
291091d81d1SSam Leffler 
292*3e947048SJohn Baldwin 				exf->decrypt(sw->sw_kschedule, idat, idat);
293091d81d1SSam Leffler 
294091d81d1SSam Leffler 				/* XOR with previous block/IV */
295091d81d1SSam Leffler 				for (j = 0; j < blks; j++)
296091d81d1SSam Leffler 					idat[j] ^= ivp[j];
297091d81d1SSam Leffler 
29808fca7a5SJohn-Mark Gurney 				ivp = nivp;
299091d81d1SSam Leffler 			}
300091d81d1SSam Leffler 
3012f1f9cceSConrad Meyer 			count += nb;
3022f1f9cceSConrad Meyer 			k += nb;
3032f1f9cceSConrad Meyer 			i -= nb;
304091d81d1SSam Leffler 		}
305091d81d1SSam Leffler 
306f34a967bSPawel Jakub Dawidek 		/*
30708fca7a5SJohn-Mark Gurney 		 * Advance to the next iov if the end of the current iov
30808fca7a5SJohn-Mark Gurney 		 * is aligned with the end of a cipher block.
30908fca7a5SJohn-Mark Gurney 		 * Note that the code is equivalent to calling:
31008fca7a5SJohn-Mark Gurney 		 *      ind = cuio_getptr(uio, count, &k);
311f34a967bSPawel Jakub Dawidek 		 */
31208fca7a5SJohn-Mark Gurney 		if (i > 0 && k == uio->uio_iov[ind].iov_len) {
31308fca7a5SJohn-Mark Gurney 			k = 0;
31408fca7a5SJohn-Mark Gurney 			ind++;
31508fca7a5SJohn-Mark Gurney 			if (ind >= uio->uio_iovcnt) {
31608fca7a5SJohn-Mark Gurney 				error = EINVAL;
31708fca7a5SJohn-Mark Gurney 				goto out;
31808fca7a5SJohn-Mark Gurney 			}
319f34a967bSPawel Jakub Dawidek 		}
320f34a967bSPawel Jakub Dawidek 	}
321f34a967bSPawel Jakub Dawidek 
32208fca7a5SJohn-Mark Gurney out:
32308fca7a5SJohn-Mark Gurney 	if (iovalloc)
32408fca7a5SJohn-Mark Gurney 		free(iov, M_CRYPTO_DATA);
325091d81d1SSam Leffler 
32608fca7a5SJohn-Mark Gurney 	return (error);
327091d81d1SSam Leffler }
328091d81d1SSam Leffler 
329c0341432SJohn Baldwin static void
330c0341432SJohn Baldwin swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
331c0341432SJohn Baldwin     const uint8_t *key, int klen)
332f6c4bc3bSPawel Jakub Dawidek {
333f6c4bc3bSPawel Jakub Dawidek 
334f6c4bc3bSPawel Jakub Dawidek 	switch (axf->type) {
335f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA1_HMAC:
336c97f39ceSConrad Meyer 	case CRYPTO_SHA2_224_HMAC:
337f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_256_HMAC:
338f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_384_HMAC:
339f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_512_HMAC:
340f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_NULL_HMAC:
341f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_RIPEMD160_HMAC:
342c0341432SJohn Baldwin 		hmac_init_ipad(axf, key, klen, sw->sw_ictx);
343c0341432SJohn Baldwin 		hmac_init_opad(axf, key, klen, sw->sw_octx);
344f6c4bc3bSPawel Jakub Dawidek 		break;
34525b7033bSConrad Meyer 	case CRYPTO_POLY1305:
3460e33efe4SConrad Meyer 	case CRYPTO_BLAKE2B:
3470e33efe4SConrad Meyer 	case CRYPTO_BLAKE2S:
3480e33efe4SConrad Meyer 		axf->Setkey(sw->sw_ictx, key, klen);
3490e33efe4SConrad Meyer 		axf->Init(sw->sw_ictx);
3500e33efe4SConrad Meyer 		break;
351f6c4bc3bSPawel Jakub Dawidek 	default:
352c0341432SJohn Baldwin 		panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
353f6c4bc3bSPawel Jakub Dawidek 	}
354f6c4bc3bSPawel Jakub Dawidek }
355f6c4bc3bSPawel Jakub Dawidek 
356091d81d1SSam Leffler /*
357c0341432SJohn Baldwin  * Compute or verify hash.
358091d81d1SSam Leffler  */
359091d81d1SSam Leffler static int
360c0341432SJohn Baldwin swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
361091d81d1SSam Leffler {
362c0341432SJohn Baldwin 	u_char aalg[HASH_MAX_LEN];
363c0341432SJohn Baldwin 	u_char uaalg[HASH_MAX_LEN];
364c0341432SJohn Baldwin 	const struct crypto_session_params *csp;
365c0341432SJohn Baldwin 	struct swcr_auth *sw;
366091d81d1SSam Leffler 	struct auth_hash *axf;
367091d81d1SSam Leffler 	union authctx ctx;
368091d81d1SSam Leffler 	int err;
369091d81d1SSam Leffler 
370c0341432SJohn Baldwin 	sw = &ses->swcr_auth;
371091d81d1SSam Leffler 
372091d81d1SSam Leffler 	axf = sw->sw_axf;
373091d81d1SSam Leffler 
374c0341432SJohn Baldwin 	if (crp->crp_auth_key != NULL) {
375c0341432SJohn Baldwin 		csp = crypto_get_params(crp->crp_session);
376c0341432SJohn Baldwin 		swcr_authprepare(axf, sw, crp->crp_auth_key,
377c0341432SJohn Baldwin 		    csp->csp_auth_klen);
37825b7033bSConrad Meyer 	}
379f6c4bc3bSPawel Jakub Dawidek 
380091d81d1SSam Leffler 	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);
381091d81d1SSam Leffler 
382c0341432SJohn Baldwin 	err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
383c0341432SJohn Baldwin 	    (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
384091d81d1SSam Leffler 	if (err)
385091d81d1SSam Leffler 		return err;
386091d81d1SSam Leffler 
387c0341432SJohn Baldwin 	err = crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
388c0341432SJohn Baldwin 	    (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
389c0341432SJohn Baldwin 	if (err)
390c0341432SJohn Baldwin 		return err;
391c0341432SJohn Baldwin 
392c0341432SJohn Baldwin 	switch (axf->type) {
393c4729f6eSConrad Meyer 	case CRYPTO_SHA1:
394c4729f6eSConrad Meyer 	case CRYPTO_SHA2_224:
395c4729f6eSConrad Meyer 	case CRYPTO_SHA2_256:
396c4729f6eSConrad Meyer 	case CRYPTO_SHA2_384:
397c4729f6eSConrad Meyer 	case CRYPTO_SHA2_512:
398c4729f6eSConrad Meyer 		axf->Final(aalg, &ctx);
399c4729f6eSConrad Meyer 		break;
400c4729f6eSConrad Meyer 
401091d81d1SSam Leffler 	case CRYPTO_SHA1_HMAC:
402c97f39ceSConrad Meyer 	case CRYPTO_SHA2_224_HMAC:
403f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_256_HMAC:
404f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_384_HMAC:
405f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_512_HMAC:
406091d81d1SSam Leffler 	case CRYPTO_RIPEMD160_HMAC:
407091d81d1SSam Leffler 		if (sw->sw_octx == NULL)
408091d81d1SSam Leffler 			return EINVAL;
409091d81d1SSam Leffler 
410091d81d1SSam Leffler 		axf->Final(aalg, &ctx);
411091d81d1SSam Leffler 		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
412091d81d1SSam Leffler 		axf->Update(&ctx, aalg, axf->hashsize);
413091d81d1SSam Leffler 		axf->Final(aalg, &ctx);
414091d81d1SSam Leffler 		break;
415091d81d1SSam Leffler 
4160e33efe4SConrad Meyer 	case CRYPTO_BLAKE2B:
4170e33efe4SConrad Meyer 	case CRYPTO_BLAKE2S:
418091d81d1SSam Leffler 	case CRYPTO_NULL_HMAC:
41925b7033bSConrad Meyer 	case CRYPTO_POLY1305:
420091d81d1SSam Leffler 		axf->Final(aalg, &ctx);
421091d81d1SSam Leffler 		break;
422091d81d1SSam Leffler 	}
423091d81d1SSam Leffler 
424c0341432SJohn Baldwin 	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
425c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
426c0341432SJohn Baldwin 		if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
427c0341432SJohn Baldwin 			return (EBADMSG);
428c0341432SJohn Baldwin 	} else {
429091d81d1SSam Leffler 		/* Inject the authentication data */
430c0341432SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
431c0341432SJohn Baldwin 	}
432c0341432SJohn Baldwin 	return (0);
433091d81d1SSam Leffler }
434091d81d1SSam Leffler 
43508fca7a5SJohn-Mark Gurney CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
43608fca7a5SJohn-Mark Gurney CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */
43708fca7a5SJohn-Mark Gurney 
43808fca7a5SJohn-Mark Gurney static int
439c0341432SJohn Baldwin swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
44008fca7a5SJohn-Mark Gurney {
44108fca7a5SJohn-Mark Gurney 	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
44208fca7a5SJohn-Mark Gurney 	u_char *blk = (u_char *)blkbuf;
44308fca7a5SJohn-Mark Gurney 	u_char aalg[AALG_MAX_RESULT_LEN];
44408fca7a5SJohn-Mark Gurney 	u_char uaalg[AALG_MAX_RESULT_LEN];
44508fca7a5SJohn-Mark Gurney 	u_char iv[EALG_MAX_BLOCK_LEN];
44608fca7a5SJohn-Mark Gurney 	union authctx ctx;
447c0341432SJohn Baldwin 	struct swcr_auth *swa;
448c0341432SJohn Baldwin 	struct auth_hash *axf;
44908fca7a5SJohn-Mark Gurney 	uint32_t *blkp;
450c0341432SJohn Baldwin 	int blksz, i, ivlen, len;
45108fca7a5SJohn-Mark Gurney 
452c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
45308fca7a5SJohn-Mark Gurney 	axf = swa->sw_axf;
454c0341432SJohn Baldwin 
45508fca7a5SJohn-Mark Gurney 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
45608fca7a5SJohn-Mark Gurney 	blksz = axf->blocksize;
45708fca7a5SJohn-Mark Gurney 
45808fca7a5SJohn-Mark Gurney 	/* Initialize the IV */
459c0341432SJohn Baldwin 	ivlen = AES_GCM_IV_LEN;
46029fe41ddSJohn Baldwin 	crypto_read_iv(crp, iv);
46108fca7a5SJohn-Mark Gurney 
46208fca7a5SJohn-Mark Gurney 	axf->Reinit(&ctx, iv, ivlen);
463c0341432SJohn Baldwin 	for (i = 0; i < crp->crp_payload_length; i += blksz) {
464c0341432SJohn Baldwin 		len = MIN(crp->crp_payload_length - i, blksz);
465c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
466c0341432SJohn Baldwin 		bzero(blk + len, blksz - len);
46708fca7a5SJohn-Mark Gurney 		axf->Update(&ctx, blk, blksz);
46808fca7a5SJohn-Mark Gurney 	}
46908fca7a5SJohn-Mark Gurney 
47008fca7a5SJohn-Mark Gurney 	/* length block */
47108fca7a5SJohn-Mark Gurney 	bzero(blk, blksz);
47208fca7a5SJohn-Mark Gurney 	blkp = (uint32_t *)blk + 1;
473c0341432SJohn Baldwin 	*blkp = htobe32(crp->crp_payload_length * 8);
47408fca7a5SJohn-Mark Gurney 	axf->Update(&ctx, blk, blksz);
475c0341432SJohn Baldwin 
476c0341432SJohn Baldwin 	/* Finalize MAC */
477c0341432SJohn Baldwin 	axf->Final(aalg, &ctx);
478c0341432SJohn Baldwin 
479c0341432SJohn Baldwin 	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
480c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
481c0341432SJohn Baldwin 		    uaalg);
482c0341432SJohn Baldwin 		if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
483c0341432SJohn Baldwin 			return (EBADMSG);
484c0341432SJohn Baldwin 	} else {
485c0341432SJohn Baldwin 		/* Inject the authentication data */
486c0341432SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
487c0341432SJohn Baldwin 	}
488c0341432SJohn Baldwin 	return (0);
489c0341432SJohn Baldwin }
490c0341432SJohn Baldwin 
491c0341432SJohn Baldwin static int
492c0341432SJohn Baldwin swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
493c0341432SJohn Baldwin {
494c0341432SJohn Baldwin 	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
495c0341432SJohn Baldwin 	u_char *blk = (u_char *)blkbuf;
496c0341432SJohn Baldwin 	u_char aalg[AALG_MAX_RESULT_LEN];
497c0341432SJohn Baldwin 	u_char uaalg[AALG_MAX_RESULT_LEN];
498c0341432SJohn Baldwin 	u_char iv[EALG_MAX_BLOCK_LEN];
499c0341432SJohn Baldwin 	union authctx ctx;
500c0341432SJohn Baldwin 	struct swcr_auth *swa;
501c0341432SJohn Baldwin 	struct swcr_encdec *swe;
502c0341432SJohn Baldwin 	struct auth_hash *axf;
503c0341432SJohn Baldwin 	struct enc_xform *exf;
504c0341432SJohn Baldwin 	uint32_t *blkp;
505c0341432SJohn Baldwin 	int blksz, i, ivlen, len, r;
506c0341432SJohn Baldwin 
507c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
508c0341432SJohn Baldwin 	axf = swa->sw_axf;
509c0341432SJohn Baldwin 
510c0341432SJohn Baldwin 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
511c0341432SJohn Baldwin 	blksz = axf->blocksize;
512c0341432SJohn Baldwin 
513c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
514c0341432SJohn Baldwin 	exf = swe->sw_exf;
515c0341432SJohn Baldwin 
516c0341432SJohn Baldwin 	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
517c0341432SJohn Baldwin 		return (EINVAL);
518c0341432SJohn Baldwin 
519c0341432SJohn Baldwin 	/* Initialize the IV */
520c0341432SJohn Baldwin 	ivlen = AES_GCM_IV_LEN;
521c0341432SJohn Baldwin 	bcopy(crp->crp_iv, iv, ivlen);
522c0341432SJohn Baldwin 
523c0341432SJohn Baldwin 	/* Supply MAC with IV */
524c0341432SJohn Baldwin 	axf->Reinit(&ctx, iv, ivlen);
525c0341432SJohn Baldwin 
526c0341432SJohn Baldwin 	/* Supply MAC with AAD */
527c0341432SJohn Baldwin 	for (i = 0; i < crp->crp_aad_length; i += blksz) {
528c0341432SJohn Baldwin 		len = MIN(crp->crp_aad_length - i, blksz);
529c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_aad_start + i, len, blk);
530c0341432SJohn Baldwin 		bzero(blk + len, blksz - len);
531c0341432SJohn Baldwin 		axf->Update(&ctx, blk, blksz);
532c0341432SJohn Baldwin 	}
533c0341432SJohn Baldwin 
534c0341432SJohn Baldwin 	exf->reinit(swe->sw_kschedule, iv);
535c0341432SJohn Baldwin 
536c0341432SJohn Baldwin 	/* Do encryption with MAC */
537c0341432SJohn Baldwin 	for (i = 0; i < crp->crp_payload_length; i += len) {
538c0341432SJohn Baldwin 		len = MIN(crp->crp_payload_length - i, blksz);
539c0341432SJohn Baldwin 		if (len < blksz)
540c0341432SJohn Baldwin 			bzero(blk, blksz);
541c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
542c0341432SJohn Baldwin 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
543*3e947048SJohn Baldwin 			exf->encrypt(swe->sw_kschedule, blk, blk);
544c0341432SJohn Baldwin 			axf->Update(&ctx, blk, len);
545c0341432SJohn Baldwin 			crypto_copyback(crp, crp->crp_payload_start + i, len,
546c0341432SJohn Baldwin 			    blk);
547c0341432SJohn Baldwin 		} else {
548c0341432SJohn Baldwin 			axf->Update(&ctx, blk, len);
549c0341432SJohn Baldwin 		}
550c0341432SJohn Baldwin 	}
551c0341432SJohn Baldwin 
552c0341432SJohn Baldwin 	/* length block */
553c0341432SJohn Baldwin 	bzero(blk, blksz);
554c0341432SJohn Baldwin 	blkp = (uint32_t *)blk + 1;
555c0341432SJohn Baldwin 	*blkp = htobe32(crp->crp_aad_length * 8);
556c0341432SJohn Baldwin 	blkp = (uint32_t *)blk + 3;
557c0341432SJohn Baldwin 	*blkp = htobe32(crp->crp_payload_length * 8);
558c0341432SJohn Baldwin 	axf->Update(&ctx, blk, blksz);
559c0341432SJohn Baldwin 
560c0341432SJohn Baldwin 	/* Finalize MAC */
561c0341432SJohn Baldwin 	axf->Final(aalg, &ctx);
562c0341432SJohn Baldwin 
563c0341432SJohn Baldwin 	/* Validate tag */
564c0341432SJohn Baldwin 	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
565c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
566c0341432SJohn Baldwin 		    uaalg);
567c0341432SJohn Baldwin 
568c0341432SJohn Baldwin 		r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
569c0341432SJohn Baldwin 		if (r != 0)
570c0341432SJohn Baldwin 			return (EBADMSG);
571c0341432SJohn Baldwin 
572c0341432SJohn Baldwin 		/* tag matches, decrypt data */
573c0341432SJohn Baldwin 		for (i = 0; i < crp->crp_payload_length; i += blksz) {
574c0341432SJohn Baldwin 			len = MIN(crp->crp_payload_length - i, blksz);
575c0341432SJohn Baldwin 			if (len < blksz)
576c0341432SJohn Baldwin 				bzero(blk, blksz);
577c0341432SJohn Baldwin 			crypto_copydata(crp, crp->crp_payload_start + i, len,
578c0341432SJohn Baldwin 			    blk);
579*3e947048SJohn Baldwin 			exf->decrypt(swe->sw_kschedule, blk, blk);
580c0341432SJohn Baldwin 			crypto_copyback(crp, crp->crp_payload_start + i, len,
581c0341432SJohn Baldwin 			    blk);
582c0341432SJohn Baldwin 		}
583c0341432SJohn Baldwin 	} else {
584c0341432SJohn Baldwin 		/* Inject the authentication data */
585c0341432SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
586c0341432SJohn Baldwin 		    aalg);
587c0341432SJohn Baldwin 	}
588c0341432SJohn Baldwin 
589c0341432SJohn Baldwin 	return (0);
590c0341432SJohn Baldwin }
591c0341432SJohn Baldwin 
592c0341432SJohn Baldwin static int
593c0341432SJohn Baldwin swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
594c0341432SJohn Baldwin {
595c0341432SJohn Baldwin 	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
596c0341432SJohn Baldwin 	u_char *blk = (u_char *)blkbuf;
597c0341432SJohn Baldwin 	u_char aalg[AALG_MAX_RESULT_LEN];
598c0341432SJohn Baldwin 	u_char uaalg[AALG_MAX_RESULT_LEN];
599c0341432SJohn Baldwin 	u_char iv[EALG_MAX_BLOCK_LEN];
600c0341432SJohn Baldwin 	union authctx ctx;
601c0341432SJohn Baldwin 	struct swcr_auth *swa;
602c0341432SJohn Baldwin 	struct auth_hash *axf;
603c0341432SJohn Baldwin 	int blksz, i, ivlen, len;
604c0341432SJohn Baldwin 
605c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
606c0341432SJohn Baldwin 	axf = swa->sw_axf;
607c0341432SJohn Baldwin 
608c0341432SJohn Baldwin 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
609c0341432SJohn Baldwin 	blksz = axf->blocksize;
610c0341432SJohn Baldwin 
611c0341432SJohn Baldwin 	/* Initialize the IV */
612c0341432SJohn Baldwin 	ivlen = AES_CCM_IV_LEN;
61329fe41ddSJohn Baldwin 	crypto_read_iv(crp, iv);
614c0341432SJohn Baldwin 
615c0341432SJohn Baldwin 	/*
616c0341432SJohn Baldwin 	 * AES CCM-CBC-MAC needs to know the length of both the auth
617c0341432SJohn Baldwin 	 * data and payload data before doing the auth computation.
618c0341432SJohn Baldwin 	 */
619c0341432SJohn Baldwin 	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
620c0341432SJohn Baldwin 	ctx.aes_cbc_mac_ctx.cryptDataLength = 0;
621c0341432SJohn Baldwin 
622c0341432SJohn Baldwin 	axf->Reinit(&ctx, iv, ivlen);
623c0341432SJohn Baldwin 	for (i = 0; i < crp->crp_payload_length; i += blksz) {
624c0341432SJohn Baldwin 		len = MIN(crp->crp_payload_length - i, blksz);
625c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
626c0341432SJohn Baldwin 		bzero(blk + len, blksz - len);
627c0341432SJohn Baldwin 		axf->Update(&ctx, blk, blksz);
628c0341432SJohn Baldwin 	}
629c0341432SJohn Baldwin 
630c0341432SJohn Baldwin 	/* Finalize MAC */
631c0341432SJohn Baldwin 	axf->Final(aalg, &ctx);
632c0341432SJohn Baldwin 
633c0341432SJohn Baldwin 	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
634c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
635c0341432SJohn Baldwin 		    uaalg);
636c0341432SJohn Baldwin 		if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
637c0341432SJohn Baldwin 			return (EBADMSG);
638c0341432SJohn Baldwin 	} else {
639c0341432SJohn Baldwin 		/* Inject the authentication data */
640c0341432SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
641c0341432SJohn Baldwin 	}
642c0341432SJohn Baldwin 	return (0);
643c0341432SJohn Baldwin }
644c0341432SJohn Baldwin 
645c0341432SJohn Baldwin static int
646c0341432SJohn Baldwin swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
647c0341432SJohn Baldwin {
648c0341432SJohn Baldwin 	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
649c0341432SJohn Baldwin 	u_char *blk = (u_char *)blkbuf;
650c0341432SJohn Baldwin 	u_char aalg[AALG_MAX_RESULT_LEN];
651c0341432SJohn Baldwin 	u_char uaalg[AALG_MAX_RESULT_LEN];
652c0341432SJohn Baldwin 	u_char iv[EALG_MAX_BLOCK_LEN];
653c0341432SJohn Baldwin 	union authctx ctx;
654c0341432SJohn Baldwin 	struct swcr_auth *swa;
655c0341432SJohn Baldwin 	struct swcr_encdec *swe;
656c0341432SJohn Baldwin 	struct auth_hash *axf;
657c0341432SJohn Baldwin 	struct enc_xform *exf;
658c0341432SJohn Baldwin 	int blksz, i, ivlen, len, r;
659c0341432SJohn Baldwin 
660c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
661c0341432SJohn Baldwin 	axf = swa->sw_axf;
662c0341432SJohn Baldwin 
663c0341432SJohn Baldwin 	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
664c0341432SJohn Baldwin 	blksz = axf->blocksize;
665c0341432SJohn Baldwin 
666c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
667c0341432SJohn Baldwin 	exf = swe->sw_exf;
668c0341432SJohn Baldwin 
669c0341432SJohn Baldwin 	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
670c0341432SJohn Baldwin 		return (EINVAL);
671c0341432SJohn Baldwin 
672c0341432SJohn Baldwin 	/* Initialize the IV */
673c0341432SJohn Baldwin 	ivlen = AES_CCM_IV_LEN;
674c0341432SJohn Baldwin 	bcopy(crp->crp_iv, iv, ivlen);
675c0341432SJohn Baldwin 
676c0341432SJohn Baldwin 	/*
677c0341432SJohn Baldwin 	 * AES CCM-CBC-MAC needs to know the length of both the auth
678c0341432SJohn Baldwin 	 * data and payload data before doing the auth computation.
679c0341432SJohn Baldwin 	 */
680c0341432SJohn Baldwin 	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
681c0341432SJohn Baldwin 	ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;
682c0341432SJohn Baldwin 
683c0341432SJohn Baldwin 	/* Supply MAC with IV */
684c0341432SJohn Baldwin 	axf->Reinit(&ctx, iv, ivlen);
685c0341432SJohn Baldwin 
686c0341432SJohn Baldwin 	/* Supply MAC with AAD */
687c0341432SJohn Baldwin 	for (i = 0; i < crp->crp_aad_length; i += blksz) {
688c0341432SJohn Baldwin 		len = MIN(crp->crp_aad_length - i, blksz);
689c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_aad_start + i, len, blk);
690c0341432SJohn Baldwin 		bzero(blk + len, blksz - len);
691c0341432SJohn Baldwin 		axf->Update(&ctx, blk, blksz);
692c0341432SJohn Baldwin 	}
693c0341432SJohn Baldwin 
694c0341432SJohn Baldwin 	exf->reinit(swe->sw_kschedule, iv);
695c0341432SJohn Baldwin 
696c0341432SJohn Baldwin 	/* Do encryption/decryption with MAC */
697c0341432SJohn Baldwin 	for (i = 0; i < crp->crp_payload_length; i += len) {
698c0341432SJohn Baldwin 		len = MIN(crp->crp_payload_length - i, blksz);
699c0341432SJohn Baldwin 		if (len < blksz)
700c0341432SJohn Baldwin 			bzero(blk, blksz);
701c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
702c0341432SJohn Baldwin 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
703c0341432SJohn Baldwin 			axf->Update(&ctx, blk, len);
704*3e947048SJohn Baldwin 			exf->encrypt(swe->sw_kschedule, blk, blk);
705c0341432SJohn Baldwin 			crypto_copyback(crp, crp->crp_payload_start + i, len,
706c0341432SJohn Baldwin 			    blk);
707c0341432SJohn Baldwin 		} else {
708c0341432SJohn Baldwin 			/*
709c0341432SJohn Baldwin 			 * One of the problems with CCM+CBC is that
710c0341432SJohn Baldwin 			 * the authentication is done on the
711c0341432SJohn Baldwin 			 * unecncrypted data.  As a result, we have to
712c0341432SJohn Baldwin 			 * decrypt the data twice: once to generate
713c0341432SJohn Baldwin 			 * the tag and a second time after the tag is
714c0341432SJohn Baldwin 			 * verified.
715c0341432SJohn Baldwin 			 */
716*3e947048SJohn Baldwin 			exf->decrypt(swe->sw_kschedule, blk, blk);
717c0341432SJohn Baldwin 			axf->Update(&ctx, blk, len);
718c0341432SJohn Baldwin 		}
71908fca7a5SJohn-Mark Gurney 	}
72008fca7a5SJohn-Mark Gurney 
72108fca7a5SJohn-Mark Gurney 	/* Finalize MAC */
72208fca7a5SJohn-Mark Gurney 	axf->Final(aalg, &ctx);
72308fca7a5SJohn-Mark Gurney 
72408fca7a5SJohn-Mark Gurney 	/* Validate tag */
725c0341432SJohn Baldwin 	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
726c0341432SJohn Baldwin 		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
727c0341432SJohn Baldwin 		    uaalg);
72808fca7a5SJohn-Mark Gurney 
729c0341432SJohn Baldwin 		r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
730c0341432SJohn Baldwin 		if (r != 0)
731c0341432SJohn Baldwin 			return (EBADMSG);
732c0341432SJohn Baldwin 
73308fca7a5SJohn-Mark Gurney 		/* tag matches, decrypt data */
734507281e5SSean Eric Fagan 		exf->reinit(swe->sw_kschedule, iv);
735c0341432SJohn Baldwin 		for (i = 0; i < crp->crp_payload_length; i += blksz) {
736c0341432SJohn Baldwin 			len = MIN(crp->crp_payload_length - i, blksz);
73708fca7a5SJohn-Mark Gurney 			if (len < blksz)
73808fca7a5SJohn-Mark Gurney 				bzero(blk, blksz);
739c0341432SJohn Baldwin 			crypto_copydata(crp, crp->crp_payload_start + i, len,
740c0341432SJohn Baldwin 			    blk);
741*3e947048SJohn Baldwin 			exf->decrypt(swe->sw_kschedule, blk, blk);
742c0341432SJohn Baldwin 			crypto_copyback(crp, crp->crp_payload_start + i, len,
743c0341432SJohn Baldwin 			    blk);
74408fca7a5SJohn-Mark Gurney 		}
74508fca7a5SJohn-Mark Gurney 	} else {
74608fca7a5SJohn-Mark Gurney 		/* Inject the authentication data */
747c0341432SJohn Baldwin 		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
748c0341432SJohn Baldwin 		    aalg);
74908fca7a5SJohn-Mark Gurney 	}
75008fca7a5SJohn-Mark Gurney 
75108fca7a5SJohn-Mark Gurney 	return (0);
75208fca7a5SJohn-Mark Gurney }
75308fca7a5SJohn-Mark Gurney 
754091d81d1SSam Leffler /*
755c0341432SJohn Baldwin  * Apply a cipher and a digest to perform EtA.
756c0341432SJohn Baldwin  */
757c0341432SJohn Baldwin static int
758c0341432SJohn Baldwin swcr_eta(struct swcr_session *ses, struct cryptop *crp)
759c0341432SJohn Baldwin {
760c0341432SJohn Baldwin 	int error;
761c0341432SJohn Baldwin 
762c0341432SJohn Baldwin 	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
763c0341432SJohn Baldwin 		error = swcr_encdec(ses, crp);
764c0341432SJohn Baldwin 		if (error == 0)
765c0341432SJohn Baldwin 			error = swcr_authcompute(ses, crp);
766c0341432SJohn Baldwin 	} else {
767c0341432SJohn Baldwin 		error = swcr_authcompute(ses, crp);
768c0341432SJohn Baldwin 		if (error == 0)
769c0341432SJohn Baldwin 			error = swcr_encdec(ses, crp);
770c0341432SJohn Baldwin 	}
771c0341432SJohn Baldwin 	return (error);
772c0341432SJohn Baldwin }
773c0341432SJohn Baldwin 
774c0341432SJohn Baldwin /*
775091d81d1SSam Leffler  * Apply a compression/decompression algorithm
776091d81d1SSam Leffler  */
777091d81d1SSam Leffler static int
778c0341432SJohn Baldwin swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
779091d81d1SSam Leffler {
780091d81d1SSam Leffler 	u_int8_t *data, *out;
781091d81d1SSam Leffler 	struct comp_algo *cxf;
782091d81d1SSam Leffler 	int adj;
783091d81d1SSam Leffler 	u_int32_t result;
784091d81d1SSam Leffler 
785c0341432SJohn Baldwin 	cxf = ses->swcr_compdec.sw_cxf;
786091d81d1SSam Leffler 
787091d81d1SSam Leffler 	/* We must handle the whole buffer of data in one time
788091d81d1SSam Leffler 	 * then if there is not all the data in the mbuf, we must
789091d81d1SSam Leffler 	 * copy in a buffer.
790091d81d1SSam Leffler 	 */
791091d81d1SSam Leffler 
792c0341432SJohn Baldwin 	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA,  M_NOWAIT);
793091d81d1SSam Leffler 	if (data == NULL)
794091d81d1SSam Leffler 		return (EINVAL);
795c0341432SJohn Baldwin 	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
796c0341432SJohn Baldwin 	    data);
797091d81d1SSam Leffler 
798c0341432SJohn Baldwin 	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
799c0341432SJohn Baldwin 		result = cxf->compress(data, crp->crp_payload_length, &out);
800091d81d1SSam Leffler 	else
801c0341432SJohn Baldwin 		result = cxf->decompress(data, crp->crp_payload_length, &out);
802091d81d1SSam Leffler 
8031ede983cSDag-Erling Smørgrav 	free(data, M_CRYPTO_DATA);
804091d81d1SSam Leffler 	if (result == 0)
805c0341432SJohn Baldwin 		return (EINVAL);
806c0341432SJohn Baldwin 	crp->crp_olen = result;
807c0341432SJohn Baldwin 
808c0341432SJohn Baldwin 	/* Check the compressed size when doing compression */
809c0341432SJohn Baldwin 	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
810c0341432SJohn Baldwin 		if (result >= crp->crp_payload_length) {
811c0341432SJohn Baldwin 			/* Compression was useless, we lost time */
812c0341432SJohn Baldwin 			free(out, M_CRYPTO_DATA);
813c0341432SJohn Baldwin 			return (0);
814c0341432SJohn Baldwin 		}
815c0341432SJohn Baldwin 	}
816091d81d1SSam Leffler 
817091d81d1SSam Leffler 	/* Copy back the (de)compressed data. m_copyback is
818091d81d1SSam Leffler 	 * extending the mbuf as necessary.
819091d81d1SSam Leffler 	 */
820c0341432SJohn Baldwin 	crypto_copyback(crp, crp->crp_payload_start, result, out);
821c0341432SJohn Baldwin 	if (result < crp->crp_payload_length) {
822c0341432SJohn Baldwin 		switch (crp->crp_buf_type) {
823c0341432SJohn Baldwin 		case CRYPTO_BUF_MBUF:
824c0341432SJohn Baldwin 			adj = result - crp->crp_payload_length;
825c0341432SJohn Baldwin 			m_adj(crp->crp_mbuf, adj);
826c0341432SJohn Baldwin 			break;
827c0341432SJohn Baldwin 		case CRYPTO_BUF_UIO: {
828c0341432SJohn Baldwin 			struct uio *uio = crp->crp_uio;
829091d81d1SSam Leffler 			int ind;
830091d81d1SSam Leffler 
831c0341432SJohn Baldwin 			adj = crp->crp_payload_length - result;
832091d81d1SSam Leffler 			ind = uio->uio_iovcnt - 1;
833091d81d1SSam Leffler 
834091d81d1SSam Leffler 			while (adj > 0 && ind >= 0) {
835091d81d1SSam Leffler 				if (adj < uio->uio_iov[ind].iov_len) {
836091d81d1SSam Leffler 					uio->uio_iov[ind].iov_len -= adj;
837091d81d1SSam Leffler 					break;
838091d81d1SSam Leffler 				}
839091d81d1SSam Leffler 
840091d81d1SSam Leffler 				adj -= uio->uio_iov[ind].iov_len;
841091d81d1SSam Leffler 				uio->uio_iov[ind].iov_len = 0;
842091d81d1SSam Leffler 				ind--;
843091d81d1SSam Leffler 				uio->uio_iovcnt--;
844091d81d1SSam Leffler 			}
845091d81d1SSam Leffler 			}
846c0341432SJohn Baldwin 			break;
847c0341432SJohn Baldwin 		}
848091d81d1SSam Leffler 	}
8491ede983cSDag-Erling Smørgrav 	free(out, M_CRYPTO_DATA);
850091d81d1SSam Leffler 	return 0;
851091d81d1SSam Leffler }
852091d81d1SSam Leffler 
853091d81d1SSam Leffler static int
854*3e947048SJohn Baldwin swcr_setup_cipher(struct swcr_session *ses,
855c0341432SJohn Baldwin     const struct crypto_session_params *csp)
856091d81d1SSam Leffler {
857c0341432SJohn Baldwin 	struct swcr_encdec *swe;
858091d81d1SSam Leffler 	struct enc_xform *txf;
859f6c4bc3bSPawel Jakub Dawidek 	int error;
860091d81d1SSam Leffler 
861c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
862c0341432SJohn Baldwin 	txf = crypto_cipher(csp);
863c0341432SJohn Baldwin 	MPASS(txf->ivsize == csp->csp_ivlen);
864*3e947048SJohn Baldwin 	if (txf->ctxsize != 0) {
865*3e947048SJohn Baldwin 		swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
866*3e947048SJohn Baldwin 		    M_NOWAIT);
867*3e947048SJohn Baldwin 		if (swe->sw_kschedule == NULL)
868*3e947048SJohn Baldwin 			return (ENOMEM);
869*3e947048SJohn Baldwin 	}
870c0341432SJohn Baldwin 	if (csp->csp_cipher_key != NULL) {
871*3e947048SJohn Baldwin 		error = txf->setkey(swe->sw_kschedule,
872c0341432SJohn Baldwin 		    csp->csp_cipher_key, csp->csp_cipher_klen);
873c0341432SJohn Baldwin 		if (error)
874c0341432SJohn Baldwin 			return (error);
875091d81d1SSam Leffler 	}
876c0341432SJohn Baldwin 	swe->sw_exf = txf;
877c0341432SJohn Baldwin 	return (0);
878f6c4bc3bSPawel Jakub Dawidek }
879091d81d1SSam Leffler 
880c0341432SJohn Baldwin static int
881c0341432SJohn Baldwin swcr_setup_auth(struct swcr_session *ses,
882c0341432SJohn Baldwin     const struct crypto_session_params *csp)
883c0341432SJohn Baldwin {
884c0341432SJohn Baldwin 	struct swcr_auth *swa;
885c0341432SJohn Baldwin 	struct auth_hash *axf;
886c0341432SJohn Baldwin 
887c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
888c0341432SJohn Baldwin 
889c0341432SJohn Baldwin 	axf = crypto_auth_hash(csp);
890c0341432SJohn Baldwin 	swa->sw_axf = axf;
891c0341432SJohn Baldwin 	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
892c0341432SJohn Baldwin 		return (EINVAL);
893c0341432SJohn Baldwin 	if (csp->csp_auth_mlen == 0)
894c0341432SJohn Baldwin 		swa->sw_mlen = axf->hashsize;
895c0341432SJohn Baldwin 	else
896c0341432SJohn Baldwin 		swa->sw_mlen = csp->csp_auth_mlen;
897c0341432SJohn Baldwin 	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
898c0341432SJohn Baldwin 	if (swa->sw_ictx == NULL)
899c0341432SJohn Baldwin 		return (ENOBUFS);
900c0341432SJohn Baldwin 
901c0341432SJohn Baldwin 	switch (csp->csp_auth_alg) {
902091d81d1SSam Leffler 	case CRYPTO_SHA1_HMAC:
903c97f39ceSConrad Meyer 	case CRYPTO_SHA2_224_HMAC:
904f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_256_HMAC:
905f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_384_HMAC:
906f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_512_HMAC:
907091d81d1SSam Leffler 	case CRYPTO_NULL_HMAC:
908091d81d1SSam Leffler 	case CRYPTO_RIPEMD160_HMAC:
9093a0b6a93SJohn Baldwin 		swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
910091d81d1SSam Leffler 		    M_NOWAIT);
911c0341432SJohn Baldwin 		if (swa->sw_octx == NULL)
912c0341432SJohn Baldwin 			return (ENOBUFS);
913c0341432SJohn Baldwin 
914c0341432SJohn Baldwin 		if (csp->csp_auth_key != NULL) {
915c0341432SJohn Baldwin 			swcr_authprepare(axf, swa, csp->csp_auth_key,
916c0341432SJohn Baldwin 			    csp->csp_auth_klen);
917091d81d1SSam Leffler 		}
918091d81d1SSam Leffler 
919c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
920c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
921091d81d1SSam Leffler 		break;
922091d81d1SSam Leffler 	case CRYPTO_SHA1:
923c4729f6eSConrad Meyer 	case CRYPTO_SHA2_224:
924c4729f6eSConrad Meyer 	case CRYPTO_SHA2_256:
925c4729f6eSConrad Meyer 	case CRYPTO_SHA2_384:
926c4729f6eSConrad Meyer 	case CRYPTO_SHA2_512:
927c0341432SJohn Baldwin 		axf->Init(swa->sw_ictx);
928c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
929c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
930c0341432SJohn Baldwin 		break;
931c0341432SJohn Baldwin 	case CRYPTO_AES_NIST_GMAC:
932c0341432SJohn Baldwin 		axf->Init(swa->sw_ictx);
933c0341432SJohn Baldwin 		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
934c0341432SJohn Baldwin 		    csp->csp_auth_klen);
935c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
936c0341432SJohn Baldwin 			ses->swcr_process = swcr_gmac;
937c0341432SJohn Baldwin 		break;
938c0341432SJohn Baldwin 	case CRYPTO_POLY1305:
939c0341432SJohn Baldwin 	case CRYPTO_BLAKE2B:
940c0341432SJohn Baldwin 	case CRYPTO_BLAKE2S:
941c0341432SJohn Baldwin 		/*
942c0341432SJohn Baldwin 		 * Blake2b and Blake2s support an optional key but do
943c0341432SJohn Baldwin 		 * not require one.
944c0341432SJohn Baldwin 		 */
945c0341432SJohn Baldwin 		if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
946c0341432SJohn Baldwin 			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
947c0341432SJohn Baldwin 			    csp->csp_auth_klen);
948c0341432SJohn Baldwin 		axf->Init(swa->sw_ictx);
949c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
950c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
951c0341432SJohn Baldwin 		break;
952c0341432SJohn Baldwin 	case CRYPTO_AES_CCM_CBC_MAC:
953c0341432SJohn Baldwin 		axf->Init(swa->sw_ictx);
954c0341432SJohn Baldwin 		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
955c0341432SJohn Baldwin 		    csp->csp_auth_klen);
956c0341432SJohn Baldwin 		if (csp->csp_mode == CSP_MODE_DIGEST)
957c0341432SJohn Baldwin 			ses->swcr_process = swcr_ccm_cbc_mac;
958c0341432SJohn Baldwin 		break;
959091d81d1SSam Leffler 	}
960091d81d1SSam Leffler 
961c0341432SJohn Baldwin 	return (0);
962c0341432SJohn Baldwin }
96308fca7a5SJohn-Mark Gurney 
964c0341432SJohn Baldwin static int
965c0341432SJohn Baldwin swcr_setup_gcm(struct swcr_session *ses,
966c0341432SJohn Baldwin     const struct crypto_session_params *csp)
967c0341432SJohn Baldwin {
968c0341432SJohn Baldwin 	struct swcr_auth *swa;
969c0341432SJohn Baldwin 	struct auth_hash *axf;
970c0341432SJohn Baldwin 
971c0341432SJohn Baldwin 	if (csp->csp_ivlen != AES_GCM_IV_LEN)
972c0341432SJohn Baldwin 		return (EINVAL);
973c0341432SJohn Baldwin 
974c0341432SJohn Baldwin 	/* First, setup the auth side. */
975c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
976c0341432SJohn Baldwin 	switch (csp->csp_cipher_klen * 8) {
977c0341432SJohn Baldwin 	case 128:
978c0341432SJohn Baldwin 		axf = &auth_hash_nist_gmac_aes_128;
979c0341432SJohn Baldwin 		break;
980c0341432SJohn Baldwin 	case 192:
981c0341432SJohn Baldwin 		axf = &auth_hash_nist_gmac_aes_192;
982c0341432SJohn Baldwin 		break;
983c0341432SJohn Baldwin 	case 256:
984c0341432SJohn Baldwin 		axf = &auth_hash_nist_gmac_aes_256;
985c0341432SJohn Baldwin 		break;
986c0341432SJohn Baldwin 	default:
987c0341432SJohn Baldwin 		return (EINVAL);
988c0341432SJohn Baldwin 	}
989c0341432SJohn Baldwin 	swa->sw_axf = axf;
990c0341432SJohn Baldwin 	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
991c0341432SJohn Baldwin 		return (EINVAL);
992c0341432SJohn Baldwin 	if (csp->csp_auth_mlen == 0)
993c0341432SJohn Baldwin 		swa->sw_mlen = axf->hashsize;
994c0341432SJohn Baldwin 	else
995c0341432SJohn Baldwin 		swa->sw_mlen = csp->csp_auth_mlen;
996c0341432SJohn Baldwin 	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
997c0341432SJohn Baldwin 	if (swa->sw_ictx == NULL)
998c0341432SJohn Baldwin 		return (ENOBUFS);
999c0341432SJohn Baldwin 	axf->Init(swa->sw_ictx);
1000c0341432SJohn Baldwin 	if (csp->csp_cipher_key != NULL)
1001c0341432SJohn Baldwin 		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1002c0341432SJohn Baldwin 		    csp->csp_cipher_klen);
1003c0341432SJohn Baldwin 
1004c0341432SJohn Baldwin 	/* Second, setup the cipher side. */
1005*3e947048SJohn Baldwin 	return (swcr_setup_cipher(ses, csp));
1006c0341432SJohn Baldwin }
1007c0341432SJohn Baldwin 
1008c0341432SJohn Baldwin static int
1009c0341432SJohn Baldwin swcr_setup_ccm(struct swcr_session *ses,
1010c0341432SJohn Baldwin     const struct crypto_session_params *csp)
1011c0341432SJohn Baldwin {
1012c0341432SJohn Baldwin 	struct swcr_auth *swa;
1013c0341432SJohn Baldwin 	struct auth_hash *axf;
1014c0341432SJohn Baldwin 
1015c0341432SJohn Baldwin 	if (csp->csp_ivlen != AES_CCM_IV_LEN)
1016c0341432SJohn Baldwin 		return (EINVAL);
1017c0341432SJohn Baldwin 
1018c0341432SJohn Baldwin 	/* First, setup the auth side. */
1019c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
1020c0341432SJohn Baldwin 	switch (csp->csp_cipher_klen * 8) {
1021507281e5SSean Eric Fagan 	case 128:
1022507281e5SSean Eric Fagan 		axf = &auth_hash_ccm_cbc_mac_128;
1023507281e5SSean Eric Fagan 		break;
1024507281e5SSean Eric Fagan 	case 192:
1025507281e5SSean Eric Fagan 		axf = &auth_hash_ccm_cbc_mac_192;
1026507281e5SSean Eric Fagan 		break;
1027507281e5SSean Eric Fagan 	case 256:
1028507281e5SSean Eric Fagan 		axf = &auth_hash_ccm_cbc_mac_256;
1029507281e5SSean Eric Fagan 		break;
1030507281e5SSean Eric Fagan 	default:
1031c0341432SJohn Baldwin 		return (EINVAL);
1032507281e5SSean Eric Fagan 	}
1033c0341432SJohn Baldwin 	swa->sw_axf = axf;
1034c0341432SJohn Baldwin 	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1035c0341432SJohn Baldwin 		return (EINVAL);
1036c0341432SJohn Baldwin 	if (csp->csp_auth_mlen == 0)
1037c0341432SJohn Baldwin 		swa->sw_mlen = axf->hashsize;
1038c0341432SJohn Baldwin 	else
1039c0341432SJohn Baldwin 		swa->sw_mlen = csp->csp_auth_mlen;
1040c0341432SJohn Baldwin 	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1041c0341432SJohn Baldwin 	if (swa->sw_ictx == NULL)
1042c0341432SJohn Baldwin 		return (ENOBUFS);
1043c0341432SJohn Baldwin 	axf->Init(swa->sw_ictx);
1044c0341432SJohn Baldwin 	if (csp->csp_cipher_key != NULL)
1045c0341432SJohn Baldwin 		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1046c0341432SJohn Baldwin 		    csp->csp_cipher_klen);
104708fca7a5SJohn-Mark Gurney 
1048c0341432SJohn Baldwin 	/* Second, setup the cipher side. */
1049*3e947048SJohn Baldwin 	return (swcr_setup_cipher(ses, csp));
10502e2e26d1SJohn Baldwin }
1051a2bc81bfSJohn-Mark Gurney 
1052c0341432SJohn Baldwin static bool
1053c0341432SJohn Baldwin swcr_auth_supported(const struct crypto_session_params *csp)
1054109919c6SBenno Rice {
1055091d81d1SSam Leffler 	struct auth_hash *axf;
1056091d81d1SSam Leffler 
1057c0341432SJohn Baldwin 	axf = crypto_auth_hash(csp);
1058c0341432SJohn Baldwin 	if (axf == NULL)
1059c0341432SJohn Baldwin 		return (false);
1060c0341432SJohn Baldwin 	switch (csp->csp_auth_alg) {
1061091d81d1SSam Leffler 	case CRYPTO_SHA1_HMAC:
1062c97f39ceSConrad Meyer 	case CRYPTO_SHA2_224_HMAC:
1063f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_256_HMAC:
1064f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_384_HMAC:
1065f6c4bc3bSPawel Jakub Dawidek 	case CRYPTO_SHA2_512_HMAC:
1066091d81d1SSam Leffler 	case CRYPTO_NULL_HMAC:
1067c0341432SJohn Baldwin 	case CRYPTO_RIPEMD160_HMAC:
1068091d81d1SSam Leffler 		break;
1069c0341432SJohn Baldwin 	case CRYPTO_AES_NIST_GMAC:
1070c0341432SJohn Baldwin 		switch (csp->csp_auth_klen * 8) {
1071c0341432SJohn Baldwin 		case 128:
1072c0341432SJohn Baldwin 		case 192:
1073c0341432SJohn Baldwin 		case 256:
1074c0341432SJohn Baldwin 			break;
1075c0341432SJohn Baldwin 		default:
1076c0341432SJohn Baldwin 			return (false);
1077c0341432SJohn Baldwin 		}
1078c0341432SJohn Baldwin 		if (csp->csp_auth_key == NULL)
1079c0341432SJohn Baldwin 			return (false);
1080c0341432SJohn Baldwin 		if (csp->csp_ivlen != AES_GCM_IV_LEN)
1081c0341432SJohn Baldwin 			return (false);
1082c0341432SJohn Baldwin 		break;
108325b7033bSConrad Meyer 	case CRYPTO_POLY1305:
1084c0341432SJohn Baldwin 		if (csp->csp_auth_klen != POLY1305_KEY_LEN)
1085c0341432SJohn Baldwin 			return (false);
1086c0341432SJohn Baldwin 		break;
1087c0341432SJohn Baldwin 	case CRYPTO_AES_CCM_CBC_MAC:
1088c0341432SJohn Baldwin 		switch (csp->csp_auth_klen * 8) {
1089c0341432SJohn Baldwin 		case 128:
1090c0341432SJohn Baldwin 		case 192:
1091c0341432SJohn Baldwin 		case 256:
1092c0341432SJohn Baldwin 			break;
1093c0341432SJohn Baldwin 		default:
1094c0341432SJohn Baldwin 			return (false);
1095c0341432SJohn Baldwin 		}
1096c0341432SJohn Baldwin 		if (csp->csp_auth_key == NULL)
1097c0341432SJohn Baldwin 			return (false);
1098c0341432SJohn Baldwin 		if (csp->csp_ivlen != AES_CCM_IV_LEN)
1099c0341432SJohn Baldwin 			return (false);
1100c0341432SJohn Baldwin 		break;
1101c0341432SJohn Baldwin 	}
1102c0341432SJohn Baldwin 	return (true);
1103c0341432SJohn Baldwin }
1104091d81d1SSam Leffler 
1105c0341432SJohn Baldwin static bool
1106c0341432SJohn Baldwin swcr_cipher_supported(const struct crypto_session_params *csp)
1107c0341432SJohn Baldwin {
1108c0341432SJohn Baldwin 	struct enc_xform *txf;
1109c0341432SJohn Baldwin 
1110c0341432SJohn Baldwin 	txf = crypto_cipher(csp);
1111c0341432SJohn Baldwin 	if (txf == NULL)
1112c0341432SJohn Baldwin 		return (false);
1113c0341432SJohn Baldwin 	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
1114c0341432SJohn Baldwin 	    txf->ivsize != csp->csp_ivlen)
1115c0341432SJohn Baldwin 		return (false);
1116c0341432SJohn Baldwin 	return (true);
1117c0341432SJohn Baldwin }
1118c0341432SJohn Baldwin 
1119c0341432SJohn Baldwin static int
1120c0341432SJohn Baldwin swcr_probesession(device_t dev, const struct crypto_session_params *csp)
1121c0341432SJohn Baldwin {
1122c0341432SJohn Baldwin 
1123c0341432SJohn Baldwin 	if (csp->csp_flags != 0)
1124c0341432SJohn Baldwin 		return (EINVAL);
1125c0341432SJohn Baldwin 	switch (csp->csp_mode) {
1126c0341432SJohn Baldwin 	case CSP_MODE_COMPRESS:
1127c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1128c0341432SJohn Baldwin 		case CRYPTO_DEFLATE_COMP:
1129c0341432SJohn Baldwin 			break;
1130c0341432SJohn Baldwin 		default:
1131c0341432SJohn Baldwin 			return (EINVAL);
11325fbc5b5aSConrad Meyer 		}
1133091d81d1SSam Leffler 		break;
1134c0341432SJohn Baldwin 	case CSP_MODE_CIPHER:
1135c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1136c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1137c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1138c0341432SJohn Baldwin 			return (EINVAL);
1139c0341432SJohn Baldwin 		default:
1140c0341432SJohn Baldwin 			if (!swcr_cipher_supported(csp))
1141c0341432SJohn Baldwin 				return (EINVAL);
1142091d81d1SSam Leffler 			break;
1143091d81d1SSam Leffler 		}
1144c0341432SJohn Baldwin 		break;
1145c0341432SJohn Baldwin 	case CSP_MODE_DIGEST:
1146c0341432SJohn Baldwin 		if (!swcr_auth_supported(csp))
1147c0341432SJohn Baldwin 			return (EINVAL);
1148c0341432SJohn Baldwin 		break;
1149c0341432SJohn Baldwin 	case CSP_MODE_AEAD:
1150c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1151c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1152c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1153c0341432SJohn Baldwin 			break;
1154c0341432SJohn Baldwin 		default:
1155c0341432SJohn Baldwin 			return (EINVAL);
1156c0341432SJohn Baldwin 		}
1157c0341432SJohn Baldwin 		break;
1158c0341432SJohn Baldwin 	case CSP_MODE_ETA:
1159c0341432SJohn Baldwin 		/* AEAD algorithms cannot be used for EtA. */
1160c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1161c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1162c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1163c0341432SJohn Baldwin 			return (EINVAL);
1164c0341432SJohn Baldwin 		}
1165c0341432SJohn Baldwin 		switch (csp->csp_auth_alg) {
1166c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GMAC:
1167c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_CBC_MAC:
1168c0341432SJohn Baldwin 			return (EINVAL);
1169c0341432SJohn Baldwin 		}
1170c0341432SJohn Baldwin 
1171c0341432SJohn Baldwin 		if (!swcr_cipher_supported(csp) ||
1172c0341432SJohn Baldwin 		    !swcr_auth_supported(csp))
1173c0341432SJohn Baldwin 			return (EINVAL);
1174c0341432SJohn Baldwin 		break;
1175c0341432SJohn Baldwin 	default:
1176c0341432SJohn Baldwin 		return (EINVAL);
1177c0341432SJohn Baldwin 	}
1178c0341432SJohn Baldwin 
1179c0341432SJohn Baldwin 	return (CRYPTODEV_PROBE_SOFTWARE);
1180c0341432SJohn Baldwin }
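
/*
 * Editor's illustration (not part of the original cryptosoft.c): the
 * probe above accepts AES-GCM and AES-CCM only as complete AEAD
 * sessions; the same ciphers requested in plain CIPHER or ETA mode are
 * refused with EINVAL.  The hypothetical helper below builds one
 * parameter block of each kind (the key pointer and 128-bit key length
 * are placeholders).
 */
static void __unused
swcr_example_gcm_modes(struct crypto_session_params *rejected,
    struct crypto_session_params *accepted, const uint8_t *key)
{

	/* Rejected: GCM cannot be used as a bare cipher. */
	memset(rejected, 0, sizeof(*rejected));
	rejected->csp_mode = CSP_MODE_CIPHER;
	rejected->csp_cipher_alg = CRYPTO_AES_NIST_GCM_16;
	rejected->csp_cipher_klen = 16;
	rejected->csp_cipher_key = key;
	rejected->csp_ivlen = AES_GCM_IV_LEN;

	/* Accepted: the same transform as a full AEAD session. */
	memset(accepted, 0, sizeof(*accepted));
	accepted->csp_mode = CSP_MODE_AEAD;
	accepted->csp_cipher_alg = CRYPTO_AES_NIST_GCM_16;
	accepted->csp_cipher_klen = 16;
	accepted->csp_cipher_key = key;
	accepted->csp_ivlen = AES_GCM_IV_LEN;
}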
1181c0341432SJohn Baldwin 
1182c0341432SJohn Baldwin /*
1183c0341432SJohn Baldwin  * Generate a new software session.
1184c0341432SJohn Baldwin  */
1185c0341432SJohn Baldwin static int
1186c0341432SJohn Baldwin swcr_newsession(device_t dev, crypto_session_t cses,
1187c0341432SJohn Baldwin     const struct crypto_session_params *csp)
1188c0341432SJohn Baldwin {
1189c0341432SJohn Baldwin 	struct swcr_session *ses;
1190c0341432SJohn Baldwin 	struct swcr_encdec *swe;
1191c0341432SJohn Baldwin 	struct swcr_auth *swa;
1192c0341432SJohn Baldwin 	struct comp_algo *cxf;
1193c0341432SJohn Baldwin 	int error;
1194c0341432SJohn Baldwin 
1195c0341432SJohn Baldwin 	ses = crypto_get_driver_session(cses);
1196c0341432SJohn Baldwin 	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
1197c0341432SJohn Baldwin 
1198c0341432SJohn Baldwin 	error = 0;
1199c0341432SJohn Baldwin 	swe = &ses->swcr_encdec;
1200c0341432SJohn Baldwin 	swa = &ses->swcr_auth;
1201c0341432SJohn Baldwin 	switch (csp->csp_mode) {
1202c0341432SJohn Baldwin 	case CSP_MODE_COMPRESS:
1203c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1204c0341432SJohn Baldwin 		case CRYPTO_DEFLATE_COMP:
1205c0341432SJohn Baldwin 			cxf = &comp_algo_deflate;
1206c0341432SJohn Baldwin 			break;
1207c0341432SJohn Baldwin #ifdef INVARIANTS
1208c0341432SJohn Baldwin 		default:
1209c0341432SJohn Baldwin 			panic("bad compression algo");
1210c0341432SJohn Baldwin #endif
1211c0341432SJohn Baldwin 		}
1212c0341432SJohn Baldwin 		ses->swcr_compdec.sw_cxf = cxf;
1213c0341432SJohn Baldwin 		ses->swcr_process = swcr_compdec;
1214c0341432SJohn Baldwin 		break;
1215c0341432SJohn Baldwin 	case CSP_MODE_CIPHER:
1216c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1217c0341432SJohn Baldwin 		case CRYPTO_NULL_CBC:
1218c0341432SJohn Baldwin 			ses->swcr_process = swcr_null;
1219c0341432SJohn Baldwin 			break;
1220c0341432SJohn Baldwin #ifdef INVARIANTS
1221c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1222c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1223c0341432SJohn Baldwin 			panic("bad cipher algo");
1224c0341432SJohn Baldwin #endif
1225c0341432SJohn Baldwin 		default:
1226*3e947048SJohn Baldwin 			error = swcr_setup_cipher(ses, csp);
1227c0341432SJohn Baldwin 			if (error == 0)
1228c0341432SJohn Baldwin 				ses->swcr_process = swcr_encdec;
1229c0341432SJohn Baldwin 		}
1230c0341432SJohn Baldwin 		break;
1231c0341432SJohn Baldwin 	case CSP_MODE_DIGEST:
1232c0341432SJohn Baldwin 		error = swcr_setup_auth(ses, csp);
1233c0341432SJohn Baldwin 		break;
1234c0341432SJohn Baldwin 	case CSP_MODE_AEAD:
1235c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1236c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1237c0341432SJohn Baldwin 			error = swcr_setup_gcm(ses, csp);
1238c0341432SJohn Baldwin 			if (error == 0)
1239c0341432SJohn Baldwin 				ses->swcr_process = swcr_gcm;
1240c0341432SJohn Baldwin 			break;
1241c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1242c0341432SJohn Baldwin 			error = swcr_setup_ccm(ses, csp);
1243c0341432SJohn Baldwin 			if (error == 0)
1244c0341432SJohn Baldwin 				ses->swcr_process = swcr_ccm;
1245c0341432SJohn Baldwin 			break;
1246c0341432SJohn Baldwin #ifdef INVARIANTS
1247c0341432SJohn Baldwin 		default:
1248c0341432SJohn Baldwin 			panic("bad aead algo");
1249c0341432SJohn Baldwin #endif
1250c0341432SJohn Baldwin 		}
1251c0341432SJohn Baldwin 		break;
1252c0341432SJohn Baldwin 	case CSP_MODE_ETA:
1253c0341432SJohn Baldwin #ifdef INVARIANTS
1254c0341432SJohn Baldwin 		switch (csp->csp_cipher_alg) {
1255c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GCM_16:
1256c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_16:
1257c0341432SJohn Baldwin 			panic("bad eta cipher algo");
1258c0341432SJohn Baldwin 		}
1259c0341432SJohn Baldwin 		switch (csp->csp_auth_alg) {
1260c0341432SJohn Baldwin 		case CRYPTO_AES_NIST_GMAC:
1261c0341432SJohn Baldwin 		case CRYPTO_AES_CCM_CBC_MAC:
1262c0341432SJohn Baldwin 			panic("bad eta auth algo");
1263c0341432SJohn Baldwin 		}
1264c0341432SJohn Baldwin #endif
1265c0341432SJohn Baldwin 
1266c0341432SJohn Baldwin 		error = swcr_setup_auth(ses, csp);
1267c0341432SJohn Baldwin 		if (error)
1268c0341432SJohn Baldwin 			break;
1269c0341432SJohn Baldwin 		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
1270c0341432SJohn Baldwin 			/* Effectively degrade to digest mode. */
1271c0341432SJohn Baldwin 			ses->swcr_process = swcr_authcompute;
1272c0341432SJohn Baldwin 			break;
1273c0341432SJohn Baldwin 		}
1274c0341432SJohn Baldwin 
1275*3e947048SJohn Baldwin 		error = swcr_setup_cipher(ses, csp);
1276c0341432SJohn Baldwin 		if (error == 0)
1277c0341432SJohn Baldwin 			ses->swcr_process = swcr_eta;
1278c0341432SJohn Baldwin 		break;
1279c0341432SJohn Baldwin 	default:
1280c0341432SJohn Baldwin 		error = EINVAL;
1281c0341432SJohn Baldwin 	}
1282c0341432SJohn Baldwin 
1283c0341432SJohn Baldwin 	if (error)
1284c0341432SJohn Baldwin 		swcr_freesession(dev, cses);
1285c0341432SJohn Baldwin 	return (error);
1286c0341432SJohn Baldwin }
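
/*
 * Editor's illustration (not part of the original cryptosoft.c): from
 * a consumer's point of view the handler selection above is driven
 * entirely by crypto_newsession(9).  The hypothetical function below is
 * a minimal sketch, assuming the crypto_session_params-based consumer
 * interface of this OCF generation, of creating an encrypt-then-
 * authenticate session that this driver would service; key material,
 * key lengths, and error handling are placeholders.
 */
static int __unused
swcr_example_eta_session(crypto_session_t *csesp,
    const uint8_t *enc_key, const uint8_t *mac_key)
{
	struct crypto_session_params csp;

	memset(&csp, 0, sizeof(csp));
	csp.csp_mode = CSP_MODE_ETA;
	csp.csp_cipher_alg = CRYPTO_AES_CBC;
	csp.csp_cipher_klen = 32;		/* AES-256 */
	csp.csp_cipher_key = enc_key;
	csp.csp_ivlen = AES_BLOCK_LEN;		/* 16-byte CBC IV */
	csp.csp_auth_alg = CRYPTO_SHA2_256_HMAC;
	csp.csp_auth_klen = 32;
	csp.csp_auth_key = mac_key;

	/* Ask specifically for a software implementation. */
	return (crypto_newsession(csesp, &csp, CRYPTOCAP_F_SOFTWARE));
}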
1287c0341432SJohn Baldwin 
1288c0341432SJohn Baldwin static void
1289c0341432SJohn Baldwin swcr_freesession(device_t dev, crypto_session_t cses)
1290c0341432SJohn Baldwin {
1291c0341432SJohn Baldwin 	struct swcr_session *ses;
1292c0341432SJohn Baldwin 	struct swcr_auth *swa;
1293c0341432SJohn Baldwin 	struct auth_hash *axf;
1294c0341432SJohn Baldwin 
1295c0341432SJohn Baldwin 	ses = crypto_get_driver_session(cses);
1296c0341432SJohn Baldwin 
1297c0341432SJohn Baldwin 	mtx_destroy(&ses->swcr_lock);
1298c0341432SJohn Baldwin 
1299*3e947048SJohn Baldwin 	zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);
1300c0341432SJohn Baldwin 
1301c0341432SJohn Baldwin 	axf = ses->swcr_auth.sw_axf;
1302c0341432SJohn Baldwin 	if (axf != NULL) {
1303c0341432SJohn Baldwin 		swa = &ses->swcr_auth;
1304c0341432SJohn Baldwin 		if (swa->sw_ictx != NULL) {
1305c0341432SJohn Baldwin 			explicit_bzero(swa->sw_ictx, axf->ctxsize);
1306c0341432SJohn Baldwin 			free(swa->sw_ictx, M_CRYPTO_DATA);
1307c0341432SJohn Baldwin 		}
1308c0341432SJohn Baldwin 		if (swa->sw_octx != NULL) {
13093a0b6a93SJohn Baldwin 			explicit_bzero(swa->sw_octx, axf->ctxsize);
1310c0341432SJohn Baldwin 			free(swa->sw_octx, M_CRYPTO_DATA);
1311c0341432SJohn Baldwin 		}
1312091d81d1SSam Leffler 	}
1313091d81d1SSam Leffler }
1314091d81d1SSam Leffler 
1315091d81d1SSam Leffler /*
1316091d81d1SSam Leffler  * Process a software request.
1317091d81d1SSam Leffler  */
1318091d81d1SSam Leffler static int
13196810ad6fSSam Leffler swcr_process(device_t dev, struct cryptop *crp, int hint)
1320091d81d1SSam Leffler {
1321c0341432SJohn Baldwin 	struct swcr_session *ses;
1322091d81d1SSam Leffler 
13231b0909d5SConrad Meyer 	ses = crypto_get_driver_session(crp->crp_session);
1324a7fcb1afSSean Eric Fagan 	mtx_lock(&ses->swcr_lock);
1325091d81d1SSam Leffler 
1326c0341432SJohn Baldwin 	crp->crp_etype = ses->swcr_process(ses, crp);
1327091d81d1SSam Leffler 
1328a7fcb1afSSean Eric Fagan 	mtx_unlock(&ses->swcr_lock);
1329091d81d1SSam Leffler 	crypto_done(crp);
1330c0341432SJohn Baldwin 	return (0);
1331091d81d1SSam Leffler }
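
/*
 * Editor's illustration (not part of the original cryptosoft.c):
 * requests reach swcr_process() via crypto_dispatch(9), and
 * crypto_done() above hands the result back through the consumer's
 * callback.  The hypothetical functions below sketch submitting a
 * digest-only request against an existing session; they assume the
 * crypto_getreq()/crypto_use_buf() consumer helpers of the refactored
 * OCF API and elide error handling.  The buffer is expected to hold
 * 'len' payload bytes followed by room for the SHA-256 tag.
 */
static int __unused
swcr_example_digest_done(struct cryptop *crp)
{

	/* crp_etype carries the error set by ses->swcr_process(). */
	if (crp->crp_etype != 0)
		printf("digest request failed: %d\n", crp->crp_etype);
	crypto_freereq(crp);
	return (0);
}

static void __unused
swcr_example_digest(crypto_session_t cses, void *buf, int len)
{
	struct cryptop *crp;

	crp = crypto_getreq(cses, M_WAITOK);
	crp->crp_op = CRYPTO_OP_COMPUTE_DIGEST;
	crypto_use_buf(crp, buf, len + SHA2_256_HASH_LEN);
	crp->crp_payload_start = 0;
	crp->crp_payload_length = len;
	crp->crp_digest_start = len;		/* tag written after payload */
	crp->crp_callback = swcr_example_digest_done;
	(void)crypto_dispatch(crp);
}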
1332091d81d1SSam Leffler 
1333091d81d1SSam Leffler static void
13343f147ab2SWarner Losh swcr_identify(driver_t *drv, device_t parent)
1335091d81d1SSam Leffler {
13366810ad6fSSam Leffler 	/* NB: order 10 is so we get attached after h/w devices */
13376810ad6fSSam Leffler 	if (device_find_child(parent, "cryptosoft", -1) == NULL &&
133886c585d9SMarius Strobl 	    BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == NULL)
13396810ad6fSSam Leffler 		panic("cryptosoft: could not attach");
13406810ad6fSSam Leffler }
1341f6c4bc3bSPawel Jakub Dawidek 
13426810ad6fSSam Leffler static int
13436810ad6fSSam Leffler swcr_probe(device_t dev)
13446810ad6fSSam Leffler {
13456810ad6fSSam Leffler 	device_set_desc(dev, "software crypto");
134686c585d9SMarius Strobl 	return (BUS_PROBE_NOWILDCARD);
13476810ad6fSSam Leffler }
1348f6c4bc3bSPawel Jakub Dawidek 
13496810ad6fSSam Leffler static int
13506810ad6fSSam Leffler swcr_attach(device_t dev)
13516810ad6fSSam Leffler {
13526810ad6fSSam Leffler 
13539ebbebe4SConrad Meyer 	swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
13546810ad6fSSam Leffler 			CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
13556810ad6fSSam Leffler 	if (swcr_id < 0) {
13566810ad6fSSam Leffler 		device_printf(dev, "cannot initialize!\n");
1357c0341432SJohn Baldwin 		return (ENXIO);
13586810ad6fSSam Leffler 	}
13596810ad6fSSam Leffler 
1360c0341432SJohn Baldwin 	return (0);
1361091d81d1SSam Leffler }
13624b465da2SPawel Jakub Dawidek 
13633f147ab2SWarner Losh static int
13646810ad6fSSam Leffler swcr_detach(device_t dev)
13654b465da2SPawel Jakub Dawidek {
13666810ad6fSSam Leffler 	crypto_unregister_all(swcr_id);
13673f147ab2SWarner Losh 	return (0);
13684b465da2SPawel Jakub Dawidek }
13696810ad6fSSam Leffler 
13706810ad6fSSam Leffler static device_method_t swcr_methods[] = {
13716810ad6fSSam Leffler 	DEVMETHOD(device_identify,	swcr_identify),
13726810ad6fSSam Leffler 	DEVMETHOD(device_probe,		swcr_probe),
13736810ad6fSSam Leffler 	DEVMETHOD(device_attach,	swcr_attach),
13746810ad6fSSam Leffler 	DEVMETHOD(device_detach,	swcr_detach),
13756810ad6fSSam Leffler 
1376c0341432SJohn Baldwin 	DEVMETHOD(cryptodev_probesession, swcr_probesession),
13776810ad6fSSam Leffler 	DEVMETHOD(cryptodev_newsession,	swcr_newsession),
13786810ad6fSSam Leffler 	DEVMETHOD(cryptodev_freesession, swcr_freesession),
13796810ad6fSSam Leffler 	DEVMETHOD(cryptodev_process,	swcr_process),
13806810ad6fSSam Leffler 
13816810ad6fSSam Leffler 	{0, 0},
13826810ad6fSSam Leffler };
13836810ad6fSSam Leffler 
13846810ad6fSSam Leffler static driver_t swcr_driver = {
13856810ad6fSSam Leffler 	"cryptosoft",
13866810ad6fSSam Leffler 	swcr_methods,
13876810ad6fSSam Leffler 	0,		/* NB: no softc */
13886810ad6fSSam Leffler };
13896810ad6fSSam Leffler static devclass_t swcr_devclass;
13906810ad6fSSam Leffler 
13916810ad6fSSam Leffler /*
13926810ad6fSSam Leffler  * NB: We explicitly reference the crypto module so we
13936810ad6fSSam Leffler  * get the necessary ordering when built as a loadable
13946810ad6fSSam Leffler  * module.  This is required because we bundle the crypto
13956810ad6fSSam Leffler  * module code together with the cryptosoft driver (otherwise
13966810ad6fSSam Leffler  * normal module dependencies would handle things).
13976810ad6fSSam Leffler  */
13986810ad6fSSam Leffler extern int crypto_modevent(struct module *, int, void *);
13996810ad6fSSam Leffler /* XXX where to attach */
14006810ad6fSSam Leffler DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
14016810ad6fSSam Leffler MODULE_VERSION(cryptosoft, 1);
14026810ad6fSSam Leffler MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);
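
/*
 * Editor's note (illustrative, not part of the original cryptosoft.c):
 * the same MODULE_DEPEND() mechanism gives loadable OCF consumers and
 * drivers a correct load order relative to the crypto framework.  A
 * hypothetical out-of-tree driver would simply declare, e.g.:
 *
 *	MODULE_DEPEND(mydriver, crypto, 1, 1, 1);
 *
 * cryptosoft itself cannot rely on that alone because the crypto module
 * code is bundled with it, hence the explicit reference to
 * crypto_modevent above.
 */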
1403