/*	$OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $	*/

/*-
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/uio.h>
#include <sys/lock.h>
#include <sys/rwlock.h>
#include <sys/endian.h>
#include <sys/limits.h>
#include <sys/mutex.h>

#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include <sys/kobj.h>
#include <sys/bus.h>
#include "cryptodev_if.h"

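/*
 * Per-session state for each class of operation.  A session only uses
 * the members for the operations it was created for (cipher, auth,
 * and/or compression).
 */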
struct swcr_auth {
	void		*sw_ictx;
	void		*sw_octx;
	struct auth_hash *sw_axf;
	uint16_t	sw_mlen;
};

struct swcr_encdec {
	void		*sw_kschedule;
	struct enc_xform *sw_exf;
};

struct swcr_compdec {
	struct comp_algo *sw_cxf;
};

struct swcr_session {
	struct mtx	swcr_lock;
	int	(*swcr_process)(struct swcr_session *, struct cryptop *);

	struct swcr_auth swcr_auth;
	struct swcr_encdec swcr_encdec;
	struct swcr_compdec swcr_compdec;
};

static	int32_t swcr_id;

static	void swcr_freesession(device_t dev, crypto_session_t cses);

/* Used for CRYPTO_NULL_CBC. */
static int
swcr_null(struct swcr_session *ses, struct cryptop *crp)
{

	return (0);
}

/*
 * Apply a symmetric encryption/decryption algorithm.
 */
static int
swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
{
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	const struct crypto_session_params *csp;
	struct swcr_encdec *sw;
	struct enc_xform *exf;
	int i, blks, inlen, ivlen, outlen, resid;
	struct crypto_buffer_cursor cc_in, cc_out;
	const char *inblk;
	char *outblk;
	int error;
	bool encrypting;

	error = 0;

	sw = &ses->swcr_encdec;
	exf = sw->sw_exf;
	ivlen = exf->ivsize;

	if (exf->native_blocksize == 0) {
		/* Check for non-padded data */
		if ((crp->crp_payload_length % exf->blocksize) != 0)
			return (EINVAL);

		blks = exf->blocksize;
	} else
		blks = exf->native_blocksize;

	if (exf == &enc_xform_aes_icm &&
	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

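	/* A key passed with the request overrides the session key. */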
	if (crp->crp_cipher_key != NULL) {
		csp = crypto_get_params(crp->crp_session);
		error = exf->setkey(sw->sw_kschedule,
		    crp->crp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}

	crypto_read_iv(crp, iv);

	if (exf->reinit) {
		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv);
	}

	ivp = iv;

	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inlen = crypto_cursor_seglen(&cc_in);
	inblk = crypto_cursor_segbase(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outlen = crypto_cursor_seglen(&cc_out);
	outblk = crypto_cursor_segbase(&cc_out);

	resid = crp->crp_payload_length;
	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

	/*
	 * Loop through encrypting or decrypting blocks.  'inlen' is
	 * the remaining length of the current segment in the input
	 * buffer.  'outlen' is the remaining length of the current
	 * segment in the output buffer.
	 */
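	/*
	 * For CBC this implements the standard chaining:
	 *   encrypt: C[i] = E_K(P[i] ^ C[i-1]), with C[-1] = IV
	 *   decrypt: P[i] = D_K(C[i]) ^ C[i-1]
	 * 'ivp' always points at the previous ciphertext block (or the
	 * IV for the first block).  'iv' and 'iv2' alternate as saved
	 * block storage so that in-place decryption does not clobber
	 * the ciphertext still needed for chaining.
	 */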
	while (resid >= blks) {
		/*
		 * If the current block is not contained within the
		 * current input/output segment, use 'blk' as a local
		 * buffer.
		 */
		if (inlen < blks) {
			crypto_cursor_copydata(&cc_in, blks, blk);
			inblk = blk;
		}
		if (outlen < blks)
			outblk = blk;

		/*
		 * Ciphers without a 'reinit' hook are assumed to be
		 * used in CBC mode where the chaining is done here.
		 */
		if (exf->reinit != NULL) {
			if (encrypting)
				exf->encrypt(sw->sw_kschedule, inblk, outblk);
			else
				exf->decrypt(sw->sw_kschedule, inblk, outblk);
		} else if (encrypting) {
			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] = inblk[i] ^ ivp[i];

			exf->encrypt(sw->sw_kschedule, outblk, outblk);

			/*
			 * Keep encrypted block for XOR'ing
			 * with next block
			 */
			memcpy(iv, outblk, blks);
			ivp = iv;
		} else {	/* decrypt */
			/*
			 * Keep encrypted block for XOR'ing
			 * with next block
			 */
			nivp = (ivp == iv) ? iv2 : iv;
			memcpy(nivp, inblk, blks);

			exf->decrypt(sw->sw_kschedule, inblk, outblk);

			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] ^= ivp[i];

			ivp = nivp;
		}

		if (inlen < blks) {
			inlen = crypto_cursor_seglen(&cc_in);
			inblk = crypto_cursor_segbase(&cc_in);
		} else {
			crypto_cursor_advance(&cc_in, blks);
			inlen -= blks;
			inblk += blks;
		}

		if (outlen < blks) {
			crypto_cursor_copyback(&cc_out, blks, blk);
			outlen = crypto_cursor_seglen(&cc_out);
			outblk = crypto_cursor_segbase(&cc_out);
		} else {
			crypto_cursor_advance(&cc_out, blks);
			outlen -= blks;
			outblk += blks;
		}

		resid -= blks;
	}

	/* Handle trailing partial block for stream ciphers. */
	if (resid > 0) {
		KASSERT(exf->native_blocksize != 0,
		    ("%s: partial block of %d bytes for cipher %s",
		    __func__, resid, exf->name));
		KASSERT(exf->reinit != NULL,
		    ("%s: partial block cipher %s without reinit hook",
		    __func__, exf->name));
		KASSERT(resid < blks, ("%s: partial block too big", __func__));

		inlen = crypto_cursor_seglen(&cc_in);
		outlen = crypto_cursor_seglen(&cc_out);
		if (inlen < resid) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			inblk = blk;
		} else
			inblk = crypto_cursor_segbase(&cc_in);
		if (outlen < resid)
			outblk = blk;
		else
			outblk = crypto_cursor_segbase(&cc_out);
		if (encrypting)
			exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);
		else
			exf->decrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);
		if (outlen < resid)
			crypto_cursor_copyback(&cc_out, resid, blk);
	}

	explicit_bzero(blk, sizeof(blk));
	explicit_bzero(iv, sizeof(iv));
	explicit_bzero(iv2, sizeof(iv2));
	return (0);
}

static void
swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
    const uint8_t *key, int klen)
{

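	/*
	 * For the HMAC algorithms, precompute the inner and outer
	 * contexts: HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)).
	 * Doing this once per key lets swcr_authcompute() start each
	 * request from a copy of the saved context instead of
	 * rehashing the key.
	 */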
	switch (axf->type) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		hmac_init_ipad(axf, key, klen, sw->sw_ictx);
		hmac_init_opad(axf, key, klen, sw->sw_octx);
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		axf->Setkey(sw->sw_ictx, key, klen);
		axf->Init(sw->sw_ictx);
		break;
	default:
		panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
	}
}

/*
 * Compute or verify hash.
 */
static int
swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
{
	u_char aalg[HASH_MAX_LEN];
	const struct crypto_session_params *csp;
	struct swcr_auth *sw;
	struct auth_hash *axf;
	union authctx ctx;
	int err;

	sw = &ses->swcr_auth;

	axf = sw->sw_axf;

	if (crp->crp_auth_key != NULL) {
		csp = crypto_get_params(crp->crp_session);
		swcr_authprepare(axf, sw, crp->crp_auth_key,
		    csp->csp_auth_klen);
	}

	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
	    axf->Update, &ctx);
	if (err)
		return (err);

	if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
	    CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
		err = crypto_apply_buf(&crp->crp_obuf,
		    crp->crp_payload_output_start, crp->crp_payload_length,
		    axf->Update, &ctx);
	else
		err = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &ctx);
	if (err)
		return (err);

	switch (axf->type) {
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		if (sw->sw_octx == NULL)
			return (EINVAL);

		axf->Final(aalg, &ctx);
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_POLY1305:
		axf->Final(aalg, &ctx);
		break;
	}

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char uaalg[HASH_MAX_LEN];

		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
		if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
			err = EBADMSG;
		explicit_bzero(uaalg, sizeof(uaalg));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
	}
	explicit_bzero(aalg, sizeof(aalg));
	return (err);
}

CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */

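/*
 * AES-GMAC: GCM restricted to authentication.  The payload is fed to
 * GHASH as if it were AAD and nothing is encrypted.
 */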
static int
swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	struct crypto_buffer_cursor cc;
	union authctx ctx;
	struct swcr_auth *swa;
	struct auth_hash *axf;
	uint32_t *blkp;
	int blksz, error, ivlen, len, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = axf->blocksize;

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	crypto_read_iv(crp, iv);

	axf->Reinit(&ctx, iv, ivlen);
	crypto_cursor_init(&cc, &crp->crp_buf);
	crypto_cursor_advance(&cc, crp->crp_payload_start);
	for (resid = crp->crp_payload_length; resid > 0; resid -= len) {
		len = MIN(resid, blksz);
		crypto_cursor_copydata(&cc, len, blk);
		bzero(blk + len, blksz - len);
		axf->Update(&ctx, blk, blksz);
	}

	/*
	 * GHASH length block: a 64-bit AAD bit count followed by a
	 * 64-bit ciphertext bit count.  GMAC treats all input as AAD,
	 * so only the AAD count is non-zero.
	 */
	bzero(blk, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	error = 0;
	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char uaalg[AALG_MAX_RESULT_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    uaalg);
		if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
			error = EBADMSG;
		explicit_bzero(uaalg, sizeof(uaalg));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
	}
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(aalg, sizeof(aalg));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}

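/*
 * AES-GCM AEAD: CTR-mode encryption with a GHASH tag computed over the
 * AAD, the ciphertext, and a trailing length block.
 */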
static int
swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	uint32_t *blkp;
	int blksz, error, ivlen, len, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = axf->blocksize;

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_aad_start);
	for (resid = crp->crp_aad_length; resid > 0; resid -= len) {
		len = MIN(resid, blksz);
		crypto_cursor_copydata(&cc_in, len, blk);
		bzero(blk + len, blksz - len);
		axf->Update(&ctx, blk, blksz);
	}

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid > 0; resid -= len) {
		len = MIN(resid, blksz);
		if (len < blksz)
			bzero(blk, blksz);
		crypto_cursor_copydata(&cc_in, len, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt(swe->sw_kschedule, blk, blk);
			axf->Update(&ctx, blk, len);
			crypto_cursor_copyback(&cc_out, len, blk);
		} else {
			axf->Update(&ctx, blk, len);
		}
	}

	/*
	 * GHASH length block: a 64-bit AAD bit count followed by a
	 * 64-bit ciphertext bit count.
	 */
	bzero(blk, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_aad_length * 8);
	blkp = (uint32_t *)blk + 3;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

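	/*
	 * On decryption the tag is verified before any plaintext is
	 * written back; the payload loop above only feeds the MAC, and
	 * the decryption pass below runs only once the tag has been
	 * checked.
	 */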
	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char uaalg[AALG_MAX_RESULT_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    uaalg);

		r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
		explicit_bzero(uaalg, sizeof(uaalg));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > 0;
		     resid -= len) {
			len = MIN(resid, blksz);
			if (len < blksz)
				bzero(blk, blksz);
			crypto_cursor_copydata(&cc_in, len, blk);
			exf->decrypt(swe->sw_kschedule, blk, blk);
			crypto_cursor_copyback(&cc_out, len, blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
		    aalg);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(aalg, sizeof(aalg));
	explicit_bzero(iv, sizeof(iv));

	return (error);
}

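/*
 * AES-CCM CBC-MAC: authentication-only CCM.  The payload is treated as
 * the auth data; there is no encrypted payload.
 */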
static int
swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	struct crypto_buffer_cursor cc;
	union authctx ctx;
	struct swcr_auth *swa;
	struct auth_hash *axf;
	int blksz, error, ivlen, len, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = axf->blocksize;

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	crypto_read_iv(crp, iv);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = 0;

	axf->Reinit(&ctx, iv, ivlen);
	crypto_cursor_init(&cc, &crp->crp_buf);
	crypto_cursor_advance(&cc, crp->crp_payload_start);
	for (resid = crp->crp_payload_length; resid > 0; resid -= len) {
		len = MIN(resid, blksz);
		crypto_cursor_copydata(&cc, len, blk);
		bzero(blk + len, blksz - len);
		axf->Update(&ctx, blk, blksz);
	}

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	error = 0;
	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char uaalg[AALG_MAX_RESULT_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    uaalg);
		if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
			error = EBADMSG;
		explicit_bzero(uaalg, sizeof(uaalg));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
	}
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(aalg, sizeof(aalg));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}

static int
swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	int blksz, error, ivlen, len, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = axf->blocksize;

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;
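	/*
	 * CCM folds both lengths into the first CBC-MAC blocks (the
	 * B_0 block and the AAD length prefix), which is why they must
	 * be known before any data is processed.
	 */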

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_aad_start);
	for (resid = crp->crp_aad_length; resid > 0; resid -= len) {
		len = MIN(resid, blksz);
		crypto_cursor_copydata(&cc_in, len, blk);
		bzero(blk + len, blksz - len);
		axf->Update(&ctx, blk, blksz);
	}

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption/decryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid > 0; resid -= len) {
		len = MIN(resid, blksz);
		if (len < blksz)
			bzero(blk, blksz);
		crypto_cursor_copydata(&cc_in, len, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			axf->Update(&ctx, blk, len);
			exf->encrypt(swe->sw_kschedule, blk, blk);
			crypto_cursor_copyback(&cc_out, len, blk);
		} else {
			/*
			 * One of the problems with CCM is that the
			 * authentication is done on the unencrypted
			 * data.  As a result, we have to decrypt the
			 * data twice: once to generate the tag and a
			 * second time after the tag is verified.
			 */
			exf->decrypt(swe->sw_kschedule, blk, blk);
			axf->Update(&ctx, blk, len);
		}
	}

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char uaalg[AALG_MAX_RESULT_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    uaalg);

		r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
		explicit_bzero(uaalg, sizeof(uaalg));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
		exf->reinit(swe->sw_kschedule, iv);
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > 0;
		     resid -= len) {
			len = MIN(resid, blksz);
			if (len < blksz)
				bzero(blk, blksz);
			crypto_cursor_copydata(&cc_in, len, blk);
			exf->decrypt(swe->sw_kschedule, blk, blk);
			crypto_cursor_copyback(&cc_out, len, blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
		    aalg);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(aalg, sizeof(aalg));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}

/*
 * Apply a cipher and a digest to perform EtA: encrypt-then-
 * authenticate when encrypting, verify the digest before decrypting
 * when decrypting.
 */
static int
swcr_eta(struct swcr_session *ses, struct cryptop *crp)
{
	int error;

	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		error = swcr_encdec(ses, crp);
		if (error == 0)
			error = swcr_authcompute(ses, crp);
	} else {
		error = swcr_authcompute(ses, crp);
		if (error == 0)
			error = swcr_encdec(ses, crp);
	}
	return (error);
}

/*
 * Apply a compression/decompression algorithm
 */
static int
swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
{
	u_int8_t *data, *out;
	struct comp_algo *cxf;
	int adj;
	u_int32_t result;

	cxf = ses->swcr_compdec.sw_cxf;

	/*
	 * We must process the whole buffer of data at once, so if the
	 * data is not contiguous in the request buffer, copy it into a
	 * temporary buffer first.
	 */
	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
	if (data == NULL)
		return (EINVAL);
	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
	    data);

	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
		result = cxf->compress(data, crp->crp_payload_length, &out);
	else
		result = cxf->decompress(data, crp->crp_payload_length, &out);

	free(data, M_CRYPTO_DATA);
	if (result == 0)
		return (EINVAL);
	crp->crp_olen = result;

	/* Check the compressed size when doing compression */
	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
		if (result >= crp->crp_payload_length) {
			/* Compression gained nothing; leave the data as-is. */
			free(out, M_CRYPTO_DATA);
			return (0);
		}
	}

	/*
	 * Copy back the (de)compressed data.  m_copyback will extend
	 * the mbuf as necessary.
	 */
	crypto_copyback(crp, crp->crp_payload_start, result, out);
	if (result < crp->crp_payload_length) {
		switch (crp->crp_buf.cb_type) {
		case CRYPTO_BUF_MBUF:
			adj = result - crp->crp_payload_length;
			m_adj(crp->crp_buf.cb_mbuf, adj);
			break;
		case CRYPTO_BUF_UIO: {
			struct uio *uio = crp->crp_buf.cb_uio;
			int ind;

			adj = crp->crp_payload_length - result;
			ind = uio->uio_iovcnt - 1;

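			/*
			 * Trim the tail of the uio in place: shrink or
			 * drop iovecs from the end until 'adj' bytes
			 * have been removed.
			 */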
			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;
					break;
				}

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;
				ind--;
				uio->uio_iovcnt--;
			}
			}
			break;
		default:
			break;
		}
	}
	free(out, M_CRYPTO_DATA);
	return (0);
}

static int
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_encdec *swe;
	struct enc_xform *txf;
	int error;

	swe = &ses->swcr_encdec;
	txf = crypto_cipher(csp);
	MPASS(txf->ivsize == csp->csp_ivlen);
	if (txf->ctxsize != 0) {
		swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swe->sw_kschedule == NULL)
			return (ENOMEM);
	}
	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}
	swe->sw_exf = txf;
	return (0);
}

static int
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	swa = &ses->swcr_auth;

	axf = crypto_auth_hash(csp);
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);

	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swa->sw_octx == NULL)
			return (ENOBUFS);

		if (csp->csp_auth_key != NULL) {
			swcr_authprepare(axf, swa, csp->csp_auth_key,
			    csp->csp_auth_klen);
		}

		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_NIST_GMAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_gmac;
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/*
		 * Blake2b and Blake2s support an optional key but do
		 * not require one.
		 */
		if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
			    csp->csp_auth_klen);
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_ccm_cbc_mac;
		break;
	}

	return (0);
}

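/*
 * Setup for AES-GCM AEAD sessions.  As an example (sketch), an
 * AES-128-GCM session arrives with csp_cipher_alg =
 * CRYPTO_AES_NIST_GCM_16, csp_cipher_klen = 16 and csp_ivlen =
 * AES_GCM_IV_LEN, selecting auth_hash_nist_gmac_aes_128 below.
 */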
static int
swcr_setup_gcm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	if (csp->csp_ivlen != AES_GCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_nist_gmac_aes_128;
		break;
	case 192:
		axf = &auth_hash_nist_gmac_aes_192;
		break;
	case 256:
		axf = &auth_hash_nist_gmac_aes_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}

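/*
 * Setup for AES-CCM AEAD sessions.  As with GCM above, the CBC-MAC
 * transform is selected purely by the AES key length; any other key
 * size or IV length fails the session.
 */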
static int
swcr_setup_ccm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	if (csp->csp_ivlen != AES_CCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_ccm_cbc_mac_128;
		break;
	case 192:
		axf = &auth_hash_ccm_cbc_mac_192;
		break;
	case 256:
		axf = &auth_hash_ccm_cbc_mac_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}

static bool
swcr_auth_supported(const struct crypto_session_params *csp)
{
	struct auth_hash *axf;

	axf = crypto_auth_hash(csp);
	if (axf == NULL)
		return (false);
	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		break;
	case CRYPTO_AES_NIST_GMAC:
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_GCM_IV_LEN)
			return (false);
		break;
	case CRYPTO_POLY1305:
		if (csp->csp_auth_klen != POLY1305_KEY_LEN)
			return (false);
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_CCM_IV_LEN)
			return (false);
		break;
	}
	return (true);
}

static bool
swcr_cipher_supported(const struct crypto_session_params *csp)
{
	struct enc_xform *txf;

	txf = crypto_cipher(csp);
	if (txf == NULL)
		return (false);
	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
	    txf->ivsize != csp->csp_ivlen)
		return (false);
	return (true);
}

static int
swcr_probesession(device_t dev, const struct crypto_session_params *csp)
{

	if ((csp->csp_flags & ~(CSP_F_SEPARATE_OUTPUT)) != 0)
		return (EINVAL);
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			return (EINVAL);
		default:
			if (!swcr_cipher_supported(csp))
				return (EINVAL);
			break;
		}
		break;
	case CSP_MODE_DIGEST:
		if (!swcr_auth_supported(csp))
			return (EINVAL);
		break;
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_ETA:
		/* AEAD algorithms cannot be used for EtA. */
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			return (EINVAL);
		}
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			return (EINVAL);
		}

		if (!swcr_cipher_supported(csp) ||
		    !swcr_auth_supported(csp))
			return (EINVAL);
		break;
	default:
		return (EINVAL);
	}

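	/*
	 * Advertise the lowest probe priority so that hardware
	 * drivers are preferred when they also support the session.
	 */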
	return (CRYPTODEV_PROBE_SOFTWARE);
}

/*
 * Generate a new software session.
 */
static int
swcr_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)
{
	struct swcr_session *ses;
	struct swcr_encdec *swe;
	struct swcr_auth *swa;
	struct comp_algo *cxf;
	int error;

	ses = crypto_get_driver_session(cses);
	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);

	error = 0;
	swe = &ses->swcr_encdec;
	swa = &ses->swcr_auth;
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			cxf = &comp_algo_deflate;
			break;
#ifdef INVARIANTS
		default:
			panic("bad compression algo");
#endif
		}
		ses->swcr_compdec.sw_cxf = cxf;
		ses->swcr_process = swcr_compdec;
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_NULL_CBC:
			ses->swcr_process = swcr_null;
			break;
#ifdef INVARIANTS
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			panic("bad cipher algo");
#endif
		default:
			error = swcr_setup_cipher(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_encdec;
		}
		break;
	case CSP_MODE_DIGEST:
		error = swcr_setup_auth(ses, csp);
		break;
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
			error = swcr_setup_gcm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_gcm;
			break;
		case CRYPTO_AES_CCM_16:
			error = swcr_setup_ccm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_ccm;
			break;
#ifdef INVARIANTS
		default:
			panic("bad aead algo");
#endif
		}
		break;
	case CSP_MODE_ETA:
#ifdef INVARIANTS
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			panic("bad eta cipher algo");
		}
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			panic("bad eta auth algo");
		}
#endif

		error = swcr_setup_auth(ses, csp);
		if (error)
			break;
		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
			/* Effectively degrade to digest mode. */
			ses->swcr_process = swcr_authcompute;
			break;
		}

		error = swcr_setup_cipher(ses, csp);
		if (error == 0)
			ses->swcr_process = swcr_eta;
		break;
	default:
		error = EINVAL;
	}

	if (error)
		swcr_freesession(dev, cses);
	return (error);
}

static void
swcr_freesession(device_t dev, crypto_session_t cses)
{
	struct swcr_session *ses;
	struct swcr_auth *swa;
	struct auth_hash *axf;

	ses = crypto_get_driver_session(cses);

	mtx_destroy(&ses->swcr_lock);

	zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);

	axf = ses->swcr_auth.sw_axf;
	if (axf != NULL) {
		swa = &ses->swcr_auth;
		if (swa->sw_ictx != NULL) {
			explicit_bzero(swa->sw_ictx, axf->ctxsize);
			free(swa->sw_ictx, M_CRYPTO_DATA);
		}
		if (swa->sw_octx != NULL) {
			explicit_bzero(swa->sw_octx, axf->ctxsize);
			free(swa->sw_octx, M_CRYPTO_DATA);
		}
	}
}

/*
 * Process a software request.  The per-session lock serializes
 * requests on the same session, since requests mutate shared
 * per-session state (e.g. the key schedule on rekey).
 */
static int
swcr_process(device_t dev, struct cryptop *crp, int hint)
{
	struct swcr_session *ses;

	ses = crypto_get_driver_session(crp->crp_session);
	mtx_lock(&ses->swcr_lock);

	crp->crp_etype = ses->swcr_process(ses, crp);

	mtx_unlock(&ses->swcr_lock);
	crypto_done(crp);
	return (0);
}

static void
swcr_identify(driver_t *drv, device_t parent)
{
	/* NB: order 10 is so we get attached after h/w devices */
	if (device_find_child(parent, "cryptosoft", -1) == NULL &&
	    BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
		panic("cryptosoft: could not attach");
}

static int
swcr_probe(device_t dev)
{
	device_set_desc(dev, "software crypto");
	return (BUS_PROBE_NOWILDCARD);
}

static int
swcr_attach(device_t dev)
{

	swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
			CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
	if (swcr_id < 0) {
		device_printf(dev, "cannot initialize!\n");
		return (ENXIO);
	}

	return (0);
}

static int
swcr_detach(device_t dev)
{
	crypto_unregister_all(swcr_id);
	return (0);
}

static device_method_t swcr_methods[] = {
	DEVMETHOD(device_identify,	swcr_identify),
	DEVMETHOD(device_probe,		swcr_probe),
	DEVMETHOD(device_attach,	swcr_attach),
	DEVMETHOD(device_detach,	swcr_detach),

	DEVMETHOD(cryptodev_probesession, swcr_probesession),
	DEVMETHOD(cryptodev_newsession,	swcr_newsession),
	DEVMETHOD(cryptodev_freesession, swcr_freesession),
	DEVMETHOD(cryptodev_process,	swcr_process),

	{0, 0},
};

static driver_t swcr_driver = {
	"cryptosoft",
	swcr_methods,
	0,		/* NB: no softc */
};
static devclass_t swcr_devclass;

/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module.  This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);
/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);
1432