/*	$FreeBSD$	*/
/*	$OpenBSD: ip_esp.c,v 1.69 2001/06/26 06:18:59 angelos Exp $ */
/*-
 * The authors of this code are John Ioannidis (ji@tla.org),
 * Angelos D. Keromytis (kermit@csd.uch.gr) and
 * Niels Provos (provos@physnet.uni-hamburg.de).
 *
 * The original version of this code was written by John Ioannidis
 * for BSD/OS in Athens, Greece, in November 1995.
 *
 * Ported to OpenBSD and NetBSD, with additional transforms, in December 1996,
 * by Angelos D. Keromytis.
 *
 * Additional transforms and features in 1997 and 1998 by Angelos D. Keromytis
 * and Niels Provos.
 *
 * Additional features in 1999 by Angelos D. Keromytis.
 *
 * Copyright (C) 1995, 1996, 1997, 1998, 1999 by John Ioannidis,
 * Angelos D. Keromytis and Niels Provos.
 * Copyright (c) 2001 Angelos D. Keromytis.
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all copies of any software which is or includes a copy or
 * modification of this software.
 * You may use this code under the GNU public license if you so wish. Please
 * contribute changes back to the authors under this freer than GPL license
 * so that we may further the use of strong encryption without limitations to
 * all.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */
#include "opt_inet.h"
#include "opt_inet6.h"
#include "opt_ipsec.h"

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/socket.h>
#include <sys/syslog.h>
#include <sys/kernel.h>
#include <sys/lock.h>
#include <sys/random.h>
#include <sys/mutex.h>
#include <sys/sysctl.h>
#include <machine/atomic.h>

#include <net/if.h>
#include <net/vnet.h>

#include <netinet/in.h>
#include <netinet/in_systm.h>
#include <netinet/ip.h>
#include <netinet/ip_ecn.h>
#include <netinet/ip6.h>

#include <netipsec/ipsec.h>
#include <netipsec/ah.h>
#include <netipsec/ah_var.h>
#include <netipsec/esp.h>
#include <netipsec/esp_var.h>
#include <netipsec/xform.h>

#ifdef INET6
#include <netinet6/ip6_var.h>
#include <netipsec/ipsec6.h>
#include <netinet6/ip6_ecn.h>
#endif

#include <netipsec/key.h>
#include <netipsec/key_debug.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#define SPI_SIZE	4

VNET_DEFINE(int, esp_enable) = 1;
VNET_DEFINE_STATIC(int, esp_ctr_compatibility) = 1;
#define V_esp_ctr_compatibility VNET(esp_ctr_compatibility)
VNET_PCPUSTAT_DEFINE(struct espstat, espstat);
VNET_PCPUSTAT_SYSINIT(espstat);

#ifdef VIMAGE
VNET_PCPUSTAT_SYSUNINIT(espstat);
#endif /* VIMAGE */

SYSCTL_DECL(_net_inet_esp);
SYSCTL_INT(_net_inet_esp, OID_AUTO, esp_enable,
	CTLFLAG_VNET | CTLFLAG_RW, &VNET_NAME(esp_enable), 0, "");
SYSCTL_INT(_net_inet_esp, OID_AUTO, ctr_compatibility,
    CTLFLAG_VNET | CTLFLAG_RW, &VNET_NAME(esp_ctr_compatibility), 0,
    "Align AES-CTR encrypted transmitted frames to blocksize");
SYSCTL_VNET_PCPUSTAT(_net_inet_esp, IPSECCTL_STATS, stats,
    struct espstat, espstat,
    "ESP statistics (struct espstat, netipsec/esp_var.h)");

static MALLOC_DEFINE(M_ESP, "esp", "IPsec ESP");

static int esp_input_cb(struct cryptop *op);
static int esp_output_cb(struct cryptop *crp);

size_t
esp_hdrsiz(struct secasvar *sav)
{
	size_t size;

	if (sav != NULL) {
		/*XXX not right for null algorithm--does it matter??*/
		IPSEC_ASSERT(sav->tdb_encalgxform != NULL,
			("SA with null xform"));
		if (sav->flags & SADB_X_EXT_OLD)
			size = sizeof (struct esp);
		else
			size = sizeof (struct newesp);
		size += sav->tdb_encalgxform->blocksize + 9;
		/*XXX need alg check???*/
		if (sav->tdb_authalgxform != NULL && sav->replay)
			size += ah_hdrsiz(sav);
	} else {
		/*
		 *   base header size
		 * + max iv length for CBC mode
		 * + max pad length
		 * + sizeof (pad length field)
		 * + sizeof (next header field)
		 * + max icv supported.
		 */
		size = sizeof (struct newesp) + EALG_MAX_BLOCK_LEN + 9 + 16;
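		/*
		 * Illustrative worst case (a sketch only, assuming the
		 * EALG_MAX_BLOCK_LEN of 16 from ipsec.h): 8 (newesp)
		 * + 16 (max CBC IV) + 9 (max pad + pad length + next
		 * header) + 16 (max ICV) = 49 bytes of ESP overhead.
		 */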
	}
	return size;
}

/*
 * esp_init() is called when an SPI is being set up.
 */
static int
esp_init(struct secasvar *sav, struct xformsw *xsp)
{
	const struct enc_xform *txform;
	struct crypto_session_params csp;
	int keylen;
	int error;

	txform = enc_algorithm_lookup(sav->alg_enc);
	if (txform == NULL) {
		DPRINTF(("%s: unsupported encryption algorithm %d\n",
			__func__, sav->alg_enc));
		return EINVAL;
	}
	if (sav->key_enc == NULL) {
		DPRINTF(("%s: no encoding key for %s algorithm\n",
			 __func__, txform->name));
		return EINVAL;
	}
	if ((sav->flags & (SADB_X_EXT_OLD | SADB_X_EXT_IV4B)) ==
	    SADB_X_EXT_IV4B) {
		DPRINTF(("%s: 4-byte IV not supported with protocol\n",
			__func__));
		return EINVAL;
	}

	/* subtract off the salt, RFC4106, 8.1 and RFC3686, 5.1 */
	keylen = _KEYLEN(sav->key_enc) - SAV_ISCTRORGCM(sav) * 4;
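	/*
	 * Worked example (a sketch, assuming RFC 4106 style keying):
	 * an AES-128-GCM SA is provisioned with 20 bytes of key
	 * material, the trailing 4 bytes being the salt, so keylen
	 * becomes 16 and is range-checked against the cipher's
	 * minkey/maxkey below.
	 */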
	if (txform->minkey > keylen || keylen > txform->maxkey) {
		DPRINTF(("%s: invalid key length %u, must be in the range "
			"[%u..%u] for algorithm %s\n", __func__,
			keylen, txform->minkey, txform->maxkey,
			txform->name));
		return EINVAL;
	}

	if (SAV_ISCTRORGCM(sav))
		sav->ivlen = 8;	/* RFC4106 3.1 and RFC3686 3.1 */
	else
		sav->ivlen = txform->ivsize;

	memset(&csp, 0, sizeof(csp));

	/*
	 * Setup AH-related state.
	 */
	if (sav->alg_auth != 0) {
		error = ah_init0(sav, xsp, &csp);
		if (error)
			return error;
	}

	/* NB: override anything set in ah_init0 */
	sav->tdb_xform = xsp;
	sav->tdb_encalgxform = txform;

	/*
	 * Whenever AES-GCM is used for encryption, one
	 * of the AES authentication algorithms is chosen
	 * as well, based on the key size.
	 */
	if (sav->alg_enc == SADB_X_EALG_AESGCM16) {
		switch (keylen) {
		case AES_128_GMAC_KEY_LEN:
			sav->alg_auth = SADB_X_AALG_AES128GMAC;
			sav->tdb_authalgxform = &auth_hash_nist_gmac_aes_128;
			break;
		case AES_192_GMAC_KEY_LEN:
			sav->alg_auth = SADB_X_AALG_AES192GMAC;
			sav->tdb_authalgxform = &auth_hash_nist_gmac_aes_192;
			break;
		case AES_256_GMAC_KEY_LEN:
			sav->alg_auth = SADB_X_AALG_AES256GMAC;
			sav->tdb_authalgxform = &auth_hash_nist_gmac_aes_256;
			break;
		default:
			DPRINTF(("%s: invalid key length %u "
				 "for algorithm %s\n", __func__,
				 keylen, txform->name));
			return EINVAL;
		}
		csp.csp_mode = CSP_MODE_AEAD;
		if (sav->flags & SADB_X_SAFLAGS_ESN)
			csp.csp_flags |= CSP_F_SEPARATE_AAD;
	} else if (sav->alg_auth != 0) {
		csp.csp_mode = CSP_MODE_ETA;
		if (sav->flags & SADB_X_SAFLAGS_ESN)
			csp.csp_flags |= CSP_F_ESN;
	} else
		csp.csp_mode = CSP_MODE_CIPHER;

	/* Initialize crypto session. */
	csp.csp_cipher_alg = sav->tdb_encalgxform->type;
	if (csp.csp_cipher_alg != CRYPTO_NULL_CBC) {
		csp.csp_cipher_key = sav->key_enc->key_data;
		csp.csp_cipher_klen = _KEYBITS(sav->key_enc) / 8 -
		    SAV_ISCTRORGCM(sav) * 4;
	}
	csp.csp_ivlen = txform->ivsize;

	error = crypto_newsession(&sav->tdb_cryptoid, &csp, V_crypto_support);
	return error;
}

static void
esp_cleanup(struct secasvar *sav)
{

	crypto_freesession(sav->tdb_cryptoid);
	sav->tdb_cryptoid = NULL;
	sav->tdb_authalgxform = NULL;
	sav->tdb_encalgxform = NULL;
}

/*
 * ESP input processing, called (eventually) through the protocol switch.
 */
static int
esp_input(struct mbuf *m, struct secasvar *sav, int skip, int protoff)
{
	IPSEC_DEBUG_DECLARE(char buf[128]);
	const struct auth_hash *esph;
	const struct enc_xform *espx;
	struct xform_data *xd;
	struct cryptop *crp;
	struct newesp *esp;
	uint8_t *ivp;
	crypto_session_t cryptoid;
	int alen, error, hlen, plen;
	uint32_t seqh;
	const struct crypto_session_params *csp;

	IPSEC_ASSERT(sav != NULL, ("null SA"));
	IPSEC_ASSERT(sav->tdb_encalgxform != NULL, ("null encoding xform"));

	error = EINVAL;
	/* Valid IP Packet length ? */
	if ((skip & 3) || (m->m_pkthdr.len & 3)) {
		DPRINTF(("%s: misaligned packet, skip %u pkt len %u\n",
				__func__, skip, m->m_pkthdr.len));
		ESPSTAT_INC(esps_badilen);
		goto bad;
	}

	if (m->m_len < skip + sizeof(*esp)) {
		m = m_pullup(m, skip + sizeof(*esp));
		if (m == NULL) {
			DPRINTF(("%s: cannot pullup header\n", __func__));
			ESPSTAT_INC(esps_hdrops);	/*XXX*/
			error = ENOBUFS;
			goto bad;
		}
	}
	esp = (struct newesp *)(mtod(m, caddr_t) + skip);

	esph = sav->tdb_authalgxform;
	espx = sav->tdb_encalgxform;

	/* Determine the ESP header and auth length */
	if (sav->flags & SADB_X_EXT_OLD)
		hlen = sizeof (struct esp) + sav->ivlen;
	else
		hlen = sizeof (struct newesp) + sav->ivlen;

	alen = xform_ah_authsize(esph);

	/*
	 * Verify payload length is multiple of encryption algorithm
	 * block size.
	 *
	 * NB: This works for the null algorithm because the blocksize
	 *     is 4 and all packets must be 4-byte aligned regardless
	 *     of the algorithm.
	 */
	plen = m->m_pkthdr.len - (skip + hlen + alen);
	if ((plen & (espx->blocksize - 1)) || (plen <= 0)) {
		DPRINTF(("%s: payload of %d octets not a multiple of %d octets,"
		    "  SA %s/%08lx\n", __func__, plen, espx->blocksize,
		    ipsec_address(&sav->sah->saidx.dst, buf, sizeof(buf)),
		    (u_long)ntohl(sav->spi)));
		ESPSTAT_INC(esps_badilen);
		goto bad;
	}

	/*
	 * Check sequence number.
	 */
	SECASVAR_LOCK(sav);
	if (esph != NULL && sav->replay != NULL && sav->replay->wsize != 0) {
		if (ipsec_chkreplay(ntohl(esp->esp_seq), &seqh, sav) == 0) {
			SECASVAR_UNLOCK(sav);
			DPRINTF(("%s: packet replay check for %s\n", __func__,
			    ipsec_sa2str(sav, buf, sizeof(buf))));
			ESPSTAT_INC(esps_replay);
			error = EACCES;
			goto bad;
		}
		seqh = htonl(seqh);
	}
	cryptoid = sav->tdb_cryptoid;
	SECASVAR_UNLOCK(sav);

	/* Update the counters */
	ESPSTAT_ADD(esps_ibytes, m->m_pkthdr.len - (skip + hlen + alen));

	/* Get crypto descriptors */
	crp = crypto_getreq(cryptoid, M_NOWAIT);
	if (crp == NULL) {
		DPRINTF(("%s: failed to acquire crypto descriptors\n",
			__func__));
		ESPSTAT_INC(esps_crypto);
		error = ENOBUFS;
		goto bad;
	}

	/* Get IPsec-specific opaque pointer */
	xd = malloc(sizeof(*xd), M_ESP, M_NOWAIT | M_ZERO);
	if (xd == NULL) {
		DPRINTF(("%s: failed to allocate xform_data\n", __func__));
		goto xd_fail;
	}

	if (esph != NULL) {
		crp->crp_op = CRYPTO_OP_VERIFY_DIGEST;
		if (SAV_ISGCM(sav))
			crp->crp_aad_length = 8; /* RFC4106 5, SPI + SN */
		else
			crp->crp_aad_length = hlen;

		csp = crypto_get_params(crp->crp_session);
		if ((csp->csp_flags & CSP_F_SEPARATE_AAD) &&
		    (sav->replay != NULL) && (sav->replay->wsize != 0)) {
			int aad_skip;

			crp->crp_aad_length += sizeof(seqh);
			crp->crp_aad = malloc(crp->crp_aad_length, M_ESP, M_NOWAIT);
			if (crp->crp_aad == NULL) {
				DPRINTF(("%s: failed to allocate AAD buffer\n",
					 __func__));
				goto crp_aad_fail;
			}

			/* SPI */
			m_copydata(m, skip, SPI_SIZE, crp->crp_aad);
			aad_skip = SPI_SIZE;

			/* ESN */
			bcopy(&seqh, (char *)crp->crp_aad + aad_skip, sizeof(seqh));
			aad_skip += sizeof(seqh);

			/* Rest of aad */
			if (crp->crp_aad_length - aad_skip > 0)
				m_copydata(m, skip + SPI_SIZE,
					   crp->crp_aad_length - aad_skip,
					   (char *)crp->crp_aad + aad_skip);
		} else
			crp->crp_aad_start = skip;
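		/*
		 * Sketch of the AAD assembled above for the AEAD+ESN
		 * case (RFC 4106, section 5):
		 *
		 *	SPI (4) || ESN high 32 bits (4) || SN low 32 bits (4)
		 *
		 * Only the low-order sequence number is carried in the
		 * ESP header itself; the high-order half is authenticated
		 * but never transmitted.
		 */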

		if (csp->csp_flags & CSP_F_ESN &&
			   sav->replay != NULL && sav->replay->wsize != 0)
			memcpy(crp->crp_esn, &seqh, sizeof(seqh));

		crp->crp_digest_start = m->m_pkthdr.len - alen;
	}

	/* Crypto operation descriptor */
	crp->crp_flags = CRYPTO_F_CBIFSYNC;
	crypto_use_mbuf(crp, m);
	crp->crp_callback = esp_input_cb;
	crp->crp_opaque = xd;

	/* These are passed as-is to the callback */
	xd->sav = sav;
	xd->protoff = protoff;
	xd->skip = skip;
	xd->cryptoid = cryptoid;
	xd->vnet = curvnet;

	/* Decryption descriptor */
	crp->crp_op |= CRYPTO_OP_DECRYPT;
	crp->crp_payload_start = skip + hlen;
	crp->crp_payload_length = m->m_pkthdr.len - (skip + hlen + alen);

	/* Generate or read cipher IV. */
	if (SAV_ISCTRORGCM(sav)) {
		ivp = &crp->crp_iv[0];

		/*
		 * AES-GCM and AES-CTR use similar cipher IV formats
		 * defined in RFC 4106 section 4 and RFC 3686 section
		 * 4, respectively.
		 *
		 * The first 4 bytes of the cipher IV contain an
		 * implicit salt, or nonce, obtained from the last 4
		 * bytes of the encryption key.  The next 8 bytes hold
		 * an explicit IV unique to each packet.  This
		 * explicit IV is used as the ESP IV for the packet.
		 * The last 4 bytes hold a big-endian block counter
		 * incremented for each block.  For AES-GCM, the block
		 * counter's initial value is defined as part of the
		 * algorithm.  For AES-CTR, the block counter's
		 * initial value for each packet is defined as 1 by
		 * RFC 3686.
		 *
		 * ------------------------------------------
		 * | Salt | Explicit ESP IV | Block Counter |
		 * ------------------------------------------
		 *  4 bytes     8 bytes          4 bytes
		 */
		memcpy(ivp, sav->key_enc->key_data +
		    _KEYLEN(sav->key_enc) - 4, 4);
		m_copydata(m, skip + hlen - sav->ivlen, sav->ivlen, &ivp[4]);
		if (SAV_ISCTR(sav)) {
			be32enc(&ivp[sav->ivlen + 4], 1);
		}
		crp->crp_flags |= CRYPTO_F_IV_SEPARATE;
	} else if (sav->ivlen != 0)
		crp->crp_iv_start = skip + hlen - sav->ivlen;

	if (V_async_crypto)
		return (crypto_dispatch_async(crp, CRYPTO_ASYNC_ORDERED));
	else
		return (crypto_dispatch(crp));

crp_aad_fail:
	free(xd, M_ESP);
xd_fail:
	crypto_freereq(crp);
	ESPSTAT_INC(esps_crypto);
	error = ENOBUFS;
bad:
	m_freem(m);
	key_freesav(&sav);
	return (error);
}

/*
 * ESP input callback from the crypto driver.
 */
static int
esp_input_cb(struct cryptop *crp)
{
	IPSEC_DEBUG_DECLARE(char buf[128]);
	uint8_t lastthree[3];
	const struct auth_hash *esph;
	struct mbuf *m;
	struct xform_data *xd;
	struct secasvar *sav;
	struct secasindex *saidx;
	crypto_session_t cryptoid;
	int hlen, skip, protoff, error, alen;

	m = crp->crp_buf.cb_mbuf;
	xd = crp->crp_opaque;
	CURVNET_SET(xd->vnet);
	sav = xd->sav;
	skip = xd->skip;
	protoff = xd->protoff;
	cryptoid = xd->cryptoid;
	saidx = &sav->sah->saidx;
	esph = sav->tdb_authalgxform;

	/* Check for crypto errors */
	if (crp->crp_etype) {
		if (crp->crp_etype == EAGAIN) {
			/* Reset the session ID */
			if (ipsec_updateid(sav, &crp->crp_session, &cryptoid) != 0)
				crypto_freesession(cryptoid);
			xd->cryptoid = crp->crp_session;
			CURVNET_RESTORE();
			return (crypto_dispatch(crp));
		}

		/* EBADMSG indicates authentication failure. */
		if (!(crp->crp_etype == EBADMSG && esph != NULL)) {
			ESPSTAT_INC(esps_noxform);
			DPRINTF(("%s: crypto error %d\n", __func__,
				crp->crp_etype));
			error = crp->crp_etype;
			goto bad;
		}
	}

	/* Shouldn't happen... */
	if (m == NULL) {
		ESPSTAT_INC(esps_crypto);
		DPRINTF(("%s: bogus returned buffer from crypto\n", __func__));
		error = EINVAL;
		goto bad;
	}
	ESPSTAT_INC(esps_hist[sav->alg_enc]);

	/* If authentication was performed, check now. */
	if (esph != NULL) {
		alen = xform_ah_authsize(esph);
		AHSTAT_INC(ahs_hist[sav->alg_auth]);
		if (crp->crp_etype == EBADMSG) {
			DPRINTF(("%s: authentication hash mismatch for "
			    "packet in SA %s/%08lx\n", __func__,
			    ipsec_address(&saidx->dst, buf, sizeof(buf)),
			    (u_long) ntohl(sav->spi)));
			ESPSTAT_INC(esps_badauth);
			error = EACCES;
			goto bad;
		}
		m->m_flags |= M_AUTHIPDGM;
		/* Remove trailing authenticator */
		m_adj(m, -alen);
	}

	/* Release the crypto descriptors */
	free(xd, M_ESP), xd = NULL;
	free(crp->crp_aad, M_ESP), crp->crp_aad = NULL;
	crypto_freereq(crp), crp = NULL;

	/*
	 * Packet is now decrypted.
	 */
	m->m_flags |= M_DECRYPTED;

	/*
	 * Update replay sequence number, if appropriate.
	 */
	if (sav->replay) {
		u_int32_t seq;

		m_copydata(m, skip + offsetof(struct newesp, esp_seq),
			   sizeof (seq), (caddr_t) &seq);
		SECASVAR_LOCK(sav);
		if (ipsec_updatereplay(ntohl(seq), sav)) {
			SECASVAR_UNLOCK(sav);
			DPRINTF(("%s: packet replay check for %s\n", __func__,
			    ipsec_sa2str(sav, buf, sizeof(buf))));
			ESPSTAT_INC(esps_replay);
			error = EACCES;
			goto bad;
		}
		SECASVAR_UNLOCK(sav);
	}

	/* Determine the ESP header length */
	if (sav->flags & SADB_X_EXT_OLD)
		hlen = sizeof (struct esp) + sav->ivlen;
	else
		hlen = sizeof (struct newesp) + sav->ivlen;

	/* Remove the ESP header and IV from the mbuf. */
	error = m_striphdr(m, skip, hlen);
	if (error) {
		ESPSTAT_INC(esps_hdrops);
		DPRINTF(("%s: bad mbuf chain, SA %s/%08lx\n", __func__,
		    ipsec_address(&sav->sah->saidx.dst, buf, sizeof(buf)),
		    (u_long) ntohl(sav->spi)));
		goto bad;
	}

	/* Save the last three bytes of decrypted data */
	m_copydata(m, m->m_pkthdr.len - 3, 3, lastthree);
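	/*
	 * Sketch of the decrypted RFC 4303 trailer that the checks
	 * below rely on (the ICV was already removed above when
	 * authentication was in use):
	 *
	 *	... payload ... | Padding | Pad Length | Next Header |
	 *
	 * so lastthree[0] is the last padding byte (or a payload byte
	 * when Pad Length is zero), lastthree[1] is Pad Length and
	 * lastthree[2] is Next Header.
	 */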

	/* Verify pad length */
	if (lastthree[1] + 2 > m->m_pkthdr.len - skip) {
		ESPSTAT_INC(esps_badilen);
		DPRINTF(("%s: invalid padding length %d for %u byte packet "
		    "in SA %s/%08lx\n", __func__, lastthree[1],
		    m->m_pkthdr.len - skip,
		    ipsec_address(&sav->sah->saidx.dst, buf, sizeof(buf)),
		    (u_long) ntohl(sav->spi)));
		error = EINVAL;
		goto bad;
	}

	/* Verify correct decryption by checking the last padding bytes */
	if ((sav->flags & SADB_X_EXT_PMASK) != SADB_X_EXT_PRAND) {
		if (lastthree[1] != lastthree[0] && lastthree[1] != 0) {
			ESPSTAT_INC(esps_badenc);
			DPRINTF(("%s: decryption failed for packet in "
			    "SA %s/%08lx\n", __func__, ipsec_address(
			    &sav->sah->saidx.dst, buf, sizeof(buf)),
			    (u_long) ntohl(sav->spi)));
			error = EINVAL;
			goto bad;
		}
	}

	/*
	 * RFC4303 2.6:
	 * Silently drop packet if next header field is IPPROTO_NONE.
	 */
	if (lastthree[2] == IPPROTO_NONE)
		goto bad;

	/* Trim the mbuf chain to remove trailing authenticator and padding */
	m_adj(m, -(lastthree[1] + 2));

	/* Restore the Next Protocol field */
	m_copyback(m, protoff, sizeof (u_int8_t), lastthree + 2);

	switch (saidx->dst.sa.sa_family) {
#ifdef INET6
	case AF_INET6:
		error = ipsec6_common_input_cb(m, sav, skip, protoff);
		break;
#endif
#ifdef INET
	case AF_INET:
		error = ipsec4_common_input_cb(m, sav, skip, protoff);
		break;
#endif
	default:
		panic("%s: Unexpected address family: %d saidx=%p", __func__,
		    saidx->dst.sa.sa_family, saidx);
	}
	CURVNET_RESTORE();
	return error;
bad:
	CURVNET_RESTORE();
	if (sav != NULL)
		key_freesav(&sav);
	if (m != NULL)
		m_freem(m);
	if (xd != NULL)
		free(xd, M_ESP);
	if (crp != NULL) {
		free(crp->crp_aad, M_ESP);
		crypto_freereq(crp);
	}
	return error;
}
/*
 * ESP output routine, called by ipsec[46]_perform_request().
 */
static int
esp_output(struct mbuf *m, struct secpolicy *sp, struct secasvar *sav,
    u_int idx, int skip, int protoff)
{
	IPSEC_DEBUG_DECLARE(char buf[IPSEC_ADDRSTRLEN]);
	struct cryptop *crp;
	const struct auth_hash *esph;
	const struct enc_xform *espx;
	struct mbuf *mo = NULL;
	struct xform_data *xd;
	struct secasindex *saidx;
	unsigned char *pad;
	uint8_t *ivp;
	uint64_t cntr;
	crypto_session_t cryptoid;
	int hlen, rlen, padding, blks, alen, i, roff;
	int error, maxpacketsize;
	uint8_t prot;
	uint32_t seqh;
	const struct crypto_session_params *csp;

	IPSEC_ASSERT(sav != NULL, ("null SA"));
	esph = sav->tdb_authalgxform;
	espx = sav->tdb_encalgxform;
	IPSEC_ASSERT(espx != NULL, ("null encoding xform"));

	if (sav->flags & SADB_X_EXT_OLD)
		hlen = sizeof (struct esp) + sav->ivlen;
	else
		hlen = sizeof (struct newesp) + sav->ivlen;

	rlen = m->m_pkthdr.len - skip;	/* Raw payload length. */
	/*
	 * RFC4303 2.4 Requires 4 byte alignment.
	 * Old versions of FreeBSD can't decrypt partial blocks encrypted
	 * with AES-CTR. Align payload to native_blocksize (16 bytes)
	 * in order to preserve compatibility.
	 */
	if (SAV_ISCTR(sav) && V_esp_ctr_compatibility)
		blks = MAX(4, espx->native_blocksize);	/* Cipher blocksize */
	else
		blks = MAX(4, espx->blocksize);

	/* XXX clamp padding length a la KAME??? */
	padding = ((blks - ((rlen + 2) % blks)) % blks) + 2;
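	/*
	 * Worked example (illustrative only): for AES-CBC blks is 16,
	 * so a 22 byte payload gives rlen + 2 = 24 and
	 * padding = ((16 - 24 % 16) % 16) + 2 = 10; payload plus
	 * padding then totals 32 bytes, satisfying the block
	 * alignment required above.
	 */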

	alen = xform_ah_authsize(esph);

	ESPSTAT_INC(esps_output);

	saidx = &sav->sah->saidx;
	/* Check for maximum packet size violations. */
	switch (saidx->dst.sa.sa_family) {
#ifdef INET
	case AF_INET:
		maxpacketsize = IP_MAXPACKET;
		break;
#endif /* INET */
#ifdef INET6
	case AF_INET6:
		maxpacketsize = IPV6_MAXPACKET;
		break;
#endif /* INET6 */
	default:
		DPRINTF(("%s: unknown/unsupported protocol "
		    "family %d, SA %s/%08lx\n", __func__,
		    saidx->dst.sa.sa_family, ipsec_address(&saidx->dst,
			buf, sizeof(buf)), (u_long) ntohl(sav->spi)));
		ESPSTAT_INC(esps_nopf);
		error = EPFNOSUPPORT;
		goto bad;
	}
	/*
	DPRINTF(("%s: skip %d hlen %d rlen %d padding %d alen %d blks %d\n",
		__func__, skip, hlen, rlen, padding, alen, blks)); */
	if (skip + hlen + rlen + padding + alen > maxpacketsize) {
		DPRINTF(("%s: packet in SA %s/%08lx got too big "
		    "(len %u, max len %u)\n", __func__,
		    ipsec_address(&saidx->dst, buf, sizeof(buf)),
		    (u_long) ntohl(sav->spi),
		    skip + hlen + rlen + padding + alen, maxpacketsize));
		ESPSTAT_INC(esps_toobig);
		error = EMSGSIZE;
		goto bad;
	}

	/* Update the counters. */
	ESPSTAT_ADD(esps_obytes, m->m_pkthdr.len - skip);

	m = m_unshare(m, M_NOWAIT);
	if (m == NULL) {
		DPRINTF(("%s: cannot clone mbuf chain, SA %s/%08lx\n", __func__,
		    ipsec_address(&saidx->dst, buf, sizeof(buf)),
		    (u_long) ntohl(sav->spi)));
		ESPSTAT_INC(esps_hdrops);
		error = ENOBUFS;
		goto bad;
	}

	/* Inject ESP header. */
	mo = m_makespace(m, skip, hlen, &roff);
	if (mo == NULL) {
		DPRINTF(("%s: %u byte ESP hdr inject failed for SA %s/%08lx\n",
		    __func__, hlen, ipsec_address(&saidx->dst, buf,
		    sizeof(buf)), (u_long) ntohl(sav->spi)));
		ESPSTAT_INC(esps_hdrops);	/* XXX diffs from openbsd */
		error = ENOBUFS;
		goto bad;
	}

	/* Initialize ESP header. */
	bcopy((caddr_t) &sav->spi, mtod(mo, caddr_t) + roff,
	    sizeof(uint32_t));
	SECASVAR_LOCK(sav);
	if (sav->replay) {
		uint32_t replay;

#ifdef REGRESSION
		/* Emulate replay attack when ipsec_replay is TRUE. */
		if (!V_ipsec_replay)
#endif
			sav->replay->count++;
		replay = htonl((uint32_t)sav->replay->count);

		bcopy((caddr_t) &replay, mtod(mo, caddr_t) + roff +
		    sizeof(uint32_t), sizeof(uint32_t));

		seqh = htonl((uint32_t)(sav->replay->count >> IPSEC_SEQH_SHIFT));
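		/*
		 * Note on the ESN split (a sketch of RFC 4303 ESN
		 * handling): only the low-order 32 bits of the 64-bit
		 * counter were written into the ESP header above; the
		 * high-order half kept in 'seqh' never goes on the
		 * wire and is only folded into the ICV via the
		 * separate AAD / crp_esn paths below.
		 */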
	}
	cryptoid = sav->tdb_cryptoid;
	if (SAV_ISCTRORGCM(sav))
		cntr = sav->cntr++;
	SECASVAR_UNLOCK(sav);

	/*
	 * Add padding -- better to do it ourselves than use the crypto engine,
	 * although if/when we support compression, we'd have to do that.
	 */
	pad = (u_char *) m_pad(m, padding + alen);
	if (pad == NULL) {
		DPRINTF(("%s: m_pad failed for SA %s/%08lx\n", __func__,
		    ipsec_address(&saidx->dst, buf, sizeof(buf)),
		    (u_long) ntohl(sav->spi)));
		m = NULL;		/* NB: free'd by m_pad */
		error = ENOBUFS;
		goto bad;
	}

	/*
	 * Add padding: random, zero, or self-describing.
	 * XXX catch unexpected setting
	 */
	switch (sav->flags & SADB_X_EXT_PMASK) {
	case SADB_X_EXT_PRAND:
		arc4random_buf(pad, padding - 2);
		break;
	case SADB_X_EXT_PZERO:
		bzero(pad, padding - 2);
		break;
	case SADB_X_EXT_PSEQ:
		for (i = 0; i < padding - 2; i++)
			pad[i] = i+1;
		break;
	}

	/* Fix padding length and Next Protocol in padding itself. */
	pad[padding - 2] = padding - 2;
	m_copydata(m, protoff, sizeof(u_int8_t), pad + padding - 1);

	/* Fix Next Protocol in IPv4/IPv6 header. */
	prot = IPPROTO_ESP;
	m_copyback(m, protoff, sizeof(u_int8_t), (u_char *) &prot);

	/* Get crypto descriptor. */
	crp = crypto_getreq(cryptoid, M_NOWAIT);
	if (crp == NULL) {
		DPRINTF(("%s: failed to acquire crypto descriptor\n",
			__func__));
		ESPSTAT_INC(esps_crypto);
		error = ENOBUFS;
		goto bad;
	}

	/* IPsec-specific opaque crypto info. */
	xd = malloc(sizeof(struct xform_data), M_ESP, M_NOWAIT | M_ZERO);
	if (xd == NULL) {
		DPRINTF(("%s: failed to allocate xform_data\n", __func__));
		goto xd_fail;
	}

	/* Encryption descriptor. */
	crp->crp_payload_start = skip + hlen;
	crp->crp_payload_length = m->m_pkthdr.len - (skip + hlen + alen);
	crp->crp_op = CRYPTO_OP_ENCRYPT;

	/* Generate cipher and ESP IVs. */
	ivp = &crp->crp_iv[0];
	if (SAV_ISCTRORGCM(sav)) {
		/*
		 * See comment in esp_input() for details on the
		 * cipher IV.  A simple per-SA counter stored in
		 * 'cntr' is used as the explicit ESP IV.
		 */
		memcpy(ivp, sav->key_enc->key_data +
		    _KEYLEN(sav->key_enc) - 4, 4);
		be64enc(&ivp[4], cntr);
		if (SAV_ISCTR(sav)) {
			be32enc(&ivp[sav->ivlen + 4], 1);
		}
		m_copyback(m, skip + hlen - sav->ivlen, sav->ivlen, &ivp[4]);
		crp->crp_flags |= CRYPTO_F_IV_SEPARATE;
	} else if (sav->ivlen != 0) {
		arc4rand(ivp, sav->ivlen, 0);
		crp->crp_iv_start = skip + hlen - sav->ivlen;
		m_copyback(m, crp->crp_iv_start, sav->ivlen, ivp);
	}

	/* Callback parameters */
	xd->sp = sp;
	xd->sav = sav;
	xd->idx = idx;
	xd->cryptoid = cryptoid;
	xd->vnet = curvnet;

	/* Crypto operation descriptor. */
	crp->crp_flags |= CRYPTO_F_CBIFSYNC;
	crypto_use_mbuf(crp, m);
	crp->crp_callback = esp_output_cb;
	crp->crp_opaque = xd;

	if (esph) {
		/* Authentication descriptor. */
		crp->crp_op |= CRYPTO_OP_COMPUTE_DIGEST;
		if (SAV_ISGCM(sav))
			crp->crp_aad_length = 8; /* RFC4106 5, SPI + SN */
		else
			crp->crp_aad_length = hlen;

		csp = crypto_get_params(crp->crp_session);
		if (csp->csp_flags & CSP_F_SEPARATE_AAD &&
		    sav->replay != NULL) {
			int aad_skip;

			crp->crp_aad_length += sizeof(seqh);
			crp->crp_aad = malloc(crp->crp_aad_length, M_ESP, M_NOWAIT);
			if (crp->crp_aad == NULL) {
				DPRINTF(("%s: failed to allocate AAD buffer\n",
					 __func__));
				goto crp_aad_fail;
			}

			/* SPI */
			m_copydata(m, skip, SPI_SIZE, crp->crp_aad);
			aad_skip = SPI_SIZE;

			/* ESN */
			bcopy(&seqh, (char *)crp->crp_aad + aad_skip, sizeof(seqh));
			aad_skip += sizeof(seqh);

			/* Rest of aad */
			if (crp->crp_aad_length - aad_skip > 0)
				m_copydata(m, skip + SPI_SIZE,
					   crp->crp_aad_length - aad_skip,
					   (char *)crp->crp_aad + aad_skip);
		} else
			crp->crp_aad_start = skip;

		if (csp->csp_flags & CSP_F_ESN && sav->replay != NULL)
			memcpy(crp->crp_esn, &seqh, sizeof(seqh));

		crp->crp_digest_start = m->m_pkthdr.len - alen;
	}

	if (V_async_crypto)
		return (crypto_dispatch_async(crp, CRYPTO_ASYNC_ORDERED));
	else
		return (crypto_dispatch(crp));

crp_aad_fail:
	free(xd, M_ESP);
xd_fail:
	crypto_freereq(crp);
	ESPSTAT_INC(esps_crypto);
	error = ENOBUFS;
bad:
	if (m)
		m_freem(m);
	key_freesav(&sav);
	key_freesp(&sp);
	return (error);
}
/*
 * ESP output callback from the crypto driver.
 */
static int
esp_output_cb(struct cryptop *crp)
{
	struct xform_data *xd;
	struct secpolicy *sp;
	struct secasvar *sav;
	struct mbuf *m;
	crypto_session_t cryptoid;
	u_int idx;
	int error;

	xd = (struct xform_data *) crp->crp_opaque;
	CURVNET_SET(xd->vnet);
	m = crp->crp_buf.cb_mbuf;
	sp = xd->sp;
	sav = xd->sav;
	idx = xd->idx;
	cryptoid = xd->cryptoid;

	/* Check for crypto errors. */
	if (crp->crp_etype) {
		if (crp->crp_etype == EAGAIN) {
			/* Reset the session ID */
			if (ipsec_updateid(sav, &crp->crp_session, &cryptoid) != 0)
				crypto_freesession(cryptoid);
			xd->cryptoid = crp->crp_session;
			CURVNET_RESTORE();
			return (crypto_dispatch(crp));
		}
		ESPSTAT_INC(esps_noxform);
		DPRINTF(("%s: crypto error %d\n", __func__, crp->crp_etype));
		error = crp->crp_etype;
		m_freem(m);
		goto bad;
	}

	/* Shouldn't happen... */
	if (m == NULL) {
		ESPSTAT_INC(esps_crypto);
		DPRINTF(("%s: bogus returned buffer from crypto\n", __func__));
		error = EINVAL;
		goto bad;
	}
	free(xd, M_ESP);
	free(crp->crp_aad, M_ESP);
	crypto_freereq(crp);
	ESPSTAT_INC(esps_hist[sav->alg_enc]);
	if (sav->tdb_authalgxform != NULL)
		AHSTAT_INC(ahs_hist[sav->alg_auth]);

#ifdef REGRESSION
	/* Emulate man-in-the-middle attack when ipsec_integrity is TRUE. */
	if (V_ipsec_integrity) {
		static unsigned char ipseczeroes[AH_HMAC_MAXHASHLEN];
		const struct auth_hash *esph;

		/*
		 * Corrupt HMAC if we want to test integrity verification of
		 * the other side.
		 */
		esph = sav->tdb_authalgxform;
		if (esph != NULL) {
			int alen;

			alen = xform_ah_authsize(esph);
			m_copyback(m, m->m_pkthdr.len - alen,
			    alen, ipseczeroes);
		}
	}
#endif

	/* NB: m is reclaimed by ipsec_process_done. */
	error = ipsec_process_done(m, sp, sav, idx);
	CURVNET_RESTORE();
	return (error);
bad:
	CURVNET_RESTORE();
	free(xd, M_ESP);
	free(crp->crp_aad, M_ESP);
	crypto_freereq(crp);
	key_freesav(&sav);
	key_freesp(&sp);
	return (error);
}

static struct xformsw esp_xformsw = {
	.xf_type =	XF_ESP,
	.xf_name =	"IPsec ESP",
	.xf_init =	esp_init,
	.xf_cleanup =	esp_cleanup,
	.xf_input =	esp_input,
	.xf_output =	esp_output,
};

SYSINIT(esp_xform_init, SI_SUB_PROTO_DOMAIN, SI_ORDER_MIDDLE,
    xform_attach, &esp_xformsw);
SYSUNINIT(esp_xform_uninit, SI_SUB_PROTO_DOMAIN, SI_ORDER_MIDDLE,
    xform_detach, &esp_xformsw);